]>
Commit | Line | Data |
---|---|---|
f6e6ff79 MT |
1 | #!/usr/bin/python |
2 | ||
f6e6ff79 MT |
3 | import logging |
4 | import os | |
5 | import re | |
f6e6ff79 MT |
6 | import uuid |
7 | ||
f6e6ff79 MT |
8 | import pakfire.packages |
9 | ||
2c909128 | 10 | from . import base |
2c909128 | 11 | from . import logs |
2c909128 MT |
12 | from . import updates |
13 | from . import users | |
14 | ||
e153d3f6 MT |
15 | log = logging.getLogger("builds") |
16 | log.propagate = 1 | |
17 | ||
2c909128 | 18 | from .constants import * |
044a9c43 | 19 | from .decorators import * |
f6e6ff79 | 20 | |
f6e6ff79 | 21 | class Builds(base.Object): |
764b87d2 MT |
22 | def _get_build(self, query, *args): |
23 | res = self.db.get(query, *args) | |
24 | ||
25 | if res: | |
26 | return Build(self.backend, res.id, data=res) | |
27 | ||
28 | def _get_builds(self, query, *args): | |
29 | res = self.db.query(query, *args) | |
30 | ||
31 | for row in res: | |
32 | yield Build(self.backend, row.id, data=row) | |
33 | ||
eedc6432 | 34 | def get_by_id(self, id, data=None): |
326664a5 | 35 | return Build(self.backend, id, data=data) |
f6e6ff79 MT |
36 | |
37 | def get_by_uuid(self, uuid): | |
38 | build = self.db.get("SELECT id FROM builds WHERE uuid = %s LIMIT 1", uuid) | |
39 | ||
40 | if build: | |
41 | return self.get_by_id(build.id) | |
42 | ||
43 | def get_all(self, limit=50): | |
eedc6432 | 44 | query = "SELECT * FROM builds ORDER BY time_created DESC" |
f6e6ff79 MT |
45 | |
46 | if limit: | |
47 | query += " LIMIT %d" % limit | |
48 | ||
eedc6432 | 49 | return [self.get_by_id(b.id, b) for b in self.db.query(query)] |
f6e6ff79 | 50 | |
1104bcbf | 51 | def get_by_user(self, user, type=None): |
f6e6ff79 MT |
52 | args = [] |
53 | conditions = [] | |
54 | ||
55 | if not type or type == "scratch": | |
56 | # On scratch builds the user id equals the owner id. | |
57 | conditions.append("(builds.type = 'scratch' AND owner_id = %s)") | |
58 | args.append(user.id) | |
59 | ||
60 | elif not type or type == "release": | |
61 | pass # TODO | |
62 | ||
eedc6432 | 63 | query = "SELECT builds.* AS id FROM builds \ |
f6e6ff79 MT |
64 | JOIN packages ON builds.pkg_id = packages.id" |
65 | ||
66 | if conditions: | |
67 | query += " WHERE %s" % " AND ".join(conditions) | |
68 | ||
eedc6432 | 69 | query += " ORDER BY builds.time_created DESC" |
f6e6ff79 | 70 | |
eedc6432 | 71 | builds = [] |
f6e6ff79 | 72 | for build in self.db.query(query, *args): |
326664a5 | 73 | build = Build(self.backend, build.id, build) |
eedc6432 MT |
74 | builds.append(build) |
75 | ||
76 | return builds | |
f6e6ff79 | 77 | |
1104bcbf | 78 | def get_by_name(self, name, type=None, user=None, limit=None, offset=None): |
f6e6ff79 MT |
79 | args = [name,] |
80 | conditions = [ | |
81 | "packages.name = %s", | |
82 | ] | |
83 | ||
84 | if type: | |
85 | conditions.append("builds.type = %s") | |
86 | args.append(type) | |
87 | ||
88 | or_conditions = [] | |
f6e6ff79 MT |
89 | if user and not user.is_admin(): |
90 | or_conditions.append("builds.owner_id = %s") | |
91 | args.append(user.id) | |
92 | ||
a15d6139 | 93 | query = "SELECT builds.* AS id FROM builds \ |
f6e6ff79 MT |
94 | JOIN packages ON builds.pkg_id = packages.id" |
95 | ||
96 | if or_conditions: | |
97 | conditions.append(" OR ".join(or_conditions)) | |
98 | ||
99 | if conditions: | |
100 | query += " WHERE %s" % " AND ".join(conditions) | |
101 | ||
a15d6139 MT |
102 | if type == "release": |
103 | query += " ORDER BY packages.name,packages.epoch,packages.version,packages.release,id ASC" | |
104 | elif type == "scratch": | |
105 | query += " ORDER BY time_created DESC" | |
f6e6ff79 | 106 | |
a15d6139 MT |
107 | if limit: |
108 | if offset: | |
109 | query += " LIMIT %s,%s" | |
110 | args.extend([offset, limit]) | |
111 | else: | |
112 | query += " LIMIT %s" | |
113 | args.append(limit) | |
114 | ||
326664a5 | 115 | return [Build(self.backend, b.id, b) for b in self.db.query(query, *args)] |
f6e6ff79 | 116 | |
1104bcbf | 117 | def get_latest_by_name(self, name, type=None): |
2f45327a MT |
118 | query = "\ |
119 | SELECT * FROM builds \ | |
120 | LEFT JOIN builds_latest ON builds.id = builds_latest.build_id \ | |
121 | WHERE builds_latest.package_name = %s" | |
f6e6ff79 MT |
122 | args = [name,] |
123 | ||
2f45327a MT |
124 | if type: |
125 | query += " AND builds_latest.build_type = %s" | |
126 | args.append(type) | |
127 | ||
2f45327a MT |
128 | # Get the last one only. |
129 | # Prefer release builds over scratch builds. | |
130 | query += "\ | |
131 | ORDER BY \ | |
132 | CASE builds.type WHEN 'release' THEN 0 ELSE 1 END, \ | |
133 | builds.time_created DESC \ | |
134 | LIMIT 1" | |
f6e6ff79 | 135 | |
2f45327a | 136 | res = self.db.get(query, *args) |
f6e6ff79 | 137 | |
2f45327a | 138 | if res: |
326664a5 | 139 | return Build(self.backend, res.id, res) |
f6e6ff79 | 140 | |
1104bcbf | 141 | def get_active_builds(self, name): |
fd0e70ec | 142 | query = "\ |
aff0187d MT |
143 | SELECT * FROM builds \ |
144 | LEFT JOIN builds_latest ON builds.id = builds_latest.build_id \ | |
2f83864f MT |
145 | WHERE builds_latest.package_name = %s AND builds.type = %s" |
146 | args = [name, "release"] | |
fd0e70ec | 147 | |
fd0e70ec MT |
148 | builds = [] |
149 | for row in self.db.query(query, *args): | |
326664a5 | 150 | b = Build(self.backend, row.id, row) |
fd0e70ec MT |
151 | builds.append(b) |
152 | ||
153 | # Sort the result. Lastest build first. | |
154 | builds.sort(reverse=True) | |
155 | ||
156 | return builds | |
157 | ||
f6e6ff79 | 158 | def count(self): |
966498de MT |
159 | builds = self.db.get("SELECT COUNT(*) AS count FROM builds") |
160 | if builds: | |
161 | return builds.count | |
f6e6ff79 | 162 | |
f6e6ff79 MT |
163 | def get_obsolete(self, repo=None): |
164 | """ | |
165 | Get all obsoleted builds. | |
166 | ||
167 | If repo is True: which are in any repository. | |
168 | If repo is some Repository object: which are in this repository. | |
169 | """ | |
170 | args = [] | |
171 | ||
172 | if repo is None: | |
173 | query = "SELECT id FROM builds WHERE state = 'obsolete'" | |
174 | ||
175 | else: | |
176 | query = "SELECT build_id AS id FROM repositories_builds \ | |
177 | JOIN builds ON builds.id = repositories_builds.build_id \ | |
178 | WHERE builds.state = 'obsolete'" | |
179 | ||
180 | if repo and not repo is True: | |
181 | query += " AND repositories_builds.repo_id = %s" | |
182 | args.append(repo.id) | |
183 | ||
184 | res = self.db.query(query, *args) | |
185 | ||
186 | builds = [] | |
187 | for build in res: | |
326664a5 | 188 | build = Build(self.backend, build.id) |
f6e6ff79 MT |
189 | builds.append(build) |
190 | ||
191 | return builds | |
192 | ||
e153d3f6 MT |
193 | def create(self, pkg, type="release", owner=None, distro=None): |
194 | assert type in ("release", "scratch", "test") | |
195 | assert distro, "You need to specify the distribution of this build." | |
196 | ||
197 | # Check if scratch build has an owner. | |
198 | if type == "scratch" and not owner: | |
199 | raise Exception, "Scratch builds require an owner" | |
200 | ||
201 | # Set the default priority of this build. | |
202 | if type == "release": | |
203 | priority = 0 | |
204 | ||
205 | elif type == "scratch": | |
206 | priority = 1 | |
207 | ||
208 | elif type == "test": | |
209 | priority = -1 | |
210 | ||
211 | # Create build in database | |
212 | build = self._get_build("INSERT INTO builds(uuid, pkg_id, type, distro_id, priority) \ | |
213 | VALUES(%s, %s, %s, %s, %s) RETURNING *", "%s" % uuid.uuid4(), pkg.id, type, distro.id, priority) | |
214 | ||
215 | # Set the owner of this build | |
216 | if owner: | |
217 | build.owner = owner | |
218 | ||
219 | # Log that the build has been created. | |
220 | build.log("created", user=owner) | |
221 | ||
222 | # Create directory where the files live | |
223 | if not os.path.exists(build.path): | |
224 | os.makedirs(build.path) | |
225 | ||
226 | # Move package file to the directory of the build. | |
227 | build.pkg.move(os.path.join(build.path, "src")) | |
228 | ||
229 | # Generate an update id. | |
230 | build.generate_update_id() | |
231 | ||
232 | # Obsolete all other builds with the same name to track updates. | |
233 | build.obsolete_others() | |
234 | ||
235 | # Search for possible bug IDs in the commit message. | |
236 | build.search_for_bugs() | |
237 | ||
238 | return build | |
239 | ||
240 | def create_from_source_package(self, filename, distro, commit=None, type="release", | |
241 | arches=None, check_for_duplicates=True, owner=None): | |
242 | assert distro | |
243 | ||
244 | # Open the package file to read some basic information. | |
245 | pkg = pakfire.packages.open(None, None, filename) | |
246 | ||
247 | if check_for_duplicates: | |
248 | if distro.has_package(pkg.name, pkg.epoch, pkg.version, pkg.release): | |
249 | log.warning("Duplicate package detected: %s. Skipping." % pkg) | |
250 | return | |
251 | ||
252 | # Open the package and add it to the database | |
253 | pkg = self.backend.packages.create(filename) | |
254 | ||
255 | # Associate the package to the processed commit | |
256 | if commit: | |
257 | pkg.commit = commit | |
258 | ||
259 | # Create a new build object from the package | |
260 | build = self.create(pkg, type=type, owner=owner, distro=distro) | |
261 | ||
262 | # Create all automatic jobs | |
263 | build.create_autojobs(arches=arches) | |
264 | ||
265 | return build | |
266 | ||
1104bcbf | 267 | def get_changelog(self, name, limit=5, offset=0): |
4b1e87c4 MT |
268 | query = "SELECT builds.* FROM builds \ |
269 | JOIN packages ON builds.pkg_id = packages.id \ | |
270 | WHERE \ | |
271 | builds.type = %s \ | |
272 | AND \ | |
273 | packages.name = %s" | |
274 | args = ["release", name,] | |
275 | ||
4b1e87c4 MT |
276 | query += " ORDER BY builds.time_created DESC" |
277 | ||
278 | if limit: | |
279 | if offset: | |
280 | query += " LIMIT %s,%s" | |
281 | args += [offset, limit] | |
282 | else: | |
283 | query += " LIMIT %s" | |
284 | args.append(limit) | |
285 | ||
286 | builds = [] | |
287 | for b in self.db.query(query, *args): | |
326664a5 | 288 | b = Build(self.backend, b.id, b) |
4b1e87c4 MT |
289 | builds.append(b) |
290 | ||
291 | builds.sort(reverse=True) | |
292 | ||
293 | return builds | |
294 | ||
62c7e7cd MT |
295 | def get_comments(self, limit=10, offset=None, user=None): |
296 | query = "SELECT * FROM builds_comments \ | |
297 | JOIN users ON builds_comments.user_id = users.id" | |
298 | args = [] | |
299 | ||
300 | wheres = [] | |
301 | if user: | |
302 | wheres.append("users.id = %s") | |
303 | args.append(user.id) | |
304 | ||
305 | if wheres: | |
306 | query += " WHERE %s" % " AND ".join(wheres) | |
307 | ||
308 | # Sort everything. | |
309 | query += " ORDER BY time_created DESC" | |
310 | ||
311 | # Limits. | |
312 | if limit: | |
313 | if offset: | |
314 | query += " LIMIT %s,%s" | |
315 | args.append(offset) | |
316 | else: | |
317 | query += " LIMIT %s" | |
318 | ||
319 | args.append(limit) | |
320 | ||
321 | comments = [] | |
322 | for comment in self.db.query(query, *args): | |
326664a5 | 323 | comment = logs.CommentLogEntry(self.backend, comment) |
62c7e7cd MT |
324 | comments.append(comment) |
325 | ||
326 | return comments | |
327 | ||
4f90cf84 | 328 | def get_build_times_summary(self, name=None, arch=None): |
bc293d03 MT |
329 | query = "\ |
330 | SELECT \ | |
331 | builds_times.arch AS arch, \ | |
332 | MAX(duration) AS maximum, \ | |
333 | MIN(duration) AS minimum, \ | |
334 | AVG(duration) AS average, \ | |
335 | SUM(duration) AS sum, \ | |
336 | STDDEV_POP(duration) AS stddev \ | |
337 | FROM builds_times \ | |
338 | LEFT JOIN builds ON builds_times.build_id = builds.id \ | |
339 | LEFT JOIN packages ON builds.pkg_id = packages.id" | |
340 | ||
341 | args = [] | |
342 | conditions = [] | |
343 | ||
344 | # Filter for name. | |
345 | if name: | |
346 | conditions.append("packages.name = %s") | |
347 | args.append(name) | |
348 | ||
a90bd9b0 MT |
349 | # Filter by arch. |
350 | if arch: | |
351 | conditions.append("builds_times.arch = %s") | |
352 | args.append(arch) | |
353 | ||
bc293d03 MT |
354 | # Add conditions. |
355 | if conditions: | |
356 | query += " WHERE %s" % " AND ".join(conditions) | |
357 | ||
358 | # Grouping and sorting. | |
359 | query += " GROUP BY arch ORDER BY arch DESC" | |
360 | ||
361 | return self.db.query(query, *args) | |
362 | ||
a90bd9b0 MT |
363 | def get_build_times_by_arch(self, arch, **kwargs): |
364 | kwargs.update({ | |
365 | "arch" : arch, | |
366 | }) | |
367 | ||
368 | build_times = self.get_build_times_summary(**kwargs) | |
369 | if build_times: | |
370 | return build_times[0] | |
371 | ||
f6e6ff79 | 372 | |
326664a5 | 373 | class Build(base.DataObject): |
e153d3f6 MT |
374 | table = "builds" |
375 | ||
f6e6ff79 MT |
376 | def __repr__(self): |
377 | return "<%s id=%s %s>" % (self.__class__.__name__, self.id, self.pkg) | |
378 | ||
326664a5 MT |
379 | def __eq__(self, other): |
380 | if isinstance(other, self.__class__): | |
381 | return self.id == other.id | |
f6e6ff79 | 382 | |
326664a5 MT |
383 | def __lt__(self, other): |
384 | if isinstance(other, self.__class__): | |
385 | return self.pkg < other.pkg | |
f6e6ff79 | 386 | |
764b87d2 MT |
387 | def __iter__(self): |
388 | jobs = self.backend.jobs._get_jobs("SELECT * FROM jobs \ | |
389 | WHERE build_id = %s", self.id) | |
390 | ||
391 | return iter(sorted(jobs)) | |
392 | ||
f6e6ff79 MT |
393 | def delete(self): |
394 | """ | |
a08fbdef MT |
395 | Deletes this build including all jobs, |
396 | packages and the source package. | |
f6e6ff79 MT |
397 | """ |
398 | # If the build is in a repository, we need to remove it. | |
399 | if self.repo: | |
400 | self.repo.rem_build(self) | |
401 | ||
a08fbdef MT |
402 | # Delete all release jobs |
403 | for job in self.jobs: | |
f6e6ff79 MT |
404 | job.delete() |
405 | ||
a08fbdef MT |
406 | # Delete all test jobs |
407 | for job in self.test_jobs: | |
408 | job.delete() | |
f6e6ff79 | 409 | |
a08fbdef | 410 | # Deleted all associated bugs |
f6e6ff79 MT |
411 | self.db.execute("DELETE FROM builds_bugs WHERE build_id = %s", self.id) |
412 | ||
a08fbdef | 413 | # Delete all comments |
f6e6ff79 MT |
414 | self.db.execute("DELETE FROM builds_comments WHERE build_id = %s", self.id) |
415 | ||
a08fbdef | 416 | # Delete the repository history |
f6e6ff79 MT |
417 | self.db.execute("DELETE FROM repositories_history WHERE build_id = %s", self.id) |
418 | ||
a08fbdef | 419 | # Delete all watchers |
f6e6ff79 MT |
420 | self.db.execute("DELETE FROM builds_watchers WHERE build_id = %s", self.id) |
421 | ||
a08fbdef MT |
422 | # Delete build history |
423 | self.db.execute("DELETE FROM builds_history WHERE build_id = %s", self.id) | |
424 | ||
425 | # Delete the build itself. | |
426 | self.db.execute("DELETE FROM builds WHERE id = %s", self.id) | |
427 | ||
428 | # Delete source package | |
429 | self.pkg.delete() | |
430 | ||
f6e6ff79 MT |
431 | @property |
432 | def info(self): | |
433 | """ | |
434 | A set of information that is sent to the XMLRPC client. | |
435 | """ | |
436 | return { "uuid" : self.uuid } | |
437 | ||
438 | def log(self, action, user=None, bug_id=None): | |
439 | user_id = None | |
440 | if user: | |
441 | user_id = user.id | |
442 | ||
443 | self.db.execute("INSERT INTO builds_history(build_id, action, user_id, time, bug_id) \ | |
444 | VALUES(%s, %s, %s, NOW(), %s)", self.id, action, user_id, bug_id) | |
445 | ||
446 | @property | |
447 | def uuid(self): | |
448 | """ | |
449 | The UUID of this build. | |
450 | """ | |
451 | return self.data.uuid | |
452 | ||
326664a5 | 453 | @lazy_property |
f6e6ff79 MT |
454 | def pkg(self): |
455 | """ | |
456 | Get package that is to be built in the build. | |
457 | """ | |
326664a5 | 458 | return self.backend.packages.get_by_id(self.data.pkg_id) |
f6e6ff79 MT |
459 | |
460 | @property | |
461 | def name(self): | |
462 | return "%s-%s" % (self.pkg.name, self.pkg.friendly_version) | |
463 | ||
464 | @property | |
465 | def type(self): | |
466 | """ | |
467 | The type of this build. | |
468 | """ | |
469 | return self.data.type | |
470 | ||
e153d3f6 | 471 | def get_owner(self): |
f6e6ff79 MT |
472 | """ |
473 | The owner of this build. | |
474 | """ | |
e153d3f6 MT |
475 | if self.data.owner_id: |
476 | return self.backend.users.get_by_id(self.data.owner_id) | |
f6e6ff79 | 477 | |
e153d3f6 MT |
478 | def set_owner(self, owner): |
479 | if owner: | |
480 | self._set_attribute("owner_id", owner.id) | |
481 | else: | |
482 | self._set_attribute("owner_id", None) | |
f6e6ff79 | 483 | |
e153d3f6 | 484 | owner = lazy_property(get_owner, set_owner) |
f6e6ff79 | 485 | |
326664a5 | 486 | @lazy_property |
f6e6ff79 | 487 | def distro(self): |
326664a5 | 488 | return self.backend.distros.get_by_id(self.data.distro_id) |
f6e6ff79 MT |
489 | |
490 | @property | |
491 | def user(self): | |
492 | if self.type == "scratch": | |
493 | return self.owner | |
494 | ||
495 | def get_depends_on(self): | |
326664a5 MT |
496 | if self.data.depends_on: |
497 | return self.backend.builds.get_by_id(self.data.depends_on) | |
f6e6ff79 MT |
498 | |
499 | def set_depends_on(self, build): | |
326664a5 | 500 | self._set_attribute("depends_on", build.id) |
f6e6ff79 | 501 | |
326664a5 | 502 | depends_on = lazy_property(get_depends_on, set_depends_on) |
f6e6ff79 MT |
503 | |
504 | @property | |
505 | def created(self): | |
506 | return self.data.time_created | |
507 | ||
eedc6432 MT |
508 | @property |
509 | def date(self): | |
510 | return self.created.date() | |
511 | ||
326664a5 | 512 | @lazy_property |
eedc6432 MT |
513 | def size(self): |
514 | """ | |
515 | Returns the size on disk of this build. | |
516 | """ | |
517 | s = 0 | |
518 | ||
519 | # Add the source package. | |
520 | if self.pkg: | |
521 | s += self.pkg.size | |
522 | ||
523 | # Add all jobs. | |
524 | s += sum((j.size for j in self.jobs)) | |
525 | ||
526 | return s | |
527 | ||
f6e6ff79 MT |
528 | def auto_update_state(self): |
529 | """ | |
530 | Check if the state of this build can be updated and perform | |
531 | the change if possible. | |
532 | """ | |
533 | # Do not change the broken/obsolete state automatically. | |
534 | if self.state in ("broken", "obsolete"): | |
535 | return | |
536 | ||
537 | if self.repo and self.repo.type == "stable": | |
538 | self.update_state("stable") | |
539 | return | |
540 | ||
541 | # If any of the build jobs are finished, the build will be put in testing | |
542 | # state. | |
543 | for job in self.jobs: | |
544 | if job.state == "finished": | |
545 | self.update_state("testing") | |
546 | break | |
547 | ||
548 | def update_state(self, state, user=None, remove=False): | |
549 | assert state in ("stable", "testing", "obsolete", "broken") | |
550 | ||
326664a5 | 551 | self._set_attribute("state", state) |
f6e6ff79 MT |
552 | |
553 | # In broken state, the removal from the repository is forced and | |
554 | # all jobs that are not finished yet will be aborted. | |
555 | if state == "broken": | |
556 | remove = True | |
557 | ||
558 | for job in self.jobs: | |
559 | if job.state in ("new", "pending", "running", "dependency_error"): | |
560 | job.state = "aborted" | |
561 | ||
562 | # If this build is in a repository, it will leave it. | |
563 | if remove and self.repo: | |
564 | self.repo.rem_build(self) | |
565 | ||
566 | # If a release build is now in testing state, we put it into the | |
567 | # first repository of the distribution. | |
568 | elif self.type == "release" and state == "testing": | |
569 | # If the build is not in a repository, yet and if there is | |
570 | # a first repository, we put the build there. | |
571 | if not self.repo and self.distro.first_repo: | |
572 | self.distro.first_repo.add_build(self, user=user) | |
573 | ||
574 | @property | |
575 | def state(self): | |
576 | return self.data.state | |
577 | ||
9fa1787c MT |
578 | def is_broken(self): |
579 | return self.state == "broken" | |
580 | ||
f6e6ff79 MT |
581 | def obsolete_others(self): |
582 | if not self.type == "release": | |
583 | return | |
584 | ||
326664a5 | 585 | for build in self.backend.builds.get_by_name(self.pkg.name, type="release"): |
f6e6ff79 MT |
586 | # Don't modify ourself. |
587 | if self.id == build.id: | |
588 | continue | |
589 | ||
590 | # Don't touch broken builds. | |
591 | if build.state in ("obsolete", "broken"): | |
592 | continue | |
593 | ||
594 | # Obsolete the build. | |
595 | build.update_state("obsolete") | |
596 | ||
597 | def set_severity(self, severity): | |
326664a5 | 598 | self._set_attribute("severity", severity) |
f6e6ff79 MT |
599 | |
600 | def get_severity(self): | |
601 | return self.data.severity | |
602 | ||
603 | severity = property(get_severity, set_severity) | |
604 | ||
326664a5 | 605 | @lazy_property |
f6e6ff79 MT |
606 | def commit(self): |
607 | if self.pkg and self.pkg.commit: | |
608 | return self.pkg.commit | |
609 | ||
326664a5 MT |
610 | def update_message(self, message): |
611 | self._set_attribute("message", message) | |
f6e6ff79 MT |
612 | |
613 | def has_perm(self, user): | |
614 | """ | |
615 | Check, if the given user has the right to perform administrative | |
616 | operations on this build. | |
617 | """ | |
618 | if user is None: | |
619 | return False | |
620 | ||
621 | if user.is_admin(): | |
622 | return True | |
623 | ||
624 | # Check if the user is allowed to manage packages from the critical path. | |
625 | if self.critical_path and not user.has_perm("manage_critical_path"): | |
626 | return False | |
627 | ||
628 | # Search for maintainers... | |
629 | ||
630 | # Scratch builds. | |
631 | if self.type == "scratch": | |
632 | # The owner of a scratch build has the right to do anything with it. | |
633 | if self.owner_id == user.id: | |
634 | return True | |
635 | ||
636 | # Release builds. | |
637 | elif self.type == "release": | |
638 | # The maintainer also is allowed to manage the build. | |
639 | if self.pkg.maintainer == user: | |
640 | return True | |
641 | ||
642 | # Deny permission for all other cases. | |
643 | return False | |
644 | ||
645 | @property | |
646 | def message(self): | |
647 | message = "" | |
648 | ||
649 | if self.data.message: | |
650 | message = self.data.message | |
651 | ||
652 | elif self.commit: | |
653 | if self.commit.message: | |
654 | message = "\n".join((self.commit.subject, self.commit.message)) | |
655 | else: | |
656 | message = self.commit.subject | |
657 | ||
658 | prefix = "%s: " % self.pkg.name | |
659 | if message.startswith(prefix): | |
660 | message = message[len(prefix):] | |
661 | ||
662 | return message | |
663 | ||
664 | def get_priority(self): | |
665 | return self.data.priority | |
666 | ||
667 | def set_priority(self, priority): | |
668 | assert priority in (-2, -1, 0, 1, 2) | |
669 | ||
326664a5 | 670 | self._set_attribute("priority", priority) |
f6e6ff79 MT |
671 | |
672 | priority = property(get_priority, set_priority) | |
673 | ||
674 | @property | |
675 | def path(self): | |
676 | path = [] | |
677 | if self.type == "scratch": | |
678 | path.append(BUILD_SCRATCH_DIR) | |
679 | path.append(self.uuid) | |
680 | ||
681 | elif self.type == "release": | |
682 | path.append(BUILD_RELEASE_DIR) | |
683 | path.append("%s/%s-%s-%s" % \ | |
684 | (self.pkg.name, self.pkg.epoch, self.pkg.version, self.pkg.release)) | |
685 | ||
686 | else: | |
687 | raise Exception, "Unknown build type: %s" % self.type | |
688 | ||
689 | return os.path.join(*path) | |
690 | ||
691 | @property | |
692 | def source_filename(self): | |
693 | return os.path.basename(self.pkg.path) | |
694 | ||
695 | @property | |
696 | def download_prefix(self): | |
326664a5 | 697 | return "/".join((self.backend.settings.get("download_baseurl"), "packages")) |
f6e6ff79 MT |
698 | |
699 | @property | |
700 | def source_download(self): | |
701 | return "/".join((self.download_prefix, self.pkg.path)) | |
702 | ||
703 | @property | |
704 | def source_hash_sha512(self): | |
705 | return self.pkg.hash_sha512 | |
706 | ||
707 | @property | |
708 | def link(self): | |
709 | # XXX maybe this should rather live in a uimodule. | |
710 | # zlib-1.2.3-2.ip3 [src, i686, blah...] | |
711 | s = """<a class="state_%s %s" href="/build/%s">%s</a>""" % \ | |
712 | (self.state, self.type, self.uuid, self.name) | |
713 | ||
714 | s_jobs = [] | |
715 | for job in self.jobs: | |
716 | s_jobs.append("""<a class="state_%s %s" href="/job/%s">%s</a>""" % \ | |
8f04a9e9 | 717 | (job.state, "test" if job.test else "build", job.uuid, job.arch)) |
f6e6ff79 MT |
718 | |
719 | if s_jobs: | |
720 | s += " [%s]" % ", ".join(s_jobs) | |
721 | ||
722 | return s | |
723 | ||
724 | @property | |
725 | def supported_arches(self): | |
726 | return self.pkg.supported_arches | |
727 | ||
728 | @property | |
729 | def critical_path(self): | |
730 | return self.pkg.critical_path | |
731 | ||
e67c036e MT |
732 | def _get_jobs(self, query, *args): |
733 | ret = [] | |
734 | for job in self.backend.jobs._get_jobs(query, *args): | |
735 | job.build = self | |
736 | ret.append(job) | |
737 | ||
738 | return ret | |
739 | ||
326664a5 | 740 | @lazy_property |
f6e6ff79 MT |
741 | def jobs(self): |
742 | """ | |
743 | Get a list of all build jobs that are in this build. | |
744 | """ | |
e67c036e | 745 | return self._get_jobs("SELECT * FROM jobs \ |
a08fbdef | 746 | WHERE build_id = %s AND test IS FALSE", self.id) |
f6e6ff79 MT |
747 | |
748 | @property | |
749 | def test_jobs(self): | |
e67c036e | 750 | return self._get_jobs("SELECT * FROM jobs \ |
a08fbdef | 751 | WHERE build_id = %s AND test IS TRUE", self.id) |
f6e6ff79 MT |
752 | |
753 | @property | |
754 | def all_jobs_finished(self): | |
755 | ret = True | |
756 | ||
757 | for job in self.jobs: | |
758 | if not job.state == "finished": | |
759 | ret = False | |
760 | break | |
761 | ||
762 | return ret | |
763 | ||
8f04a9e9 | 764 | def create_autojobs(self, arches=None, **kwargs): |
f6e6ff79 MT |
765 | jobs = [] |
766 | ||
767 | # Arches may be passed to this function. If not we use all arches | |
768 | # this package supports. | |
769 | if arches is None: | |
770 | arches = self.supported_arches | |
771 | ||
772 | # Create a new job for every given archirecture. | |
326664a5 | 773 | for arch in self.backend.arches.expand(arches): |
e153d3f6 MT |
774 | # Don't create jobs for src |
775 | if arch == "src": | |
f6e6ff79 MT |
776 | continue |
777 | ||
8f04a9e9 | 778 | job = self.add_job(arch, **kwargs) |
f6e6ff79 MT |
779 | jobs.append(job) |
780 | ||
781 | # Return all newly created jobs. | |
782 | return jobs | |
783 | ||
8f04a9e9 MT |
784 | def add_job(self, arch, **kwargs): |
785 | job = self.backend.jobs.create(self, arch, **kwargs) | |
f6e6ff79 MT |
786 | |
787 | # Add new job to cache. | |
326664a5 | 788 | self.jobs.append(job) |
f6e6ff79 MT |
789 | |
790 | return job | |
791 | ||
792 | ## Update stuff | |
793 | ||
794 | @property | |
795 | def update_id(self): | |
796 | if not self.type == "release": | |
797 | return | |
798 | ||
799 | # Generate an update ID if none does exist, yet. | |
800 | self.generate_update_id() | |
801 | ||
802 | s = [ | |
803 | "%s" % self.distro.name.replace(" ", "").upper(), | |
804 | "%04d" % (self.data.update_year or 0), | |
805 | "%04d" % (self.data.update_num or 0), | |
806 | ] | |
807 | ||
808 | return "-".join(s) | |
809 | ||
810 | def generate_update_id(self): | |
811 | if not self.type == "release": | |
812 | return | |
813 | ||
814 | if self.data.update_num: | |
815 | return | |
816 | ||
817 | update = self.db.get("SELECT update_num AS num FROM builds \ | |
e153d3f6 | 818 | WHERE update_year = EXTRACT(year FROM NOW()) ORDER BY update_num DESC LIMIT 1") |
f6e6ff79 MT |
819 | |
820 | if update: | |
821 | update_num = update.num + 1 | |
822 | else: | |
823 | update_num = 1 | |
824 | ||
e153d3f6 | 825 | self.db.execute("UPDATE builds SET update_year = EXTRACT(year FROM NOW()), update_num = %s \ |
f6e6ff79 MT |
826 | WHERE id = %s", update_num, self.id) |
827 | ||
828 | ## Comment stuff | |
829 | ||
830 | def get_comments(self, limit=10, offset=0): | |
831 | query = "SELECT * FROM builds_comments \ | |
832 | JOIN users ON builds_comments.user_id = users.id \ | |
833 | WHERE build_id = %s ORDER BY time_created ASC" | |
834 | ||
835 | comments = [] | |
836 | for comment in self.db.query(query, self.id): | |
326664a5 | 837 | comment = logs.CommentLogEntry(self.backend, comment) |
f6e6ff79 MT |
838 | comments.append(comment) |
839 | ||
840 | return comments | |
841 | ||
326664a5 | 842 | def add_comment(self, user, text, score): |
f6e6ff79 MT |
843 | # Add the new comment to the database. |
844 | id = self.db.execute("INSERT INTO \ | |
d31d17af | 845 | builds_comments(build_id, user_id, text, score, time_created) \ |
f6e6ff79 | 846 | VALUES(%s, %s, %s, %s, NOW())", |
326664a5 | 847 | self.id, user.id, text, score) |
f6e6ff79 | 848 | |
d31d17af | 849 | # Update the score cache |
326664a5 | 850 | self.score += score |
f6e6ff79 MT |
851 | |
852 | # Send the new comment to all watchers and stuff. | |
853 | self.send_comment_message(id) | |
854 | ||
855 | # Return the ID of the newly created comment. | |
856 | return id | |
857 | ||
326664a5 | 858 | @lazy_property |
f6e6ff79 | 859 | def score(self): |
d31d17af | 860 | res = self.db.get("SELECT SUM(score) AS score \ |
326664a5 | 861 | FROM builds_comments WHERE build_id = %s", self.id) |
f6e6ff79 | 862 | |
326664a5 | 863 | return res.score or 0 |
f6e6ff79 | 864 | |
f6e6ff79 MT |
865 | def get_commenters(self): |
866 | users = self.db.query("SELECT DISTINCT users.id AS id FROM builds_comments \ | |
867 | JOIN users ON builds_comments.user_id = users.id \ | |
868 | WHERE builds_comments.build_id = %s AND NOT users.deleted = 'Y' \ | |
869 | AND NOT users.activated = 'Y' ORDER BY users.id", self.id) | |
870 | ||
326664a5 | 871 | return [users.User(self.backend, u.id) for u in users] |
f6e6ff79 MT |
872 | |
873 | def send_comment_message(self, comment_id): | |
874 | comment = self.db.get("SELECT * FROM builds_comments WHERE id = %s", | |
875 | comment_id) | |
876 | ||
877 | assert comment | |
878 | assert comment.build_id == self.id | |
879 | ||
880 | # Get user who wrote the comment. | |
326664a5 | 881 | user = self.backend.users.get_by_id(comment.user_id) |
f6e6ff79 MT |
882 | |
883 | format = { | |
884 | "build_name" : self.name, | |
885 | "user_name" : user.realname, | |
886 | } | |
887 | ||
888 | # XXX create beautiful message | |
889 | ||
326664a5 | 890 | self.backend.messages.send_to_all(self.message_recipients, |
f6e6ff79 MT |
891 | N_("%(user_name)s commented on %(build_name)s"), |
892 | comment.text, format) | |
893 | ||
894 | ## Logging stuff | |
895 | ||
896 | def get_log(self, comments=True, repo=True, limit=None): | |
897 | entries = [] | |
898 | ||
fd681905 | 899 | # Created entry. |
326664a5 | 900 | created_entry = logs.CreatedLogEntry(self.backend, self) |
fd681905 MT |
901 | entries.append(created_entry) |
902 | ||
f6e6ff79 MT |
903 | if comments: |
904 | entries += self.get_comments(limit=limit) | |
905 | ||
906 | if repo: | |
907 | entries += self.get_repo_moves(limit=limit) | |
908 | ||
909 | # Sort all entries in chronological order. | |
910 | entries.sort() | |
911 | ||
912 | if limit: | |
913 | entries = entries[:limit] | |
914 | ||
915 | return entries | |
916 | ||
917 | ## Watchers stuff | |
918 | ||
919 | def get_watchers(self): | |
fe8e7f02 | 920 | query = self.db.query("SELECT DISTINCT users.id AS id FROM builds_watchers \ |
f6e6ff79 MT |
921 | JOIN users ON builds_watchers.user_id = users.id \ |
922 | WHERE builds_watchers.build_id = %s AND NOT users.deleted = 'Y' \ | |
923 | AND users.activated = 'Y' ORDER BY users.id", self.id) | |
924 | ||
326664a5 | 925 | return [users.User(self.backend, u.id) for u in query] |
f6e6ff79 MT |
926 | |
927 | def add_watcher(self, user): | |
928 | # Don't add a user twice. | |
929 | if user in self.get_watchers(): | |
930 | return | |
931 | ||
932 | self.db.execute("INSERT INTO builds_watchers(build_id, user_id) \ | |
933 | VALUES(%s, %s)", self.id, user.id) | |
934 | ||
935 | @property | |
936 | def message_recipients(self): | |
937 | ret = [] | |
938 | ||
939 | for watcher in self.get_watchers(): | |
940 | ret.append("%s <%s>" % (watcher.realname, watcher.email)) | |
941 | ||
942 | return ret | |
943 | ||
944 | @property | |
945 | def update(self): | |
946 | if self._update is None: | |
947 | update = self.db.get("SELECT update_id AS id FROM updates_builds \ | |
948 | WHERE build_id = %s", self.id) | |
949 | ||
950 | if update: | |
326664a5 | 951 | self._update = updates.Update(self.backend, update.id) |
f6e6ff79 MT |
952 | |
953 | return self._update | |
954 | ||
326664a5 | 955 | @lazy_property |
f6e6ff79 | 956 | def repo(self): |
326664a5 MT |
957 | res = self.db.get("SELECT repo_id FROM repositories_builds \ |
958 | WHERE build_id = %s", self.id) | |
f6e6ff79 | 959 | |
326664a5 MT |
960 | if res: |
961 | return self.backend.repos.get_by_id(res.repo_id) | |
f6e6ff79 MT |
962 | |
963 | def get_repo_moves(self, limit=None): | |
964 | query = "SELECT * FROM repositories_history \ | |
965 | WHERE build_id = %s ORDER BY time ASC" | |
966 | ||
967 | actions = [] | |
968 | for action in self.db.query(query, self.id): | |
326664a5 | 969 | action = logs.RepositoryLogEntry(self.backend, action) |
f6e6ff79 MT |
970 | actions.append(action) |
971 | ||
972 | return actions | |
973 | ||
974 | @property | |
975 | def is_loose(self): | |
976 | if self.repo: | |
977 | return False | |
978 | ||
979 | return True | |
980 | ||
981 | @property | |
982 | def repo_time(self): | |
983 | repo = self.db.get("SELECT time_added FROM repositories_builds \ | |
984 | WHERE build_id = %s", self.id) | |
985 | ||
986 | if repo: | |
987 | return repo.time_added | |
988 | ||
989 | def get_auto_move(self): | |
990 | return self.data.auto_move == "Y" | |
991 | ||
992 | def set_auto_move(self, state): | |
326664a5 | 993 | self._set_attribute("auto_move", state) |
f6e6ff79 MT |
994 | |
995 | auto_move = property(get_auto_move, set_auto_move) | |
996 | ||
997 | @property | |
998 | def can_move_forward(self): | |
999 | if not self.repo: | |
1000 | return False | |
1001 | ||
1002 | # If there is no next repository, we cannot move anything. | |
d629da45 | 1003 | if not self.repo.next: |
f6e6ff79 MT |
1004 | return False |
1005 | ||
1006 | # If the needed amount of score is reached, we can move forward. | |
d629da45 | 1007 | if self.score >= self.repo.next.score_needed: |
f6e6ff79 MT |
1008 | return True |
1009 | ||
1010 | # If the repository does not require a minimal time, | |
1011 | # we can move forward immediately. | |
1012 | if not self.repo.time_min: | |
1013 | return True | |
1014 | ||
1015 | query = self.db.get("SELECT NOW() - time_added AS duration FROM repositories_builds \ | |
1016 | WHERE build_id = %s", self.id) | |
1017 | duration = query.duration | |
1018 | ||
1019 | if duration >= self.repo.time_min: | |
1020 | return True | |
1021 | ||
1022 | return False | |
1023 | ||
1024 | ## Bugs | |
1025 | ||
1026 | def get_bug_ids(self): | |
1027 | query = self.db.query("SELECT bug_id FROM builds_bugs \ | |
1028 | WHERE build_id = %s", self.id) | |
1029 | ||
1030 | return [b.bug_id for b in query] | |
1031 | ||
1032 | def add_bug(self, bug_id, user=None, log=True): | |
1033 | # Check if this bug is already in the list of bugs. | |
1034 | if bug_id in self.get_bug_ids(): | |
1035 | return | |
1036 | ||
1037 | self.db.execute("INSERT INTO builds_bugs(build_id, bug_id) \ | |
1038 | VALUES(%s, %s)", self.id, bug_id) | |
1039 | ||
1040 | # Log the event. | |
1041 | if log: | |
1042 | self.log("bug_added", user=user, bug_id=bug_id) | |
1043 | ||
1044 | def rem_bug(self, bug_id, user=None, log=True): | |
1045 | self.db.execute("DELETE FROM builds_bugs WHERE build_id = %s AND \ | |
1046 | bug_id = %s", self.id, bug_id) | |
1047 | ||
1048 | # Log the event. | |
1049 | if log: | |
1050 | self.log("bug_removed", user=user, bug_id=bug_id) | |
1051 | ||
1052 | def search_for_bugs(self): | |
1053 | if not self.commit: | |
1054 | return | |
1055 | ||
1056 | pattern = re.compile(r"(bug\s?|#)(\d+)") | |
1057 | ||
1058 | for txt in (self.commit.subject, self.commit.message): | |
1059 | for bug in re.finditer(pattern, txt): | |
1060 | try: | |
1061 | bugid = int(bug.group(2)) | |
1062 | except ValueError: | |
1063 | continue | |
1064 | ||
1065 | # Check if a bug with the given ID exists in BZ. | |
326664a5 | 1066 | bug = self.backend.bugzilla.get_bug(bugid) |
f6e6ff79 MT |
1067 | if not bug: |
1068 | continue | |
1069 | ||
1070 | self.add_bug(bugid) | |
1071 | ||
1072 | def get_bugs(self): | |
1073 | bugs = [] | |
1074 | for bug_id in self.get_bug_ids(): | |
326664a5 | 1075 | bug = self.backend.bugzilla.get_bug(bug_id) |
f6e6ff79 MT |
1076 | if not bug: |
1077 | continue | |
1078 | ||
1079 | bugs.append(bug) | |
1080 | ||
1081 | return bugs | |
1082 | ||
1083 | def _update_bugs_helper(self, repo): | |
1084 | """ | |
1085 | This function takes a new status and generates messages that | |
1086 | are appended to all bugs. | |
1087 | """ | |
1088 | try: | |
1089 | kwargs = BUG_MESSAGES[repo.type].copy() | |
1090 | except KeyError: | |
1091 | return | |
1092 | ||
326664a5 | 1093 | baseurl = self.backend.settings.get("baseurl", "") |
f6e6ff79 MT |
1094 | args = { |
1095 | "build_url" : "%s/build/%s" % (baseurl, self.uuid), | |
1096 | "distro_name" : self.distro.name, | |
1097 | "package_name" : self.name, | |
1098 | "repo_name" : repo.name, | |
1099 | } | |
1100 | kwargs["comment"] = kwargs["comment"] % args | |
1101 | ||
1102 | self.update_bugs(**kwargs) | |
1103 | ||
	def _update_bug(self, bug_id, status=None, resolution=None, comment=None):
		# Record the requested status/resolution/comment change for one bug
		# in the updates table; "time" is set to the database's NOW().
		self.db.execute("INSERT INTO builds_bugs_updates(bug_id, status, resolution, comment, time) \
			VALUES(%s, %s, %s, %s, NOW())", bug_id, status, resolution, comment)
1107 | ||
1108 | def update_bugs(self, status, resolution=None, comment=None): | |
1109 | # Update all bugs linked to this build. | |
1110 | for bug_id in self.get_bug_ids(): | |
1111 | self._update_bug(bug_id, status=status, resolution=resolution, comment=comment) |