13 import pakfire
.packages
17 from . import builders
19 from . import packages
20 from . import repository
24 log
= logging
.getLogger("builds")
27 from .constants
import *
28 from .decorators
import *
30 class Builds(base
.Object
):
31 def _get_build(self
, query
, *args
):
32 res
= self
.db
.get(query
, *args
)
35 return Build(self
.backend
, res
.id, data
=res
)
37 def _get_builds(self
, query
, *args
):
38 res
= self
.db
.query(query
, *args
)
41 yield Build(self
.backend
, row
.id, data
=row
)
def get_by_id(self, id, data=None):
	"""
	Return a Build object for the given database ID.

	data may carry an already-fetched database row so the Build
	does not need to query the database again.
	"""
	build = Build(self.backend, id, data=data)

	return build
46 def get_by_uuid(self
, uuid
):
47 build
= self
.db
.get("SELECT id FROM builds WHERE uuid = %s LIMIT 1", uuid
)
50 return self
.get_by_id(build
.id)
52 def get_all(self
, limit
=50):
53 query
= "SELECT * FROM builds ORDER BY time_created DESC"
56 query
+= " LIMIT %d" % limit
58 return [self
.get_by_id(b
.id, b
) for b
in self
.db
.query(query
)]
60 def get_by_user(self
, user
, type=None, public
=None):
64 if not type or type == "scratch":
65 # On scratch builds the user id equals the owner id.
66 conditions
.append("(builds.type = 'scratch' AND owner_id = %s)")
69 elif not type or type == "release":
73 conditions
.append("public = 'Y'")
75 conditions
.append("public = 'N'")
77 query
= "SELECT builds.* AS id FROM builds \
78 JOIN packages ON builds.pkg_id = packages.id"
81 query
+= " WHERE %s" % " AND ".join(conditions
)
83 query
+= " ORDER BY builds.time_created DESC"
86 for build
in self
.db
.query(query
, *args
):
87 build
= Build(self
.backend
, build
.id, build
)
92 def get_by_name(self
, name
, type=None, public
=None, user
=None, limit
=None, offset
=None):
99 conditions
.append("builds.type = %s")
104 or_conditions
.append("public = 'Y'")
105 elif public
is False:
106 or_conditions
.append("public = 'N'")
108 if user
and not user
.is_admin():
109 or_conditions
.append("builds.owner_id = %s")
112 query
= "SELECT builds.* AS id FROM builds \
113 JOIN packages ON builds.pkg_id = packages.id"
116 conditions
.append(" OR ".join(or_conditions
))
119 query
+= " WHERE %s" % " AND ".join(conditions
)
121 if type == "release":
122 query
+= " ORDER BY packages.name,packages.epoch,packages.version,packages.release,id ASC"
123 elif type == "scratch":
124 query
+= " ORDER BY time_created DESC"
128 query
+= " LIMIT %s,%s"
129 args
.extend([offset
, limit
])
134 return [Build(self
.backend
, b
.id, b
) for b
in self
.db
.query(query
, *args
)]
136 def get_latest_by_name(self
, name
, type=None, public
=None):
138 SELECT * FROM builds \
139 LEFT JOIN builds_latest ON builds.id = builds_latest.build_id \
140 WHERE builds_latest.package_name = %s"
144 query
+= " AND builds_latest.build_type = %s"
148 query
+= " AND builds.public = %s"
150 elif public
is False:
151 query
+= " AND builds.public = %s"
154 # Get the last one only.
155 # Prefer release builds over scratch builds.
158 CASE builds.type WHEN 'release' THEN 0 ELSE 1 END, \
159 builds.time_created DESC \
162 res
= self
.db
.get(query
, *args
)
165 return Build(self
.backend
, res
.id, res
)
167 def get_active_builds(self
, name
, public
=None):
169 SELECT * FROM builds \
170 LEFT JOIN builds_latest ON builds.id = builds_latest.build_id \
171 WHERE builds_latest.package_name = %s AND builds.type = %s"
172 args
= [name
, "release"]
175 query
+= " AND builds.public = %s"
177 elif public
is False:
178 query
+= " AND builds.public = %s"
182 for row
in self
.db
.query(query
, *args
):
183 b
= Build(self
.backend
, row
.id, row
)
186 # Sort the result. Lastest build first.
187 builds
.sort(reverse
=True)
192 builds
= self
.db
.get("SELECT COUNT(*) AS count FROM builds")
196 def get_obsolete(self
, repo
=None):
198 Get all obsoleted builds.
200 If repo is True: which are in any repository.
201 If repo is some Repository object: which are in this repository.
206 query
= "SELECT id FROM builds WHERE state = 'obsolete'"
209 query
= "SELECT build_id AS id FROM repositories_builds \
210 JOIN builds ON builds.id = repositories_builds.build_id \
211 WHERE builds.state = 'obsolete'"
213 if repo
and not repo
is True:
214 query
+= " AND repositories_builds.repo_id = %s"
217 res
= self
.db
.query(query
, *args
)
221 build
= Build(self
.backend
, build
.id)
226 def create(self
, pkg
, type="release", owner
=None, distro
=None):
227 assert type in ("release", "scratch", "test")
228 assert distro
, "You need to specify the distribution of this build."
230 # Check if scratch build has an owner.
231 if type == "scratch" and not owner
:
232 raise Exception, "Scratch builds require an owner"
234 # Set the default priority of this build.
235 if type == "release":
238 elif type == "scratch":
244 # Create build in database
245 build
= self
._get
_build
("INSERT INTO builds(uuid, pkg_id, type, distro_id, priority) \
246 VALUES(%s, %s, %s, %s, %s) RETURNING *", "%s" % uuid
.uuid4(), pkg
.id, type, distro
.id, priority
)
248 # Set the owner of this build
252 # Log that the build has been created.
253 build
.log("created", user
=owner
)
255 # Create directory where the files live
256 if not os
.path
.exists(build
.path
):
257 os
.makedirs(build
.path
)
259 # Move package file to the directory of the build.
260 build
.pkg
.move(os
.path
.join(build
.path
, "src"))
262 # Generate an update id.
263 build
.generate_update_id()
265 # Obsolete all other builds with the same name to track updates.
266 build
.obsolete_others()
268 # Search for possible bug IDs in the commit message.
269 build
.search_for_bugs()
273 def create_from_source_package(self
, filename
, distro
, commit
=None, type="release",
274 arches
=None, check_for_duplicates
=True, owner
=None):
277 # Open the package file to read some basic information.
278 pkg
= pakfire
.packages
.open(None, None, filename
)
280 if check_for_duplicates
:
281 if distro
.has_package(pkg
.name
, pkg
.epoch
, pkg
.version
, pkg
.release
):
282 log
.warning("Duplicate package detected: %s. Skipping." % pkg
)
285 # Open the package and add it to the database
286 pkg
= self
.backend
.packages
.create(filename
)
288 # Associate the package to the processed commit
292 # Create a new build object from the package
293 build
= self
.create(pkg
, type=type, owner
=owner
, distro
=distro
)
295 # Create all automatic jobs
296 build
.create_autojobs(arches
=arches
)
300 def get_changelog(self
, name
, public
=None, limit
=5, offset
=0):
301 query
= "SELECT builds.* FROM builds \
302 JOIN packages ON builds.pkg_id = packages.id \
307 args
= ["release", name
,]
310 query
+= " AND builds.public = %s"
312 elif public
== False:
313 query
+= " AND builds.public = %s"
316 query
+= " ORDER BY builds.time_created DESC"
320 query
+= " LIMIT %s,%s"
321 args
+= [offset
, limit
]
327 for b
in self
.db
.query(query
, *args
):
328 b
= Build(self
.backend
, b
.id, b
)
331 builds
.sort(reverse
=True)
335 def get_comments(self
, limit
=10, offset
=None, user
=None):
336 query
= "SELECT * FROM builds_comments \
337 JOIN users ON builds_comments.user_id = users.id"
342 wheres
.append("users.id = %s")
346 query
+= " WHERE %s" % " AND ".join(wheres
)
349 query
+= " ORDER BY time_created DESC"
354 query
+= " LIMIT %s,%s"
362 for comment
in self
.db
.query(query
, *args
):
363 comment
= logs
.CommentLogEntry(self
.backend
, comment
)
364 comments
.append(comment
)
368 def get_build_times_summary(self
, name
=None, job_type
=None, arch
=None):
371 builds_times.arch AS arch, \
372 MAX(duration) AS maximum, \
373 MIN(duration) AS minimum, \
374 AVG(duration) AS average, \
375 SUM(duration) AS sum, \
376 STDDEV_POP(duration) AS stddev \
378 LEFT JOIN builds ON builds_times.build_id = builds.id \
379 LEFT JOIN packages ON builds.pkg_id = packages.id"
386 conditions
.append("packages.name = %s")
389 # Filter by job types.
391 conditions
.append("builds_times.job_type = %s")
392 args
.append(job_type
)
396 conditions
.append("builds_times.arch = %s")
401 query
+= " WHERE %s" % " AND ".join(conditions
)
403 # Grouping and sorting.
404 query
+= " GROUP BY arch ORDER BY arch DESC"
406 return self
.db
.query(query
, *args
)
408 def get_build_times_by_arch(self
, arch
, **kwargs
):
413 build_times
= self
.get_build_times_summary(**kwargs
)
415 return build_times
[0]
418 class Build(base
.DataObject
):
422 return "<%s id=%s %s>" % (self
.__class
__.__name
__, self
.id, self
.pkg
)
def __eq__(self, other):
	"""
	Two builds are equal when they share the same database ID.
	"""
	if isinstance(other, self.__class__):
		return self.id == other.id

	# Return NotImplemented for foreign types instead of falling
	# through to an implicit None, so Python can try the reflected
	# comparison on the other operand.
	return NotImplemented
def __lt__(self, other):
	"""
	Builds are ordered by their package (delegates to pkg comparison).
	"""
	if isinstance(other, self.__class__):
		return self.pkg < other.pkg

	# Return NotImplemented for foreign types instead of falling
	# through to an implicit None, which is not a valid comparison
	# result.
	return NotImplemented
433 jobs
= self
.backend
.jobs
._get
_jobs
("SELECT * FROM jobs \
434 WHERE build_id = %s", self
.id)
436 return iter(sorted(jobs
))
440 Deletes this build including all jobs, packages and the source
443 # If the build is in a repository, we need to remove it.
445 self
.repo
.rem_build(self
)
447 for job
in self
.jobs
+ self
.test_jobs
:
453 # Delete everything related to this build.
455 self
.__delete
_comments
()
456 self
.__delete
_history
()
457 self
.__delete
_watchers
()
459 # Delete the build itself.
460 self
.db
.execute("DELETE FROM builds WHERE id = %s", self
.id)
def __delete_bugs(self):
	"""
	Delete all associated bugs.
	"""
	# Remove every bug reference that points at this build.
	self.db.execute("DELETE FROM builds_bugs WHERE build_id = %s", self.id)
def __delete_comments(self):
	"""
	Delete all comments that were made on this build.
	"""
	self.db.execute("DELETE FROM builds_comments WHERE build_id = %s", self.id)
def __delete_history(self):
	"""
	Delete the repository history.
	"""
	# Drops all repository-move records that reference this build.
	self.db.execute("DELETE FROM repositories_history WHERE build_id = %s", self.id)
def __delete_watchers(self):
	"""
	Remove all watcher subscriptions for this build.
	"""
	self.db.execute("DELETE FROM builds_watchers WHERE build_id = %s", self.id)
488 Resets the whole build so it can start again (as it has never
491 for job
in self
.jobs
:
494 #self.__delete_bugs()
495 self
.__delete
_comments
()
496 self
.__delete
_history
()
497 self
.__delete
_watchers
()
499 self
.state
= "building"
506 A set of information that is sent to the XMLRPC client.
508 return { "uuid" : self
.uuid
}
510 def log(self
, action
, user
=None, bug_id
=None):
515 self
.db
.execute("INSERT INTO builds_history(build_id, action, user_id, time, bug_id) \
516 VALUES(%s, %s, %s, NOW(), %s)", self
.id, action
, user_id
, bug_id
)
521 The UUID of this build.
523 return self
.data
.uuid
528 Get package that is to be built in the build.
530 return self
.backend
.packages
.get_by_id(self
.data
.pkg_id
)
534 return "%s-%s" % (self
.pkg
.name
, self
.pkg
.friendly_version
)
539 The type of this build.
541 return self
.data
.type
545 The owner of this build.
547 if self
.data
.owner_id
:
548 return self
.backend
.users
.get_by_id(self
.data
.owner_id
)
550 def set_owner(self
, owner
):
552 self
._set
_attribute
("owner_id", owner
.id)
554 self
._set
_attribute
("owner_id", None)
556 owner
= lazy_property(get_owner
, set_owner
)
560 return self
.backend
.distros
.get_by_id(self
.data
.distro_id
)
564 if self
.type == "scratch":
567 def get_depends_on(self
):
568 if self
.data
.depends_on
:
569 return self
.backend
.builds
.get_by_id(self
.data
.depends_on
)
571 def set_depends_on(self
, build
):
572 self
._set
_attribute
("depends_on", build
.id)
574 depends_on
= lazy_property(get_depends_on
, set_depends_on
)
578 return self
.data
.time_created
582 return self
.created
.date()
587 Is this build public?
589 return self
.data
.public
594 Returns the size on disk of this build.
598 # Add the source package.
603 s
+= sum((j
.size
for j
in self
.jobs
))
607 def auto_update_state(self
):
609 Check if the state of this build can be updated and perform
610 the change if possible.
612 # Do not change the broken/obsolete state automatically.
613 if self
.state
in ("broken", "obsolete"):
616 if self
.repo
and self
.repo
.type == "stable":
617 self
.update_state("stable")
620 # If any of the build jobs are finished, the build will be put in testing
622 for job
in self
.jobs
:
623 if job
.state
== "finished":
624 self
.update_state("testing")
627 def update_state(self
, state
, user
=None, remove
=False):
628 assert state
in ("stable", "testing", "obsolete", "broken")
630 self
._set
_attribute
("state", state
)
632 # In broken state, the removal from the repository is forced and
633 # all jobs that are not finished yet will be aborted.
634 if state
== "broken":
637 for job
in self
.jobs
:
638 if job
.state
in ("new", "pending", "running", "dependency_error"):
639 job
.state
= "aborted"
641 # If this build is in a repository, it will leave it.
642 if remove
and self
.repo
:
643 self
.repo
.rem_build(self
)
645 # If a release build is now in testing state, we put it into the
646 # first repository of the distribution.
647 elif self
.type == "release" and state
== "testing":
648 # If the build is not in a repository, yet and if there is
649 # a first repository, we put the build there.
650 if not self
.repo
and self
.distro
.first_repo
:
651 self
.distro
.first_repo
.add_build(self
, user
=user
)
655 return self
.data
.state
658 return self
.state
== "broken"
660 def obsolete_others(self
):
661 if not self
.type == "release":
664 for build
in self
.backend
.builds
.get_by_name(self
.pkg
.name
, type="release"):
665 # Don't modify ourself.
666 if self
.id == build
.id:
669 # Don't touch broken builds.
670 if build
.state
in ("obsolete", "broken"):
673 # Obsolete the build.
674 build
.update_state("obsolete")
def set_severity(self, severity):
	# Persist the new severity value in the builds table.
	self._set_attribute("severity", severity)

def get_severity(self):
	# Severity exactly as stored in the database row.
	return self.data.severity

severity = property(get_severity, set_severity)
686 if self
.pkg
and self
.pkg
.commit
:
687 return self
.pkg
.commit
def update_message(self, message):
	# Store the human-readable message attached to this build.
	self._set_attribute("message", message)
692 def has_perm(self
, user
):
694 Check, if the given user has the right to perform administrative
695 operations on this build.
703 # Check if the user is allowed to manage packages from the critical path.
704 if self
.critical_path
and not user
.has_perm("manage_critical_path"):
707 # Search for maintainers...
710 if self
.type == "scratch":
711 # The owner of a scratch build has the right to do anything with it.
712 if self
.owner_id
== user
.id:
716 elif self
.type == "release":
717 # The maintainer also is allowed to manage the build.
718 if self
.pkg
.maintainer
== user
:
721 # Deny permission for all other cases.
728 if self
.data
.message
:
729 message
= self
.data
.message
732 if self
.commit
.message
:
733 message
= "\n".join((self
.commit
.subject
, self
.commit
.message
))
735 message
= self
.commit
.subject
737 prefix
= "%s: " % self
.pkg
.name
738 if message
.startswith(prefix
):
739 message
= message
[len(prefix
):]
def get_priority(self):
	# Scheduling priority of this build as stored in the database.
	return self.data.priority

def set_priority(self, priority):
	# Only five discrete priority levels are supported.
	assert priority in (-2, -1, 0, 1, 2)

	self._set_attribute("priority", priority)

priority = property(get_priority, set_priority)
756 if self
.type == "scratch":
757 path
.append(BUILD_SCRATCH_DIR
)
758 path
.append(self
.uuid
)
760 elif self
.type == "release":
761 path
.append(BUILD_RELEASE_DIR
)
762 path
.append("%s/%s-%s-%s" % \
763 (self
.pkg
.name
, self
.pkg
.epoch
, self
.pkg
.version
, self
.pkg
.release
))
766 raise Exception, "Unknown build type: %s" % self
.type
768 return os
.path
.join(*path
)
def source_filename(self):
	# Basename of the source package file belonging to this build.
	return os.path.basename(self.pkg.path)
def download_prefix(self):
	"""
	Base URL under which package files can be downloaded.
	"""
	baseurl = self.backend.settings.get("download_baseurl")

	return "%s/packages" % baseurl
def source_download(self):
	# Full download URL of the source package of this build.
	return "/".join((self.download_prefix, self.pkg.path))
def source_hash_sha512(self):
	# SHA512 checksum of the source package, as recorded for the package.
	return self.pkg.hash_sha512
788 # XXX maybe this should rather live in a uimodule.
789 # zlib-1.2.3-2.ip3 [src, i686, blah...]
790 s
= """<a class="state_%s %s" href="/build/%s">%s</a>""" % \
791 (self
.state
, self
.type, self
.uuid
, self
.name
)
794 for job
in self
.jobs
:
795 s_jobs
.append("""<a class="state_%s %s" href="/job/%s">%s</a>""" % \
796 (job
.state
, job
.type, job
.uuid
, job
.arch
))
799 s
+= " [%s]" % ", ".join(s_jobs
)
def supported_arches(self):
	# Architectures the source package declares support for.
	return self.pkg.supported_arches
def critical_path(self):
	# Whether the package is part of the distribution's critical path.
	return self.pkg.critical_path
def get_jobs(self, type=None):
	"""
	Returns a list of jobs of this build.

	type optionally restricts the result to one job type
	(e.g. "build" or "test").
	"""
	# Pass self along so the jobs do not re-fetch this Build.
	return self.backend.jobs.get_by_build(self.id, self, type=type)
820 Get a list of all build jobs that are in this build.
822 return self
.get_jobs(type="build")
826 return self
.get_jobs(type="test")
829 def all_jobs_finished(self
):
832 for job
in self
.jobs
:
833 if not job
.state
== "finished":
839 def create_autojobs(self
, arches
=None, type="build"):
842 # Arches may be passed to this function. If not we use all arches
843 # this package supports.
845 arches
= self
.supported_arches
847 # Create a new job for every given archirecture.
848 for arch
in self
.backend
.arches
.expand(arches
):
849 # Don't create jobs for src
853 job
= self
.add_job(arch
, type=type)
856 # Return all newly created jobs.
859 def add_job(self
, arch
, type="build"):
860 job
= self
.backend
.jobs
.create(self
.backend
, self
, arch
, type=type)
862 # Add new job to cache.
863 self
.jobs
.append(job
)
871 if not self
.type == "release":
874 # Generate an update ID if none does exist, yet.
875 self
.generate_update_id()
878 "%s" % self
.distro
.name
.replace(" ", "").upper(),
879 "%04d" % (self
.data
.update_year
or 0),
880 "%04d" % (self
.data
.update_num
or 0),
885 def generate_update_id(self
):
886 if not self
.type == "release":
889 if self
.data
.update_num
:
892 update
= self
.db
.get("SELECT update_num AS num FROM builds \
893 WHERE update_year = EXTRACT(year FROM NOW()) ORDER BY update_num DESC LIMIT 1")
896 update_num
= update
.num
+ 1
900 self
.db
.execute("UPDATE builds SET update_year = EXTRACT(year FROM NOW()), update_num = %s \
901 WHERE id = %s", update_num
, self
.id)
905 def get_comments(self
, limit
=10, offset
=0):
906 query
= "SELECT * FROM builds_comments \
907 JOIN users ON builds_comments.user_id = users.id \
908 WHERE build_id = %s ORDER BY time_created ASC"
911 for comment
in self
.db
.query(query
, self
.id):
912 comment
= logs
.CommentLogEntry(self
.backend
, comment
)
913 comments
.append(comment
)
917 def add_comment(self
, user
, text
, score
):
918 # Add the new comment to the database.
919 id = self
.db
.execute("INSERT INTO \
920 builds_comments(build_id, user_id, text, credit, time_created) \
921 VALUES(%s, %s, %s, %s, NOW())",
922 self
.id, user
.id, text
, score
)
924 # Update the credit cache
927 # Send the new comment to all watchers and stuff.
928 self
.send_comment_message(id)
930 # Return the ID of the newly created comment.
935 res
= self
.db
.get("SELECT SUM(credit) AS score \
936 FROM builds_comments WHERE build_id = %s", self
.id)
938 return res
.score
or 0
def get_commenters(self):
	"""
	Return all users who have commented on this build.

	Mirrors get_watchers(): deleted accounts are skipped and only
	activated accounts are returned.
	"""
	# Use a local name that does not shadow the "users" module,
	# which is needed below to construct the User objects.
	res = self.db.query("SELECT DISTINCT users.id AS id FROM builds_comments \
		JOIN users ON builds_comments.user_id = users.id \
		WHERE builds_comments.build_id = %s AND NOT users.deleted = 'Y' \
		AND users.activated = 'Y' ORDER BY users.id", self.id)

	return [users.User(self.backend, u.id) for u in res]
953 def send_comment_message(self
, comment_id
):
954 comment
= self
.db
.get("SELECT * FROM builds_comments WHERE id = %s",
958 assert comment
.build_id
== self
.id
960 # Get user who wrote the comment.
961 user
= self
.backend
.users
.get_by_id(comment
.user_id
)
964 "build_name" : self
.name
,
965 "user_name" : user
.realname
,
968 # XXX create beautiful message
970 self
.backend
.messages
.send_to_all(self
.message_recipients
,
971 N_("%(user_name)s commented on %(build_name)s"),
972 comment
.text
, format
)
976 def get_log(self
, comments
=True, repo
=True, limit
=None):
980 created_entry
= logs
.CreatedLogEntry(self
.backend
, self
)
981 entries
.append(created_entry
)
984 entries
+= self
.get_comments(limit
=limit
)
987 entries
+= self
.get_repo_moves(limit
=limit
)
989 # Sort all entries in chronological order.
993 entries
= entries
[:limit
]
def get_watchers(self):
	# All users watching this build; deleted accounts are skipped and
	# only activated accounts are returned.
	query = self.db.query("SELECT DISTINCT users.id AS id FROM builds_watchers \
		JOIN users ON builds_watchers.user_id = users.id \
		WHERE builds_watchers.build_id = %s AND NOT users.deleted = 'Y' \
		AND users.activated = 'Y' ORDER BY users.id", self.id)

	return [users.User(self.backend, u.id) for u in query]
1007 def add_watcher(self
, user
):
1008 # Don't add a user twice.
1009 if user
in self
.get_watchers():
1012 self
.db
.execute("INSERT INTO builds_watchers(build_id, user_id) \
1013 VALUES(%s, %s)", self
.id, user
.id)
1016 def message_recipients(self
):
1019 for watcher
in self
.get_watchers():
1020 ret
.append("%s <%s>" % (watcher
.realname
, watcher
.email
))
1026 if self
._update
is None:
1027 update
= self
.db
.get("SELECT update_id AS id FROM updates_builds \
1028 WHERE build_id = %s", self
.id)
1031 self
._update
= updates
.Update(self
.backend
, update
.id)
1037 res
= self
.db
.get("SELECT repo_id FROM repositories_builds \
1038 WHERE build_id = %s", self
.id)
1041 return self
.backend
.repos
.get_by_id(res
.repo_id
)
1043 def get_repo_moves(self
, limit
=None):
1044 query
= "SELECT * FROM repositories_history \
1045 WHERE build_id = %s ORDER BY time ASC"
1048 for action
in self
.db
.query(query
, self
.id):
1049 action
= logs
.RepositoryLogEntry(self
.backend
, action
)
1050 actions
.append(action
)
1062 def repo_time(self
):
1063 repo
= self
.db
.get("SELECT time_added FROM repositories_builds \
1064 WHERE build_id = %s", self
.id)
1067 return repo
.time_added
def get_auto_move(self):
	# Stored as a 'Y'/'N' flag in the database; exposed as a boolean.
	return self.data.auto_move == "Y"

def set_auto_move(self, state):
	# NOTE(review): the setter stores the raw value while the getter
	# compares against 'Y' — callers presumably pass 'Y'/'N'; confirm.
	self._set_attribute("auto_move", state)

auto_move = property(get_auto_move, set_auto_move)
1078 def can_move_forward(self
):
1082 # If there is no next repository, we cannot move anything.
1083 if not self
.repo
.next
:
1086 # If the needed amount of score is reached, we can move forward.
1087 if self
.score
>= self
.repo
.next
.score_needed
:
1090 # If the repository does not require a minimal time,
1091 # we can move forward immediately.
1092 if not self
.repo
.time_min
:
1095 query
= self
.db
.get("SELECT NOW() - time_added AS duration FROM repositories_builds \
1096 WHERE build_id = %s", self
.id)
1097 duration
= query
.duration
1099 if duration
>= self
.repo
.time_min
:
def get_bug_ids(self):
	# IDs of all bugs that have been linked to this build.
	query = self.db.query("SELECT bug_id FROM builds_bugs \
		WHERE build_id = %s", self.id)

	return [b.bug_id for b in query]
1112 def add_bug(self
, bug_id
, user
=None, log
=True):
1113 # Check if this bug is already in the list of bugs.
1114 if bug_id
in self
.get_bug_ids():
1117 self
.db
.execute("INSERT INTO builds_bugs(build_id, bug_id) \
1118 VALUES(%s, %s)", self
.id, bug_id
)
1122 self
.log("bug_added", user
=user
, bug_id
=bug_id
)
1124 def rem_bug(self
, bug_id
, user
=None, log
=True):
1125 self
.db
.execute("DELETE FROM builds_bugs WHERE build_id = %s AND \
1126 bug_id = %s", self
.id, bug_id
)
1130 self
.log("bug_removed", user
=user
, bug_id
=bug_id
)
1132 def search_for_bugs(self
):
1136 pattern
= re
.compile(r
"(bug\s?|#)(\d+)")
1138 for txt
in (self
.commit
.subject
, self
.commit
.message
):
1139 for bug
in re
.finditer(pattern
, txt
):
1141 bugid
= int(bug
.group(2))
1145 # Check if a bug with the given ID exists in BZ.
1146 bug
= self
.backend
.bugzilla
.get_bug(bugid
)
1154 for bug_id
in self
.get_bug_ids():
1155 bug
= self
.backend
.bugzilla
.get_bug(bug_id
)
1163 def _update_bugs_helper(self
, repo
):
1165 This function takes a new status and generates messages that
1166 are appended to all bugs.
1169 kwargs
= BUG_MESSAGES
[repo
.type].copy()
1173 baseurl
= self
.backend
.settings
.get("baseurl", "")
1175 "build_url" : "%s/build/%s" % (baseurl
, self
.uuid
),
1176 "distro_name" : self
.distro
.name
,
1177 "package_name" : self
.name
,
1178 "repo_name" : repo
.name
,
1180 kwargs
["comment"] = kwargs
["comment"] % args
1182 self
.update_bugs(**kwargs
)
def _update_bug(self, bug_id, status=None, resolution=None, comment=None):
	# Queue a single bug update; presumably a separate worker pushes
	# these rows to the bug tracker — TODO confirm the consumer.
	self.db.execute("INSERT INTO builds_bugs_updates(bug_id, status, resolution, comment, time) \
		VALUES(%s, %s, %s, %s, NOW())", bug_id, status, resolution, comment)
def update_bugs(self, status, resolution=None, comment=None):
	# Update all bugs linked to this build.
	for bug_id in self.get_bug_ids():
		self._update_bug(bug_id, status=status, resolution=resolution, comment=comment)
1194 class Jobs(base
.Object
):
1195 def _get_job(self
, query
, *args
):
1196 res
= self
.db
.get(query
, *args
)
1199 return Job(self
.backend
, res
.id, data
=res
)
1201 def _get_jobs(self
, query
, *args
):
1202 res
= self
.db
.query(query
, *args
)
1205 yield Job(self
.backend
, row
.id, data
=row
)
1207 def create(self
, build
, arch
, type="build"):
1208 job
= self
._get
_job
("INSERT INTO jobs(uuid, type, build_id, arch, time_created) \
1209 VALUES(%s, %s, %s, %s, NOW()) RETURNING *", "%s" % uuid
.uuid4(), type, build
.id, arch
)
1212 # Set cache for Build object.
1215 # Jobs are by default in state "new" and wait for being checked
1216 # for dependencies. Packages that do have no build dependencies
1217 # can directly be forwarded to "pending" state.
1218 if not job
.pkg
.requires
:
1219 job
.state
= "pending"
def get_by_id(self, id, data=None):
	# Wrap the raw ID (and optional pre-fetched row) in a Job object.
	return Job(self.backend, id, data)
1226 def get_by_uuid(self
, uuid
):
1227 job
= self
.db
.get("SELECT id FROM jobs WHERE uuid = %s", uuid
)
1230 return self
.get_by_id(job
.id)
1232 def get_by_build(self
, build_id
, build
=None, type=None):
1234 Get all jobs in the specifies build.
1236 query
= "SELECT * FROM jobs WHERE build_id = %s"
1240 query
+= " AND type = %s"
1243 # Get IDs of all builds in this group.
1245 for job
in self
.db
.query(query
, *args
):
1246 job
= Job(self
.backend
, job
.id, job
)
1248 # If the Build object was set, we set it so it won't be retrieved
1249 # from the database again.
1255 # Return sorted list of jobs.
1258 def get_active(self
, host_id
=None, builder
=None, states
=None):
1260 host_id
= builder
.id
1263 states
= ["dispatching", "running", "uploading"]
1265 query
= "SELECT * FROM jobs WHERE state IN (%s)" % ", ".join(["%s"] * len(states
))
1269 query
+= " AND builder_id = %s" % host_id
1271 query
+= " ORDER BY \
1273 WHEN jobs.state = 'running' THEN 0 \
1274 WHEN jobs.state = 'uploading' THEN 1 \
1275 WHEN jobs.state = 'dispatching' THEN 2 \
1276 WHEN jobs.state = 'pending' THEN 3 \
1277 WHEN jobs.state = 'new' THEN 4 \
1278 END, time_started ASC"
1280 return [Job(self
.backend
, j
.id, j
) for j
in self
.db
.query(query
, *args
)]
1282 def get_latest(self
, arch
=None, builder
=None, limit
=None, age
=None, date
=None):
1283 query
= "SELECT * FROM jobs"
1286 where
= ["(state = 'finished' OR state = 'failed' OR state = 'aborted')"]
1289 where
.append("arch = %s")
1293 where
.append("builder_id = %s")
1294 args
.append(builder
.id)
1298 year
, month
, day
= date
.split("-", 2)
1299 date
= datetime
.date(int(year
), int(month
), int(day
))
1303 where
.append("(time_created::date = %s OR \
1304 time_started::date = %s OR time_finished::date = %s)")
1305 args
+= (date
, date
, date
)
1308 where
.append("time_finished >= NOW() - '%s'::interval" % age
)
1311 query
+= " WHERE %s" % " AND ".join(where
)
1313 query
+= " ORDER BY time_finished DESC"
1316 query
+= " LIMIT %s"
1319 return [Job(self
.backend
, j
.id, j
) for j
in self
.db
.query(query
, *args
)]
1321 def get_average_build_time(self
):
1323 Returns the average build time of all finished builds from the
1326 result
= self
.db
.get("SELECT AVG(time_finished - time_started) as average \
1327 FROM jobs WHERE type = 'build' AND state = 'finished' AND \
1328 time_finished >= NOW() - '3 months'::interval")
1331 return result
.average
1333 def count(self
, *states
):
1334 query
= "SELECT COUNT(*) AS count FROM jobs"
1338 query
+= " WHERE state IN %s"
1341 jobs
= self
.db
.get(query
, *args
)
1345 def restart_failed(self
, max_tries
=9):
1346 jobs
= self
._get
_jobs
("SELECT jobs.* FROM jobs \
1347 JOIN builds ON builds.id = jobs.build_id \
1349 jobs.type = 'build' AND \
1350 jobs.state = 'failed' AND \
1351 jobs.tries <= %s AND \
1352 NOT builds.state = 'broken' AND \
1353 jobs.time_finished < NOW() - '72 hours'::interval \
1356 WHEN jobs.type = 'build' THEN 0 \
1357 WHEN jobs.type = 'test' THEN 1 \
1359 builds.priority DESC, jobs.time_created ASC",
1364 job
.set_state("new", log
=False)
1367 class Job(base
.DataObject
):
1371 return "<%s id=%s %s>" % (self
.__class
__.__name
__, self
.id, self
.name
)
def __eq__(self, other):
	"""
	Two jobs are equal when they share the same database ID.
	"""
	if isinstance(other, self.__class__):
		return self.id == other.id

	# Return NotImplemented for foreign types instead of falling
	# through to an implicit None, so Python can try the reflected
	# comparison on the other operand.
	return NotImplemented
1377 def __lt__(self
, other
):
1378 if isinstance(other
, self
.__class
__):
1379 if (self
.type, other
.type) == ("build", "test"):
1382 if self
.build
== other
.build
:
1383 return arches
.priority(self
.arch
) < arches
.priority(other
.arch
)
1385 return self
.time_created
< other
.time_created
1388 packages
= self
.backend
.packages
._get
_packages
("SELECT packages.* FROM jobs_packages \
1389 LEFT JOIN packages ON jobs_packages.pkg_id = packages.id \
1390 WHERE jobs_packages.job_id = %s ORDER BY packages.name", self
.id)
1392 return iter(packages
)
1394 def __nonzero__(self
):
1398 res
= self
.db
.get("SELECT COUNT(*) AS len FROM jobs_packages \
1399 WHERE job_id = %s", self
.id)
1405 return self
.build
.distro
1408 self
.__delete
_buildroots
()
1409 self
.__delete
_history
()
1410 self
.__delete
_packages
()
1411 self
.__delete
_logfiles
()
1413 # Delete the job itself.
1414 self
.db
.execute("DELETE FROM jobs WHERE id = %s", self
.id)
1416 def __delete_buildroots(self
):
1418 Removes all buildroots.
1420 self
.db
.execute("DELETE FROM jobs_buildroots WHERE job_id = %s", self
.id)
1422 def __delete_history(self
):
1424 Removes all references in the history to this build job.
1426 self
.db
.execute("DELETE FROM jobs_history WHERE job_id = %s", self
.id)
1428 def __delete_packages(self
):
1430 Deletes all uploaded files from the job.
1432 for pkg
in self
.packages
:
1435 self
.db
.execute("DELETE FROM jobs_packages WHERE job_id = %s", self
.id)
1437 def __delete_logfiles(self
):
1438 for logfile
in self
.logfiles
:
1439 self
.db
.execute("INSERT INTO queue_delete(path) VALUES(%s)", logfile
.path
)
1441 def reset(self
, user
=None):
1442 self
.__delete
_buildroots
()
1443 self
.__delete
_packages
()
1444 self
.__delete
_history
()
1445 self
.__delete
_logfiles
()
1448 self
.log("reset", user
=user
)
1452 def log(self
, action
, user
=None, state
=None, builder
=None, test_job
=None):
1459 builder_id
= builder
.id
1463 test_job_id
= test_job
.id
1465 self
.db
.execute("INSERT INTO jobs_history(job_id, action, state, user_id, \
1466 time, builder_id, test_job_id) VALUES(%s, %s, %s, %s, NOW(), %s, %s)",
1467 self
.id, action
, state
, user_id
, builder_id
, test_job_id
)
1469 def get_log(self
, limit
=None, offset
=None, user
=None):
1470 query
= "SELECT * FROM jobs_history"
1472 conditions
= ["job_id = %s",]
1476 conditions
.append("user_id = %s")
1477 args
.append(user
.id)
1480 query
+= " WHERE %s" % " AND ".join(conditions
)
1482 query
+= " ORDER BY time DESC"
1486 query
+= " LIMIT %s,%s"
1487 args
+= [offset
, limit
,]
1489 query
+= " LIMIT %s"
1493 for entry
in self
.db
.query(query
, *args
):
1494 entry
= logs
.JobLogEntry(self
.backend
, entry
)
1495 entries
.append(entry
)
1501 return self
.data
.uuid
1505 return self
.data
.type
1509 return self
.data
.build_id
1513 return self
.backend
.builds
.get_by_id(self
.build_id
)
1516 def related_jobs(self
):
1519 for job
in self
.build
.jobs
:
1529 return self
.build
.pkg
1533 return "%s-%s.%s" % (self
.pkg
.name
, self
.pkg
.friendly_version
, self
.arch
)
1537 return sum((p
.size
for p
in self
.packages
))
@property
def rank(self):
    """
        Returns the rank in the build queue
    """
    # Only pending jobs are in the queue at all.
    if not self.state == "pending":
        # NOTE(review): early-return line lost in extraction; reconstructed.
        return

    res = self.db.get("SELECT rank FROM jobs_queue WHERE job_id = %s", self.id)

    # NOTE(review): the tail was lost in extraction; presumably the rank
    # column of the row is returned here — verify.
    if res:
        return res.rank
def is_running(self):
    """
        Returns True if job is in a running state.
    """
    # A job counts as "running" from the moment it is queued until its
    # results have been uploaded.
    active_states = ("pending", "dispatching", "running", "uploading")
    return self.state in active_states
def get_state(self):
    # Current state of the job as stored in the database row.
    return self.data.state
def set_state(self, state, user=None, log=True):
    """
        Moves this job into *state* and performs the bookkeeping that
        depends on the transition: history logging, timestamp updates,
        notification mails and updating the parent build's state.
    """
    # Nothing to do if the state remains.
    if not self.state == state:
        self.db.execute("UPDATE jobs SET state = %s WHERE id = %s", state, self.id)

        # Record the transition in the job history ("new" is not logged).
        if log and not state == "new":
            self.log("state_change", state=state, user=user)

        # Keep the cached row in sync.
        # NOTE(review): a guard (e.g. "if self._data:") may have been
        # lost in extraction before this line — verify.
        self._data["state"] = state

    # Always clear the message when the status is changed.
    self.update_message(None)

    # Update some more informations.
    if state == "dispatching":
        # NOTE(review): lines between this branch head and the execute
        # were lost in extraction — verify nothing else happened here.
        self.db.execute("UPDATE jobs SET time_started = NOW(), time_finished = NULL \
            WHERE id = %s", self.id)

    elif state == "pending":
        # Re-queued: count one more try and clear the timestamps.
        self.db.execute("UPDATE jobs SET tries = tries + 1, time_started = NULL, \
            time_finished = NULL WHERE id = %s", self.id)

    elif state in ("aborted", "dependency_error", "finished", "failed"):
        # Set finish time and reset builder..
        self.db.execute("UPDATE jobs SET time_finished = NOW() WHERE id = %s", self.id)

        # Send messages to the user.
        if state == "finished":
            self.send_finished_message()

        elif state == "failed":
            # Remove all package files if a job is set to failed state.
            self.__delete_packages()

            self.send_failed_message()

    # Automatically update the state of the build (not on test builds).
    if self.type == "build":
        self.build.auto_update_state()

# Expose get_state/set_state as the "state" attribute.
state = property(get_state, set_state)
# NOTE(review): decorator/def line lost in extraction; name inferred.
@property
def message(self):
    # Status message of the job (shown to users).
    return self.data.message

def update_message(self, msg):
    """
        Stores *msg* as the job's status message and updates the cache.
    """
    # NOTE(review): the argument line of this execute was lost in
    # extraction; reconstructed as (msg, self.id) — verify.
    self.db.execute("UPDATE jobs SET message = %s WHERE id = %s",
        msg, self.id)

    # Keep the cached row in sync.
    # NOTE(review): a guard (e.g. "if self._data:") may have been lost here.
    self._data["message"] = msg
def get_builder(self):
    # Returns the Builder this job is/was assigned to, or None when no
    # builder has been assigned yet.
    if self.data.builder_id:
        return self.backend.builders.get_by_id(self.data.builder_id)
def set_builder(self, builder, user=None):
    """
        Assigns *builder* to this job, updates the cache and logs the
        assignment in the job history.
    """
    self.db.execute("UPDATE jobs SET builder_id = %s WHERE id = %s",
        builder.id, self.id)

    # Keep the cached row and the lazy_property cache in sync.
    # NOTE(review): lines between the execute and these cache updates
    # were lost in extraction — verify.
    self._data["builder_id"] = builder.id

    self._builder = builder

    # Log the event.
    # NOTE(review): a guard (e.g. "if user:") may have been lost in
    # extraction before this call — verify.
    self.log("builder_assigned", builder=builder, user=user)

# Expose get_builder/set_builder as the "builder" attribute.
builder = lazy_property(get_builder, set_builder)
# NOTE(review): decorator/def line lost in extraction; name inferred.
@property
def arch(self):
    # Architecture this job builds for (e.g. "x86_64").
    return self.data.arch
# NOTE(review): decorator/def line lost in extraction; name inferred.
@property
def duration(self):
    # Runtime of the job in seconds.
    if not self.time_started:
        # NOTE(review): the return value for a not-yet-started job was
        # lost in extraction; reconstructed as 0 — verify.
        return 0

    if self.time_finished:
        # Finished job: wall-clock time between start and finish.
        delta = self.time_finished - self.time_started
    else:
        # Still running: elapsed time since the start.
        # NOTE(review): uses naive UTC, matching the utcnow() call below.
        delta = datetime.datetime.utcnow() - self.time_started

    return delta.total_seconds()
# NOTE(review): the @property decorators were lost in extraction; these
# read like plain row accessors — verify they are properties.
@property
def time_created(self):
    # Timestamp when the job was created.
    return self.data.time_created

@property
def time_started(self):
    # Timestamp when the job was started (None before that).
    return self.data.time_started

@property
def time_finished(self):
    # Timestamp when the job finished (None while running).
    return self.data.time_finished
@property
def expected_runtime(self):
    """
        Returns the estimated time and stddev, this job takes to finish.
    """
    # Get the average build time.
    # NOTE(review): the second argument of this call was lost in
    # extraction; reconstructed as the package name — verify.
    build_times = self.backend.builds.get_build_times_by_arch(self.arch,
        name=self.pkg.name)

    # If there is no statistical data, we cannot estimate anything.
    # NOTE(review): guard lines lost in extraction; reconstructed.
    if not build_times:
        return None, None

    return build_times.average, build_times.stddev
# NOTE(review): decorator/def line lost in extraction — name "eta"
# assumed from the semantics (remaining time estimate); verify.
@property
def eta(self):
    # Remaining runtime estimate: (expected - elapsed, stddev).
    expected_runtime, stddev = self.expected_runtime

    # Returns None implicitly when no estimate is available.
    if expected_runtime:
        return expected_runtime - int(self.duration), stddev
# NOTE(review): decorator/def line lost in extraction; name inferred.
@property
def tries(self):
    # Number of times this job has been (re)tried.
    return self.data.tries
def get_pkg_by_uuid(self, uuid):
    """
        Returns the package with *uuid* that belongs to this job, or
        None if no such package was uploaded by the job.
    """
    # NOTE(review): the trailing argument line and the tail of this
    # method were lost in extraction; reconstructed — verify.
    pkg = self.backend.packages._get_package("SELECT packages.id FROM packages \
        JOIN jobs_packages ON jobs_packages.pkg_id = packages.id \
        WHERE jobs_packages.job_id = %s AND packages.uuid = %s",
        self.id, uuid)

    return pkg
# NOTE(review): decorator/def line and list initialisation were lost in
# extraction; reconstructed — verify.
@lazy_property
def logfiles(self):
    # All log files recorded for this job.
    logfiles = []

    for log in self.db.query("SELECT id FROM logfiles WHERE job_id = %s", self.id):
        log = logs.LogFile(self.backend, log.id)
        logfiles.append(log)

    return logfiles
def add_file(self, filename):
    """
        Add the specified file to this job.

        The file is copied to the right directory by this function.
    """
    assert os.path.exists(filename)

    # Dispatch on the file type by extension.
    if filename.endswith(".log"):
        self._add_file_log(filename)

    elif filename.endswith(".%s" % PACKAGE_EXTENSION):
        # It is not allowed to upload packages on test builds.
        if self.type == "test":
            # NOTE(review): this early return was lost in extraction;
            # reconstructed — verify.
            return

        self._add_file_package(filename)
def _add_file_log(self, filename):
    """
        Attach a log file to this job.

        Picks a unique target name inside the build's "logs" directory,
        hashes the file with SHA512, copies it into place and registers
        it in the logfiles table.
    """
    target_dirname = os.path.join(self.build.path, "logs")

    if self.type == "test":
        # Test jobs may run repeatedly; probe for a free index so an
        # existing log is never overwritten.
        # NOTE(review): the probing loop was partially lost in
        # extraction and has been reconstructed — verify.
        i = 1
        while True:
            target_filename = os.path.join(target_dirname,
                "test.%s.%s.%s.log" % (self.arch, i, self.tries))

            if os.path.exists(target_filename):
                i += 1
            else:
                break
    else:
        target_filename = os.path.join(target_dirname,
            "build.%s.%s.log" % (self.arch, self.tries))

    # Make sure the target directory exists.
    if not os.path.exists(target_dirname):
        os.makedirs(target_dirname)

    # Calculate a SHA512 hash from that file.
    # FIX: use a context manager so the file handle is closed even if
    # reading raises part-way through (the original left it open).
    h = hashlib.sha512()
    with open(filename, "rb") as f:
        while True:
            buf = f.read(BUFFER_SIZE)
            if not buf:
                break

            h.update(buf)

    # Copy the file to the final location.
    shutil.copy2(filename, target_filename)

    # Create an entry in the database (path stored relative to PACKAGES_DIR).
    self.db.execute("INSERT INTO logfiles(job_id, path, filesize, hash_sha512) \
        VALUES(%s, %s, %s, %s)", self.id, os.path.relpath(target_filename, PACKAGES_DIR),
        os.path.getsize(target_filename), h.hexdigest())
def _add_file_package(self, filename):
    # Open package (creates entry in the database).
    pkg = packages.Package.open(self.backend, filename)

    # Move package to the build directory.
    pkg.move(os.path.join(self.build.path, self.arch))

    # Attach the package to this job.
    # NOTE(review): the argument line was lost in extraction;
    # reconstructed as (self.id, pkg.id) — verify.
    self.db.execute("INSERT INTO jobs_packages(job_id, pkg_id) VALUES(%s, %s)",
        self.id, pkg.id)
def get_aborted_state(self):
    # State the job was in when it was aborted.
    return self.data.aborted_state

def set_aborted_state(self, state):
    # Persist via the generic attribute setter.
    self._set_attribute("aborted_state", state)

# Expose the pair as the "aborted_state" attribute.
aborted_state = property(get_aborted_state, set_aborted_state)
@property
def message_recipients(self):
    # Collects "Real Name <email>" strings of everybody who should be
    # notified about this job.
    # NOTE(review): the list initialisation and the final return were
    # lost in extraction; reconstructed — verify.
    l = []

    # Add all people watching the build.
    l += self.build.message_recipients

    # Add the package maintainer on release builds.
    if self.build.type == "release":
        maint = self.pkg.maintainer

        if isinstance(maint, users.User):
            l.append("%s <%s>" % (maint.realname, maint.email))

        # XXX add committer and commit author.

    # Add the owner of the scratch build on scratch builds.
    elif self.build.type == "scratch" and self.build.user:
        l.append("%s <%s>" % \
            (self.build.user.realname, self.build.user.email))

    return l
def save_buildroot(self, pkgs):
    """
        Stores the build root of this job for the current try.

        *pkgs* is an iterable of (package name, package uuid) tuples.
    """
    # NOTE(review): the list initialisation was lost in extraction;
    # reconstructed.
    rows = []

    for pkg_name, pkg_uuid in pkgs:
        rows.append((self.id, self.tries, pkg_uuid, pkg_name))

    # Cleanup old stuff first (for rebuilding packages).
    self.db.execute("DELETE FROM jobs_buildroots WHERE job_id = %s AND tries = %s",
        self.id, self.tries)

    self.db.executemany("INSERT INTO \
        jobs_buildroots(job_id, tries, pkg_uuid, pkg_name) \
        VALUES(%s, %s, %s, %s)", rows)
def has_buildroot(self, tries=None):
    """
        Returns the number of build root entries recorded for *tries*
        (defaulting to the current try).
    """
    # NOTE(review): the default handling and the return were lost in
    # extraction; reconstructed — verify.
    if tries is None:
        tries = self.tries

    res = self.db.get("SELECT COUNT(*) AS num FROM jobs_buildroots \
        WHERE jobs_buildroots.job_id = %s AND jobs_buildroots.tries = %s",
        self.id, tries)

    if res:
        return res.num
def get_buildroot(self, tries=None):
    """
        Returns the build root of this job as a list of
        (name, uuid, package-or-None) tuples, sorted by package name.
    """
    # NOTE(review): the default handling, list initialisation and the
    # return were lost in extraction; reconstructed — verify.
    if tries is None:
        tries = self.tries

    rows = self.db.query("SELECT * FROM jobs_buildroots \
        WHERE jobs_buildroots.job_id = %s AND jobs_buildroots.tries = %s \
        ORDER BY pkg_name", self.id, tries)

    pkgs = []
    for row in rows:
        # Search for this package in the packages table.
        pkg = self.backend.packages.get_by_uuid(row.pkg_uuid)
        pkgs.append((row.pkg_name, row.pkg_uuid, pkg))

    return pkgs
def send_finished_message(self):
    """
        Mails all message recipients that this job finished successfully.
    """
    # Send no finished mails for test jobs.
    if self.type == "test":
        # NOTE(review): early-return line lost in extraction; reconstructed.
        return

    logging.debug("Sending finished message for job %s to %s" % \
        (self.name, ", ".join(self.message_recipients)))

    # Template substitutions for the mail body.
    # NOTE(review): the dict head was lost in extraction; reconstructed.
    info = {
        "build_name" : self.name,
        "build_host" : self.builder.name,
        "build_uuid" : self.uuid,
    }

    self.backend.messages.send_to_all(self.message_recipients,
        MSG_BUILD_FINISHED_SUBJECT, MSG_BUILD_FINISHED, info)
def send_failed_message(self):
    """
        Mails all message recipients that this job failed.
    """
    logging.debug("Sending failed message for job %s to %s" % \
        (self.name, ", ".join(self.message_recipients)))

    # The builder may not be set (e.g. dependency errors before dispatch).
    # NOTE(review): the guard around this lookup was lost in extraction;
    # reconstructed — verify.
    build_host = None
    if self.builder:
        build_host = self.builder.name

    # Template substitutions for the mail body.
    info = {
        "build_name" : self.name,
        "build_host" : build_host,
        "build_uuid" : self.uuid,
    }

    self.backend.messages.send_to_all(self.message_recipients,
        MSG_BUILD_FAILED_SUBJECT, MSG_BUILD_FAILED, info)
def set_start_time(self, start_not_before):
    # Earliest time this job may be picked up by a builder
    # (None means "as soon as possible").
    self._set_attribute("start_not_before", start_not_before)
def schedule(self, type, start_time=None, user=None):
    """
        Schedules this job again.

        type is either "rebuild" (re-run this very job) or "test"
        (create a fresh test job for the same build and arch);
        start_time is the earliest time the job may start.
    """
    assert type in ("rebuild", "test")

    if type == "rebuild":
        # A finished job cannot be rebuilt.
        if self.state == "finished":
            # NOTE(review): early-return line lost in extraction; reconstructed.
            return

        self.set_state("new", user=user, log=False)
        self.set_start_time(start_time)

        # Log the event.
        self.log("schedule_rebuild", user=user)

    elif type == "test":
        # Test jobs only make sense for finished jobs.
        if not self.state == "finished":
            # NOTE(review): early-return line lost in extraction; reconstructed.
            return

        # Create a new job with same build and arch.
        job = self.create(self.backend, self.build, self.arch, type="test")
        job.set_start_time(start_time)

        # Log the event.
        self.log("schedule_test_job", test_job=job, user=user)

        # NOTE(review): the tail was lost in extraction; presumably the
        # new job is returned here — verify.
        return job
def schedule_test(self, start_not_before=None, user=None):
    # Convenience wrapper around schedule() for test jobs.
    # NOTE(review): one line between the def and the return was lost in
    # extraction (possibly just a comment) — verify.
    return self.schedule("test", start_time=start_not_before, user=user)
def schedule_rebuild(self, start_not_before=None, user=None):
    # Convenience wrapper around schedule() for rebuilds.
    # NOTE(review): one line between the def and the return was lost in
    # extraction (possibly just a comment) — verify.
    return self.schedule("rebuild", start_time=start_not_before, user=user)
def get_build_repos(self):
    """
        Returns a list of all repositories that should be used when
        building this job.

        Falls back to the distribution's default build repositories
        when the job has no (matching) explicit repository selection.
    """
    # NOTE(review): the argument line of this query was lost in
    # extraction; reconstructed as self.id — verify.
    repo_ids = self.db.query("SELECT repo_id FROM jobs_repos WHERE job_id = %s",
        self.id)

    if not repo_ids:
        return self.distro.get_build_repos()

    # FIX: build the ID set once instead of re-creating a list inside
    # the membership test for every repository (was O(n*m)).
    wanted_ids = set(r.id for r in repo_ids)

    repos = []
    for repo in self.distro.repositories:
        if repo.id in wanted_ids:
            repos.append(repo)

    # Fall back to the defaults if nothing matched.
    return repos or self.distro.get_build_repos()
def get_repo_config(self):
    """
        Get repository configuration file that is sent to the builder.
    """
    # NOTE(review): the list initialisation was lost in extraction;
    # reconstructed.
    confs = []

    for repo in self.get_build_repos():
        confs.append(repo.get_conf())

    return "\n\n".join(confs)
def get_config(self):
    """
        Get configuration file that is sent to the builder.
    """
    # NOTE(review): the list initialisation was lost in extraction;
    # reconstructed.
    confs = []

    # Add the distribution configuration.
    confs.append(self.distro.get_config())

    # Then add all repositories for this build.
    confs.append(self.get_repo_config())

    return "\n\n".join(confs)
1976 def resolvdep(self
):
1977 config
= pakfire
.config
.Config(files
=["general.conf"])
1978 config
.parse(self
.get_config())
1980 # The filename of the source file.
1981 filename
= os
.path
.join(PACKAGES_DIR
, self
.build
.pkg
.path
)
1982 assert os
.path
.exists(filename
), filename
1984 # Create a new pakfire instance with the configuration for
1986 p
= pakfire
.PakfireServer(config
=config
, arch
=self
.arch
)
1988 # Try to solve the build dependencies.
1990 solver
= p
.resolvdep(filename
)
1992 # Catch dependency errors and log the problem string.
1993 except DependencyError
, e
:
1994 self
.state
= "dependency_error"
1995 self
.update_message(e
)
1998 # If the build dependencies can be resolved, we set the build in
2000 if solver
.status
is True:
2001 if self
.state
in ("failed",):
2004 self
.state
= "pending"