]> git.ipfire.org Git - people/jschlag/pbs.git/blob - src/manager/builds.py
arches: Allow iterating over all architectures
[people/jschlag/pbs.git] / src / manager / builds.py
1 #!/usr/bin/python
2
import datetime
import logging
import os
import shutil
import tempfile

import pakfire
import pakfire.config

from .. import builds
from .. import git

from . import base

from ..constants import *
16
class BuildsFailedRestartEvent(base.Event):
	"""
		Periodic event that restarts failed build jobs.

		Picks 'build' jobs that failed more than 72 hours ago, have not
		exhausted their retry budget, and whose build is not broken, and
		resets them to 'new' so the scheduler will pick them up again.
	"""
	# Run when idle.
	priority = 5

	@property
	def interval(self):
		# How often this event fires; configurable, defaults to
		# 900 seconds (15 minutes).
		return self.pakfire.settings.get_int("build_keepalive_interval", 900)

	def run(self):
		# Jobs that have already been tried more often than this
		# are not restarted any more.
		max_tries = self.pakfire.settings.get_int("builds_restart_max_tries", 9)

		# NOTE(review): the WHERE clause restricts jobs.type to 'build',
		# so the CASE in ORDER BY always yields 0 — it is kept for
		# byte-compatibility with the original query.
		query = self.db.query("SELECT jobs.id AS id FROM jobs \
			JOIN builds ON builds.id = jobs.build_id \
			WHERE \
				jobs.type = 'build' AND \
				jobs.state = 'failed' AND \
				jobs.tries <= %s AND \
				NOT builds.state = 'broken' AND \
				jobs.time_finished < NOW() - '72 hours'::interval \
			ORDER BY \
				CASE \
					WHEN jobs.type = 'build' THEN 0 \
					WHEN jobs.type = 'test' THEN 1 \
				END, \
				builds.priority DESC, jobs.time_created ASC",
			max_tries)

		for row in query:
			job = self.pakfire.jobs.get_by_id(row.id)

			# Restart the job. log=False avoids writing a log entry
			# for the automatic state change.
			job.set_state("new", log=False)
49
50
class CheckBuildDependenciesEvent(base.Event):
	"""
		Periodic event that queues dependency checks.

		Selects jobs that are 'new', or stuck in 'dependency_error' for at
		least five minutes, and schedules one CheckBuildDependencyEvent per
		job (at most 50 per run).
	"""
	# Process them as quickly as possible, but there may be more important events.
	priority = 3

	@property
	def interval(self):
		# How often this event fires; configurable, defaults to 30 seconds.
		return self.pakfire.settings.get_int("dependency_checker_interval", 30)

	def run(self):
		# Oldest jobs first; LIMIT keeps a single run bounded.
		query = self.db.query("SELECT id FROM jobs \
			WHERE state = 'new' OR \
				(state = 'dependency_error' AND \
				time_finished < NOW() - '5 minutes'::interval) \
			ORDER BY time_finished LIMIT 50")

		for row in query:
			# The actual dependency resolution happens asynchronously
			# in a separate event.
			e = CheckBuildDependencyEvent(self.pakfire, row.id)
			self.scheduler.add_event(e)
69
70
class CheckBuildDependencyEvent(base.Event):
	"""
		Resolves the dependencies of a single build job.

		The heavy lifting is delegated to a subprocess so that the
		manager process stays responsive.
	"""
	# Process them as quickly as possible, but there may be more important events.
	priority = 3

	def run(self, job_id):
		# Run the resolver in a child process.
		self.run_subprocess(self._run, job_id)

	@staticmethod
	def _run(_pakfire, job_id):
		"""
			Subprocess entry point: look up the job and resolve its
			dependencies, unless its state changed in the meantime.
		"""
		# Get the build job we are working on.
		job = _pakfire.jobs.get_by_id(job_id)
		if not job:
			logging.debug("Job %s does not exist." % job_id)
			return

		# Check if the job status has changed in the meanwhile.
		# (Idiom fix: "x not in y" instead of "not x in y".)
		if job.state not in ("new", "dependency_error", "failed"):
			logging.warning("Job status has already changed: %s - %s" % (job.name, job.state))
			return

		# Resolve the dependencies.
		job.resolvdep()
93
94
class CreateTestBuildsEvent(base.Event):
	"""
		Periodic event that schedules test builds.

		For every architecture, finds builds whose last test is older than
		the configured threshold and schedules a 'test' job for the job
		matching that architecture, without overfilling the job queue.
	"""
	# Run this every five minutes.
	interval = 300

	# Run when the build service is idle.
	priority = 10

	@property
	def test_threshold(self):
		# Builds untested for longer than this many days become
		# candidates for a new test build.
		threshold_days = self.pakfire.settings.get_int("test_threshold_days", 14)

		return datetime.datetime.utcnow() - datetime.timedelta(days=threshold_days)

	def run(self):
		# Upper bound on the per-arch queue length we are willing to fill.
		max_queue_length = self.pakfire.settings.get_int("test_queue_limit", 10)

		# NOTE(review): this method reads self.backend while the rest of
		# the file uses self.pakfire — confirm both refer to the same
		# backend object.
		for arch in self.backend.arches:
			# Skip adding new jobs if there are already too many jobs in the queue.
			limit = max_queue_length - self.backend.jobqueue.get_length_for_arch(arch)
			if limit <= 0:
				logging.debug("Already too many jobs in queue of %s to create tests." % arch)
				continue

			# Get a list of builds which potentially need a test build.
			# Randomize the output and do not return more jobs than we are
			# allowed to put into the build queue.
			builds = self.pakfire.builds.needs_test(self.test_threshold,
				arch=arch, limit=limit)

			if not builds:
				logging.debug("No builds needs a test for %s." % arch.name)
				continue

			# Search for the job with the right architecture in each
			# build and schedule a test job.
			for build in builds:
				for job in build.jobs:
					if job.arch == arch:
						job.schedule("test")
						break
135
136
class DistEvent(base.Event):
	"""
		Polls the source repositories for pending commits and dispatches
		a DistFileEvent for the makefiles each commit touched.
	"""
	interval = 60

	# On the very first run, all known repositories are initialized first.
	first_run = True

	def run(self):
		if self.first_run:
			self.first_run = False

			self.process = self.init_repos()

		for commit in self.pakfire.sources.get_pending_commits():
			commit.state = "running"

			logging.debug("Processing commit %s: %s" % (commit.revision, commit.subject))

			# Get the repository of this commit.
			repo = git.Repo(self.pakfire, commit.source)

			# Make sure, it is checked out.
			if not repo.cloned:
				repo.clone()

			# Navigate to the right revision.
			repo.checkout(commit.revision)

			# Get all changed makefiles.
			deleted_files = []
			updated_files = []

			for file in repo.changed_files(commit.revision):
				# Don't care about files that are not a makefile.
				if not file.endswith(".%s" % MAKEFILE_EXTENSION):
					continue

				# Files that still exist were updated; files that are
				# gone were deleted by this commit.
				if os.path.exists(file):
					updated_files.append(file)
				else:
					deleted_files.append(file)

			e = DistFileEvent(self.pakfire, None, commit.id, updated_files, deleted_files)
			self.scheduler.add_event(e)

	def init_repos(self):
		"""
			Initialize all repositories that do not have a revision yet
			and dispatch a DistFileEvent for every makefile they contain.
		"""
		for source in self.pakfire.sources.get_all():
			# Skip those which already have a revision.
			if source.revision:
				continue

			# Initialize the repository and clone it if necessary.
			repo = git.Repo(self.pakfire, source)
			if not repo.cloned:
				repo.clone()

			# Get a list of all files in the repository.
			files = repo.get_all_files()

			for file in files:
				# BUG FIX: the original comprehension filtered on the
				# name "file" (not bound in this scope) instead of the
				# comprehension variable "f" — filter on each file here.
				if not file.endswith(".%s" % MAKEFILE_EXTENSION):
					continue

				e = DistFileEvent(self.pakfire, source.id, None, [file,], [])
				self.scheduler.add_event(e)
200
201
class DistFileEvent(base.Event):
	"""
		Turns changed makefiles into source packages and imports them.

		Updated makefiles are dist'ed into a temporary directory and the
		resulting packages imported as release builds; deleted makefiles
		cause the corresponding package to be removed from the distro.
	"""
	def run(self, *args):
		# The dist operation is expensive; run it in a child process.
		self.run_subprocess(self._run, *args)

	@staticmethod
	def _run(_pakfire, source_id, commit_id, updated_files, deleted_files):
		"""
			Subprocess entry point.

			Exactly one of commit_id/source_id identifies where the file
			changes came from; the source is derived from the commit when
			a commit is given.
		"""
		commit = None
		source = None

		if commit_id:
			commit = _pakfire.sources.get_commit_by_id(commit_id)
			assert commit

			source = commit.source

		if source_id and not source:
			source = _pakfire.sources.get_by_id(source_id)

		assert source

		if updated_files:
			# Create a temporary directory where to put all the files
			# that are generated here.
			pkg_dir = tempfile.mkdtemp()

			try:
				config = pakfire.config.Config(["general.conf",])
				config.parse(source.distro.get_config())

				p = pakfire.PakfireServer(config=config)

				# Turn every updated makefile into a source package.
				# (The original wrapped each p.dist() call in a
				# "try: ... except: raise" which was a no-op.)
				pkgs = [p.dist(file, pkg_dir) for file in updated_files]

				# Import all packages in one swoop.
				for pkg in pkgs:
					# Import the package file and create a build out of it.
					builds.import_from_package(_pakfire, pkg,
						distro=source.distro, commit=commit, type="release")

			except:
				# Mark the commit as failed before propagating the error.
				if commit:
					commit.state = "failed"

				raise

			finally:
				# Always clean up the temporary package directory.
				if os.path.exists(pkg_dir):
					shutil.rmtree(pkg_dir)

		for file in deleted_files:
			# Determine the name of the package from the makefile path.
			name = os.path.basename(file)
			# BUG FIX: strip the trailing ".<extension>" suffix; the
			# original slice kept only the FIRST len(ext)+1 characters
			# of the filename instead of removing the suffix.
			name = name[:-(len(MAKEFILE_EXTENSION) + 1)]

			source.distro.delete_package(name)

		if commit:
			commit.state = "finished"