#!/usr/bin/python

import datetime
import logging
import os
import pakfire
import pakfire.config
import shutil
import tempfile

import backend.builds
import backend.git

import base

from pakfire.constants import *

class BuildsFailedRestartEvent(base.Event):
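    """
        Restarts failed build jobs.

        Picks up build jobs that failed, have not exceeded the maximum
        number of tries and whose build has not been marked as broken,
        and puts them back into the queue.
    """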
    # Run when idle.
    priority = 5

    @property
    def interval(self):
        return self.pakfire.settings.get_int("build_keepalive_interval", 900)

    def run(self):
        max_tries = self.pakfire.settings.get_int("builds_restart_max_tries", 9)

        query = self.db.query("SELECT jobs.id AS id FROM jobs \
            JOIN builds ON builds.id = jobs.build_id \
            WHERE \
                jobs.type = 'build' AND \
                jobs.state = 'failed' AND \
                jobs.tries <= %s AND \
                NOT builds.state = 'broken' AND \
                jobs.time_finished < DATE_SUB(NOW(), INTERVAL 72 HOUR) \
            ORDER BY \
                CASE \
                    WHEN jobs.type = 'build' THEN 0 \
                    WHEN jobs.type = 'test' THEN 1 \
                END, \
                builds.priority DESC, jobs.time_created ASC",
            max_tries)

        for row in query:
            job = self.pakfire.jobs.get_by_id(row.id)

            # Restart the job.
            job.set_state("new", log=False)


class CheckBuildDependenciesEvent(base.Event):
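    """
        Scans for jobs whose dependencies need to be (re-)checked.

        Collects jobs that are new or that ran into a dependency error
        a while ago and schedules a CheckBuildDependencyEvent for each
        of them.
    """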
    # Process them as quickly as possible, but there may be more important events.
    priority = 3

    @property
    def interval(self):
        return self.pakfire.settings.get_int("dependency_checker_interval", 30)

    def run(self):
        query = self.db.query("SELECT id FROM jobs \
            WHERE state = 'new' OR \
                (state = 'dependency_error' AND \
                time_finished < DATE_SUB(NOW(), INTERVAL 5 MINUTE)) \
            ORDER BY time_finished LIMIT 50")

        for row in query:
            e = CheckBuildDependencyEvent(self.pakfire, row.id)
            self.scheduler.add_event(e)


class CheckBuildDependencyEvent(base.Event):
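    """
        Resolves the dependencies of a single build job.

        The actual work happens in a subprocess launched by
        run_subprocess().
    """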
    # Process them as quickly as possible, but there may be more important events.
    priority = 3

    def run(self, job_id):
        self.run_subprocess(self._run, job_id)

    @staticmethod
    def _run(_pakfire, job_id):
        # Get the build job we are working on.
        job = _pakfire.jobs.get_by_id(job_id)
        if not job:
            logging.debug("Job %s does not exist." % job_id)
            return

        # Check if the job status has changed in the meantime.
        if job.state not in ("new", "dependency_error", "failed"):
            logging.warning("Job status has already changed: %s - %s" % (job.name, job.state))
            return

        # Resolve the dependencies.
        job.resolvdep()


class CreateTestBuildsEvent(base.Event):
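    """
        Schedules test builds for builds that have not been tested for a
        while, without overloading the per-architecture build queues.
    """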
    # Run this every five minutes.
    interval = 300

    # Run when the build service is idle.
    priority = 10

    @property
    def test_threshold(self):
        threshold_days = self.pakfire.settings.get_int("test_threshold_days", 14)

        return datetime.datetime.utcnow() - datetime.timedelta(days=threshold_days)

    def run(self):
        max_queue_length = self.pakfire.settings.get_int("test_queue_limit", 10)

        # Get a list with all feasible architectures.
        arches = self.pakfire.arches.get_all()
        noarch = self.pakfire.arches.get_by_name("noarch")
        if noarch:
            arches.append(noarch)

        for arch in arches:
            # Get the job queue for each architecture.
            queue = self.pakfire.jobs.get_next(arches=[arch,])

            # Skip adding new jobs if there are already too many jobs in the queue.
            limit = max_queue_length - len(queue)
            if limit <= 0:
                logging.debug("Already too many jobs in queue of %s to create tests." % arch.name)
                continue

            # Get a list of builds which potentially need a test build.
            # Randomize the output and do not return more jobs than we are
            # allowed to put into the build queue.
            builds = self.pakfire.builds.needs_test(self.test_threshold,
                arch=arch, limit=limit)

            if not builds:
                logging.debug("No builds need a test for %s." % arch.name)
                continue

            # Search for the job with the right architecture in each
            # build and schedule a test job.
            for build in builds:
                for job in build.jobs:
                    if job.arch == arch:
                        job.schedule("test")
                        break


class DistEvent(base.Event):
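    """
        Processes pending commits from the source repositories.

        Checks out each pending revision and schedules a DistFileEvent
        for the makefiles that were changed by it.
    """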
    interval = 60

    first_run = True

    def run(self):
        # On the first run, make sure all source repositories are initialized.
        if self.first_run:
            self.first_run = False

            self.process = self.init_repos()

        for commit in self.pakfire.sources.get_pending_commits():
            commit.state = "running"

            logging.debug("Processing commit %s: %s" % (commit.revision, commit.subject))

            # Get the repository of this commit.
            repo = backend.git.Repo(self.pakfire, commit.source_id)

            # Make sure it is checked out.
            if not repo.cloned:
                repo.clone()

            # Navigate to the right revision.
            repo.checkout(commit.revision)

            # Get all changed makefiles.
            deleted_files = []
            updated_files = []

            for file in repo.changed_files(commit.revision):
                # Don't care about files that are not a makefile.
                if not file.endswith(".%s" % MAKEFILE_EXTENSION):
                    continue

                if os.path.exists(file):
                    updated_files.append(file)
                else:
                    deleted_files.append(file)

            e = DistFileEvent(self.pakfire, None, commit.id, updated_files, deleted_files)
            self.scheduler.add_event(e)

    def init_repos(self):
        """
            Initialize all repositories.
        """
        for source in self.pakfire.sources.get_all():
            # Skip those which already have a revision.
            if source.revision:
                continue

            # Initialize the repository and clone it if necessary.
            repo = backend.git.Repo(self.pakfire, source.id)
            if not repo.cloned:
                repo.clone()

            # Get a list of all files in the repository.
            files = repo.get_all_files()

            for file in [f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)]:
                e = DistFileEvent(self.pakfire, source.id, None, [file,], [])
                self.scheduler.add_event(e)


class DistFileEvent(base.Event):
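    """
        Creates source packages from updated makefiles and imports them
        as release builds; packages whose makefiles have been deleted
        are removed from the distribution.
    """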
    def run(self, *args):
        self.run_subprocess(self._run, *args)

    @staticmethod
    def _run(_pakfire, source_id, commit_id, updated_files, deleted_files):
        commit = None
        source = None

        if commit_id:
            commit = _pakfire.sources.get_commit_by_id(commit_id)
            assert commit

            source = commit.source

        if source_id and not source:
            source = _pakfire.sources.get_by_id(source_id)

        assert source

        if updated_files:
            # Create a temporary directory to put all the files
            # that are generated here.
            pkg_dir = tempfile.mkdtemp()

            try:
                config = pakfire.config.Config(["general.conf",])
                config.parse(source.distro.get_config())

                p = pakfire.PakfireServer(config=config)

                pkgs = []
                for file in updated_files:
                    pkg_file = p.dist(file, pkg_dir)
                    pkgs.append(pkg_file)

                # Import all packages in one go.
                for pkg in pkgs:
                    # Import the package file and create a build out of it.
                    backend.builds.import_from_package(_pakfire, pkg,
                        distro=source.distro, commit=commit, type="release")

            except:
                if commit:
                    commit.state = "failed"

                raise

            finally:
                if os.path.exists(pkg_dir):
                    shutil.rmtree(pkg_dir)

        for file in deleted_files:
            # Determine the name of the package by stripping the
            # makefile extension (including the dot) from the file name.
            name = os.path.basename(file)
            name = name[:-(len(MAKEFILE_EXTENSION) + 1)]

            source.distro.delete_package(name)

        if commit:
            commit.state = "finished"