]> git.ipfire.org Git - people/jschlag/pbs.git/blame - src/manager/builds.py
Refactor sources
[people/jschlag/pbs.git] / src / manager / builds.py
CommitLineData
83be3106
MT
#!/usr/bin/python

import datetime
import logging
import os
import pakfire
import pakfire.config
import shutil
import tempfile

from .. import builds
from .. import git

from . import base

from ..constants import *
83be3106
MT
class BuildsFailedRestartEvent(base.Event):
	"""
		Periodically restarts failed build jobs that still have
		retry attempts left, by putting them back into the "new" state.
	"""
	# Run when idle.
	priority = 5

	@property
	def interval(self):
		# Seconds between runs; configurable, defaults to 15 minutes.
		return self.pakfire.settings.get_int("build_keepalive_interval", 900)

	def run(self):
		# Maximum number of attempts before a job is left failed for good.
		max_tries = self.pakfire.settings.get_int("builds_restart_max_tries", 9)

		# Select failed 'build' jobs (within their retry budget) whose
		# parent build is not broken and that finished more than 72 hours
		# ago.  Build jobs sort before test jobs, then by build priority
		# and age.  NOTE(review): the ORDER BY CASE on jobs.type is
		# redundant given the WHERE clause restricts to 'build'.
		query = self.db.query("SELECT jobs.id AS id FROM jobs \
			JOIN builds ON builds.id = jobs.build_id \
			WHERE \
				jobs.type = 'build' AND \
				jobs.state = 'failed' AND \
				jobs.tries <= %s AND \
				NOT builds.state = 'broken' AND \
				jobs.time_finished < NOW() - '72 hours'::interval \
			ORDER BY \
				CASE \
					WHEN jobs.type = 'build' THEN 0 \
					WHEN jobs.type = 'test'  THEN 1 \
				END, \
				builds.priority DESC, jobs.time_created ASC",
			max_tries)

		for row in query:
			job = self.pakfire.jobs.get_by_id(row.id)

			# Restart the job.
			job.set_state("new", log=False)
49
50
class CheckBuildDependenciesEvent(base.Event):
	"""
		Scans for jobs whose dependencies need (re-)checking and
		schedules one CheckBuildDependencyEvent per job.
	"""
	# Process them as quickly as possible, but there may be more important events.
	priority = 3

	@property
	def interval(self):
		# Seconds between scans; configurable, defaults to 30 seconds.
		return self.pakfire.settings.get_int("dependency_checker_interval", 30)

	def run(self):
		# Pick up brand-new jobs, plus dependency_error jobs whose last
		# attempt finished more than five minutes ago (simple back-off).
		# Oldest first, at most 50 per pass.
		query = self.db.query("SELECT id FROM jobs \
			WHERE state = 'new' OR \
				(state = 'dependency_error' AND \
				time_finished < NOW() - '5 minutes'::interval) \
			ORDER BY time_finished LIMIT 50")

		for row in query:
			e = CheckBuildDependencyEvent(self.pakfire, row.id)
			self.scheduler.add_event(e)
69
70
class CheckBuildDependencyEvent(base.Event):
	"""
		Resolves the dependencies of a single build job.
	"""
	# Process them as quickly as possible, but there may be more important events.
	priority = 3

	def run(self, job_id):
		# Dependency resolution can be slow — run it in a subprocess so
		# the scheduler stays responsive.
		self.run_subprocess(self._run, job_id)

	@staticmethod
	def _run(_pakfire, job_id):
		# Fetch the job; it may have been removed since the event was queued.
		job = _pakfire.jobs.get_by_id(job_id)
		if not job:
			logging.debug("Job %s does not exist." % job_id)
			return

		# Somebody else may already have moved the job onward — only
		# these states still want a dependency check.
		if job.state not in ("new", "dependency_error", "failed"):
			logging.warning("Job status has already changed: %s - %s" % (job.name, job.state))
			return

		# Resolve the dependencies.
		job.resolvdep()
93
94
class CreateTestBuildsEvent(base.Event):
	"""
		Schedules test jobs for builds that have not been tested recently.
	"""
	# Run this every five minutes.
	interval = 300

	# Run when the build service is idle.
	priority = 10

	@property
	def test_threshold(self):
		# Builds last tested before this point in time are due for a test.
		days = self.pakfire.settings.get_int("test_threshold_days", 14)

		return datetime.datetime.utcnow() - datetime.timedelta(days=days)

	def run(self):
		queue_limit = self.pakfire.settings.get_int("test_queue_limit", 10)

		# Collect all feasible architectures (including "noarch" if it exists).
		arches = self.pakfire.arches.get_all()
		noarch = self.pakfire.arches.get_by_name("noarch")
		if noarch:
			arches.append(noarch)

		for arch in arches:
			# Skip this architecture when its queue is already full.
			slots = queue_limit - self.backend.jobqueue.get_length_for_arch(arch.name)
			if slots <= 0:
				logging.debug("Already too many jobs in queue of %s to create tests." % arch.name)
				continue

			# Fetch builds that potentially need a test build, never more
			# than we are allowed to put into the queue.
			candidates = self.pakfire.builds.needs_test(self.test_threshold,
				arch=arch, limit=slots)

			if not candidates:
				logging.debug("No builds needs a test for %s." % arch.name)
				continue

			# For each build, find the job matching this architecture and
			# schedule a test for it.
			for build in candidates:
				for job in build.jobs:
					if job.arch == arch:
						job.schedule("test")
						break
141
142
e6fa8404
MT
class DistEvent(base.Event):
	"""
		Polls all sources for pending commits and schedules a
		DistFileEvent for every changed makefile.
	"""
	interval = 60

	# Ensures the (potentially slow) repository initialisation only
	# happens on the very first invocation.
	first_run = True

	def run(self):
		if self.first_run:
			self.first_run = False

			# NOTE(review): init_repos() returns nothing, so this stores
			# None; the assignment is kept for compatibility.
			self.process = self.init_repos()

		for commit in self.pakfire.sources.get_pending_commits():
			commit.state = "running"

			logging.debug("Processing commit %s: %s" % (commit.revision, commit.subject))

			# Get the repository of this commit.
			repo = git.Repo(self.pakfire, commit.source)

			# Make sure, it is checked out.
			if not repo.cloned:
				repo.clone()

			# Navigate to the right revision.
			repo.checkout(commit.revision)

			# Get all changed makefiles.
			deleted_files = []
			updated_files = []

			for filename in repo.changed_files(commit.revision):
				# Don't care about files that are not a makefile.
				if not filename.endswith(".%s" % MAKEFILE_EXTENSION):
					continue

				# A file that still exists was updated; otherwise it was deleted.
				if os.path.exists(filename):
					updated_files.append(filename)
				else:
					deleted_files.append(filename)

			e = DistFileEvent(self.pakfire, None, commit.id, updated_files, deleted_files)
			self.scheduler.add_event(e)

	def init_repos(self):
		"""
			Initialize all repositories and schedule a DistFileEvent for
			every makefile of sources that have no revision yet.
		"""
		for source in self.pakfire.sources.get_all():
			# Skip those which already have a revision.
			if source.revision:
				continue

			# Initialize the repository and clone it if necessary.
			repo = git.Repo(self.pakfire, source)
			if not repo.cloned:
				repo.clone()

			# Get a list of all files in the repository.
			files = repo.get_all_files()

			# BUGFIX: the original filter read "file.endswith(...)" which
			# referenced the builtin "file" type instead of the loop
			# variable "f", so no makefile was ever matched correctly.
			for filename in (f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)):
				e = DistFileEvent(self.pakfire, source.id, None, [filename], [])
				self.scheduler.add_event(e)
83be3106 206
83be3106 207
e6fa8404
MT
class DistFileEvent(base.Event):
	"""
		Runs "dist" for the updated makefiles of one commit/source,
		imports the resulting packages, and deletes packages whose
		makefiles were removed.
	"""
	def run(self, *args):
		# The dist process is heavyweight — run it in a subprocess.
		self.run_subprocess(self._run, *args)

	@staticmethod
	def _run(_pakfire, source_id, commit_id, updated_files, deleted_files):
		commit = None
		source = None

		# Prefer resolving the source through the commit when one is given.
		if commit_id:
			commit = _pakfire.sources.get_commit_by_id(commit_id)
			assert commit

			source = commit.source

		if source_id and not source:
			source = _pakfire.sources.get_by_id(source_id)

		assert source

		if updated_files:
			# Create a temporary directory where to put all the files
			# that are generated here.
			pkg_dir = tempfile.mkdtemp()

			try:
				config = pakfire.config.Config(["general.conf",])
				config.parse(source.distro.get_config())

				p = pakfire.PakfireServer(config=config)

				# Run "dist" for every updated makefile first.  (The
				# original wrapped each call in a no-op "except: raise".)
				pkgs = [p.dist(f, pkg_dir) for f in updated_files]

				# Import all packages in one swoop.
				for pkg in pkgs:
					# Import the package file and create a build out of it.
					builds.import_from_package(_pakfire, pkg,
						distro=source.distro, commit=commit, type="release")

			except:
				# Mark the commit as failed, then re-raise.  The bare
				# except is deliberate so any abort flags the commit.
				if commit:
					commit.state = "failed"

				raise

			finally:
				if os.path.exists(pkg_dir):
					shutil.rmtree(pkg_dir)

		for filename in deleted_files:
			# Determine the package name by stripping the ".<extension>"
			# suffix from the makefile's basename.
			# BUGFIX: the original sliced name[:len(MAKEFILE_EXTENSION) + 1],
			# which KEPT only the first few characters instead of dropping
			# the suffix, deleting the wrong package names.
			name = os.path.basename(filename)
			name = name[:-(len(MAKEFILE_EXTENSION) + 1)]

			source.distro.delete_package(name)

		if commit:
			commit.state = "finished"