]> git.ipfire.org Git - people/jschlag/pbs.git/blob - src/manager/builds.py
9ff85d2bc58d8ca26d1433dd6919b04b5f9495cd
[people/jschlag/pbs.git] / src / manager / builds.py
1 #!/usr/bin/python
2
import datetime
import logging
import os
import shutil
import tempfile

import pakfire
import pakfire.config

from .. import builds
from .. import git

from . import base

from ..constants import *
16
class BuildsFailedRestartEvent(base.Event):
	"""
		Periodically restarts failed build jobs that have not yet
		exhausted their retry budget.
	"""
	# Run when idle.
	priority = 5

	@property
	def interval(self):
		# How often to scan for restartable jobs (seconds, default 15 min).
		return self.pakfire.settings.get_int("build_keepalive_interval", 900)

	def run(self):
		"""
			Find 'build' jobs that failed more than 72 hours ago, belong
			to a build that is not broken, and have been tried at most
			max_tries times — and reset them to 'new' so they run again.
		"""
		max_tries = self.pakfire.settings.get_int("builds_restart_max_tries", 9)

		# NOTE: the WHERE clause restricts to jobs.type = 'build', so the
		# former ORDER BY CASE over jobs.type ('build' vs. 'test') was
		# unreachable dead code and has been removed.
		query = self.db.query("SELECT jobs.id AS id FROM jobs \
			JOIN builds ON builds.id = jobs.build_id \
			WHERE \
				jobs.type = 'build' AND \
				jobs.state = 'failed' AND \
				jobs.tries <= %s AND \
				NOT builds.state = 'broken' AND \
				jobs.time_finished < NOW() - '72 hours'::interval \
			ORDER BY \
				builds.priority DESC, jobs.time_created ASC",
			max_tries)

		for row in query:
			job = self.pakfire.jobs.get_by_id(row.id)

			# Restart the job (state change is not logged).
			job.set_state("new", log=False)
49
50
class CheckBuildDependenciesEvent(base.Event):
	"""
		Scans for jobs whose dependencies should be (re)checked and
		schedules one CheckBuildDependencyEvent per job.
	"""
	# Process them as quickly as possible, but there may be more important events.
	priority = 3

	@property
	def interval(self):
		# Scan frequency in seconds (default 30).
		return self.pakfire.settings.get_int("dependency_checker_interval", 30)

	def run(self):
		# Pick up to 50 jobs that are either brand new, or have been
		# stuck in a dependency error for longer than five minutes.
		rows = self.db.query("SELECT id FROM jobs \
			WHERE state = 'new' OR \
				(state = 'dependency_error' AND \
				time_finished < NOW() - '5 minutes'::interval) \
			ORDER BY time_finished LIMIT 50")

		# Hand each job over to the scheduler as an individual event.
		for entry in rows:
			self.scheduler.add_event(
				CheckBuildDependencyEvent(self.pakfire, entry.id))
69
70
class CheckBuildDependencyEvent(base.Event):
	"""
		Resolves the dependencies of a single job in a subprocess.
	"""
	# Process them as quickly as possible, but there may be more important events.
	priority = 3

	def run(self, job_id):
		"""Run the dependency resolution for job_id in a subprocess."""
		self.run_subprocess(self._run, job_id)

	@staticmethod
	def _run(_pakfire, job_id):
		"""
			Subprocess worker: look up the job and resolve its
			dependencies, unless it no longer exists or its state has
			moved on in the meantime.
		"""
		# Get the build job we are working on.
		job = _pakfire.jobs.get_by_id(job_id)
		if not job:
			# Lazy %-args instead of eager string formatting.
			logging.debug("Job %s does not exist.", job_id)
			return

		# Check if the job status has changed in the meanwhile.
		# ("x not in y" instead of the unidiomatic "not x in y".)
		if job.state not in ("new", "dependency_error", "failed"):
			logging.warning("Job status has already changed: %s - %s",
				job.name, job.state)
			return

		# Resolve the dependencies.
		job.resolvdep()
93
94
class DistEvent(base.Event):
	"""
		Processes pending source commits: checks out each commit and
		schedules DistFileEvents for all changed makefiles.
	"""
	interval = 60

	# Repositories are (re)initialized only on the first run.
	first_run = True

	def run(self):
		if self.first_run:
			self.first_run = False

			self.process = self.init_repos()

		for commit in self.pakfire.sources.get_pending_commits():
			commit.state = "running"

			logging.debug("Processing commit %s: %s",
				commit.revision, commit.subject)

			# Get the repository of this commit.
			repo = git.Repo(self.pakfire, commit.source)

			# Make sure, it is checked out.
			if not repo.cloned:
				repo.clone()

			# Navigate to the right revision.
			repo.checkout(commit.revision)

			# Get all changed makefiles.
			deleted_files = []
			updated_files = []

			for file in repo.changed_files(commit.revision):
				# Don't care about files that are not a makefile.
				if not file.endswith(".%s" % MAKEFILE_EXTENSION):
					continue

				# A file that still exists was updated; otherwise deleted.
				if os.path.exists(file):
					updated_files.append(file)
				else:
					deleted_files.append(file)

			e = DistFileEvent(self.pakfire, None, commit.id,
				updated_files, deleted_files)
			self.scheduler.add_event(e)

	def init_repos(self):
		"""
			Initialize all repositories and schedule a DistFileEvent for
			every makefile of each source that has no revision yet.
		"""
		for source in self.pakfire.sources.get_all():
			# Skip those which already have a revision.
			if source.revision:
				continue

			# Initialize the repository and clone it if necessary.
			repo = git.Repo(self.pakfire, source)
			if not repo.cloned:
				repo.clone()

			# Get a list of all files in the repository.
			files = repo.get_all_files()

			# BUG FIX: the filter previously read "file.endswith" — an
			# undefined name inside the comprehension — which raised
			# NameError. It must test the comprehension variable "f".
			for file in (f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)):
				e = DistFileEvent(self.pakfire, source.id, None, [file], [])
				self.scheduler.add_event(e)
158
159
class DistFileEvent(base.Event):
	"""
		Dists updated makefiles into source packages and imports them as
		builds; deletes packages whose makefiles were removed.
	"""
	def run(self, *args):
		self.run_subprocess(self._run, *args)

	@staticmethod
	def _run(_pakfire, source_id, commit_id, updated_files, deleted_files):
		"""
			Subprocess worker.

			Either commit_id or source_id must identify the source.
			updated_files are dist'ed and imported as release builds;
			deleted_files cause the corresponding package to be removed
			from the distribution.
		"""
		commit = None
		source = None

		if commit_id:
			commit = _pakfire.sources.get_commit_by_id(commit_id)
			assert commit

			source = commit.source

		if source_id and not source:
			source = _pakfire.sources.get_by_id(source_id)

		assert source

		if updated_files:
			# Create a temporary directory where to put all the files
			# that are generated here.
			pkg_dir = tempfile.mkdtemp()

			try:
				config = pakfire.config.Config(["general.conf",])
				config.parse(source.distro.get_config())

				p = pakfire.PakfireServer(config=config)

				# Dist all updated makefiles. (The former per-file
				# "try: ... except: raise" was a no-op and was removed.)
				pkgs = [p.dist(file, pkg_dir) for file in updated_files]

				# Import all packages in one swoop.
				for pkg in pkgs:
					# Import the package file and create a build out of it.
					builds.import_from_package(_pakfire, pkg,
						distro=source.distro, commit=commit, type="release")

			except:
				# Mark the commit as failed before re-raising.
				if commit:
					commit.state = "failed"

				raise

			finally:
				# Always clean up the temporary directory.
				if os.path.exists(pkg_dir):
					shutil.rmtree(pkg_dir)

		for file in deleted_files:
			# Determine the name of the package by stripping the
			# ".<MAKEFILE_EXTENSION>" suffix from the file name.
			# BUG FIX: the slice previously kept the FIRST
			# len(extension)+1 characters instead of removing the
			# extension from the end.
			name = os.path.basename(file)
			name = name[:-(len(MAKEFILE_EXTENSION) + 1)]

			source.distro.delete_package(name)

		if commit:
			commit.state = "finished"