#!/usr/bin/python
###############################################################################
#                                                                             #
# Pakfire - The IPFire package management system                              #
# Copyright (C) 2011 Pakfire development team                                 #
#                                                                             #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation, either version 3 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# This program is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the                #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with this program. If not, see <http://www.gnu.org/licenses/>.        #
#                                                                             #
###############################################################################
21 | |
22 | import hashlib | |
8276111d | 23 | import os |
3ad4bb5a MT |
24 | import random |
25 | import socket | |
26 | import subprocess | |
27 | import tempfile | |
28 | import time | |
29 | import xmlrpclib | |
30 | ||
8b6bc023 MT |
31 | import logging |
32 | log = logging.getLogger("pakfire") | |
33 | ||
3ad4bb5a MT |
34 | import pakfire.api |
35 | import pakfire.base | |
36 | import pakfire.config | |
37 | import pakfire.downloader | |
38 | import pakfire.packages | |
39 | import pakfire.repository | |
40 | import pakfire.util | |
41 | ||
42 | from pakfire.constants import * | |
43 | ||
44 | CHUNK_SIZE = 1024**2 # 1M | |
45 | ||
class Source(object):
	"""
	A local working copy of a remote (git) source repository.

	The clone lives in a well-known location that is derived from the
	SHA1 hash of the repository URL (see the "path" property). On
	instantiation the repository is cloned if it does not exist yet,
	otherwise the most recent commits are fetched.
	"""

	def __init__(self, pakfire, id, name, url, path, targetpath, revision, branch):
		self.pakfire = pakfire
		self.id = id
		self.name = name
		self.url = url
		# NOTE(review): the "path" argument is accepted for interface
		# compatibility but is not stored - the local path is always
		# computed from the URL by the "path" property below.
		self.targetpath = targetpath
		self.revision = revision
		self.branch = branch

		# If the repository is not yet checked out, we create a local clone
		# from it to work with it.
		if not self.is_cloned():
			self.clone()
		else:
			# Always refresh the repository to have the recent commits.
			self.fetch()

	def is_cloned(self):
		# The repository counts as cloned as soon as its working
		# directory exists.
		return os.path.exists(self.path)

	def clone(self):
		"""
		Create the local clone of the remote repository.

		This is a no-op if the working directory already exists.
		"""
		if self.is_cloned():
			return

		dirname = os.path.dirname(self.path)
		basename = os.path.basename(self.path)

		if not os.path.exists(dirname):
			os.makedirs(dirname)

		self._git("clone %s %s" % (self.url, basename), path=dirname)

	def fetch(self):
		# Fetch the most recent commits from the origin.
		self._git("fetch")

	@property
	def path(self):
		# Derive a stable, URL-unique directory name from the hash of
		# the URL.
		h = hashlib.sha1(self.url)

		# XXX path is to be changed
		return "/var/cache/pakfire/sources/%s" % h.hexdigest()

	def _git(self, cmd, path=None):
		"""
		Run a git command inside the working copy (or inside "path", if
		given) and return its output.
		"""
		if not path:
			path = self.path

		cmd = "cd %s && git %s" % (path, cmd)

		log.debug("Running command: %s" % cmd)

		return subprocess.check_output(["/bin/sh", "-c", cmd])

	def _git_changed_files(self, revision1, revision2=""):
		# Return absolute paths of all files that differ between the two
		# given revisions.
		files = self._git("diff --name-only %s %s" % (revision1, revision2))

		return [os.path.join(self.path, f) for f in files.splitlines()]

	def _git_checkout_revision(self, revision):
		self._git("checkout %s" % revision)

	def update_revision(self, revision, **pakfire_args):
		"""
		Check out the given revision and rebuild all makefiles that were
		changed by it. Returns the repository created by update_files().
		"""
		# Checkout the revision we want to work with.
		self._git_checkout_revision(revision)

		# Get list of all changed files between the current revision and
		# the previous one.
		files = self._git_changed_files("HEAD^", "HEAD")

		# Update all changed files and return a repository with them.
		return self.update_files([f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)],
			**pakfire_args)

	def update_files(self, files, **pakfire_args):
		"""
		Build source packages for all given makefiles and return a
		temporary source repository containing them (or None if there
		is nothing to build).
		"""
		rnd = random.randint(0, 1024**2)
		tmpdir = "/tmp/pakfire-source-%s" % rnd

		# Only keep makefiles that still exist in the working copy.
		# XXX Files that have been removed are currently ignored; the
		# server is never told about deleted packages.
		pkgs = [makefile for makefile in files if os.path.exists(makefile)]

		if not pkgs:
			return

		# XXX This totally ignores the local configuration.
		pakfire.api.dist(pkgs, resultdirs=[tmpdir,], **pakfire_args)

		# Create a kind of dummy repository to link the packages against it.
		# (dict.has_key() is deprecated and gone in Python 3 - use "in".)
		if "build_id" in pakfire_args:
			del pakfire_args["build_id"]
		pakfire_args["mode"] = "server"

		repo = pakfire.api.repo_create("source-%s" % rnd, [tmpdir,], type="source",
			**pakfire_args)

		return repo

	def update_all(self):
		"""
		Rebuild the source packages for every makefile found anywhere in
		the working copy.
		"""
		_files = []
		for dir, subdirs, files in os.walk(self.path):
			for f in files:
				if not f.endswith(".%s" % MAKEFILE_EXTENSION):
					continue

				_files.append(os.path.join(dir, f))

		return self.update_files(_files)
160 | ||
161 | ||
class XMLRPCTransport(xmlrpclib.Transport):
	"""
	An xmlrpclib transport that retries requests on transient errors.

	Non-fatal socket errors (broken pipe, connection timeout, connection
	refused) are retried with a 30 second pause in between, up to a
	configurable number of attempts; protocol errors and all other
	exceptions are raised immediately.
	"""

	# Identify this client to the server by the pakfire version.
	user_agent = "pakfire/%s" % PAKFIRE_VERSION

	def single_request(self, *args, **kwargs):
		"""
		Perform one XMLRPC request, retrying on non-fatal socket errors.

		The extra keyword argument "tries" (default: 100) limits the
		number of attempts. Raises when the attempts are exhausted, on
		a fatal socket error, or on an XMLRPC protocol error.
		"""
		ret = None

		# Tries can be passed to this method.
		tries = kwargs.pop("tries", 100)

		while tries:
			try:
				ret = xmlrpclib.Transport.single_request(self, *args, **kwargs)

			except socket.error, e:
				# These kinds of errors are not fatal, but they can happen on
				# a bad internet connection or whatever.
				#   32 Broken pipe
				#  110 Connection timeout
				#  111 Connection refused
				# Any other errno is considered fatal and re-raised.
				if not e.errno in (32, 110, 111,):
					raise

			except xmlrpclib.ProtocolError, e:
				# Log all XMLRPC protocol errors and give up - a protocol
				# error will not go away by simply retrying.
				log.error("XMLRPC protocol error:")
				log.error(" URL: %s" % e.url)
				log.error(" HTTP headers:")
				for header in e.headers.items():
					log.error(" %s: %s" % header)
				log.error(" Error code: %s" % e.errcode)
				log.error(" Error message: %s" % e.errmsg)
				raise

			else:
				# If request was successful, we can break the loop.
				break

			# If the request was not successful, we wait a little time to try
			# it again.
			log.debug("Request was not successful, we wait a little bit and try it again.")
			time.sleep(30)
			tries -= 1

		else:
			# The while loop exhausted its tries (the "else" of a while
			# loop runs when the loop condition became false).
			log.error("Maximum number of tries was reached. Giving up.")
			# XXX need better exception here.
			raise Exception, "Could not fulfill request."

		return ret
211 | ||
212 | ||
213 | class Server(object): | |
214 | def __init__(self, **pakfire_args): | |
215 | self.config = pakfire.config.Config() | |
216 | ||
217 | server = self.config._slave.get("server") | |
218 | ||
8b6bc023 | 219 | log.info("Establishing RPC connection to: %s" % server) |
3ad4bb5a MT |
220 | |
221 | self.conn = xmlrpclib.ServerProxy(server, transport=XMLRPCTransport(), | |
222 | allow_none=True) | |
223 | ||
269c59f3 MT |
224 | self.pakfire_args = pakfire_args |
225 | ||
3ad4bb5a MT |
226 | @property |
227 | def hostname(self): | |
228 | """ | |
229 | Return the host's name. | |
230 | """ | |
231 | return socket.gethostname() | |
232 | ||
233 | def update_info(self): | |
234 | # Get the current load average. | |
235 | loadavg = ", ".join(["%.2f" % l for l in os.getloadavg()]) | |
236 | ||
237 | # Get all supported architectures. | |
238 | arches = sorted([a for a in self.config.supported_arches]) | |
239 | arches = " ".join(arches) | |
240 | ||
241 | # Determine CPU model | |
242 | cpuinfo = {} | |
243 | with open("/proc/cpuinfo") as f: | |
244 | for line in f.readlines(): | |
245 | # Break at an empty line, because all information after that | |
246 | # is redundant. | |
247 | if not line: | |
248 | break | |
249 | ||
250 | try: | |
251 | key, value = line.split(":") | |
252 | except: | |
253 | pass # Skip invalid lines | |
254 | ||
255 | key, value = key.strip(), value.strip() | |
256 | ||
257 | cpuinfo[key] = value | |
258 | ||
259 | cpu_model = cpuinfo.get("model name", "Could not be determined") | |
260 | ||
261 | # Determine memory size | |
262 | memory = 0 | |
263 | with open("/proc/meminfo") as f: | |
264 | line = f.readline() | |
265 | ||
266 | try: | |
267 | a, b, c = line.split() | |
268 | except: | |
269 | pass | |
270 | else: | |
ee603c85 | 271 | memory = int(b) |
3ad4bb5a MT |
272 | |
273 | self.conn.update_host_info(loadavg, cpu_model, memory, arches) | |
274 | ||
275 | def upload_file(self, filename, build_id): | |
276 | # Get the hash of the file. | |
277 | hash = pakfire.util.calc_hash1(filename) | |
278 | ||
279 | # Get the size of the file. | |
280 | size = os.path.getsize(filename) | |
281 | ||
282 | # Get an upload ID from the server. | |
283 | upload_id = self.conn.get_upload_cookie(os.path.basename(filename), | |
284 | size, hash) | |
285 | ||
286 | # Calculate the number of chunks. | |
287 | chunks = (size / CHUNK_SIZE) + 1 | |
288 | ||
289 | # Cut the file in pieces and upload them one after another. | |
290 | with open(filename) as f: | |
291 | chunk = 0 | |
292 | while True: | |
293 | data = f.read(CHUNK_SIZE) | |
294 | if not data: | |
295 | break | |
296 | ||
297 | chunk += 1 | |
8b6bc023 | 298 | log.info("Uploading chunk %s/%s of %s." % (chunk, chunks, |
3ad4bb5a MT |
299 | os.path.basename(filename))) |
300 | ||
301 | data = xmlrpclib.Binary(data) | |
302 | self.conn.upload_chunk(upload_id, data) | |
303 | ||
304 | # Tell the server, that we finished the upload. | |
305 | ret = self.conn.finish_upload(upload_id, build_id) | |
306 | ||
307 | # If the server sends false, something happened with the upload that | |
308 | # could not be recovered. | |
309 | if not ret: | |
310 | raise Exception, "Upload failed." | |
311 | ||
312 | def update_build_status(self, build_id, status, message=""): | |
313 | ret = self.conn.update_build_state(build_id, status, message) | |
314 | ||
315 | # If the server returns False, then it did not acknowledge our status | |
316 | # update and the build has to be aborted. | |
317 | if not ret: | |
318 | raise BuildAbortedException, "The build was aborted by the master server." | |
319 | ||
320 | def build_job(self, type=None): | |
321 | build = self.conn.build_job() # XXX type=None | |
322 | ||
323 | # If the server has got no job for us, we end right here. | |
324 | if not build: | |
325 | return | |
326 | ||
327 | job_types = { | |
328 | "binary" : self.build_binary_job, | |
329 | "source" : self.build_source_job, | |
330 | } | |
331 | ||
332 | build_id = build["id"] | |
333 | build_type = build["type"] | |
334 | ||
335 | try: | |
336 | func = job_types[build_type] | |
337 | except KeyError: | |
338 | raise Exception, "Build type not supported: %s" % type | |
339 | ||
340 | # Call the function that processes the build and try to catch general | |
341 | # exceptions and report them to the server. | |
342 | # If everything goes okay, we tell this the server, too. | |
343 | try: | |
344 | func(build_id, build) | |
345 | ||
1c2f9e52 MT |
346 | except DependencyError: |
347 | # This has already been reported by func. | |
348 | raise | |
349 | ||
3ad4bb5a MT |
350 | except Exception, e: |
351 | # Format the exception and send it to the server. | |
352 | message = "%s: %s" % (e.__class__.__name__, e) | |
353 | ||
354 | self.update_build_status(build_id, "failed", message) | |
355 | raise | |
356 | ||
357 | else: | |
358 | self.update_build_status(build_id, "finished") | |
359 | ||
360 | def build_binary_job(self, build_id, build): | |
361 | arch = build["arch"] | |
362 | filename = build["name"] | |
363 | download = build["download"] | |
364 | hash1 = build["hash1"] | |
365 | ||
366 | # Create a temporary file and a directory for the resulting files. | |
367 | tmpdir = tempfile.mkdtemp() | |
368 | tmpfile = os.path.join(tmpdir, filename) | |
369 | logfile = os.path.join(tmpdir, "build.log") | |
370 | ||
371 | # Get a package grabber and add mirror download capabilities to it. | |
e57c5475 | 372 | grabber = pakfire.downloader.PackageDownloader(self.config) |
3ad4bb5a MT |
373 | |
374 | try: | |
375 | # Download the source. | |
376 | grabber.urlgrab(download, filename=tmpfile) | |
377 | ||
378 | # Check if the download checksum matches. | |
379 | if pakfire.util.calc_hash1(tmpfile) == hash1: | |
380 | print "Checksum matches: %s" % hash1 | |
381 | else: | |
382 | raise DownloadError, "Download was corrupted" | |
383 | ||
384 | # Update the build status on the server. | |
385 | self.update_build_status(build_id, "running") | |
386 | ||
387 | # Run the build. | |
388 | pakfire.api.build(tmpfile, build_id=build_id, | |
389 | resultdirs=[tmpdir,], logfile=logfile) | |
390 | ||
391 | self.update_build_status(build_id, "uploading") | |
392 | ||
393 | # Walk through the result directory and upload all (binary) files. | |
394 | for dir, subdirs, files in os.walk(tmpdir): | |
395 | for file in files: | |
396 | file = os.path.join(dir, file) | |
397 | if file in (logfile, tmpfile,): | |
398 | continue | |
399 | ||
400 | self.upload_file(file, build_id) | |
401 | ||
402 | except DependencyError, e: | |
403 | message = "%s: %s" % (e.__class__.__name__, e) | |
404 | self.update_build_status(build_id, "dependency_error", message) | |
1c2f9e52 | 405 | raise |
3ad4bb5a MT |
406 | |
407 | finally: | |
408 | # Upload the logfile in any case and if it exists. | |
409 | if os.path.exists(logfile): | |
410 | self.upload_file(logfile, build_id) | |
411 | ||
412 | # Cleanup the files we created. | |
413 | pakfire.util.rm(tmpdir) | |
414 | ||
415 | def build_source_job(self, build_id, build): | |
416 | # Update the build status on the server. | |
417 | self.update_build_status(build_id, "running") | |
418 | ||
419 | source = Source(self, **build["source"]) | |
420 | ||
269c59f3 MT |
421 | repo = source.update_revision(build["revision"], build_id=build_id, |
422 | **self.pakfire_args) | |
3ad4bb5a | 423 | |
8276111d MT |
424 | try: |
425 | # Upload all files in the repository. | |
426 | for pkg in repo: | |
427 | path = os.path.join(pkg.repo.path, pkg.filename) | |
428 | self.upload_file(path, build_id) | |
429 | finally: | |
430 | repo.remove() | |
431 | ||
432 | def update_repositories(self, limit=2): | |
433 | repos = self.conn.get_repos(limit) | |
434 | ||
435 | for repo in repos: | |
436 | files = self.conn.get_repo_packages(repo["id"]) | |
437 | ||
438 | for arch in repo["arches"]: | |
439 | path = "/pakfire/repositories/%s/%s/%s" % \ | |
440 | (repo["distro"]["sname"], repo["name"], arch) | |
3ad4bb5a | 441 | |
8276111d | 442 | pakfire.api.repo_create(path, files) |