]>
Commit | Line | Data |
---|---|---|
3ad4bb5a | 1 | #!/usr/bin/python |
b792d887 MT |
2 | ############################################################################### |
3 | # # | |
4 | # Pakfire - The IPFire package management system # | |
5 | # Copyright (C) 2011 Pakfire development team # | |
6 | # # | |
7 | # This program is free software: you can redistribute it and/or modify # | |
8 | # it under the terms of the GNU General Public License as published by # | |
9 | # the Free Software Foundation, either version 3 of the License, or # | |
10 | # (at your option) any later version. # | |
11 | # # | |
12 | # This program is distributed in the hope that it will be useful, # | |
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of # | |
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # | |
15 | # GNU General Public License for more details. # | |
16 | # # | |
17 | # You should have received a copy of the GNU General Public License # | |
18 | # along with this program. If not, see <http://www.gnu.org/licenses/>. # | |
19 | # # | |
20 | ############################################################################### | |
3ad4bb5a MT |
21 | |
22 | import hashlib | |
8276111d | 23 | import os |
3ad4bb5a MT |
24 | import random |
25 | import socket | |
26 | import subprocess | |
27 | import tempfile | |
28 | import time | |
29 | import xmlrpclib | |
30 | ||
8b6bc023 MT |
31 | import logging |
32 | log = logging.getLogger("pakfire") | |
33 | ||
3ad4bb5a MT |
34 | import pakfire.api |
35 | import pakfire.base | |
36 | import pakfire.config | |
37 | import pakfire.downloader | |
38 | import pakfire.packages | |
39 | import pakfire.repository | |
40 | import pakfire.util | |
41 | ||
a6bd96bc | 42 | from pakfire.system import system |
3ad4bb5a | 43 | from pakfire.constants import * |
aad6f600 | 44 | from pakfire.i18n import _ |
3ad4bb5a MT |
45 | |
46 | CHUNK_SIZE = 1024**2 # 1M | |
47 | ||
class Source(object):
	"""
	A local git clone of one source repository.

	Packages are built from makefiles that are tracked in a git
	repository. This class keeps a local checkout of such a repository
	up to date and turns changed makefiles into source packages.
	"""

	def __init__(self, pakfire, id, name, url, path, targetpath, revision, branch):
		self.pakfire = pakfire
		self.id = id
		self.name = name
		self.url = url
		# NOTE(review): "path" is accepted but ignored - the checkout
		# location is derived from the URL (see the "path" property).
		self.targetpath = targetpath
		self.revision = revision
		self.branch = branch

		# If the repository is not yet checked out, we create a local clone
		# from it to work with it.
		if not self.is_cloned():
			self.clone()
		else:
			# Always refresh the repository to have the recent commits.
			self.fetch()

	def is_cloned(self):
		# A checkout exists when its cache directory is present.
		return os.path.exists(self.path)

	def clone(self):
		"""
		Create the local clone of the repository. No-op if it already
		exists.
		"""
		if self.is_cloned():
			return

		dirname = os.path.dirname(self.path)
		basename = os.path.basename(self.path)

		if not os.path.exists(dirname):
			os.makedirs(dirname)

		self._git("clone %s %s" % (self.url, basename), path=dirname)

	def fetch(self):
		# Pull in the most recent commits from the origin.
		self._git("fetch")

	@property
	def path(self):
		# The checkout lives in a directory named after the SHA1 digest
		# of the repository URL, so distinct sources cannot collide.
		h = hashlib.sha1(self.url)

		# XXX path is to be changed
		return "/var/cache/pakfire/sources/%s" % h.hexdigest()

	def _git(self, cmd, path=None):
		"""
		Run the given git command inside the checkout (or inside "path",
		if given) and return the command's output.
		"""
		if not path:
			path = self.path

		cmd = "cd %s && git %s" % (path, cmd)

		log.debug("Running command: %s" % cmd)

		return subprocess.check_output(["/bin/sh", "-c", cmd])

	def _git_changed_files(self, revision1, revision2=""):
		# Return the absolute paths of all files that differ between the
		# two given revisions.
		files = self._git("diff --name-only %s %s" % (revision1, revision2))

		return [os.path.join(self.path, f) for f in files.splitlines()]

	def _git_checkout_revision(self, revision):
		self._git("checkout %s" % revision)

	def update_revision(self, revision, **pakfire_args):
		"""
		Check out the given revision and rebuild all makefiles that it
		touched. Returns the repository created by update_files() (or
		None if nothing changed).
		"""
		# Checkout the revision we want to work with.
		self._git_checkout_revision(revision)

		# Get list of all changed files between the current revision and
		# the previous one.
		files = self._git_changed_files("HEAD^", "HEAD")

		# Update all changed files and return a repository with them.
		return self.update_files([f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)],
			**pakfire_args)

	def update_files(self, files, **pakfire_args):
		"""
		Create source packages from the given makefiles and return a
		temporary source repository containing them. Returns None when
		none of the files exists (anymore).
		"""
		rnd = random.randint(0, 1024**2)
		tmpdir = "/tmp/pakfire-source-%s" % rnd

		pkgs = []
		# "filename" instead of "file" - don't shadow the builtin.
		for filename in files:
			if os.path.exists(filename):
				pkgs.append(filename)
			# XXX not sure what to do here
			#else:
			#	pkg_name = os.path.basename(os.path.dirname(file))
			#
			#	# Send deleted package to server.
			#	self.master.package_remove(self, pkg_name)

		if not pkgs:
			return

		# XXX This totally ignores the local configuration.
		pakfire.api.dist(pkgs, resultdirs=[tmpdir,], **pakfire_args)

		# Create a kind of dummy repository to link the packages against it.
		# ("in" replaces the deprecated dict.has_key().)
		if "build_id" in pakfire_args:
			del pakfire_args["build_id"]
		pakfire_args["mode"] = "server"

		repo = pakfire.api.repo_create("source-%s" % rnd, [tmpdir,], type="source",
			**pakfire_args)

		return repo

	def update_all(self):
		"""
		Rebuild every makefile that is found anywhere in the checkout.
		"""
		_files = []
		for dir, subdirs, files in os.walk(self.path):
			for f in files:
				if not f.endswith(".%s" % MAKEFILE_EXTENSION):
					continue

				_files.append(os.path.join(dir, f))

		return self.update_files(_files)
162 | ||
163 | ||
class XMLRPCTransport(xmlrpclib.Transport):
	"""
	Transport for xmlrpclib that retries requests which failed because
	of a (probably temporary) network problem.
	"""

	# Identify ourselves with a pakfire user agent string.
	user_agent = "pakfire/%s" % PAKFIRE_VERSION

	def single_request(self, *args, **kwargs):
		"""
		Perform one XMLRPC request, retrying on temporary network
		errors. The number of attempts may be passed via the "tries"
		keyword argument (default: 100).
		"""
		result = None

		# Tries can be passed to this method.
		tries = kwargs.pop("tries", 100)

		while tries:
			try:
				result = xmlrpclib.Transport.single_request(self, *args, **kwargs)

			except socket.error as e:
				# These errors are not fatal; they happen on a bad
				# internet connection or whatever:
				#   32 Broken pipe
				#  110 Connection timeout
				#  111 Connection refused
				# Anything else is re-raised immediately.
				if e.errno not in (32, 110, 111,):
					raise

			except xmlrpclib.ProtocolError as e:
				# Log all XMLRPC protocol errors before giving up.
				log.error("XMLRPC protocol error:")
				log.error(" URL: %s" % e.url)
				log.error(" HTTP headers:")
				for header in e.headers.items():
					log.error(" %s: %s" % header)
				log.error(" Error code: %s" % e.errcode)
				log.error(" Error message: %s" % e.errmsg)
				raise

			else:
				# If request was successful, we can break the loop.
				break

			# The request did not go through - wait a little while
			# before the next attempt.
			log.debug("Request was not successful, we wait a little bit and try it again.")
			time.sleep(30)
			tries -= 1

		else:
			# The while loop ran out of tries without a break.
			log.error("Maximum number of tries was reached. Giving up.")
			# XXX need better exception here.
			raise Exception("Could not fulfill request.")

		return result
213 | ||
214 | ||
c62d93f1 MT |
class ServerProxy(xmlrpclib.ServerProxy):
	"""
	ServerProxy preconfigured for pakfire: uses the retrying
	XMLRPCTransport by default and always allows None values.
	"""

	def __init__(self, server, *args, **kwargs):
		"""
		server: URL of the XMLRPC endpoint. All other arguments are
		passed through to xmlrpclib.ServerProxy.
		"""
		# Install our retrying transport unless the caller supplied one.
		# ("not in" replaces the deprecated dict.has_key().)
		if "transport" not in kwargs:
			kwargs["transport"] = XMLRPCTransport()

		# The server is allowed to send None values.
		kwargs["allow_none"] = True

		xmlrpclib.ServerProxy.__init__(self, server, *args, **kwargs)
225 | ||
226 | ||
3ad4bb5a MT |
class Server(object):
	"""
	A build slave. Connects to the pakfire master server via XMLRPC,
	reports host information, fetches build jobs, runs them and uploads
	the results.
	"""

	def __init__(self, **pakfire_args):
		self.config = pakfire.config.Config()

		# The master's URL comes from the slave configuration.
		server = self.config._slave.get("server")

		log.info("Establishing RPC connection to: %s" % server)

		self.conn = ServerProxy(server)

		# Extra arguments that are passed through to pakfire calls.
		self.pakfire_args = pakfire_args

	@property
	def hostname(self):
		"""
		Return the host's name.
		"""
		return socket.gethostname()

	@property
	def uname(self):
		# The machine hardware name, e.g. "x86_64" or "armv5tel".
		return os.uname()[4]

	@property
	def cpu_model(self):
		"""
		Return a human-readable description of the host's CPU, or a
		translated placeholder if it could not be determined.
		"""
		# Determine CPU model by parsing /proc/cpuinfo into a dict.
		cpuinfo = {}
		with open("/proc/cpuinfo") as f:
			for line in f.readlines():
				# Break at an empty line, because all information after that
				# is redundant. (readlines() keeps the trailing newline, so
				# the line must be stripped before the test - the previous
				# "if not line" could never match.)
				if not line.strip():
					break

				try:
					key, value = line.split(":")
				except ValueError:
					# Skip invalid lines instead of silently reusing the
					# key/value pair of the previous iteration.
					continue

				key, value = key.strip(), value.strip()

				cpuinfo[key] = value

		ret = None
		if self.uname.startswith("arm"):
			# ARM kernels expose the model in "Hardware"/"Processor".
			try:
				ret = "%(Hardware)s - %(Processor)s" % cpuinfo
			except KeyError:
				pass
		else:
			ret = cpuinfo.get("model name", None)

		return ret or _("Could not be determined")

	@property
	def memory(self):
		"""
		Return the total amount of system memory in bytes (0 if it
		could not be determined).
		"""
		# Determine memory size from the first line of /proc/meminfo,
		# which is "MemTotal: <size> kB".
		memory = 0
		with open("/proc/meminfo") as f:
			line = f.readline()

			try:
				a, b, c = line.split()
			except ValueError:
				pass
			else:
				memory = int(b) * 1024

		return memory

	def info(self):
		"""
		Return a list of lines with human-readable information about
		this host.
		"""
		ret = []

		ret.append("")
		ret.append("  PAKFIRE %s" % PAKFIRE_VERSION)
		ret.append("")
		ret.append("  %-20s: %s" % (_("Hostname"), self.hostname))
		ret.append("")

		# Hardware information
		ret.append("  %s:" % _("Hardware information"))
		ret.append("    %-16s: %s" % (_("CPU model"), self.cpu_model))
		ret.append("    %-16s: %s" % (_("Memory"), pakfire.util.format_size(self.memory)))
		ret.append("")
		ret.append("    %-16s: %s" % (_("Native arch"), system.native_arch))

		# Print the header only in front of the first arch.
		header = _("Supported arches")
		for arch in self.config.supported_arches:
			ret.append("    %-16s: %s" % (header, arch))
			header = ""
		ret.append("")

		return ret

	def update_info(self):
		"""
		Send the current host information (load average, CPU, memory
		and supported arches) to the master server.
		"""
		# Get the current load average.
		loadavg = ", ".join(["%.2f" % l for l in os.getloadavg()])

		# Get all supported architectures.
		arches = " ".join(self.config.supported_arches)

		self.conn.update_host_info(loadavg, self.cpu_model, self.memory, arches)

	def upload_file(self, filename, build_id):
		"""
		Upload the given file in chunks to the master server and attach
		it to the given build. Raises Exception when the server could
		not process the upload.
		"""
		# Get the hash of the file. (Renamed from "hash" - don't shadow
		# the builtin.)
		hash1 = pakfire.util.calc_hash1(filename)

		# Get the size of the file.
		size = os.path.getsize(filename)

		# Get an upload ID from the server.
		upload_id = self.conn.get_upload_cookie(os.path.basename(filename),
			size, hash1)

		# Calculate the number of chunks (explicit floor division).
		chunks = (size // CHUNK_SIZE) + 1

		# Cut the file in pieces and upload them one after another.
		# Open in binary mode: packages and logs must be uploaded byte
		# for byte (text mode was a bug, even if harmless on Linux py2).
		with open(filename, "rb") as f:
			chunk = 0
			while True:
				data = f.read(CHUNK_SIZE)
				if not data:
					break

				chunk += 1
				log.info("Uploading chunk %s/%s of %s." % (chunk, chunks,
					os.path.basename(filename)))

				data = xmlrpclib.Binary(data)
				self.conn.upload_chunk(upload_id, data)

		# Tell the server, that we finished the upload.
		ret = self.conn.finish_upload(upload_id, build_id)

		# If the server sends false, something happened with the upload that
		# could not be recovered.
		if not ret:
			raise Exception("Upload failed.")

	def update_build_status(self, build_id, status, message=""):
		"""
		Report a new build state to the master server. Raises
		BuildAbortedException when the server rejects the update.
		"""
		ret = self.conn.update_build_state(build_id, status, message)

		# If the server returns False, then it did not acknowledge our status
		# update and the build has to be aborted.
		if not ret:
			raise BuildAbortedException("The build was aborted by the master server.")

	def build_job(self, type=None):
		"""
		Fetch one build job from the server and process it. Returns
		None when the server has no job for us.
		"""
		build = self.conn.build_job() # XXX type=None

		# If the server has got no job for us, we end right here.
		if not build:
			return

		# Dispatch table from job type to handler.
		job_types = {
			"binary" : self.build_binary_job,
			"source" : self.build_source_job,
		}

		build_id = build["id"]
		build_type = build["type"]

		try:
			func = job_types[build_type]
		except KeyError:
			# BUGFIX: report the job's type; the old code formatted the
			# "type" argument, which is usually None.
			raise Exception("Build type not supported: %s" % build_type)

		# Call the function that processes the build and try to catch general
		# exceptions and report them to the server.
		# If everything goes okay, we tell this the server, too.
		try:
			func(build_id, build)

		except DependencyError:
			# This has already been reported by func.
			raise

		except Exception as e:
			# Format the exception and send it to the server.
			message = "%s: %s" % (e.__class__.__name__, e)

			self.update_build_status(build_id, "failed", message)
			raise

		else:
			self.update_build_status(build_id, "finished")

	def build_binary_job(self, build_id, build):
		"""
		Download the source package described by "build", build it and
		upload the resulting files plus the build log.
		"""
		arch = build["arch"]
		filename = build["name"]
		download = build["download"]
		hash1 = build["hash1"]

		# Create a temporary file and a directory for the resulting files.
		tmpdir = tempfile.mkdtemp()
		tmpfile = os.path.join(tmpdir, filename)
		logfile = os.path.join(tmpdir, "build.log")

		# Get a package grabber and add mirror download capabilities to it.
		grabber = pakfire.downloader.PackageDownloader(self.config)

		try:
			# Download the source.
			grabber.urlgrab(download, filename=tmpfile)

			# Check if the download checksum matches.
			if pakfire.util.calc_hash1(tmpfile) == hash1:
				print("Checksum matches: %s" % hash1)
			else:
				raise DownloadError("Download was corrupted")

			# Update the build status on the server.
			self.update_build_status(build_id, "running")

			# Run the build.
			pakfire.api.build(tmpfile, build_id=build_id,
				resultdirs=[tmpdir,], logfile=logfile)

			self.update_build_status(build_id, "uploading")

			# Walk through the result directory and upload all (binary)
			# files, except the log and the source package themselves.
			for dir, subdirs, files in os.walk(tmpdir):
				for name in files:
					path = os.path.join(dir, name)
					if path in (logfile, tmpfile,):
						continue

					self.upload_file(path, build_id)

		except DependencyError as e:
			message = "%s: %s" % (e.__class__.__name__, e)
			self.update_build_status(build_id, "dependency_error", message)
			raise

		finally:
			# Upload the logfile in any case and if it exists.
			if os.path.exists(logfile):
				self.upload_file(logfile, build_id)

			# Cleanup the files we created.
			pakfire.util.rm(tmpdir)

	def build_source_job(self, build_id, build):
		"""
		Check out the source repository at the requested revision,
		create source packages from the changed makefiles and upload
		them.
		"""
		# Update the build status on the server.
		self.update_build_status(build_id, "running")

		source = Source(self, **build["source"])

		repo = source.update_revision(build["revision"], build_id=build_id,
			**self.pakfire_args)

		# update_revision() returns None when there was nothing to
		# build - iterating over it would crash.
		if repo is None:
			return

		try:
			# Upload all files in the repository.
			for pkg in repo:
				path = os.path.join(pkg.repo.path, pkg.filename)
				self.upload_file(path, build_id)
		finally:
			repo.remove()

	def update_repositories(self, limit=2):
		"""
		Fetch up to "limit" repositories from the master server and
		re-create them locally for every architecture they carry.
		"""
		repos = self.conn.get_repos(limit)

		for repo in repos:
			files = self.conn.get_repo_packages(repo["id"])

			for arch in repo["arches"]:
				path = "/pakfire/repositories/%s/%s/%s" % \
					(repo["distro"]["sname"], repo["name"], arch)

				pakfire.api.repo_create(path, files)

	def create_scratch_build(self, *args, **kwargs):
		# Simply forwarded to the master server.
		return self.conn.create_scratch_build(*args, **kwargs)