]> git.ipfire.org Git - people/stevee/pakfire.git/blob - python/pakfire/server.py
Quality Agent: Replace "env ruby".
[people/stevee/pakfire.git] / python / pakfire / server.py
1 #!/usr/bin/python
2 ###############################################################################
3 # #
4 # Pakfire - The IPFire package management system #
5 # Copyright (C) 2011 Pakfire development team #
6 # #
7 # This program is free software: you can redistribute it and/or modify #
8 # it under the terms of the GNU General Public License as published by #
9 # the Free Software Foundation, either version 3 of the License, or #
10 # (at your option) any later version. #
11 # #
12 # This program is distributed in the hope that it will be useful, #
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of #
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
15 # GNU General Public License for more details. #
16 # #
17 # You should have received a copy of the GNU General Public License #
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. #
19 # #
20 ###############################################################################
21
22 import hashlib
23 import os
24 import random
25 import socket
26 import subprocess
27 import tempfile
28 import time
29 import xmlrpclib
30
31 import logging
32 log = logging.getLogger("pakfire")
33
34 import pakfire.base
35 import pakfire.config
36 import pakfire.downloader
37 import pakfire.packages
38 import pakfire.repository
39 import pakfire.util
40
41 from pakfire.system import system
42 from pakfire.constants import *
43 from pakfire.i18n import _
44
45 CHUNK_SIZE = 1024**2 # 1M
46
class Source(object):
	"""
	Represents a remote (git) source repository that is mirrored into a
	local clone and used to build source packages from.
	"""

	def __init__(self, pakfire, id, name, url, path, targetpath, revision, branch):
		self.pakfire = pakfire
		self.id = id
		self.name = name
		self.url = url
		self.targetpath = targetpath
		self.revision = revision
		self.branch = branch

		# NOTE(review): the "path" argument is accepted for API
		# compatibility but not stored; the local path is derived from
		# the URL (see the "path" property below) -- confirm with callers.

		# If the repository is not yet checked out, we create a local clone
		# from it to work with it.
		if not self.is_cloned():
			self.clone()
		else:
			# Always refresh the repository to have the recent commits.
			self.fetch()

	def is_cloned(self):
		# A source counts as cloned as soon as its local directory exists.
		return os.path.exists(self.path)

	def clone(self):
		"""
		Create the initial local clone of the remote repository.

		Does nothing if the clone already exists.
		"""
		if self.is_cloned():
			return

		dirname = os.path.dirname(self.path)
		basename = os.path.basename(self.path)

		if not os.path.exists(dirname):
			os.makedirs(dirname)

		self._git("clone %s %s" % (self.url, basename), path=dirname)

	def fetch(self):
		# Fetch the most recent commits from the remote repository.
		self._git("fetch")

	@property
	def path(self):
		"""
		The path of the local clone.

		Derived from the hash of the source URL, so every source gets
		its own stable directory.
		"""
		h = hashlib.sha1(self.url)

		# XXX path is to be changed
		return "/var/cache/pakfire/sources/%s" % h.hexdigest()

	def _git(self, cmd, path=None):
		"""
		Run a git command in the local clone (or in "path", if given)
		and return its output.
		"""
		if not path:
			path = self.path

		cmd = "cd %s && git %s" % (path, cmd)

		log.debug("Running command: %s" % cmd)

		return subprocess.check_output(["/bin/sh", "-c", cmd])

	def _git_changed_files(self, revision1, revision2=""):
		# Return absolute paths of all files that changed between the
		# two given revisions.
		files = self._git("diff --name-only %s %s" % (revision1, revision2))

		return [os.path.join(self.path, f) for f in files.splitlines()]

	def _git_checkout_revision(self, revision):
		self._git("checkout %s" % revision)

	def update_revision(self, revision, **pakfire_args):
		"""
		Check out the given revision and process all makefiles that it
		changed. Returns the result of update_files().
		"""
		# Checkout the revision we want to work with.
		self._git_checkout_revision(revision)

		# Get list of all changes files between the current revision and
		# the previous one.
		files = self._git_changed_files("HEAD^", "HEAD")

		# Update all changed files and return a repository with them.
		return self.update_files([f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)],
			**pakfire_args)

	def update_files(self, files, **pakfire_args):
		"""
		Build source packages from all given makefiles and return a
		(source) repository containing them, or None if there was
		nothing to do.
		"""
		rnd = random.randint(0, 1024**2)
		tmpdir = "/tmp/pakfire-source-%s" % rnd

		pkgs = []
		for filename in files:
			if os.path.exists(filename):
				pkgs.append(filename)
			# XXX not sure what to do here
			#else:
			#	pkg_name = os.path.basename(os.path.dirname(file))
			#
			#	# Send deleted package to server.
			#	self.master.package_remove(self, pkg_name)

		if not pkgs:
			return

		# XXX This totally ignores the local configuration.
		pakfire.api.dist(pkgs, resultdirs=[tmpdir,], **pakfire_args)

		# Create a kind of dummy repository to link the packages against it.
		# The build ID must not be forwarded to repo_create().
		if "build_id" in pakfire_args:
			del pakfire_args["build_id"]
		pakfire_args["mode"] = "server"

		repo = pakfire.api.repo_create("source-%s" % rnd, [tmpdir,], type="source",
			**pakfire_args)

		return repo

	def update_all(self):
		"""
		Rebuild all makefiles that can be found in the local clone.
		"""
		_files = []
		for dirpath, subdirs, files in os.walk(self.path):
			for f in files:
				if not f.endswith(".%s" % MAKEFILE_EXTENSION):
					continue

				_files.append(os.path.join(dirpath, f))

		return self.update_files(_files)
161
162
class XMLRPCTransport(xmlrpclib.Transport):
	"""
	An XMLRPC transport that transparently retries requests which
	failed because of non-fatal network errors.
	"""

	user_agent = "pakfire/%s" % PAKFIRE_VERSION

	def single_request(self, *args, **kwargs):
		"""
		Perform a single XMLRPC request.

		Non-fatal socket errors (broken pipe, connection timeout,
		connection refused) are retried up to "tries" times (default
		100) with a 30 second pause in between; all other errors are
		raised immediately. Raises a generic Exception when all tries
		are exhausted.
		"""
		ret = None

		# Tries can be passed to this method.
		tries = kwargs.pop("tries", 100)

		while tries:
			try:
				ret = xmlrpclib.Transport.single_request(self, *args, **kwargs)

			except socket.error as e:
				# These kinds of errors are not fatal, but they can happen on
				# a bad internet connection or whatever.
				#   32 Broken pipe
				#  110 Connection timeout
				#  111 Connection refused
				if not e.errno in (32, 110, 111,):
					raise

			except xmlrpclib.ProtocolError as e:
				# Log all XMLRPC protocol errors.
				log.error("XMLRPC protocol error:")
				log.error("  URL: %s" % e.url)
				log.error("  HTTP headers:")
				for header in e.headers.items():
					log.error("    %s: %s" % header)
				log.error("  Error code: %s" % e.errcode)
				log.error("  Error message: %s" % e.errmsg)
				raise

			else:
				# If request was successful, we can break the loop.
				break

			# If the request was not successful, we wait a little time to try
			# it again.
			log.debug("Request was not successful, we wait a little bit and try it again.")
			time.sleep(30)
			tries -= 1

		else:
			log.error("Maximum number of tries was reached. Giving up.")
			# XXX need better exception here.
			raise Exception("Could not fulfill request.")

		return ret
212
213
class ServerProxy(xmlrpclib.ServerProxy):
	"""
	An xmlrpclib.ServerProxy with pakfire-specific defaults: the
	retrying XMLRPCTransport and allow_none enabled.
	"""

	def __init__(self, server, *args, **kwargs):
		# Use our own (retrying) transport unless the caller supplied one.
		# ("in" replaces the deprecated dict.has_key().)
		if "transport" not in kwargs:
			kwargs["transport"] = XMLRPCTransport()

		# The server is allowed to send None values.
		kwargs["allow_none"] = True

		xmlrpclib.ServerProxy.__init__(self, server, *args, **kwargs)
224
225
class Server(object):
	"""
	A client for the pakfire master server.

	Collects information about the local host, uploads files and
	processes build jobs that are handed out by the master.
	"""

	def __init__(self, **pakfire_args):
		self.config = pakfire.config.Config()

		# The address of the master server comes from the slave
		# configuration.
		server = self.config._slave.get("server")

		log.info("Establishing RPC connection to: %s" % server)

		self.conn = ServerProxy(server)

		# Arguments that are passed through to pakfire API calls.
		self.pakfire_args = pakfire_args

	@property
	def hostname(self):
		"""
		Return the host's name.
		"""
		return socket.gethostname()

	@property
	def uname(self):
		# The machine hardware name (e.g. x86_64 or armv5tel).
		return os.uname()[4]

	@property
	def cpu_model(self):
		"""
		Return a human-readable description of this host's CPU, read
		from /proc/cpuinfo, or a translated fallback string.
		"""
		# Determine CPU model.
		cpuinfo = {}
		with open("/proc/cpuinfo") as f:
			for line in f:
				# Break at an empty line, because all information after that
				# is redundant. (Lines keep their trailing newline, so they
				# must be stripped before the emptiness check.)
				if not line.strip():
					break

				# Split at the first colon only, so values that contain
				# colons themselves survive; skip lines without a colon
				# instead of reusing stale key/value pairs.
				try:
					key, value = line.split(":", 1)
				except ValueError:
					continue # Skip invalid lines.

				cpuinfo[key.strip()] = value.strip()

		ret = None
		if self.uname.startswith("arm"):
			# ARM SoCs carry the interesting information in the
			# "Hardware" and "Processor" fields.
			try:
				ret = "%(Hardware)s - %(Processor)s" % cpuinfo
			except KeyError:
				pass
		else:
			ret = cpuinfo.get("model name", None)

		return ret or _("Could not be determined")

	@property
	def memory(self):
		"""
		Return the total amount of memory of this host in bytes
		(0 if it could not be determined).
		"""
		memory = 0
		with open("/proc/meminfo") as f:
			# The first line of /proc/meminfo is "MemTotal: <n> kB".
			line = f.readline()

			try:
				a, b, c = line.split()
			except ValueError:
				pass
			else:
				memory = int(b) * 1024

		return memory

	def info(self):
		"""
		Return a list of lines with human-readable information about
		this host.
		"""
		ret = []

		ret.append("")
		ret.append(" PAKFIRE %s" % PAKFIRE_VERSION)
		ret.append("")
		ret.append(" %-20s: %s" % (_("Hostname"), self.hostname))
		ret.append("")

		# Hardware information
		ret.append(" %s:" % _("Hardware information"))
		ret.append(" %-16s: %s" % (_("CPU model"), self.cpu_model))
		ret.append(" %-16s: %s" % (_("Memory"), pakfire.util.format_size(self.memory)))
		ret.append("")
		ret.append(" %-16s: %s" % (_("Native arch"), system.native_arch))

		# Only print the "Supported arches" header once.
		header = _("Supported arches")
		for arch in self.config.supported_arches:
			ret.append(" %-16s: %s" % (header, arch))
			header = ""
		ret.append("")

		return ret

	def update_info(self):
		"""
		Send the current host information (load average, CPU model,
		memory, supported arches) to the master server.
		"""
		# Get the current load average.
		loadavg = ", ".join(["%.2f" % l for l in os.getloadavg()])

		# Get all supported architectures.
		arches = " ".join([a for a in self.config.supported_arches])

		self.conn.update_host_info(loadavg, self.cpu_model, self.memory, arches)

	def upload_file(self, filename, build_id):
		"""
		Upload the given file in CHUNK_SIZE pieces to the master server
		and associate it with the given build.

		Raises an exception if the server reports that the upload could
		not be recovered.
		"""
		# Get the hash of the file.
		hash1 = pakfire.util.calc_hash1(filename)

		# Get the size of the file.
		size = os.path.getsize(filename)

		# Get an upload ID from the server.
		upload_id = self.conn.get_upload_cookie(os.path.basename(filename),
			size, hash1)

		# Calculate the number of chunks (explicit floor division, which
		# behaves identically on Python 2 and 3).
		chunks = (size // CHUNK_SIZE) + 1

		# Cut the file in pieces and upload them one after another.
		# Open in binary mode: packages and logs are uploaded verbatim.
		with open(filename, "rb") as f:
			chunk = 0
			while True:
				data = f.read(CHUNK_SIZE)
				if not data:
					break

				chunk += 1
				log.info("Uploading chunk %s/%s of %s." % (chunk, chunks,
					os.path.basename(filename)))

				data = xmlrpclib.Binary(data)
				self.conn.upload_chunk(upload_id, data)

		# Tell the server, that we finished the upload.
		ret = self.conn.finish_upload(upload_id, build_id)

		# If the server sends false, something happened with the upload that
		# could not be recovered.
		if not ret:
			raise Exception("Upload failed.")

	def update_build_status(self, build_id, status, message=""):
		"""
		Send a new status for the given build to the master server.

		Raises BuildAbortedException if the server did not acknowledge
		the update.
		"""
		ret = self.conn.update_build_state(build_id, status, message)

		# If the server returns False, then it did not acknowledge our status
		# update and the build has to be aborted.
		if not ret:
			raise BuildAbortedException("The build was aborted by the master server.")

	def build_job(self, type=None):
		"""
		Fetch one build job from the master server and process it.

		Failures are reported back to the server; a successful build is
		acknowledged with the "finished" state.
		"""
		build = self.conn.build_job() # XXX type=None

		# If the server has got no job for us, we end right here.
		if not build:
			return

		job_types = {
			"binary" : self.build_binary_job,
			"source" : self.build_source_job,
		}

		build_id = build["id"]
		build_type = build["type"]

		try:
			func = job_types[build_type]
		except KeyError:
			# Report the type that actually came from the server (the
			# "type" argument is currently unused and always None).
			raise Exception("Build type not supported: %s" % build_type)

		# Call the function that processes the build and try to catch general
		# exceptions and report them to the server.
		# If everything goes okay, we tell this the server, too.
		try:
			func(build_id, build)

		except DependencyError:
			# This has already been reported by func.
			raise

		except Exception as e:
			# Format the exception and send it to the server.
			message = "%s: %s" % (e.__class__.__name__, e)

			self.update_build_status(build_id, "failed", message)
			raise

		else:
			self.update_build_status(build_id, "finished")

	def build_binary_job(self, build_id, build):
		"""
		Process a binary build job: download the source package, verify
		its checksum, run the build and upload all result files plus the
		build log to the server.
		"""
		arch = build["arch"]
		filename = build["name"]
		download = build["download"]
		hash1 = build["hash1"]

		# Create a temporary file and a directory for the resulting files.
		tmpdir = tempfile.mkdtemp()
		tmpfile = os.path.join(tmpdir, filename)
		logfile = os.path.join(tmpdir, "build.log")

		# Get a package grabber and add mirror download capabilities to it.
		grabber = pakfire.downloader.PackageDownloader(self.config)

		try:
			# Download the source.
			grabber.urlgrab(download, filename=tmpfile)

			# Check if the download checksum matches.
			if pakfire.util.calc_hash1(tmpfile) == hash1:
				log.debug("Checksum matches: %s" % hash1)
			else:
				raise DownloadError("Download was corrupted")

			# Update the build status on the server.
			self.update_build_status(build_id, "running")

			# Run the build.
			pakfire.api.build(tmpfile, build_id=build_id,
				resultdirs=[tmpdir,], logfile=logfile)

			self.update_build_status(build_id, "uploading")

			# Walk through the result directory and upload all (binary) files.
			# The downloaded input file and the log are skipped (the log is
			# uploaded separately in the finally branch below).
			for dirpath, subdirs, files in os.walk(tmpdir):
				for fname in files:
					fname = os.path.join(dirpath, fname)
					if fname in (logfile, tmpfile,):
						continue

					self.upload_file(fname, build_id)

		except DependencyError as e:
			message = "%s: %s" % (e.__class__.__name__, e)
			self.update_build_status(build_id, "dependency_error", message)
			raise

		finally:
			# Upload the logfile in any case and if it exists.
			if os.path.exists(logfile):
				self.upload_file(logfile, build_id)

			# Cleanup the files we created.
			pakfire.util.rm(tmpdir)

	def build_source_job(self, build_id, build):
		"""
		Process a source build job: check out the requested revision and
		upload all generated source packages to the server.
		"""
		# Update the build status on the server.
		self.update_build_status(build_id, "running")

		source = Source(self, **build["source"])

		repo = source.update_revision(build["revision"], build_id=build_id,
			**self.pakfire_args)

		try:
			# Upload all files in the repository.
			for pkg in repo:
				path = os.path.join(pkg.repo.path, pkg.filename)
				self.upload_file(path, build_id)
		finally:
			repo.remove()

	def update_repositories(self, limit=2):
		"""
		Fetch the latest repository information from the server and
		re-create the local repositories for every arch of each one.
		"""
		repos = self.conn.get_repos(limit)

		for repo in repos:
			files = self.conn.get_repo_packages(repo["id"])

			for arch in repo["arches"]:
				path = "/pakfire/repositories/%s/%s/%s" % \
					(repo["distro"]["sname"], repo["name"], arch)

				pakfire.api.repo_create(path, files)

	def create_scratch_build(self, *args, **kwargs):
		# Simply forward the request to the master server.
		return self.conn.create_scratch_build(*args, **kwargs)