]> git.ipfire.org Git - pakfire.git/blob - pakfire/server.py
Bump version 0.9.9.
[pakfire.git] / pakfire / server.py
1 #!/usr/bin/python
2 ###############################################################################
3 # #
4 # Pakfire - The IPFire package management system #
5 # Copyright (C) 2011 Pakfire development team #
6 # #
7 # This program is free software: you can redistribute it and/or modify #
8 # it under the terms of the GNU General Public License as published by #
9 # the Free Software Foundation, either version 3 of the License, or #
10 # (at your option) any later version. #
11 # #
12 # This program is distributed in the hope that it will be useful, #
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of #
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
15 # GNU General Public License for more details. #
16 # #
17 # You should have received a copy of the GNU General Public License #
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. #
19 # #
20 ###############################################################################
21
22 import hashlib
23 import logging
24 import os
25 import random
26 import socket
27 import subprocess
28 import tempfile
29 import time
30 import xmlrpclib
31
32 import pakfire.api
33 import pakfire.base
34 import pakfire.config
35 import pakfire.downloader
36 import pakfire.packages
37 import pakfire.repository
38 import pakfire.util
39
40 from pakfire.constants import *
41
# Size (in bytes) of the pieces a file is cut into when it is uploaded
# to the master server (see Server.upload_file).
CHUNK_SIZE = 1024**2 # 1M
43
class Source(object):
	"""
	A local checkout of a git source repository.

	Creating an instance has side effects: the repository is cloned to
	the local cache if it is not there yet, otherwise the most recent
	commits are fetched.
	"""

	def __init__(self, pakfire, id, name, url, path, targetpath, revision, branch):
		# NOTE(review): "path" is accepted for compatibility with the
		# data the master server sends, but it is not used; the local
		# checkout location is derived from the URL (see the "path"
		# property below).
		self.pakfire = pakfire
		self.id = id
		self.name = name
		self.url = url
		self.targetpath = targetpath
		self.revision = revision
		self.branch = branch

		# If the repository is not yet checked out, we create a local clone
		# from it to work with it.
		if not self.is_cloned():
			self.clone()
		else:
			# Always refresh the repository to have the recent commits.
			self.fetch()

	def is_cloned(self):
		# True if the local clone directory already exists.
		return os.path.exists(self.path)

	def clone(self):
		# Create the local clone of the remote repository (no-op if it
		# already exists).
		if self.is_cloned():
			return

		dirname = os.path.dirname(self.path)
		basename = os.path.basename(self.path)

		if not os.path.exists(dirname):
			os.makedirs(dirname)

		self._git("clone %s %s" % (self.url, basename), path=dirname)

	def fetch(self):
		# Fetch the most recent commits from the remote repository.
		self._git("fetch")

	@property
	def path(self):
		# The checkout lives in a directory named after the SHA1 digest
		# of the repository URL, so different sources cannot collide.
		h = hashlib.sha1(self.url)

		# XXX path is to be changed
		return "/var/cache/pakfire/sources/%s" % h.hexdigest()

	def _git(self, cmd, path=None):
		"""
		Run a git command inside the checkout (or inside "path", if
		given) and return its output.
		"""
		if not path:
			path = self.path

		# NOTE(review): cmd and path are interpolated into a shell
		# command line; both must come from a trusted source (they do -
		# the master server), otherwise this would be shell injection.
		cmd = "cd %s && git %s" % (path, cmd)

		logging.debug("Running command: %s" % cmd)

		return subprocess.check_output(["/bin/sh", "-c", cmd])

	def _git_changed_files(self, revision1, revision2=""):
		# Return the absolute paths of all files that differ between
		# the two given revisions.
		files = self._git("diff --name-only %s %s" % (revision1, revision2))

		return [os.path.join(self.path, f) for f in files.splitlines()]

	def _git_checkout_revision(self, revision):
		# Check out the given revision in the local clone.
		self._git("checkout %s" % revision)

	def update_revision(self, revision, **pakfire_args):
		# Checkout the revision we want to work with.
		self._git_checkout_revision(revision)

		# Get list of all changes files between the current revision and
		# the previous one.
		files = self._git_changed_files("HEAD^", "HEAD")

		# Update all changed makefiles and return a repository with them.
		return self.update_files([f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)],
			**pakfire_args)

	def update_files(self, files, **pakfire_args):
		"""
		Build source packages for all existing files in "files" and
		return a temporary (source) repository containing them, or
		None if there was nothing to do.
		"""
		rnd = random.randint(0, 1024**2)
		tmpdir = "/tmp/pakfire-source-%s" % rnd

		pkgs = []
		for filename in files:
			if os.path.exists(filename):
				pkgs.append(filename)
			# XXX files that were removed in this revision are silently
			# ignored for now; the master server should eventually be
			# told to drop the corresponding packages.

		if not pkgs:
			return

		# XXX This totally ignores the local configuration.
		pakfire.api.dist(pkgs, resultdirs=[tmpdir,], **pakfire_args)

		# Create a kind of dummy repository to link the packages against it.
		# repo_create() does not accept a build_id, so drop it.
		if "build_id" in pakfire_args:
			del pakfire_args["build_id"]
		pakfire_args["mode"] = "server"

		repo = pakfire.api.repo_create("source-%s" % rnd, [tmpdir,], type="source",
			**pakfire_args)

		return repo

	def update_all(self):
		# Rebuild every makefile that is found anywhere in the checkout.
		_files = []
		for dirpath, subdirs, files in os.walk(self.path):
			for f in files:
				if not f.endswith(".%s" % MAKEFILE_EXTENSION):
					continue

				_files.append(os.path.join(dirpath, f))

		return self.update_files(_files)
158
159
class XMLRPCTransport(xmlrpclib.Transport):
	"""
	An XMLRPC transport that retries requests on transient network
	errors (broken pipe, connection timeout, connection refused)
	instead of failing immediately.
	"""

	# Identify this client (and its version) to the server.
	user_agent = "pakfire/%s" % PAKFIRE_VERSION

	def single_request(self, *args, **kwargs):
		"""
		Perform one XMLRPC request, retrying on transient socket
		errors.

		The maximum number of attempts can be passed as the keyword
		argument "tries" (default: 100). Fatal socket errors and all
		XMLRPC protocol errors are re-raised immediately; when all
		tries are exhausted, a generic Exception is raised.
		"""
		ret = None

		# Tries can be passed to this method.
		tries = kwargs.pop("tries", 100)

		while tries:
			try:
				ret = xmlrpclib.Transport.single_request(self, *args, **kwargs)

			except socket.error, e:
				# These kinds of errors are not fatal, but they can happen on
				# a bad internet connection or whatever.
				# 32 Broken pipe
				# 110 Connection timeout
				# 111 Connection refused
				if not e.errno in (32, 110, 111,):
					raise

			except xmlrpclib.ProtocolError, e:
				# Log all XMLRPC protocol errors.
				logging.error("XMLRPC protocol error:")
				logging.error(" URL: %s" % e.url)
				logging.error(" HTTP headers:")
				for header in e.headers.items():
					logging.error(" %s: %s" % header)
				logging.error(" Error code: %s" % e.errcode)
				logging.error(" Error message: %s" % e.errmsg)
				raise

			else:
				# If request was successful, we can break the loop.
				break

			# If the request was not successful, we wait a little time to try
			# it again.
			logging.debug("Request was not successful, we wait a little bit and try it again.")
			time.sleep(30)
			tries -= 1

		else:
			# This while/else branch only runs when "tries" reached zero,
			# i.e. every attempt failed with a non-fatal error.
			logging.error("Maximum number of tries was reached. Giving up.")
			# XXX need better exception here.
			raise Exception, "Could not fulfill request."

		return ret
209
210
class Server(object):
	"""
	A build slave: connects to the pakfire master server over XMLRPC,
	reports information about this host, fetches build jobs, runs them
	and uploads the results.
	"""

	def __init__(self, **pakfire_args):
		# NOTE(review): pakfire_args is currently unused, but kept for
		# interface compatibility with callers.
		self.config = pakfire.config.Config()

		# The master server's URL comes from the slave configuration.
		server = self.config._slave.get("server")

		logging.info("Establishing RPC connection to: %s" % server)

		self.conn = xmlrpclib.ServerProxy(server, transport=XMLRPCTransport(),
			allow_none=True)

	@property
	def hostname(self):
		"""
		Return the host's name.
		"""
		return socket.gethostname()

	def update_info(self):
		"""
		Collect information about this host (load average, CPU model,
		memory size and supported architectures) and send it to the
		master server.
		"""
		# Get the current load average.
		loadavg = ", ".join(["%.2f" % l for l in os.getloadavg()])

		# Get all supported architectures.
		arches = " ".join(sorted(self.config.supported_arches))

		# Determine CPU model from /proc/cpuinfo.
		cpuinfo = {}
		with open("/proc/cpuinfo") as f:
			for line in f.readlines():
				# Break at an empty line, because all information after that
				# is redundant (it repeats for every additional CPU core).
				# BUGFIX: the lines still carry their trailing newline, so
				# we must strip before testing for emptiness - the old
				# "if not line" check could never fire.
				if not line.strip():
					break

				# BUGFIX: skip lines that do not look like "key : value"
				# instead of silently reusing the previous key/value pair
				# (or hitting a NameError on the very first line).
				# Split only on the first colon so values may contain one.
				try:
					key, value = line.split(":", 1)
				except ValueError:
					continue

				cpuinfo[key.strip()] = value.strip()

		cpu_model = cpuinfo.get("model name", "Could not be determined")

		# Determine memory size (in kB, first line of /proc/meminfo).
		memory = 0
		with open("/proc/meminfo") as f:
			line = f.readline()

			try:
				a, b, c = line.split()
			except ValueError:
				pass
			else:
				memory = int(b)

		self.conn.update_host_info(loadavg, cpu_model, memory, arches)

	def upload_file(self, filename, build_id):
		"""
		Upload the given file in CHUNK_SIZE pieces to the master server
		and associate it with build_id.

		Raises Exception if the server does not acknowledge the
		finished upload.
		"""
		# Get the hash of the file.
		hash = pakfire.util.calc_hash1(filename)

		# Get the size of the file.
		size = os.path.getsize(filename)

		# Get an upload ID from the server.
		upload_id = self.conn.get_upload_cookie(os.path.basename(filename),
			size, hash)

		# Calculate the number of chunks (at least one).
		chunks = (size // CHUNK_SIZE) + 1

		# Cut the file in pieces and upload them one after another.
		# BUGFIX: open in binary mode - packages and logs are raw bytes.
		with open(filename, "rb") as f:
			chunk = 0
			while True:
				data = f.read(CHUNK_SIZE)
				if not data:
					break

				chunk += 1
				logging.info("Uploading chunk %s/%s of %s." % (chunk, chunks,
					os.path.basename(filename)))

				data = xmlrpclib.Binary(data)
				self.conn.upload_chunk(upload_id, data)

		# Tell the server, that we finished the upload.
		ret = self.conn.finish_upload(upload_id, build_id)

		# If the server sends false, something happened with the upload that
		# could not be recovered.
		if not ret:
			raise Exception("Upload failed.")

	def update_build_status(self, build_id, status, message=""):
		"""
		Report a new status (and an optional message) for the given
		build to the master server.
		"""
		ret = self.conn.update_build_state(build_id, status, message)

		# If the server returns False, then it did not acknowledge our status
		# update and the build has to be aborted.
		if not ret:
			raise BuildAbortedException("The build was aborted by the master server.")

	def build_job(self, type=None):
		"""
		Fetch one build job from the master server, dispatch it to the
		matching handler and report success or failure back.
		"""
		build = self.conn.build_job() # XXX type=None

		# If the server has got no job for us, we end right here.
		if not build:
			return

		job_types = {
			"binary" : self.build_binary_job,
			"source" : self.build_source_job,
		}

		build_id = build["id"]
		build_type = build["type"]

		try:
			func = job_types[build_type]
		except KeyError:
			# BUGFIX: report the actual build type - this used to format
			# the unused "type" argument (which is always None).
			raise Exception("Build type not supported: %s" % build_type)

		# Call the function that processes the build and try to catch general
		# exceptions and report them to the server.
		# If everything goes okay, we tell this the server, too.
		try:
			func(build_id, build)

		except DependencyError:
			# This has already been reported by func.
			raise

		except Exception as e:
			# Format the exception and send it to the server.
			message = "%s: %s" % (e.__class__.__name__, e)

			self.update_build_status(build_id, "failed", message)
			raise

		else:
			self.update_build_status(build_id, "finished")

	def build_binary_job(self, build_id, build):
		"""
		Run a binary build job: download the source package, verify its
		checksum, build it and upload all resulting files (plus the
		build log) to the master server.
		"""
		arch = build["arch"]
		filename = build["name"]
		download = build["download"]
		hash1 = build["hash1"]

		# Create a temporary file and a directory for the resulting files.
		tmpdir = tempfile.mkdtemp()
		tmpfile = os.path.join(tmpdir, filename)
		logfile = os.path.join(tmpdir, "build.log")

		# Get a package grabber and add mirror download capabilities to it.
		grabber = pakfire.downloader.PackageDownloader(self.config)

		try:
			# Download the source.
			grabber.urlgrab(download, filename=tmpfile)

			# Check if the download checksum matches.
			if pakfire.util.calc_hash1(tmpfile) == hash1:
				print("Checksum matches: %s" % hash1)
			else:
				raise DownloadError("Download was corrupted")

			# Update the build status on the server.
			self.update_build_status(build_id, "running")

			# Run the build.
			pakfire.api.build(tmpfile, build_id=build_id,
				resultdirs=[tmpdir,], logfile=logfile)

			self.update_build_status(build_id, "uploading")

			# Walk through the result directory and upload all (binary) files.
			# The downloaded source and the log are skipped here; the log is
			# uploaded separately in the finally block below.
			for dirpath, subdirs, files in os.walk(tmpdir):
				for fn in files:
					fn = os.path.join(dirpath, fn)
					if fn in (logfile, tmpfile,):
						continue

					self.upload_file(fn, build_id)

		except DependencyError as e:
			message = "%s: %s" % (e.__class__.__name__, e)
			self.update_build_status(build_id, "dependency_error", message)
			raise

		finally:
			# Upload the logfile in any case and if it exists.
			if os.path.exists(logfile):
				self.upload_file(logfile, build_id)

			# Cleanup the files we created.
			pakfire.util.rm(tmpdir)

	def build_source_job(self, build_id, build):
		"""
		Run a source build job: check out the requested revision, build
		source packages from the changed makefiles and upload them.
		"""
		# Update the build status on the server.
		self.update_build_status(build_id, "running")

		source = Source(self, **build["source"])

		repo = source.update_revision(build["revision"], build_id=build_id)

		try:
			# Upload all files in the repository.
			for pkg in repo:
				path = os.path.join(pkg.repo.path, pkg.filename)
				self.upload_file(path, build_id)
		finally:
			# Remove the temporary repository in any case.
			repo.remove()

	def update_repositories(self, limit=2):
		"""
		Fetch up to "limit" repositories from the master server and
		(re)create their local package indexes for every architecture.
		"""
		repos = self.conn.get_repos(limit)

		for repo in repos:
			files = self.conn.get_repo_packages(repo["id"])

			for arch in repo["arches"]:
				path = "/pakfire/repositories/%s/%s/%s" % \
					(repo["distro"]["sname"], repo["name"], arch)

				pakfire.api.repo_create(path, files)