#!/usr/bin/python
###############################################################################
#                                                                             #
# Pakfire - The IPFire package management system                              #
# Copyright (C) 2011 Pakfire development team                                 #
#                                                                             #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation, either version 3 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# This program is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the                #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with this program. If not, see <http://www.gnu.org/licenses/>.        #
#                                                                             #
###############################################################################

import hashlib
import logging
import os
import random
import socket
import subprocess
import tempfile
import time
import xmlrpclib

import pakfire.api
import pakfire.base
import pakfire.config
import pakfire.downloader
import pakfire.packages
import pakfire.repository
import pakfire.util

from pakfire.constants import *

CHUNK_SIZE = 1024**2 # 1 MiB

class Source(object):
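	"""
		A local clone of a source (git) repository.

		The checkout lives in a cache directory derived from the repository
		URL and is used to turn package makefiles into source packages.
	"""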
	def __init__(self, pakfire, id, name, url, path, targetpath, revision, branch):
		self.pakfire = pakfire
		self.id = id
		self.name = name
		self.url = url
		self.targetpath = targetpath
		self.revision = revision
		self.branch = branch

		# Note: the path argument is not stored. The location of the local
		# clone is derived from the URL (see the path property below).

		# If the repository is not yet checked out, we create a local clone
		# from it to work with it.
		if not self.is_cloned():
			self.clone()
		else:
			# Always refresh the repository to have the recent commits.
			self.fetch()

	def is_cloned(self):
		return os.path.exists(self.path)

	def clone(self):
		if self.is_cloned():
			return

		dirname = os.path.dirname(self.path)
		basename = os.path.basename(self.path)

		if not os.path.exists(dirname):
			os.makedirs(dirname)

		self._git("clone %s %s" % (self.url, basename), path=dirname)

	def fetch(self):
		self._git("fetch")

	@property
	def path(self):
		h = hashlib.sha1(self.url)

		# XXX path is to be changed
		return "/var/cache/pakfire/sources/%s" % h.hexdigest()

	def _git(self, cmd, path=None):
		if not path:
			path = self.path

		cmd = "cd %s && git %s" % (path, cmd)

		logging.debug("Running command: %s" % cmd)

		return subprocess.check_output(["/bin/sh", "-c", cmd])

	def _git_changed_files(self, revision1, revision2=""):
		files = self._git("diff --name-only %s %s" % (revision1, revision2))

		return [os.path.join(self.path, f) for f in files.splitlines()]

	def _git_checkout_revision(self, revision):
		self._git("checkout %s" % revision)

	def update_revision(self, revision, **pakfire_args):
		# Checkout the revision we want to work with.
		self._git_checkout_revision(revision)

		# Get a list of all files that were changed between the current
		# revision and the previous one.
		files = self._git_changed_files("HEAD^", "HEAD")

		# Update all changed makefiles and return a repository with the
		# resulting packages.
		return self.update_files([f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)],
			**pakfire_args)

	def update_files(self, files, **pakfire_args):
		rnd = random.randint(0, 1024**2)
		tmpdir = "/tmp/pakfire-source-%s" % rnd

		pkgs = []
		for file in files:
			if os.path.exists(file):
				pkgs.append(file)
			# XXX not sure what to do here
			#else:
			#	pkg_name = os.path.basename(os.path.dirname(file))
			#
			#	# Send deleted package to server.
			#	self.master.package_remove(self, pkg_name)

		if not pkgs:
			return

		# XXX This totally ignores the local configuration.
		pakfire.api.dist(pkgs, resultdirs=[tmpdir,], **pakfire_args)

		# Create a kind of dummy repository to link the packages against it.
		if pakfire_args.has_key("build_id"):
			del pakfire_args["build_id"]
		pakfire_args["mode"] = "server"

		repo = pakfire.api.repo_create("source-%s" % rnd, [tmpdir,], type="source",
			**pakfire_args)

		return repo

	def update_all(self):
		_files = []
		for dir, subdirs, files in os.walk(self.path):
			for f in files:
				if not f.endswith(".%s" % MAKEFILE_EXTENSION):
					continue

				_files.append(os.path.join(dir, f))

		return self.update_files(_files)


class XMLRPCTransport(xmlrpclib.Transport):
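	"""
		An XMLRPC transport that retries failed requests.

		Temporary network errors (broken pipe, connection timeout and
		connection refused) are not treated as fatal; the request is
		retried after a short pause until the number of tries is exhausted.
	"""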
	user_agent = "pakfire/%s" % PAKFIRE_VERSION

	def single_request(self, *args, **kwargs):
		ret = None

		# The number of tries can be passed to this method.
		tries = kwargs.pop("tries", 100)

		while tries:
			try:
				ret = xmlrpclib.Transport.single_request(self, *args, **kwargs)

			except socket.error, e:
				# These kinds of errors are not fatal, but they can happen on
				# a bad internet connection or whatever.
				#  32 Broken pipe
				# 110 Connection timeout
				# 111 Connection refused
				if not e.errno in (32, 110, 111,):
					raise

			except xmlrpclib.ProtocolError, e:
				# Log all XMLRPC protocol errors.
				logging.error("XMLRPC protocol error:")
				logging.error("  URL: %s" % e.url)
				logging.error("  HTTP headers:")
				for header in e.headers.items():
					logging.error("    %s: %s" % header)
				logging.error("  Error code: %s" % e.errcode)
				logging.error("  Error message: %s" % e.errmsg)
				raise

			else:
				# If the request was successful, we can break the loop.
				break

			# If the request was not successful, we wait a little while
			# before trying again.
			logging.debug("Request was not successful. Waiting a bit before trying again.")
			time.sleep(30)
			tries -= 1

		else:
			logging.error("Maximum number of tries was reached. Giving up.")
			# XXX need better exception here.
			raise Exception, "Could not fulfill request."

		return ret


class Server(object):
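	"""
		A build slave that talks to the Pakfire master server over XMLRPC.

		It reports information about this host, fetches build jobs, runs
		them and uploads the resulting packages and log files.
	"""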
	def __init__(self, **pakfire_args):
		self.config = pakfire.config.Config()

		server = self.config._slave.get("server")

		logging.info("Establishing RPC connection to: %s" % server)

		self.conn = xmlrpclib.ServerProxy(server, transport=XMLRPCTransport(),
			allow_none=True)

		self.pakfire_args = pakfire_args

	@property
	def hostname(self):
		"""
			Return the host's name.
		"""
		return socket.gethostname()

	def update_info(self):
		# Get the current load average.
		loadavg = ", ".join(["%.2f" % l for l in os.getloadavg()])

		# Get all supported architectures.
		arches = sorted([a for a in self.config.supported_arches])
		arches = " ".join(arches)

		# Determine the CPU model.
		cpuinfo = {}
		with open("/proc/cpuinfo") as f:
			for line in f.readlines():
				# Break at an empty line, because all information after that
				# is redundant.
				if not line.strip():
					break

				try:
					key, value = line.split(":", 1)
				except ValueError:
					continue # Skip invalid lines.

				key, value = key.strip(), value.strip()

				cpuinfo[key] = value

		cpu_model = cpuinfo.get("model name", "Could not be determined")

		# Determine the memory size (in kB, taken from the first line of
		# /proc/meminfo).
		memory = 0
		with open("/proc/meminfo") as f:
			line = f.readline()

			try:
				a, b, c = line.split()
			except ValueError:
				pass
			else:
				memory = int(b)

		self.conn.update_host_info(loadavg, cpu_model, memory, arches)

	def upload_file(self, filename, build_id):
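		"""
			Upload a file to the master server in chunks of CHUNK_SIZE bytes.

			An upload cookie is requested for the file name, size and hash,
			the chunks are sent one after another and the upload is
			finalized with finish_upload().
		"""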
		# Get the hash of the file.
		hash = pakfire.util.calc_hash1(filename)

		# Get the size of the file.
		size = os.path.getsize(filename)

		# Get an upload ID from the server.
		upload_id = self.conn.get_upload_cookie(os.path.basename(filename),
			size, hash)

		# Calculate the number of chunks.
		chunks = (size / CHUNK_SIZE) + 1

		# Cut the file into pieces and upload them one after another.
		with open(filename) as f:
			chunk = 0
			while True:
				data = f.read(CHUNK_SIZE)
				if not data:
					break

				chunk += 1
				logging.info("Uploading chunk %s/%s of %s." % (chunk, chunks,
					os.path.basename(filename)))

				data = xmlrpclib.Binary(data)
				self.conn.upload_chunk(upload_id, data)

		# Tell the server that we have finished the upload.
		ret = self.conn.finish_upload(upload_id, build_id)

		# If the server returns False, something happened to the upload that
		# could not be recovered.
		if not ret:
			raise Exception, "Upload failed."

	def update_build_status(self, build_id, status, message=""):
		ret = self.conn.update_build_state(build_id, status, message)

		# If the server returns False, then it did not acknowledge our status
		# update and the build has to be aborted.
		if not ret:
			raise BuildAbortedException, "The build was aborted by the master server."

	def build_job(self, type=None):
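		"""
			Ask the master server for one build job and process it.

			Binary and source jobs are dispatched to build_binary_job() and
			build_source_job(); success or failure is reported back to the
			server via update_build_status().
		"""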
		build = self.conn.build_job() # XXX type=None

		# If the server has got no job for us, we end right here.
		if not build:
			return

		job_types = {
			"binary" : self.build_binary_job,
			"source" : self.build_source_job,
		}

		build_id = build["id"]
		build_type = build["type"]

		try:
			func = job_types[build_type]
		except KeyError:
			raise Exception, "Build type not supported: %s" % build_type

		# Call the function that processes the build and try to catch general
		# exceptions and report them to the server.
		# If everything goes okay, we tell the server that, too.
		try:
			func(build_id, build)

		except DependencyError:
			# This has already been reported by func.
			raise

		except Exception, e:
			# Format the exception and send it to the server.
			message = "%s: %s" % (e.__class__.__name__, e)

			self.update_build_status(build_id, "failed", message)
			raise

		else:
			self.update_build_status(build_id, "finished")

	def build_binary_job(self, build_id, build):
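		"""
			Run a binary build: download the source package, verify its
			checksum, run the build and upload all resulting files together
			with the build log.
		"""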
		arch     = build["arch"]
		filename = build["name"]
		download = build["download"]
		hash1    = build["hash1"]

		# Create a temporary file and a directory for the resulting files.
		tmpdir  = tempfile.mkdtemp()
		tmpfile = os.path.join(tmpdir, filename)
		logfile = os.path.join(tmpdir, "build.log")

		# Get a package grabber and add mirror download capabilities to it.
		grabber = pakfire.downloader.PackageDownloader(self.config)

		try:
			# Download the source package.
			grabber.urlgrab(download, filename=tmpfile)

			# Check if the download checksum matches.
			if pakfire.util.calc_hash1(tmpfile) == hash1:
				print "Checksum matches: %s" % hash1
			else:
				raise DownloadError, "Download was corrupted"

			# Update the build status on the server.
			self.update_build_status(build_id, "running")

			# Run the build.
			pakfire.api.build(tmpfile, build_id=build_id,
				resultdirs=[tmpdir,], logfile=logfile)

			self.update_build_status(build_id, "uploading")

			# Walk through the result directory and upload all (binary) files.
			# The downloaded source package and the log file are skipped here;
			# the log file is uploaded separately below.
			for dir, subdirs, files in os.walk(tmpdir):
				for file in files:
					file = os.path.join(dir, file)
					if file in (logfile, tmpfile,):
						continue

					self.upload_file(file, build_id)

		except DependencyError, e:
			message = "%s: %s" % (e.__class__.__name__, e)
			self.update_build_status(build_id, "dependency_error", message)
			raise

		finally:
			# Upload the log file in any case, if it exists.
			if os.path.exists(logfile):
				self.upload_file(logfile, build_id)

			# Clean up the files we created.
			pakfire.util.rm(tmpdir)

	def build_source_job(self, build_id, build):
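		"""
			Run a source build: check out the requested revision of the
			source repository, create source packages from the changed
			makefiles and upload them to the server.
		"""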
		# Update the build status on the server.
		self.update_build_status(build_id, "running")

		source = Source(self, **build["source"])

		repo = source.update_revision(build["revision"], build_id=build_id,
			**self.pakfire_args)

		try:
			# Upload all files in the repository.
			for pkg in repo:
				path = os.path.join(pkg.repo.path, pkg.filename)
				self.upload_file(path, build_id)
		finally:
			repo.remove()

	def update_repositories(self, limit=2):
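		"""
			Ask the server for up to limit repositories and their package
			lists, and create a repository for every architecture under
			/pakfire/repositories.
		"""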
		repos = self.conn.get_repos(limit)

		for repo in repos:
			files = self.conn.get_repo_packages(repo["id"])

			for arch in repo["arches"]:
				path = "/pakfire/repositories/%s/%s/%s" % \
					(repo["distro"]["sname"], repo["name"], arch)

				pakfire.api.repo_create(path, files)