]>
git.ipfire.org Git - people/stevee/pakfire.git/blob - python/pakfire/server.py
2 ###############################################################################
4 # Pakfire - The IPFire package management system #
5 # Copyright (C) 2011 Pakfire development team #
7 # This program is free software: you can redistribute it and/or modify #
8 # it under the terms of the GNU General Public License as published by #
9 # the Free Software Foundation, either version 3 of the License, or #
10 # (at your option) any later version. #
12 # This program is distributed in the hope that it will be useful, #
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of #
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
15 # GNU General Public License for more details. #
17 # You should have received a copy of the GNU General Public License #
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. #
20 ###############################################################################
32 log
= logging
.getLogger("pakfire")
36 import pakfire
.downloader
37 import pakfire
.packages
38 import pakfire
.repository
41 from pakfire
.system
import system
42 from pakfire
.constants
import *
43 from pakfire
.i18n
import _
45 CHUNK_SIZE
= 1024**2 # 1M
48 def __init__(self
, pakfire
, id, name
, url
, path
, targetpath
, revision
, branch
):
49 self
.pakfire
= pakfire
53 self
.targetpath
= targetpath
54 self
.revision
= revision
57 # If the repository is not yet checked out, we create a local clone
58 # from it to work with it.
59 if not self
.is_cloned():
62 # Always refresh the repository to have the recent commits.
66 return os
.path
.exists(self
.path
)
72 dirname
= os
.path
.dirname(self
.path
)
73 basename
= os
.path
.basename(self
.path
)
75 if not os
.path
.exists(dirname
):
78 self
._git
("clone %s %s" % (self
.url
, basename
), path
=dirname
)
85 h
= hashlib
.sha1(self
.url
)
87 # XXX path is to be changed
88 return "/var/cache/pakfire/sources/%s" % h
.hexdigest()
90 def _git(self
, cmd
, path
=None):
94 cmd
= "cd %s && git %s" % (path
, cmd
)
96 log
.debug("Running command: %s" % cmd
)
98 return subprocess
.check_output(["/bin/sh", "-c", cmd
])
100 def _git_changed_files(self
, revision1
, revision2
=""):
101 files
= self
._git
("diff --name-only %s %s" % (revision1
, revision2
))
103 return [os
.path
.join(self
.path
, f
) for f
in files
.splitlines()]
105 def _git_checkout_revision(self
, revision
):
106 self
._git
("checkout %s" % revision
)
108 def update_revision(self
, revision
, **pakfire_args
):
109 # Checkout the revision we want to work with.
110 self
._git
_checkout
_revision
(revision
)
112 # Get list of all changes files between the current revision and
114 files
= self
._git
_changed
_files
("HEAD^", "HEAD")
116 # Update all changed files and return a repository with them.
117 return self
.update_files([f
for f
in files
if f
.endswith(".%s" % MAKEFILE_EXTENSION
)],
120 def update_files(self
, files
, **pakfire_args
):
121 rnd
= random
.randint(0, 1024**2)
122 tmpdir
= "/tmp/pakfire-source-%s" % rnd
126 if os
.path
.exists(file):
128 # XXX not sure what to do here
130 # pkg_name = os.path.basename(os.path.dirname(file))
132 # # Send deleted package to server.
133 # self.master.package_remove(self, pkg_name)
138 # XXX This totally ignores the local configuration.
139 pakfire
.api
.dist(pkgs
, resultdirs
=[tmpdir
,], **pakfire_args
)
141 # Create a kind of dummy repository to link the packages against it.
142 if pakfire_args
.has_key("build_id"):
143 del pakfire_args
["build_id"]
144 pakfire_args
["mode"] = "server"
146 repo
= pakfire
.api
.repo_create("source-%s" % rnd
, [tmpdir
,], type="source",
151 def update_all(self
):
153 for dir, subdirs
, files
in os
.walk(self
.path
):
155 if not f
.endswith(".%s" % MAKEFILE_EXTENSION
):
158 _files
.append(os
.path
.join(dir, f
))
160 return self
.update_files(_files
)
163 class XMLRPCTransport(xmlrpclib
.Transport
):
164 user_agent
= "pakfire/%s" % PAKFIRE_VERSION
166 def single_request(self
, *args
, **kwargs
):
169 # Tries can be passed to this method.
170 tries
= kwargs
.pop("tries", 100)
174 ret
= xmlrpclib
.Transport
.single_request(self
, *args
, **kwargs
)
176 except socket
.error
, e
:
177 # These kinds of errors are not fatal, but they can happen on
178 # a bad internet connection or whatever.
180 # 110 Connection timeout
181 # 111 Connection refused
182 if not e
.errno
in (32, 110, 111,):
185 except xmlrpclib
.ProtocolError
, e
:
186 # Log all XMLRPC protocol errors.
187 log
.error("XMLRPC protocol error:")
188 log
.error(" URL: %s" % e
.url
)
189 log
.error(" HTTP headers:")
190 for header
in e
.headers
.items():
191 log
.error(" %s: %s" % header
)
192 log
.error(" Error code: %s" % e
.errcode
)
193 log
.error(" Error message: %s" % e
.errmsg
)
197 # If request was successful, we can break the loop.
200 # If the request was not successful, we wait a little time to try
202 log
.debug("Request was not successful, we wait a little bit and try it again.")
207 log
.error("Maximum number of tries was reached. Giving up.")
208 # XXX need better exception here.
209 raise Exception, "Could not fulfill request."
class ServerProxy(xmlrpclib.ServerProxy):
	def __init__(self, server, *args, **kwargs):
		"""
		Connect to the XMLRPC server at the given URL.

		Installs the retrying XMLRPCTransport unless the caller
		supplied a transport of their own, and always enables
		marshalling of None values, which the pakfire server relies on.
		"""
		# Some default settings.
		# dict.has_key() is deprecated (and gone in Python 3) -- use
		# the "in" membership test instead; behavior is identical.
		if "transport" not in kwargs:
			kwargs["transport"] = XMLRPCTransport()

		kwargs["allow_none"] = True

		xmlrpclib.ServerProxy.__init__(self, server, *args, **kwargs)
226 class Server(object):
227 def __init__(self
, **pakfire_args
):
228 self
.config
= pakfire
.config
.Config()
230 server
= self
.config
._slave
.get("server")
232 log
.info("Establishing RPC connection to: %s" % server
)
234 self
.conn
= ServerProxy(server
)
236 self
.pakfire_args
= pakfire_args
241 Return the host's name.
243 return socket
.gethostname()
251 # Determine CPU model
253 with
open("/proc/cpuinfo") as f
:
254 for line
in f
.readlines():
255 # Break at an empty line, because all information after that
261 key
, value
= line
.split(":")
263 pass # Skip invalid lines
265 key
, value
= key
.strip(), value
.strip()
270 if self
.uname
.startswith("arm"):
272 ret
= "%(Hardware)s - %(Processor)s" % cpuinfo
276 ret
= cpuinfo
.get("model name", None)
278 return ret
or _("Could not be determined")
282 # Determine memory size
284 with
open("/proc/meminfo") as f
:
288 a
, b
, c
= line
.split()
292 memory
= int(b
) * 1024
300 ret
.append(" PAKFIRE %s" % PAKFIRE_VERSION
)
302 ret
.append(" %-20s: %s" % (_("Hostname"), self
.hostname
))
305 # Hardware information
306 ret
.append(" %s:" % _("Hardware information"))
307 ret
.append(" %-16s: %s" % (_("CPU model"), self
.cpu_model
))
308 ret
.append(" %-16s: %s" % (_("Memory"), pakfire
.util
.format_size(self
.memory
)))
310 ret
.append(" %-16s: %s" % (_("Native arch"), system
.native_arch
))
312 header
= _("Supported arches")
313 for arch
in self
.config
.supported_arches
:
314 ret
.append(" %-16s: %s" % (header
, arch
))
320 def update_info(self
):
321 # Get the current load average.
322 loadavg
= ", ".join(["%.2f" % l
for l
in os
.getloadavg()])
324 # Get all supported architectures.
325 arches
= " ".join([a
for a
in self
.config
.supported_arches
])
327 self
.conn
.update_host_info(loadavg
, self
.cpu_model
, self
.memory
, arches
)
329 def upload_file(self
, filename
, build_id
):
330 # Get the hash of the file.
331 hash = pakfire
.util
.calc_hash1(filename
)
333 # Get the size of the file.
334 size
= os
.path
.getsize(filename
)
336 # Get an upload ID from the server.
337 upload_id
= self
.conn
.get_upload_cookie(os
.path
.basename(filename
),
340 # Calculate the number of chunks.
341 chunks
= (size
/ CHUNK_SIZE
) + 1
343 # Cut the file in pieces and upload them one after another.
344 with
open(filename
) as f
:
347 data
= f
.read(CHUNK_SIZE
)
352 log
.info("Uploading chunk %s/%s of %s." % (chunk
, chunks
,
353 os
.path
.basename(filename
)))
355 data
= xmlrpclib
.Binary(data
)
356 self
.conn
.upload_chunk(upload_id
, data
)
358 # Tell the server, that we finished the upload.
359 ret
= self
.conn
.finish_upload(upload_id
, build_id
)
361 # If the server sends false, something happened with the upload that
362 # could not be recovered.
364 raise Exception, "Upload failed."
366 def update_build_status(self
, build_id
, status
, message
=""):
367 ret
= self
.conn
.update_build_state(build_id
, status
, message
)
369 # If the server returns False, then it did not acknowledge our status
370 # update and the build has to be aborted.
372 raise BuildAbortedException
, "The build was aborted by the master server."
374 def build_job(self
, type=None):
375 build
= self
.conn
.build_job() # XXX type=None
377 # If the server has got no job for us, we end right here.
382 "binary" : self
.build_binary_job
,
383 "source" : self
.build_source_job
,
386 build_id
= build
["id"]
387 build_type
= build
["type"]
390 func
= job_types
[build_type
]
392 raise Exception, "Build type not supported: %s" % type
394 # Call the function that processes the build and try to catch general
395 # exceptions and report them to the server.
396 # If everything goes okay, we tell this the server, too.
398 func(build_id
, build
)
400 except DependencyError
:
401 # This has already been reported by func.
405 # Format the exception and send it to the server.
406 message
= "%s: %s" % (e
.__class
__.__name
__, e
)
408 self
.update_build_status(build_id
, "failed", message
)
412 self
.update_build_status(build_id
, "finished")
414 def build_binary_job(self
, build_id
, build
):
416 filename
= build
["name"]
417 download
= build
["download"]
418 hash1
= build
["hash1"]
420 # Create a temporary file and a directory for the resulting files.
421 tmpdir
= tempfile
.mkdtemp()
422 tmpfile
= os
.path
.join(tmpdir
, filename
)
423 logfile
= os
.path
.join(tmpdir
, "build.log")
425 # Get a package grabber and add mirror download capabilities to it.
426 grabber
= pakfire
.downloader
.PackageDownloader(self
.config
)
429 # Download the source.
430 grabber
.urlgrab(download
, filename
=tmpfile
)
432 # Check if the download checksum matches.
433 if pakfire
.util
.calc_hash1(tmpfile
) == hash1
:
434 print "Checksum matches: %s" % hash1
436 raise DownloadError
, "Download was corrupted"
438 # Update the build status on the server.
439 self
.update_build_status(build_id
, "running")
442 pakfire
.api
.build(tmpfile
, build_id
=build_id
,
443 resultdirs
=[tmpdir
,], logfile
=logfile
)
445 self
.update_build_status(build_id
, "uploading")
447 # Walk through the result directory and upload all (binary) files.
448 for dir, subdirs
, files
in os
.walk(tmpdir
):
450 file = os
.path
.join(dir, file)
451 if file in (logfile
, tmpfile
,):
454 self
.upload_file(file, build_id
)
456 except DependencyError
, e
:
457 message
= "%s: %s" % (e
.__class
__.__name
__, e
)
458 self
.update_build_status(build_id
, "dependency_error", message
)
462 # Upload the logfile in any case and if it exists.
463 if os
.path
.exists(logfile
):
464 self
.upload_file(logfile
, build_id
)
466 # Cleanup the files we created.
467 pakfire
.util
.rm(tmpdir
)
469 def build_source_job(self
, build_id
, build
):
470 # Update the build status on the server.
471 self
.update_build_status(build_id
, "running")
473 source
= Source(self
, **build
["source"])
475 repo
= source
.update_revision(build
["revision"], build_id
=build_id
,
479 # Upload all files in the repository.
481 path
= os
.path
.join(pkg
.repo
.path
, pkg
.filename
)
482 self
.upload_file(path
, build_id
)
486 def update_repositories(self
, limit
=2):
487 repos
= self
.conn
.get_repos(limit
)
490 files
= self
.conn
.get_repo_packages(repo
["id"])
492 for arch
in repo
["arches"]:
493 path
= "/pakfire/repositories/%s/%s/%s" % \
494 (repo
["distro"]["sname"], repo
["name"], arch
)
496 pakfire
.api
.repo_create(path
, files
)
498 def create_scratch_build(self
, *args
, **kwargs
):
499 return self
.conn
.create_scratch_build(*args
, **kwargs
)