[slave]
-server = http://172.28.1.250/api/slave
+server = http://172.28.1.250:81/api/builder/filius.ipfire.org/ciore4pyR8dI
# Check if we are operating as the root user.
self.check_root_user()
+ config_type = None
+
# The path where we are operating in.
if builder:
+ config_type = "builder"
self.builder = True
self.path = os.path.join(BUILD_ROOT, util.random_string())
else:
# XXX check if we are actually running on an ipfire system.
# Read configuration file(s)
- self.config = config.Config(pakfire=self)
+ self.config = config.Config(type=config_type)
for filename in configs:
self.config.read(filename)
@property
def supported_arches(self):
- return self.distro.supported_arches
+ return self.config.supported_arches
def check_root_user(self):
if not os.getuid() == 0 or not os.getgid() == 0:
if not arch:
return True
- if not self.distro.host_supports_arch(arch):
+ if not self.config.host_supports_arch(arch):
raise BuildError, "Cannot build for the target architecture: %s" % arch
raise BuildError, arch
return
# Ask the user if this is okay.
- if not t.cli_yesno():
- return
+ #if not t.cli_yesno():
+ # return
# If okay, run the transcation.
t.run()
self.master.update_sources()
-class CliSlave(Cli):
+class CliServer(Cli):
def __init__(self):
self.parser = argparse.ArgumentParser(
- description = _("Pakfire slave command line interface."),
+ description = _("Pakfire server command line interface."),
)
self.parse_common_arguments()
# Finally parse all arguments from the command line and save them.
self.args = self.parser.parse_args()
- self.slave = server.slave.Slave()
+ self.server = server.Server()
self.action2func = {
"build" : self.handle_build,
const="keepalive")
def handle_keepalive(self):
- self.slave.keepalive()
+ self.server.update_info()
def handle_build(self):
- self.slave.build_job()
+ self.server.build_job()
from constants import *
class Config(object):
- def __init__(self, pakfire):
- self.pakfire = pakfire
+ def __init__(self, type=None):
+ self.type = type
self._config = {
"debug" : False,
def config_files(self):
files = []
- if self.pakfire.builder:
+ if self.type == "builder":
path = os.getcwd()
while not path == "/":
return files
+ @property
+ def host_arch(self):
+ """
+ Return the architecture of the host we are running on.
+ """
+ return os.uname()[4]
+
+ @property
+ def supported_arches(self):
+ host_arches = {
+ "x86_64" : [ "x86_64", ],
+ "i686" : [ "i686", "x86_64", ],
+ "i586" : [ "i586", "i686", "x86_64", ],
+ "i486" : [ "i486", "i586", "i686", "x86_64", ],
+ }
+
+ for host, can_be_built in host_arches.items():
+ if self.host_arch in can_be_built:
+ yield host
+
+ def host_supports_arch(self, arch):
+ """
+ Check if this host can build for the target architecture "arch".
+ """
+ return arch in self.supported_arches
self.pakfire = pakfire
self._data = {
- "arch" : self.host_arch,
+ "arch" : self.config.host_arch,
"name" : "unknown",
"slogan" : "---",
"vendor" : "unknown",
# Dump all data
self.dump()
+ @property
+ def config(self):
+ return self.pakfire.config
+
def dump(self):
logging.debug("Distribution configuration:")
return self._data.get("vendor")
def get_arch(self):
- return self._data.get("arch") or self.host_arch
+ return self._data.get("arch") or self.config.host_arch
def set_arch(self, arch):
# XXX check if we are allowed to set this arch
return "%s-%s-linux-gnu" % (self.arch, vendor.lower())
- @property
- def host_arch(self):
- """
- Return the architecture of the host we are running on.
- """
- return os.uname()[4]
-
- @property
- def supported_arches(self):
- host_arches = {
- "i686" : [ "i686", "x86_64", ],
- "i586" : [ "i586", "i686", "x86_64", ],
- "i486" : [ "i486", "i586", "i686", "x86_64", ],
- }
-
- for host, can_be_built in host_arches.items():
- if self.host_arch in can_be_built:
- yield host
-
- def host_supports_arch(self, arch):
- """
- Check if this host can build for the target architecture "arch".
- """
- return arch in self.supported_arches
-
@property
def environ(self):
"""
None to skip the setting of the personality in the build chroot.
"""
- if self.arch == self.host_arch:
+ if self.arch == self.config.host_arch:
return None
arch2personality = {
}
return arch2personality[self.arch]
-
class Error(Exception):
pass
+class BuildAbortedException(Error):
+ pass
+
class BuildError(Error):
pass
import shutil
import pakfire.packages as packages
+import pakfire.util as util
import index
elif idx == "directory":
self.index = index.DirectoryIndex(self.pakfire, self, self.path)
+ def remove(self):
+ if os.path.exists(self.path):
+ util.rm(self.path)
+
@property
def local(self):
# This is obviously local.
--- /dev/null
+#!/usr/bin/python
+
+import hashlib
+import logging
+import os
+import random
+import socket
+import subprocess
+import tempfile
+import time
+import xmlrpclib
+
+import pakfire.api
+import pakfire.base
+import pakfire.config
+import pakfire.downloader
+import pakfire.packages
+import pakfire.repository
+import pakfire.util
+
+from pakfire.constants import *
+
+CHUNK_SIZE = 1024**2 # 1M
+
+class Source(object):
+ def __init__(self, pakfire, id, name, url, path, targetpath, revision, branch):
+ self.pakfire = pakfire
+ self.id = id
+ self.name = name
+ self.url = url
+ self.targetpath = targetpath
+ self.revision = revision
+ self.branch = branch
+
+ # If the repository is not yet checked out, we create a local clone
+ # from it to work with it.
+ if not self.is_cloned():
+ self.clone()
+ else:
+ # Always refresh the repository to have the recent commits.
+ self.fetch()
+
+ def is_cloned(self):
+ return os.path.exists(self.path)
+
+ def clone(self):
+ if self.is_cloned():
+ return
+
+ dirname = os.path.dirname(self.path)
+ basename = os.path.basename(self.path)
+
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+
+ self._git("clone %s %s" % (self.url, basename), path=dirname)
+
+ def fetch(self):
+ self._git("fetch")
+
+ @property
+ def path(self):
+ h = hashlib.sha1(self.url)
+
+ # XXX path is to be changed
+ return "/var/cache/pakfire/sources/%s" % h.hexdigest()
+
+ def _git(self, cmd, path=None):
+ if not path:
+ path = self.path
+
+ cmd = "cd %s && git %s" % (path, cmd)
+
+ logging.debug("Running command: %s" % cmd)
+
+ return subprocess.check_output(["/bin/sh", "-c", cmd])
+
+ def _git_changed_files(self, revision1, revision2=""):
+ files = self._git("diff --name-only %s %s" % (revision1, revision2))
+
+ return [os.path.join(self.path, f) for f in files.splitlines()]
+
+ def _git_checkout_revision(self, revision):
+ self._git("checkout %s" % revision)
+
+ def update_revision(self, revision, **pakfire_args):
+ # Checkout the revision we want to work with.
+ self._git_checkout_revision(revision)
+
+ # Get list of all changes files between the current revision and
+ # the previous one.
+ files = self._git_changed_files("HEAD^", "HEAD")
+
+ # Update all changed files and return a repository with them.
+ return self.update_files([f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)],
+ **pakfire_args)
+
+ def update_files(self, files, **pakfire_args):
+ rnd = random.randint(0, 1024**2)
+ tmpdir = "/tmp/pakfire-source-%s" % rnd
+
+ pkgs = []
+ for file in files:
+ if os.path.exists(file):
+ pkgs.append(file)
+ # XXX not sure what to do here
+ #else:
+ # pkg_name = os.path.basename(os.path.dirname(file))
+ #
+ # # Send deleted package to server.
+ # self.master.package_remove(self, pkg_name)
+
+ if not pkgs:
+ return
+
+ # XXX This totally ignores the local configuration.
+ pakfire.api.dist(pkgs, resultdirs=[tmpdir,], **pakfire_args)
+
+ # Create a kind of dummy repository to link the packages against it.
+ repo = pakfire.repository.LocalSourceRepository(self.pakfire,
+ "source-%s" % rnd, "Source packages", tmpdir, idx="directory")
+ repo.update(force=True)
+
+ return repo
+
+ # XXX don't forget to remove the repository.
+
+ def update_all(self):
+ _files = []
+ for dir, subdirs, files in os.walk(self.path):
+ for f in files:
+ if not f.endswith(".%s" % MAKEFILE_EXTENSION):
+ continue
+
+ _files.append(os.path.join(dir, f))
+
+ return self.update_files(_files)
+
+
+class XMLRPCTransport(xmlrpclib.Transport):
+ user_agent = "pakfire/%s" % PAKFIRE_VERSION
+
+ def single_request(self, *args, **kwargs):
+ ret = None
+
+ # Tries can be passed to this method.
+ tries = kwargs.pop("tries", 100)
+
+ while tries:
+ try:
+ ret = xmlrpclib.Transport.single_request(self, *args, **kwargs)
+
+ except socket.error, e:
+ # These kinds of errors are not fatal, but they can happen on
+ # a bad internet connection or whatever.
+ # 32 Broken pipe
+ # 110 Connection timeout
+ # 111 Connection refused
+ if not e.errno in (32, 110, 111,):
+ raise
+
+ except xmlrpclib.ProtocolError, e:
+ # Log all XMLRPC protocol errors.
+ logging.error("XMLRPC protocol error:")
+ logging.error(" URL: %s" % e.url)
+ logging.error(" HTTP headers:")
+ for header in e.headers.items():
+ logging.error(" %s: %s" % header)
+ logging.error(" Error code: %s" % e.errcode)
+ logging.error(" Error message: %s" % e.errmsg)
+ raise
+
+ else:
+ # If request was successful, we can break the loop.
+ break
+
+ # If the request was not successful, we wait a little time to try
+ # it again.
+ logging.debug("Request was not successful, we wait a little bit and try it again.")
+ time.sleep(30)
+ tries -= 1
+
+ else:
+ logging.error("Maximum number of tries was reached. Giving up.")
+ # XXX need better exception here.
+ raise Exception, "Could not fulfill request."
+
+ return ret
+
+
+class Server(object):
+ def __init__(self, **pakfire_args):
+ self.config = pakfire.config.Config()
+
+ server = self.config._slave.get("server")
+
+ logging.info("Establishing RPC connection to: %s" % server)
+
+ self.conn = xmlrpclib.ServerProxy(server, transport=XMLRPCTransport(),
+ allow_none=True)
+
+ @property
+ def hostname(self):
+ """
+ Return the host's name.
+ """
+ return socket.gethostname()
+
+ def update_info(self):
+ # Get the current load average.
+ loadavg = ", ".join(["%.2f" % l for l in os.getloadavg()])
+
+ # Get all supported architectures.
+ arches = sorted([a for a in self.config.supported_arches])
+ arches = " ".join(arches)
+
+ # Determine CPU model
+ cpuinfo = {}
+ with open("/proc/cpuinfo") as f:
+ for line in f.readlines():
+ # Break at an empty line, because all information after that
+ # is redundant.
+			if not line.strip():
+ break
+
+			try:
+				key, value = line.split(":", 1)
+			except ValueError:
+				continue # Skip invalid lines
+
+ key, value = key.strip(), value.strip()
+
+ cpuinfo[key] = value
+
+ cpu_model = cpuinfo.get("model name", "Could not be determined")
+
+ # Determine memory size
+ memory = 0
+ with open("/proc/meminfo") as f:
+ line = f.readline()
+
+ try:
+ a, b, c = line.split()
+ except:
+ pass
+ else:
+ memory = int(b) * 1024
+
+ self.conn.update_host_info(loadavg, cpu_model, memory, arches)
+
+ def upload_file(self, filename, build_id):
+ # Get the hash of the file.
+ hash = pakfire.util.calc_hash1(filename)
+
+ # Get the size of the file.
+ size = os.path.getsize(filename)
+
+ # Get an upload ID from the server.
+ upload_id = self.conn.get_upload_cookie(os.path.basename(filename),
+ size, hash)
+
+ # Calculate the number of chunks.
+ chunks = (size / CHUNK_SIZE) + 1
+
+ # Cut the file in pieces and upload them one after another.
+ with open(filename) as f:
+ chunk = 0
+ while True:
+ data = f.read(CHUNK_SIZE)
+ if not data:
+ break
+
+ chunk += 1
+ logging.info("Uploading chunk %s/%s of %s." % (chunk, chunks,
+ os.path.basename(filename)))
+
+ data = xmlrpclib.Binary(data)
+ self.conn.upload_chunk(upload_id, data)
+
+ # Tell the server, that we finished the upload.
+ ret = self.conn.finish_upload(upload_id, build_id)
+
+ # If the server sends false, something happened with the upload that
+ # could not be recovered.
+ if not ret:
+ raise Exception, "Upload failed."
+
+ def update_build_status(self, build_id, status, message=""):
+ ret = self.conn.update_build_state(build_id, status, message)
+
+ # If the server returns False, then it did not acknowledge our status
+ # update and the build has to be aborted.
+ if not ret:
+ raise BuildAbortedException, "The build was aborted by the master server."
+
+ def build_job(self, type=None):
+ build = self.conn.build_job() # XXX type=None
+
+ # If the server has got no job for us, we end right here.
+ if not build:
+ return
+
+ job_types = {
+ "binary" : self.build_binary_job,
+ "source" : self.build_source_job,
+ }
+
+ build_id = build["id"]
+ build_type = build["type"]
+
+ try:
+ func = job_types[build_type]
+ except KeyError:
+			raise Exception, "Build type not supported: %s" % build_type
+
+ # Call the function that processes the build and try to catch general
+ # exceptions and report them to the server.
+ # If everything goes okay, we tell this the server, too.
+ try:
+ func(build_id, build)
+
+ except Exception, e:
+ # Format the exception and send it to the server.
+ message = "%s: %s" % (e.__class__.__name__, e)
+
+ self.update_build_status(build_id, "failed", message)
+ raise
+
+ else:
+ self.update_build_status(build_id, "finished")
+
+ def build_binary_job(self, build_id, build):
+ arch = build["arch"]
+ filename = build["name"]
+ download = build["download"]
+ hash1 = build["hash1"]
+
+ # Create a temporary file and a directory for the resulting files.
+ tmpdir = tempfile.mkdtemp()
+ tmpfile = os.path.join(tmpdir, filename)
+ logfile = os.path.join(tmpdir, "build.log")
+
+ # Get a package grabber and add mirror download capabilities to it.
+ grabber = pakfire.downloader.PackageDownloader()
+
+ try:
+ # Download the source.
+ grabber.urlgrab(download, filename=tmpfile)
+
+ # Check if the download checksum matches.
+ if pakfire.util.calc_hash1(tmpfile) == hash1:
+ print "Checksum matches: %s" % hash1
+ else:
+ raise DownloadError, "Download was corrupted"
+
+ # Update the build status on the server.
+ self.update_build_status(build_id, "running")
+
+ # Run the build.
+ pakfire.api.build(tmpfile, build_id=build_id,
+ resultdirs=[tmpdir,], logfile=logfile)
+
+ self.update_build_status(build_id, "uploading")
+
+ # Walk through the result directory and upload all (binary) files.
+ for dir, subdirs, files in os.walk(tmpdir):
+ for file in files:
+ file = os.path.join(dir, file)
+ if file in (logfile, tmpfile,):
+ continue
+
+ self.upload_file(file, build_id)
+
+ except DependencyError, e:
+ message = "%s: %s" % (e.__class__.__name__, e)
+ self.update_build_status(build_id, "dependency_error", message)
+
+ finally:
+ # Upload the logfile in any case and if it exists.
+ if os.path.exists(logfile):
+ self.upload_file(logfile, build_id)
+
+ # Cleanup the files we created.
+ pakfire.util.rm(tmpdir)
+
+ def build_source_job(self, build_id, build):
+ # Update the build status on the server.
+ self.update_build_status(build_id, "running")
+
+ source = Source(self, **build["source"])
+
+ repo = source.update_revision(build["revision"], build_id=build_id)
+
+ # Upload all files in the repository.
+ for pkg in repo.get_all():
+ self.upload_file(pkg.filename, build_id)
+
+ repo.remove()
+++ /dev/null
-#!/usr/bin/python
-
-import master
-import slave
-
+++ /dev/null
-#!/usr/bin/python
-
-import hashlib
-import logging
-import os
-import socket
-import xmlrpclib
-
-import pakfire.packages
-
-CHUNK_SIZE = 2097152 # 2M
-
-class MasterSlave(object):
- @property
- def hostname(self):
- """
- Return the host's name.
- """
- return socket.gethostname()
-
- def _chunked_upload(self, filename):
- # Update the amount of chunks that there will be to be uploaded.
- chunks = (os.path.getsize(filename) / CHUNK_SIZE) + 1
-
- # Open the file for read.
- f = open(filename, "rb")
-
- chunk, id = 0, ""
- while True:
- # Read a chunk and break if we reached the end of the file.
- data = f.read(CHUNK_SIZE)
- if not data:
- break
-
- chunk += 1
- logging.info("Uploading chunk %s/%s of %s." % (chunk, chunks, filename))
-
- # Calc the hash of the chunk.
- hash = hashlib.sha1(data)
-
- # Actually do the upload and make sure we got an ID.
- id = self.conn.chunk_upload(id, hash.hexdigest(), xmlrpclib.Binary(data))
- assert id
-
- f.close()
-
- return id
-
- def upload_package_file(self, source_id, pkg_id, pkg):
- logging.info("Uploading package file: %s" % pkg.filename)
-
- # Upload the file at first to the server.
- file_id = self._chunked_upload(pkg.filename)
-
- info = {
- "filename" : os.path.basename(pkg.filename),
- "source_id" : source_id,
- "type" : pkg.type,
- "arch" : pkg.arch,
- "summary" : pkg.summary,
- "description" : pkg.description,
- "requires" : " ".join(pkg.requires),
- "provides" : "",
- "obsoletes" : "",
- "conflicts" : "",
- "url" : pkg.url,
- "license" : pkg.license,
- "maintainer" : pkg.maintainer,
- "size" : pkg.size,
- "hash1" : pkg.hash1,
- "build_host" : pkg.build_host,
- "build_id" : pkg.build_id,
- "build_time" : pkg.build_time,
- "uuid" : pkg.uuid,
- }
-
- if isinstance(pkg, pakfire.packages.BinaryPackage):
- info.update({
- "provides" : " ".join(pkg.provides),
- "obsoletes" : " ".join(pkg.obsoletes),
- "conflicts" : " ".join(pkg.conflicts),
- })
-
- return self.conn.package_add_file(pkg_id, file_id, info)
-
- def upload_log_file(self, build_id, logfile):
- file_id = self._chunked_upload(logfile)
-
- return self.conn.build_add_log(build_id, file_id)
-
- def package_add(self, source, pkg):
- logging.info("Adding package: %s" % pkg.friendly_name)
-
- # Collect data that is sent to the database...
- info = {
- "name" : pkg.name,
- "epoch" : pkg.epoch,
- "version" : pkg.version,
- "release" : pkg.release,
- "groups" : " ".join(pkg.groups),
- "maintainer" : pkg.maintainer,
- "license" : pkg.license,
- "url" : pkg.url,
- "summary" : pkg.summary,
- "description" : pkg.description,
- "supported_arches" : pkg.supported_arches,
- "source_id" : source.id,
- }
-
- return self.conn.package_add(info)
-
- def package_remove(self, source, pkg):
- logging.info("Package '%s' has been removed." % pkg)
+++ /dev/null
-#!/usr/bin/python
-
-import logging
-import hashlib
-import os
-import random
-import shutil
-import subprocess
-import xmlrpclib
-
-import pakfire
-import pakfire.api
-import pakfire.base
-
-import pakfire.packages as packages
-import pakfire.repository as repository
-import pakfire.util as util
-from pakfire.constants import *
-
-from base import MasterSlave
-
-class Source(object):
- def __init__(self, master, id, name, url, path, targetpath, revision, branch):
- self.master = master
- self.id = id
- self.name = name
- self.url = url
- self.targetpath = targetpath
- self.revision = revision
- self.branch = branch
-
- # If the repository is not yet checked out, we create a local clone
- # from it to work with it.
- if not os.path.exists(self.path):
- dirname = os.path.dirname(self.path)
- basename = os.path.basename(self.path)
-
- if not os.path.exists(dirname):
- os.makedirs(dirname)
-
- self._git("clone %s %s" % (self.url, basename), path=dirname)
-
- else:
- # Always refresh the repository to have the recent commits.
- self._git("fetch")
-
- @property
- def path(self):
- h = hashlib.sha1(self.url)
-
- # XXX path is to be changed
- return "/var/cache/pakfire/sources/%s" % h.hexdigest()
-
- @property
- def pakfire(self):
- return self.master.pakfire
-
- def _git(self, cmd, path=None):
- if not path:
- path = self.path
-
- cmd = "cd %s && git %s" % (path, cmd)
-
- logging.debug("Running command: %s" % cmd)
-
- return subprocess.check_output(["/bin/sh", "-c", cmd])
-
- def _git_rev_list(self, revision=None):
- if not revision:
- revision = self.revision
-
- command = "rev-list %s..origin/%s" % (revision, self.branch)
-
- # Get all normal commits.
- commits = self._git("%s --no-merges" % command)
- commits = commits.splitlines()
-
- revisions = []
- for commit in self._git(command).splitlines():
- # Check if commit is a normal commit or merge commit.
- merge = not commit in commits
-
- revisions.append((commit, merge))
-
- return reversed(revisions)
-
- def _git_changed_files(self, revision1, revision2=""):
- files = self._git("diff --name-only %s %s" % (revision1, revision2))
-
- return [os.path.join(self.path, f) for f in files.splitlines()]
-
- def _git_checkout_revision(self, revision):
- self._git("checkout %s" % revision)
-
- def update_revision(self, (revision, merge), **pakfire_args):
- if not merge:
- self._git_checkout_revision(revision)
-
- # Get list of all changes files between the current revision and
- # the previous one.
- files = self._git_changed_files("HEAD^", "HEAD")
-
- self.update_files([f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)],
- **pakfire_args)
-
- # Send update to the server.
- #self.master.update_revision(self, revision)
-
- def update_files(self, files, **pakfire_args):
- rnd = random.randint(0, 1024**2)
- tmpdir = "/tmp/pakfire-source-%s" % rnd
-
- pkgs = []
- for file in files:
- if os.path.exists(file):
- pkgs.append(file)
- else:
- pkg_name = os.path.basename(os.path.dirname(file))
-
- # Send deleted package to server.
- self.master.package_remove(self, pkg_name)
-
- if not pkgs:
- return
-
- # XXX This totally ignores the local configuration.
- pakfire.api.dist(pkgs, resultdirs=[tmpdir,], **pakfire_args)
-
- # Create a kind of dummy repository to link the packages against it.
- repo = repository.LocalSourceRepository(self.pakfire,
- "source-%s" % rnd, "Source packages", tmpdir, idx="directory")
- repo.update(force=True)
-
- for pkg in repo.get_all():
- logging.debug("Processing package: %s" % pkg)
-
- # Register package in database and get an ID.
- pkg_id = self.master.package_add(self, pkg)
-
- # Upload the package.
- self.master.upload_package_file(self.id, pkg_id, pkg)
-
- util.rm(tmpdir)
-
- def update(self):
- # If there has been no data, yet we need to import all packages
- # that are currently checked out.
- if not self.revision:
- self.update_all()
-
- # Update the revisions on the server.
- for revision, merge in self._git_rev_list():
- if merge:
- continue
-
- logging.info("Sending revision to server: %s" % revision)
- self.master.conn.source_add_revision(self.id, revision)
-
- # Get all pending revisions from the server and process them.
- #for rev in self.master.conn.source_get_pending_revisions(self.id):
- # self.update_revision(rev)
-
- def update_all(self):
- _files = []
- for dir, subdirs, files in os.walk(self.path):
- for f in files:
- if not f.endswith(".%s" % MAKEFILE_EXTENSION):
- continue
-
- _files.append(os.path.join(dir, f))
-
- self.update_files(_files)
-
-
-class Master(MasterSlave):
- def __init__(self, **pakfire_args):
- self.pakfire = pakfire.base.Pakfire(**pakfire_args)
-
- server = self.pakfire.config._master.get("server")
-
- logging.info("Establishing RPC connection to: %s" % server)
-
- self.conn = xmlrpclib.Server(server)
-
- def update_sources(self):
- sources = self.conn.sources_get_all()
-
- for source in sources:
- source = Source(self, **source)
-
- source.update()
-
- def update_revision(self, source, revision):
- self.conn.sources_update_revision(source.id, revision)
+++ /dev/null
-#!/usr/bin/python
-
-import logging
-import os
-import socket
-import tempfile
-import xmlrpclib
-
-import pakfire.api
-import pakfire.base
-import pakfire.downloader
-import pakfire.packages
-import pakfire.util
-
-from pakfire.constants import *
-
-from base import MasterSlave
-from master import Source
-
-class Slave(MasterSlave):
- def __init__(self, **pakfire_args):
- self.pakfire = pakfire.base.Pakfire(**pakfire_args)
-
- server = self.pakfire.config._slave.get("server")
-
- logging.info("Establishing RPC connection to: %s" % server)
-
- self.conn = xmlrpclib.Server(server)
-
- def keepalive(self):
- """
- Send the server a keep-alive to say that we are still there.
- """
- hostname = self.hostname
- l1, l5, l15 = os.getloadavg()
-
- logging.info("Sending the server a keepalive: %s" % hostname)
-
- # Get all supported architectures and send them to the server.
- arches = [a for a in self.pakfire.supported_arches]
- arches.sort()
-
- self.conn.keepalive(hostname, l5, arches)
-
- def update_build_status(self, build_id, status, message=""):
- self.conn.update_build_state(build_id, status, message)
-
- def build_job(self):
- build = self.conn.build_job(self.hostname)
-
- # If the server has got no job for us, we end right here.
- if not build:
- return
-
- print build
-
- job_types = {
- "binary" : self.build_binary_job,
- "source" : self.build_source_job,
- }
-
- build_id = build["id"]
- build_type = build["type"]
-
- try:
- func = job_types[build_type]
- except KeyError:
- raise Exception, "Build type not supported: %s" % type
-
- # Call the function that processes the build and try to catch general
- # exceptions and report them to the server.
- # If everything goes okay, we tell this the server, too.
- try:
- func(build_id, build)
-
- except Exception, e:
- message = "%s: %s" % (e.__class__.__name__, e)
- self.update_build_status(build_id, "failed", message)
- raise
-
- else:
- self.update_build_status(build_id, "finished")
-
- def build_binary_job(self, build_id, build):
- arch = build["arch"]
- filename = build["name"]
- download = build["download"]
- hash1 = build["hash1"]
-
- # Create a temporary file and a directory for the resulting files.
- tmpdir = tempfile.mkdtemp()
- tmpfile = os.path.join(tmpdir, filename)
- logfile = os.path.join(tmpdir, "build.log")
-
- # Get a package grabber and add mirror download capabilities to it.
- grabber = pakfire.downloader.PackageDownloader()
-
- try:
- # Download the source.
- grabber.urlgrab(download, filename=tmpfile)
-
- # Check if the download checksum matches.
- if pakfire.util.calc_hash1(tmpfile) == hash1:
- print "Checksum matches: %s" % hash1
- else:
- raise DownloadError, "Download was corrupted"
-
- # Update the build status on the server.
- self.update_build_status(build_id, "running")
-
- # Run the build.
- pakfire.api.build(tmpfile, build_id=build_id,
- resultdirs=[tmpdir,], logfile=logfile)
-
- self.update_build_status(build_id, "uploading")
-
- for dir, subdirs, files in os.walk(tmpdir):
- for file in files:
- file = os.path.join(dir, file)
- if file in (logfile, tmpfile,):
- continue
-
- pkg = pakfire.packages.open(self.pakfire, None, file)
-
- self.upload_package_file(build["source_id"], build["pkg_id"], pkg)
-
- except DependencyError, e:
- message = "%s: %s" % (e.__class__.__name__, e)
- self.update_build_status(build_id, "dependency_error", message)
-
- finally:
- # Upload the logfile in any case and if it exists.
- if os.path.exists(logfile):
- self.upload_log_file(build_id, logfile)
-
- # Cleanup the files we created.
- pakfire.util.rm(tmpdir)
-
- def build_source_job(self, build_id, build):
- # Update the build status on the server.
- self.update_build_status(build_id, "running")
-
- source = Source(self, **build["source"])
-
- source.update_revision((build["revision"], False), build_id=build_id)
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2011-05-01 13:43+0000\n"
+"POT-Creation-Date: 2011-05-19 12:48+0200\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
msgstr ""
#: ../pakfire/cli.py:454
-msgid "Pakfire slave command line interface."
+msgid "Pakfire server command line interface."
msgstr ""
#: ../pakfire/cli.py:478
basename2cls = {
"pakfire" : Cli,
"pakfire-build" : CliBuilder,
- "pakfire-master" : CliMaster,
+ "pakfire-server" : CliServer,
"pakfire-repo" : CliRepo,
- "pakfire-slave" : CliSlave,
}
# Get the basename of the program
+++ /dev/null
-pakfire
\ No newline at end of file
scripts = [
"scripts/pakfire",
"scripts/pakfire-build",
- "scripts/pakfire-master",
"scripts/pakfire-repo",
- "scripts/pakfire-slave",
+ "scripts/pakfire-server",
],
cmdclass = { "build" : build_extra.build_extra,
"build_i18n" : build_i18n.build_i18n },