from . import daemon
from . import packages
from . import repository
-from . import server
from . import transaction
from . import ui
from . import util
Cli.handle_provides(self, int=True)
class CliServer(Cli):
	# Pakfire flavour this CLI drives; read by the Cli base class when it
	# instantiates Pakfire objects.
	pakfire = base.PakfireServer

	def __init__(self):
		"""
		Build the argument parser for the pakfire server command line
		interface, parse the command line and set up the action
		dispatch table.
		"""
		self.parser = argparse.ArgumentParser(
			description = _("Pakfire server command line interface."),
		)
		self._add_common_arguments(self.parser)

		# Add sub-commands.
		self.sub_commands = self.parser.add_subparsers()

		self.parse_command_build()
		self.parse_command_keepalive()
		self.parse_command_repoupdate()
		self.parse_command_repo()
		self.parse_command_info()

		# Finally parse all arguments from the command line and save them.
		self.args = self.parser.parse_args()

		#self.server = server.Server(**self.pakfire_args)
		# NOTE(review): self.server is never assigned (the line above is
		# commented out) yet several handlers below use it — confirm how
		# self.server is meant to be created.

		# Maps the "action" constant stored by each sub-command to the
		# handler method that implements it.
		self.action2func = {
			"build" : self.handle_build,
			"info" : self.handle_info,
			"keepalive" : self.handle_keepalive,
			"repoupdate" : self.handle_repoupdate,
			"repo_create": self.handle_repo_create,
		}

	@property
	def pakfire_args(self):
		# Keyword arguments for Pakfire instances, derived from the
		# parsed command line.
		ret = {}

		if hasattr(self.args, "offline") and self.args.offline:
			ret["downloader"] = {
				"offline" : self.args.offline,
			}

		return ret

	def parse_command_build(self):
		# Implement the "build" command.
		# NOTE(review): "scrach" in the help string looks like a typo for
		# "scratch" — user-visible text left unchanged here.
		sub_build = self.sub_commands.add_parser("build",
			help=_("Send a scrach build job to the server."))
		sub_build.add_argument("package", nargs=1,
			help=_("Give name of at least one package to build."))
		sub_build.add_argument("--arch", "-a",
			help=_("Limit build to only these architecture(s)."))
		# "action" identifies the chosen sub-command for action2func.
		sub_build.add_argument("action", action="store_const", const="build")

	def parse_command_keepalive(self):
		# Implement the "keepalive" command.
		sub_keepalive = self.sub_commands.add_parser("keepalive",
			help=_("Send a keepalive to the server."))
		sub_keepalive.add_argument("action", action="store_const",
			const="keepalive")

	def parse_command_repoupdate(self):
		# Implement the "repoupdate" command.
		sub_repoupdate = self.sub_commands.add_parser("repoupdate",
			help=_("Update all repositories."))
		sub_repoupdate.add_argument("action", action="store_const",
			const="repoupdate")

	def parse_command_repo(self):
		# "repo" groups repository-management sub-commands.
		sub_repo = self.sub_commands.add_parser("repo",
			help=_("Repository management commands."))

		sub_repo_commands = sub_repo.add_subparsers()

		self.parse_command_repo_create(sub_repo_commands)

	def parse_command_repo_create(self, sub_commands):
		# Implement the "repo create" command.
		sub_create = sub_commands.add_parser("create",
			help=_("Create a new repository index."))
		sub_create.add_argument("path", nargs=1,
			help=_("Path to the packages."))
		sub_create.add_argument("inputs", nargs="+",
			help=_("Path to input packages."))
		sub_create.add_argument("--key", "-k", nargs="?",
			help=_("Key to sign the repository with."))
		sub_create.add_argument("action", action="store_const", const="repo_create")

	def parse_command_info(self):
		# Implement the "info" command.
		sub_info = self.sub_commands.add_parser("info",
			help=_("Dump some information about this machine."))
		sub_info.add_argument("action", action="store_const", const="info")

	def handle_keepalive(self):
		# Report this host's current stats to the server.
		self.server.update_info()

	def handle_build(self):
		"""
		Submit a scratch build job to the server.
		"""
		# Arch.
		if self.args.arch:
			arches = self.args.arch.split()
		# NOTE(review): "arches" is computed but never used below.

		(package,) = self.args.package

		self.server.create_scratch_build({})
		return
		# NOTE(review): everything below this "return" is unreachable —
		# confirm whether the early return is a temporary stub.

		# Temporary folder for the source package.
		tmpdir = "/tmp/pakfire-%s" % util.random_string()

		try:
			os.makedirs(tmpdir)

			pakfire.dist(package, resultdir=[tmpdir,])

			for file in os.listdir(tmpdir):
				file = os.path.join(tmpdir, file)

				print(file)

		finally:
			# Always clean up the temporary directory.
			if os.path.exists(tmpdir):
				util.rm(tmpdir)

	def handle_repoupdate(self):
		# Ask the server to regenerate all repositories.
		self.server.update_repositories()

	def handle_repo_create(self):
		# Build a repository index from the given input packages.
		path = self.args.path[0]

		p = self.create_pakfire()
		p.repo_create(path, self.args.inputs, key_id=self.args.key)

	def handle_info(self):
		# Print the machine information reported by the server object.
		info = self.server.info()

		print("\n".join(info))
class CliBuilderIntern(Cli):
def __init__(self):
self.parser = argparse.ArgumentParser(
+++ /dev/null
-#!/usr/bin/python3
-###############################################################################
-# #
-# Pakfire - The IPFire package management system #
-# Copyright (C) 2011 Pakfire development team #
-# #
-# This program is free software: you can redistribute it and/or modify #
-# it under the terms of the GNU General Public License as published by #
-# the Free Software Foundation, either version 3 of the License, or #
-# (at your option) any later version. #
-# #
-# This program is distributed in the hope that it will be useful, #
-# but WITHOUT ANY WARRANTY; without even the implied warranty of #
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
-# GNU General Public License for more details. #
-# #
-# You should have received a copy of the GNU General Public License #
-# along with this program. If not, see <http://www.gnu.org/licenses/>. #
-# #
-###############################################################################
-
-import hashlib
-import os
-import random
-import socket
-import subprocess
-import tempfile
-import time
-import xmlrpc.client
-
-import logging
-log = logging.getLogger("pakfire")
-
-import pakfire.base
-import pakfire.config
-import pakfire.downloader
-import pakfire.packages
-import pakfire.repository
-import pakfire.util
-
-from pakfire.system import system
-from pakfire.constants import *
-from pakfire.i18n import _
-
-CHUNK_SIZE = 1024**2 # 1M
-
class Source(object):
	"""
	A cached local git clone of a package source repository.

	On construction the repository is cloned if it does not exist yet;
	otherwise it is fetched so recent commits are available.
	"""

	def __init__(self, pakfire, id, name, url, path, targetpath, revision, branch):
		self.pakfire = pakfire
		self.id = id
		self.name = name
		self.url = url
		# NOTE(review): the "path" argument is accepted for interface
		# compatibility but not stored — the checkout location is derived
		# from the URL by the "path" property below.
		self.targetpath = targetpath
		self.revision = revision
		self.branch = branch

		# If the repository is not yet checked out, we create a local clone
		# from it to work with it.
		if not self.is_cloned():
			self.clone()
		else:
			# Always refresh the repository to have the recent commits.
			self.fetch()

	def is_cloned(self):
		# True when the local clone already exists on disk.
		return os.path.exists(self.path)

	def clone(self):
		"""
		Create the local clone of the repository (no-op if present).
		"""
		if self.is_cloned():
			return

		dirname = os.path.dirname(self.path)
		basename = os.path.basename(self.path)

		if not os.path.exists(dirname):
			os.makedirs(dirname)

		self._git("clone %s %s" % (self.url, basename), path=dirname)

	def fetch(self):
		# Fetch new commits from the remote repository.
		self._git("fetch")

	@property
	def path(self):
		"""
		Stable per-URL location of the local clone.
		"""
		h = hashlib.new("sha1")
		# hashlib requires bytes on Python 3 — hash the encoded URL.
		h.update(self.url.encode("utf-8"))

		# XXX path is to be changed
		return "/var/cache/pakfire/sources/%s" % h.hexdigest()

	def _git(self, cmd, path=None):
		"""
		Run a git command in "path" (defaults to the clone) and return
		its output as text.
		"""
		if not path:
			path = self.path

		cmd = "cd %s && git %s" % (path, cmd)

		log.debug("Running command: %s" % cmd)

		# check_output() returns bytes on Python 3; decode so callers can
		# treat the output as text (splitlines/os.path.join below).
		return subprocess.check_output(["/bin/sh", "-c", cmd]).decode("utf-8")

	def _git_changed_files(self, revision1, revision2=""):
		# Absolute paths of all files changed between the two revisions.
		files = self._git("diff --name-only %s %s" % (revision1, revision2))

		return [os.path.join(self.path, f) for f in files.splitlines()]

	def _git_checkout_revision(self, revision):
		self._git("checkout %s" % revision)

	def update_revision(self, revision, **pakfire_args):
		"""
		Check out "revision" and rebuild all makefiles it changed.
		Returns the repository created by update_files() (or None).
		"""
		# Checkout the revision we want to work with.
		self._git_checkout_revision(revision)

		# Get list of all changed files between the current revision and
		# the previous one.
		files = self._git_changed_files("HEAD^", "HEAD")

		# Update all changed files and return a repository with them.
		return self.update_files([f for f in files if f.endswith(".%s" % MAKEFILE_EXTENSION)],
			**pakfire_args)

	def update_files(self, files, **pakfire_args):
		"""
		Build source packages for the given makefiles and return a dummy
		repository containing them; returns None when nothing to build.
		"""
		rnd = random.randint(0, 1024**2)
		tmpdir = "/tmp/pakfire-source-%s" % rnd

		pkgs = []
		for file in files:
			if os.path.exists(file):
				pkgs.append(file)
			# XXX not sure what to do here
			#else:
			#	pkg_name = os.path.basename(os.path.dirname(file))
			#
			#	# Send deleted package to server.
			#	self.master.package_remove(self, pkg_name)

		if not pkgs:
			return

		# XXX This totally ignores the local configuration.
		pakfire.api.dist(pkgs, resultdirs=[tmpdir,], **pakfire_args)

		# Create a kind of dummy repository to link the packages against it.
		# repo_create() does not accept build_id, so drop it here.
		if "build_id" in pakfire_args:
			del pakfire_args["build_id"]
		pakfire_args["mode"] = "server"

		repo = pakfire.api.repo_create("source-%s" % rnd, [tmpdir,], type="source",
			**pakfire_args)

		return repo

	def update_all(self):
		"""
		Rebuild every makefile found anywhere in the clone.
		"""
		_files = []
		for dir, subdirs, files in os.walk(self.path):
			for f in files:
				if not f.endswith(".%s" % MAKEFILE_EXTENSION):
					continue

				_files.append(os.path.join(dir, f))

		return self.update_files(_files)
-
-
class XMLRPCTransport(xmlrpc.client.Transport):
	"""
	XML-RPC transport that retries requests on transient network errors.

	Broken-pipe, connection-timeout and connection-refused socket errors
	do not abort the request; it is retried after a pause until the
	maximum number of tries is exhausted.
	"""

	# Sent as the User-Agent header with every request. PAKFIRE_VERSION
	# comes from pakfire.constants (star-imported at the top of the file).
	user_agent = "pakfire/%s" % PAKFIRE_VERSION

	def single_request(self, *args, **kwargs):
		"""
		Perform one XML-RPC request with retries.

		"tries" may be passed as a keyword argument (default 100).
		Protocol errors are logged and re-raised immediately; only the
		whitelisted socket errors trigger a retry.
		"""
		ret = None

		# Tries can be passed to this method.
		tries = kwargs.pop("tries", 100)

		while tries:
			try:
				ret = xmlrpc.client.Transport.single_request(self, *args, **kwargs)

			except socket.error as e:
				# These kinds of errors are not fatal, but they can happen on
				# a bad internet connection or whatever.
				#  32 Broken pipe
				# 110 Connection timeout
				# 111 Connection refused
				# Anything else is fatal and propagates.
				if not e.errno in (32, 110, 111,):
					raise

			except xmlrpc.client.ProtocolError as e:
				# Log all XMLRPC protocol errors.
				log.error("XMLRPC protocol error:")
				log.error(" URL: %s" % e.url)
				log.error(" HTTP headers:")
				for header in list(e.headers.items()):
					log.error(" %s: %s" % header)
				log.error(" Error code: %s" % e.errcode)
				log.error(" Error message: %s" % e.errmsg)
				raise

			else:
				# If request was successful, we can break the loop.
				break

			# If the request was not successful, we wait a little time to try
			# it again.
			log.debug("Request was not successful, we wait a little bit and try it again.")
			time.sleep(30)
			tries -= 1

		else:
			# The while loop exhausted all tries without a successful
			# request (while/else: else runs when the condition fails).
			log.error("Maximum number of tries was reached. Giving up.")
			# XXX need better exception here.
			raise Exception("Could not fulfill request.")

		return ret
-
-
class ServerProxy(xmlrpc.client.ServerProxy):
	"""
	XML-RPC proxy with pakfire defaults: a retrying transport and
	marshalling of None enabled.
	"""

	def __init__(self, server, *args, **kwargs):
		# Fall back to the retrying pakfire transport unless the caller
		# supplied their own.
		if "transport" not in kwargs:
			kwargs["transport"] = XMLRPCTransport()

		# None values must always be marshallable.
		kwargs["allow_none"] = True

		super().__init__(server, *args, **kwargs)
-
-
class Server(object):
	"""
	Client-side handle to the pakfire master server.

	Opens an XML-RPC connection to the configured master and provides
	host-information reporting, chunked file uploads and build-job
	processing.
	"""

	def __init__(self, **pakfire_args):
		self.config = pakfire.config.Config()

		# Address of the master server from the slave configuration.
		server = self.config._slave.get("server")

		log.info("Establishing RPC connection to: %s" % server)

		self.conn = ServerProxy(server)

		# Extra keyword arguments passed through to Pakfire calls.
		self.pakfire_args = pakfire_args

	@property
	def hostname(self):
		"""
		Return the host's name.
		"""
		return socket.gethostname()

	@property
	def uname(self):
		# Machine hardware name, e.g. "x86_64" or "armv7l".
		return os.uname()[4]

	@property
	def cpu_model(self):
		"""
		Return a human-readable CPU model string, or a translated
		fallback when it cannot be determined.
		"""
		# Determine CPU model
		cpuinfo = {}
		with open("/proc/cpuinfo") as f:
			for line in f.readlines():
				# Break at an empty line, because all information after that
				# is redundant. strip() because every line still carries
				# its trailing newline.
				if not line.strip():
					break

				try:
					# Split on the first colon only — values may contain
					# further colons.
					key, value = line.split(":", 1)
				except ValueError:
					continue # Skip invalid lines

				key, value = key.strip(), value.strip()

				cpuinfo[key] = value

		ret = None
		if self.uname.startswith("arm"):
			# ARM kernels expose the model via Hardware/Processor fields.
			try:
				ret = "%(Hardware)s - %(Processor)s" % cpuinfo
			except KeyError:
				pass
		else:
			ret = cpuinfo.get("model name", None)

		return ret or _("Could not be determined")

	@property
	def memory(self):
		"""
		Return the total amount of system memory in bytes (0 if unknown).
		"""
		# Determine memory size
		memory = 0
		with open("/proc/meminfo") as f:
			# The first line is "MemTotal: <size> kB".
			line = f.readline()

			try:
				a, b, c = line.split()
			except ValueError:
				pass
			else:
				memory = int(b) * 1024

		return memory

	def info(self):
		"""
		Return a list of text lines describing this machine.
		"""
		ret = []

		ret.append("")
		ret.append(" PAKFIRE %s" % PAKFIRE_VERSION)
		ret.append("")
		ret.append(" %-20s: %s" % (_("Hostname"), self.hostname))
		ret.append("")

		# Hardware information
		ret.append(" %s:" % _("Hardware information"))
		ret.append(" %-16s: %s" % (_("CPU model"), self.cpu_model))
		ret.append(" %-16s: %s" % (_("Memory"), pakfire.util.format_size(self.memory)))
		ret.append("")
		ret.append(" %-16s: %s" % (_("Native arch"), system.native_arch))

		# Print the header only once; subsequent arches line up under it.
		header = _("Supported arches")
		for arch in self.config.supported_arches:
			ret.append(" %-16s: %s" % (header, arch))
			header = ""
		ret.append("")

		return ret

	def update_info(self):
		"""
		Report current load, CPU, memory and arches to the server.
		"""
		# Get the current load average.
		loadavg = ", ".join(["%.2f" % l for l in os.getloadavg()])

		# Get all supported architectures.
		arches = " ".join([a for a in self.config.supported_arches])

		self.conn.update_host_info(loadavg, self.cpu_model, self.memory, arches)

	def upload_file(self, filename, build_id):
		"""
		Upload "filename" to the server in CHUNK_SIZE pieces and attach
		it to the given build. Raises Exception when the server does not
		acknowledge the finished upload.
		"""
		# Get the hash of the file.
		hash = pakfire.util.calc_hash1(filename)

		# Get the size of the file.
		size = os.path.getsize(filename)

		# Get an upload ID from the server.
		upload_id = self.conn.get_upload_cookie(os.path.basename(filename),
			size, hash)

		# Calculate the number of chunks. Integer division — "/" would
		# produce a float on Python 3.
		chunks = (size // CHUNK_SIZE) + 1

		# Cut the file in pieces and upload them one after another.
		# Binary mode, because xmlrpc.client.Binary requires bytes.
		with open(filename, "rb") as f:
			chunk = 0
			while True:
				data = f.read(CHUNK_SIZE)
				if not data:
					break

				chunk += 1
				log.info("Uploading chunk %s/%s of %s." % (chunk, chunks,
					os.path.basename(filename)))

				data = xmlrpc.client.Binary(data)
				self.conn.upload_chunk(upload_id, data)

		# Tell the server, that we finished the upload.
		ret = self.conn.finish_upload(upload_id, build_id)

		# If the server sends false, something happened with the upload that
		# could not be recovered.
		if not ret:
			raise Exception("Upload failed.")

	def update_build_status(self, build_id, status, message=""):
		"""
		Push a build status update to the server; raises
		BuildAbortedException when the server rejects it.
		"""
		ret = self.conn.update_build_state(build_id, status, message)

		# If the server returns False, then it did not acknowledge our status
		# update and the build has to be aborted.
		if not ret:
			raise BuildAbortedException("The build was aborted by the master server.")

	def build_job(self, type=None):
		"""
		Fetch one build job from the server and process it, reporting
		the outcome (finished/failed) back to the server.
		"""
		build = self.conn.build_job() # XXX type=None

		# If the server has got no job for us, we end right here.
		if not build:
			return

		# Dispatch table from job type to its handler.
		job_types = {
			"binary" : self.build_binary_job,
			"source" : self.build_source_job,
		}

		build_id = build["id"]
		build_type = build["type"]

		try:
			func = job_types[build_type]
		except KeyError:
			# Report the actual unsupported type (the "type" parameter is
			# unused here and was previously formatted by mistake).
			raise Exception("Build type not supported: %s" % build_type)

		# Call the function that processes the build and try to catch general
		# exceptions and report them to the server.
		# If everything goes okay, we tell this the server, too.
		try:
			func(build_id, build)

		except DependencyError:
			# This has already been reported by func.
			raise

		except Exception as e:
			# Format the exception and send it to the server.
			message = "%s: %s" % (e.__class__.__name__, e)

			self.update_build_status(build_id, "failed", message)
			raise

		else:
			self.update_build_status(build_id, "finished")

	def build_binary_job(self, build_id, build):
		"""
		Download a source package, build it and upload the results.
		"""
		arch = build["arch"]
		filename = build["name"]
		download = build["download"]
		hash1 = build["hash1"]

		# Create a temporary file and a directory for the resulting files.
		tmpdir = tempfile.mkdtemp()
		tmpfile = os.path.join(tmpdir, filename)
		logfile = os.path.join(tmpdir, "build.log")

		# Get a package grabber and add mirror download capabilities to it.
		grabber = pakfire.downloader.PackageDownloader(self.config)

		try:
			# Download the source.
			grabber.urlgrab(download, filename=tmpfile)

			# Check if the download checksum matches.
			if pakfire.util.calc_hash1(tmpfile) == hash1:
				print("Checksum matches: %s" % hash1)
			else:
				raise DownloadError("Download was corrupted")

			# Update the build status on the server.
			self.update_build_status(build_id, "running")

			# Run the build.
			pakfire.api.build(tmpfile, build_id=build_id,
				resultdirs=[tmpdir,], logfile=logfile)

			self.update_build_status(build_id, "uploading")

			# Walk through the result directory and upload all (binary) files.
			# Skip the build log and the source package itself.
			for dir, subdirs, files in os.walk(tmpdir):
				for file in files:
					file = os.path.join(dir, file)
					if file in (logfile, tmpfile,):
						continue

					self.upload_file(file, build_id)

		except DependencyError as e:
			message = "%s: %s" % (e.__class__.__name__, e)
			self.update_build_status(build_id, "dependency_error", message)
			raise

		finally:
			# Upload the logfile in any case and if it exists.
			if os.path.exists(logfile):
				self.upload_file(logfile, build_id)

			# Cleanup the files we created.
			pakfire.util.rm(tmpdir)

	def build_source_job(self, build_id, build):
		"""
		Build source packages from a git revision and upload them.
		"""
		# Update the build status on the server.
		self.update_build_status(build_id, "running")

		source = Source(self, **build["source"])

		repo = source.update_revision(build["revision"], build_id=build_id,
			**self.pakfire_args)

		try:
			# Upload all files in the repository.
			for pkg in repo:
				path = os.path.join(pkg.repo.path, pkg.filename)
				self.upload_file(path, build_id)
		finally:
			# Always remove the temporary repository again.
			repo.remove()

	def update_repositories(self, limit=2):
		"""
		Regenerate the repository indexes the server hands out,
		up to "limit" repositories at a time.
		"""
		repos = self.conn.get_repos(limit)

		for repo in repos:
			files = self.conn.get_repo_packages(repo["id"])

			for arch in repo["arches"]:
				path = "/pakfire/repositories/%s/%s/%s" % \
					(repo["distro"]["sname"], repo["name"], arch)

				pakfire.api.repo_create(path, files)

	def create_scratch_build(self, *args, **kwargs):
		# Thin pass-through to the server's scratch-build RPC.
		return self.conn.create_scratch_build(*args, **kwargs)