#!/usr/bin/python3
import asyncio
-import hashlib
import json
import logging
import multiprocessing
-import os
import setproctitle
import signal
import socket
-import sys
-import tempfile
-import time
-
-import pakfire.util
from . import _pakfire
from . import config
Handle signal SIGTERM.
"""
self.shutdown()
-
def execute_job(self, job):
    """
    Run a single build job end-to-end and report every state
    transition back to the hub.

    Steps: download the source package, verify its SHA-512 checksum
    (when one is provided), run the build, upload the resulting
    packages and the build log, then clean up the temporary files.

    NOTE(review): several names used here (`log`, `http`, `BUFFER_SIZE`,
    `DownloadError`, `DependencyError`) are not visible in this chunk —
    presumably defined elsewhere in the module; verify before relying
    on this documentation.
    """
    log.debug("Executing job: %s" % job)

    # Call the function that processes the build and try to catch general
    # exceptions and report them to the server.
    # If everything goes okay, we tell this the server, too.
    try:
        # Create a temporary file and a directory for the resulting files.
        tmpdir = tempfile.mkdtemp()
        tmpfile = os.path.join(tmpdir, os.path.basename(job.source_url))
        logfile = os.path.join(tmpdir, "build.log")

        # Create pakfire configuration instance.
        config = pakfire.config.Config()
        config.parse(job.config)

        # Create pakfire instance.
        p = None
        try:
            # PakfireBuilder has been removed
            #p = pakfire.base.PakfireBuilder(config=config, arch=job.arch)

            # Download the source package.
            client = http.Client()
            client.retrieve(job.source_url, tmpfile)

            # Check if the download checksum matches (if provided).
            if job.source_hash_sha512:
                h = hashlib.new("sha512")
                f = open(tmpfile, "rb")
                while True:
                    buf = f.read(BUFFER_SIZE)
                    if not buf:
                        break

                    h.update(buf)
                f.close()

                if not job.source_hash_sha512 == h.hexdigest():
                    raise DownloadError("Hash check did not succeed.")

            # Create a new instance of a build environment.
            #build = pakfire.builder.BuildEnviron(p, tmpfile,
            #    release_build=True, build_id=job.id, logfile=logfile)

            # NOTE(review): `build` is never assigned — its construction is
            # commented out above (BuildEnviron/PakfireBuilder removal), so
            # `build.start()` below raises NameError at runtime. This path
            # looks dead; confirm whether the whole method should go.
            try:
                # Create the build environment.
                build.start()

                # Update the build status on the server.
                self.upload_buildroot(job, build.installed_packages)
                self.update_state(job, "running")

                # Run the build (without install test).
                build.build(install_test=False)

                # Copy the created packages to the tempdir.
                build.copy_result(tmpdir)

            finally:
                # Cleanup the build environment.
                build.stop()

            # Build is finished; we are going to upload the files.
            self.update_state(job, "uploading")

            # Walk through the result directory and upload all (binary) files.
            # Skip that for test builds.
            if not job.type == "test":
                for dir, subdirs, files in os.walk(tmpdir):
                    for file in files:
                        file = os.path.join(dir, file)
                        # The log and the source package are uploaded
                        # separately (or not at all).
                        if file in (logfile, tmpfile,):
                            continue

                        self.upload_file(job, file, "package")

        except DependencyError as e:
            message = "%s: %s" % (e.__class__.__name__, e)
            self.update_state(job, "dependency_error", message)
            raise

        except DownloadError as e:
            message = "%s: %s" % (e.__class__.__name__, e)
            self.update_state(job, "download_error", message)
            raise

        finally:
            if p:
                p.destroy()

            # Upload the logfile in any case and if it exists.
            if os.path.exists(logfile):
                self.upload_file(job, logfile, "log")

            # Cleanup the files we created.
            pakfire.util.rm(tmpdir)

    except DependencyError:
        # This has already been reported.
        raise

    except (DownloadError,):
        # Do not take any further action for these exceptions.
        pass

    except (KeyboardInterrupt, SystemExit):
        self.update_state(job, "aborted")

    except Exception as e:
        # Format the exception and send it to the server.
        message = "%s: %s" % (e.__class__.__name__, e)

        self.update_state(job, "failed", message)
        raise

    else:
        self.update_state(job, "finished")
-
def update_state(self, job, state, message=None):
    """
    Report the state of a build job back to the hub.

    An optional human-readable message (e.g. a formatted exception)
    may accompany the state change; it is sent as an empty string
    when omitted.
    """
    payload = {
        "message" : message or "",
    }

    url = "/builders/jobs/%s/state/%s" % (job.id, state)
    self.hub._request(url, method="POST", data=payload)
-
def upload_file(self, job, filename, type):
    """
    Upload a local file to the hub and attach it to the given job.

    The file must exist and be either a built package or a build log.
    """
    assert os.path.exists(filename)
    assert type in ("package", "log")

    # Push the raw file data first; the hub hands back an upload ID
    # which is then linked to the job.
    upload_id = self.hub.upload_file(filename)

    payload = {
        "type" : type,
    }

    self.hub._request("/builders/jobs/%s/addfile/%s" % (job.id, upload_id),
        method="POST", data=payload)
-
def upload_buildroot(self, job, installed_packages):
    """
    Send the package list of the build root to the hub.

    Each installed package is reported as a (friendly_name, uuid)
    pair, JSON-encoded into a single form field.
    """
    buildroot = [(p.friendly_name, p.uuid) for p in installed_packages]

    payload = {
        "buildroot" : json.dumps(buildroot),
    }

    self.hub._request("/builders/jobs/%s/buildroot" % job.id, method="POST", data=payload)