From: Michael Tremer Date: Thu, 28 Feb 2013 10:32:58 +0000 (+0100) Subject: Update hub. X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=c2902b2994974848ce9d50a8e2aa9b519bce857e;p=pbs.git Update hub. Remove XMLRPC communication backend and replace it with a proper API. --- diff --git a/backend/builders.py b/backend/builders.py index 9dcf7870..87003d7a 100644 --- a/backend/builders.py +++ b/backend/builders.py @@ -126,7 +126,6 @@ class Builder(base.Object): # Cache. self._data = data self._active_jobs = None - self._arches = None self._disabled_arches = None def __cmp__(self, other): @@ -225,40 +224,26 @@ class Builder(base.Object): """ return self.data.time_keepalive - def update_keepalive(self, loadavg, free_space): + def update_keepalive(self, loadavg1=None, loadavg5=None, loadavg15=None, + mem_total=None, mem_free=None, swap_total=None, swap_free=None, + space_free=None): """ Update the keepalive timestamp of this machine. """ - if free_space is None: - free_space = 0 - - self.db.execute("UPDATE builders SET time_keepalive = NOW(), loadavg = %s, \ - free_space = %s WHERE id = %s", loadavg, free_space, self.id) - - logging.debug("Builder %s updated it keepalive status: %s" \ - % (self.name, loadavg)) - - def needs_update(self): - query = self.db.get("SELECT time_updated, NOW() - time_updated \ - AS seconds FROM builders WHERE id = %s", self.id) - - # If there has been no update at all, we will need a new one. - if query.time_updated is None: - return True - - # Require an update after the data is older than 24 hours. - return query.seconds >= 24*3600 - - def update_info(self, arches, cpu_model, cpu_count, memory, pakfire_version=None, host_key_id=None): - # Update architecture information. - self.update_arches(arches) - + self.db.execute("UPDATE builders SET time_keepalive = NOW(), \ + loadavg1 = %s, loadavg5 = %s, loadavg15 = %s, space_free = %s, \ + mem_total = %s, mem_free = %s, swap_total = %s, swap_free = %s \ + WHERE id = %s", loadavg1, loadavg5, loadavg15, space_free, + mem_total, mem_free, swap_total, swap_free, self.id) + + def update_info(self, cpu_model=None, cpu_count=None, cpu_arch=None, cpu_bogomips=None, + pakfire_version=None, host_key=None, os_name=None): # Update all the rest. self.db.execute("UPDATE builders SET time_updated = NOW(), \ - pakfire_version = %s, cpu_model = %s, cpu_count = %s, memory = %s, \ - host_key_id = %s \ - WHERE id = %s", pakfire_version or "", cpu_model, cpu_count, memory, - host_key_id, self.id) + pakfire_version = %s, cpu_model = %s, cpu_count = %s, cpu_arch = %s, \ + cpu_bogomips = %s, host_key_id = %s, os_name = %s WHERE id = %s", + pakfire_version, cpu_model, cpu_count, cpu_arch, cpu_bogomips, + host_key, os_name, self.id) def update_arches(self, arches): # Get all arches this builder does currently support. 
@@ -293,19 +278,6 @@ class Builder(base.Object): self.db.execute("DELETE FROM builders_arches WHERE builder_id = %s \ AND arch_id = %s", self.id, arch.id) - def update_overload(self, overload): - if overload: - overload = "Y" - else: - overload = "N" - - self.db.execute("UPDATE builders SET overload = %s WHERE id = %s", - overload, self.id) - self._data["overload"] = overload - - logging.debug("Builder %s updated it overload status to %s" % \ - (self.name, self.overload)) - def get_enabled(self): return self.status == "enabled" @@ -340,60 +312,20 @@ class Builder(base.Object): if log: self.log(status, user=user) - def get_arches(self, enabled=None): - """ - A list of architectures that are supported by this builder. - """ - if enabled is True: - enabled = "Y" - elif enabled is False: - enabled = "N" - else: - enabled = None - - query = "SELECT arch_id AS id FROM builders_arches WHERE builder_id = %s" - args = [self.id,] - - if enabled: - query += " AND enabled = %s" - args.append(enabled) - - # Get all other arches from the database. - arches = [] - for arch in self.db.query(query, *args): - arch = self.pakfire.arches.get_by_id(arch.id) - arches.append(arch) - - # Save a sorted list of supported architectures. - arches.sort() - - return arches - @property def arches(self): - if self._arches is None: - self._arches = self.get_arches(enabled=True) - - return self._arches - - @property - def disabled_arches(self): - if self._disabled_arches is None: - self._disabled_arches = self.get_arches(enabled=False) - - return self._disabled_arches + if not hasattr(self, "_arches"): + self._arches = ["noarch",] - def set_arch_status(self, arch, enabled): - if enabled: - enabled = "Y" - else: - enabled = "N" + if self.cpu_arch: + res = self.db.query("SELECT build_arch FROM arches_compat \ + WHERE host_arch = %s", self.cpu_arch) - self.db.execute("UPDATE builders_arches SET enabled = %s \ - WHERE builder_id = %s AND arch_id = %s", enabled, self.id, arch.id) + self._arches += [r.build_arch for r in res] + if not self.cpu_arch in self._arches: + self._arches.append(self.cpu_arch) - # Reset the arch cache. 
- self._arches = None + return self._arches def get_build_release(self): return self.data.build_release == "Y" @@ -484,24 +416,32 @@ class Builder(base.Object): def passphrase(self): return self.data.passphrase + # Load average + @property def loadavg(self): - if self.state == "online": - return self.data.loadavg + return ", ".join(["%.2f" % l for l in (self.loadavg1, self.loadavg5, self.loadavg15)]) @property - def load1(self): - try: - load1, load5, load15 = self.loadavg.split(", ") - except: - return None + def loadavg1(self): + return self.data.loadavg1 or 0.0 + + @property + def loadavg5(self): + return self.data.loadavg5 or 0.0 - return load1 + @property + def loadavg15(self): + return self.data.loadavg15 or 0.0 @property def pakfire_version(self): return self.data.pakfire_version or "" + @property + def os_name(self): + return self.data.os_name or "" + @property def cpu_model(self): return self.data.cpu_model or "" @@ -511,16 +451,63 @@ class Builder(base.Object): return self.data.cpu_count @property - def memory(self): - return self.data.memory + def cpu_arch(self): + return self.data.cpu_arch + + @property + def cpu_bogomips(self): + return self.data.cpu_bogomips or 0.0 + + @property + def mem_percentage(self): + if not self.mem_total: + return None + + return self.mem_used * 100 / self.mem_total @property - def free_space(self): - return self.data.free_space or 0 + def mem_total(self): + return self.data.mem_total + + @property + def mem_used(self): + if self.mem_total and self.mem_free: + return self.mem_total - self.mem_free + + @property + def mem_free(self): + return self.data.mem_free + + @property + def swap_percentage(self): + if not self.swap_total: + return None + + return self.swap_used * 100 / self.swap_total + + @property + def swap_total(self): + return self.data.swap_total + + @property + def swap_used(self): + if self.swap_total and self.swap_free: + return self.swap_total - self.swap_free + + @property + def swap_free(self): + return self.data.swap_free + + @property + def space_free(self): + return self.data.space_free @property def overload(self): - return self.data.overload == "Y" + if not self.cpu_count or not self.loadavg1: + return None + + return self.loadavg1 >= self.cpu_count @property def host_key_id(self): @@ -555,17 +542,18 @@ class Builder(base.Object): """ return self.count_active_jobs() >= self.max_jobs - def get_next_jobs(self, arches=None, limit=None): - if arches is None: - arches = self.get_arches() - - return self.pakfire.jobs.get_next(arches=arches, builder=self, - state="pending", limit=limit) - - def get_next_job(self, *args, **kwargs): - kwargs["limit"] = 1 + def get_next_jobs(self, limit=None): + """ + Returns a list of jobs that can be built on this host. + """ + return self.pakfire.jobs.get_next(arches=self.arches, limit=limit) - jobs = self.get_next_jobs(*args, **kwargs) + def get_next_job(self): + """ + Returns the next job in line for this builder. + """ + # Get the first item of all jobs in the list. 
+ jobs = self.pakfire.jobs.get_next(builder=self, state="pending", limit=1) if jobs: return jobs[0] diff --git a/backend/builds.py b/backend/builds.py index fcfb3491..fac2b2bf 100644 --- a/backend/builds.py +++ b/backend/builds.py @@ -1410,47 +1410,25 @@ class Jobs(base.Object): def get_next(self, arches=None, builder=None, limit=None, offset=None, type=None, state=None, states=None, max_tries=None): - if state is None and states is None: - states = ["pending", "new"] + if state and states is None: + states = [state,] - if builder and arches is None: - arches = builder.get_arches() - - query = "SELECT jobs.* FROM jobs \ - JOIN builds ON jobs.build_id = builds.id \ - WHERE \ - (start_not_before IS NULL OR start_not_before <= NOW())" + query = "SELECT * FROM jobs \ + INNER JOIN jobs_queue ON jobs.id = jobs_queue.id" args = [] if arches: - query += " AND jobs.arch_id IN (%s)" % ", ".join(["%s"] * len(arches)) - args.extend([a.id for a in arches]) + query += " AND jobs_queue.arch IN (%s)" % ", ".join(["%s"] * len(arches)) + args.extend(arches) if builder: - #query += " AND (jobs.builder_id = %s OR jobs.builder_id IS NULL)" - #args.append(builder.id) - - # Check out which types of builds this builder builds. - build_types = [] - for build_type in builder.build_types: - if build_type == "release": - build_types.append("(builds.type = 'release' AND jobs.type = 'build')") - elif build_type == "scratch": - build_types.append("(builds.type = 'scratch' AND jobs.type = 'build')") - elif build_type == "test": - build_types.append("jobs.type = 'test'") - - if build_types: - query += " AND (%s)" % " OR ".join(build_types) + query += " AND jobs_queue.designated_builder_id = %s" + args.append(builder.id) if max_tries: query += " AND jobs.max_tries <= %s" args.append(max_tries) - if state: - query += " AND jobs.state = %s" - args.append(state) - if states: query += " AND jobs.state IN (%s)" % ", ".join(["%s"] * len(states)) args.extend(states) @@ -1459,21 +1437,6 @@ class Jobs(base.Object): query += " AND jobs.type = %s" args.append(type) - # Order builds. - # Release builds and scratch builds are more important than test builds. - # Builds are sorted by priority and older builds are preferred. - - query += " ORDER BY \ - CASE \ - WHEN jobs.state = 'pending' THEN 0 \ - WHEN jobs.state = 'new' THEN 1 \ - END, \ - CASE \ - WHEN jobs.type = 'build' THEN 0 \ - WHEN jobs.type = 'test' THEN 1 \ - END, \ - builds.priority DESC, jobs.time_created ASC" - if limit: query += " LIMIT %s" args.append(limit) @@ -1483,6 +1446,9 @@ class Jobs(base.Object): job = self.pakfire.jobs.get_by_id(row.id, row) jobs.append(job) + # Reverse the order of the builds. + jobs.reverse() + return jobs def get_latest(self, arch=None, builder=None, limit=None, age=None, date=None): @@ -1563,6 +1529,12 @@ class Jobs(base.Object): return count + def get_queue_length(self): + res = self.db.get("SELECT COUNT(*) AS count FROM jobs_queue") + + if res: + return res.count + class Job(base.Object): def __init__(self, pakfire, id, data=None): diff --git a/data/templates/builders/detail.html b/data/templates/builders/detail.html index 675604c7..8f2abcce 100644 --- a/data/templates/builders/detail.html +++ b/data/templates/builders/detail.html @@ -137,6 +137,12 @@
 [The diffs for the HTML templates lost their markup in this plain-text extract; only the
 recoverable changes are summarised here.

 data/templates/builders/detail.html:
  - adds an "OS" row showing {{ builder.os_name or _("N/A") }}
  - shows {{ builder.cpu_bogomips or _("N/A") }} in a new "Bogomips" row
  - "Supported architectures" becomes {{ locale.list(builder.arches) }}; the separate list of
    disabled architectures is dropped
  - the plain "Memory" value is replaced by memory and swap usage bars built from
    {{ format_size(builder.mem_used) }}/{{ format_size(builder.mem_total) }} and
    {{ format_size(builder.swap_used) }}/{{ format_size(builder.swap_total) }}, falling back to
    {{ _("No memory information available.") }}
  - "Free disk space" becomes {{ format_size(builder.space_free or 0) }} instead of
    {{ format_size(builder.free_space * 1024**2) }}

 data/templates/builders/edit.html:
  - removes the per-architecture checkboxes together with the hint
    {{ _("Select or deselect the architectures, this builder should build or not.") }}

 data/templates/builders/list.html:
  - the "Architectures" column becomes "Architecture" and shows
    {{ builder.cpu_arch or _("Unknown") }} instead of the list of builder.arches
  - the CPU cell shows {{ format_size(builder.mem_total) }} (when available) instead of
    {{ format_size(builder.memory) }}]

diff --git a/hub/__init__.py b/hub/__init__.py index 748ff5fb..683eb84b 100644 --- a/hub/__init__.py +++ b/hub/__init__.py @@ -12,7 +12,8 @@ import handlers BASEDIR = os.path.join(os.path.dirname(__file__), "..", "data") -# Enable logging +# Read command line +tornado.options.define("debug", default=False, help="Run in debug mode", type=bool) tornado.options.parse_command_line() class Application(tornado.web.Application): @@ -20,7 +21,7 @@ class Application(tornado.web.Application): self.__pakfire = None settings = dict( - debug = False, + debug = tornado.options.options.debug, gzip = True, ) @@ -32,11 +33,39 @@ class Application(tornado.web.Application): self.add_handlers(r"pakfirehub.ipfire.org", [ # Redirect strayed users. - (r"/", handlers.RedirectHandler), - - # API - (r"/builder", handlers.BuilderHandler), - (r"/user", handlers.UserHandler), + #(r"/", handlers.RedirectHandler), + + # Test handlers + (r"/noop", handlers.NoopHandler), + (r"/error/test", handlers.ErrorTestHandler), + (r"/error/test/(\d+)", handlers.ErrorTestHandler), + + # Statistics + (r"/statistics/jobs/queue", handlers.StatsJobsQueueHandler), + + # Builds + (r"/builds/create", handlers.BuildsCreateHandler), + (r"/builds/(.*)", handlers.BuildsGetHandler), + + # Builders + (r"/builders/info", handlers.BuildersInfoHandler), + (r"/builders/jobs/queue", handlers.BuildersJobsQueueHandler), + (r"/builders/jobs/(.*)/addfile/(.*)", handlers.BuildersJobsAddFileHandler), + (r"/builders/jobs/(.*)/buildroot", handlers.BuildersJobsBuildrootHandler), + (r"/builders/jobs/(.*)/state/(.*)", handlers.BuildersJobsStateHandler), + (r"/builders/keepalive", handlers.BuildersKeepaliveHandler), + + # Jobs + (r"/jobs/(.*)", handlers.JobsGetHandler), + + # Packages + (r"/packages/(.*)", handlers.PackagesGetHandler), + + # Uploads + (r"/uploads/create", handlers.UploadsCreateHandler), + (r"/uploads/(.*)/sendchunk", handlers.UploadsSendChunkHandler), + (r"/uploads/(.*)/finished", handlers.UploadsFinishedHandler), + (r"/uploads/(.*)/destroy", handlers.UploadsDestroyHandler), ]) logging.info("Successfully initialied application") diff --git a/hub/handlers.py b/hub/handlers.py index bcafc9e4..d8ba58c9 100644 --- a/hub/handlers.py +++ b/hub/handlers.py @@ -1,164 +1,43 @@ #!/usr/bin/python +import backend + import base64 import hashlib +import json import logging -import os +import time import tornado.web -import uuid -import xmlrpclib - -import backend.builds -from backend.builders import Builder -from backend.builds import Build -from backend.packages import Package -from backend.uploads import Upload -from backend.users import User - -class BaseHandler(tornado.web.RequestHandler): - """ - Handler class that provides very basic things we will need. - """ - @property - def pakfire(self): - """ - Reference to the Pakfire object. - """ - return self.application.pakfire - - @property - def remote_address(self): - """ - Returns the IP address the request came from. - """ - remote_ips = self.request.remote_ip.split(", ") - - return remote_ips[-1] - - -class RedirectHandler(BaseHandler): - """ - This handler redirects from the hub to the main website. - """ - def get(self): - url = self.pakfire.settings.get("baseurl", None) - - # If there was no URL in the database, we cannot do anything. 
- if not url: - raise tornado.web.HTTPError(404) - - self.redirect(url) - -# From: http://blog.joshmarshall.org/2009/10/its-a-twister-now-with-more-xml/ -# -# This is just a very simple implementation from the website above, because -# I badly want to run this software out of the box on any distribution. -# - -def private(func): - # Decorator to make a method, well, private. - class PrivateMethod(object): - def __init__(self): - self.private = True - - __call__ = func - - return PrivateMethod() - - -class XMLRPCHandler(BaseHandler): - """ - Subclass this to add methods -- you can treat them - just like normal methods, this handles the XML formatting. - """ - def post(self): - """ - Later we'll make this compatible with "dot" calls like: - server.namespace.method() - If you implement this, make sure you do something proper - with the Exceptions, i.e. follow the XMLRPC spec. - """ - try: - params, method_name = xmlrpclib.loads(self.request.body) - except: - # Bad request formatting, bad. - raise tornado.web.HTTPError(400) - - if method_name in dir(tornado.web.RequestHandler): - # Pre-existing, not an implemented attribute - raise AttributeError('%s is not implemented.' % method_name) - - try: - method = getattr(self, method_name) - except: - # Attribute doesn't exist - print self - raise AttributeError('%s is not a valid method.' % method_name) - - if not callable(method): - # Not callable, so not a method - raise Exception('Attribute %s is not a method.' % method_name) - if method_name.startswith('_') or \ - ('private' in dir(method) and method.private is True): - # No, no. That's private. - raise Exception('Private function %s called.' % method_name) +class LongPollMixin(object): + def initialize(self): + self._start_time = time.time() - response = method(*params) - response_xml = xmlrpclib.dumps((response,), methodresponse=True, - allow_none=True) + def add_timeout(self, timeout, callback): + deadline = time.time() + timeout - self.set_header("Content-Type", "text/xml") - self.write(response_xml) + return self.application.ioloop.add_timeout(deadline, callback) + def on_connection_close(self): + logging.debug("Connection closed unexpectedly") -class CommonHandler(XMLRPCHandler): - """ - Subclass that provides very basic functions that do not need any - kind of authentication and are accessable by any user/builder. - """ - - def noop(self): - """ - No operation. Just check if the connection is working. - """ - return True - - def test_code(self, error_code=200): - """ - For testing a client. - - This just returns a HTTP response with the given code. - """ - raise tornado.web.HTTPError(error_code) + def connection_closed(self): + return self.request.connection.stream.closed() - def get_my_address(self): - """ - Return the address of the requesting host. + @property + def runtime(self): + return time.time() - self._start_time - This is to discover it through NAT. - """ - return self.remote_address - def get_hub_status(self): +class BaseHandler(LongPollMixin, tornado.web.RequestHandler): + @property + def backend(self): """ - Return some status information about the hub. + Shortcut handler to pakfire instance. """ + return self.application.pakfire - # Return number of pending and running builds. 
- ret = { - "jobs_pending" : self.pakfire.jobs.count(state="pending"), - "jobs_running" : self.pakfire.jobs.count(state="running"), - } - - return ret - - -class AuthHandler(CommonHandler): - def _auth(self, name, password): - raise NotImplementedError - - def get_current_user(self): + def get_basic_auth_credentials(self): """ This handles HTTP Basic authentication. """ @@ -166,156 +45,156 @@ class AuthHandler(CommonHandler): # If no authentication information was provided, we stop here. if not auth_header: - return + return None, None # No basic auth? We cannot handle that. if not auth_header.startswith("Basic "): raise tornado.web.HTTPError(400, "Can only handle Basic auth.") - # Decode the authentication information. - auth_header = base64.decodestring(auth_header[6:]) - try: + # Decode the authentication information. + auth_header = base64.decodestring(auth_header[6:]) + name, password = auth_header.split(":", 1) except: raise tornado.web.HTTPError(400, "Authorization data was malformed") - # Authenticate user to the database. - return self._auth(name, password) + return name, password + def get_current_user(self): + name, password = self.get_basic_auth_credentials() + if name is None: + return -class UserAuthMixin(object): - """ - Mixin to authenticate users. - """ - def _auth(self, username, password): - return self.pakfire.users.auth(username, password) + builder = self.backend.builders.auth(name, password) + if builder: + return builder - @property - def user(self): - """ - Alias for "current_user". - """ - return self.current_user + user = self.backend.users.auth(name, password) + if user: + return user @property def builder(self): - return None + if isinstance(self.current_user, backend.builders.Builder): + return self.current_user - def check_auth(self): - """ - Tell the user if he authenticated successfully. - """ - if self.user: - return True + @property + def user(self): + if isinstance(self.current_user, backend.users.User): + return self.current_user - return False + def get_argument_int(self, *args, **kwargs): + arg = self.get_argument(*args, **kwargs) + try: + return int(arg) + except (TypeError, ValueError): + return None -class BuilderAuthMixin(object): - """ - Mixin to authenticate builders. - """ - def _auth(self, hostname, password): - return self.pakfire.builders.auth(hostname, password) + def get_argument_float(self, *args, **kwargs): + arg = self.get_argument(*args, **kwargs) - @property - def builder(self): - """ - Alias for "current_user". - """ - return self.current_user + try: + return float(arg) + except (TypeError, ValueError): + return None - @property - def user(self): - return None + def get_argument_json(self, *args, **kwargs): + arg = self.get_argument(*args, **kwargs) + if arg: + return json.loads(arg) -class CommonAuthHandler(AuthHandler): - """ - Methods that are usable by both, the real users and the builders - but they require an authentication. - """ - @tornado.web.authenticated - def build_create(self, upload_id, distro_ident, arches): - ## Check if the user has permission to create a build. - # Builders do have the permission to create all kinds of builds. - if isinstance(self.current_user, Builder): - type = "release" - check_for_duplicates = True - # - # Users only have the permission to create scratch builds. - elif isinstance(self.current_user, User) and \ - self.current_user.has_perm("create_scratch_builds"): - type = "scratch" - check_for_duplicates = False - # - # In all other cases, it is not allowed to proceed. 
+ +class NoopHandler(BaseHandler): + def get(self): + if self.builder: + self.write("Welcome to the Pakfire hub, %s!" % self.builder.hostname) + elif self.user: + self.write("Welcome to the Pakfire hub, %s!" % self.user.name) else: - raise tornado.web.HTTPError(403, "Not allowed to create a build.") + self.write("Welcome to the Pakfire hub!") - # Get previously uploaded file to create this build from. - upload = self.pakfire.uploads.get_by_uuid(upload_id) - if not upload: - raise tornado.web.HTTPError(400, "Upload does not exist: %s" % upload_id) - # Check if the uploaded file belongs to this user/builder. - if self.user and not upload.user == self.user: - raise tornado.web.HTTPError(400, "Upload does not belong to this user.") +class ErrorTestHandler(BaseHandler): + def get(self, error_code=200): + """ + For testing a client. - elif self.builder and not upload.builder == self.builder: - raise tornado.web.HTTPError(400, "Upload does not belong to this builder.") + This just returns a HTTP response with the given code. + """ + try: + error_code = int(error_code) + except ValueError: + error_code = 200 - # Get distribution this package should be built for. - distro = self.pakfire.distros.get_by_ident(distro_ident) - if not distro: - distro = self.pakfire.distros.get_default() + raise tornado.web.HTTPError(error_code) - # Open the package that was uploaded earlier and add it to - # the database. Create a new build object from the uploaded package. - ret = backend.builds.import_from_package(self.pakfire, upload.path, - distro=distro, type=type, arches=arches, owner=self.current_user, - check_for_duplicates=check_for_duplicates) - if not ret: - raise tornado.web.HTTPError(500, "Could not create build from package.") +class StatsJobsQueueHandler(BaseHandler): + def get(self): + job_queue_length = self.backend.jobs.get_queue_length() - # Creating the build will move the file to the build directory, - # so we can safely remove the uploaded file. - upload.remove() + ret = { + "length" : job_queue_length, + } + self.write(ret) - # Return a bunch of information about the build back to the user. - pkg, build = ret - return build.info +# Uploads - # Upload processing. +class UploadsCreateHandler(BaseHandler): + """ + Create a new upload object in the database and return a unique ID + to the uploader. + """ @tornado.web.authenticated - def upload_create(self, filename, size, hash): - """ - Create a new upload object in the database and return a unique ID - to the uploader. - """ - upload = Upload.create(self.pakfire, filename, size, hash, - user=self.user, builder=self.builder) + def get(self): + # XXX Check permissions + + filename = self.get_argument("filename") + filesize = self.get_argument_int("filesize") + filehash = self.get_argument("hash") + + upload = backend.uploads.Upload.create(self.backend, filename, filesize, + filehash, user=self.user, builder=self.builder) - return upload.uuid + self.finish(upload.uuid) + +class UploadsSendChunkHandler(BaseHandler): @tornado.web.authenticated - def upload_chunk(self, upload_id, data): - upload = self.pakfire.uploads.get_by_uuid(upload_id) + def post(self, upload_id): + upload = self.backend.uploads.get_by_uuid(upload_id) if not upload: raise tornado.web.HTTPError(404, "Invalid upload id.") if not upload.builder == self.builder: raise tornado.web.HTTPError(403, "Uploading an other host's file.") - upload.append(data.data) + chksum = self.get_argument("chksum") + data = self.get_argument("data") + + # Decode data. 
+ data = base64.b64decode(data) + + # Calculate hash and compare. + h = hashlib.new("sha512") + h.update(data) + + if not chksum == h.hexdigest(): + raise tornado.web.HTTPError(400, "Checksum mismatch") + + # Append the data to file. + upload.append(data) + +class UploadsFinishedHandler(BaseHandler): @tornado.web.authenticated - def upload_finished(self, upload_id): - upload = self.pakfire.uploads.get_by_uuid(upload_id) + def get(self, upload_id): + upload = self.backend.uploads.get_by_uuid(upload_id) if not upload: raise tornado.web.HTTPError(404, "Invalid upload id.") @@ -329,16 +208,21 @@ class CommonAuthHandler(AuthHandler): # as finished and send True to the client. if ret: upload.finished() - return True + self.finish("OK") + + return # In case the download was corrupted or incomplete, we delete it # and tell the client to start over. upload.remove() - return False + self.finish("ERROR: CORRUPTED OR INCOMPLETE FILE") + + +class UploadsDestroyHandler(BaseHandler): @tornado.web.authenticated - def upload_remove(self, upload_id): - upload = self.pakfire.uploads.get_by_uuid(upload_id) + def get(self, upload_id): + upload = self.backend.uploads.get_by_uuid(upload_id) if not upload: raise tornado.web.HTTPError(404, "Invalid upload id.") @@ -349,209 +233,176 @@ class CommonAuthHandler(AuthHandler): upload.remove() -class UserHandler(UserAuthMixin, CommonAuthHandler): - """ - Subclass with methods that are only accessable by users. - """ - @tornado.web.authenticated - def get_user_profile(self): - """ - Send a bunch of account information to the user. - """ - user = self.current_user +# Builds - ret = { - "name" : user.name, - "realname" : user.realname, - "role" : user.state, - "email" : user.email, - "registered" : user.registered, - } +class BuildsCreateHandler(BaseHandler): + @tornado.web.authenticated + def get(self): + # Get the upload ID of the package file. + upload_id = self.get_argument("upload_id") - return ret + # Get the identifier of the distribution we build for. + distro_ident = self.get_argument("distro") - @tornado.web.authenticated - def get_builds(self, type=None, limit=10, offset=0): - if not type in (None, "scratch", "release"): - return + # Get a list of arches to build for. + arches = self.get_argument("arches", None) + if arches == "": + arches = None - builds = self.pakfire.builds.get_by_user_iter(self.current_user, type=type) + # Process build type. + build_type = self.get_argument("build_type") + if build_type == "release": + check_for_duplicates = True + elif build_type == "scratch": + check_for_duplicates = False + else: + raise tornado.web.HTTPError(400, "Invalid build type") - try: - counter = limit + offset - except ValueError: - return [] + ## Check if the user has permission to create a build. + # Users only have the permission to create scratch builds. + if self.user and not build_type == "scratch": + raise tornado.web.HTTPError(403, "Users are only allowed to upload scratch builds") - ret = [] - for build in builds: - build = self.get_build(build.id) + # Get previously uploaded file to create this build from. + upload = self.backend.uploads.get_by_uuid(upload_id) + if not upload: + raise tornado.web.HTTPError(400, "Upload does not exist: %s" % upload_id) - ret.append(build) + # Check if the uploaded file belongs to this user/builder. 
+ if self.user and not upload.user == self.user: + raise tornado.web.HTTPError(400, "Upload does not belong to this user.") - counter -= 1 - if counter <= 0: - break + elif self.builder and not upload.builder == self.builder: + raise tornado.web.HTTPError(400, "Upload does not belong to this builder.") - return ret + # Get distribution this package should be built for. + distro = self.backend.distros.get_by_ident(distro_ident) + if not distro: + distro = self.backend.distros.get_default() - @tornado.web.authenticated - def get_build(self, build_id): - # Check for empty input. - if not build_id: - return None + # Open the package that was uploaded earlier and add it to + # the database. Create a new build object from the uploaded package. + args = { + "arches" : arches, + "check_for_duplicates" : check_for_duplicates, + "distro" : distro, + "type" : build_type, + } + if self.user: + args["owner"] = self.user - build = self.pakfire.builds.get_by_uuid(build_id) - if not build: - return {} + try: + pkg, build = backend.builds.import_from_package(self.backend, upload.path, **args) - ret = { - # Identity information. - "uuid" : build.uuid, - "type" : build.type, - "state" : build.state, # XXX do we actually use this? + except: + # Raise any exception. + raise - "name" : build.name, - "sup_arches" : build.supported_arches, - "jobs" : [self.get_job(j.uuid) for j in build.jobs], + else: + # Creating the build will move the file to the build directory, + # so we can safely remove the uploaded file. + upload.remove() - "severity" : build.severity, - "priority" : build.priority, + # Send the build ID back to the user. + self.finish(build.uuid) - # The source package of this build. - "pkg_id" : build.pkg.uuid, - "distro" : build.distro.id, - "repo" : None, +class BuildsGetHandler(BaseHandler): + def get(self, build_uuid): + build = self.backend.builds.get_by_uuid(build_uuid) + if not build: + raise tornado.web.HTTPError(404, "Could not find build: %s" % build_uuid) - "time_created" : build.created, + ret = { + "distro" : build.distro.identifier, + "jobs" : [j.uuid for j in build.jobs], + "name" : build.name, + "package" : build.pkg.uuid, + "priority" : build.priority, "score" : build.credits, + "severity" : build.severity, + "state" : build.state, + "sup_arches" : build.supported_arches, + "time_created" : build.created.isoformat(), + "type" : build.type, + "uuid" : build.uuid, } # If the build is in a repository, update that bit. if build.repo: - ret["repo"] = build.repo.id - - return ret - - @tornado.web.authenticated - def get_latest_jobs(self): - jobs = [] - - for job in self.pakfire.jobs.get_latest(): - job = self.get_job(job.uuid) - if job: - jobs.append(job) + ret["repo"] = build.repo.identifier - return jobs + self.finish(ret) - @tornado.web.authenticated - def get_active_jobs(self, host_id=None): - jobs = [] - - for job in self.pakfire.jobs.get_active(host_id=host_id): - job = self.get_job(job.uuid) - if job: - jobs.append(job) - return jobs +# Jobs - @tornado.web.authenticated - def get_job(self, job_id): - job = self.pakfire.jobs.get_by_uuid(job_id) +class JobsGetHandler(BaseHandler): + def get(self, job_uuid): + job = self.backend.jobs.get_by_uuid(job_uuid) if not job: - return + raise tornado.web.HTTPError(404, "Could not find job: %s" % job_uuid) + + # Check if user is allowed to view this job. + if job.build.public == False: + if not self.user: + raise tornado.web.HTTPError(401) - # XXX check if user is allowed to view this job. 
+ # Check if an authenticated user has permission to see this build. + if not job.build.has_perm(self.user): + raise tornado.web.HTTPError(403) ret = { - # Identity information. - "uuid" : job.uuid, - "type" : job.type, - - # Name, state, architecture. - "name" : job.name, - "state" : job.state, - "arch" : job.arch.name, - - # Information about the build this job lives in. - "build_id" : job.build.uuid, - - # The package that is built in this job. - "pkg_id" : job.pkg.uuid, - "packages" : [self.get_package(p.uuid) for p in job.packages], - - # The builder that builds this job. - "builder_id" : job.builder_id, - - # Time information. - "duration" : job.duration, - "time_created" : job.time_created, - "time_started" : job.time_started, - "time_finished" : job.time_finished, + "arch" : job.arch.name, + "build" : job.build.uuid, + "builder" : job.builder.hostname, + "duration" : job.duration, + "name" : job.name, + "packages" : [p.uuid for p in job.packages], + "state" : job.state, + "time_created" : job.time_created.isoformat(), + "type" : job.type, + "uuid" : job.uuid, } - return ret + if job.time_started: + ret["time_started"] = job.time_started.isoformat() - @tornado.web.authenticated - def get_builders(self): - builders = [] - - for builder in self.pakfire.builders.get_all(): - builder = self.get_builder(builder.id) - if builder: - builders.append(builder) - - return builders - - @tornado.web.authenticated - def get_builder(self, builder_id): - builder = self.pakfire.builders.get_by_id(builder_id) - if not builder: - return - - ret = { - "name" : builder.name, - "description" : builder.description, - "state" : builder.state, + if job.time_finished: + ret["time_finished"] = job.time_finished.isoformat() - "arches" : [a.name for a in builder.arches], - "disabled" : builder.disabled, + self.finish(ret) - "cpu_model" : builder.cpu_model, - "cpu_count" : builder.cpu_count, - "memory" : builder.memory / 1024, - - "active_jobs" : [j.uuid for j in builder.get_active_jobs()], - } - return ret +# Packages - @tornado.web.authenticated - def get_package(self, pkg_id): - pkg = self.pakfire.packages.get_by_uuid(pkg_id) +class PackagesGetHandler(BaseHandler): + def get(self, package_uuid): + pkg = self.backend.packages.get_by_uuid(package_uuid) if not pkg: - return + raise tornado.web.HTTPError(404, "Could not find package: %s" % package_uuid) ret = { - "uuid" : pkg.uuid, - "name" : pkg.name, - "epoch" : pkg.epoch, - "version" : pkg.version, - "release" : pkg.release, "arch" : pkg.arch.name, - "supported_arches" : pkg.supported_arches, - "type" : pkg.type, + "build_id" : pkg.build_id, + "build_host" : pkg.build_host, + "build_time" : pkg.build_time.isoformat(), + "description" : pkg.description, + "epoch" : pkg.epoch, + "filesize" : pkg.filesize, "friendly_name" : pkg.friendly_name, "friendly_version" : pkg.friendly_version, "groups" : pkg.groups, + "hash_sha512" : pkg.hash_sha512, "license" : pkg.license, - "url" : pkg.url, - "summary" : pkg.summary, - "description" : pkg.description, - + "name" : pkg.name, + "release" : pkg.release, "size" : pkg.size, - "filesize" : pkg.filesize, - "hash_sha512" : pkg.hash_sha512, + "summary" : pkg.summary, + "type" : pkg.type, + "url" : pkg.url, + "uuid" : pkg.uuid, + "version" : pkg.version, # Dependencies. "prerequires" : pkg.prerequires, @@ -559,93 +410,101 @@ class UserHandler(UserAuthMixin, CommonAuthHandler): "provides" : pkg.provides, "obsoletes" : pkg.obsoletes, "conflicts" : pkg.conflicts, - - # Build infos. 
- "build_id" : pkg.build_id, - "build_host" : pkg.build_host, - "build_time" : pkg.build_time, } - if isinstance(pkg.maintainer, User): + if pkg.type == "source": + ret["supported_arches"] = pkg.supported_arches + + if isinstance(pkg.maintainer, backend.users.User): ret["maintainer"] = "%s <%s>" % (pkg.maintainer.realname, pkg.maintainer.email) elif pkg.maintainer: ret["maintainer"] = pkg.maintainer if pkg.distro: - ret["distro_id"] = pkg.distro.id - else: - ret["distro_id"] = None + ret["distro"] = pkg.distro.identifier - return ret + self.finish(ret) -class BuilderHandler(BuilderAuthMixin, CommonAuthHandler): - """ - Subclass with methods that are only accessable by builders. - """ - @tornado.web.authenticated - def send_keepalive(self, loadavg, overload, free_space=None): - """ - The client just says hello and we tell it if we it needs to - send some information about itself. - """ - self.builder.update_keepalive(loadavg, free_space) - - # Pass overload argument. - if overload in (True, False): - self.builder.update_overload(overload) +# Builders - # Tell the client if it should send an update of its infos. - return self.builder.needs_update() +class BuildersBaseHandler(BaseHandler): + def prepare(self): + # The request must come from an authenticated buider. + if not self.builder: + raise tornado.web.HTTPError(403) - @tornado.web.authenticated - def send_update(self, arches, cpu_model, cpu_count, memory, pakfire_version=None, host_key_id=None): - self.builder.update_info(arches, cpu_model, cpu_count, memory * 1024, - pakfire_version=pakfire_version, host_key_id=host_key_id) +class BuildersInfoHandler(BuildersBaseHandler): @tornado.web.authenticated - def build_get_job(self, arches): - # Disabled buildes do not get any jobs. - if self.builder.disabled: - logging.debug("Host requested job but is disabled: %s" % self.builder.name) - return + def post(self): + args = { + # CPU info + "cpu_model" : self.get_argument("cpu_model", None), + "cpu_count" : self.get_argument("cpu_count", None), + "cpu_arch" : self.get_argument("cpu_arch", None), + "cpu_bogomips" : self.get_argument("cpu_bogomips", None), + + # Pakfire + "pakfire_version" : self.get_argument("pakfire_version", None), + "host_key" : self.get_argument("host_key", None), + + # OS + "os_name" : self.get_argument("os_name", None), + } + self.builder.update_info(**args) - # Check if host has already too many simultaneous jobs. - if self.builder.too_many_jobs: - logging.debug("Host has already too many jobs: %s" % self.builder.name) - return - # Automatically add noarch if not already present. - if not "noarch" in arches: - arches.append("noarch") +class BuildersKeepaliveHandler(BuildersBaseHandler): + @tornado.web.authenticated + def post(self): + args = { + # Load average + "loadavg1" : self.get_argument_float("loadavg1", None), + "loadavg5" : self.get_argument_float("loadavg5", None), + "loadavg15" : self.get_argument_float("loadavg15", None), + + # Memory + "mem_total" : self.get_argument_int("mem_total", None), + "mem_free" : self.get_argument_int("mem_free", None), + + # swap + "swap_total" : self.get_argument_int("swap_total", None), + "swap_free" : self.get_argument_int("swap_free", None), + + # Disk space + "space_free" : self.get_argument_int("space_free", None), + } + self.builder.update_keepalive(**args) - # Get all supported architectures. 
- supported_arches = [] - for arch_name in arches: - arch = self.pakfire.arches.get_by_name(arch_name) - if not arch: - logging.debug("Unsupported architecture: %s" % arch_name) - continue + self.finish("OK") - # Skip disabled arches. - if arch in self.builder.disabled_arches: - continue - supported_arches.append(arch) +class BuildersJobsQueueHandler(BuildersBaseHandler): + @tornado.web.asynchronous + @tornado.web.authenticated + def get(self): + self.callback() - if not supported_arches: - logging.warning("Host does not support any arches: %s" % self.builder.name) + def callback(self): + # Break if the connection has been closed in the mean time. + if self.connection_closed(): + logging.warning("Connection closed") return - # Get the next job for this builder. - job = self.builder.get_next_job(supported_arches) + # Check if there is a job for us. + job = self.builder.get_next_job() + + # Got no job, wait and try again. if not job: - logging.debug("Could not find a buildable job for %s" % self.builder.name) - return + # Check if we have been running for too long. + if self.runtime >= self.max_runtime: + logging.debug("Exceeded max. runtime. Finishing request.") + return self.finish() - # We got a buildable job, so let's start... - logging.debug("%s is going to build %s" % (self.builder.name, job)) - build = job.build + # Try again in a jiffy. + self.add_timeout(self.heartbeat, self.callback) + return try: # Set job to dispatching state. @@ -657,22 +516,36 @@ class BuilderHandler(BuilderAuthMixin, CommonAuthHandler): ret = { "id" : job.uuid, "arch" : job.arch.name, - "source_url" : build.source_download, - "source_hash_sha512" : build.source_hash_sha512, + "source_url" : job.build.source_download, + "source_hash_sha512" : job.build.source_hash_sha512, "type" : job.type, "config" : job.get_config(), } # Send build information to the builder. - return ret - + self.finish(ret) except: # If anything went wrong, we reset the state. job.state = "pending" raise - def build_job_update_state(self, job_id, state, message=None): - job = self.pakfire.jobs.get_by_uuid(job_id) + @property + def heartbeat(self): + return 15 # 15 seconds + + @property + def max_runtime(self): + timeout = self.get_argument_int("timeout", None) + if timeout: + return timeout - self.heartbeat + + return 300 # 5 min + + +class BuildersJobsStateHandler(BuildersBaseHandler): + @tornado.web.authenticated + def post(self, job_uuid, state): + job = self.backend.jobs.get_by_uuid(job_uuid) if not job: raise tornado.web.HTTPError(404, "Invalid job id.") @@ -681,15 +554,39 @@ class BuilderHandler(BuilderAuthMixin, CommonAuthHandler): # Save information to database. job.state = state + + message = self.get_argument("message", None) job.update_message(message) - return True + self.finish("OK") + + +class BuildersJobsBuildrootHandler(BuildersBaseHandler): + @tornado.web.authenticated + def post(self, job_uuid): + job = self.backend.jobs.get_by_uuid(job_uuid) + if not job: + raise tornado.web.HTTPError(404, "Invalid job id.") + + if not job.builder == self.builder: + raise tornado.web.HTTPError(403, "Altering another builder's build.") - def build_job_add_file(self, job_id, upload_id, type): + # Get buildroot. 
+ buildroot = self.get_argument_json("buildroot", None) + if buildroot: + job.save_buildroot(buildroot) + + self.finish("OK") + + +class BuildersJobsAddFileHandler(BuildersBaseHandler): + @tornado.web.authenticated + def post(self, job_uuid, upload_id): + type = self.get_argument("type") assert type in ("package", "log") # Fetch job we are working on and check if it is actually ours. - job = self.pakfire.jobs.get_by_uuid(job_id) + job = self.backend.jobs.get_by_uuid(job_uuid) if not job: raise tornado.web.HTTPError(404, "Invalid job id.") @@ -697,7 +594,7 @@ class BuilderHandler(BuilderAuthMixin, CommonAuthHandler): raise tornado.web.HTTPError(403, "Altering another builder's job.") # Fetch uploaded file object and check we uploaded it ourself. - upload = self.pakfire.uploads.get_by_uuid(upload_id) + upload = self.backend.uploads.get_by_uuid(upload_id) if not upload: raise tornado.web.HTTPError(404, "Invalid upload id.") @@ -705,7 +602,7 @@ class BuilderHandler(BuilderAuthMixin, CommonAuthHandler): raise tornado.web.HTTPError(403, "Using an other host's file.") # Remove all files that have to be deleted, first. - self.pakfire.cleanup_files() + self.backend.cleanup_files() try: job.add_file(upload.path) @@ -714,53 +611,4 @@ class BuilderHandler(BuilderAuthMixin, CommonAuthHandler): # Finally, remove the uploaded file. upload.remove() - return True - - def build_job_crashed(self, job_id, exitcode): - job = self.pakfire.jobs.get_by_uuid(job_id) - if not job: - raise tornado.web.HTTPError(404, "Invalid job id.") - - if not job.builder == self.builder: - raise tornado.web.HTTPError(403, "Altering another builder's build.") - - # Set build into aborted state. - job.state = "aborted" - - # Set aborted state. - job.aborted_state = exitcode - - def build_jobs_aborted(self, job_ids): - """ - Returns all aborted job ids from the input list. - """ - aborted_jobs = [] - - for job_id in job_ids: - job = self.pakfire.jobs.get_by_uuid(job_id) - if not job: - logging.debug("Unknown job id: %s" % job_id) - continue - - # Check if we own this job. - if not job.builder == self.builder: - logging.debug("Job %s belongs to another builder." % job_id) - continue - - if job.state == "aborted": - aborted_jobs.append(job.uuid) - - return aborted_jobs - - def build_upload_buildroot(self, job_id, pkgs): - """ - Saves the buildroot the builder sends. - """ - job = self.pakfire.jobs.get_by_uuid(job_id) - if not job: - raise tornado.web.HTTPError(404, "Invalid job id.") - - if not job.builder == self.builder: - raise tornado.web.HTTPError(403, "Altering another builder's build.") - - job.save_buildroot(pkgs) + self.finish("OK") diff --git a/web/handlers_builders.py b/web/handlers_builders.py index a4bd4714..771b2d4a 100644 --- a/web/handlers_builders.py +++ b/web/handlers_builders.py @@ -93,17 +93,6 @@ class BuilderEditHandler(BaseHandler): max_jobs = 1 builder.max_jobs = max_jobs - - for arch in builder.get_arches(): - builder.set_arch_status(arch, False) - - for arch in self.get_arguments("arches", []): - arch = self.pakfire.arches.get_by_name(arch) - if not arch: - continue - - builder.set_arch_status(arch, True) - self.redirect("/builder/%s" % builder.hostname)
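
The new hub exposes each operation as its own HTTP route, authenticated via HTTP Basic auth
(see get_basic_auth_credentials() and the route table in hub/__init__.py), instead of
dispatching XMLRPC method calls to a single endpoint. Below is a minimal sketch of a
builder-side client for the keepalive and job-queue endpoints; the hub URL, the credentials
and the fixed memory/disk figures are placeholders, not values taken from this repository.

import os
import requests

BASE_URL = "https://pakfirehub.ipfire.org"
AUTH = ("builder1.example.org", "secret")   # HTTP Basic credentials (placeholders)

def send_keepalive():
    """Report load, memory, swap and disk data (POST /builders/keepalive)."""
    loadavg1, loadavg5, loadavg15 = os.getloadavg()

    requests.post(BASE_URL + "/builders/keepalive", auth=AUTH, data={
        "loadavg1"   : loadavg1,
        "loadavg5"   : loadavg5,
        "loadavg15"  : loadavg15,
        # A real client would read these from the system; fixed numbers keep
        # the example short.
        "mem_total"  : 2048 * 1024**2,
        "mem_free"   : 512 * 1024**2,
        "space_free" : 10 * 1024**3,
    }).raise_for_status()

def wait_for_job(timeout=300):
    """Long-poll the job queue (GET /builders/jobs/queue).

    The hub keeps the request open, re-checking for work every ~15 seconds,
    and either returns a JSON job description (id, arch, source_url, ...)
    or an empty response once the timeout is exceeded.
    """
    r = requests.get(BASE_URL + "/builders/jobs/queue", auth=AUTH,
        params={"timeout": timeout}, timeout=timeout + 30)
    r.raise_for_status()

    return r.json() if r.content else None

if __name__ == "__main__":
    send_keepalive()

    job = wait_for_job()
    if job:
        print("Got job %s for arch %s" % (job["id"], job["arch"]))

Because BuildersJobsQueueHandler only finishes the request once a job is available or its
max_runtime is exceeded, a client can simply loop over wait_for_job() instead of polling the
hub on a fixed interval.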
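
Uploads follow a simple chunked protocol: /uploads/create registers the file and returns a
UUID, /uploads/<uuid>/sendchunk appends base64-encoded chunks that the hub verifies against a
per-chunk SHA-512 checksum, and /uploads/<uuid>/finished triggers the final check. A sketch of
the client side, under the same placeholder assumptions as above (the digest algorithm that
Upload.create expects for the whole file is not visible in this diff; SHA-1 is assumed here):

import base64
import hashlib
import os
import requests

BASE_URL = "https://pakfirehub.ipfire.org"          # placeholder, as above
AUTH = ("builder1.example.org", "secret")           # placeholder, as above
CHUNK_SIZE = 1024 * 1024                            # 1 MiB per request (arbitrary choice)

def upload_file(path):
    filesize = os.path.getsize(path)

    # Digest of the whole file for /uploads/create (algorithm assumed, see above).
    filehash = hashlib.sha1()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(CHUNK_SIZE), b""):
            filehash.update(chunk)

    # Register the upload and receive its UUID.
    r = requests.get(BASE_URL + "/uploads/create", auth=AUTH, params={
        "filename" : os.path.basename(path),
        "filesize" : filesize,
        "hash"     : filehash.hexdigest(),
    })
    r.raise_for_status()
    upload_id = r.text.strip()

    # Send the payload chunk by chunk; the hub decodes each chunk and checks it
    # against the SHA-512 checksum before appending it to the file.
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(CHUNK_SIZE), b""):
            requests.post(BASE_URL + "/uploads/%s/sendchunk" % upload_id,
                auth=AUTH, data={
                    "chksum" : hashlib.sha512(chunk).hexdigest(),
                    "data"   : base64.b64encode(chunk),
                }).raise_for_status()

    # Let the hub verify the completed file; it answers "OK" on success.
    r = requests.get(BASE_URL + "/uploads/%s/finished" % upload_id, auth=AUTH)
    r.raise_for_status()

    return upload_id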