git.ipfire.org Git - pbs.git/commitdiff
Merge branch 'master' into bootstrap4
author: Jonatan Schlag <jonatan.schlag@ipfire.org>
Tue, 12 Dec 2017 11:28:49 +0000 (12:28 +0100)
committer: Jonatan Schlag <jonatan.schlag@ipfire.org>
Tue, 12 Dec 2017 11:28:49 +0000 (12:28 +0100)
17 files changed:
Dockerfile.in
Makefile.am
src/buildservice/builders.py
src/buildservice/packages.py
src/buildservice/repository.py
src/buildservice/users.py
src/database.sql
src/hub/__init__.py
src/hub/handlers.py
src/misc/lets-encrypt-x3-cross-signed.pem [new file with mode: 0644]
src/templates/build-index.html
src/templates/build-list.html
src/templates/mirrors/detail.html
src/templates/repository-detail.html
src/web/__init__.py
src/web/builds.py
src/web/handlers.py

index b9200d85626cdfad62295c2742678181cc1bb9d9..263ab7f6f52635e9ccaf3156fa37f8c8216d5b76 100644 (file)
@@ -12,6 +12,7 @@ RUN yum install -y \
        autoconf \
        intltool \
        make \
+       openssl-perl \
        python2-pip \
        python-daemon \
        python-ldap \
@@ -29,6 +30,11 @@ RUN pip install geoip2
 RUN groupadd -g 2000 pbs && \
        useradd --system --no-create-home --shell /bin/bash --uid 2000 --gid 2000 pbs
 
+# Install LDAP certificate
+RUN mkdir -p /etc/openldap/cacerts
+COPY src/misc/lets-encrypt-x3-cross-signed.pem /etc/openldap/cacerts/
+RUN c_rehash -v /etc/openldap/cacerts
+
 # Copy the source code
 COPY . /build/pakfire-build-service
 
index f1b592b1e7f7860b9022e7dd54345b471edbf677..053cb895f174671f0680b1523dc6feda86b80a2f 100644 (file)
@@ -459,6 +459,7 @@ dist_geoip_DATA = \
 geoipdir = $(datadir)/geoip
 
 EXTRA_DIST += \
+       src/misc/lets-encrypt-x3-cross-signed.pem \
        src/tools/dump-database-schema.sh \
        src/tools/update-geoip-database.sh
 
index 59c6c72295e74ab410de482877a3ff153f50a83b..97de6121db88f74baf49b18183a8b5a91410329e 100644 (file)
@@ -183,7 +183,7 @@ class Builder(base.DataObject):
                """
                        Returns True if the builder is online
                """
-               return self.keepalive >= datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
+               return self.online_until >= datetime.datetime.utcnow()
 
        @property
        def keepalive(self):
@@ -204,6 +204,11 @@ class Builder(base.DataObject):
                        WHERE id = %s", loadavg1, loadavg5, loadavg15, space_free,
                        mem_total, mem_free, swap_total, swap_free, self.id)
 
+       def set_online_until(self, online_until):
+               self._set_attribute("online_until", online_until)
+
+       online_until = property(lambda s: s.data.online_until, set_online_until)
+
        def update_info(self, cpu_model=None, cpu_count=None, cpu_arch=None, cpu_bogomips=None,
                        pakfire_version=None, host_key=None, os_name=None):
                # Update all the rest.
index de9ce1aeaad9465ddddd7f8b09b3b04b74ce5487..a322dd612b7509b2c067e7d321e2902029c9a989 100644 (file)
@@ -133,28 +133,13 @@ class Packages(base.Object):
 
                        This function does not work for UUIDs or filenames.
                """
-               query = "SELECT * FROM packages \
-                       WHERE type = %s AND ( \
-                               name LIKE %s OR \
-                               summary LIKE %s OR \
-                               description LIKE %s \
-                       ) \
-                       GROUP BY name"
-
                pattern = "%%%s%%" % pattern
-               args = ("source", pattern, pattern, pattern)
-
-               res = self.db.query(query, *args)
-
-               pkgs = []
-               for row in res:
-                       pkg = Package(self.backend, row.id, row)
-                       pkgs.append(pkg)
 
-                       if limit and len(pkgs) >= limit:
-                               break
+               packages = self._get_packages("SELECT * FROM packages \
+                       WHERE type = %s AND (name LIKE %s OR summary LIKE %s OR description LIKE %s)",
+                       "source", pattern, pattern, pattern)
 
-               return pkgs
+               return list(packages)
 
        def search_by_filename(self, filename, limit=None):
                query = "SELECT filelists.* FROM filelists \
index 0f6326659e9b547d1a0fc4564f3ff0e361187f48..f955e7201f597a6bb01e8d29855fc9680ae7b7d5 100644 (file)
@@ -468,12 +468,14 @@ class Repository(base.DataObject):
        def get_build_times(self):
                times = []
                for arch in self.arches:
+                       if arch == "src":
+                               continue
+
                        time = self.db.get("SELECT SUM(jobs.time_finished - jobs.time_started) AS time FROM jobs \
                                JOIN builds ON builds.id = jobs.build_id \
                                JOIN repositories_builds ON builds.id = repositories_builds.build_id \
                                WHERE (jobs.arch = %s OR jobs.arch = %s) AND \
-                               jobs.type = 'build' AND \
-                               repositories_builds.repo_id = %s", arch, "noarch", self.id)
+                               jobs.test IS FALSE AND repositories_builds.repo_id = %s", arch, "noarch", self.id)
 
                        times.append((arch, time.time.total_seconds()))
 
index 3375afe1ae5dcd4448bce33d143e2fe864426d86..3f8fe40e30c83b46388657de3f90452e1c1d8417 100644 (file)
@@ -219,11 +219,13 @@ class Users(base.Object):
        def search(self, pattern, limit=None):
                pattern = "%%%s%%" % pattern
 
-               return self._get_users("SELECT * FROM users \
+               users = self._get_users("SELECT * FROM users \
                        WHERE (name LIKE %s OR realname LIKE %s) \
                        AND activated IS TRUE AND deleted IS FALSE \
                        ORDER BY name LIMIT %s", pattern, pattern, limit)
 
+               return list(users)
+
        @staticmethod
        def check_password_strength(password):
                score = 0
index ae02c68781ff4ddbaeaa6b7979709d1a5297713b..75bb7afa3a08d6db20b4e0d5800cd7f20153aff6 100644 (file)
@@ -124,7 +124,8 @@ CREATE TABLE builders (
     mem_free bigint,
     swap_total bigint,
     swap_free bigint,
-    space_free bigint
+    space_free bigint,
+    online_until timestamp without time zone
 );
 
 
@@ -1385,7 +1386,7 @@ CREATE TABLE uploads (
     user_id integer,
     builder_id integer,
     filename text NOT NULL,
-    hash text NOT NULL,
+    hash text,
     size bigint NOT NULL,
     progress bigint DEFAULT 0 NOT NULL,
     finished boolean DEFAULT false NOT NULL,
index e7b7391d14c0770e7cabc29458c308ddbd1b5d41..360349d0c113406a0b10707fd05ec17b0f2411b5 100644 (file)
@@ -38,6 +38,7 @@ class Application(tornado.web.Application):
 
                        # Builders
                        (r"/builders/info", handlers.BuildersInfoHandler),
+                       (r"/builders/jobs/get", handlers.BuildersGetNextJobHandler),
                        (r"/builders/jobs/queue", handlers.BuildersJobsQueueHandler),
                        (r"/builders/jobs/(.*)/addfile/(.*)", handlers.BuildersJobsAddFileHandler),
                        (r"/builders/jobs/(.*)/buildroot", handlers.BuildersJobsBuildrootHandler),
@@ -55,6 +56,7 @@ class Application(tornado.web.Application):
 
                        # Uploads
                        (r"/uploads/create", handlers.UploadsCreateHandler),
+                       (r"/uploads/stream", handlers.UploadsStreamHandler),
                        (r"/uploads/(.*)/sendchunk", handlers.UploadsSendChunkHandler),
                        (r"/uploads/(.*)/finished", handlers.UploadsFinishedHandler),
                        (r"/uploads/(.*)/destroy", handlers.UploadsDestroyHandler),
@@ -80,7 +82,8 @@ class Application(tornado.web.Application):
        def run(self, port=81):
                logging.debug("Going to background")
 
-               http_server = tornado.httpserver.HTTPServer(self, xheaders=True)
+               http_server = tornado.httpserver.HTTPServer(self, xheaders=True,
+                       max_body_size=1 * (1024 ** 3))
 
                # If we are not running in debug mode, we can actually run multiple
                # frontends to get best performance out of our service.
index a4bd8164c8e5be289263e15f292f60829edf4bf8..8d5131e736ee3eb5843d84cfee49d1b53d131177 100644 (file)
@@ -153,7 +153,7 @@ class UploadsCreateHandler(BaseHandler):
 
                filename = self.get_argument("filename")
                filesize = self.get_argument_int("filesize")
-               filehash = self.get_argument("hash")
+               filehash = self.get_argument("hash", None)
 
                with self.db.transaction():
                        upload = self.backend.uploads.create(filename, filesize,
@@ -162,6 +162,37 @@ class UploadsCreateHandler(BaseHandler):
                        self.finish(upload.uuid)
 
 
+@tornado.web.stream_request_body
+class UploadsStreamHandler(BaseHandler):
+       @tornado.web.authenticated
+       def prepare(self):
+               # Received file size
+               self.size = 0
+
+               upload_uuid = self.get_argument("id")
+
+               # Fetch upload object from database
+               self.upload = self.backend.uploads.get_by_uuid(upload_uuid)
+               if not self.upload:
+                       raise tornado.web.HTTPError(404)
+
+       def data_received(self, data):
+               logging.debug("Received chunk of %s bytes" % len(data))
+               self.size += len(data)
+
+               # Write the received chunk to disk
+               with self.db.transaction():
+                       self.upload.append(data)
+
+       def put(self):
+               logging.info("Received entire file (%s bytes)" % self.size)
+
+               with self.db.transaction():
+                       self.upload.finished()
+
+               self.finish("OK")
+
+
 class UploadsSendChunkHandler(BaseHandler):
        @tornado.web.authenticated
        def post(self, upload_id):
@@ -503,6 +534,54 @@ class BuildersKeepaliveHandler(BuildersBaseHandler):
                self.finish("OK")
 
 
+class BuildersGetNextJobHandler(BuildersBaseHandler):
+       def _retry_after(self, seconds):
+               # Consider the builder online until the time has passed
+               self.builder.online_until = \
+                       datetime.datetime.utcnow() + datetime.timedelta(seconds=seconds)
+
+               # Set the Retry-After header
+               self.set_header("Retry-After", "%s" % seconds)
+
+               # Send empty response to client
+               self.finish()
+
+       @tornado.web.authenticated
+       def get(self):
+               # If the builder is disabled, we don't need to do anything
+               # but will ask it to return after 5 min
+               if not self.builder.enabled:
+                       return self._retry_after(300)
+
+               # If the builder has too many jobs running,
+               # we will tell it to return after 1 min
+               if self.builder.too_many_jobs:
+                       return self._retry_after(60)
+
+               # Okay, we are ready for the next job
+               job = self.builder.get_next_job()
+
+               # If we got no job, we will ask the builder
+               # to return after 30 seconds
+               if not job:
+                       return self._retry_after(30)
+
+               # If we got a job, we will serialise it
+               # and send it to the builder
+               with self.db.transaction():
+                       job.start(builder=self.builder)
+
+                       ret = {
+                               "id"                 : job.uuid,
+                               "arch"               : job.arch,
+                               "source_url"         : job.build.source_download,
+                               "source_hash_sha512" : job.build.source_hash_sha512,
+                               "type"               : "test" if job.test else "release",
+                               "config"             : job.get_config(),
+                       }
+                       self.finish(ret)
+
+
 class BuildersJobsQueueHandler(BuildersBaseHandler):
        @tornado.web.asynchronous
        @tornado.web.authenticated
diff --git a/src/misc/lets-encrypt-x3-cross-signed.pem b/src/misc/lets-encrypt-x3-cross-signed.pem
new file mode 100644 (file)
index 0000000..0002462
--- /dev/null
@@ -0,0 +1,27 @@
+-----BEGIN CERTIFICATE-----
+MIIEkjCCA3qgAwIBAgIQCgFBQgAAAVOFc2oLheynCDANBgkqhkiG9w0BAQsFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTE2MDMxNzE2NDA0NloXDTIxMDMxNzE2NDA0Nlow
+SjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUxldCdzIEVuY3J5cHQxIzAhBgNVBAMT
+GkxldCdzIEVuY3J5cHQgQXV0aG9yaXR5IFgzMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEAnNMM8FrlLke3cl03g7NoYzDq1zUmGSXhvb418XCSL7e4S0EF
+q6meNQhY7LEqxGiHC6PjdeTm86dicbp5gWAf15Gan/PQeGdxyGkOlZHP/uaZ6WA8
+SMx+yk13EiSdRxta67nsHjcAHJyse6cF6s5K671B5TaYucv9bTyWaN8jKkKQDIZ0
+Z8h/pZq4UmEUEz9l6YKHy9v6Dlb2honzhT+Xhq+w3Brvaw2VFn3EK6BlspkENnWA
+a6xK8xuQSXgvopZPKiAlKQTGdMDQMc2PMTiVFrqoM7hD8bEfwzB/onkxEz0tNvjj
+/PIzark5McWvxI0NHWQWM6r6hCm21AvA2H3DkwIDAQABo4IBfTCCAXkwEgYDVR0T
+AQH/BAgwBgEB/wIBADAOBgNVHQ8BAf8EBAMCAYYwfwYIKwYBBQUHAQEEczBxMDIG
+CCsGAQUFBzABhiZodHRwOi8vaXNyZy50cnVzdGlkLm9jc3AuaWRlbnRydXN0LmNv
+bTA7BggrBgEFBQcwAoYvaHR0cDovL2FwcHMuaWRlbnRydXN0LmNvbS9yb290cy9k
+c3Ryb290Y2F4My5wN2MwHwYDVR0jBBgwFoAUxKexpHsscfrb4UuQdf/EFWCFiRAw
+VAYDVR0gBE0wSzAIBgZngQwBAgEwPwYLKwYBBAGC3xMBAQEwMDAuBggrBgEFBQcC
+ARYiaHR0cDovL2Nwcy5yb290LXgxLmxldHNlbmNyeXB0Lm9yZzA8BgNVHR8ENTAz
+MDGgL6AthitodHRwOi8vY3JsLmlkZW50cnVzdC5jb20vRFNUUk9PVENBWDNDUkwu
+Y3JsMB0GA1UdDgQWBBSoSmpjBH3duubRObemRWXv86jsoTANBgkqhkiG9w0BAQsF
+AAOCAQEA3TPXEfNjWDjdGBX7CVW+dla5cEilaUcne8IkCJLxWh9KEik3JHRRHGJo
+uM2VcGfl96S8TihRzZvoroed6ti6WqEBmtzw3Wodatg+VyOeph4EYpr/1wXKtx8/
+wApIvJSwtmVi4MFU5aMqrSDE6ea73Mj2tcMyo5jMd6jmeWUHK8so/joWUoHOUgwu
+X4Po1QYz+3dszkDqMp4fklxBwXRsW10KXzPMTZ+sOPAveyxindmjkW8lGy+QsRlG
+PfZ+G6Z6h7mjem0Y+iWlkYcV4PIWL1iwBi8saCbGS5jN2p8M+X+Q7UNKEkROb3N6
+KOqkqm57TH2H3eDJAkSnh6/DNFu0Qg==
+-----END CERTIFICATE-----
index bc9f303dafb1e143b5422fae79500a42e518a715..65df0a77398ff99b5bfc1211405b2314c2ceaa51 100644 (file)
@@ -17,9 +17,6 @@
                <li>
                        <a href="/builds/comments">{{ _("Comments") }}</a>
                </li>
-               <li>
-                       <a href="/builds/filter">{{ _("Filter builds") }}</a>
-               </li>
        </ul>
 
        <div class="page-header">
index de770c59c4bc09db413f93d58f1d39b87e556dd1..146af9b7678984701b938acda65975a4d4f6c424 100644 (file)
@@ -8,10 +8,3 @@
        <h1>{{ _("Build job list") }}</h1>
        {% module BuildTable(builds, show_user=True) %}
 {% end block %}
-
-{% block sidebar %}
-       <h1>{{ _("Actions") }}</h1>
-       <ul>
-               <li><a href="/builds/filter">{{ _("Filter builds") }}</a></li>
-       </ul>
-{% end block %}
index 70e78ef3538d15acbcba268225e954138000e449..5940b14cb912284acacbc7f5ea60766e5151a830 100644 (file)
@@ -22,7 +22,7 @@
                <div class="col-12 col-sm-12 col-md-6 col-lg-9 col-xl-9">
                        <h2 style="word-wrap: break-word;">
                                {{ _("Mirror: %s") % mirror.hostname }} <br>
-                               <small class="text-muted">{{ _("hosted by %s") % mirror.owner }}</small>
+                               <small class="text-muted">{{ mirror.owner }}</small>
                        </h2>
                </div>
                {% if current_user and current_user.has_perm("manage_mirrors") %}
index 1d8338e41a5d3155760c788fdc914758132bffbd..c579dc35b4c7c5859d27d6013992c68f058204b5 100644 (file)
                </div>
        {% end %}
 
-       {% if unpushed_builds %}
+       {% if repo.unpushed_builds %}
                <div class="row">
                        <div class="span12">
                                <h2>{{ _("Unpushed builds") }}</h2>
                                <div class="alert">
                                        {{ _("These builds were already put into this repository, but were not pushed out to the mirror servers, yet.") }}
                                </div>
-                               {% module BuildTable(unpushed_builds, show_repo_time=True) %}
+                               {% module BuildTable(repo.unpushed_builds, show_repo_time=True) %}
                        </div>
                </div>
        {% end %}
index f44a1239c4f4a1cbf29a5887f89a37753650904e..e77a8c1ff59782a3a57cc8fcd648f9ceea604c14 100644 (file)
@@ -147,7 +147,6 @@ class Application(tornado.web.Application):
 
                        # Builds
                        (r"/builds", builds.BuildsHandler),
-                       (r"/builds/filter", builds.BuildFilterHandler),
                        (r"/builds/queue", builds.BuildQueueHandler),
                        (r"/builds/comments", builds.BuildsCommentsHandler),
                        (r"/builds/comments/(\w+)", builds.BuildsCommentsHandler),
index dd28ca058e8908276400fe02f618025fa38e1722..0e8b29a1207bbfd8568ce990f38de685a36b57aa 100644 (file)
@@ -391,12 +391,3 @@ class BuildListHandler(base.BaseHandler):
                        limit=25)
 
                self.render("build-list.html", builds=builds)
-
-
-class BuildFilterHandler(base.BaseHandler):
-       def get(self):
-               builders = self.backend.builders.get_all()
-               distros  = self.backend.distros.get_all()
-
-               self.render("build-filter.html", builders=builders, distros=distros)
-
index dffaa484727309bdf5baa0552cb6da64721ec749..f910386237a0bb53b78dd40f9d5d5bed67b44c28 100644 (file)
@@ -116,14 +116,13 @@ class RepositoryDetailHandler(base.BaseHandler):
                        offset = None
 
                builds = repo.get_builds(limit=limit, offset=offset)
-               unpushed_builds = repo.get_unpushed_builds()
                obsolete_builds = repo.get_obsolete_builds()
 
                # Get the build times of this repository.
                build_times = repo.get_build_times()
 
                self.render("repository-detail.html", distro=distro, repo=repo,
-                       builds=builds, unpushed_builds=unpushed_builds,
+                       builds=builds,
                        obsolete_builds=obsolete_builds, build_times=build_times)