autoconf \
intltool \
make \
+ openssl-perl \
python2-pip \
python-daemon \
python-ldap \
RUN groupadd -g 2000 pbs && \
useradd --system --no-create-home --shell /bin/bash --uid 2000 --gid 2000 pbs
+# Install LDAP certificate
+RUN mkdir -p /etc/openldap/cacerts
+COPY src/misc/lets-encrypt-x3-cross-signed.pem /etc/openldap/cacerts/
+RUN c_rehash -v /etc/openldap/cacerts
+
# Copy the source code
COPY . /build/pakfire-build-service
geoipdir = $(datadir)/geoip
EXTRA_DIST += \
+ src/misc/lets-encrypt-x3-cross-signed.pem \
src/tools/dump-database-schema.sh \
src/tools/update-geoip-database.sh
"""
Returns True if the builder is online
"""
- return self.keepalive >= datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
+ return self.online_until >= datetime.datetime.utcnow()
@property
def keepalive(self):
WHERE id = %s", loadavg1, loadavg5, loadavg15, space_free,
mem_total, mem_free, swap_total, swap_free, self.id)
+    def set_online_until(self, online_until):
+        # Persist the timestamp until which this builder counts as online
+        # (refreshed every time the builder is told when to poll again).
+        self._set_attribute("online_until", online_until)
+
+    # Read straight from the backing data row; writes go through
+    # set_online_until() so the value is persisted to the database.
+    online_until = property(lambda s: s.data.online_until, set_online_until)
+
def update_info(self, cpu_model=None, cpu_count=None, cpu_arch=None, cpu_bogomips=None,
pakfire_version=None, host_key=None, os_name=None):
# Update all the rest.
This function does not work for UUIDs or filenames.
"""
- query = "SELECT * FROM packages \
- WHERE type = %s AND ( \
- name LIKE %s OR \
- summary LIKE %s OR \
- description LIKE %s \
- ) \
- GROUP BY name"
-
pattern = "%%%s%%" % pattern
- args = ("source", pattern, pattern, pattern)
-
- res = self.db.query(query, *args)
-
- pkgs = []
- for row in res:
- pkg = Package(self.backend, row.id, row)
- pkgs.append(pkg)
- if limit and len(pkgs) >= limit:
- break
+ packages = self._get_packages("SELECT * FROM packages \
+ WHERE type = %s AND (name LIKE %s OR summary LIKE %s OR description LIKE %s)",
+ "source", pattern, pattern, pattern)
- return pkgs
+ return list(packages)
def search_by_filename(self, filename, limit=None):
query = "SELECT filelists.* FROM filelists \
def get_build_times(self):
times = []
for arch in self.arches:
+ if arch == "src":
+ continue
+
time = self.db.get("SELECT SUM(jobs.time_finished - jobs.time_started) AS time FROM jobs \
JOIN builds ON builds.id = jobs.build_id \
JOIN repositories_builds ON builds.id = repositories_builds.build_id \
WHERE (jobs.arch = %s OR jobs.arch = %s) AND \
- jobs.type = 'build' AND \
- repositories_builds.repo_id = %s", arch, "noarch", self.id)
+ jobs.test IS FALSE AND repositories_builds.repo_id = %s", arch, "noarch", self.id)
times.append((arch, time.time.total_seconds()))
def search(self, pattern, limit=None):
pattern = "%%%s%%" % pattern
- return self._get_users("SELECT * FROM users \
+ users = self._get_users("SELECT * FROM users \
WHERE (name LIKE %s OR realname LIKE %s) \
AND activated IS TRUE AND deleted IS FALSE \
ORDER BY name LIMIT %s", pattern, pattern, limit)
+ return list(users)
+
@staticmethod
def check_password_strength(password):
score = 0
mem_free bigint,
swap_total bigint,
swap_free bigint,
- space_free bigint
+ space_free bigint,
+ online_until timestamp without time zone
);
user_id integer,
builder_id integer,
filename text NOT NULL,
- hash text NOT NULL,
+ hash text,
size bigint NOT NULL,
progress bigint DEFAULT 0 NOT NULL,
finished boolean DEFAULT false NOT NULL,
# Builders
(r"/builders/info", handlers.BuildersInfoHandler),
+ (r"/builders/jobs/get", handlers.BuildersGetNextJobHandler),
(r"/builders/jobs/queue", handlers.BuildersJobsQueueHandler),
(r"/builders/jobs/(.*)/addfile/(.*)", handlers.BuildersJobsAddFileHandler),
(r"/builders/jobs/(.*)/buildroot", handlers.BuildersJobsBuildrootHandler),
# Uploads
(r"/uploads/create", handlers.UploadsCreateHandler),
+ (r"/uploads/stream", handlers.UploadsStreamHandler),
(r"/uploads/(.*)/sendchunk", handlers.UploadsSendChunkHandler),
(r"/uploads/(.*)/finished", handlers.UploadsFinishedHandler),
(r"/uploads/(.*)/destroy", handlers.UploadsDestroyHandler),
def run(self, port=81):
logging.debug("Going to background")
- http_server = tornado.httpserver.HTTPServer(self, xheaders=True)
+ http_server = tornado.httpserver.HTTPServer(self, xheaders=True,
+ max_body_size=1 * (1024 ** 3))
# If we are not running in debug mode, we can actually run multiple
# frontends to get best performance out of our service.
filename = self.get_argument("filename")
filesize = self.get_argument_int("filesize")
- filehash = self.get_argument("hash")
+ filehash = self.get_argument("hash", None)
with self.db.transaction():
upload = self.backend.uploads.create(filename, filesize,
self.finish(upload.uuid)
+@tornado.web.stream_request_body
+class UploadsStreamHandler(BaseHandler):
+    # Streams an upload body straight into its backing upload object,
+    # chunk by chunk, as an alternative to the chunked /sendchunk POSTs.
+
+    # NOTE(review): @authenticated sits on prepare() rather than put() —
+    # presumably so unauthenticated clients are rejected before any body
+    # data is streamed in; confirm this interacts correctly with
+    # @stream_request_body.
+    @tornado.web.authenticated
+    def prepare(self):
+        # Received file size, accumulated across data_received() calls
+        self.size = 0
+
+        upload_uuid = self.get_argument("id")
+
+        # Fetch upload object from database; unknown UUID -> 404
+        self.upload = self.backend.uploads.get_by_uuid(upload_uuid)
+        if not self.upload:
+            raise tornado.web.HTTPError(404)
+
+    def data_received(self, data):
+        # Called by Tornado once per received chunk of the request body.
+        logging.debug("Received chunk of %s bytes" % len(data))
+        self.size += len(data)
+
+        # Write the received chunk to disk
+        with self.db.transaction():
+            self.upload.append(data)
+
+    def put(self):
+        # Entire body has arrived — mark the upload as complete.
+        logging.info("Received entire file (%s bytes)" % self.size)
+
+        with self.db.transaction():
+            self.upload.finished()
+
+        self.finish("OK")
+
+
class UploadsSendChunkHandler(BaseHandler):
@tornado.web.authenticated
def post(self, upload_id):
self.finish("OK")
+class BuildersGetNextJobHandler(BuildersBaseHandler):
+    # Polling endpoint: a builder asks for its next job and either gets
+    # one serialised back, or an empty reply with Retry-After telling it
+    # when to ask again.
+
+    def _retry_after(self, seconds):
+        # Consider the builder online until the time has passed
+        self.builder.online_until = \
+            datetime.datetime.utcnow() + datetime.timedelta(seconds=seconds)
+
+        # Set the Retry-After header
+        self.set_header("Retry-After", "%s" % seconds)
+
+        # Send empty response to client
+        self.finish()
+
+    @tornado.web.authenticated
+    def get(self):
+        # If the builder is disabled, we don't need to do anything
+        # but will ask it to return after 5 min
+        if not self.builder.enabled:
+            return self._retry_after(300)
+
+        # If the builder has too many jobs running,
+        # we will tell it to return after 1 min
+        if self.builder.too_many_jobs:
+            return self._retry_after(60)
+
+        # Okay, we are ready for the next job
+        job = self.builder.get_next_job()
+
+        # If we got no job, we will ask the builder
+        # to return after 30 seconds
+        if not job:
+            return self._retry_after(30)
+
+        # If we got a job, we will serialise it
+        # and send it to the builder.
+        # Claim the job for this builder before handing it out.
+        with self.db.transaction():
+            job.start(builder=self.builder)
+
+        # Tornado serialises a dict passed to finish() as JSON.
+        ret = {
+            "id"                : job.uuid,
+            "arch"              : job.arch,
+            "source_url"        : job.build.source_download,
+            "source_hash_sha512" : job.build.source_hash_sha512,
+            "type"               : "test" if job.test else "release",
+            "config"             : job.get_config(),
+        }
+
+        self.finish(ret)
+
+
class BuildersJobsQueueHandler(BuildersBaseHandler):
@tornado.web.asynchronous
@tornado.web.authenticated
--- /dev/null
+-----BEGIN CERTIFICATE-----
+MIIEkjCCA3qgAwIBAgIQCgFBQgAAAVOFc2oLheynCDANBgkqhkiG9w0BAQsFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTE2MDMxNzE2NDA0NloXDTIxMDMxNzE2NDA0Nlow
+SjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUxldCdzIEVuY3J5cHQxIzAhBgNVBAMT
+GkxldCdzIEVuY3J5cHQgQXV0aG9yaXR5IFgzMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEAnNMM8FrlLke3cl03g7NoYzDq1zUmGSXhvb418XCSL7e4S0EF
+q6meNQhY7LEqxGiHC6PjdeTm86dicbp5gWAf15Gan/PQeGdxyGkOlZHP/uaZ6WA8
+SMx+yk13EiSdRxta67nsHjcAHJyse6cF6s5K671B5TaYucv9bTyWaN8jKkKQDIZ0
+Z8h/pZq4UmEUEz9l6YKHy9v6Dlb2honzhT+Xhq+w3Brvaw2VFn3EK6BlspkENnWA
+a6xK8xuQSXgvopZPKiAlKQTGdMDQMc2PMTiVFrqoM7hD8bEfwzB/onkxEz0tNvjj
+/PIzark5McWvxI0NHWQWM6r6hCm21AvA2H3DkwIDAQABo4IBfTCCAXkwEgYDVR0T
+AQH/BAgwBgEB/wIBADAOBgNVHQ8BAf8EBAMCAYYwfwYIKwYBBQUHAQEEczBxMDIG
+CCsGAQUFBzABhiZodHRwOi8vaXNyZy50cnVzdGlkLm9jc3AuaWRlbnRydXN0LmNv
+bTA7BggrBgEFBQcwAoYvaHR0cDovL2FwcHMuaWRlbnRydXN0LmNvbS9yb290cy9k
+c3Ryb290Y2F4My5wN2MwHwYDVR0jBBgwFoAUxKexpHsscfrb4UuQdf/EFWCFiRAw
+VAYDVR0gBE0wSzAIBgZngQwBAgEwPwYLKwYBBAGC3xMBAQEwMDAuBggrBgEFBQcC
+ARYiaHR0cDovL2Nwcy5yb290LXgxLmxldHNlbmNyeXB0Lm9yZzA8BgNVHR8ENTAz
+MDGgL6AthitodHRwOi8vY3JsLmlkZW50cnVzdC5jb20vRFNUUk9PVENBWDNDUkwu
+Y3JsMB0GA1UdDgQWBBSoSmpjBH3duubRObemRWXv86jsoTANBgkqhkiG9w0BAQsF
+AAOCAQEA3TPXEfNjWDjdGBX7CVW+dla5cEilaUcne8IkCJLxWh9KEik3JHRRHGJo
+uM2VcGfl96S8TihRzZvoroed6ti6WqEBmtzw3Wodatg+VyOeph4EYpr/1wXKtx8/
+wApIvJSwtmVi4MFU5aMqrSDE6ea73Mj2tcMyo5jMd6jmeWUHK8so/joWUoHOUgwu
+X4Po1QYz+3dszkDqMp4fklxBwXRsW10KXzPMTZ+sOPAveyxindmjkW8lGy+QsRlG
+PfZ+G6Z6h7mjem0Y+iWlkYcV4PIWL1iwBi8saCbGS5jN2p8M+X+Q7UNKEkROb3N6
+KOqkqm57TH2H3eDJAkSnh6/DNFu0Qg==
+-----END CERTIFICATE-----
<li>
<a href="/builds/comments">{{ _("Comments") }}</a>
</li>
- <li>
- <a href="/builds/filter">{{ _("Filter builds") }}</a>
- </li>
</ul>
<div class="page-header">
<h1>{{ _("Build job list") }}</h1>
{% module BuildTable(builds, show_user=True) %}
{% end block %}
-
-{% block sidebar %}
- <h1>{{ _("Actions") }}</h1>
- <ul>
- <li><a href="/builds/filter">{{ _("Filter builds") }}</a></li>
- </ul>
-{% end block %}
<div class="col-12 col-sm-12 col-md-6 col-lg-9 col-xl-9">
<h2 style="word-wrap: break-word;">
{{ _("Mirror: %s") % mirror.hostname }} <br>
- <small class="text-muted">{{ _("hosted by %s") % mirror.owner }}</small>
+ <small class="text-muted">{{ mirror.owner }}</small>
</h2>
</div>
{% if current_user and current_user.has_perm("manage_mirrors") %}
</div>
{% end %}
- {% if unpushed_builds %}
+ {% if repo.unpushed_builds %}
<div class="row">
<div class="span12">
<h2>{{ _("Unpushed builds") }}</h2>
<div class="alert">
{{ _("These builds were already put into this repository, but were not pushed out to the mirror servers, yet.") }}
</div>
- {% module BuildTable(unpushed_builds, show_repo_time=True) %}
+ {% module BuildTable(repo.unpushed_builds, show_repo_time=True) %}
</div>
</div>
{% end %}
# Builds
(r"/builds", builds.BuildsHandler),
- (r"/builds/filter", builds.BuildFilterHandler),
(r"/builds/queue", builds.BuildQueueHandler),
(r"/builds/comments", builds.BuildsCommentsHandler),
(r"/builds/comments/(\w+)", builds.BuildsCommentsHandler),
limit=25)
self.render("build-list.html", builds=builds)
-
-
-class BuildFilterHandler(base.BaseHandler):
- def get(self):
- builders = self.backend.builders.get_all()
- distros = self.backend.distros.get_all()
-
- self.render("build-filter.html", builders=builders, distros=distros)
-
offset = None
builds = repo.get_builds(limit=limit, offset=offset)
- unpushed_builds = repo.get_unpushed_builds()
obsolete_builds = repo.get_obsolete_builds()
# Get the build times of this repository.
build_times = repo.get_build_times()
self.render("repository-detail.html", distro=distro, repo=repo,
- builds=builds, unpushed_builds=unpushed_builds,
+ builds=builds,
obsolete_builds=obsolete_builds, build_times=build_times)