From 5c0f2c782a0fa67ddec8e34a1b2bbcb962b80fb2 Mon Sep 17 00:00:00 2001
From: Michael Tremer
Date: Tue, 21 Jan 2025 10:38:58 +0000
Subject: [PATCH] Migrate to an even more async design

This monster commit replaces our database engine with SQLAlchemy so that
we can utilise eager loading and don't have to implement it on our own.

Although Jinja is a lot more flexible and allows us to fetch any data
asynchronously, we cannot really have async properties. Therefore it is a
good idea to have some eager loading happen when we fetch certain objects
that will always depend on some other objects.

This has been a major rewrite of the backend which still isn't done, but I
needed to commit this somehow. There was little point in doing it
gradually, so here is this mountain of a commit. Enjoy.

Signed-off-by: Michael Tremer
---
 Makefile.am | 97 +-
 src/buildservice/__init__.py | 10 +-
 src/buildservice/base.py | 66 +-
 src/buildservice/bugtracker.py | 17 +-
 src/buildservice/builders.py | 295 ++-
 src/buildservice/builds.py | 1147 +++++----
 src/buildservice/database.py | 486 ++--
 src/buildservice/distribution.py | 753 +++---
 src/buildservice/events.py | 2031 +++++++++---------
 src/buildservice/jobs.py | 623 ++---
 src/buildservice/keys.py | 130 +-
 src/buildservice/mirrors.py | 400 ++--
 src/buildservice/misc.py | 9 -
 src/buildservice/packages.py | 731 +++---
 src/buildservice/releasemonitoring.py | 1205 +++++-----
 src/buildservice/repository.py | 519 ++---
 src/buildservice/sessions.py | 110 +-
 src/buildservice/sources.py | 397 ++--
 src/buildservice/uploads.py | 217 +-
 src/buildservice/users.py | 811 +++----
 src/scripts/pakfire-web | 7 +-
 src/templates/base.html | 8 +-
 .../events.py => templates/bugs/macros.html} | 68 +-
 src/templates/bugs/modules/list.html | 28 -
 src/templates/builders/index.html | 51 +-
 src/templates/builders/macros.html | 54 +
 src/templates/builders/modules/stats.html | 31 -
 src/templates/builders/show.html | 53 +-
 src/templates/builds/groups/macros.html | 88 +
 src/templates/builds/groups/modules/list.html | 76 -
 src/templates/builds/groups/show.html | 16 +-
 src/templates/builds/index.html | 24 +-
 src/templates/builds/macros.html | 209 ++
 src/templates/builds/modules/list.html | 107 -
 src/templates/builds/modules/watchers.html | 66 -
 src/templates/builds/show.html | 69 +-
 src/templates/distros/index.html | 12 +-
 src/templates/distros/macros.html | 37 +
 src/templates/distros/modules/list.html | 14 -
 src/templates/distros/releases/show.html | 26 +-
 src/templates/distros/show.html | 40 +-
 src/templates/events/macros.html | 321 +++
 .../events/modules/build-comment.html | 5 -
 src/templates/events/modules/list.html | 9 -
 .../events/modules/system-message.html | 280 ---
 .../events/modules/user-message.html | 12 -
 src/templates/index.html | 14 +-
 src/templates/jobs/macros.html | 231 ++
 src/templates/jobs/modules/list.html | 160 --
 src/templates/jobs/modules/queue.html | 38 -
 src/templates/log.html | 13 +-
 src/{web/bugs.py => templates/macros.html} | 27 +-
 src/templates/mirrors/index.html | 14 +-
 src/templates/mirrors/macros.html | 35 +
 src/templates/mirrors/modules/list.html | 15 -
 src/templates/mirrors/show.html | 45 +-
 src/templates/modules/commit-message.html | 3 -
 src/templates/modules/link-to-user.html | 16 -
 .../modules/packages-files-table.html | 47 -
 src/templates/modules/text.html | 9 -
 src/templates/monitorings/macros.html | 47 +
 .../monitorings/modules/releases-list.html | 25 -
 src/templates/monitorings/show.html | 24 +-
 src/templates/packages/index.html | 34 +-
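To illustrate the eager-loading rationale from the commit message: a relationship declared with lazy="selectin" is loaded together with its parent object, so synchronous template code (for example a plain Jinja expression or property) can use it without issuing any further queries. What follows is a minimal, hypothetical sketch under that assumption; it is not part of this patch and the model names are made up for illustration only.

import sqlalchemy
from sqlalchemy import Column, ForeignKey, Integer, Text
from sqlalchemy.ext.asyncio import AsyncAttrs
from sqlalchemy.orm import DeclarativeBase, relationship


class Base(AsyncAttrs, DeclarativeBase):
	pass


class Package(Base):
	__tablename__ = "packages"

	id   = Column(Integer, primary_key=True)
	name = Column(Text, nullable=False)


class Build(Base):
	__tablename__ = "builds"

	id     = Column(Integer, primary_key=True)
	pkg_id = Column(Integer, ForeignKey("packages.id"), nullable=False)

	# Loaded eagerly with an extra SELECT ... IN query whenever a Build is fetched
	pkg = relationship("Package", lazy="selectin")


async def get_build(session, build_id):
	# session is assumed to be an AsyncSession
	stmt = sqlalchemy.select(Build).where(Build.id == build_id)

	result = await session.execute(stmt)
	build = result.scalar_one_or_none()

	# build.pkg is already populated at this point, so a template can
	# render it without performing any additional I/O
	return build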
src/templates/packages/macros.html | 205 ++ .../packages/modules/dependencies.html | 19 - src/templates/packages/modules/info.html | 83 - src/templates/packages/name/builds.html | 26 +- src/templates/packages/name/index.html | 36 +- src/templates/packages/show.html | 30 +- src/templates/packages/view-file.html | 12 +- src/templates/releases/macros.html | 36 + src/templates/repos/builds.html | 19 +- src/templates/repos/macros.html | 56 + src/templates/repos/modules/list.html | 34 - src/templates/repos/show.html | 31 +- src/templates/search.html | 26 +- src/templates/sources/commit.html | 55 +- src/templates/sources/macros.html | 58 + src/templates/sources/modules/commits.html | 12 - src/templates/sources/modules/list.html | 11 - src/templates/sources/show.html | 33 +- src/templates/users/index.html | 10 +- src/templates/users/macros.html | 112 + src/templates/users/modules/list.html | 46 - src/templates/users/show.html | 50 +- src/web/__init__.py | 97 +- src/web/auth.py | 5 +- src/web/base.py | 203 +- src/web/builders.py | 18 +- src/web/builds.py | 58 +- src/web/distributions.py | 22 +- src/web/filters.py | 181 ++ src/web/handlers.py | 15 +- src/web/jobs.py | 11 - src/web/mirrors.py | 15 +- src/web/monitorings.py | 14 +- src/web/packages.py | 73 +- src/web/repos.py | 35 +- src/web/search.py | 18 +- src/web/sources.py | 46 +- src/web/ui_modules.py | 95 - src/web/uploads.py | 31 +- src/web/users.py | 18 +- 104 files changed, 7295 insertions(+), 7222 deletions(-) rename src/{web/events.py => templates/bugs/macros.html} (52%) delete mode 100644 src/templates/bugs/modules/list.html create mode 100644 src/templates/builders/macros.html delete mode 100644 src/templates/builders/modules/stats.html create mode 100644 src/templates/builds/groups/macros.html delete mode 100644 src/templates/builds/groups/modules/list.html create mode 100644 src/templates/builds/macros.html delete mode 100644 src/templates/builds/modules/list.html delete mode 100644 src/templates/builds/modules/watchers.html create mode 100644 src/templates/distros/macros.html delete mode 100644 src/templates/distros/modules/list.html create mode 100644 src/templates/events/macros.html delete mode 100644 src/templates/events/modules/build-comment.html delete mode 100644 src/templates/events/modules/list.html delete mode 100644 src/templates/events/modules/system-message.html delete mode 100644 src/templates/events/modules/user-message.html create mode 100644 src/templates/jobs/macros.html delete mode 100644 src/templates/jobs/modules/list.html delete mode 100644 src/templates/jobs/modules/queue.html rename src/{web/bugs.py => templates/macros.html} (74%) create mode 100644 src/templates/mirrors/macros.html delete mode 100644 src/templates/mirrors/modules/list.html delete mode 100644 src/templates/modules/commit-message.html delete mode 100644 src/templates/modules/link-to-user.html delete mode 100644 src/templates/modules/packages-files-table.html delete mode 100644 src/templates/modules/text.html create mode 100644 src/templates/monitorings/macros.html delete mode 100644 src/templates/monitorings/modules/releases-list.html create mode 100644 src/templates/packages/macros.html delete mode 100644 src/templates/packages/modules/dependencies.html delete mode 100644 src/templates/packages/modules/info.html create mode 100644 src/templates/releases/macros.html create mode 100644 src/templates/repos/macros.html delete mode 100644 src/templates/repos/modules/list.html create mode 100644 src/templates/sources/macros.html delete mode 100644 
src/templates/sources/modules/commits.html delete mode 100644 src/templates/sources/modules/list.html create mode 100644 src/templates/users/macros.html delete mode 100644 src/templates/users/modules/list.html create mode 100644 src/web/filters.py delete mode 100644 src/web/ui_modules.py diff --git a/Makefile.am b/Makefile.am index 1d506fbc..a57d9a39 100644 --- a/Makefile.am +++ b/Makefile.am @@ -126,13 +126,12 @@ web_PYTHON = \ src/web/__init__.py \ src/web/auth.py \ src/web/base.py \ - src/web/bugs.py \ src/web/builders.py \ src/web/builds.py \ src/web/debuginfo.py \ src/web/distributions.py \ src/web/errors.py \ - src/web/events.py \ + src/web/filters.py \ src/web/handlers.py \ src/web/jobs.py \ src/web/mirrors.py \ @@ -141,7 +140,6 @@ web_PYTHON = \ src/web/repos.py \ src/web/search.py \ src/web/sources.py \ - src/web/ui_modules.py \ src/web/uploads.py \ src/web/users.py @@ -154,34 +152,29 @@ dist_templates_DATA = \ src/templates/index.html \ src/templates/log.html \ src/templates/login.html \ + src/templates/macros.html \ src/templates/modal.html \ src/templates/search.html templatesdir = $(datadir)/templates -templates_bugsdir = $(templatesdir)/bugs - -dist_templates_bugs_modules_DATA = \ - src/templates/bugs/modules/list.html +dist_templates_bugs_DATA = \ + src/templates/bugs/macros.html -templates_bugs_modulesdir = $(templates_bugsdir)/modules +templates_bugsdir = $(templatesdir)/bugs dist_templates_builders_DATA = \ src/templates/builders/create.html \ src/templates/builders/delete.html \ src/templates/builders/edit.html \ src/templates/builders/index.html \ + src/templates/builders/macros.html \ src/templates/builders/show.html \ src/templates/builders/start.html \ src/templates/builders/stop.html templates_buildersdir = $(templatesdir)/builders -dist_templates_builders_modules_DATA = \ - src/templates/builders/modules/stats.html - -templates_builders_modulesdir = $(templates_buildersdir)/modules - dist_templates_builds_DATA = \ src/templates/builds/approve.html \ src/templates/builds/bug.html \ @@ -189,20 +182,17 @@ dist_templates_builds_DATA = \ src/templates/builds/clone.html \ src/templates/builds/delete.html \ src/templates/builds/index.html \ + src/templates/builds/macros.html \ src/templates/builds/show.html templates_buildsdir = $(templatesdir)/builds dist_templates_builds_groups_DATA = \ + src/templates/builds/groups/macros.html \ src/templates/builds/groups/show.html templates_builds_groupsdir = $(templates_buildsdir)/groups -dist_templates_builds_groups_modules_DATA = \ - src/templates/builds/groups/modules/list.html - -templates_builds_groups_modulesdir = $(templates_builds_groupsdir)/modules - dist_templates_builds_messages_DATA = \ src/templates/builds/messages/comment.txt \ src/templates/builds/messages/failed.txt \ @@ -211,12 +201,6 @@ dist_templates_builds_messages_DATA = \ templates_builds_messagesdir = $(templates_buildsdir)/messages -dist_templates_builds_modules_DATA = \ - src/templates/builds/modules/list.html \ - src/templates/builds/modules/watchers.html - -templates_builds_modulesdir = $(templates_buildsdir)/modules - dist_templates_builds_repos_DATA = \ src/templates/builds/repos/add.html \ src/templates/builds/repos/remove.html @@ -226,15 +210,11 @@ templates_builds_reposdir = $(templates_buildsdir)/repos dist_templates_distros_DATA = \ src/templates/distros/edit.html \ src/templates/distros/index.html \ + src/templates/distros/macros.html \ src/templates/distros/show.html templates_distrosdir = $(templatesdir)/distros 
-dist_templates_distros_modules_DATA = \ - src/templates/distros/modules/list.html - -templates_distros_modulesdir = $(templates_distrosdir)/modules - dist_templates_distros_releases_DATA = \ src/templates/distros/releases/delete.html \ src/templates/distros/releases/edit.html \ @@ -252,34 +232,26 @@ templates_distros_releases_modulesdir = $(templates_distros_releasesdir)/modules dist_templates_monitorings_DATA = \ src/templates/monitorings/delete.html \ src/templates/monitorings/edit.html \ + src/templates/monitorings/macros.html \ src/templates/monitorings/show.html templates_monitoringsdir = $(templatesdir)/monitorings -dist_templates_monitorings_modules_DATA = \ - src/templates/monitorings/modules/releases-list.html - -templates_monitorings_modulesdir = $(templates_monitoringsdir)/modules - dist_templates_errors_DATA = \ src/templates/errors/error.html templates_errorsdir = $(templatesdir)/errors -templates_eventsdir = $(templatesdir)/events - -dist_templates_events_modules_DATA = \ - src/templates/events/modules/list.html \ - src/templates/events/modules/build-comment.html \ - src/templates/events/modules/system-message.html \ - src/templates/events/modules/user-message.html +dist_templates_events_DATA = \ + src/templates/events/macros.html -templates_events_modulesdir = $(templates_eventsdir)/modules +templates_eventsdir = $(templatesdir)/events dist_templates_jobs_DATA = \ src/templates/jobs/abort.html \ src/templates/jobs/index.html \ src/templates/jobs/log-stream.html \ + src/templates/jobs/macros.html \ src/templates/jobs/queue.html \ src/templates/jobs/retry.html @@ -291,9 +263,7 @@ dist_templates_jobs_messages_DATA = \ templates_jobs_messagesdir = $(templates_jobsdir)/messages dist_templates_jobs_modules_DATA = \ - src/templates/jobs/modules/list.html \ - src/templates/jobs/modules/log-stream.html \ - src/templates/jobs/modules/queue.html + src/templates/jobs/modules/log-stream.html templates_jobs_modulesdir = $(templates_jobsdir)/modules @@ -303,62 +273,41 @@ dist_templates_mirrors_DATA = \ src/templates/mirrors/delete.html \ src/templates/mirrors/edit.html \ src/templates/mirrors/index.html \ + src/templates/mirrors/macros.html \ src/templates/mirrors/show.html templates_mirrorsdir = $(templatesdir)/mirrors -dist_templates_mirrors_modules_DATA = \ - src/templates/mirrors/modules/list.html - -templates_mirrors_modulesdir = $(templates_mirrorsdir)/modules - -dist_templates_modules_DATA = \ - src/templates/modules/commit-message.html \ - src/templates/modules/link-to-user.html \ - src/templates/modules/packages-files-table.html \ - src/templates/modules/text.html +dist_templates_releases_DATA = \ + src/templates/releases/macros.html -templates_modulesdir = $(templatesdir)/modules +templates_releasesdir = $(templatesdir)/releases dist_templates_repos_DATA = \ src/templates/repos/create-custom.html \ src/templates/repos/builds.html \ src/templates/repos/delete.html \ src/templates/repos/edit.html \ + src/templates/repos/macros.html \ src/templates/repos/show.html templates_reposdir = $(templatesdir)/repos -dist_templates_repos_modules_DATA = \ - src/templates/repos/modules/list.html - -templates_repos_modulesdir = $(templates_reposdir)/modules - dist_templates_sources_DATA = \ src/templates/sources/commit.html \ + src/templates/sources/macros.html \ src/templates/sources/show.html templates_sourcesdir = $(templatesdir)/sources -dist_templates_sources_modules_DATA = \ - src/templates/sources/modules/commits.html \ - src/templates/sources/modules/list.html - 
-templates_sources_modulesdir = $(templates_sourcesdir)/modules - dist_templates_packages_DATA = \ src/templates/packages/index.html \ + src/templates/packages/macros.html \ src/templates/packages/show.html \ src/templates/packages/view-file.html templates_packagesdir = $(templatesdir)/packages -dist_templates_packages_modules_DATA = \ - src/templates/packages/modules/dependencies.html \ - src/templates/packages/modules/info.html - -templates_packages_modulesdir = $(templates_packagesdir)/modules - dist_templates_packages_name_DATA = \ src/templates/packages/name/builds.html \ src/templates/packages/name/index.html @@ -369,6 +318,7 @@ dist_templates_users_DATA = \ src/templates/users/delete.html \ src/templates/users/edit.html \ src/templates/users/index.html \ + src/templates/users/macros.html \ src/templates/users/show.html \ src/templates/users/subscribe.html @@ -380,7 +330,6 @@ dist_templates_users_messages_DATA = \ templates_users_messagesdir = $(templates_usersdir)/messages dist_templates_users_modules_DATA = \ - src/templates/users/modules/list.html \ src/templates/users/modules/push-subscribe-button.html templates_users_modulesdir = $(templates_usersdir)/modules diff --git a/src/buildservice/__init__.py b/src/buildservice/__init__.py index 4b351eb4..2ce05413 100644 --- a/src/buildservice/__init__.py +++ b/src/buildservice/__init__.py @@ -99,8 +99,8 @@ class Backend(object): def launch_background_tasks(self): # Launch some initial tasks - self.run_task(self.users.generate_vapid_keys) - self.run_task(self.builders.autoscale) + #self.run_task(self.users.generate_vapid_keys) + #self.run_task(self.builders.autoscale) # Regularly sync data to the mirrors self.run_periodic_task(300, self.sync) @@ -109,17 +109,17 @@ class Backend(object): self.run_periodic_task(300, self.mirrors.check) # Regularly fetch sources - self.run_periodic_task(300, self.sources.fetch) + #self.run_periodic_task(300, self.sources.fetch) # Regularly check for new releases # XXX Disabled for now #self.run_periodic_task(300, self.monitorings.check) # Cleanup regularly - self.run_periodic_task(3600, self.cleanup) + #self.run_periodic_task(3600, self.cleanup) # Automatically abort any jobs that run for forever - self.run_periodic_task(60, self.jobs.abort) + #self.run_periodic_task(60, self.jobs.abort) def read_config(self, path): c = configparser.ConfigParser() diff --git a/src/buildservice/base.py b/src/buildservice/base.py index ffe8b716..d55d41c2 100644 --- a/src/buildservice/base.py +++ b/src/buildservice/base.py @@ -1,8 +1,6 @@ #!/usr/bin/python -import psycopg.adapt - -from .decorators import * +import functools class Object(object): """ @@ -23,71 +21,13 @@ class Object(object): """ pass - @lazy_property + @functools.cached_property def db(self): """ Shortcut to database """ return self.backend.db - @lazy_property + @functools.cached_property def settings(self): return self.backend.settings - - -class DataObject(Object): - # Table name - table = None - - def __eq__(self, other): - if isinstance(other, self.__class__): - return self.id == other.id - - return NotImplemented - - def __hash__(self): - return hash(self.id) - - def init(self, data=None, **kwargs): - self.data = data - - # Set any extra arguments (to populate the cache) - for arg in kwargs: - setattr(self, arg, kwargs[arg]) - - @property - def id(self): - return self.data.id - - async def _set_attribute(self, key, val): - assert self.table, "Table name not set" - assert self.id - - # Detect if an update is needed - if self.data[key] == val: - return - - 
await self.db.execute("UPDATE %s SET %s = %%s \ - WHERE id = %%s" % (self.table, key), val, self.id) - - # Update the cached attribute - self.data[key] = val - - async def _set_attribute_now(self, key): - assert self.table, "Table name not set" - assert self.id - - res = await self.db.execute("UPDATE %s SET %s = CURRENT_TIMESTAMP \ - WHERE id = %%s RETURNING %s" % (self.table, key, key), self.id) - - # Update the cached attribute - if res: - self.data[key] = res[key] - - -# SQL Integration - -class DataObjectDumper(psycopg.adapt.Dumper): - def dump(self, obj): - # Return the ID (as bytes) - return bytes("%s" % obj.id, "utf-8") diff --git a/src/buildservice/bugtracker.py b/src/buildservice/bugtracker.py index c64e7fa9..a801b891 100644 --- a/src/buildservice/bugtracker.py +++ b/src/buildservice/bugtracker.py @@ -48,7 +48,7 @@ class BadRequestError(Exception): class Bugzilla(base.Object): def init(self, api_key=None): if api_key is None: - api_key = self.settings.get("bugzilla-api-key") + api_key = self.backend.config.get("bugzilla", "api-key") # Store the API key self.api_key = api_key @@ -58,7 +58,7 @@ class Bugzilla(base.Object): """ Returns the base URL of a Bugzilla instance """ - return self.settings.get("bugzilla-url") + return self.backend.config.get("bugzilla", "url") async def whoami(self): """ @@ -214,9 +214,16 @@ class Bugzilla(base.Object): """ Fetches multiple bugs concurrently """ - return await asyncio.gather( - *(self.get_bug(bug) for bug in bugs), - ) + tasks = [] + + async with asyncio.TaskGroup() as tg: + for bug in bugs: + tg.create_task( + self.get_bug(bug), + ) + + # Return the result from all tasks + return [task.result() for task in tasks] async def get_bug(self, bug): """ diff --git a/src/buildservice/builders.py b/src/buildservice/builders.py index b6049477..893fe1ea 100644 --- a/src/buildservice/builders.py +++ b/src/buildservice/builders.py @@ -3,9 +3,15 @@ import asyncio import botocore.exceptions import datetime +import functools import logging +import sqlalchemy +from sqlalchemy import BigInteger, Boolean, Column, DateTime, ForeignKey, Integer, Text + from . import base +from . import database +from . import jobs from .decorators import * from .errors import * @@ -17,17 +23,20 @@ class Builders(base.Object): # Stores any control connections to builders connections = {} - async def _get_builders(self, *args, **kwargs): - return await self.db.fetch_many(Builder, *args, **kwargs) - - async def _get_builder(self, *args, **kwargs): - return await self.db.fetch_one(Builder, *args, **kwargs) - - async def __aiter__(self): - builders = await self._get_builders("SELECT * FROM builders \ - WHERE deleted_at IS NULL ORDER BY name") + def __aiter__(self): + stmt = ( + sqlalchemy + .select(Builder) + .where( + Builder.deleted_at == None, + ) + .order_by( + Builder.name, + ) + ) - return aiter(builders) + # Fetch the builders + return self.db.fetch(stmt) def init(self): # Initialize stats @@ -35,50 +44,30 @@ class Builders(base.Object): async def create(self, name, user=None): """ - Creates a new builder. 
+ Creates a new builder """ - builder = await self._get_builder(""" - INSERT INTO - builders - ( - name, - created_by - ) - VALUES - ( - %s, %s - ) - RETURNING - * - """, name, user, + builder = await self.db.insert( + Builder, + name = name, + user = user, ) return builder - async def get_by_id(self, id): - return await self._get_builder(""" - SELECT - * - FROM - builders - WHERE - id = %s - """, id, - ) - async def get_by_name(self, name): - return await self._get_builder(""" - SELECT - * - FROM - builders - WHERE - deleted_at IS NULL - AND - name = %s - """, name, + stmt = ( + sqlalchemy + .select(Builder) + .where( + Builder.deleted_at == None, + + # Match by name + Builder.name == name, + ) ) + return await self.db.fetch_one(stmt) + @property def connected(self): """ @@ -179,50 +168,57 @@ class Builders(base.Object): # Stats - @property - def total_build_time(self): + async def get_total_build_time(self): """ Returns the total build time """ - res = self.db.get(""" - SELECT - SUM( - COALESCE(jobs.finished_at, CURRENT_TIMESTAMP) - - - jobs.started_at - ) AS t - FROM - jobs - WHERE - started_at IS NOT NULL""", + stmt = ( + sqlalchemy + .select( + sqlalchemy.func.sum( + sqlalchemy.func.coalesce( + jobs.Job.finished_at, + sqlalchemy.func.current_timestamp() + ) + - jobs.Job.started_at, + ).label("total_build_time") + ) + .where( + jobs.Job.started_at != None, + ) ) - return res.t or 0 + return await self.db.select_one(stmt, "total_build_time") - @property - def total_build_time_by_arch(self): + async def get_total_build_time_by_arch(self): """ Returns a dict with the total build times grouped by architecture """ - res = self.db.query(""" - SELECT - jobs.arch AS arch, - SUM( - COALESCE(jobs.finished_at, CURRENT_TIMESTAMP) - - - jobs.started_at - ) AS t - FROM - jobs - WHERE - started_at IS NOT NULL - GROUP BY - jobs.arch - ORDER BY - jobs.arch""", + stmt = ( + sqlalchemy + .select( + jobs.Job.arch, + + sqlalchemy.func.sum( + sqlalchemy.func.coalesce( + jobs.Job.finished_at, + sqlalchemy.func.current_timestamp() + ) + - jobs.Job.started_at, + ).label("total_build_time") + ) + .where( + jobs.Job.started_at != None, + ) + .group_by( + jobs.Job.arch, + ) + .order_by( + jobs.Job.arch, + ) ) - return { row.arch : row.t for row in res } + return { row.arch : row.total_build_time async for row in self.db.select(stmt) } class BuildersStats(base.Object): @@ -263,8 +259,8 @@ class BuildersStats(base.Object): ) -class Builder(base.DataObject): - table = "builders" +class Builder(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "builders" def __lt__(self, other): if isinstance(other, self.__class__): @@ -272,18 +268,41 @@ class Builder(base.DataObject): return NotImplemented - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, self.hostname) - def __str__(self): - return self.hostname + return self.name + + # ID + + id = Column(Integer, primary_key=True) # Description - def set_description(self, description): - self._set_attribute("description", description) + description = Column(Text, nullable=False, default="") + + # Created At + + created_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.func.current_timestamp()) - description = property(lambda s: s.data.description or "", set_description) + # Created By ID + + created_by_id = Column(Integer, ForeignKey("users.id"), nullable=False) + + # Created By + + created_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[created_by_id], lazy="selectin", + ) + + # Deleted 
By ID + + deleted_by_id = Column(Integer, ForeignKey("users.id")) + + # Deleted By + + deleted_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[deleted_by_id], lazy="selectin", + ) def is_online(self): """ @@ -305,13 +324,7 @@ class Builder(base.DataObject): # Maintenance - def get_maintenance(self): - return self.data.maintenance - - def set_maintenance(self, maintenance): - self._set_attribute("maintenance", maintenance) - - maintenance = property(get_maintenance, set_maintenance) + maintenance = Column(Boolean, nullable=False, default=False) # Stats @@ -444,10 +457,7 @@ class Builder(base.DataObject): # Enabled - def set_enabled(self, enabled): - self._set_attribute("enabled", enabled) - - enabled = property(lambda s: s.data.enabled, set_enabled) + enabled = Column(Boolean, nullable=False, default=False) # Permissions @@ -483,11 +493,6 @@ class Builder(base.DataObject): def can_build(self, job): return job.arch in self.supported_arches - def set_testmode(self, testmode): - self._set_attribute("testmode", testmode) - - testmode = property(lambda s: s.data.testmode, set_testmode) - # Jobs @property @@ -514,13 +519,7 @@ class Builder(base.DataObject): # Max Jobs - def get_max_jobs(self): - return self.data.max_jobs - - def set_max_jobs(self, value): - self._set_attribute("max_jobs", value) - - max_jobs = property(get_max_jobs, set_max_jobs) + max_jobs = Column(Integer, nullable=False, default=1) def is_full(self): """ @@ -528,53 +527,43 @@ class Builder(base.DataObject): """ return len(self.jobs) >= self.max_jobs - @property - def name(self): - return self.data.name + # Name - @property - def hostname(self): - return self.name + name = Column(Text, unique=True, nullable=False) - @property - def pakfire_version(self): - return self.data.pakfire_version or "" + # Pakfire Version - @property - def os_name(self): - return self.data.os_name or "" + pakfire_version = Column(Text, nullable=False, default="") - @property - def cpu_model(self): - return self.data.cpu_model or "" + # OS Name - @property - def cpu_count(self): - return self.data.cpu_count + os_name = Column(Text, nullable=False, default="") - @property - def cpu_arch(self): - return self.data.cpu_arch + # CPU Model - @property - def mem_total(self): - return self.data.mem_total + cpu_model = Column(Text, nullable=False, default="") - @property - def host_key_id(self): - return self.data.host_key_id + # CPU Count - # AWS + cpu_count = Column(Integer, nullable=False, default=1) - @property - def instance_id(self): - return self.data.instance_id + # CPU Arch - @property - def instance_type(self): - return self.data.instance_type + cpu_arch = Column(Text) - @lazy_property + # Mem Total + + mem_total = Column(BigInteger, nullable=False, default=0) + + # AWS - Instance ID + + instance_id = Column(Text) + + # AWS - Instance Type + + instance_type = Column(Text) + + @functools.cached_property def instance(self): if self.instance_id: return self.backend.aws.ec2.Instance(self.instance_id) @@ -737,21 +726,9 @@ class Builder(base.DataObject): # Otherwise return a neutral preference return 0 - # Delete - - def delete(self, user=None): - """ - Deletes this builder - """ - log.info("Deleted builder %s" % self) - - self._set_attribute_now("deleted_at") - if user: - self._set_attribute("deleted_by", user) - # Stats - @lazy_property + @functools.cached_property def total_build_time(self): res = self.db.get(""" SELECT diff --git a/src/buildservice/builds.py b/src/buildservice/builds.py index 964d151f..5775b102 100644 --- 
a/src/buildservice/builds.py +++ b/src/buildservice/builds.py @@ -2,12 +2,21 @@ import asyncio import datetime +import functools import itertools import logging import os import re +import sqlalchemy +from sqlalchemy import Column, ForeignKey, Index +from sqlalchemy import Boolean, DateTime, Integer, Text, UUID + from . import base +from . import builds +from . import database +from . import packages +from . import repository from . import users from .constants import * @@ -17,219 +26,191 @@ from .decorators import * log = logging.getLogger("pbs.builds") class Builds(base.Object): - def _get_builds(self, query, *args, **kwargs): - return self.db.fetch_many(Build, query, *args, **kwargs) - - async def _get_build(self, query, *args, **kwargs): - return await self.db.fetch_one(Build, query, *args, **kwargs) - - def __len__(self): - res = self.db.get(""" - SELECT - COUNT(*) as builds - FROM - builds - WHERE - deleted_at IS NULL - AND - test IS FALSE - """, - ) - - return res.builds - - async def get_by_id(self, id): - return await self._get_build(""" - SELECT - * - FROM - builds - WHERE - id = %s - """, id, - ) - async def get_by_uuid(self, uuid): - return await self._get_build(""" - SELECT - * - FROM - builds - WHERE - deleted_at IS NULL - AND - uuid = %s - """, uuid, + stmt = ( + sqlalchemy + .select(Build) + .where( + Build.deleted_at == None, + Build.uuid == uuid, + ) ) - def get_latest_by_name(self, name): + return await self.db.fetch_one(stmt) + + async def get_latest_by_name(self, name): """ Returns the latest build that matches the package name """ - return self._get_build(""" - SELECT - builds.* - FROM - builds - LEFT JOIN - packages ON builds.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - builds.test IS FALSE - AND - packages.name = %s - ORDER BY - builds.created_at DESC - LIMIT 1""", - name, + stmt = ( + sqlalchemy + .select(Build) + .where( + Build.deleted_at == None, + + # Don't include test builds + Build.test == False, + ) + + # Join packages and filter by package name + .join(packages.Package) + .where( + packages.Package.name == name, + ) + + # Pick the latest build + .order_by( + Build.created_at.desc(), + ) + .limit(1) ) - def get_by_name(self, name, limit=None): + return await self.db.fetch_one(stmt) + + async def get(self, name=None, user=None, distro=None, repo=None, scratch=None, + limit=None, offset=None): """ Returns all builds by this name """ - return self._get_builds(""" - SELECT - builds.* - FROM - builds - JOIN - packages ON builds.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - packages.deleted_at IS NULL - AND - packages.name = %s - ORDER BY - builds.created_at - """, name, - ) + packages = sqlalchemy.orm.aliased(Build.pkg) - def get_release_builds_by_name(self, name, limit=None): - return self._get_builds(""" - WITH builds AS ( - SELECT - builds.*, - - -- Number all builds per distribution so that we can filter out - -- the first N builds later - ROW_NUMBER() OVER ( - PARTITION BY packages.distro_id ORDER BY builds.created_at DESC - ) AS _number - FROM - builds - JOIN - packages ON builds.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - packages.deleted_at IS NULL - AND - packages.name = %s + stmt = ( + sqlalchemy + .select(Build) + .join(packages) + .where( + Build.deleted_at == None, + + # Always filter out any test builds + Build.test == False, ) - SELECT - * - FROM - builds - ORDER BY - _number - LIMIT - %s - """, name, limit, + # Order by creation date + .order_by( + Build.created_at.desc(), + ) + + # 
Limit & Offset + .limit(limit) + .offset(offset) ) - def get_scratch_builds_by_name(self, name, limit=None): - return self._get_builds(""" - WITH builds AS ( - SELECT - builds.*, - - -- Number all builds per user so that we can filter out - -- the first N builds later - ROW_NUMBER() OVER ( - PARTITION BY builds.owner_id ORDER BY builds.created_at DESC - ) AS _number - FROM - builds - JOIN - packages ON builds.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - builds.owner_id IS NOT NULL - AND - packages.deleted_at IS NULL - AND - packages.name = %s + # Scratch builds only? + if scratch is True: + stmt = stmt.where( + Build.owner != None, ) - SELECT - * - FROM - builds - ORDER BY - _number - LIMIT - %s - """, name, limit, - ) + # Exclude scratch builds? + elif scratch is False: + stmt = stmt.where( + Build.owner == None, + ) - def get_recent(self, name=None, limit=None, offset=None): - """ - Returns the most recent (non-test) builds - """ + # Optionally filter by name if name: - return self.get_recent_by_name(name, limit=limit, offset=offset) + stmt = stmt.where( + packages.c.deleted_at == None, + packages.c.name == name, + ) - return self._get_builds(""" - SELECT - * - FROM - builds - WHERE - deleted_at IS NULL - AND - test IS FALSE - ORDER BY - created_at DESC - LIMIT - %s - OFFSET - %s""", - limit, offset, + # Optionally filter by user + if user: + stmt = stmt.where( + Build.owner == user, + ) + + # Optionally filter by distro + if distro: + # XXX This cannot access distro + stmt = stmt.where( + packages.c.distro_id == distro.id, + ) + + # Optionally filter by repo + if repo: + stmt = ( + stmt + .join( + repository.RepoBuild, + repository.RepoBuild.build_id == Build.id, + ).where( + repository.RepoBuild.removed_at == None, + repository.RepoBuild.repo == repo, + ) + .order_by( + repository.RepoBuild.added_at.desc(), + ) + ) + + return await self.db.fetch_as_list(stmt) + + def get_release_builds_by_name(self, name, limit=None): + cte = ( + sqlalchemy.select( + Build, + + # Number all builds per distribution so that we can filter out + # the first N builds later + sqlalchemy.func.row_number() + .over( + partition_by = packages.Package.distro_id, + order_by = Build.created_at.desc(), + ) + .label("_number"), + ) + .join(Build.pkg) + .where( + Build.deleted_at == None, + Build.owner_id == None, + packages.Package.deleted_at == None, + packages.Package.name == name, + ) + .cte("_builds") ) - def get_recent_by_name(self, name, limit=None, offset=None): - """ - Returns the most recent (non-test) builds - """ - return self._get_builds(""" - SELECT - builds.* - FROM - builds - JOIN - packages ON builds.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - builds.test IS FALSE - AND - packages.deleted_at IS NULL - AND - packages.name = %s - ORDER BY - created_at DESC - LIMIT - %s - OFFSET - %s""", - name, limit, offset, + stmt = ( + sqlalchemy.select(Build) + .select_from(cte) + .order_by(cte.c._number) + .limit(limit) + ) + + return self.db.fetch(stmt) + + def get_scratch_builds_by_name(self, name, limit=None): + cte = ( + sqlalchemy.select( + Build, + + # Number all builds per distribution so that we can filter out + # the first N builds later + sqlalchemy.func.row_number() + .over( + partition_by = Build.owner_id, + order_by = Build.created_at.desc(), + ) + .label("_number"), + ) + .join(Build.pkg) + .where( + Build.deleted_at == None, + Build.owner_id != None, + packages.Package.deleted_at == None, + packages.Package.name == name, + ) + .cte("_builds") + ) + + stmt = ( 
+ sqlalchemy.select(Build) + .select_from(cte) + .order_by(cte.c._number) + .limit(limit) ) + return self.db.fetch(stmt) + def get_by_package_uuids(self, uuids): """ Returns a list of builds that contain the given packages @@ -281,37 +262,17 @@ class Builds(base.Object): if timeout is None: timeout = datetime.timedelta(hours=3) - build = await self._get_build(""" - INSERT INTO - builds - ( - build_repo_id, - pkg_id, - owner_id, - build_group_id, - test, - disable_test_builds - ) - VALUES - ( - %s, %s, %s, %s, %s, %s - ) - RETURNING *""", - repo, - package, - owner, - group, - test, - disable_test_builds, - - # Populate cache - package=package, group=group, owner=owner, repo=repo, + # Insert the build into the database + build = await self.db.insert( + Build, + build_repo = repo, + pkg = package, + owner = owner, + build_group = group, + test = test, + disable_test_builds = disable_test_builds, ) - # Update group cache - if group: - group.builds.append(build) - # Create all jobs await build._create_jobs(timeout=timeout) @@ -342,25 +303,36 @@ class Builds(base.Object): # Groups - @lazy_property - def groups(self): + async def get_group_by_uuid(self, uuid): """ - Build Groups + Fetch a group by its UUID """ - return Groups(self.backend) + stmt = ( + sqlalchemy + .select(BuildGroup) + .where( + BuildGroup.deleted_at == None, + BuildGroup.uuid == uuid, + ) + ) - # Comments + return await self.db.fetch_one(stmt) - @lazy_property - def comments(self): - return Comments(self.backend) + async def create_group(self, owner=None, tested_build=None): + """ + Creates a new Build Group + """ + group = await self.db.insert( + BuildGroup, + created_by = owner, + tested_build = tested_build, + ) + return group -class Build(base.DataObject): - table = "builds" - def __repr__(self): - return "<%s id=%s %s>" % (self.__class__.__name__, self.id, self.pkg) +class Build(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "builds" def __str__(self): return "%s %s" % (self.pkg.name, self.pkg.evr) @@ -371,6 +343,10 @@ class Build(base.DataObject): return NotImplemented + # ID + + id = Column(Integer, primary_key=True) + @property def url(self): return "/builds/%s" % self.uuid @@ -429,74 +405,74 @@ class Build(base.DataObject): # All repositories this build has been in have been changed await self._update_repos(build=True) - @property - def uuid(self): - """ - The UUID of this build. - """ - return self.data.uuid + # UUID - @lazy_property - def pkg(self): - """ - Get package that is to be built in the build. - """ - return self.backend.packages.get_by_id(self.data.pkg_id) + uuid = Column(UUID, unique=True, nullable=False) + + # Package ID + + pkg_id = Column(Integer, ForeignKey("packages.id"), nullable=False) + + # Package + + pkg = sqlalchemy.orm.relationship("Package", + foreign_keys=[pkg_id], back_populates="builds", lazy="selectin") @property def name(self): return "%s-%s" % (self.pkg.name, self.pkg.evr) - @property - def created_at(self): - return self.data.created_at + # Created At + + created_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.func.current_timestamp()) + + # Date @property - def finished_at(self): - return self.data.finished_at + def date(self): + return self.created_at.date() + + # Finished At + + finished_at = Column(DateTime(timezone=False)) + + # Owner ID + + owner_id = Column(Integer, ForeignKey("users.id")) # Owner - def get_owner(self): - """ - The owner of this build. 
- """ - if self.data.owner_id: - return self.backend.users.get_by_id(self.data.owner_id) + owner = sqlalchemy.orm.relationship("User", foreign_keys=[owner_id], lazy="selectin") - async def set_owner(self, owner): - await self._set_attribute("owner_id", owner) + # Build Repo ID - # Build Repository + build_repo_id = Column(Integer, ForeignKey("repositories.id"), nullable=False) - @lazy_property - def build_repo(self): - """ - Returns the repository this build is being built against - """ - return self.backend.repos.get_by_id(self.data.build_repo_id) + # Build Repo - @lazy_property + build_repo = sqlalchemy.orm.relationship("Repo", lazy="selectin") + + # Distro + + @functools.cached_property def distro(self): return self.build_repo.distro - # Group + # Group ID - @lazy_property - def group(self): - if self.data.build_group_id: - return self.backend.builds.groups.get_by_id(self.data.build_group_id) + build_group_id = Column(Integer, ForeignKey("build_groups.id")) + + # Group - def is_broken(self): - return self.state == "broken" + group = sqlalchemy.orm.relationship("BuildGroup", back_populates="builds", + foreign_keys=[build_group_id], lazy="selectin") # Severity - def get_severity(self): - return self.data.severity + severity = Column(Text) - async def set_severity(self, severity): - await self._set_attribute("severity", severity) + # Commit @lazy_property def commit(self): @@ -541,15 +517,11 @@ class Build(base.DataObject): return message - def get_priority(self): - return self.data.priority - - def set_priority(self, priority): - assert priority in (-2, -1, 0, 1, 2) + # Priority - self._set_attribute("priority", priority) + priority = Column(Integer, nullable=False, default=0) - priority = property(get_priority, set_priority) + # Arches @property def arches(self): @@ -567,31 +539,22 @@ class Build(base.DataObject): # Otherwise we return all supported arches return [arch for arch in self.distro.arches if arch in self.pkg.build_arches] - # Jobs + # Jobs - This fetches all jobs that have ever existed for this build - async def _get_jobs(self, *args, **kwargs): - return self.backend.jobs._get_jobs(*args, build=self, **kwargs) + alljobs = sqlalchemy.orm.relationship("Job", back_populates="build", lazy="selectin") @property def jobs(self): """ Returns the current set of jobs """ - for job in self._jobs: + for job in self.alljobs: # Skip any superseeded jobs if job.is_superseeded(): continue yield job - @lazy_property - def _jobs(self): - """ - Get a list of all build jobs that are in this build. 
- """ - return self._get_jobs("SELECT * FROM jobs \ - WHERE build_id = %s", self.id) - def _create_jobs(self, **kwargs): """ Called after a build has been created and creates all jobs @@ -625,133 +588,98 @@ class Build(base.DataObject): Submits a comment """ # Create a new comment - comment = await self.backend.builds.comments.create(self, *args, **kwargs) + comment = await self.db.insert( + BuildComment, *args, **kwargs, + ) - # Add to cache - self.comments.append(comment) + # Notify + await comment.notify() - return comment + comments = sqlalchemy.orm.relationship("BuildComment", back_populates="build") + # XXX filter out deleted and order - @lazy_property - def comments(self): - """ - Comments on this build - """ - comments = self.backend.builds.comments._get_comments(""" - SELECT - * - FROM - build_comments - WHERE - deleted IS FALSE - AND - build_id = %s - ORDER BY - created_at - """, self.id, - ) + # Deleted By ID - return list(comments) + deleted_by_id = Column(Integer, ForeignKey("users.id")) - # Points + # Deleted By + + deleted_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[deleted_by_id], lazy="selectin", + ) + + # Add Points async def add_points(self, points, user=None): """ Add points (can be negative) """ # Log points - await self.db.execute(""" - INSERT INTO - build_points - ( - build_id, - points, - user_id - ) - VALUES - ( - %s, %s, %s - ) - """, self.id, points, user, + points = await self.db.insert( + BuildPoints, build=self, points=points, user=user, ) # Update the cache - await self._set_attribute("points", self.points + points) + self.points += points - @property - def points(self): - """ - Return the cached points - """ - return self.data.points + + # Points + + points = Column(Integer, nullable=False, default=0) ## Watchers - @lazy_property - async def watchers(self): - users = await self.backend.users._get_users(""" - SELECT - users.* - FROM - build_watchers - LEFT JOIN - users ON build_watchers.user_id = users.id - WHERE - users.deleted_at IS NULL - AND - build_watchers.build_id = %s - AND - build_watchers.deleted_at IS NULL - """, self.id, + async def get_watchers(self): + stmt = ( + sqlalchemy + + # Select all build watchers + .select(BuildWatcher) + .where( + BuildWatcher.deleted_at == None, + BuildWatcher.build == self, + ) ) - return set(users) + return await self.db.fetch_as_set(stmt) - async def add_watcher(self, user): + async def watched_by(self, user): """ - Adds a watcher to this build + Checks if this build is being watched by user. + + Returns the BuildWatcher object. 
""" - await self.db.execute(""" - INSERT INTO - build_watchers( - build_id, - user_id - ) - VALUES( - %s, %s + stmt = ( + sqlalchemy + .select(BuildWatcher) + .where( + BuildWatcher.build == self, + BuildWatcher.user == user, + BuildWatcher.deleted_at == None, ) - ON CONFLICT - (build_id, user_id) WHERE deleted_at IS NULL - DO NOTHING - """, self.id, user, ) - # Add to cache - self.watchers.add(user) + return await self.db.fetch_one(stmt) + + async def add_watcher(self, user): + """ + Adds a watcher to this build + """ + return await self.db.insert( + BuildWatcher, + build = self, + user = user, + ) async def remove_watcher(self, user): """ Removes a watcher from this build """ - await self.db.execute(""" - UPDATE - build_watchers - SET - deleted_at = CURRENT_TIMESTAMP - WHERE - build_id = %s - AND - user_id = %s - AND - deleted_at IS NULL - """, self.id, user, - ) + watcher = await self.watched_by(user) - # Remove from cache - try: - self.watchers.remove(user) - except KeyError: - pass + # If we have found a watcher, we will delete it + if watcher: + await watcher.delete() async def _add_watchers(self): """ @@ -780,11 +708,11 @@ class Build(base.DataObject): log.error("Build %s has failed" % self) # Mark as finished - await self._set_attribute_now("finished_at") + self.finished_at = sqlalchemy.func.current_timestamp() # Mark as failed if the build was not successful if not success: - await self._set_attribute("failed", True) + self.failed = True # Award some negative points on failure if not success: @@ -811,6 +739,8 @@ class Build(base.DataObject): return builds + # Finished? + def has_finished(self): """ Returns True if this build has finished @@ -820,17 +750,25 @@ class Build(base.DataObject): return False + # Failed + + failed = Column(Boolean, nullable=False, default=False) + + # Failed? + def has_failed(self): """ Returns True if this build has failed """ - return self.has_finished() and self.data.failed + return self.has_finished() and self.failed is False + + # Successful? 
def is_successful(self): """ Returns True if this build was successful """ - return self.has_finished() and not self.data.failed + return self.has_finished() and self.failed is True async def _send_email(self, *args, exclude=None, **kwargs): """ @@ -844,28 +782,17 @@ class Build(base.DataObject): # Send an email to the user await user.send_email(*args, build=self, **kwargs) - # Repositories - - @lazy_property - async def repos(self): - """ - Return a list of all repositories this package is in - """ - repos = await self.backend.repos._get_repositories(""" - SELECT - repositories.* - FROM - repository_builds - LEFT JOIN - repositories ON repository_builds.repo_id = repositories.id - WHERE - repository_builds.build_id = %s - AND - repository_builds.removed_at IS NULL - """, self.id, - ) + # Repos - return list(repos) + repos = sqlalchemy.orm.relationship( + "Repo", + secondary = "repository_builds", + primaryjoin = """and_( + RepoBuild.build_id == Build.id, + RepoBuild.removed_at == None + )""", + lazy = "selectin", + ) async def _update_repos(self, build=False): """ @@ -891,6 +818,9 @@ class Build(base.DataObject): if self.owner: return False + # XXX Disabled because of the next_repo bullshit + return False + # This can only be approved if there is another repository if self.next_repo: return True @@ -1086,9 +1016,11 @@ class Build(base.DataObject): if build: self.deprecating_build = build - @property - def deprecated_at(self): - return self.data.deprecated_at + # Deprecated At + + deprecated_at = Column(DateTime(timezone=False)) + + # Deprecated? def is_deprecated(self): if self.deprecated_at: @@ -1101,14 +1033,23 @@ class Build(base.DataObject): if self.data.deprecated_by: return await self.backend.users.get_by_id(self.data.deprecated_by) - # Deprecating Build + # Deprecated By ID + + deprecated_by_id = Column(Integer, ForeignKey("users.id")) + + # Deprecated By - async def get_deprecating_build(self): - if self.data.deprecating_build_id: - return await self.backend.builds.get_by_id(self.data.deprecating_build_id) + deprecated_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[deprecated_by_id], lazy="selectin", + ) - async def set_deprecating_build(self, build): - await self._set_attribute("deprecating_build_id", build) + # Deprecating Build ID + + deprecating_build_id = Column(Integer, ForeignKey("builds.id")) + + deprecating_build = sqlalchemy.orm.relationship( + "Build", foreign_keys=[deprecating_build_id], + ) @lazy_property async def deprecated_builds(self): @@ -1149,43 +1090,17 @@ class Build(base.DataObject): # Tests Builds - def is_test(self): - if self.data.test: - return True + test = Column(Boolean, nullable=False, default=False) - return False + # Test? 
- @lazy_property - async def test_build_for(self): - return await self.backend.builds._get_build(""" - WITH build_test_builds AS ( - SELECT - builds.id AS build_id, - test_builds.id AS test_build_id - FROM - builds - JOIN - build_groups ON builds.test_group_id = build_groups.id - JOIN - builds test_builds ON test_builds.build_group_id = build_groups.id - WHERE - builds.deleted_at IS NULL - AND - builds.test IS FALSE - AND - build_groups.deleted_at IS NULL - ) + def is_test(self): + return self.test - SELECT - builds.* - FROM - build_test_builds test_builds - JOIN - builds ON test_builds.build_id = builds.id - WHERE - test_builds.test_build_id = %s - """, self.id, - ) + @functools.cached_property + def test_build_for(self): + if self.group: + return self.group.tested_build @property def disable_test_builds(self): @@ -1241,18 +1156,6 @@ class Build(base.DataObject): # Return the group return self.test_builds - def get_test_builds(self): - """ - Returns all test builds - """ - if self.data.test_group_id: - return self.backend.builds.groups.get_by_id(self.data.test_group_id) - - def set_test_builds(self, group): - self._set_attribute("test_group_id", group) - - test_builds = lazy_property(get_test_builds, set_test_builds) - async def _test_builds_finished(self, success): """ Called when all test builds have finished @@ -1265,114 +1168,62 @@ class Build(base.DataObject): await self._send_email("builds/messages/test-builds-failed.txt", build=self, test_builds=self.test_builds) + # Test Group ID -class Groups(base.Object): - """ - Build Groups are simple objects that group multiple builds together - """ - def _get_groups(self, query, *args): - res = self.db.query(query, *args) + test_group_id = Column(Integer, ForeignKey("build_groups.id")) - for row in res: - yield Group(self.backend, row.id, data=row) + # Test Group - def _get_group(self, query, *args): - res = self.db.get(query, *args) + test_group = sqlalchemy.orm.relationship( + "BuildGroup", foreign_keys=[test_group_id], lazy="selectin", + ) - if res: - return Group(self.backend, res.id, data=res) - def get_by_id(self, id): - return self._get_group(""" - SELECT - * - FROM - build_groups - WHERE - id = %s - """, id, - ) +class BuildGroup(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "build_groups" - def get_by_uuid(self, uuid): - return self._get_group(""" - SELECT - * - FROM - build_groups - WHERE - deleted_at IS NULL - AND - uuid = %s - """, uuid, - ) + def __str__(self): + return "%s" % self.uuid - def create(self, owner=None, tested_build=None): + def __iter__(self): """ - Creates a new Build Group + Returns an iterator that sorts the builds """ - return self._get_group(""" - INSERT INTO - build_groups - ( - created_by, - tested_build_id - ) - VALUES( - %s, %s - ) - RETURNING - * - """, owner, tested_build, - ) - + builds = sorted(self.builds, key=self._sort_builds) -class Group(base.DataObject): - table = "build_groups" - - def __str__(self): - return "%s" % self.uuid + return iter(builds) - def __iter__(self): - return iter(self.builds) + def __bool__(self): + return True def __len__(self): return len(self.builds) + # ID + + id = Column(Integer, primary_key=True) + # UUID - @property - def uuid(self): - return self.data.uuid + uuid = Column(UUID, nullable=False, server_default=sqlalchemy.func.gen_random_uuid()) # Created At - @property - def created_at(self): - return self.data.created_at + created_at = Column(DateTime(timezone=False), nullable=False, + 
server_default=sqlalchemy.func.current_timestamp()) # Finished At - @property - def finished_at(self): - return self.data.finished_at + finished_at = Column(DateTime(timezone=False), nullable=False) - # Builds + # Failed - @lazy_property - def builds(self): - builds = self.backend.builds._get_builds(""" - SELECT - * - FROM - builds - WHERE - deleted_at IS NULL - AND - build_group_id = %s - """, self.id, - ) + failed = Column(Boolean, nullable=False, default=False) - return sorted(builds, key=self._sort_builds) + # Builds + + builds = sqlalchemy.orm.relationship("Build", back_populates="group", + foreign_keys=[Build.build_group_id], lazy="selectin") @staticmethod def _sort_builds(build): @@ -1402,18 +1253,22 @@ class Group(base.DataObject): """ return [b for b in self.builds if b.has_failed()] + # Tested Build ID + + tested_build_id = Column(Integer, ForeignKey("builds.id")) + # Tested Build - @lazy_property - def tested_build(self): - if self.data.tested_build_id: - return self.backend.builds.get_by_id(self.data.tested_build_id) + tested_build = sqlalchemy.orm.relationship("Build", + foreign_keys=[tested_build_id], lazy="selectin") + + # Test? def is_test(self): """ Returns True if this is a test group """ - if self.data.tested_build_id: + if self.tested_build: return True return False @@ -1430,10 +1285,6 @@ class Group(base.DataObject): if user: self._set_attribute("deleted_by", user) - @property - def deleted_at(self): - return self.data.deleted_at - # Functions to find out whether this was all successful/failed def has_failed(self): @@ -1452,7 +1303,7 @@ class Group(base.DataObject): """ Returns True if all builds have finished """ - if self.data.finished_at: + if self.finished_at: return True return False @@ -1486,7 +1337,7 @@ class Group(base.DataObject): log.info("Build group %s has finished" % self) # Mark as finished - self._set_attribute_now("finished_at") + self.finished_at = sqlalchemy.func.current_timestamp() # Call the build that has created this test group if self.tested_build: @@ -1503,61 +1354,91 @@ class Group(base.DataObject): log.error("Build group %s has failed" % self) # Mark as failed - self._set_attribute("failed", True) + self.failed = True -class Comments(base.Object): - def _get_comments(self, *args, **kwargs): - return self.db.fetch_many(Comment, *args, **kwargs) +class BuildBug(database.Base, database.BackendMixin): + __tablename__ = "build_bugs" - def _get_comment(self, *args, **kwargs): - return self.db.fetch_one(Comment, *args, **kwargs) + # ID - async def get_by_id(self, id): - return await self._get_comment(""" - SELECT - * - FROM - build_comments - WHERE - id = %s - """, id, - ) + id = Column(Integer, primary_key=True) - async def create(self, build, user, text=None): - comment = self._get_comment(""" - INSERT INTO - build_comments( - build_id, user_id, text - ) - VALUES( - %s, %s, %s - ) - RETURNING - * - """, build, user, text or "" - ) + # Build ID - # Notify people about this new comment - await comment.notify() + build_id = Column(Integer, ForeignKey("builds.id"), index=True, nullable=False) - return comment + # Build + build = sqlalchemy.orm.relationship("Build") -class Comment(base.DataObject): - table = "build_comments" + # Bug ID - @lazy_property - def build(self): - return self.backend.builds.get_by_id(self.data.build_id) + bug_id = Column(Integer, nullable=False) - @lazy_property - def user(self): - return self.backend.users.get_by_id(self.data.user_id) + # Added At - @property - def text(self): - return self.data.text + added_at = 
Column(DateTime(timezone=False), nullable=False, + default=sqlalchemy.func.current_timestamp()) + + # Added By ID + + added_by_id = Column(Integer, ForeignKey("users.id"), nullable=False) + + # Added By + + added_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[added_by_id], lazy="selectin", + ) + + # Removed At + + removed_at = Column(DateTime(timezone=False)) + + # Removed By ID + + removed_by_id = Column(Integer, ForeignKey("users.id")) + + # Removed ID + + removed_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[removed_by_id], lazy="selectin", + ) + + +class BuildComment(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "build_comments" + + # ID + + id = Column(Integer, primary_key=True) + + # Build ID + + build_id = Column(Integer, ForeignKey("builds.id"), index=True, nullable=False) + + # Build + + build = sqlalchemy.orm.relationship("Build", back_populates="comments") + + # User ID + + user_id = Column(Integer, ForeignKey("users.id"), index=True, nullable=False) + + # User + + user = sqlalchemy.orm.relationship("User") + + # Text + + text = Column(Text, nullable=False, default="") + + # Created At + + created_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.func.current_timestamp()) + + # Notify! async def notify(self): """ @@ -1565,3 +1446,63 @@ class Comment(base.DataObject): """ await self.build._send_email("builds/messages/comment.txt", exclude=[self.user], build=self.build, comment=self) + + +class BuildPoint(database.Base, database.BackendMixin): + __tablename__ = "build_points" + + # Build ID + + build_id = Column(Integer, ForeignKey("builds.id"), primary_key=True, nullable=False) + + # Build + + build = sqlalchemy.orm.relationship("Build") + + # Created At + + created_at = Column(DateTime(timezone=False), primary_key=True, + nullable=False, server_default=sqlalchemy.func.current_timestamp()) + + # Points + + points = Column(Integer, nullable=False) + + # User ID + + user_id = Column(Integer, ForeignKey("users.id")) + + # User + + user = sqlalchemy.orm.relationship("User") + + +class BuildWatcher(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "build_watchers" + + def __lt__(self, other): + if isinstance(other, self.__class__): + return self.added_at < other.added_at or self.user < other.user + + return NotImplemented + + # Build ID + + build_id = Column(Integer, ForeignKey("builds.id"), primary_key=True, nullable=False) + + # Build + + build = sqlalchemy.orm.relationship("Build", lazy="selectin") + + # User ID + + user_id = Column(Integer, ForeignKey("users.id"), primary_key=True, nullable=False) + + # User + + user = sqlalchemy.orm.relationship("User", lazy="joined") + + # Added At + + added_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.func.current_timestamp()) diff --git a/src/buildservice/database.py b/src/buildservice/database.py index 3feaa0cd..353dc9a1 100644 --- a/src/buildservice/database.py +++ b/src/buildservice/database.py @@ -1,220 +1,450 @@ -#!/usr/bin/python - -""" - A lightweight wrapper around psycopg2. - - Originally part of the Tornado framework. The tornado.database module - is slated for removal in Tornado 3.0, and it is now available separately - as torndb. 
-""" - +############################################################################### +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +############################################################################### + +import alembic.migration import asyncio +import functools import logging -import psycopg -import psycopg_pool +import queue import time +import sqlalchemy +import sqlalchemy.ext.asyncio +from sqlalchemy import Column, DateTime + from . import base # Setup logging log = logging.getLogger("pbs.database") -class Connection(object): - """ - A lightweight wrapper around MySQLdb DB-API connections. - The main value we provide is wrapping rows in a dict/object so that - columns can be accessed by name. Typical usage:: +@sqlalchemy.event.listens_for(sqlalchemy.Engine, "before_cursor_execute") +def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): + now = time.time() + + # Create a queue to store start times + try: + q = conn.info["query_start_time"] + except KeyError: + q = conn.info["query_start_time"] = queue.LifoQueue() + + # Push the start time of the query + q.put(now) + + # Log the statement + #log.debug("Start Query: %s %r", statement, parameters) + + +@sqlalchemy.event.listens_for(sqlalchemy.Engine, "after_cursor_execute") +def after_cursor_execute(conn, cursor, statement, parameters, context, executemany): + time_end = time.time() + + # Fetch the latest start time + time_start = conn.info["query_start_time"].get() - db = torndb.Connection("localhost", "mydatabase") - for article in db.query("SELECT * FROM articles"): - print article.title + # Compute the total time + t = time_end - time_start - Cursors are hidden by the implementation, but other than that, the methods - are very similar to the DB-API. + # Log the total runtime + log.debug("Query completed in %.02fms", t * 1000) - We explicitly set the timezone to UTC and the character encoding to - UTF-8 on all connections to avoid time zone and encoding errors. + +class Base(sqlalchemy.ext.asyncio.AsyncAttrs, sqlalchemy.orm.DeclarativeBase): + """ + This is the declarative base for this application + """ + pass + + +class Connection(object): + """ + This is a convenience wrapper around SQLAlchemy. 
""" def __init__(self, backend, host, database, user=None, password=None): self.backend = backend - # Stores connections assigned to tasks - self.__connections = {} + # Make the URL + self.url = "postgresql+asyncpg://%s:%s@%s/%s" % (user, password, host, database) - # Create a connection pool - self.pool = psycopg_pool.AsyncConnectionPool( - "postgresql://%s:%s@%s/%s" % (user, password, host, database), + # Create the engine + self.engine = sqlalchemy.ext.asyncio.create_async_engine( + self.url, - # Callback to configure any new connections - configure=self.__configure, + # Use our own logger + logging_name=log.name, - # Set limits for min/max connections in the pool - min_size=4, - max_size=32, + # Be more verbose + echo=True, #echo_pool="debug", - # Give clients up to one minute to retrieve a connection - timeout=60, + # Increase the pool size + pool_size=128, + ) - # Close connections after they have been idle for a few minutes - max_idle=120, + # Create a session maker + self.sessionmaker = sqlalchemy.orm.sessionmaker( + self.engine, + expire_on_commit = False, + class_ = sqlalchemy.ext.asyncio.AsyncSession, + info = { + "backend" : self.backend, + }, ) - async def __configure(self, conn): + # Stores sessions assigned to tasks + self.__sessions = {} + + async def check_schema(self): """ - Configures any newly opened connections + This method checks if the current database schema matches with this application """ - # Enable autocommit - await conn.set_autocommit(True) + # Run a schema check + async with self.engine.connect() as c: + return await c.run_sync(self._check_schema) + + def _check_schema(self, engine): + # Create a new context + context = alembic.migration.MigrationContext.configure(engine) + + # Compare the metadata + diff = alembic.autogenerate.compare_metadata(context, Base.metadata) - # Return any rows as dicts - conn.row_factory = psycopg.rows.dict_row + # If we have a difference, lets log it + if diff: + log.warning("The database schema does not match:") - # Automatically convert DataObjects - conn.adapters.register_dumper(base.DataObject, base.DataObjectDumper) + # Product migrations + migrations = alembic.autogenerate.produce_migrations(context, Base.metadata) - async def connection(self, *args, **kwargs): + # Show the differences + for op in migrations.upgrade_ops.ops: + self._show_op(op) + + else: + log.debug("Database schema matches") + + def _show_op(self, op): + if isinstance(op, alembic.operations.ops.DropTableOp): + log.warning("Unknown table '%s'" % op.table_name) + + elif isinstance(op, alembic.operations.ops.ModifyTableOps): + log.warning("Table %s:" % op.table_name) + + # Show all sub-operations + for _op in op.ops: + self._show_op(_op) + + elif isinstance(op, alembic.operations.ops.AddColumnOp): + args = [] + + # Is Nullable? 
+ if not op.column.nullable: + args.append("NOT NULL") + + log.warning(" Missing column %s (%s)%s %s" % + (op.column.name, op.column.type, ":" if args else "", " ".join(args))) + + elif isinstance(op, alembic.operations.ops.AlterColumnOp): + log.warning(" Incorrect column: %s" % op.column_name) + + if not op.modify_type is False and not op.existing_type == op.modify_type: + log.warning(" Type %s -> %s" % (op.existing_type, op.modify_type)) + + if not op.modify_nullable is False and not op.existing_nullable == op.modify_nullable: + log.warning(" %s -> %s" % ( + "NULL" if op.existing_nullable else "NOT NULL", + "NULL" if op.modify_nullable else "NOT NULL", + )) + + if not op.modify_comment is False and not op.existing_comment == op.modify_comment: + log.warning(" Comment: %s -> %s" % (op.existing_comment, op.modify_comment)) + + elif isinstance(op, alembic.operations.ops.CreateIndexOp): + log.warning(" Missing index: %s" % op.index_name) + + elif isinstance(op, alembic.operations.ops.CreateForeignKeyOp): + log.warning(" Missing foreign key %s : %s -> %s(%s)" % + (op.constraint_name or "N/A", op.local_cols, op.referent_table, op.remote_cols)) + + elif isinstance(op, alembic.operations.ops.CreateUniqueConstraintOp): + constraint = op.to_constraint() + + log.warning(" Missing unique constraint %s" % constraint.name) + + elif isinstance(op, alembic.operations.ops.DropColumnOp): + log.warning(" Unknown column: %s" % op.column_name) + + elif isinstance(op, alembic.operations.ops.DropConstraintOp): + log.warning(" Unknown constraint: %s" % op.constraint_name) + + elif isinstance(op, alembic.operations.ops.DropIndexOp): + log.warning(" Unknown index: %s" % op.index_name) + + else: + raise NotImplementedError( + "Unknown migration operation: %s" % op, + ) + + async def session(self): """ - Returns a connection from the pool + Returns a session from the engine """ # Fetch the current task task = asyncio.current_task() assert task, "Could not determine task" - # Try returning the same connection to the same task + # Try returning the same session to the same task try: - return self.__connections[task] + return self.__sessions[task] except KeyError: pass - # Fetch a new connection from the pool - conn = self.__connections[task] = await self.pool.getconn(*args, **kwargs) + # Fetch a new session from the engine + session = self.__sessions[task] = self.sessionmaker() - log.debug("Assigning database connection %s to %s" % (conn, task)) + log.debug("Assigning database session %s to %s" % (session, task)) # When the task finishes, release the connection - task.add_done_callback(self.release_connection) + task.add_done_callback(self.release_session) - return conn + return session - def release_connection(self, task): + def release_session(self, task): """ - Called when a task that requested a connection has finished. + Called when a task that requested a session has finished. - This method will schedule that the connection is being returned into the pool. + This method will schedule that the session is being closed. 
""" - self.backend.run_task(self.__release_connection, task) + self.backend.run_task(self.__release_session, task) - async def __release_connection(self, task): - # Retrieve the connection + async def __release_session(self, task): + # Retrieve the session try: - conn = self.__connections[task] + session = self.__sessions[task] except KeyError: return - log.debug("Releasing database connection %s of %s" % (conn, task)) + # If there was no exception, we can commit + if not task.exception(): + await session.commit() + + log.debug("Releasing database session %s of %s" % (session, task)) # Delete it - del self.__connections[task] + del self.__sessions[task] - # Return the connection back into the pool - await self.pool.putconn(conn) + # Close the session + await session.close() - async def _execute(self, cursor, execute, query, parameters): - # Store the time we started this query - t = time.monotonic() + async def transaction(self): + """ + Opens a new transaction + """ + # Fetch our session + session = await self.session() - try: - log.debug("Running SQL query %s" % (query % parameters)) - except Exception: - pass + # If we are already in a transaction, begin a nested one + if session.in_transaction(): + return session.begin_nested() - # Execute the query - await execute(query, parameters) + # Otherwise begin the first transaction of the session + return session.begin() - # How long did this take? - elapsed = time.monotonic() - t + async def insert(self, cls, **kwargs): + """ + Inserts a new object into the database + """ + # Fetch our session + session = await self.session() - # Log the query time - log.debug(" Query time: %.2fms" % (elapsed * 1000)) + # Create a new object + object = cls(**kwargs) - async def query(self, query, *parameters, **kwparameters): + # Add it to the database + session.add(object) + + # Return the object + return object + + async def insert_many(self, cls, *args): """ - Returns a row list for the given query and parameters. + Inserts many new objects into the database """ - conn = await self.connection() + # Fetch our session + session = await self.session() + + # Create the new objects + objects = [ + cls(**kwargs) for kwargs in args + ] - async with conn.cursor() as cursor: - await self._execute(cursor, cursor.execute, query, parameters or kwparameters) + # Add them to the database + session.add_all(objects) - async for row in cursor: - yield Row(row) + # Return the objects + return objects - async def get(self, query, *parameters, **kwparameters): + async def get(self, object, pkey, **kwargs): """ - Returns the first row returned for the given query. + Fetches an object by its primary key """ - rows = [] + # Fetch our session + session = await self.session() - async for row in self.query(query, *parameters, **kwparameters): - rows.append(row) + # Return the object + return await session.get(object, pkey, **kwargs) - if len(rows) > 1: - raise Exception("Multiple rows returned for Database.get() query") + async def delete(self, object): + """ + Marks an object as deleted + """ + # Fetch our session + session = await self.session() - return rows[0] if rows else None + # Mark it as deleted + await session.delete(object) - async def execute(self, query, *parameters, **kwparameters): + async def _execute(self, stmt): """ - Executes the given query. 
+ Executes a statement and returns a result object """ - conn = await self.connection() + # Fetch our session + session = await self.session() - async with conn.cursor() as cursor: - await self._execute(cursor, cursor.execute, query, parameters or kwparameters) + # Execute the statement + result = await session.execute(stmt) - async def executemany(self, query, parameters): + # Apply unique filtering + #result = result.unique() + + return result + + async def fetch(self, stmt, batch_size=128): """ - Executes the given query against all the given param sequences. + Fetches objects of the given type """ - conn = await self.connection() + result = await self._execute(stmt) - async with conn.cursor() as cursor: - await self._execute(cursor, cursor.executemany, query, parameters) + # Process the result in batches + if batch_size: + result = result.yield_per(batch_size) - async def transaction(self): + # Return all objects + for object in result.scalars(): + yield object + + async def fetch_one(self, stmt): + result = await self._execute(stmt) + + # Return exactly one object or none, but fail otherwise + return result.scalar_one_or_none() + + async def fetch_as_list(self, *args, **kwargs): """ - Creates a new transaction on the current tasks' connection + Fetches objects and returns them as a list instead of an iterator """ - conn = await self.connection() + objects = self.fetch(*args, **kwargs) - return conn.transaction() + # Return as list + return [o async for o in objects] - async def fetch_one(self, cls, query, *args, **kwargs): + async def fetch_as_set(self, *args, **kwargs): """ - Takes a class and a query and will return one object of that class + Fetches objects and returns them as a set instead of an iterator """ - # Execute the query - res = await self.get(query, *args) + objects = self.fetch(*args, **kwargs) - # Return an object (if possible) - if res: - return cls(self.backend, data=res, **kwargs) + # Return as set + return set([o async for o in objects]) - async def fetch_many(self, cls, query, *args, **kwargs): - # Execute the query - res = self.query(query, *args) + async def select(self, stmt): + """ + Returns the raw result after a SELECT statement + """ + result = await self._execute(stmt) - # Return a generator with objects - async for row in res: - yield cls(self.backend, data=row, **kwargs) + # Process mappings + result = result.mappings() + for object in result.fetchall(): + yield object -class Row(dict): - """A dict that allows for object-like property access syntax.""" - def __getattr__(self, name): - try: - return self[name] - except KeyError: - raise AttributeError(name) + async def select_one(self, stmt, attr=None): + """ + Returns exactly one row + """ + result = await self._execute(stmt) + + # Process mappings + result = result.mappings() + + # Extract exactly one result (or none) + result = result.one_or_none() + + # Return if we have no result + if result is None: + return + + # Return the whole result if no attribute was requested + elif attr is None: + return result + + # Otherwise return the attribute only + return getattr(result, attr) + + +class BackendMixin: + @functools.cached_property + def backend(self): + # Fetch the session + session = sqlalchemy.orm.object_session(self) + + # Return the backend + return session.info.get("backend") + + @functools.cached_property + def db(self): + return self.backend.db + + +class SoftDeleteMixin: + """ + A mixin that will automatically add a deleted_at column with a timestamp + of when an object has been deleted. 
+ """ + + #@sqlalchemy.ext.declarative.declared_attr + #def deleted_column(cls): + # return "deleted_at" + + #@sqlalchemy.ext.declarative.declared_attr + #def deleted_at(cls): + + deleted_at = Column(DateTime(timezone=False), nullable=True) + + def delete(self, user=None): + """ + Called to delete this object + """ + setattr(self, "deleted_at", sqlalchemy.func.current_timestamp()) + + # Optionally set deleted_by + if user: + setattr(self, "deleted_by", user) diff --git a/src/buildservice/distribution.py b/src/buildservice/distribution.py index 6037b703..591f03e4 100644 --- a/src/buildservice/distribution.py +++ b/src/buildservice/distribution.py @@ -3,103 +3,202 @@ import datetime import logging +import sqlalchemy +from sqlalchemy import Column, Computed, ForeignKey +from sqlalchemy import ARRAY, Boolean, DateTime, Integer, Text + from . import base +from . import database from . import misc +from . import repository +from . import sources from .decorators import * # Setup logging log = logging.getLogger("pbs.distros") -class Distributions(base.Object): - def _get_distributions(self, query, *args): - return self.db.fetch_many(Distribution, query, *args) +class Release(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "releases" - async def _get_distribution(self, query, *args): - return await self.db.fetch_one(Distribution, query, *args) + def __str__(self): + return self.name - def __aiter__(self): - distros = self._get_distributions(""" - SELECT - * - FROM - distributions - WHERE - deleted IS FALSE - ORDER BY - name - """, - ) + def has_perm(self, *args, **kwargs): + # Inherit all permissions from the distribution + return self.distro.has_perm(*args, **kwargs) - return aiter(distros) + # ID - async def get_by_id(self, distro_id): - return await self._get_distribution(""" - SELECT - * - FROM - distributions - WHERE - id = %s - """, distro_id, - ) + id = Column(Integer, primary_key=True) - async def get_by_slug(self, slug): - return await self._get_distribution(""" + # Distro + + distro_id = Column(Integer, ForeignKey("distributions.id"), nullable=False) + + distro = sqlalchemy.orm.relationship("Distro", back_populates="releases", lazy="selectin") + + # Name + + name = Column(Text, nullable=False) + + # Slug + + slug = Column(Text, unique=True, nullable=False) + + # Created At + + created_at = Column( + DateTime(timezone=False), nullable=False, server_default=sqlalchemy.func.current_timestamp(), + ) + + # Created By ID + + created_by_id = Column(Integer, ForeignKey("users.id"), nullable=False) + + # Created By + + created_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[created_by_id], lazy="selectin", + ) + + # Deleted By ID + + deleted_by_id = Column(Integer, ForeignKey("users.id")) + + # Deleted By + + deleted_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[deleted_by_id], lazy="selectin", + ) + + # Stable? 
+ + stable = Column(Boolean, nullable=False) + + # Announcement + + announcement = Column(Text, nullable=False) + + # URL + + @property + def url(self): + return "/distros/%s/releases/%s" % (self.distro.slug, self.slug) + + # Publish + + def is_published(self): + if self.published_at and self.published_at <= datetime.datetime.utcnow(): + return True + + return False + + # Published At + + published_at = Column(DateTime) + + async def publish(self, when=None): + """ + Called to publish the release + """ + self.published_at = when or sqlalchemy.func.current_timestamp() + + # XXX TODO + + # Delete + + async def delete(self, user=None): + """ + Deletes this release + """ + self.deleted_at = sqlalchemy.func.current_timestamp() + if user: + self.deleted_by = user + + # XXX TODO delete images + + # Images + + #images = sqlalchemy.orm.relationship("Image", back_populates="release") + + @lazy_property + def XXXimages(self): + images = self.backend.distros.releases.images._get_images(""" SELECT * FROM - distributions + release_images WHERE - deleted IS FALSE + release_id = %s AND - distro_id || '-' || version_id = %s - """, slug, + deleted_at IS NULL + """, self.id, + + # Populate cache + release=self, ) - async def get_by_tag(self, tag): - return await self._get_distribution(""" - SELECT - * - FROM - distributions - WHERE - deleted IS FALSE - AND - distro_id || version_id = %s - """, tag, + # Return grouped by architecture + return misc.group(images, lambda image: image.arch) + + +class Distributions(base.Object): + def __aiter__(self): + stmt = ( + sqlalchemy + .select(Distro) + .where( + Distro.deleted_at == None, + ) + + # Order them by name + .order_by(Distro.name) ) - async def create(self, name, distro_id, version_id): - # Insert into the database - return await self._get_distribution(""" - INSERT INTO - distributions - ( - name, - distro_id, - version_id + # Fetch the distros + return self.db.fetch(stmt) + + async def get_by_slug(self, slug): + stmt = ( + sqlalchemy + .select(Distro) + .where( + Distro.deleted_at == None, + + # Select by slug + Distro.distro_id + "-" + Distro.version_id.cast(Text) == slug, ) - VALUES( - %s, %s, %s + ) + + return await self.db.fetch_one(stmt) + + async def get_by_tag(self, tag): + stmt = ( + sqlalchemy + .select(Distro) + .where( + Distro.deleted_at == None, + Distro.distro_id + Distro.version_id == tag, ) - RETURNING - * - """, name, distro_id, version_id, ) - @lazy_property - def releases(self): + return await self.db.fetch_one(stmt) + + async def create(self, name, distro_id, version_id, **kwargs): """ - Releases + Create a new distribution """ - return Releases(self.backend) - + return await self.db.insert( + Distro, + name = name, + distro_id = distro_id, + version_id = version_id, + **kwargs, + ) -class Distribution(base.DataObject): - table = "distributions" - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, self.name) +class Distro(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "distributions" def __str__(self): return "%s %s" % (self.name, self.version) @@ -120,21 +219,17 @@ class Distribution(base.DataObject): if self.custom_config: config.read_string(self.custom_config) - # Name + # ID - def get_name(self): - return self.data.name + id = Column(Integer, primary_key=True) - def set_name(self, name): - self._set_attribute("name", name) + # Name - name = property(get_name, set_name) + name = Column(Text, nullable=False) # Distro ID - @property - def distro_id(self): - return self.data.distro_id +
distro_id = Column(Text, nullable=False) # Version @@ -144,9 +239,7 @@ class Distribution(base.DataObject): # Version ID - @property - def version_id(self): - return self.data.version_id + version_id = Column(Integer, nullable=False) # Slug @@ -156,69 +249,35 @@ class Distribution(base.DataObject): # Slogan - def get_slogan(self): - return self.data.slogan - - def set_slogan(self, slogan): - self._set_attribute("slogan", slogan or "") - - slogan = property(get_slogan, set_slogan) + slogan = Column(Text, nullable=False) # Codename - def get_codename(self): - return self.data.codename - - def set_codename(self, codename): - self._set_attribute("codename", codename or "") - - codename = property(get_codename, set_codename) + codename = Column(Text, nullable=False) # Description - def get_description(self): - return self.data.description - - def set_description(self, description): - self._set_attribute("description", description or "") - - description = property(get_description, set_description) + description = Column(Text, nullable=False) # Arches - def get_arches(self): - return self.data.arches - - def set_arches(self, arches): - self._set_attribute("arches", arches) - - arches = property(get_arches, set_arches) + arches = Column(ARRAY(Text), nullable=False) # Vendor - def get_vendor(self): - return self.data.vendor - - def set_vendor(self, vendor): - self._set_attribute("vendor", vendor) - - vendor = property(get_vendor, set_vendor) + vendor = Column(Text, nullable=False) # Contact - def get_contact(self): - return self.data.contact - - def set_contact(self, contact): - self._set_attribute("contact", contact) - - contact = property(get_contact, set_contact) + contact = Column(Text, nullable=False) # Tag - @property - def tag(self): - return "%s%s" % (self.distro_id, self.version_id) + #@property + #def tag(self): + # return "%s%s" % (self.distro_id, self.version_id) + + #tag = Column(Text, Computed(distro_id + version_id), unique=True, nullable=False) # Pakfire @@ -231,33 +290,15 @@ class Distribution(base.DataObject): # Custom Configuration - def get_custom_config(self): - return self.data.custom_config - - def set_custom_config(self, custom_config): - self._set_attribute("custom_config", custom_config or "") - - custom_config = property(get_custom_config, set_custom_config) + custom_config = Column(Text, nullable=False) # Bugzilla Product - def get_bugzilla_product(self): - return self.data.bugzilla_product - - def set_bugzilla_product(self, product): - self._set_attribute("bugzilla_product", product) - - bugzilla_product = property(get_bugzilla_product, set_bugzilla_product) + bugzilla_product = Column(Text, nullable=False) # Bugzilla Version - def get_bugzilla_version(self): - return self.data.bugzilla_version - - def set_bugzilla_version(self, version): - self._set_attribute("bugzilla_version", version) - - bugzilla_version = property(get_bugzilla_version, set_bugzilla_version) + bugzilla_version = Column(Text, nullable=False) # Bugzilla Fields @@ -281,46 +322,43 @@ class Distribution(base.DataObject): # Must be admin return user.is_admin() - def get_repos(self): - return self.backend.repos._get_repositories(""" - SELECT - * - FROM - repositories - WHERE - deleted_at IS NULL - AND - distro_id = %s - AND - owner_id IS NULL""", - self.id, + # Repos - # Populate cache - distro=self, + async def get_repos(self): + """ + Returns all repositories of this distribution + """ + stmt = ( + sqlalchemy + .select(repository.Repo) + .where( + repository.Repo.deleted_at == None, + 
repository.Repo.distro == self, + repository.Repo.owner == None, + ) + .order_by( + repository.Repo.name, + ) ) - def get_repo(self, slug): - repo = self.backend.repos._get_repository(""" - SELECT - * - FROM - repositories - WHERE - deleted_at IS NULL - AND - distro_id = %s - AND - owner_id IS NULL - AND - slug = %s""", - self.id, - slug, + return await self.db.fetch_as_list(stmt) - # Populate cache - distro=self, + async def get_repo(self, slug): + """ + Returns a specific repository by its slug + """ + stmt = ( + sqlalchemy + .select(repository.Repo) + .where( + repository.Repo.deleted_at == None, + repository.Repo.distro == self, + repository.Repo.owner == None, + repository.Repo.slug == slug, + ) ) - return repo + return await self.db.fetch_one(stmt) def get_next_repo(self, repo): """ @@ -334,75 +372,74 @@ class Distribution(base.DataObject): # Builds - def get_builds_by_name(self, name, limit=None): + async def get_builds(self, **kwargs): """ - Returns all release builds that match the name + Returns all builds in this distribution """ - return self.backend.builds._get_builds(""" - SELECT - builds.* - FROM - builds - LEFT JOIN - packages ON builds.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - builds.owner_id IS NULL - AND - packages.deleted_at IS NULL - AND - packages.distro_id = %s - AND - packages.name = %s - ORDER BY - created_at DESC - LIMIT - %s - """, self.id, name, limit, - ) + return await self.backend.builds.get(distro=self, scratch=False, **kwargs) # Sources - def get_sources(self): - return self.backend.sources._get_sources(""" - SELECT - sources.* - FROM - sources - LEFT JOIN - repositories ON sources.repo_id = repositories.id - WHERE - repositories.distro_id = %s - ORDER BY - sources.name, sources.url - """, self.id, + async def get_sources(self): + """ + Returns a list of all sources + """ + stmt = ( + sqlalchemy + .select( + sources.Source, + ) + .select_from(repository.Repo) + .join( + sources.Source, + sources.Source.repo_id == repository.Repo.id, + ) + .where( + repository.Repo.deleted_at == None, + repository.Repo.distro == self, + repository.Repo.owner == None, + + sources.Source.deleted_at == None, + ) + .order_by( + sources.Source.name, + sources.Source.url, + ) ) + return await self.db.fetch_as_list(stmt) + # Releases - def get_releases(self, limit=None, offset=None): - return self.backend.distros.releases._get_releases(""" - SELECT - * - FROM - releases - WHERE - distro_id = %s - AND - deleted_at IS NULL - ORDER BY - created_at DESC - LIMIT - %s - OFFSET - %s - """, self.id, limit, offset, + releases = sqlalchemy.orm.relationship("Release", back_populates="distro", + order_by="Release.published_at", lazy="selectin") - # Populate cache - distro=self, + # Latest Release + + async def get_latest_release(self): + """ + Returns the latest published release + """ + stmt = ( + sqlalchemy + .select( + Release, + ) + .where( + Release.deleted_at == None, + Release.distro == self, + Release.published_at <= sqlalchemy.func.current_timestamp(), + ) + .order_by( + Release.published_at.desc(), + ) + .limit(1) ) + return await self.db.fetch_one(stmt) + + # Releases + def get_release(self, slug): return self.backend.distros.releases._get_release(""" SELECT * FROM releases WHERE distro_id = %s AND slug = %s AND deleted_at IS NULL """, self.id, slug, ) - async def get_latest_release(self): - """ - Returns the latest and published release - """ - return await self.backend.distros.releases._get_release(""" - SELECT - * - FROM - releases - WHERE - distro_id = %s - AND -
deleted_at IS NULL - AND - published_at IS NOT NULL - AND - published_at <= CURRENT_TIMESTAMP - ORDER BY - published_at DESC - LIMIT - 1 - """, self.id, - ) - - -class Releases(base.Object): - def _get_releases(self, query, *args, **kwargs): - return self.db.fetch_many(Release, query, *args, **kwargs) - - async def _get_release(self, query, *args, **kwargs): - return await self.db.fetch_one(Release, query, *args, **kwargs) - - async def get_by_id(self, id): - return await self._get_release(""" - SELECT - * - FROM - releases - WHERE - id = %s - """, id, - ) - - async def create(self, distro, name, user, stable=False): + async def create_release(self, name, user, stable=False): """ Creates a new release """ # Create a slug slug = misc.normalize(name) - release = await self._get_release(""" - INSERT INTO - releases - ( - distro_id, - name, - slug, - created_by, - stable - ) - VALUES - ( - %s, %s, %s, %s, %s - ) - RETURNING * - """, distro, name, slug, user, stable, - - # Populate cache - distro=distro, created_by=user, + # Create a new Release + release = await self.db.insert( + Release, + distro = self, + name = name, + slug = slug, + created_by = user, + stable = stable, ) # XXX create image jobs return release - # Images - - @lazy_property - def images(self): - return Images(self.backend) - - -class Release(base.DataObject): - table = "releases" - - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, self.name) - - def __str__(self): - return self.name - - def has_perm(self, *args, **kwargs): - # Inherit all permissions from the distribution - return self.distro.has_perm(*args, **kwargs) - - # Distro - - @lazy_property - def distro(self): - return self.backend.distros.get_by_id(self.data.distro_id) - - # Name - - def get_name(self): - return self.data.name - - def set_name(self, name): - self._set_attribute("name", name) - name = property(get_name, set_name) - # Slug - - @property - def slug(self): - return self.data.slug - - # Created At - - @property - def created_at(self): - return self.data.created_at - - # Created By - - @lazy_property - def created_by(self): - return self.backend.users.get_by_id(self.data.created_by) - - # Deleted At - - @property - def deleted_at(self): - return self.data.deleted_at - - # Deleted By - - @lazy_property - def deleted_by(self): - return self.backend.users.get_by_id(self.data.deleted_by) - - # Stable?
- - def get_stable(self): - return self.data.stable +class Image(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "release_images" - def set_stable(self, stable): - self._set_attribute("stable", stable) + # ID - stable = property(get_stable, set_stable) + id = Column(Integer, primary_key=True) - # Announcement - - def get_announcement(self): - return self.data.announcement or "" - - def set_announcement(self, text): - self._set_attribute("announcement", text) - - announcement = property(get_announcement, set_announcement) - - # URL - - @property - def url(self): - return "/distros/%s/releases/%s" % (self.distro.slug, self.slug) - - # Publish + # Release ID - def is_published(self): - if self.published_at and self.published_at <= datetime.datetime.utcnow(): - return True + release_id = Column(Integer, ForeignKey("releases.id"), nullable=False) - return False + # Release - @property - def published_at(self): - return self.data.published_at - - async def publish(self, when=None): - """ - Called to publish the release - """ - if when: - self._set_attribute("published_at", when) - else: - self._set_attribute_now("published_at") - - # XXX TODO - - # Delete - - async def delete(self, user=None): - """ - Deletes this release - """ - self._set_attribute_now("deleted_at") - if user: - self._set_attribute("deleted_by", user) - - # XXX TODO delete images - - # Images - - @lazy_property - def images(self): - images = self.backend.distros.releases.images._get_images(""" - SELECT - * - FROM - release_images - WHERE - release_id = %s - AND - deleted_at IS NULL - """, self.id, - - # Populate cache - release=self, - ) - - # Return grouped by architecture - return misc.group(images, lambda image: image.arch) - - -class Images(base.Object): - def _get_images(self, query, *args, **kwargs): - return self.db.fetch_many(Image, query, *args, **kwargs) - - async def _get_image(self, query, *args, **kwargs): - return await self.db.fetch_one(Image, query, *args, **kwargs) - - -class Image(base.DataObject): - table = "release_images" - - @lazy_property - def release(self): - return self.backend.distros.releases.get_by_id(self.data.release_id) + release = sqlalchemy.orm.relationship( + "Release", foreign_keys=[release_id], lazy="selectin", + ) # Arch - @property - def arch(self): - return self.data.arch + arch = Column(Text, nullable=False) diff --git a/src/buildservice/events.py b/src/buildservice/events.py index ee06e5a8..4a811628 100644 --- a/src/buildservice/events.py +++ b/src/buildservice/events.py @@ -19,10 +19,21 @@ # # ############################################################################### +import functools import logging +import sqlalchemy +from sqlalchemy import Column, ForeignKey, DateTime, Integer, Text + from . import base -from .decorators import * +from . import builders +from . import builds +from . import database +from . import distribution as distros +from . import jobs +from . import mirrors +from . import releasemonitoring as monitorings +from . 
import repository as repos # Setup logging log = logging.getLogger("pbs.events") @@ -34,1041 +45,1053 @@ log = logging.getLogger("pbs.events") # MINOR INFO : 4 # DEBUG : 1 -# The view returns the following fields -# type (of the event) -# t (timestamp) -# priority -# build -# by_build -# build_comment -# build_group -# job -# package_name -# mirror -# user -# by_user -# builder -# repository -# release -# bug -# error -# points -# - -EVENTS_CTE = """ - WITH events AS ( - -- Build creation times - SELECT - 'build-created'::text AS type, - builds.created_at AS t, - 4 AS priority, - builds.id AS build, - NULL::integer AS by_build, - NULL::integer AS build_comment, - NULL::integer AS build_group, - NULL::integer AS job, - NULL::text AS package_name, - NULL::integer AS mirror, - NULL::integer AS user, - builds.owner_id AS by_user, - NULL::integer AS builder, - NULL::integer AS repository, - NULL::integer AS release, - NULL::integer AS bug, - NULL::text AS error, - NULL::integer AS points - FROM - builds - - UNION ALL - - -- Build finish/failed times - SELECT - CASE - WHEN builds.failed IS TRUE - THEN 'build-failed'::text - ELSE 'build-finished'::text - END AS type, - builds.finished_at AS t, - CASE - WHEN builds.failed IS TRUE - THEN 8 - ELSE 4 - END AS priority, - builds.id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - NULL AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - builds - WHERE - builds.finished_at IS NOT NULL - - UNION ALL - - -- Deleted Builds - SELECT - 'build-deleted' AS type, - builds.deleted_at AS t, - 4 AS priority, - builds.id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - builds.deleted_by AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - builds - WHERE - builds.deleted_at IS NOT NULL - - UNION ALL - - -- Deprecated Builds - - SELECT - 'build-deprecated' AS type, - builds.deprecated_at AS t, - 4 AS priority, - builds.id AS build, - builds.deprecating_build_id AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - builds.deprecated_by AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - builds - WHERE - builds.deleted_at IS NULL - AND - builds.deprecated_at IS NOT NULL - - UNION ALL - - -- Build Comments - SELECT - 'build-comment' AS type, - build_comments.created_at AS t, - 5 AS priority, - build_comments.build_id AS build, - NULL AS by_build, - build_comments.id AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - build_comments.user_id AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - build_comments - WHERE - deleted IS FALSE - - UNION ALL - - -- Build Watchers added - SELECT - 'build-watcher-added' AS type, - build_watchers.added_at AS t, - 1 AS priority, - build_watchers.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - build_watchers.user_id AS user, - NULL AS by_user, - NULL AS builder, - NULL AS repository, - 
NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - build_watchers - - UNION ALL - - -- Build Watchers removed - SELECT - 'build-watcher-removed' AS type, - build_watchers.deleted_at AS t, - 1 AS priority, - build_watchers.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - build_watchers.user_id AS user, - NULL AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - build_watchers - WHERE - deleted_at IS NOT NULL - - UNION ALL - - -- Bugs added to builds - SELECT - 'build-bug-added' AS type, - build_bugs.added_at AS t, - 4 AS priority, - build_bugs.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - build_bugs.added_by AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - build_bugs.bug_id AS bug, - NULL AS error, - NULL AS points - FROM - build_bugs - - UNION ALL - - -- Bugs removed from builds - - SELECT - 'build-bug-removed' AS type, - build_bugs.removed_at AS t, - 4 AS priority, - build_bugs.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - build_bugs.removed_by AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - build_bugs.bug_id AS bug, - NULL AS error, - NULL AS points - FROM - build_bugs - WHERE - removed_at IS NOT NULL - - UNION ALL - - -- Build added to/moved repository - SELECT - CASE - WHEN source_repo.repo_id IS NULL - THEN 'repository-build-added' - ELSE - 'repository-build-moved' - END AS type, - repository_builds.added_at AS t, - 5 AS priority, - repository_builds.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - repository_builds.added_by AS by_user, - NULL AS builder, - repository_builds.repo_id AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - repository_builds - - -- Attempt to find a match in a source repository - LEFT JOIN - repository_builds source_repo - ON - repository_builds.build_id = source_repo.build_id - AND - repository_builds.repo_id <> source_repo.repo_id - AND - repository_builds.added_at = source_repo.removed_at - - UNION ALL - - -- Build removed from repository - SELECT - 'repository-build-removed' AS type, - repository_builds.removed_at AS t, - 5 AS priority, - repository_builds.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - repository_builds.removed_by AS by_user, - NULL AS builder, - repository_builds.repo_id AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - repository_builds - - -- Attempt to find a match in a destination repository - LEFT JOIN - repository_builds destination_repo - ON - repository_builds.build_id = destination_repo.build_id - AND - repository_builds.repo_id <> destination_repo.repo_id - AND - repository_builds.removed_at = destination_repo.added_at - WHERE - repository_builds.removed_at IS NOT NULL - AND - destination_repo.repo_id IS NULL - - UNION ALL - - -- Build Points - - SELECT - 'build-points' AS type, - build_points.created_at AS t, - 1 AS priority, - build_points.build_id 
AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - build_points.user_id AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - build_points.points AS points - FROM - build_points - - UNION ALL - - -- Test Builds - SELECT - CASE WHEN build_groups.failed IS TRUE THEN 'test-builds-failed' - ELSE 'test-builds-succeeded' END AS type, - build_groups.finished_at AS t, - 4 AS priority, - builds.id AS build, - NULL AS by_build, - NULL AS build_comment, - build_groups.id AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - NULL AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - builds - JOIN - build_groups ON builds.test_group_id = build_groups.id - WHERE - builds.deleted_at IS NULL - AND - build_groups.deleted_at IS NULL - AND - build_groups.finished_at IS NOT NULL - - UNION ALL - - -- Jobs Creations - SELECT - 'job-created' AS type, - jobs.created_at AS t, - 1 AS priority, - jobs.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - jobs.id AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - NULL AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - jobs - WHERE - jobs.deleted_at IS NULL - - UNION ALL - - -- Failed Jobs - SELECT - 'job-failed' AS type, - jobs.finished_at AS t, - 5 AS priority, - jobs.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - jobs.id AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - NULL AS by_user, - jobs.builder_id AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - jobs - WHERE - jobs.deleted_at IS NULL - AND - jobs.finished_at IS NOT NULL - AND - jobs.aborted IS FALSE - AND - jobs.failed IS TRUE - - UNION ALL - - -- Finished Jobs - SELECT - 'job-finished' AS type, - jobs.finished_at AS t, - 4 AS priority, - jobs.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - jobs.id AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - NULL AS by_user, - jobs.builder_id AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - jobs - WHERE - jobs.deleted_at IS NULL - AND - jobs.finished_at IS NOT NULL - AND - jobs.aborted IS FALSE - AND - jobs.failed IS FALSE - - UNION ALL - - -- Aborted Jobs - SELECT - 'job-aborted' AS type, - jobs.finished_at AS t, - 4 AS priority, - jobs.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - jobs.id AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - jobs.aborted_by AS by_user, - jobs.builder_id AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - jobs - WHERE - jobs.deleted_at IS NULL - AND - jobs.aborted IS TRUE - - UNION ALL - - -- Dispatched Jobs - SELECT - 'job-dispatched' AS type, - jobs.started_at AS t, - 1 AS priority, - jobs.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - jobs.id AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - NULL AS by_user, - jobs.builder_id AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - 
FROM - jobs - WHERE - jobs.deleted_at IS NULL - AND - jobs.started_at IS NOT NULL - - UNION ALL - - -- Retried jobs - SELECT - 'job-retry' AS type, - jobs.created_at AS t, - 4 AS priority, - jobs.build_id AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - jobs.id AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - NULL AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - jobs - JOIN - jobs superseeded_jobs ON superseeded_jobs.superseeded_by = jobs.id - WHERE - jobs.deleted_at IS NULL - - UNION ALL - - -- Builders Created - SELECT - 'builder-created' AS type, - builders.created_at AS t, - 5 AS priority, - NULL AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - builders.created_by AS by_user, - builders.id AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - builders - - UNION ALL - - -- Builders Deleted - SELECT - 'builder-deleted' AS type, - builders.deleted_at AS t, - 5 AS priority, - NULL AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - builders.deleted_by AS by_user, - builders.id AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - builders - WHERE - builders.deleted_at IS NOT NULL - - UNION ALL - - -- Releases Created - SELECT - 'release-created' AS type, - releases.created_at AS t, - 1 AS priority, - NULL AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - releases.created_by AS by_user, - NULL AS builder, - NULL AS repository, - releases.id AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - releases - - UNION ALL - - -- Releases Deleted - SELECT - 'release-deleted' AS type, - releases.deleted_at AS t, - 1 AS priority, - NULL AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - releases.deleted_by AS by_user, - NULL AS builder, - NULL AS repository, - releases.id AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - releases - WHERE - deleted_at IS NOT NULL - - UNION ALL - - -- Releases Published - SELECT - 'release-published' AS type, - releases.published_at AS t, - CASE WHEN releases.stable IS TRUE - THEN 5 ELSE 4 END AS priority, - NULL AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - NULL AS mirror, - NULL AS user, - NULL AS by_user, - NULL AS builder, - NULL AS repository, - releases.id AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - releases - WHERE - published_at IS NOT NULL - AND - published_at <= CURRENT_TIMESTAMP - - UNION ALL - - -- Mirrors Created - SELECT - 'mirror-created' AS type, - mirrors.created_at AS t, - 5 AS priority, - NULL AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - mirrors.id AS mirror, - NULL AS user, - mirrors.created_by AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - mirrors - - UNION ALL - - -- Mirrors Deleted - SELECT - 'mirror-deleted' AS 
type, - mirrors.deleted_at AS t, - 5 AS priority, - NULL AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - mirrors.id AS mirror, - NULL AS user, - mirrors.deleted_by AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - mirrors - WHERE - deleted_at IS NOT NULL - - UNION ALL - - -- Mirror Status Changes - SELECT - CASE - WHEN mirror_status_changes.new_status IS TRUE - THEN 'mirror-online' - WHEN mirror_status_changes.new_status IS FALSE - THEN 'mirror-offline' - END AS type, - mirror_status_changes.checked_at AS t, - 4 AS priority, - NULL AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - NULL AS package_name, - mirror_status_changes.mirror_id AS mirror, - NULL AS user, - NULL AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - mirror_status_changes.error AS error, - NULL AS points - FROM ( - SELECT - mirror_checks.mirror_id AS mirror_id, - mirror_checks.checked_at AS checked_at, - mirror_checks.success AS new_status, - LAG(success) OVER ( - PARTITION BY mirror_id - ORDER BY checked_at ASC - ) AS old_status, - mirror_checks.error AS error - FROM - mirror_checks - ) mirror_status_changes - WHERE - mirror_status_changes.old_status <> mirror_status_changes.new_status - - UNION ALL - - -- Release Monitoring Created - SELECT - 'release-monitoring-created' AS type, - release_monitorings.created_at AS t, - 4 AS priority, - NULL AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - release_monitorings.name AS package_name, - NULL AS mirror, - NULL AS user, - release_monitorings.created_by AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - release_monitorings - - UNION ALL - - -- Release Monitoring Deleted - SELECT - 'release-monitoring-deleted' AS type, - release_monitorings.deleted_at AS t, - 4 AS priority, - NULL AS build, - NULL AS by_build, - NULL AS build_comment, - NULL AS build_group, - NULL AS job, - release_monitorings.name AS package_name, - NULL AS mirror, - NULL AS user, - release_monitorings.deleted_by AS by_user, - NULL AS builder, - NULL AS repository, - NULL AS release, - NULL AS bug, - NULL AS error, - NULL AS points - FROM - release_monitorings - WHERE - deleted_at IS NOT NULL - ) -""" - class Events(base.Object): - @lazy_property - def map(self): - return { - # Builds - "build" : self.backend.builds.get_by_id, - "by_build" : self.backend.builds.get_by_id, - - # Build Comments - "build_comment" : self.backend.builds.comments.get_by_id, - - # Build Groups - "build_group" : self.backend.builds.groups.get_by_id, - - # Jobs - "job" : self.backend.jobs.get_by_id, - - # Mirrors - "mirror" : self.backend.mirrors.get_by_id, - - # Releases - "release" : self.backend.distros.releases.get_by_id, - - # Repositories - "repository" : self.backend.repos.get_by_id, - - # Builders - "builder" : self.backend.builders.get_by_id, - - # Users - "user" : self.backend.users.get_by_id, - "by_user" : self.backend.users.get_by_id, - } - - async def expand(self, events): + @functools.cached_property + def events(self): """ - Expands any log events + This returns a massive CTE that creates this thing on the fly """ - cache = {} - - for event in events: - # Replace any mappable attributes - for attribute in event: - # Check if we are dealing with a mapped attribute - 
try: - expand = self.map[attribute] - except KeyError as e: - continue - - # Fetch attribute value - key = event[attribute] - - # Skip everything that is None - if key is None: - continue - - # Lookup the cache - try: - value = cache[attribute][key] - - # Call the expand function on cache miss - except KeyError: - value = await expand(key) - - # Store the expanded value - try: - cache[attribute][key] = value - except KeyError: - cache[attribute] = { key : value } - - # Replace original value with the expanded one - event[attribute] = value + events = [] + + def TYPE(t): + return sqlalchemy.literal(t).label("type") + + def TIMESTAMP(column): + return column.label("t") + + def PRIORITY(priority): + return sqlalchemy.literal(priority).label("priority") + + # Build Created + events.append(( + sqlalchemy + .select( + # Type + TYPE("build-created"), + + # Timestamp + TIMESTAMP(builds.Build.created_at), + + # Priority + PRIORITY(4), + + # Build ID + builds.Build.id.label("build_id"), + + # By User + builds.Build.owner_id.label("by_user_id"), + ) + )) + + # Finished/Failed Builds + events.append(( + sqlalchemy + .select( + # Type + sqlalchemy.case( + (builds.Build.failed == True, sqlalchemy.literal("build-failed")), + else_=sqlalchemy.literal("build-finished"), + ).label("type"), + + # Timestamp + TIMESTAMP(builds.Build.finished_at), + + # Priority + sqlalchemy.case( + (builds.Build.failed == True, 8), + else_=4, + ).label("priority"), + + # Build ID + builds.Build.id.label("build_id"), + ) + .select_from(builds.Build) + .where( + builds.Build.finished_at != None, + ) + )) + + # Deleted Builds + events.append(( + sqlalchemy + .select( + # Type + TYPE("build-deleted"), + + # Timestamp + TIMESTAMP(builds.Build.deleted_at), + + # Priority + PRIORITY(4), + + # Build ID + builds.Build.id.label("build_id"), + + # Deleted By User + builds.Build.deleted_by_id.label("by_user_id"), + ) + .select_from(builds.Build) + .where( + builds.Build.deleted_at != None, + ) + )) + + # Deprecated Builds + events.append(( + sqlalchemy + .select( + # Type + TYPE("build-deprecated"), + + # Timestamp + TIMESTAMP(builds.Build.deprecated_at), + + # Priority + PRIORITY(4), + + # Build ID + builds.Build.id.label("build_id"), + + # By Build ID + builds.Build.deprecating_build_id.label("by_build_id"), + + # By User ID + builds.Build.deprecated_by_id.label("by_user_id"), + ) + .select_from(builds.Build) + .where( + builds.Build.deprecated_at != None, + ) + )) + + # Build Comments + events.append(( + sqlalchemy + .select( + # Type + TYPE("build-comment"), + + # Timestamp + TIMESTAMP(builds.BuildComment.created_at), + + # Priority + PRIORITY(5), + + # Build ID + builds.BuildComment.build_id.label("build_id"), + + # Build Comment ID + builds.BuildComment.id.label("build_comment_id"), + + # User ID + builds.BuildComment.user_id.label("user_id"), + ) + .select_from(builds.BuildComment) + .where( + builds.BuildComment.deleted_at == None, + ) + )) + + # Build Watchers added + events.append(( + sqlalchemy + .select( + # Type + TYPE("build-watcher-added"), + + # Timestamp + TIMESTAMP(builds.BuildWatcher.added_at), + + # Priority + PRIORITY(1), + + # Build ID + builds.BuildWatcher.build_id.label("build_id"), + + # User ID + builds.BuildWatcher.user_id.label("user_id"), + ) + .select_from(builds.BuildWatcher) + )) + + # Build Watchers removed + events.append(( + sqlalchemy + .select( + # Type + TYPE("build-watcher-removed"), + + # Timestamp + TIMESTAMP(builds.BuildWatcher.deleted_at), + + # Priority + PRIORITY(1), + + # Build ID + 
builds.BuildWatcher.build_id.label("build_id"), + + # User ID + builds.BuildWatcher.user_id.label("user_id"), + ) + .select_from(builds.BuildWatcher) + .where( + builds.BuildWatcher.deleted_at != None, + ) + )) + + # Bugs added to builds + events.append(( + sqlalchemy + .select( + # Type + TYPE("build-bug-added"), + + # Timestamp + TIMESTAMP(builds.BuildBug.added_at), + + # Priority + PRIORITY(4), + + # Build ID + builds.BuildBug.build_id.label("build_id"), + + # By User ID + builds.BuildBug.added_by_id.label("by_user_id"), + + # Bug ID + builds.BuildBug.bug_id.label("bug_id"), + ) + .select_from(builds.BuildBug) + )) + + # Bugs removed from builds + events.append(( + sqlalchemy + .select( + # Type + TYPE("build-bug-removed"), + + # Timestamp + TIMESTAMP(builds.BuildBug.removed_at), + + # Priority + PRIORITY(4), + + # Build ID + builds.BuildBug.build_id.label("build_id"), + + # By User ID + builds.BuildBug.removed_by_id.label("by_user_id"), + + # Bug ID + builds.BuildBug.bug_id.label("bug_id"), + ) + .select_from(builds.BuildBug) + .where( + builds.BuildBug.removed_at != None, + ) + )) + + src_repo = sqlalchemy.orm.aliased(repos.RepoBuild) + dst_repo = sqlalchemy.orm.aliased(repos.RepoBuild) + + # Build added to/moved repository + events.append(( + sqlalchemy + .select( + # Type + sqlalchemy.case( + (src_repo == None, "repository-build-added"), + else_="repository-build-moved", + ).label("type"), + + # Timestamp + TIMESTAMP(dst_repo.added_at), + + # Priority + PRIORITY(5), + + # Build ID + dst_repo.build_id.label("build_id"), + + # By User ID + dst_repo.added_by_id.label("by_user_id"), + + # Repo ID + dst_repo.repo_id.label("repo_id"), + ) + .select_from(dst_repo) + .join( + src_repo, + sqlalchemy.and_( + src_repo.build_id == dst_repo.build_id, + src_repo.repo_id != dst_repo.repo_id, + src_repo.removed_at == dst_repo.added_at, + ), + isouter=True, + ) + )) + + # Build removed from repository + events.append(( + sqlalchemy + .select( + # Type + TYPE("repository-build-removed"), + + # Timestamp + TIMESTAMP(src_repo.removed_at), + + # Priority + PRIORITY(5), + + # Build ID + src_repo.build_id.label("build_id"), + + # By User ID + src_repo.removed_by_id.label("by_user_id"), + + # Repo ID + src_repo.repo_id.label("repo_id"), + ) + .select_from(src_repo) + .join( + dst_repo, + sqlalchemy.and_( + src_repo.build_id == dst_repo.build_id, + src_repo.repo_id != dst_repo.repo_id, + src_repo.removed_at == dst_repo.added_at, + ), + isouter=True, + ) + .where( + src_repo.removed_at != None, + dst_repo.repo_id == None, + ) + )) + + # Build Points + events.append(( + sqlalchemy + .select( + # Type + TYPE("build-points"), + + # Timestamp + TIMESTAMP(builds.BuildPoint.created_at), + + # Priority + PRIORITY(1), + + # Build ID + builds.BuildPoint.build_id.label("build_id"), + + # User ID + builds.BuildPoint.user_id.label("by_user_id"), + + # Points + builds.BuildPoint.points.label("points"), + ) + .select_from(builds.BuildPoint) + )) + + # Test Builds + events.append(( + sqlalchemy + .select( + # Type + sqlalchemy.case( + (builds.BuildGroup.failed == True, "test-builds-failed"), + else_="test-builds-succeeded", + ).label("type"), + + # Timestamp + TIMESTAMP(builds.BuildGroup.finished_at), + + # Priority + PRIORITY(4), + + # Build Group ID + builds.BuildGroup.id.label("build_group_id"), + ) + .select_from(builds.BuildGroup) + .join( + builds.Build, + builds.Build.test_group_id == builds.BuildGroup.id, + isouter=True, + ) + .where( + builds.BuildGroup.deleted_at == None, + builds.Build.deleted_at == None, + 
builds.BuildGroup.finished_at != None, + ) + )) + + # Created Jobs + events.append(( + sqlalchemy + .select( + # Type + TYPE("job-created"), + + # Timestamp + TIMESTAMP(jobs.Job.created_at), + + # Priority + PRIORITY(1), + + # Build ID + jobs.Job.build_id.label("build_id"), + + # Job ID + jobs.Job.id.label("job_id"), + ) + .select_from(jobs.Job) + .where( + jobs.Job.deleted_at == None, + ) + )) + + # Failed Jobs + events.append(( + sqlalchemy + .select( + # Type + TYPE("job-failed"), + + # Timestamp + TIMESTAMP(jobs.Job.finished_at), + + # Priority + PRIORITY(5), + + # Build ID + jobs.Job.build_id.label("build_id"), + + # Job ID + jobs.Job.id.label("job_id"), + + # Builder ID + jobs.Job.builder_id.label("builder_id"), + ) + .select_from(jobs.Job) + .where( + jobs.Job.deleted_at == None, + jobs.Job.finished_at != None, + jobs.Job.aborted == False, + jobs.Job.failed == True, + ) + )) + + # Finished Jobs + events.append(( + sqlalchemy + .select( + # Type + TYPE("job-finished"), + + # Timestamp + TIMESTAMP(jobs.Job.finished_at), + + # Priority + PRIORITY(4), + + # Build ID + jobs.Job.build_id.label("build_id"), + + # Job ID + jobs.Job.id.label("job_id"), + + # Builder ID + jobs.Job.builder_id.label("builder_id"), + ) + .select_from(jobs.Job) + .where( + jobs.Job.deleted_at == None, + jobs.Job.finished_at != None, + jobs.Job.aborted == False, + jobs.Job.failed == False, + ) + )) + + # Aborted Jobs + events.append(( + sqlalchemy + .select( + # Type + TYPE("job-aborted"), + + # Timestamp + TIMESTAMP(jobs.Job.finished_at), + + # Priority + PRIORITY(4), + + # Build ID + jobs.Job.build_id.label("build_id"), + + # Job ID + jobs.Job.id.label("job_id"), + + # Builder ID + jobs.Job.builder_id.label("builder_id"), + + # By User ID + jobs.Job.aborted_by_id.label("by_user_id"), + ) + .select_from(jobs.Job) + .where( + jobs.Job.deleted_at == None, + jobs.Job.aborted == True, + ) + )) + + # Dispatched Jobs + events.append(( + sqlalchemy + .select( + # Type + TYPE("job-dispatched"), + + # Timestamp + TIMESTAMP(jobs.Job.started_at), + + # Priority + PRIORITY(1), + + # Build ID + jobs.Job.build_id.label("build_id"), + + # Job ID + jobs.Job.id.label("job_id"), + + # Builder ID + jobs.Job.builder_id.label("builder_id"), + ) + .select_from(jobs.Job) + .where( + jobs.Job.deleted_at == None, + jobs.Job.started_at != None, + ) + )) + + superseeded_jobs = sqlalchemy.orm.aliased(jobs.Job) + + # Retried jobs + events.append(( + sqlalchemy + .select( + # Type + TYPE("job-retry"), + + # Timestamp + TIMESTAMP(jobs.Job.created_at), + + # Priority + PRIORITY(4), + + # Build ID + jobs.Job.build_id.label("build_id"), + + # Job ID + jobs.Job.id.label("job_id"), + ) + .select_from(jobs.Job) + .join( + superseeded_jobs, + superseeded_jobs.id == jobs.Job.superseeded_by_id, + ) + .where( + jobs.Job.deleted_at == None, + ) + )) + + # Builders Created + events.append(( + sqlalchemy + .select( + # Type + TYPE("builder-created"), + + # Timestamp + TIMESTAMP(builders.Builder.created_at), + + # Priority + PRIORITY(5), + + # Builder ID + builders.Builder.id.label("builder_id"), + + # By User ID + builders.Builder.created_by_id.label("by_user_id"), + ) + .select_from(builders.Builder) + )) + + # Builders Deleted + events.append(( + sqlalchemy + .select( + # Type + TYPE("builder-deleted"), + + # Timestamp + TIMESTAMP(builders.Builder.deleted_at), + + # Priority + PRIORITY(5), + + # Builder ID + builders.Builder.id.label("builder_id"), + + # By User ID + builders.Builder.deleted_by_id.label("by_user_id"), + ) + .select_from(builders.Builder) 
+			.where(
+				builders.Builder.deleted_at != None,
+			)
+		))
+
+		# Releases Created
+		events.append((
+			sqlalchemy
+			.select(
+				# Type
+				TYPE("release-created"),
+
+				# Timestamp
+				TIMESTAMP(distros.Release.created_at),
+
+				# Priority
+				PRIORITY(1),
+
+				# Release ID
+				distros.Release.id.label("release_id"),
+
+				# By User ID
+				distros.Release.created_by_id.label("by_user_id"),
+			)
+			.select_from(distros.Release)
+		))
+
+		# Releases Deleted
+		events.append((
+			sqlalchemy
+			.select(
+				# Type
+				TYPE("release-deleted"),
+
+				# Timestamp
+				TIMESTAMP(distros.Release.deleted_at),
+
+				# Priority
+				PRIORITY(1),
+
+				# Release ID
+				distros.Release.id.label("release_id"),
+
+				# By User ID
+				distros.Release.deleted_by_id.label("by_user_id"),
+			)
+			.select_from(distros.Release)
+			.where(
+				distros.Release.deleted_at != None,
+			)
+		))
+
+		# Releases Published
+		events.append((
+			sqlalchemy
+			.select(
+				# Type
+				TYPE("release-published"),
+
+				# Timestamp
+				TIMESTAMP(distros.Release.published_at),
+
+				# Priority
+				PRIORITY(5),
+
+				# Release ID
+				distros.Release.id.label("release_id"),
+			)
+			.select_from(distros.Release)
+			.where(
+				distros.Release.published_at != None,
+				distros.Release.published_at <= sqlalchemy.func.current_timestamp(),
+			)
+		))
+
+		# Mirrors Created
+		events.append((
+			sqlalchemy
+			.select(
+				# Type
+				TYPE("mirror-created"),
+
+				# Timestamp
+				TIMESTAMP(mirrors.Mirror.created_at),
+
+				# Priority
+				PRIORITY(5),
+
+				# Mirror ID
+				mirrors.Mirror.id.label("mirror_id"),
+
+				# By User ID
+				mirrors.Mirror.created_by_id.label("by_user_id"),
+			)
+			.select_from(mirrors.Mirror)
+		))
+
+		# Mirrors Deleted
+		events.append((
+			sqlalchemy
+			.select(
+				# Type
+				TYPE("mirror-deleted"),
+
+				# Timestamp
+				TIMESTAMP(mirrors.Mirror.deleted_at),
+
+				# Priority
+				PRIORITY(5),
+
+				# Mirror ID
+				mirrors.Mirror.id.label("mirror_id"),
+
+				# By User ID
+				mirrors.Mirror.deleted_by_id.label("by_user_id"),
+			)
+			.select_from(mirrors.Mirror)
+			.where(
+				mirrors.Mirror.deleted_at != None,
+			)
+		))
+
+		mirror_status_changes = (
+			sqlalchemy
+			.select(
+				mirrors.MirrorCheck.mirror_id.label("mirror_id"),
+				mirrors.MirrorCheck.checked_at.label("checked_at"),
+				mirrors.MirrorCheck.success.label("new_status"),
+				sqlalchemy.func.lag(
+					mirrors.MirrorCheck.success,
+				)
+				.over(
+					partition_by=mirrors.MirrorCheck.mirror_id,
+					order_by=mirrors.MirrorCheck.checked_at.asc(),
+				)
+				.label("old_status"),
+				mirrors.MirrorCheck.error.label("error"),
+			)
+			.select_from(mirrors.MirrorCheck)
+			.cte("mirror_status_changes")
+		)
-			yield Event(self.backend, event)
+		# Mirror Status Changes
+		events.append((
+			sqlalchemy
+			.select(
+				# Type
+				sqlalchemy.case(
+					(mirror_status_changes.c.new_status == True, "mirror-online"),
+					(mirror_status_changes.c.new_status == False, "mirror-offline"),
+				).label("type"),
+
+				# Timestamp
+				TIMESTAMP(mirror_status_changes.c.checked_at),
+
+				# Priority
+				PRIORITY(4),
+
+				# Mirror ID
+				mirror_status_changes.c.mirror_id.label("mirror_id"),
+
+				# Error
+				mirror_status_changes.c.error.label("error"),
+			)
+			.select_from(mirror_status_changes)
+			.where(
+				mirror_status_changes.c.old_status != mirror_status_changes.c.new_status,
+			)
+		))
+
+		# Release Monitoring Created
+		events.append((
+			sqlalchemy
+			.select(
+				# Type
+				TYPE("release-monitoring-created"),
+
+				# Timestamp
+				TIMESTAMP(monitorings.Monitoring.created_at),
+
+				# Priority
+				PRIORITY(4),
+
+				# Package Name
+				monitorings.Monitoring.name.label("package_name"),
+
+				# By User ID
+				monitorings.Monitoring.created_by_id.label("by_user_id"),
+			)
+			.select_from(monitorings.Monitoring)
+		))
+
+		# Release Monitoring Deleted
+		events.append((
+			sqlalchemy
+			.select(
+				# Type
+				TYPE("release-monitoring-deleted"),
+
+				# Timestamp
+				TIMESTAMP(monitorings.Monitoring.deleted_at),
+
+				# Priority
+				PRIORITY(4),
+
+				# Package Name
+				monitorings.Monitoring.name.label("package_name"),
+
+				# By User ID
+				monitorings.Monitoring.deleted_by_id.label("by_user_id"),
+			)
+			.select_from(monitorings.Monitoring)
+			.where(
+				monitorings.Monitoring.deleted_at != None,
+			)
+		))
+
+		# Discover all columns
+		columns = [c for c in sqlalchemy.inspection.inspect(Event).c]
+
+		# Add any missing columns to keep the subqueries shorter
+		events = [
+			(
+				sqlalchemy
+				.select(
+					*(
+						query.columns.get(
+							column.name,
+							sqlalchemy.null().cast(column.type).label(column.name),
+						)
+						for column in columns
+					),
+				)
+			) for query in events
+		]
+
+		# Create a new CTE with all events
+		return sqlalchemy.union_all(*events).cte("events")
 
 	async def __call__(self, priority=None, offset=None, limit=None, build=None, builder=None, mirror=None, user=None):
 		"""
 		Returns all events filtered by the given criteria
 		"""
-		conditions, values = [], []
+		# Create a subquery to map the model to the CTE
+		events = (
+			sqlalchemy
+			.select(
+				Event,
+			)
+			.add_cte(
+				self.events,
+			)
+		).subquery()
+
+		# Alias the subquery
+		events = sqlalchemy.orm.aliased(Event, events)
+
+		# Create a query to filter out the events we are interested in
+		stmt = (
+			sqlalchemy
+			.select(
+				events,
+			)
+			.order_by(
+				events.t.desc(),
+				events.priority.asc(),
+			)
+			.limit(limit)
+			.offset(offset)
+		)
 
 		# Filter by build
 		if build:
-			conditions.append("events.build = %s")
-			values.append(build)
+			stmt = stmt.where(
+				events.build_id == build.id,
+			)
 
 		# Filter by builder
 		if builder:
-			conditions.append("events.builder = %s")
-			values.append(builder)
+			stmt = stmt.where(
+				events.builder_id == builder.id,
+			)
 
 		# Filter by mirror
 		if mirror:
-			conditions.append("events.mirror = %s")
-			values.append(mirror)
+			stmt = stmt.where(
+				events.mirror_id == mirror.id,
+			)
 
 		# Filter by user
 		if user:
-			conditions.append("(events.user = %s OR events.by_user = %s)")
-			values.append(user)
-			values.append(user)
+			stmt = stmt.where(
+				sqlalchemy.or_(
+					events.user_id == user.id,
+					events.by_user_id == user.id,
+				),
+			)
 
 		# Filter by priority
 		if priority:
-			conditions.append("events.priority >= %s")
-			values.append(priority)
-
-		# Fetch all events
-		events = await self.db.query(
-			"""
-			%s
-
-			-- Filter out everything we want
-			SELECT
-				*
-			FROM
-				events
-			WHERE
-				%s
-
-			-- Sort everything in reverse order
-			ORDER BY
-				t DESC, priority ASC
-			OFFSET
-				%%s
-			LIMIT
-				%%s
-			""" % (EVENTS_CTE, " AND ".join(conditions) or "TRUE"),
-			*values, offset, limit,
-		)
+			stmt = stmt.where(
+				events.priority >= priority,
+			)
+
+		# Run the query
+		return await self.db.fetch_as_list(stmt)
+
+
+class Event(database.Base):
+	__tablename__ = "events"
+
+	# Type
+
+	type = Column(Text, primary_key=True, nullable=False)
+
+	# Timestamp
+
+	t = Column(DateTime(timezone=False), primary_key=True, nullable=False)
+
+	# Priority
+
+	priority = Column(Integer, nullable=False)
+
+	# Build ID
+
+	build_id = Column(Integer, ForeignKey("builds.id"))
+
+	# Build
+
+	build = sqlalchemy.orm.relationship(
+		"Build", foreign_keys=[build_id], lazy="selectin",
+	)
+
+	# By Build ID
+
+	by_build_id = Column(Integer, ForeignKey("builds.id"))
+
+	# By Build
+
+	by_build = sqlalchemy.orm.relationship(
+		"Build", foreign_keys=[by_build_id], lazy="selectin",
+	)
+
+	# Build Comment ID
-		# Expand all events
-
return await self.expand(events) + build_comment_id = Column(Integer, ForeignKey("build_comments.id")) + # Build Comment -class Event(base.Object): - def init(self, data): - self.data = data + build_comment = sqlalchemy.orm.relationship( + "BuildComment", foreign_keys=[build_comment_id], lazy="selectin", + ) + + # Build Group ID + + build_group_id = Column(Integer, ForeignKey("build_groups.id")) + + # Build Group + + build_group = sqlalchemy.orm.relationship( + "BuildGroup", foreign_keys=[build_group_id], lazy="selectin", + ) + + # Job ID + + job_id = Column(Integer, ForeignKey("jobs.id")) + + # Job + + job = sqlalchemy.orm.relationship( + "Job", foreign_keys=[job_id], lazy="selectin", + ) + + # Package Name + + package_name = Column(Text) + + # Mirror ID + + mirror_id = Column(Integer, ForeignKey("mirrors.id")) + + # Mirror + + mirror = sqlalchemy.orm.relationship( + "Mirror", foreign_keys=[mirror_id], lazy="selectin", + ) + + # User ID + + user_id = Column(Integer, ForeignKey("users.id")) + + # User - # Read some useful attributes - try: - self.type = self.data.type - self.t = self.data.t - except AttributeError as e: - log.error("Could not read event: %s" % e) - raise e + user = sqlalchemy.orm.relationship( + "User", foreign_keys=[user_id], lazy="selectin", + ) + + # By User ID + + by_user_id = Column(Integer, ForeignKey("users.id")) + + # By User + + by_user = sqlalchemy.orm.relationship( + "User", foreign_keys=[by_user_id], lazy="selectin", + ) + + # Builder ID + + builder_id = Column(Integer, ForeignKey("builders.id")) + + # Builder + + builder = sqlalchemy.orm.relationship( + "Builder", foreign_keys=[builder_id], lazy="selectin", + ) + + # Repo ID + + repo_id = Column(Integer, ForeignKey("repositories.id")) + + # Repo + + repo = sqlalchemy.orm.relationship( + "Repo", foreign_keys=[repo_id], lazy="selectin", + ) + + # Release ID + + release_id = Column(Integer, ForeignKey("releases.id")) + + # Release + + release = sqlalchemy.orm.relationship( + "Release", foreign_keys=[release_id], lazy="selectin", + ) - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, self.type) + # Bug - # Make Events accessible as mappings + bug = Column(Integer) - def keys(self): - return self.data.keys() + # Error - def __getitem__(self, key): - return self.data[key] + error = Column(Text) - # Make items accessible as attributes + # Points - def __getattr__(self, key): - try: - return self.data[key] - except KeyError as e: - raise AttributeError(key) from e + points = Column(Integer) diff --git a/src/buildservice/jobs.py b/src/buildservice/jobs.py index c1af558b..284c1cfc 100644 --- a/src/buildservice/jobs.py +++ b/src/buildservice/jobs.py @@ -3,13 +3,20 @@ import asyncio import collections import datetime +import functools import gzip import logging import os import queue +import sqlalchemy + +from sqlalchemy import Column, ForeignKey +from sqlalchemy import BigInteger, Boolean, DateTime, Integer, Interval, LargeBinary, Text, UUID from . import base from . import builders +from . import builds +from . import database from . import misc from . 
import users @@ -20,57 +27,6 @@ from .errors import * # Setup logging log = logging.getLogger("pbs.jobs") -WITH_JOB_QUEUE_CTE = """ - -- Determine all users which exceed their quotas - %s, - - -- Collect all jobs and order them by priority - job_queue AS ( - SELECT - jobs.*, - rank() OVER ( - ORDER BY - builds.priority DESC, - - -- Put test builds at the end of the queue - CASE - WHEN builds.test THEN 1 - ELSE 0 - END, - - -- Order by when the install check succeeded - jobs.installcheck_performed_at, - - -- If there is anything else, order by creation time - jobs.created_at - ) AS _rank - FROM - jobs - LEFT JOIN - builds ON jobs.build_id = builds.id - WHERE - builds.deleted_at IS NULL - AND - jobs.deleted_at IS NULL - AND - jobs.started_at IS NULL - AND - jobs.finished_at IS NULL - AND - jobs.installcheck_succeeded IS TRUE - - -- Remove any jobs from users that have exceeded their quota - AND - ( - builds.owner_id IS NULL - OR - NOT builds.owner_id IN (SELECT user_id FROM users_with_exceeded_quotas) - ) - ORDER BY - _rank - ) -""" % users.WITH_EXCEEDED_QUOTAS_CTE - class Jobs(base.Object): connections = {} @@ -78,32 +34,16 @@ class Jobs(base.Object): # Setup queue self.queue = Queue(self.backend) - def _get_jobs(self, *args, **kwargs): - return self.db.fetch_many(Job, *args, **kwargs) - - async def _get_job(self, *args, **kwargs): - return await self.db.fetch_one(Job, *args, **kwargs) - async def create(self, build, arch, superseeds=None, timeout=None): - job = await self._get_job(""" - INSERT INTO - jobs - ( - build_id, - arch, - timeout - ) - VALUES - ( - %s, %s, %s - ) - RETURNING *""", - build, - arch, - timeout, - - # Populate cache - build=build, + """ + Create a new job + """ + # Insert into the database + job = await self.db.insert( + Job, + build = build, + arch = arch, + timeout = timeout, ) # Mark if the new job superseeds some other job @@ -112,71 +52,62 @@ class Jobs(base.Object): return job - async def get_by_id(self, id): - return await self._get_job("SELECT * FROM jobs WHERE id = %s", id) - async def get_by_uuid(self, uuid): - return await self._get_job("SELECT * FROM jobs WHERE uuid = %s", uuid) + """ + Fetch a job by its UUID + """ + stmt = ( + sqlalchemy + .select(Job) + .where( + Job.deleted_at == None, + Job.uuid == uuid, + ) + ) + + return await self.db.fetch_one(stmt) + + def get_running(self): + """ + Returns all currently running jobs + """ + stmt = ( + sqlalchemy + .select(Job) + .where( + Job.deleted_at == None, + Job.started_at != None, + Job.finished_at == None, + ) + .order_by( + Job.started_at.desc(), + ) + ) + + return self.backend.db.fetch(stmt) def get_finished(self, failed_only=False, limit=None, offset=None): """ Returns a list of all finished jobs """ - if failed_only: - jobs = self._get_jobs(""" - SELECT - * - FROM - jobs - WHERE - deleted_at IS NULL - AND - finished_at IS NOT NULL - AND - failed IS TRUE - ORDER BY - finished_at DESC - LIMIT - %s - OFFSET - %s - """, limit, offset) + stmt = ( + sqlalchemy + .select(Job) + .where( + Job.deleted_at == None, - else: - jobs = self._get_jobs(""" - SELECT - * - FROM - jobs - WHERE - deleted_at IS NULL - AND - finished_at IS NOT NULL - ORDER BY - finished_at DESC - LIMIT - %s - OFFSET - %s - """, limit, offset) + # Get finished jobs only + Job.finished_at != None, + ) + .limit(limit) + .offset(offset) + ) - return jobs + # Only show failed? 
+ if failed_only: + stmt = stmt.where(Job.failed == True) - def get_running(self): - return self._get_jobs(""" - SELECT - jobs.* - FROM - jobs - WHERE - deleted_at IS NULL - AND - started_at IS NOT NULL - AND - finished_at IS NULL - ORDER BY - started_at DESC - """) + return self.backend.db.fetch(stmt) async def launch(self, jobs): """ @@ -225,57 +156,124 @@ class Queue(base.Object): # Locked when the queue is being processed lock = asyncio.Lock() - async def __aiter__(self): - jobs = await self.get_jobs() + @functools.cached_property + def queue(self): + # XXX Need to filter out any jobs from users that have reached their quotas - return aiter(jobs) + return ( + sqlalchemy - async def get_length(self): - res = await self.db.get(""" - WITH %s + # Collect all jobs and order them by priority + .select( + Job.id.label("job_id"), - SELECT - COUNT(*) AS len - FROM - job_queue - """ % WITH_JOB_QUEUE_CTE) + # Number the jobs by their priority + sqlalchemy.func.rank() + .over( + order_by = ( + builds.Build.priority.desc(), - if res: - return res.len + # Put test builds at the end of the queue + sqlalchemy.case( + (builds.Build.test == True, 1), else_=0, + ), - return 0 + # Order by when the installcheck succeeded + Job.installcheck_performed_at, - async def get_jobs(self, limit=None): - jobs = await self.backend.jobs._get_jobs(""" - WITH %s + # Otherwise use the creation time + Job.created_at, + ), + ).label("rank"), + ) - SELECT - * - FROM - job_queue - LIMIT - %%s - """ % WITH_JOB_QUEUE_CTE, limit, + # Filter out any deleted objects + .where( + builds.Build.deleted_at == None, + Job.deleted_at == None, + ) + + # Filter out any jobs that are not pending + .where( + Job.started_at == None, + Job.finished_at == None, + ) + + # The installcheck must have succeeded + .where( + Job.installcheck_succeeded == True, + ) + + # Order everything by its rank + .order_by("rank") + + # Name the cte + .cte("job_queue") ) - return list(jobs) + def __aiter__(self): + return self.get_jobs() + + async def length(self): + """ + The total length of the job queue + """ + stmt = ( + sqlalchemy - async def get_jobs_for_builder(self, builder, limit=None): + .select( + sqlalchemy.func.count( + self.queue.c.job_id, + ).label("jobs") + ) + #.select_from(self.queue) + ) + + # Run the query + result = await self.db.select_one(stmt) + + return result.jobs + + def get_jobs(self, limit=None): + """ + Returns all or a limited number of jobs ordered by their priority + """ + stmt = ( + sqlalchemy + + # Select jobs + .select(Job) + + # Order them by their rank + .order_by(self.queue.c.rank) + + # Optionally limit + .limit(limit) + ) + + return self.db.fetch(stmt) + + def get_jobs_for_builder(self, builder, limit=None): """ Returns all jobs that the given builder can process. 
""" - return await self.backend.jobs._get_jobs(""" - WITH %s + stmt = ( + sqlalchemy - SELECT - * - FROM - job_queue - WHERE - job_queue.arch = ANY(%%s) - """ % WITH_JOB_QUEUE_CTE, builder.supported_arches, + # Select jobs + .select(Job) + + # Filter by matching architectures + .where( + Job.arch in builder.supported_atches, + ) + + # Order them by their rank + .order_by(self.queue.c.rank) ) + return self.db.fetch(stmt) + async def dispatch(self): """ Will be called regularly and will dispatch any pending jobs to any @@ -317,10 +315,10 @@ class Queue(base.Object): log.debug(" Processing builder %s" % builder) - with self.backend.db.transaction(): + async with await self.db.transaction(): try: # We are ready for a new job - for job in self.get_jobs_for_builder(builder): + async for job in self.get_jobs_for_builder(builder): # Perform installcheck (just to be sure) if not await job.installcheck(): log.debug("Job %s failed its installcheck" % job) @@ -355,11 +353,8 @@ class Queue(base.Object): await self._dispatch() -class Job(base.DataObject): - table = "jobs" - - def __repr__(self): - return "<%s id=%s %s>" % (self.__class__.__name__, self.id, self.name) +class Job(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "jobs" def __str__(self): return self.name @@ -371,6 +366,36 @@ class Job(base.DataObject): return NotImplemented + @property + def name(self): + return "%s-%s.%s" % (self.pkg.name, self.pkg.evr, self.arch) + + # ID + + id = Column(Integer, primary_key=True) + + # UUID + + uuid = Column(UUID, nullable=False) + + # Build ID + + build_id = Column(Integer, ForeignKey("builds.id"), nullable=False) + + # Build + + build = sqlalchemy.orm.relationship("Build", back_populates="alljobs", lazy="selectin") + + # Arch + + arch = Column(Text, nullable=False) + + def is_test(self): + """ + Returns True if this job belongs to a test build + """ + return self.build.is_test() + def has_perm(self, user): """ Check permissions @@ -382,47 +407,25 @@ class Job(base.DataObject): # This is the same as for builds return self.build.has_perm(user) - @property - def uuid(self): - return self.data.uuid - - @property - def name(self): - return "%s-%s.%s" % (self.pkg.name, self.pkg.evr, self.arch) - - @lazy_property - def build(self): - return self.backend.builds.get_by_id(self.data.build_id) - - def is_test(self): - """ - Returns True if this job belongs to a test build - """ - return self.build.is_test() + # Package @property def pkg(self): return self.build.pkg - # Packages + # Binary Packages - @lazy_property - async def packages(self): - packages = await self.backend.packages._get_packages(""" - SELECT - packages.* - FROM - job_packages - LEFT JOIN - packages ON job_packages.pkg_id = packages.id - WHERE - job_packages.job_id = %s - ORDER BY - packages.name""", - self.id, - ) + JobPackages = sqlalchemy.Table( + "job_packages", database.Base.metadata, - return list(packages) + # Job + Column("job_id", ForeignKey("jobs.id")), + + # Package + Column("pkg_id", ForeignKey("packages.id")), + ) + + packages = sqlalchemy.orm.relationship("Package", secondary=JobPackages, lazy="selectin") async def _import_packages(self, uploads): """ @@ -453,9 +456,6 @@ class Job(base.DataObject): ) ) - # Update the cache - self.packages = packages - # Consume all packages for upload in uploads: await upload.delete() @@ -514,6 +514,8 @@ class Job(base.DataObject): if res: return res.build_time + # Distro + @property def distro(self): return self.build.distro @@ -522,21 +524,20 @@ class 
Job(base.DataObject): """ Returns True if this job has been superseeded by another one """ - if self.data.superseeded_by: + if self.superseeded_by: return True return False - def get_superseeded_by(self): - if self.data.superseeded_by: - return self.backend.jobs.get_by_id(self.data.superseeded_by) + # Superseeded By ID - def set_superseeded_by(self, superseeded_by): - assert isinstance(superseeded_by, self.__class__) + superseeded_by_id = Column(Integer, ForeignKey("jobs.id")) - self._set_attribute("superseeded_by", superseeded_by.id) + # Superseeded By - superseeded_by = lazy_property(get_superseeded_by, set_superseeded_by) + superseeded_by = sqlalchemy.orm.relationship( + "Job", remote_side=[id], lazy="selectin", + ) @lazy_property def preceeding_jobs(self): @@ -581,33 +582,22 @@ class Job(base.DataObject): """ return [self] + self.preceeding_jobs - @property - def created_at(self): - """ - Returns when this job was created - """ - return self.data.created_at + # Created At - @property - def started_at(self): - """ - Returns when this job was started - """ - return self.data.started_at + created_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.func.current_timestamp()) - @property - def finished_at(self): - """ - Returns when this job finished - """ - return self.data.finished_at + # Started At - @property - def timeout(self): - """ - The timeout for this jobs - """ - return self.data.timeout + started_at = Column(DateTime(timezone=False), nullable=False) + + # Finished At + + finished_at = Column(DateTime(timezone=False), nullable=False) + + # Timeout + + timeout = Column(Interval) @property def times_out_in(self): @@ -631,10 +621,10 @@ class Job(base.DataObject): log.info("Starting job %s on %s" % (self, builder)) # Store the assigned builder - self._set_attribute("builder_id", builder) + self.builder = builder # Store the time - self._set_attribute_now("started_at") + self.started_at = sqlalchemy.func.current_timestamp() def connected(self, connection): """ @@ -683,7 +673,7 @@ class Job(base.DataObject): log.error("Job %s has finished with an error" % self) # Store the time - self._set_attribute_now("finished_at") + self.finished_at = sqlalchemy.func.current_timestamp() # Import log if logfile: @@ -694,10 +684,10 @@ class Job(base.DataObject): await self._import_packages(packages) # Store message - self._set_attribute("message", message) + self.message = message # Mark as failed - self._set_attribute("failed", not success) + self.failed = not success # On success, update all repositories if success: @@ -745,7 +735,7 @@ class Job(base.DataObject): """ return self.is_pending(installcheck=True) - def is_halted(self): + async def is_halted(self): # Only scratch builds can be halted if not self.build.owner: return False @@ -754,7 +744,7 @@ class Job(base.DataObject): return False # Halt if users have exceeded their quota - return self.build.owner.has_exceeded_build_quota() + return await self.build.owner.has_exceeded_build_quota() def is_running(self): """ @@ -773,14 +763,24 @@ class Job(base.DataObject): return False + # Failed + + failed = Column(Boolean) + + # Failed? + def has_failed(self): """ Returns True if this job has failed """ if self.has_finished(): - return self.data.failed + return self.failed + + # Aborted - # Abort + aborted = Column(Boolean, nullable=False, default=False) + + # Abort! 
async def abort(self, user=None): """ @@ -794,35 +794,47 @@ class Job(base.DataObject): }) # Mark as finished - self._set_attribute_now("finished_at") + self.finished_at = sqlalchemy.func.current_timestamp() # Mark as aborted - self._set_attribute("aborted", True) + self.aborted = True if user: - self._set_attribute("aborted_by", user) + self.aborted_by = user # Try to dispatch more jobs in the background await self.backend.jobs.queue.dispatch() + # Aborted? + def is_aborted(self): """ Returns True if this job has been aborted """ - return self.data.aborted + if self.aborted: + return True + + return False + + # Aborted At @property def aborted_at(self): if self.is_aborted(): - return self.data.finished_at + return self.finished_at - @lazy_property - def aborted_by(self): - if self.data.aborted_by: - return self.backend.users.get_by_id(self.data.aborted_by) + # Aborted By ID - @property - def message(self): - return self.data.message + aborted_by_id = Column(Integer, ForeignKey("users.id")) + + # Aborted By + + aborted_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[aborted_by_id], lazy="selectin", + ) + + # Message + + message = Column(Text, nullable=False, default="") async def delete(self, user=None): """ @@ -840,14 +852,16 @@ class Job(base.DataObject): # Delete the log await self._delete_log() - def clone(self): + # Clone! + + async def clone(self): """ Clones this build job """ - job = self.backend.jobs.create( - build=self.build, - arch=self.arch, - superseeds=self, + job = await self.backend.jobs.create( + build = self.build, + arch = self.arch, + superseeds = self, ) log.debug("Cloned job %s as %s" % (self, job)) @@ -890,17 +904,19 @@ class Job(base.DataObject): def log_url(self): return self.backend.path_to_url(self.log_path) - @property - def log_path(self): - return self.data.log_path + # Log Path - @property - def log_size(self): - return self.data.log_size + log_path = Column(Text) - @property - def log_digest_blake2s(self): - return self.data.log_digest_blake2s + # Log Size + + log_size = Column(BigInteger) + + # Log Digest (blake2s) + + log_digest_blake2s = Column(LargeBinary) + + # Open Log async def open_log(self): """ @@ -910,11 +926,9 @@ class Job(base.DataObject): if not self.has_log(): raise FileNotFoundError - return await asyncio.to_thread(self._open_log) - - def _open_log(self): - path = self.log_path + return await asyncio.to_thread(self._open_log, self.log_path) + def _open_log(self, path): # Open gzip-compressed files if path.endswith(".gz"): return gzip.open(path) @@ -923,6 +937,8 @@ class Job(base.DataObject): else: return open(path) + # Tail Log + async def tail_log(self, limit): """ Tails the log file (i.e. 
returns the N last lines)
@@ -936,6 +952,8 @@ class Job(base.DataObject):
 		except FileNotFoundError as e:
 			return []
 
+	# Import the logfile
+
 	async def _import_logfile(self, upload):
 		uuid = "%s" % self.uuid
 
@@ -959,9 +977,9 @@ class Job(base.DataObject):
 		digest = await upload.digest("blake2s")
 
 		# Store everything in the database
-		self._set_attribute("log_path", path)
-		self._set_attribute("log_size", upload.size)
-		self._set_attribute("log_digest_blake2s", digest)
+		self.log_path = path
+		self.log_size = upload.size
+		self.log_digest_blake2s = digest
 
 		# Consume the upload object
 		await upload.delete()
@@ -978,16 +996,17 @@ class Job(base.DataObject):
 		await self.backend.unlink(self.log_path)
 
 		# Reset all database attributes
-		self._set_attribute("log_path", None)
-		self._set_attribute("log_size", None)
-		self._set_attribute("log_digest_blake2s", None)
+		self.log_path = None
+		self.log_size = None
+		self.log_digest_blake2s = None
+
+	# Builder ID
+
+	builder_id = Column(Integer, ForeignKey("builders.id"))
 
 	# Builder
 
-	@lazy_property
-	def builder(self):
-		if self.data.builder_id:
-			return self.backend.builders.get_by_id(self.data.builder_id)
+	builder = sqlalchemy.orm.relationship("Builder", foreign_keys=[builder_id], lazy="selectin")
 
 	@property
 	def ccache_enabled(self):
@@ -1036,10 +1055,6 @@ class Job(base.DataObject):
 
 		return path
 
-	@property
-	def arch(self):
-		return self.data.arch
-
 	@property
 	def duration(self):
 		"""
@@ -1106,18 +1121,18 @@ class Job(base.DataObject):
 
 		# Everything OK
 		else:
-			self._set_attribute("installcheck_succeeded", True)
+			self.installcheck_succeeded = True
 
 			# Store the timestamp
-			self._set_attribute_now("installcheck_performed_at")
+			self.installcheck_performed_at = sqlalchemy.func.current_timestamp()
 
-	@property
-	def installcheck_succeeded(self):
-		return self.data.installcheck_succeeded
+	# Installcheck Succeeded?
-	@property
-	def installcheck_performed_at(self):
-		return self.data.installcheck_performed_at
+	installcheck_succeeded = Column(Boolean)
+
+	# Installcheck Performed At
+
+	installcheck_performed_at = Column(DateTime(timezone=False))
 
 	# Reverse Requires
diff --git a/src/buildservice/keys.py b/src/buildservice/keys.py
index a31159f1..4e1ff393 100644
--- a/src/buildservice/keys.py
+++ b/src/buildservice/keys.py
@@ -5,10 +5,13 @@
 import datetime
 import io
 import logging
 import pakfire
+import sqlalchemy
 
-from . import base
+from sqlalchemy import Column, ForeignKey
+from sqlalchemy import DateTime, Integer, Text
 
-from .decorators import *
+from . import base
+from . 
import database # Setup logging log = logging.getLogger("pbs.keys") @@ -16,24 +19,6 @@ log = logging.getLogger("pbs.keys") DEFAULT_ALGORITHM = pakfire.PAKFIRE_KEY_ALGO_ED25519 class Keys(base.Object): - def _get_keys(self, query, *args, **kwargs): - return self.db.fetch_many(Key, query, *args, **kwargs) - - def _get_key(self, query, *args, **kwargs): - return self.db.fetch_one(Key, query, *args, **kwargs) - - def __iter__(self): - return self._get_keys(""" - SELECT - * - FROM - keys - WHERE - deleted_at IS NULL - ORDER BY - created_at - """) - async def create(self, comment, user=None): """ Creates a new key @@ -50,88 +35,75 @@ class Keys(base.Object): public_key = key.export() # Store the key in the database - return self._get_key(""" - INSERT INTO - keys - ( - created_by, - public_key, - secret_key, - key_id, - comment - ) - VALUES - ( - %s, %s, %s, %s, %s - ) - RETURNING * - """, user, public_key, secret_key, key.id, comment, + key = await self.db.insert( + Key, + created_by = user, + public_key = public_key, + secret_key = secret_key, + key_id = key.id, + comment = comment, ) - def get_by_id(self, id): - return self._get_key(""" - SELECT - * - FROM - keys - WHERE - id = %s - """, id, - ) + return key +class Key(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "keys" -class Key(base.DataObject): - table = "keys" + # ID - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, self.key_id) + id = Column(Integer, primary_key=True) - def delete(self, user=None): - # Mark as deleted - self._set_attribute_now("deleted_at") - if user: - self._set_attribute("deleted_by", user) + # Created At - def has_perm(self, user): - # Anonymous users have no permission - if not user: - return False + created_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.func.current_timestamp()) - # Admins have all permissions - return user.is_admin() + # Created By ID - # Key ID + created_by_id = Column(Integer, ForeignKey("users.id")) - @property - def key_id(self): - return self.key_id + # Created By - # Comment + created_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[created_by_id], lazy="selectin", + ) - @property - def comment(self): - return self.data.comment + # Deleted By ID - # Created At + deleted_by_id = Column(Integer, ForeignKey("users.id")) + + # Deleted By - @property - def created_at(self): - return self.data.created_at + deleted_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[deleted_by_id], lazy="selectin", + ) # Public Key - @property - def public_key(self): - return self.data.public_key + public_key = Column(Text, nullable=False) # Secret Key - @property - def secret_key(self): - return self.data.secret_key + secret_key = Column(Text, nullable=False) + + # Key ID + + key_id = Column(Integer, nullable=False) + + # Comment + + comment = Column(Text) def _make_key(self, pakfire): """ Parses the key and returns a Key object """ return pakfire.import_key(self.secret_key) + + def has_perm(self, user): + # Anonymous users have no permission + if not user: + return False + + # Admins have all permissions + return user.is_admin() diff --git a/src/buildservice/mirrors.py b/src/buildservice/mirrors.py index 4ec3f025..60859ad1 100644 --- a/src/buildservice/mirrors.py +++ b/src/buildservice/mirrors.py @@ -2,6 +2,7 @@ import asyncio import datetime +import functools import logging import random import socket @@ -10,7 +11,11 @@ import urllib.parse import location +import sqlalchemy +from sqlalchemy import Boolean, 
Column, DateTime, Double, ForeignKey, Integer, Text + from . import base +from . import database from . import httpclient from .decorators import * @@ -19,54 +24,48 @@ from .decorators import * log = logging.getLogger("pbs.mirrors") class Mirrors(base.Object): - def _get_mirror(self, query, *args): - res = self.db.get(query, *args) - - if res: - return Mirror(self.backend, res.id, data=res) - - def _get_mirrors(self, query, *args): - res = self.db.query(query, *args) - - for row in res: - yield Mirror(self.backend, row.id, data=row) - - def __iter__(self): - mirrors = self._get_mirrors(""" - SELECT - * - FROM - mirrors - WHERE - deleted_at IS NULL - ORDER BY - hostname - """, + def __aiter__(self): + stmt = ( + sqlalchemy + .select(Mirror) + .where( + Mirror.deleted_at == None, + ) + + # Order them by hostname + .order_by(Mirror.hostname) + ) + + # Fetch the mirrors + return self.db.fetch(stmt) + + async def get_by_hostname(self, hostname): + stmt = ( + sqlalchemy + .select(Mirror) + .where( + Mirror.deleted_at == None, + + # Match by hostname + Mirror.hostname == hostname, + ) ) - return iter(mirrors) + return await self.db.fetch_one(stmt) async def create(self, hostname, path, owner, contact, notes, user=None, check=True): """ Creates a new mirror """ - mirror = self._get_mirror(""" - INSERT INTO - mirrors - ( - hostname, - path, - owner, - contact, - notes, - created_by - ) - VALUES( - %s, %s, %s, %s, %s, %s - ) - RETURNING - * - """, hostname, path, owner, contact, notes, user, + # Create the new mirror + mirror = await self.db.insert( + Mirror, + hostname = hostname, + path = path, + owner = owner, + contact = contact, + notes = notes, + created_by = user, ) log.info("Mirror %s has been created" % mirror) @@ -77,30 +76,6 @@ class Mirrors(base.Object): return mirror - def get_by_id(self, id): - return self._get_mirror(""" - SELECT - * - FROM - mirrors - WHERE - id = %s - """, id, - ) - - def get_by_hostname(self, hostname): - return self._get_mirror(""" - SELECT - * - FROM - mirrors - WHERE - deleted_at IS NULL - AND - hostname = %s - """, hostname, - ) - def get_mirrors_for_address(self, address): """ Returns all mirrors in random order with preferred mirrors first @@ -121,14 +96,14 @@ class Mirrors(base.Object): # Fetch all mirrors and shuffle them, but put preferred mirrors first return sorted(self, key=__sort) - @lazy_property + @functools.cached_property def location(self): """ The location database """ return location.Database("/var/lib/location/database.db") - @lazy_property + @functools.cached_property def resolver(self): """ A DNS resolver @@ -141,12 +116,14 @@ class Mirrors(base.Object): """ # Check all mirrors concurrently async with asyncio.TaskGroup() as tg: - for mirror in self: - tg.create_task(mirror.check(*args, **kwargs)) + async for mirror in self: + tg.create_task( + mirror.check(*args, **kwargs), + ) -class Mirror(base.DataObject): - table = "mirrors" +class Mirror(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "mirrors" def __str__(self): return self.hostname @@ -157,13 +134,21 @@ class Mirror(base.DataObject): return NotImplemented - @property - def hostname(self): - return self.data.hostname + # ID - @property - def path(self): - return self.data.path + id = Column(Integer, primary_key=True) + + # Hostname + + hostname = Column(Text, unique=True, nullable=False) + + # XXX Must be unique over non-deleted items + + # Path + + path = Column(Text, nullable=False) + + # URL @property def url(self): @@ -180,43 +165,43 @@ class 
Mirror(base.DataObject): return urllib.parse.urljoin(url, path) - @property - def last_check_success(self): - """ - True if the last check was successful - """ - return self.data.last_check_success + # Last Check Success - True if the last check was successful - @property - def last_check_at(self): - """ - The timestamp of the last check - """ - return self.data.last_check_at + last_check_success = Column(Boolean, nullable=False, default=False) - @property - def error(self): - """ - The error message of the last unsuccessful check - """ - return self.data.error + # Last Check At - @property - def created_at(self): - return self.data.created_at + last_check_at = Column(DateTime(timezone=False)) - # Delete + # Error Message when the check has been unsuccessful - def delete(self, user): - """ - Deleted this mirror - """ - self._set_attribute_now("deleted_at") - if user: - self._set_attribute("deleted_by", user) + error = Column(Text, nullable=False, default="") + + # Created At + + created_at = Column( + DateTime(timezone=False), nullable=False, server_default=sqlalchemy.func.current_timestamp(), + ) + + # Created By ID - # Log the event - log.info("Mirror %s has been deleted" % self) + created_by_id = Column(Integer, ForeignKey("users.id"), nullable=False) + + # Created By + + created_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[created_by_id], lazy="selectin", + ) + + # Deleted By ID + + deleted_by_id = Column(Integer, ForeignKey("users.id")) + + # Deleted By + + deleted_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[deleted_by_id], lazy="selectin", + ) def has_perm(self, user): # Anonymous users have no permission @@ -228,50 +213,23 @@ class Mirror(base.DataObject): # Owner - def get_owner(self): - return self.data.owner - - def set_owner(self, owner): - self._set_attribute("owner", owner) - - owner = property(get_owner, set_owner) + owner = Column(Text, nullable=False) # Contact - def get_contact(self): - return self.data.contact - - def set_contact(self, contact): - self._set_attribute("contact", contact) - - contact = property(get_contact, set_contact) + contact = Column(Text, nullable=False) # Notes - def get_notes(self): - return self.data.notes + notes = Column(Text, nullable=False, default="") - def set_notes(self, notes): - self._set_attribute("notes", notes or "") + # Country Code - notes = property(get_notes, set_notes) + country_code = Column(Text) - # Country Code & ASN + # ASN - @property - def country_code(self): - """ - The country code - """ - return self.data.country_code - - @lazy_property - def asn(self): - """ - The Autonomous System - """ - if self.data.asn: - return self.backend.mirrors.location.get_as(self.data.asn) + asn = Column(Integer) async def _update_country_code_and_asn(self): """ @@ -301,10 +259,10 @@ class Mirror(base.DataObject): continue # Store the country code - self._set_attribute("country_code", network.country_code) + self.country_code = network.country_code # Store the ASN - self._set_attribute("asn", network.asn) + self.asn = network.asn # Once is enough break @@ -345,7 +303,7 @@ class Mirror(base.DataObject): log.debug("Running mirror check for %s" % self.hostname) # Wrap this into one large transaction - with self.db.transaction(): + async with await self.db.transaction(): # Update the country code & ASN await self._update_country_code_and_asn() @@ -406,75 +364,103 @@ class Mirror(base.DataObject): success = True # Log this check - self.db.execute(""" - INSERT INTO - mirror_checks - ( - mirror_id, - success, - 
response_time, - http_status, - last_sync_at, - error - ) - VALUES - ( - %s, %s, %s, %s, %s, %s - ) - """, - self.id, - success, - response.request_time if response else None, - response.code if response else None, - timestamp, - error, + await self.db.insert( + MirrorCheck, + mirror_id = self.id, + success = success, + response_time = response.request_time if response else None, + http_status = response.code if response else None, + last_sync_at = timestamp, + error = error, ) # Update the main table - self._set_attribute_now("last_check_at") - self._set_attribute("last_check_success", success) - self._set_attribute("last_sync_at", timestamp) - self._set_attribute("error", error) + self.last_check_at = sqlalchemy.func.current_timestamp() + self.last_check_success = success + self.last_sync_at = timestamp + self.error = error - def get_uptime_since(self, t): + async def get_uptime_since(self, t): # Convert timedeltas to absolute time if isinstance(t, datetime.timedelta): t = datetime.datetime.utcnow() - t - res = self.db.get(""" - -- SELECT all successful checks and find out when the next one failed - WITH uptimes AS ( - SELECT - success, - LEAST( - LEAD(checked_at, 1, CURRENT_TIMESTAMP) - OVER (ORDER BY checked_at ASC) - - checked_at, - INTERVAL '1 hour' - ) AS uptime - FROM - mirror_checks - WHERE - mirror_id = %s - AND - checked_at >= %s - ) - SELECT - ( - EXTRACT( - epoch FROM SUM(uptime) FILTER (WHERE success IS TRUE) - ) - / - EXTRACT( - epoch FROM SUM(uptime) - ) - ) AS uptime - FROM - uptimes - """, self.id, t, - ) + # CTE with uptimes + uptimes = sqlalchemy.select( + MirrorCheck.success, + sqlalchemy.func.least( + sqlalchemy.func.lead( + MirrorCheck.checked_at, + 1, + sqlalchemy.func.current_timestamp(), + ).over( + order_by=MirrorCheck.checked_at.asc(), + ) + - + MirrorCheck.checked_at, + sqlalchemy.text("INTERVAL '1 hour'"), + ).label("uptime"), + ).where( + MirrorCheck.mirror_id == self.id, + MirrorCheck.checked_at >= t, + ).cte("uptimes") + + # Check the percentage of how many checks have been successful + stmt = sqlalchemy.select( + ( + sqlalchemy.func.extract( + "epoch", + sqlalchemy.func.sum( + uptimes.c.uptime, + ).filter( + uptimes.c.success == True, + ), + ) + / + sqlalchemy.func.extract( + "epoch", + sqlalchemy.func.sum( + uptimes.c.uptime + ), + ) + ).label("uptime") + ).select_from(uptimes) + + # Run the statement + return await self.db.select_one(stmt, "uptime") + + +class MirrorCheck(database.Base): + """ + An object that represents a single mirror check + """ + __tablename__ = "mirror_checks" + + # Mirror ID + + mirror_id = Column(Integer, ForeignKey("mirrors.id"), primary_key=True, nullable=False) + + # Checked At + + checked_at = Column(DateTime(timezone=None), primary_key=True, nullable=False, + server_default=sqlalchemy.func.current_timestamp()) + + # Success + + success = Column(Boolean, nullable=False) + + # Response Time + + response_time = Column(Double) + + # HTTP Status + + http_status = Column(Integer) + + # Last Sync At + + last_sync_at = Column(DateTime(timezone=None)) - if res: - return res.uptime or 0 + # Error - return 0 + error = Column(Text) diff --git a/src/buildservice/misc.py b/src/buildservice/misc.py index 54427446..ce5cdf41 100644 --- a/src/buildservice/misc.py +++ b/src/buildservice/misc.py @@ -45,15 +45,6 @@ def normalize(*args, iteration=1): return "-".join(s.split()) -def format_size(s): - units = ("B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB") - - for unit in units: - if s < 1024: - return "%d %s" % (round(s), unit) - - s /= 1024 
- async def group(items, key): """ This function takes some iterable and returns it grouped by key. diff --git a/src/buildservice/packages.py b/src/buildservice/packages.py index 3f24edb4..1b747c73 100644 --- a/src/buildservice/packages.py +++ b/src/buildservice/packages.py @@ -8,10 +8,17 @@ import os import shutil import stat +import sqlalchemy +from sqlalchemy import Column, Computed, ForeignKey +from sqlalchemy import ARRAY, BigInteger, Boolean, DateTime, Integer, LargeBinary, Text, UUID +from sqlalchemy.dialects.postgresql import TSVECTOR + import pakfire from . import base +from . import builds from . import database +from . import jobs from . import misc from .constants import * @@ -22,55 +29,50 @@ from .errors import * log = logging.getLogger("pbs.packages") class Packages(base.Object): - async def _get_packages(self, *args, **kwargs): - return await self.db.fetch_many(Package, *args, **kwargs) - - async def _get_package(self, *args, **kwargs): - return await self.db.fetch_one(Package, *args, **kwargs) - - def get_list(self): + async def list(self): """ Returns a list with all package names and the summary line that have at one time been part of the distribution """ - return self.db.query(""" - SELECT - DISTINCT ON (packages.name) - packages.name AS name, - packages.summary AS summary, - packages.created_at - FROM - packages - LEFT JOIN - builds ON packages.id = builds.pkg_id - WHERE - packages.deleted_at IS NULL - AND - builds.deleted_at IS NULL - AND - packages.arch = %s - ORDER BY - packages.name, - packages.created_at DESC - """, "src", - ) + stmt = \ + sqlalchemy.select( + Package.name, + Package.summary, + Package.created_at, + ) \ + .distinct(Package.name) \ + .select_from(Package) \ + .join( + builds.Build, + Package.id == builds.Build.pkg_id, + isouter=True, + ) \ + .where( + Package.deleted_at == None, + builds.Build.deleted_at == None, + Package.arch == "src", + ) \ + .order_by( + Package.name, + Package.created_at.desc(), + ) - async def get_by_id(self, id): - return await self._get_package("SELECT * FROM packages WHERE id = %s", id) + return self.db.select(stmt) async def get_by_uuid(self, uuid): - return await self._get_package(""" - SELECT - * - FROM - packages - WHERE - deleted_at IS NULL - AND - uuid = %s - """, uuid, + stmt = ( + sqlalchemy + .select(Package) + .where( + Package.deleted_at == None, + + # Match by UUID + Package.uuid == uuid, + ) ) + return await self.db.fetch_one(stmt) + async def get_by_buildid(self, buildid): """ Fetches the debug information for the given BuildID @@ -112,70 +114,39 @@ class Packages(base.Object): raise NoSuchDistroError(package.distribution) + # Extract the digest + digest_type, digest = package.digest + # Insert into database - pkg = await self._get_package(""" - INSERT INTO - packages - ( - name, - evr, - arch, - uuid, - groups, - distro_id, - packager, - license, - url, - summary, - description, - requires, - provides, - conflicts, - obsoletes, - recommends, - suggests, - size, - build_arches, - commit_id, - build_id, - build_host, - build_time, - filesize, - digest_type, - digest - ) - VALUES - ( - %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, - %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, - %s, %s, %s, %s, %s, %s - ) - RETURNING *""", - package.name, - package.evr, - package.arch, - package.uuid, - package.groups, - distro, - package.packager, - package.license, - package.url, - package.summary or "", - package.description or "", - package.requires, - package.provides, - package.conflicts, - package.obsoletes, - package.recommends, - 
package.suggests, - package.installsize, - package.build_arches, - commit, - package.build_id, - package.buildhost, - datetime.datetime.fromtimestamp(package.buildtime), - package.downloadsize, - *package.digest, + pkg = await self.db.insert( + Package, + name = package.name, + evr = package.evr, + arch = package.arch, + uuid = package.uuid, + groups = package.groups, + distro = distro, + packager = package.packager, + license = package.license, + url = package.url, + summary = package.summary or "", + description = package.description or "", + prerequires = package.prerequires, + requires = package.requires, + provides = package.provides, + conflicts = package.conflicts, + obsoletes = package.obsoletes, + recommends = package.recommends, + suggests = package.suggests, + installsize = package.installsize, + build_arches = package.build_arches, + commit = commit, + build_id = package.build_id, + build_host = package.buildhost, + build_time = datetime.datetime.fromtimestamp(package.buildtime), + filesize = package.downloadsize, + digest_type = digest_type, + digest = digest, ) # Import filelist @@ -192,124 +163,105 @@ class Packages(base.Object): This function does not work for UUIDs or filenames. """ - packages = await self._get_packages(""" - WITH package_search_index AS ( - -- Source packages - SELECT - packages.id AS package_id, - packages._search AS document - FROM - builds - LEFT JOIN - packages ON builds.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - builds.test IS FALSE - AND - packages.deleted_at IS NULL - - UNION - - -- Binary Packages - SELECT - source_packages.id AS package_id, - packages._search AS document - FROM - builds - LEFT JOIN - jobs ON builds.id = jobs.build_id - LEFT JOIN - job_packages ON jobs.id = job_packages.job_id - LEFT JOIN - packages ON job_packages.pkg_id = packages.id - LEFT JOIN - packages source_packages ON builds.pkg_id = source_packages.id - WHERE - builds.deleted_at IS NULL - AND - builds.test IS FALSE - AND - jobs.deleted_at IS NULL - AND - packages.deleted_at IS NULL - AND - source_packages.deleted_at IS NULL - ), - - search AS ( - SELECT - packages.id AS package_id - FROM - package_search_index search_index - JOIN - packages ON search_index.package_id = packages.id - AND - search_index.document @@ websearch_to_tsquery('english', %s) - ORDER BY - ts_rank(search_index.document, websearch_to_tsquery('english', %s)) DESC - LIMIT - %s + source_packages = sqlalchemy.orm.aliased(Package) + + _source_packages = ( + sqlalchemy + .select( + source_packages.id.label("pkg_id"), + source_packages.search, + ) + .join(builds.Build, source_packages.id == builds.Build.pkg_id) + .where( + # Objects must exist + source_packages.deleted_at == None, + builds.Build.deleted_at == None, + + # Ignore test builds + builds.Build.test == False, ) + ) - SELECT - DISTINCT ON (packages.name) - packages.* - FROM - search - LEFT JOIN - packages ON search.package_id = packages.id - """, q, q, limit, + _binary_packages = ( + sqlalchemy + .select( + source_packages.id.label("pkg_id"), + Package.search, + ) + .select_from(builds.Build) + .join(jobs.Job) + .join(Package) + .join(source_packages) + .where( + # Objects must exist + source_packages.deleted_at == None, + builds.Build.deleted_at == None, + jobs.Job.deleted_at == None, + Package.deleted_at == None, + + # Ignore test builds + builds.Build.test == False, + ) ) - return list(packages) + search_index = ( + sqlalchemy + .union( + _source_packages, + _binary_packages, + ) + .cte("search_index") + ) - async def 
search_by_filename(self, filename, limit=None): - if "*" in filename: - filename = filename.replace("*", "%") - - packages = await self._get_packages(""" - SELECT - DISTINCT ON (packages.name) - packages.* - FROM - package_files - LEFT JOIN - packages ON package_files.pkg_id = packages.id - WHERE - package_files.path LIKE %s - ORDER BY - packages.name, - packages.build_time DESC - LIMIT - %s - """, filename, limit, + stmt = ( + sqlalchemy + .select(Package) + .select_from(search_index) + .join(Package, search_index.c.pkg_id == Package.id) + .where( + sqlalchemy.func.websearch_to_tsquery( + "english", q, + ) + .op("@@")(search_index.c.search), + ) + .order_by( + sqlalchemy.func.ts_rank( + search_index.c.search, + sqlalchemy.func.websearch_to_tsquery( + "english", q, + ), + ).desc(), ) + .limit(limit) + ) - else: - packages = await self._get_packages(""" - SELECT - DISTINCT ON (packages.name) - packages.* - FROM - package_files - LEFT JOIN - packages ON package_files.pkg_id = packages.id - WHERE - package_files.path = %s - ORDER BY - packages.name, - packages.build_time DESC - LIMIT - %s - """, filename, limit, + return await self.db.fetch_as_list(stmt) + + async def search_by_filename(self, filename, limit=None): + stmt = ( + sqlalchemy + .select(Package) + .distinct(Package.name) + .join( + File, + Package.id == File.pkg_id, ) + .where( + File.path.like(filename), + ) + .order_by( + Package.name, + Package.build_time.desc(), + ) + .limit(limit) + ) - return list(packages) + # Run the query + return await self.db.fetch_as_list(stmt) -class Package(base.DataObject): - table = "packages" +class Package(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "packages" def __repr__(self): return "<%s %s>" % (self.__class__.__name__, self.nevra) @@ -323,6 +275,24 @@ class Package(base.DataObject): return NotImplemented + # ID + + id = Column(Integer, primary_key=True) + + # UUID + + uuid = Column(UUID, nullable=False) + + # Build + + builds = sqlalchemy.orm.relationship("Build", back_populates="pkg", lazy="selectin") + + # Created At + + created_at = Column( + DateTime(timezone=False), nullable=False, server_default=sqlalchemy.func.current_timestamp(), + ) + async def delete(self, user=None): # Check if this package can be deleted if not self.can_be_deleted(): @@ -355,21 +325,19 @@ class Package(base.DataObject): # This package can be deleted return True - @property - def uuid(self): - return self.data.uuid + # Name - @property - def name(self): - return self.data.name + name = Column(Text, nullable=False) - @property - def evr(self): - return self.data.evr + # EVR - @property - def arch(self): - return self.data.arch + evr = Column(Text, nullable=False) + + # Arch + + arch = Column(Text, nullable=False) + + # Source? 
def is_source(self): """ @@ -377,68 +345,59 @@ class Package(base.DataObject): """ return self.arch == "src" + # NEVRA + @property def nevra(self): return "%s-%s.%s" % (self.name, self.evr, self.arch) - @property - def groups(self): - return self.data.groups + # Groups - @lazy_property - def packager(self): - return self.backend.users.get_by_email(self.data.packager) or self.data.packager + groups = Column(ARRAY(Text), nullable=False, default=[]) - @property - def license(self): - return self.data.license + # Packager - @property - def url(self): - return self.data.url + packager = Column(Text, nullable=False, default="") - @property - def summary(self): - # Remove any trailing full stops - return self.data.summary.removesuffix(".") + # License - @property - def description(self): - return self.data.description + license = Column(Text, nullable=False) - @property - def build_arches(self): - return self.data.build_arches + # URL - @property - def size(self): - return self.data.size + url = Column(Text, nullable=False) + + # Summary + + summary = Column(Text, nullable=False) + + # Description + + description = Column(Text, nullable=False) + + # Build Arches + + build_arches = Column(ARRAY(Text), nullable=False) + + # Size + + size = Column(BigInteger, nullable=False) # Dependencies - @property - def requires(self): - return self.data.requires + prerequires = Column(ARRAY(Text), nullable=False, default=[]) - @property - def provides(self): - return self.data.provides + requires = Column(ARRAY(Text), nullable=False, default=[]) - @property - def conflicts(self): - return self.data.conflicts + provides = Column(ARRAY(Text), nullable=False, default=[]) - @property - def obsoletes(self): - return self.data.obsoletes + conflicts = Column(ARRAY(Text), nullable=False, default=[]) - @property - def suggests(self): - return self.data.suggests + obsoletes = Column(ARRAY(Text), nullable=False, default=[]) - @property - def recommends(self): - return self.data.recommends + suggests = Column(ARRAY(Text), nullable=False, default=[]) + + recommends = Column(ARRAY(Text), nullable=False, default=[]) # Commit @@ -447,43 +406,54 @@ class Package(base.DataObject): if self.data.commit_id: return self.backend.sources.get_commit_by_id(self.data.commit_id) + # Distro ID + + distro_id = Column(Integer, ForeignKey("distributions.id"), nullable=False) + # Distro - @property - def distro(self): - return self.backend.distros.get_by_id(self.data.distro_id) + distro = sqlalchemy.orm.relationship("Distro", + foreign_keys=[distro_id], lazy="selectin") - @property - def build_id(self): - return self.data.build_id + # Build ID - @property - def build_host(self): - return self.data.build_host + build_id = Column(UUID) - @property - def build_time(self): - return self.data.build_time + # Build Host - @property - def path(self): - return self.data.path + build_host = Column(Text, nullable=False) + + # Build Time + + build_time = Column(DateTime(timezone=False), nullable=False) + + # Path + + path = Column(Text) + + # Download URL @property def download_url(self): return self.backend.path_to_url(self.path) + # Filename + @property def filename(self): return os.path.basename(self.path) - @property - def digest(self): - return (self.data.digest_type, self.data.digest) + # Digest Type - @property - def filesize(self): - return self.data.filesize + digest_type = Column(Text, nullable=False) + + # Digest + + digest = Column(LargeBinary, nullable=False) + + # File Size + + filesize = Column(BigInteger, nullable=False) async def 
_import_archive(self, archive): """ @@ -503,7 +473,7 @@ class Package(base.DataObject): log.debug("Importing %s to %s..." % (self, path)) # Store the path - await self._set_attribute("path", path) + self.path = path # Copy the file if it doesn't exist, yet if not await self.backend.exists(path): @@ -561,7 +531,7 @@ class Package(base.DataObject): ) @lazy_property - async def builds(self): + async def XXXbuilds(self): builds = await self.backend.builds._get_builds(""" SELECT * @@ -586,39 +556,44 @@ class Package(base.DataObject): # Files - def _get_files(self, *args, **kwargs): - return self.db.fetch_many(File, *args, package=self, **kwargs) + async def get_files(self): + """ + Returns the filelist of this package + """ + stmt = ( + sqlalchemy + .select(File) - async def _get_file(self, *args, **kwargs): - return await self.db.fetch_one(File, *args, package=self, **kwargs) + # Only select files from this package + .where( + File.pkg_id == self.id, + ) - @lazy_property - async def files(self): - return self._get_files(""" - SELECT - * - FROM - package_files - WHERE - pkg_id = %s - ORDER BY - path - """, self.id, + # Order by path + .order_by( + File.path, + ) ) + return self.db.fetch(stmt) + async def get_file(self, path): - return await self._get_file(""" - SELECT - * - FROM - package_files - WHERE - pkg_id = %s - AND - path = %s - """, self.id, path, + """ + Fetches a single file of this package + """ + stmt = ( + sqlalchemy + .select(File) + + # Only select files from this package and match by path + .where( + File.pkg_id == self.id, + File.path == path, + ) ) + return await self.db.fetch_one(stmt) + async def get_debuginfo(self, buildid): path = buildid_to_path(buildid) @@ -632,72 +607,102 @@ class Package(base.DataObject): """ return await self.backend.open(self.path) - -class File(base.Object): - def init(self, data, package): - self.data, self.package = data, package + # Search + + search = Column( + TSVECTOR, Computed( + """ + ( + setweight( + to_tsvector('simple'::regconfig, name), + 'A'::"char" + ) + || + setweight( + to_tsvector('english'::regconfig, summary), + 'B'::"char" + ) + || + setweight( + to_tsvector('english'::regconfig, description), + 'C'::"char" + ) + ) + """, + persisted=True, + ) + ) + + +class File(database.Base): + __tablename__ = "package_files" def __str__(self): return self.path - @property - def path(self): - return self.data.path + # Package ID - @property - def size(self): - return self.data.size + pkg_id = Column(Integer, ForeignKey("packages.id"), primary_key=True, nullable=False) + + # Package + + package = sqlalchemy.orm.relationship("Package", foreign_keys=[pkg_id], lazy="selectin") + + # Path + + path = Column(Text, primary_key=True, nullable=False) + + # Size + + size = Column(BigInteger, nullable=False) + + # Config + + config = Column(Boolean, nullable=False, default=False) + + # Mode + + mode = Column(Integer, nullable=False) + + # Type @property def type(self): return stat.S_IFMT(self.mode) - @property - def config(self): - return self.data.config + # uname - @property - def mode(self): - return self.data.mode + uname = Column(Text, nullable=False) - @property - def uname(self): - return self.data.uname + # gname - @property - def gname(self): - return self.data.gname + gname = Column(Text, nullable=False) - @property - def ctime(self): - return self.data.ctime + # Creation Time - @property - def mtime(self): - return self.data.mtime + ctime = Column(DateTime(timezone=False), nullable=False) - @property - def mimetype(self): - return 
self.data.mimetype + # Modification Time - @property - def capabilities(self): - try: - return self.data.capabilities.split() - except AttributeError: - return [] + mtime = Column(DateTime(timezone=False), nullable=False) + + # MIME Type + + mimetype = Column(Text) + + # Capabilities + + capabilities = Column(ARRAY(Text)) # Digest SHA512 - @property - def digest_sha512(self): - return self.data.digest_sha512 + digest_sha2_512 = Column(LargeBinary) # Digest SHA256 - @property - def digest_sha256(self): - return self.data.digest_sha256 + digest_sha2_256 = Column(LargeBinary) + + # Downloadable? def is_downloadable(self): """ @@ -706,6 +711,8 @@ class File(base.Object): # All regular files are downloadable return self.type == stat.S_IFREG + # Viewable? + def is_viewable(self): # Empty files cannot be viewed. if self.size == 0: @@ -721,16 +728,6 @@ class File(base.Object): return False - @property - def mimetype(self): - """ - The (guessed) MIME type of this file - """ - # Guess the MIME type of the file. - type, encoding = mimetypes.guess_type(self.path) - - return type or "application/octet-stream" - # Send Payload async def open(self): diff --git a/src/buildservice/releasemonitoring.py b/src/buildservice/releasemonitoring.py index 72dfeeb3..0aab0981 100644 --- a/src/buildservice/releasemonitoring.py +++ b/src/buildservice/releasemonitoring.py @@ -28,6 +28,9 @@ import re import shutil import urllib.parse +import sqlalchemy +from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, Text + from . import base from . import config from . import database @@ -75,823 +78,767 @@ class BuildExistsError(Exception): pass -class Monitorings(base.Object): - baseurl = "https://release-monitoring.org" - - async def _request(self, method, url, data=None): - body = {} - - # Fetch the API key - api_key = await self.settings.get("release-monitoring-api-key") +class MonitoringRelease(database.Base, database.BackendMixin): + __tablename__ = "release_monitoring_releases" - # Authenticate to the API - headers = { - "Authorization" : "Token %s" % api_key, - } + def __str__(self): + return "%s %s" % (self.monitoring.name, self.version) - # Compose the url - url = urllib.parse.urljoin(self.baseurl, url) + # ID - if method == "GET": - url = "%s?%s" % (url, urllib.parse.urlencode(data)) + id = Column(Integer, primary_key=True) - # Reset data - data = None + # Monitoring ID - # For POST requests, encode the payload in JSON - elif method == "POST": - data = urllib.parse.urlencode(data) + monitoring_id = Column(Integer, ForeignKey("release_monitorings.id")) - # Send the request and wait for a response - res = await self.backend.httpclient.fetch(url, method=method, - headers=headers, body=data) + # Monitoring - # Decode JSON response - if res.body: - body = json.loads(res.body) + monitoring = sqlalchemy.orm.relationship("Monitoring", lazy="selectin") - # Check if we have received an error - error = body.get("error") + # Version - # Raise the error - if error: - raise RuntimeError(error) + version = Column(Text, nullable=False) - return body + # Created At - def _get_monitorings(self, query, *args, **kwargs): - return self.db.fetch_many(Monitoring, query, *args, **kwargs) + created_at = Column( + DateTime(timezone=False), nullable=False, server_default=sqlalchemy.func.current_timestamp(), + ) - async def _get_monitoring(self, query, *args, **kwargs): - return await self.db.fetch_one(Monitoring, query, *args, **kwargs) + # Delete - async def get_by_id(self, id): - return await self._get_monitoring(""" - SELECT - 
* - FROM - release_monitorings - WHERE - id = %s - """, id, - ) + async def delete(self, user=None): + """ + Deletes this release + """ + async with asyncio.TaskGroup() as tasks: + # Delete the repository + if self.repo: + await self.repo.delete() - async def get_by_distro_and_name(self, distro, name): - return await self._get_monitoring(""" - SELECT - * - FROM - release_monitorings - WHERE - deleted_at IS NULL - AND - distro_id = %s - AND - name = %s - """, distro, name, distro=distro, - ) + # Delete the build + if self.build: + tasks.create_task(self.build.delete(user=user)) - async def create(self, distro, name, created_by, project_id, - follow="stable", create_builds=True): - monitoring = await self._get_monitoring(""" - INSERT INTO - release_monitorings - ( - distro_id, - name, - created_by, - project_id, - follow, - create_builds - ) - VALUES( - %s, %s, %s, %s, %s, %s + # Close the bug + tasks.create_task( + self._close_bug( + resolution="WONTFIX", + comment="Release Monitoring for this package has been terminated", + ), ) - RETURNING - * - """, distro, name, created_by, project_id, follow, create_builds, - - # Populate cache - distro=distro, - ) - return monitoring + # Bug - async def search(self, name): + async def _create_bug(self): """ - Returns a bunch of packages that match the given name + Creates a new bug report about this release """ - # Send the request - response = await self._request("GET", "/api/v2/projects", - { - "name" : name, - "items_per_page" : 250, - }, - ) + args = { + # Product, Version & Component + "product" : self.monitoring.distro.bugzilla_product, + "version" : self.monitoring.distro.bugzilla_version, + "component" : self.monitoring.name, - # Return all packages - return [database.Row(item) for item in response.get("items")] + # Summary & Description + "summary" : "%s has been released" % self, + "description" : BUG_DESCRIPTION % \ + { + "name" : self.monitoring.name, + "version" : self.version + }, - async def check(self, limit=None): - """ - Perform checks on monitorings - """ - # Fetch all monitorings that were never checked or checked longer than 24 hours ago - monitorings = self._get_monitorings(""" - SELECT - * - FROM - release_monitorings - WHERE - deleted_at IS NULL - AND - ( - last_check_at IS NULL - OR - last_check_at <= CURRENT_TIMESTAMP - INTERVAL '24 hours' - ) - ORDER BY - last_check_at ASC NULLS FIRST - LIMIT - %s - """, limit, - ) + # Keywords + "keywords" : [ + # Mark this bug as created automatically + "Monitoring", - async for monitoring in monitorings: - await monitoring.check() + # Mark this bug as a new release + "NewRelease", + ], + } - # Releases + # If we have a build, include it in the bug description + if self.build: + args |= { + "description" : BUG_DESCRIPTION_WITH_BUILD % \ + { + "name" : self.monitoring.name, + "version" : self.version, + "url" : await self.backend.url_to(self.build.url), + }, - def _get_releases(self, query, *args, **kwargs): - return self.db.fetch_many(Release, query, *args, **kwargs) + # Set the URL to point to the build + "url" : await self.backend.url_to(self.build.url), + } - async def _get_release(self, query, *args, **kwargs): - return await self.db.fetch_one(Release, query, *args, **kwargs) + # Create the bug + bug = await self.backend.bugzilla.create_bug(**args) + # Store the bug ID + self.bug_id = bug.id -class Monitoring(base.DataObject): - table = "release_monitorings" + # Attach the diff (if we have one) + if self.diff: + await bug.attach( + filename="%s.patch" % self, + data=self.diff, + 
summary="Patch for %s" % self, + is_patch=True, + ) - def __str__(self): - return "%s - %s" % (self.distro, self.name) + return bug - @property - def url(self): - return "/distros/%s/monitorings/%s" % (self.distro.slug, self.name) + async def _close_bug(self, *args, **kwargs): + # Fetch the bug + bug = await self.get_bug() - @lazy_property - async def distro(self): - """ - The distribution - """ - return await self.backend.distros.get_by_id(self.data.distro_id) + if bug and not bug.is_closed(): + await bug.close(*args, **kwargs) - @property - def name(self): + # Bug ID + + bug_id = Column(Integer) + + async def get_bug(self): """ - The package name + Fetches the bug from Bugzilla """ - return self.data.name + if self.bug_id: + return await self.backend.bugzilla.get_bug(self.bug_id) - @property - def project_id(self): - return self.data.project_id + # Repo ID - # Last Check At + repo_id = Column(Integer, ForeignKey("repositories.id")) - @property - def last_check_at(self): - return self.data.last_check_at + # Repo - @property - def follow(self): - return self.data.follow + repo = sqlalchemy.orm.relationship("Repo") - # Create Builds + # Build ID - def get_create_builds(self): - return self.data.create_builds + build_id = Column(Integer, ForeignKey("builds.id")) - def set_create_builds(self, value): - self._set_attribute("create_builds", value) + build = sqlalchemy.orm.relationship("Build") - create_builds = property(get_create_builds, set_create_builds) + # Diff - # Permissions + diff = Column(Text) - def has_perm(self, user=None): - # Anonymous users can't perform any actions - if user is None: - return False + async def _create_build(self, build, owner): + """ + Creates a build + """ + repo = None - # Users must be admins - return user.is_admin() + if self.build: + raise RuntimeError("Build already exists") - # Delete + log.info("Creating build for %s from %s" % (self, build)) - async def delete(self, user=None): - # Mark as deleted - await self._set_attribute_now("deleted_at") - if user: - await self._set_attribute("deleted_by", user) + try: + # Create a new temporary space for the + async with self.backend.tempdir() as target: + # Create a new source package + file = await self._update_source_package(build.pkg, target) - # Delete all releases - async with asyncio.TaskGroup() as tasks: - for release in self.releases: - tasks.create_task(release.delete()) + if file: + # Create a new repository + repo = await self.backend.repos.create( + self.monitoring.distro, "Test Build for %s" % self, owner=owner) - # Check + # Upload the file + upload = await self.backend.uploads.create_from_local(file) - async def check(self): - log.info("Checking for new releases for %s" % self) + try: + # Create a package + package = await self.backend.packages.create(upload) - release = None + # Create the build + build = await self.backend.builds.create(repo, package, owner=owner) - # Fetch the current versions - versions = await self._fetch_versions() + finally: + await upload.delete() - # Fetch the latest release - # XXX ??? 
+ # If anything went wrong, then remove the repository + except Exception as e: + if repo: + await repo.delete() - # Fetch the latest build - latest_build = await self.get_latest_build() + raise e - async with await self.db.transaction(): - # Store timestamp of this check - await self._set_attribute_now("last_check_at") + else: + # Store the objects + self.build = build + self.repo = repo - try: - if self.follow == "latest": - release = await self._follow_latest(versions) - elif self.follow == "stable": - release = await self._follow_stable(versions) - elif self.follow == "current-branch": - release = await self._follow_current_branch(versions, latest_build) - else: - raise ValueError("Cannot handle follow: %s" % self.follow) + # Launch the build + await self.backend.builds.launch([build]) - # If the release exists, do nothing - except ReleaseExistsError as e: - log.debug("Release %s already exists" % e) + async def _update_source_package(self, package, target): + """ + Takes a package and recreates it with this release + """ + if not package.is_source(): + raise RuntimeError("%s is not a source package" % package) - # The latest build is newer than this release - except BuildExistsError as e: - log.debug("Latest build is newer") + # Capture Pakfire's log + logger = config.PakfireLogger() - # Dispatch any jobs - await self.backend.jobs.queue.dispatch() + # Create temporary directory to extract the package to + try: + async with self.backend.tempdir() as tmp: + # Path to downloaded files + files = os.path.join(tmp, "files") - async def _fetch_versions(self): - """ - Fetches all versions for this project - """ - # Wait until we are allowed to send an API request - async with ratelimiter: - response = await self.backend.monitorings._request( - "GET", "/api/v2/versions/", { - "project_id" : self.project_id, - }, - ) + # Path to the makefile + makefile = os.path.join(tmp, "%s.nm" % package.name) - # Parse the response as JSON and return it - return database.Row(response) + # Create a Pakfire instance from this distribution + async with self.monitoring.distro.pakfire(logger=logger) as p: + # Open the archive + archive = await asyncio.to_thread(p.open, package.path) - async def _follow_stable(self, versions, *, build): - """ - This will follow "stable" i.e. 
the latest stable version - """ - for version in versions.stable_versions: - return await self.create_release(version, build=build) + # Extract the archive into the temporary space + await asyncio.to_thread(archive.extract, path=tmp) - async def _follow_latest(self, versions, * build): - """ - This will follow the latest version (including pre-releases) - """ - return await self.create_release(versions.latest_version, build=build) + # XXX directories are being created with the wrong permissions + os.system("chmod a+x -R %s" % tmp) - async def _follow_current_branch(self, versions, *, build): - """ - This will follow any minor releases in the same branch - """ - # We cannot perform this if there is no recent build - if not build: - return + # Remove any downloaded files + await asyncio.to_thread(shutil.rmtree, files) - # Find the next version - next_version = self._find_next_version( - latest_build.pkg.evr, versions.stable_versions) + # Update the makefile & store the diff + self.diff = await self._update_makefile(makefile) - # Create a new release with the next version - if next_version: - return await self.create_release(next_version, build=build) + # Log the diff + log.info("Generated diff:\n%s" % self.diff) - def _find_next_version(self, current_version, available_versions): - # Remove epoch - if ":" in current_version: - epoch, delim, current_version = current_version.partition(":") + # Generate a new source package + return await asyncio.to_thread(p.dist, makefile, target) - # Remove release - current_version, delim, release = current_version.rpartition("-") + # If we could not create a new source package, this is okay and we will continue + # without. However, we will log the exception... + except Exception as e: + log.error("Could not create source package for %s" % self, exc_info=True) - # Split the current version into parts - current_version_parts = self._split_version(current_version) + return None - versions = {} + # Store the Pakfire log + finally: + self.log = "%s" % logger - # Find all versions that are interesting for us and store them with - # how many parts are matching against the current version - for version in available_versions: - # Only consider later versions - if pakfire.version_compare(current_version, version) >= 0: - continue + async def _update_makefile(self, path): + """ + Reads the makefile in path and updates it with the newer version + returning a diff between the two. 
+ """ + filename = os.path.basename(path) - # Split the version into parts - parts = self._split_version(version) + # Read the makefile + with open(path, "r") as f: + orig = f.readlines() - # Count the number of parts that match at the beginning - for i, (a, b) in enumerate(zip(current_version_parts, parts)): - if not a == b: - break + # Replace the version & release + updated = self._update_makefile_version(orig) - # Store the number of matching parts - versions[version] = i + 1 + # Write the new file + with open(path, "w") as f: + f.writelines(updated) - # Fetch all versions with the highest number of matches - versions = [v for v in versions if versions[v] == max(versions.values())] + # Generate a diff + return "".join( + difflib.unified_diff(orig, updated, fromfile=filename, tofile=filename), + ) - # Return the latest version - for version in versions: - return version + def _update_makefile_version(self, lines, release=1): + result = [] - @staticmethod - def _split_version(version): - """ - Splits a version into its parts by any punctuation characters - """ - return re.split(r"[\.\-_]", version) + # Walk through the file line by line and replace everything that + # starts with version or release. + for line in lines: + if line and not line.startswith("#"): + # Replace version + m = re.match(r"^(version\s*=)\s*(.*)$", line) + if m: + line = "%s %s\n" % (m.group(1), self.version) - # Releases + # Replace release + m = re.match(r"^(release\s*=)\s*(.*)$", line) + if m: + line = "%s %s\n" % (m.group(1), release) - def _get_releases(self, query, *args, **kwargs): - return self.backend.monitorings._get_releases(query, *args, - monitoring=self, **kwargs) + result.append(line) - async def _get_release(self, query, *args, **kwargs): - return await self.backend.monitorings._get_release(query, *args, - monitoring=self, **kwargs) + return result - @property - async def latest_release(self): + async def _build_finished(self): """ - Returns the latest release of this package + Called when the build has finished """ - return await self._get_release(""" - SELECT - * - FROM - release_monitoring_releases - WHERE - monitoring_id = %s - ORDER BY - created_at DESC - LIMIT 1 - """, self.id, - ) + # Fetch the bug report + bug = await self.get_bug() - @lazy_property - def releases(self): - return self._get_releases(""" - SELECT - * - FROM - release_monitoring_releases - WHERE - monitoring_id = %s - ORDER BY - created_at DESC - """, self.id, - ) + # Do nothing if there is no bug + if not bug: + return - return list(releases) + # If the build has been successful, ... + if self.build.is_successful(): + await bug.update(comment=BUG_BUILD_SUCCESSFUL) - async def _release_exists(self, version): - """ - Returns True if this version already exists - """ - return version in [release.version async for release in self.releases] + # If the build failed, ... + elif self.build.has_failed(): + # Say that the build has failed + await bug.update(comment=BUG_BUILD_FAILED) - async def create_release(self, version, *, build): - """ - Creates a new release for this package - """ - # XXX Do we need to check whether we are going backwards? 
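
# _update_makefile_version() above bumps the "version =" and "release =" lines
# with a regular expression, and difflib renders the change as a unified diff.
# A small, standalone sketch of the same approach; the makefile content is made up.
import difflib
import re

def bump_version(lines, version, release=1):
    result = []
    for line in lines:
        # Skip comments, rewrite "version = ..." and "release = ..." assignments
        if line and not line.startswith("#"):
            m = re.match(r"^(version\s*=)\s*(.*)$", line)
            if m:
                line = "%s %s\n" % (m.group(1), version)
            m = re.match(r"^(release\s*=)\s*(.*)$", line)
            if m:
                line = "%s %s\n" % (m.group(1), release)
        result.append(line)
    return result

orig = [
    "# Example makefile\n",
    "name    = bash\n",
    "version = 5.2\n",
    "release = 3\n",
]
updated = bump_version(orig, "5.3")

print("".join(difflib.unified_diff(orig, updated, fromfile="bash.nm", tofile="bash.nm")))
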
+ # Append any logfiles from failed jobs + for job in self.build.jobs: + if not job.has_failed(): + continue - # Raise an error if the release already exists - if await self._release_exists(version): - raise ReleaseExistsError(version) + # Open the logfile + try: + log = await job.open_log() + except FileNotFoundError as e: + log.warning("Could not open log file for %s" % job) + continue - # Raise an error if we already have a newer build - elif self._build_exists(version): - raise BuildExistsError(version) + # Attach it to the bug + await bug.attach(summary="Log file for %s" % job, filename="%s.log" % job, + data=log, content_type="text/plain") - log.info("%s: Creating new release %s" % (self, version)) - release = await self._get_release(""" +class Monitorings(base.Object): + baseurl = "https://release-monitoring.org" + + async def _request(self, method, url, data=None): + body = {} + + # Fetch the API key + api_key = await self.settings.get("release-monitoring-api-key") + + # Authenticate to the API + headers = { + "Authorization" : "Token %s" % api_key, + } + + # Compose the url + url = urllib.parse.urljoin(self.baseurl, url) + + if method == "GET": + url = "%s?%s" % (url, urllib.parse.urlencode(data)) + + # Reset data + data = None + + # For POST requests, encode the payload in JSON + elif method == "POST": + data = urllib.parse.urlencode(data) + + # Send the request and wait for a response + res = await self.backend.httpclient.fetch(url, method=method, + headers=headers, body=data) + + # Decode JSON response + if res.body: + body = json.loads(res.body) + + # Check if we have received an error + error = body.get("error") + + # Raise the error + if error: + raise RuntimeError(error) + + return body + + async def get_by_distro_and_name(self, distro, name): + stmt = ( + sqlalchemy + .select(Monitoring) + .where( + Monitoring.deleted_at == None, + + # Filter by the given distro and name + Monitoring.distro == distro, + Monitoring.name == name, + ) + ) + + return await self.db.fetch_one(stmt) + + async def create(self, distro, name, created_by, project_id, + follow="stable", create_builds=True): + monitoring = await self._get_monitoring(""" INSERT INTO - release_monitoring_releases + release_monitorings ( - monitoring_id, - version + distro_id, + name, + created_by, + project_id, + follow, + create_builds ) - VALUES - ( - %s, %s + VALUES( + %s, %s, %s, %s, %s, %s ) RETURNING * - """, self.id, version, - ) - - # Add the release to cache - self.releases.append(release) + """, distro, name, created_by, project_id, follow, create_builds, - # Create a build - if self.data.create_builds: - await release._create_build( - build=build, owner=self.backend.users.pakfire, - ) + # Populate cache + distro=distro, + ) - # Create a bug report - await release._create_bug() + return monitoring - # Return the release - return release + async def search(self, name): + """ + Returns a bunch of packages that match the given name + """ + # Send the request + response = await self._request("GET", "/api/v2/projects", + { + "name" : name, + "items_per_page" : 250, + }, + ) - # Builds + # Return all packages + return [database.Row(item) for item in response.get("items")] - def _build_exists(self, version): + async def check(self, limit=None): """ - Returns True if a build with this version already exists + Perform checks on monitorings """ - # If there is no build to check against we return False - if not self.latest_build: - return False + # Fetch all monitorings that were never checked or checked longer than 24 
hours ago + monitorings = self._get_monitorings(""" + SELECT + * + FROM + release_monitorings + WHERE + deleted_at IS NULL + AND + ( + last_check_at IS NULL + OR + last_check_at <= CURRENT_TIMESTAMP - INTERVAL '24 hours' + ) + ORDER BY + last_check_at ASC NULLS FIRST + LIMIT + %s + """, limit, + ) - # Compare the versions - if pakfire.version_compare(self.latest_build.pkg.evr, version) > 0: - return True + async for monitoring in monitorings: + await monitoring.check() - return False - async def get_latest_build(self): - distro = await self.distro +class Monitoring(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "release_monitorings" - async for build in distro.get_builds_by_name(self.name, limit=1): - return build + def __str__(self): + return "%s - %s" % (self.distro, self.name) + # ID -class Release(base.DataObject): - table = "release_monitoring_releases" + id = Column(Integer, primary_key=True) - def __str__(self): - return "%s %s" % (self.monitoring.name, self.version) + @property + def url(self): + return "/distros/%s/monitorings/%s" % (self.distro.slug, self.name) - # Monitoring + # Distro ID - @lazy_property - def monitoring(self): - return self.backend.monitorings.get_by_id(self.data.monitoring_id) + distro_id = Column(Integer, ForeignKey("distributions.id"), nullable=False) - # Version + # Distro - @property - def version(self): - return self.data.version + distro = sqlalchemy.orm.relationship("Distro", lazy="selectin") + + # Name + + name = Column(Text, nullable=False) # Created At - @property - def created_at(self): - return self.data.created_at + created_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.func.current_timestamp()) - # Delete + # Created By ID - async def delete(self, user=None): - """ - Deletes this release - """ - async with asyncio.TaskGroup() as tasks: - # Delete the repository - if self.repo: - await self.repo.delete() + created_by_id = Column(Integer, ForeignKey("users.id"), nullable=False) - # Delete the build - if self.build: - tasks.create_task(self.build.delete(user=user)) + # Created By - # Close the bug - tasks.create_task( - self._close_bug( - resolution="WONTFIX", - comment="Release Monitoring for this package has been terminated", - ), - ) + created_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[created_by_id], lazy="selectin", + ) - # Bug + # Deleted By ID - async def _create_bug(self): - """ - Creates a new bug report about this release - """ - args = { - # Product, Version & Component - "product" : self.monitoring.distro.bugzilla_product, - "version" : self.monitoring.distro.bugzilla_version, - "component" : self.monitoring.name, + deleted_by_id = Column(Integer, ForeignKey("users.id")) - # Summary & Description - "summary" : "%s has been released" % self, - "description" : BUG_DESCRIPTION % \ - { - "name" : self.monitoring.name, - "version" : self.version - }, + # Deleted By - # Keywords - "keywords" : [ - # Mark this bug as created automatically - "Monitoring", + deleted_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[deleted_by_id], lazy="selectin", + ) - # Mark this bug as a new release - "NewRelease", - ], - } + # Project ID - # If we have a build, include it in the bug description - if self.build: - args |= { - "description" : BUG_DESCRIPTION_WITH_BUILD % \ - { - "name" : self.monitoring.name, - "version" : self.version, - "url" : await self.backend.url_to(self.build.url), - }, + project_id = Column(Integer, nullable=False) - # Set the URL to point to 
the build - "url" : await self.backend.url_to(self.build.url), - } + # Last Check At - # Create the bug - bug = await self.backend.bugzilla.create_bug(**args) + last_check_at = Column(DateTime(timezone=False)) - # Store the bug ID - self._set_attribute("bug_id", bug.id) + # Follow? - # Attach the diff (if we have one) - if self.diff: - await bug.attach( - filename="%s.patch" % self, - data=self.diff, - summary="Patch for %s" % self, - is_patch=True, - ) + follow = Column(Text, nullable=False) - return bug + # Create Builds - async def _close_bug(self, *args, **kwargs): - # Fetch the bug - bug = await self.get_bug() + create_builds = Column(Boolean, nullable=False, default=True) - if bug and not bug.is_closed(): - await bug.close(*args, **kwargs) + # Permissions - @property - def bug_id(self): - return self.data.bug_id + def has_perm(self, user=None): + # Anonymous users can't perform any actions + if user is None: + return False - async def get_bug(self): - """ - Fetches the bug from Bugzilla - """ - if self.bug_id: - return await self.backend.bugzilla.get_bug(self.bug_id) + # Users must be admins + return user.is_admin() - # Repo + # Delete - def get_repo(self): - if self.data.repo_id: - return self.backend.repos.get_by_id(self.data.repo_id) + async def delete(self, user=None): + # Mark as deleted + await self._set_attribute_now("deleted_at") + if user: + await self._set_attribute("deleted_by", user) + + # Delete all releases + async with asyncio.TaskGroup() as tasks: + for release in self.releases: + tasks.create_task(release.delete()) + + # Check + + async def check(self): + log.info("Checking for new releases for %s" % self) + + release = None + + # Fetch the current versions + versions = await self._fetch_versions() - def set_repo(self, repo): - if self.repo: - raise AttributeError("Cannot reset repo") + # Fetch the latest release + # XXX ??? 
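
# Monitoring above links to the users table twice (created_by_id, deleted_by_id),
# which is why each relationship() names its foreign_keys explicitly. A standalone
# sketch of that disambiguation with illustrative models:
import sqlalchemy
from sqlalchemy import Column, ForeignKey, Integer, Text
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    name = Column(Text, nullable=False)

class Ticket(Base):
    __tablename__ = "tickets"

    id = Column(Integer, primary_key=True)

    # Two columns point at users.id ...
    created_by_id = Column(Integer, ForeignKey("users.id"), nullable=False)
    deleted_by_id = Column(Integer, ForeignKey("users.id"))

    # ... so each relationship spells out which column it joins on
    created_by = relationship("User", foreign_keys=[created_by_id])
    deleted_by = relationship("User", foreign_keys=[deleted_by_id])

# Without foreign_keys=, SQLAlchemy raises AmbiguousForeignKeysError when the
# mappers are configured, e.g. on the first query or an explicit configure()
Base.registry.configure()
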
- self._set_attribute("repo_id", repo) + # Fetch the latest build + latest_build = await self.get_latest_build() - repo = lazy_property(get_repo, set_repo) + async with await self.db.transaction(): + # Store timestamp of this check + self.last_check_at = sqlalchemy.func.current_timestamp() + + try: + if self.follow == "latest": + release = await self._follow_latest(versions) + elif self.follow == "stable": + release = await self._follow_stable(versions) + elif self.follow == "current-branch": + release = await self._follow_current_branch(versions, latest_build) + else: + raise ValueError("Cannot handle follow: %s" % self.follow) - # Build + # If the release exists, do nothing + except ReleaseExistsError as e: + log.debug("Release %s already exists" % e) - def get_build(self): - if self.data.build_id: - return self.backend.builds.get_by_id(self.data.build_id) + # The latest build is newer than this release + except BuildExistsError as e: + log.debug("Latest build is newer") - def set_build(self, build): - if self.build and not self.build == build: - raise AttributeError("Cannot reset build") + # Dispatch any jobs + await self.backend.jobs.queue.dispatch() - self._set_attribute("build_id", build) + async def _fetch_versions(self): + """ + Fetches all versions for this project + """ + # Wait until we are allowed to send an API request + async with ratelimiter: + response = await self.backend.monitorings._request( + "GET", "/api/v2/versions/", { + "project_id" : self.project_id, + }, + ) - build = lazy_property(get_build, set_build) + # Parse the response as JSON and return it + return database.Row(response) - # Diff + async def _follow_stable(self, versions, *, build): + """ + This will follow "stable" i.e. the latest stable version + """ + for version in versions.stable_versions: + return await self.create_release(version, build=build) - @property - def diff(self): - return self.data.diff + async def _follow_latest(self, versions, * build): + """ + This will follow the latest version (including pre-releases) + """ + return await self.create_release(versions.latest_version, build=build) - async def _create_build(self, build, owner): + async def _follow_current_branch(self, versions, *, build): """ - Creates a build + This will follow any minor releases in the same branch """ - repo = None + # We cannot perform this if there is no recent build + if not build: + return - if self.build: - raise RuntimeError("Build already exists") + # Find the next version + next_version = self._find_next_version( + latest_build.pkg.evr, versions.stable_versions) - log.info("Creating build for %s from %s" % (self, build)) + # Create a new release with the next version + if next_version: + return await self.create_release(next_version, build=build) - try: - # Create a new temporary space for the - async with self.backend.tempdir() as target: - # Create a new source package - file = await self._update_source_package(build.pkg, target) + def _find_next_version(self, current_version, available_versions): + # Remove epoch + if ":" in current_version: + epoch, delim, current_version = current_version.partition(":") - if file: - # Create a new repository - repo = await self.backend.repos.create( - self.monitoring.distro, "Test Build for %s" % self, owner=owner) + # Remove release + current_version, delim, release = current_version.rpartition("-") - # Upload the file - upload = await self.backend.uploads.create_from_local(file) + # Split the current version into parts + current_version_parts = 
self._split_version(current_version) - try: - # Create a package - package = await self.backend.packages.create(upload) + versions = {} - # Create the build - build = await self.backend.builds.create(repo, package, owner=owner) + # Find all versions that are interesting for us and store them with + # how many parts are matching against the current version + for version in available_versions: + # Only consider later versions + if pakfire.version_compare(current_version, version) >= 0: + continue - finally: - await upload.delete() + # Split the version into parts + parts = self._split_version(version) - # If anything went wrong, then remove the repository - except Exception as e: - if repo: - await repo.delete() + # Count the number of parts that match at the beginning + for i, (a, b) in enumerate(zip(current_version_parts, parts)): + if not a == b: + break - raise e + # Store the number of matching parts + versions[version] = i + 1 - else: - # Store the objects - self.build = build - self.repo = repo + # Fetch all versions with the highest number of matches + versions = [v for v in versions if versions[v] == max(versions.values())] - # Launch the build - await self.backend.builds.launch([build]) + # Return the latest version + for version in versions: + return version - async def _update_source_package(self, package, target): + @staticmethod + def _split_version(version): """ - Takes a package and recreates it with this release + Splits a version into its parts by any punctuation characters """ - if not package.is_source(): - raise RuntimeError("%s is not a source package" % package) - - # Capture Pakfire's log - logger = config.PakfireLogger() - - # Create temporary directory to extract the package to - try: - async with self.backend.tempdir() as tmp: - # Path to downloaded files - files = os.path.join(tmp, "files") - - # Path to the makefile - makefile = os.path.join(tmp, "%s.nm" % package.name) - - # Create a Pakfire instance from this distribution - async with self.monitoring.distro.pakfire(logger=logger) as p: - # Open the archive - archive = await asyncio.to_thread(p.open, package.path) - - # Extract the archive into the temporary space - await asyncio.to_thread(archive.extract, path=tmp) - - # XXX directories are being created with the wrong permissions - os.system("chmod a+x -R %s" % tmp) - - # Remove any downloaded files - await asyncio.to_thread(shutil.rmtree, files) - - # Update the makefile - diff = await self._update_makefile(makefile) + return re.split(r"[\.\-_]", version) - # Log the diff - log.info("Generated diff:\n%s" % diff) + # Releases - # Store the diff - self._set_attribute("diff", diff) + def get_releases(self): + stmt = ( + sqlalchemy + .select(MonitoringRelease) + .where( + MonitoringRelease.monitoring == self, + ) + .order_by( + MonitoringRelease.created_at.desc(), + ) + ) - # Generate a new source package - return await asyncio.to_thread(p.dist, makefile, target) + return self.db.fetch(stmt) - # If we could not create a new source package, this is okay and we will continue - # without. However, we will log the exception... 
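
# The _find_next_version()/_split_version() helpers in this hunk pick the release
# that stays on the same branch by counting matching leading version parts. A
# condensed, standalone illustration of that idea; the sample versions are made up.
import re

def split_version(version):
    # Split on dots, dashes and underscores, e.g. "3.2.1" -> ["3", "2", "1"]
    return re.split(r"[\.\-_]", version)

def matching_parts(a, b):
    count = 0
    for x, y in zip(split_version(a), split_version(b)):
        if x != y:
            break
        count += 1
    return count

current = "3.2.1"
candidates = ["3.2.2", "3.3.0", "4.0.0"]

# "3.2.2" wins because it shares the most leading parts with the current version
best = max(candidates, key=lambda version: matching_parts(current, version))
print(best)
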
- except Exception as e: - log.error("Could not create source package for %s" % self, exc_info=True) + # Latest Release - return None + latest_release = sqlalchemy.orm.relationship("MonitoringRelease", + order_by=MonitoringRelease.created_at.desc(), uselist=False, viewonly=True, lazy="selectin", + ) - # Store the Pakfire log - finally: - self._set_attribute("log", "%s" % logger) + async def _release_exists(self, version): + """ + Returns True if this version already exists + """ + return version in [release.version async for release in self.releases] - async def _update_makefile(self, path): + async def create_release(self, version, *, build): """ - Reads the makefile in path and updates it with the newer version - returning a diff between the two. + Creates a new release for this package """ - filename = os.path.basename(path) + # XXX Do we need to check whether we are going backwards? - # Read the makefile - with open(path, "r") as f: - orig = f.readlines() + # Raise an error if the release already exists + if await self._release_exists(version): + raise ReleaseExistsError(version) - # Replace the version & release - updated = self._update_makefile_version(orig) + # Raise an error if we already have a newer build + elif self._build_exists(version): + raise BuildExistsError(version) - # Write the new file - with open(path, "w") as f: - f.writelines(updated) + log.info("%s: Creating new release %s" % (self, version)) - # Generate a diff - return "".join( - difflib.unified_diff(orig, updated, fromfile=filename, tofile=filename), + # Insert into database + release = await self.db.insert( + MonitoringRelease, + monitoring = self, + version = version, ) - def _update_makefile_version(self, lines, release=1): - result = [] - - # Walk through the file line by line and replace everything that - # starts with version or release. - for line in lines: - if line and not line.startswith("#"): - # Replace version - m = re.match(r"^(version\s*=)\s*(.*)$", line) - if m: - line = "%s %s\n" % (m.group(1), self.version) + # Create a build + if self.create_builds: + await release._create_build( + build = build, + owner = self.backend.users.pakfire, + ) - # Replace release - m = re.match(r"^(release\s*=)\s*(.*)$", line) - if m: - line = "%s %s\n" % (m.group(1), release) + # Create a bug report + await release._create_bug() - result.append(line) + # Return the release + return release - return result + # Builds - async def _build_finished(self): + def _build_exists(self, version): """ - Called when the build has finished + Returns True if a build with this version already exists """ - # Fetch the bug report - bug = await self.get_bug() - - # Do nothing if there is no bug - if not bug: - return - - # If the build has been successful, ... - if self.build.is_successful(): - await bug.update(comment=BUG_BUILD_SUCCESSFUL) - - # If the build failed, ... 
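
# The repository code further below tracks membership in repository_builds rows
# and "removes" a build by stamping removed_at instead of deleting the row, so the
# history is kept and current members are the rows where removed_at IS NULL. A
# standalone Core sketch of that pattern; the table layout is simplified.
import sqlalchemy

metadata = sqlalchemy.MetaData()

repo_builds = sqlalchemy.Table(
    "repository_builds", metadata,
    sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
    sqlalchemy.Column("repo_id", sqlalchemy.Integer, nullable=False),
    sqlalchemy.Column("build_id", sqlalchemy.Integer, nullable=False),
    sqlalchemy.Column("added_at", sqlalchemy.DateTime,
        server_default=sqlalchemy.func.current_timestamp()),
    sqlalchemy.Column("removed_at", sqlalchemy.DateTime),
)

engine = sqlalchemy.create_engine("sqlite://")
metadata.create_all(engine)

with engine.begin() as conn:
    # Add a build to repository 1
    conn.execute(sqlalchemy.insert(repo_builds).values(repo_id=1, build_id=42))

    # "Remove" it again: close the membership row instead of deleting it
    conn.execute(
        sqlalchemy
        .update(repo_builds)
        .where(
            repo_builds.c.repo_id == 1,
            repo_builds.c.build_id == 42,
            repo_builds.c.removed_at == None,
        )
        .values(removed_at=sqlalchemy.func.current_timestamp())
    )

    # Current members are the rows that were never closed
    members = conn.execute(
        sqlalchemy
        .select(repo_builds)
        .where(
            repo_builds.c.repo_id == 1,
            repo_builds.c.removed_at == None,
        )
    ).all()

    print(len(members))   # 0
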
- elif self.build.has_failed(): - # Say that the build has failed - await bug.update(comment=BUG_BUILD_FAILED) + # If there is no build to check against we return False + if not self.latest_build: + return False - # Append any logfiles from failed jobs - for job in self.build.jobs: - if not job.has_failed(): - continue + # Compare the versions + if pakfire.version_compare(self.latest_build.pkg.evr, version) > 0: + return True - # Open the logfile - try: - log = await job.open_log() - except FileNotFoundError as e: - log.warning("Could not open log file for %s" % job) - continue + return False - # Attach it to the bug - await bug.attach(summary="Log file for %s" % job, filename="%s.log" % job, - data=log, content_type="text/plain") + async def get_latest_build(self): + for build in await self.distro.get_builds(name=self.name, limit=1): + return build diff --git a/src/buildservice/repository.py b/src/buildservice/repository.py index 105a551b..6a16dbc3 100644 --- a/src/buildservice/repository.py +++ b/src/buildservice/repository.py @@ -10,8 +10,17 @@ import os.path import shutil import tempfile +import sqlalchemy +from sqlalchemy import Column, ForeignKey +from sqlalchemy import Boolean, DateTime, Integer, Text + from . import base +from . import builds +from . import database +from . import jobs from . import misc +from . import packages +from . import sources from .constants import * from .decorators import * @@ -19,46 +28,96 @@ from .decorators import * # Setup logging log = logging.getLogger("pbs.repositories") -class Repositories(base.Object): - def _get_repositories(self, *args, **kwargs): - return self.db.fetch_many(Repository, *args, **kwargs) +class RepoBuild(database.Base): + __tablename__ = "repository_builds" - async def _get_repository(self, *args, **kwargs): - return await self.db.fetch_one(Repository, *args, **kwargs) + # ID - async def __aiter__(self): - repositories = await self._get_repositories(""" - SELECT - * - FROM - repositories - WHERE - deleted_at IS NULL - ORDER BY - distro_id, name - """, + id = Column(Integer, primary_key=True) + + # Repo ID + + repo_id = Column(Integer, ForeignKey("repositories.id"), nullable=False) + + # Repo + + repo = sqlalchemy.orm.relationship("Repo", foreign_keys=[repo_id], lazy="selectin") + + # Build ID + + build_id = Column(Integer, ForeignKey("builds.id"), nullable=False) + + # Build + + build = sqlalchemy.orm.relationship("Build", foreign_keys=[build_id], lazy="selectin") + + # Added At + + added_at = Column(DateTime(timezone=False), nullable=False, + default=sqlalchemy.func.current_timestamp()) + + # Added By ID + + added_by_id = Column(Integer, ForeignKey("users.id"), nullable=False) + + # Added By + + added_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[added_by_id], lazy="selectin", + ) + + # Removed At + + removed_at = Column(DateTime(timezone=False)) + + # Removed By ID + + removed_by_id = Column(Integer, ForeignKey("users.id")) + + # Removed By + + removed_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[removed_by_id], lazy="selectin", + ) + + +class Repositories(base.Object): + def __aiter__(self): + stmt = ( + sqlalchemy + .select(Repo) + .where( + Repo.deleted_at == None, + ) + + # Order them by distro & name + .order_by( + Repo.distro_id, + Repo.name, + ) ) - return aiter(repositories) + # Fetch the repos + return self.db.fetch(stmt) @property async def mirrored(self): """ Lists all repositories that should be mirrored """ - repos = await self._get_repositories(""" - SELECT - * - FROM - repositories - 
WHERE - deleted_at IS NULL - AND - mirrored IS TRUE - """, + stmt = ( + sqlalchemy + .select(Repo) + .where( + Repo.deleted_at == None, + + # Filter by those who should be mirrored + Repo.mirrored == True, + ) ) - return list(repos) + # Fetch the repositories + return self.db.fetch(stmt) async def create(self, distro, name, owner=None): """ @@ -110,10 +169,6 @@ class Repositories(base.Object): if not exists: return slug - async def get_by_id(self, repo_id): - return await self._get_repository("SELECT * FROM repositories \ - WHERE id = %s", repo_id) - async def write(self): """ Write/re-write all repositories @@ -122,8 +177,8 @@ class Repositories(base.Object): await repo.write() -class Repository(base.DataObject): - table = "repositories" +class Repo(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "repositories" def __str__(self): return self.name @@ -150,13 +205,21 @@ class Repository(base.DataObject): return ret - @lazy_property - def distro(self): - return self.backend.distros.get_by_id(self.data.distro_id) + # ID - @property - def created_at(self): - return self.data.created_at + id = Column(Integer, primary_key=True) + + # Distro + + distro_id = Column(Integer, ForeignKey("distributions.id"), nullable=False) + + distro = sqlalchemy.orm.relationship("Distro", lazy="selectin") + + # Created At + + created_at = Column( + DateTime(timezone=False), nullable=False, server_default=sqlalchemy.func.current_timestamp(), + ) # Repo Types @@ -177,24 +240,15 @@ class Repository(base.DataObject): # Priority - def get_priority(self): - return self.data.priority + priority = Column(Integer, nullable=False) - def set_priority(self, priority): - self._set_attribute("priority", priority) + # Owner ID - priority = property(get_priority, set_priority) + owner_id = Column(Integer, ForeignKey("users.id")) # Owner - def get_owner(self): - if self.data.owner_id: - return self.backend.users.get_by_id(self.data.owner_id) - - def set_owner(self, owner): - self._set_attribute("owner_id", owner) - - owner = property(get_owner, set_owner) + owner = sqlalchemy.orm.relationship("User", lazy="selectin") def has_perm(self, user): """ @@ -217,9 +271,7 @@ class Repository(base.DataObject): # Slug - @property - def slug(self): - return self.data.slug + slug = Column(Text, unique=True, nullable=False) @lazy_property def path(self): @@ -260,7 +312,7 @@ class Repository(base.DataObject): @property def download_url(self): return "/".join(( - self.settings.get("baseurl", "https://pakfire.ipfire.org"), + self.backend.config.get("global", "baseurl", fallback="https://pakfire.ipfire.org"), "files", "repos", self.path, @@ -270,7 +322,7 @@ class Repository(base.DataObject): @property def mirrorlist(self): return "/".join(( - self.settings.get("baseurl", "https://pakfire.ipfire.org"), + self.backend.config.get("global", "baseurl", fallback="https://pakfire.ipfire.org"), "distros", self.distro.slug, "repos", @@ -323,29 +375,19 @@ class Repository(base.DataObject): # Name - def get_name(self): - return self.data.name - - def set_name(self, name): - self._set_attribute("name", name) - - name = property(get_name, set_name) + name = Column(Text, nullable=False) # Description - def get_description(self): - return self.data.description + description = Column(Text, nullable=False, default="") - def set_description(self, description): - self._set_attribute("description", description or "") + # Key ID - description = property(get_description, set_description) + key_id = Column(Integer, ForeignKey("keys.id")) 
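
# get_total_size() further below adds up source and binary package sizes by feeding
# two SELECTs into a UNION ALL, wrapping the result in a CTE and summing over it.
# A compact, standalone sketch of the same construction; the tables are illustrative.
import sqlalchemy

metadata = sqlalchemy.MetaData()

sources = sqlalchemy.Table(
    "source_packages", metadata,
    sqlalchemy.Column("filesize", sqlalchemy.BigInteger),
)

binaries = sqlalchemy.Table(
    "binary_packages", metadata,
    sqlalchemy.Column("filesize", sqlalchemy.BigInteger),
)

all_packages = (
    sqlalchemy
    .union_all(
        sqlalchemy.select(sources.c.filesize.label("size")),
        sqlalchemy.select(binaries.c.filesize.label("size")),
    )
    .cte("all_packages")
)

stmt = (
    sqlalchemy
    .select(
        sqlalchemy.func.sum(all_packages.c.size).label("total_size"),
    )
    .select_from(all_packages)
)

# Renders as: WITH all_packages AS (... UNION ALL ...)
#             SELECT sum(all_packages.size) AS total_size FROM all_packages
print(stmt)
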
- # Key Management + # Key - @lazy_property - def key(self): - return self.backend.keys.get_by_id(self.data.key_id) + key = sqlalchemy.orm.relationship("Key", foreign_keys=[key_id], lazy="selectin") # Architectures @@ -355,23 +397,11 @@ class Repository(base.DataObject): # Mirrored - def get_mirrored(self): - return self.data.mirrored - - def set_mirrored(self, mirrored): - self._set_attribute("mirrored", mirrored) - - mirrored = property(get_mirrored, set_mirrored) + mirrored = Column(Boolean, nullable=False, default=False) # Listed - def get_listed(self): - return self.data.listed - - def set_listed(self, listed): - self._set_attribute("listed", listed) - - listed = property(get_listed, set_listed) + listed = Column(Boolean, nullable=False, default=False) # Sibling repositories @@ -418,27 +448,45 @@ class Repository(base.DataObject): distro=self.distro, ) + # Builds + + async def get_builds(self, **kwargs): + """ + Returns builds in this repository + """ + return await self.backend.builds.get(repo=self, **kwargs) + + # Has Build? + + async def has_build(self, build): + """ + Checks if this build is part of this repository + """ + stmt = ( + sqlalchemy + .select(RepoBuild) + .where( + RepoBuild.repo == self, + RepoBuild.build == build, + RepoBuild.removed_at == None, + ) + ) + + return await self.db.fetch_one(stmt) + # Add/Remove Builds async def add_build(self, build, user=None): """ Adds a build to this repository """ - self.db.execute(""" - INSERT INTO - repository_builds( - repo_id, - build_id, - added_by - ) - VALUES( - %s, %s, %s - )""", self.id, build, user, + await self.db.insert( + RepoBuild, + repo = self, + build = build, + added_by = user, ) - # Update the cache - build.repos.append(self) - # Update bug status # XXX TODO @@ -449,24 +497,14 @@ class Repository(base.DataObject): """ Removes a build from this repository """ - self.db.execute(""" - UPDATE - repository_builds - SET - removed_at = CURRENT_TIMESTAMP, - removed_by = %s - WHERE - repo_id = %s - AND - build_id = %s - """, user, self.id, build, - ) + repo_build = await self.has_build(build) - # Update the cache - try: - build.repos.remove(self) - except IndexError: - pass + # Raise an exception if we don't have this build + if not repo_build: + raise ValueError("%s is not part of %s" % (build, self)) + + # Remove the build + repo_build.remove() # Update the repository (in the background) await self.changed() @@ -491,93 +529,20 @@ class Repository(base.DataObject): return list(sources) - def get_source_by_slug(self, slug): - return self.backend.sources._get_source(""" - SELECT - * - FROM - sources - WHERE - deleted_at IS NULL - AND - repo_id = %s - AND - slug = %s - """, self.id, slug, - - # Prefill cache - repo=self, - ) - - # Builds - - @lazy_property - def builds(self): - """ - Returns all builds that are part of this repository - """ - builds = self.backend.builds._get_builds(""" - SELECT - builds.* - FROM - repository_builds - LEFT JOIN - builds ON repository_builds.build_id = builds.id - LEFT JOIN - packages ON builds.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - repository_builds.repo_id = %s - AND - repository_builds.removed_at IS NULL - ORDER BY - packages.name, packages.evr""", - self.id, - ) - - return list(builds) - - def get_recent_builds(self, limit=None, offset=None): - return self.backend.builds._get_builds(""" - SELECT - builds.* - FROM - repository_builds - LEFT JOIN - builds ON repository_builds.build_id = builds.id - WHERE - builds.deleted_at IS NULL - AND - repository_builds.repo_id 
= %s - AND - repository_builds.removed_at IS NULL - ORDER BY - repository_builds.added_at DESC - LIMIT - %s - OFFSET - %s - """, self.id, limit, offset, - ) - - async def get_added_at_for_build(self, build): - res = await self.db.get(""" - SELECT - added_at - FROM - repository_builds - WHERE - repository_builds.repo_id = %s - AND - repository_builds.build_id = %s - AND - repository_builds.removed_at IS NULL - """, self.id, build, + async def get_source_by_slug(self, slug): + stmt = ( + sqlalchemy + .select( + sources.Source, + ) + .where( + sources.Source.deleted_at == None, + sources.Source.repo_id == self.id, + sources.Source.slug == slug, + ) ) - if res: - return res.added_at + return await self.db.fetch_one(stmt) @lazy_property def total_builds(self): @@ -599,32 +564,6 @@ class Repository(base.DataObject): return res.count or 0 - async def get_builds_by_name(self, name): - """ - Returns an ordered list of all builds that match this name - """ - builds = await self.backend.builds._get_builds(""" - SELECT - builds.* - FROM - repository_builds - LEFT JOIN - builds ON repository_builds.build_id = builds.id - LEFT JOIN - packages ON builds.pkg_id = packages.id - WHERE - repository_builds.repo_id = %s - AND - builds.deleted_at IS NULL - AND - packages.name = %s - ORDER BY - builds.created_at DESC""", - self.id, name, - ) - - return list(builds) - async def get_packages(self, arch): if arch == "src": packages = await self.backend.packages._get_packages(""" @@ -747,66 +686,80 @@ class Repository(base.DataObject): return { row.arch : row.size for row in res if row.arch in self.distro.arches } - @lazy_property - async def total_size(self): - res = await self.db.get(""" - WITH packages AS ( - -- Source Packages - SELECT - packages.filesize AS size - FROM - repository_builds - LEFT JOIN - builds ON repository_builds.build_id = builds.id - LEFT JOIN - packages ON builds.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - packages.deleted_at IS NULL - AND - repository_builds.repo_id = %s - AND - repository_builds.removed_at IS NULL - - UNION ALL + async def get_total_size(self): + """ + Returns the total size of the repository + """ + source_packages = ( + sqlalchemy + .select( + packages.Package.filesize.label("size") + ) + .select_from(RepoBuild) + .join( + builds.Build, + builds.Build.id == RepoBuild.build_id, + ) + .join( + packages.Package, + packages.Package.id == builds.Build.pkg_id, + ) + .where( + packages.Package.deleted_at == None, + builds.Build.deleted_at == None, + RepoBuild.removed_at == None, + RepoBuild.repo == self, + ) + ) - -- Binary Packages - SELECT - packages.filesize AS size - FROM - repository_builds - LEFT JOIN - builds ON repository_builds.build_id = builds.id - LEFT JOIN - jobs ON builds.id = jobs.build_id - LEFT JOIN - job_packages ON jobs.id = job_packages.job_id - LEFT JOIN - packages ON job_packages.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - jobs.deleted_at IS NULL - AND - packages.deleted_at IS NULL - AND - repository_builds.repo_id = %s - AND - repository_builds.removed_at IS NULL + binary_packages = ( + sqlalchemy + .select( + packages.Package.filesize.label("size") + ) + .select_from(RepoBuild) + .join( + builds.Build, + builds.Build.id == RepoBuild.build_id, + ) + .join( + jobs.Job, + jobs.Job.build_id == builds.Build.id, ) + .join( + packages.Package, + packages.Package.id == builds.Build.pkg_id, + ) + .where( + packages.Package.deleted_at == None, + builds.Build.deleted_at == None, + RepoBuild.removed_at == None, + 
RepoBuild.repo == self, + ) + ) - SELECT - SUM(packages.size) AS size - FROM - packages - """, self.id, self.id, + all_packages = ( + sqlalchemy + .union_all( + source_packages, + binary_packages, + ) + .cte("all_packages") ) - if res: - return res.size or 0 + stmt = ( + sqlalchemy + .select( + sqlalchemy.func.sum( + all_packages.c.size, + ).label("total_size"), + ) + .select_from( + all_packages, + ) + ) - return 0 + return await self.db.select_one(stmt, "total_size") # Pakfire diff --git a/src/buildservice/sessions.py b/src/buildservice/sessions.py index 68eba694..eb7a2cfd 100644 --- a/src/buildservice/sessions.py +++ b/src/buildservice/sessions.py @@ -1,23 +1,19 @@ #!/usr/bin/python +import logging +import sqlalchemy + +from sqlalchemy import Column, DateTime, ForeignKey, Integer, Text +from sqlalchemy.dialects.postgresql import INET + from . import base +from . import database from . import misc -from .decorators import * +# Setup logging +log = logging.getLogger("pbs.sessions") class Sessions(base.Object): - async def _get_sessions(self, *args, **kwargs): - return self.db.fetch_many(Session, *args, **kwargs) - - async def _get_session(self, *args, **kwargs): - return self.db.fetch_one(Session, *args, **kwargs) - - async def __aiter__(self): - sessions = await self._get_sessions("SELECT * FROM sessions \ - WHERE valid_until >= NOW() ORDER BY valid_until DESC") - - return aiter(sessions) - async def create(self, user, address, user_agent=None): """ Creates a new session in the data. @@ -27,27 +23,30 @@ class Sessions(base.Object): """ session_id = misc.generate_random_string(48) - return await self._get_session(""" - INSERT INTO - sessions - ( - session_id, - user_id, - address, - user_agent - ) - VALUES - ( - %s, %s, %s, %s - ) - RETURNING - * - """, session_id, user.id, address, user_agent, + session = await self.db.insert( + Session, + session_id = session_id, + user = user, + address = address, + user_agent = user_agent, ) + # Log what we have done + log.info("Created new session %s" % session) + + return session + async def get_by_session_id(self, session_id): - return await self._get_session("SELECT * FROM sessions \ - WHERE session_id = %s AND valid_until >= NOW()", session_id) + stmt = ( + sqlalchemy + .select(Session) + .where( + Session.session_id == session_id, + Session.valid_until >= sqlalchemy.func.current_timestamp(), + ) + ) + + return await self.db.fetch_one(stmt) # Alias function get = get_by_session_id @@ -60,8 +59,8 @@ class Sessions(base.Object): await self.db.execute("DELETE FROM sessions WHERE valid_until < CURRENT_TIMESTAMP") -class Session(base.DataObject): - table = "sessions" +class Session(database.Base): + __tablename__ = "sessions" def __lt__(self, other): if isinstance(other, self.__class__): @@ -69,29 +68,34 @@ class Session(base.DataObject): return NotImplemented - async def destroy(self): - await self.db.execute("DELETE FROM sessions WHERE id = %s", self.id) + # ID + + id = Column(Integer, primary_key=True) + + # Session ID + + session_id = Column(Text, unique=True, nullable=False) + + # User + + user_id = Column(Integer, ForeignKey("users.id"), nullable=False) + + user = sqlalchemy.orm.relationship("User", back_populates="sessions", lazy="selectin") + + # Created At + + created_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.func.current_timestamp()) - @property - def session_id(self): - return self.data.session_id + # Valid Until - @lazy_property - def user(self): - return 
self.backend.users.get_by_id(self.data.user_id) + valid_until = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.text("CURRENT_TIMESTAMP + INTERVAL '14 days'")) - @property - def created_at(self): - return self.data.created_at + # Address - @property - def valid_until(self): - return self.data.valid_until + address = Column(INET(), nullable=False) - @property - def address(self): - return self.data.address + # User Agent - @property - def user_agent(self): - return self.data.user_agent + user_agent = Column(Text) diff --git a/src/buildservice/sources.py b/src/buildservice/sources.py index e1a48e32..cf2bfdac 100644 --- a/src/buildservice/sources.py +++ b/src/buildservice/sources.py @@ -3,17 +3,22 @@ import asyncio import datetime import fnmatch +import functools import logging import os import re +import sqlalchemy import tempfile +from sqlalchemy import Column, ForeignKey +from sqlalchemy import DateTime, Integer, Text + from . import base +from . import database from . import config from . import misc from .constants import * -from .decorators import * # Setup logging log = logging.getLogger("pbs.sources") @@ -33,12 +38,6 @@ VALID_TAGS = ( ) class Sources(base.Object): - def _get_sources(self, query, *args, **kwargs): - return self.db.fetch_many(Source, query, *args, **kwargs) - - async def _get_source(self, query, *args, **kwargs): - return await self.db.fetch_one(Source, query, *args, **kwargs) - def __aiter__(self): sources = self._get_sources(""" SELECT @@ -54,52 +53,29 @@ class Sources(base.Object): return aiter(sources) - async def get_by_id(self, id): - return await self._get_source(""" - SELECT - * - FROM - sources - WHERE - id = %s - """, id, - ) - async def get_by_slug(self, slug): - return await self._get_source(""" - SELECT - * - FROM - sources - WHERE - deleted_at IS NULL - AND - slug = %s - """, slug, + stmt = ( + sqlalchemy + .select(Source) + .where( + Source.deleted_at == None, + Source.slug == slug, + ) ) + return await self.db.fetch_one(stmt) + async def create(self, repo, name, url, user): # Make slug slug = self._make_slug(name) # Insert into the database - source = await self._get_source(""" - INSERT INTO - sources( - name, - url, - created_by, - repo_id - ) - VALUES( - %s, %s, %s, %s - ) - RETURNING - * - """, name, url, user, repo, - - # Populate cache - repo=repo, + source = await self.db.insert( + Source, + name = name, + url = url, + created_by = created_by, + repo = repo, ) return source @@ -115,25 +91,6 @@ class Sources(base.Object): return slug - # Commits - - def _get_commits(self, query, *args, **kwargs): - return self.db.fetch_many(Commit, query, *args, **kwargs) - - async def _get_commit(self, query, *args, **kwargs): - return await self.db.fetch_one(Commit, query, *args, **kwargs) - - async def get_commit_by_id(self, id): - return await self._get_commit(""" - SELECT - * - FROM - source_commits - WHERE - id = %s - """, id, - ) - # Fetch async def fetch(self, run_jobs=True): @@ -151,12 +108,6 @@ class Sources(base.Object): # Run jobs - def _get_jobs(self, query, *args, **kwargs): - return self.db.fetch_many(Job, query, *args, **kwargs) - - async def _get_job(self, query, *args, **kwargs): - return await self.db.fetch_one(Job, query, *args, **kwargs) - @property def pending_jobs(self): """ @@ -191,83 +142,84 @@ class Sources(base.Object): await job.run() -class Source(base.DataObject): - table = "sources" - - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, self.name) +class Source(database.Base, 
database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "sources" def __str__(self): return self.name - @lazy_property - def git(self): - # Setup the Git repository - return Git(self.backend, self.path, self.url, self.branch) + # ID + + id = Column(Integer, primary_key=True) # Name - def get_name(self): - return self.data.name + name = Column(Text, nullable=False) - def set_name(self, name): - self._set_attribute("name", name) + # Slug - name = property(get_name, set_name) + slug = Column(Text, unique=True, nullable=False) - # Slug + # URL - @property - def slug(self): - return self.data.slug + url = Column(Text, nullable=False) - # Distro + # Gitweb - @property - def distro(self): - return self.repo.distro + gitweb = Column(Text) - # Repo + # Revision - @lazy_property - def repo(self): - return self.backend.repos.get_by_id(self.data.repo_id) + revision = Column(Text) - # URL + # Branch - def get_url(self): - return self.data.url + branch = Column(Text, nullable=False) - def set_url(self, url): - self._set_attribute("url", url) + # Last Fetched At - url = property(get_url, set_url) + last_fetched_at = Column(DateTime(timezone=False)) - # Gitweb + # Repo ID - def get_gitweb(self): - return self.data.gitweb + repo_id = Column(Integer, ForeignKey("repositories.id"), nullable=False) - def set_gitweb(self, url): - self._set_attribute("gitweb", url) + # Repo - gitweb = property(get_gitweb, set_gitweb) + repo = sqlalchemy.orm.relationship( + "Repo", foreign_keys=[repo_id], lazy="selectin", + ) - # Revision + # Distro @property - def revision(self): - return self.data.revision + def distro(self): + return self.repo.distro - # Branch + # Created At + + created_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.func.current_timestamp()) + + # Created By ID - def get_branch(self): - return self.data.branch + created_by_id = Column(Integer, ForeignKey("users.id")) - def set_branch(self, branch): - self._set_attribute("branch", branch) + # Created By - branch = property(get_branch, set_branch) + created_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[created_by_id], lazy="selectin", + ) + + # Deleted By ID + + deleted_by_id = Column(Integer, ForeignKey("users.id")) + + # Deleted By + + deleted_by = sqlalchemy.orm.relationship( + "User", foreign_keys=[deleted_by_id], lazy="selectin", + ) # Path @@ -282,6 +234,11 @@ class Source(base.DataObject): self.slug, ) + @functools.cached_property + def git(self): + # Setup the Git repository + return Git(self.backend, self.path, self.url, self.branch) + # Commits async def _create_commit(self, revision, initial_commit=False): @@ -311,27 +268,16 @@ class Source(base.DataObject): group = self.backend.builds.groups.create() # Insert into the database - commit = self.backend.sources._get_commit(""" - INSERT INTO - source_commits - ( - source_id, - revision, - author, - committer, - subject, - body, - date, - build_group_id - ) - VALUES - ( - %s, %s, %s, %s, %s, %s, %s, %s - ) - RETURNING - * - """, self.id, revision, author, committer, subject, body, date, group, - source=self, + commit = await self.db.insert( + Commit, + source = self, + revision = revision, + author = author, + committer = committer, + subject = subject, + body = body, + date = date, + group = group, ) # If we are processing the initial commit, we get a list of all files in the tree @@ -370,56 +316,40 @@ class Source(base.DataObject): return commit - def get_commits(self, limit=None): - # XXX sort? 
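
The Session and Source models above declare every relationship with lazy="selectin", which makes SQLAlchemy load the referenced rows in one extra batched SELECT at query time. With an async session that choice matters: once an object has been handed to a template, a conventional lazy load would have to perform I/O outside of an await and fails. A stripped-down sketch of the idea, using stand-in models rather than the real ones:

import sqlalchemy
from sqlalchemy import Column, ForeignKey, Integer, Text
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class Team(Base):
	__tablename__ = "teams"

	id = Column(Integer, primary_key=True)
	name = Column(Text, nullable=False)

class Member(Base):
	__tablename__ = "members"

	id = Column(Integer, primary_key=True)
	name = Column(Text, nullable=False)

	team_id = Column(Integer, ForeignKey("teams.id"), nullable=False)

	# "selectin" loads all referenced teams in a second, IN-based SELECT
	# when Member rows are fetched, so member.team is already populated
	# and never triggers lazy-loading I/O later on.
	team = relationship("Team", lazy="selectin")

# Selecting Member therefore issues two queries in total: one for the
# members, one for their teams.
stmt = sqlalchemy.select(Member)
print(stmt)
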
- commits = self.backend.sources._get_commits(""" - SELECT - * - FROM - source_commits - WHERE - source_id = %s - """, self.id, - ) - - return list(commits) - - @property - def commits(self): + async def get_commits(self, limit=None): # XXX using the ID is an incorrect way to sort them - return self.backend.sources._get_commits(""" - SELECT - * - FROM - source_commits - WHERE - source_id = %s - ORDER BY - id DESC - """, self.id, source=self, + stmt = ( + sqlalchemy + .select( + SourceCommit, + ) + .where( + SourceCommit.source == self, + ) + .order_by( + SourceCommit.id.desc(), + ) + .limit(limit) ) - def get_commit(self, revision): - commit = self.backend.sources._get_commit(""" - SELECT - * - FROM - source_commits - WHERE - source_id = %s - AND - revision = %s - """, self.id, revision, source=self, + return await self.db.fetch_as_list(stmt) + + async def get_commit(self, revision): + stmt = ( + sqlalchemy + .select( + SourceCommit, + ) + .where( + SourceCommit.source == self, + SourceCommit.revision == revision, + ) ) - return commit + return await self.db.fetch_one(stmt) # Fetch - @property - def last_fetched_at(self): - return self.data.last_fetched_at - async def fetch(self): """ Fetches any new commits from this source @@ -454,55 +384,51 @@ class Source(base.DataObject): self._set_attribute_now("last_fetched_at") -class Commit(base.DataObject): - table = "source_commits" +class SourceCommit(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "source_commits" def __str__(self): return self.subject or self.revision - # Revision + # ID - @property - def revision(self): - return self.data.revision + id = Column(Integer, primary_key=True) + + # Source ID + + source_id = Column(Integer, ForeignKey("sources.id"), nullable=False) # Source - @lazy_property - def source(self): - return self.backend.sources.get_by_id(self.data.source_id) + source = sqlalchemy.orm.relationship( + "Source", foreign_keys=[source_id], lazy="selectin", + ) + + # Revision + + revision = Column(Text, nullable=False) # Author - @lazy_property - def author(self): - return self.backend.users.get_by_email(self.data.author) or self.data.author + author = Column(Text, nullable=False) # Committer - @lazy_property - def committer(self): - return self.backend.users.get_by_email(self.data.committer) or self.data.committer + committer = Column(Text, nullable=False) # Date - @property - def date(self): - return self.data.date + date = Column(DateTime(timezone=False), nullable=False) # Subject - @property - def subject(self): - return self.data.subject.strip() + subject = Column(Text, nullable=False) # Body - @property - def body(self): - return self.data.body + body = Column(Text, nullable=False) - @lazy_property + @functools.cached_property def message(self): """ Returns the message without Git tags @@ -525,7 +451,7 @@ class Commit(base.DataObject): return message - @lazy_property + @functools.cached_property def tags(self): tags = {} @@ -612,12 +538,15 @@ class Commit(base.DataObject): return await self.backend.bugzilla.get_bugs(bug_ids) - # Builds + # Build Group ID + + build_group_id = Column(Integer, ForeignKey("build_groups.id")) + + # Build Group - @lazy_property - def builds(self): - if self.data.build_group_id: - return self.backend.builds.groups.get_by_id(self.data.build_group_id) + builds = sqlalchemy.orm.relationship( + "BuildGroup", foreign_keys=[build_group_id], lazy="selectin", + ) # Jobs @@ -649,7 +578,7 @@ class Commit(base.DataObject): return job - @lazy_property + 
@functools.cached_property def jobs(self): jobs = self.backend.sources._get_jobs(""" SELECT @@ -676,47 +605,41 @@ class Commit(base.DataObject): return # If we get here, all jobs must have finished successfully - self._set_attribute_now("finished_at") + self.finished_at = sqlalchemy.func.current_timestamp() -class Job(base.DataObject): - table = "source_commit_jobs" +class SourceJob(database.Base, database.BackendMixin): + __tablename__ = "source_commit_jobs" - # Source + # ID + + id = Column(Integer, primary_key=True) - @lazy_property - def source(self): - return self.commit.source + # Commit ID + + commit_id = Column(Integer, ForeignKey("source_commits.id"), nullable=False) # Commit - @lazy_property - def commit(self): - return self.backend.sources.get_commit_by_id(self.data.commit_id) + commit = sqlalchemy.orm.relationship( + "SourceCommit", foreign_keys=[commit_id], lazy="selectin", + ) # Action - @property - def action(self): - return self.data.action + action = Column(Text, nullable=False) # Name - @property - def name(self): - return self.data.name + name = Column(Text, nullable=False) # Finished At - @property - def finished_at(self): - return self.data.finished_at + finished_at = Column(DateTime(timezone=False)) # Error - @property - def error(self): - return self.data.error + error = Column(Text) # Status @@ -754,7 +677,7 @@ class Job(base.DataObject): raise RuntimeError("Unhandled action: %s" % self.action) # Mark as finished - self._set_attribute_now("finished_at") + self.finished_at = sqlalchemy.func.current_timezone() # Report that this job has finished if there is no error if not self.error: @@ -856,7 +779,7 @@ class Job(base.DataObject): log.error("Error running %s: " % self, exc_info=True) # Store the error - self._set_attribute("error", "%s" % e) + self.error = "%s" % e # Always delete the upload & store the log finally: @@ -864,7 +787,7 @@ class Job(base.DataObject): await upload.delete() # Store log - self._set_attribute("log", "%s" % logger) + self.log = "%s" % logger class Git(object): diff --git a/src/buildservice/uploads.py b/src/buildservice/uploads.py index 2e0039e7..b19afa2f 100644 --- a/src/buildservice/uploads.py +++ b/src/buildservice/uploads.py @@ -6,18 +6,20 @@ import hmac import logging import os import shutil +import sqlalchemy + +from sqlalchemy import Column, ForeignKey +from sqlalchemy import BigInteger, DateTime, Integer, LargeBinary, Text, UUID from . import base +from . import database from . import builders from . 
import users from .constants import * -from .decorators import * # Setup logging log = logging.getLogger("pbs.uploads") -MAX_BUFFER_SIZE = 1 * 1024 * 1024 # 1 MiB - supported_digest_algos = ( "blake2b512", ) @@ -26,34 +28,33 @@ class UnsupportedDigestException(ValueError): pass class Uploads(base.Object): - async def _get_uploads(self, *args, **kwargs): - return await self.db.fetch_many(Upload, *args, **kwargs) + def __aiter__(self): + stmt = ( + sqlalchemy.select(Upload) - async def _get_upload(self, *args, **kwargs): - return await self.db.fetch_one(Upload, *args, **kwargs) - - async def __aiter__(self): - uploads = await self._get_uploads("SELECT * FROM uploads \ - ORDER BY created_at DESC") + # Order them by creation time + .order_by(Upload.created_at) + ) - return aiter(uploads) + # Fetch all objects + return self.db.fetch(stmt) async def get_by_uuid(self, uuid): - return await self._get_upload(""" - SELECT - * - FROM - uploads - WHERE - uuid = %s - AND - expires_at > CURRENT_TIMESTAMP - """, uuid, + stmt = ( + sqlalchemy.select(Upload) + .where( + Upload.uuid == uuid, + Upload.expires_at > sqlalchemy.func.current_timestamp(), + ) ) + return await self.db.fetch_one(stmt) + async def create(self, filename, size, digest_algo, digest, owner=None): - builder = None - user = None + """ + Creates a new upload + """ + builder, user = None, None # Check if the digest algorithm is supported if not digest_algo in supported_digest_algos: @@ -70,38 +71,19 @@ class Uploads(base.Object): user = owner # Check quota for users - if user: - # This will raise an exception if the quota has been exceeded - user.check_storage_quota(size) - - # Allocate a new temporary file - upload = await self._get_upload(""" - INSERT INTO - uploads - ( - filename, - size, - builder_id, - user_id, - digest_algo, - digest - ) - VALUES - ( - %s, - %s, - %s, - %s, - %s, - %s - ) - RETURNING *""", - filename, - size, - builder, - user, - digest_algo, - digest, + #if user: + # # This will raise an exception if the quota has been exceeded + # await user.check_storage_quota(size) + + # Create a new upload + upload = await self.db.insert( + Upload, + filename = filename, + size = size, + builder = builder, + user = user, + digest_algo = digest_algo, + digest = digest, ) # Return the newly created upload object @@ -119,7 +101,11 @@ class Uploads(base.Object): size = os.path.getsize(path) # Create the new upload object - upload = await self.create(filename=filename, size=size, **kwargs) + upload = await self.create( + filename = filename, + size = size, + **kwargs, + ) # Import the data with open(path, "rb") as f: @@ -129,74 +115,75 @@ class Uploads(base.Object): async def cleanup(self): # Find all expired uploads - uploads = await self._get_uploads(""" - SELECT - * - FROM - uploads - WHERE - expires_at <= CURRENT_TIMESTAMP - ORDER BY - created_at - """) + stmt = ( + sqlalchemy + .select(Upload) + .where( + Upload.expires_at <= sqlalchemy.func.current_timestamp(), + ) + .order_by( + Upload.created_at + ) + ) # Delete them all - for upload in uploads: + async for upload in self.db.fetch(stmt): with self.db.transaction(): await upload.delete() -class Upload(base.DataObject): - table = "uploads" +class Upload(database.Base, database.BackendMixin): + __tablename__ = "uploads" def __str__(self): return "%s" % self.uuid - @property - def uuid(self): - return self.data.uuid + # ID + + id = Column(Integer, primary_key=True) + + # UUID - @property - def filename(self): - return self.data.filename + uuid = Column(UUID, unique=True, 
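
The db.insert(), db.fetch_one(), db.fetch() and db.fetch_as_list() helpers used by Sessions, Sources and Uploads above live in the database module, which is not part of this excerpt. Assuming they are thin wrappers around a SQLAlchemy AsyncSession, the shape implied by the call sites could look roughly like this (names and signatures are guesses, not the actual implementation):

import sqlalchemy
from sqlalchemy.ext.asyncio import AsyncSession

class Database:
	"""
	Hypothetical sketch of the helper layer used by the call sites above.
	"""
	def __init__(self, session: AsyncSession):
		self.session = session

	async def fetch_one(self, stmt):
		# Return the first mapped object, or None if nothing matched
		res = await self.session.execute(stmt)
		return res.scalars().first()

	async def fetch_as_list(self, stmt):
		res = await self.session.execute(stmt)
		return list(res.scalars())

	async def fetch(self, stmt):
		# Stream objects one by one so callers can use "async for"
		result = await self.session.stream_scalars(stmt)
		async for obj in result:
			yield obj

	async def insert(self, cls, **kwargs):
		# Instantiate the mapped class, add it to the session and flush
		# so the INSERT is emitted and the new primary key is available
		obj = cls(**kwargs)
		self.session.add(obj)
		await self.session.flush()
		return obj
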
nullable=False, + server_default=sqlalchemy.func.gen_random_uuid()) - @property - def path(self): - return self.data.path + # Filename - @property - def size(self): - return self.data.size + filename = Column(Text, nullable=False) - @property - def digest_algo(self): - return self.data.digest_algo + # Path - @property - def digest(self): - return self.data.digest + path = Column(Text, nullable=False) + + # Size + + size = Column(BigInteger, nullable=False) + + # Digest Algo + + digest_algo = Column(Text, nullable=False) + + # Digest + + digest = Column(LargeBinary, nullable=False) + + # Builder ID + + builder_id = Column(Integer, ForeignKey("builders.id")) # Builder - def get_builder(self): - if self.data.builder_id: - return self.backend.builders.get_by_id(self.data.builder_id) + builder = sqlalchemy.orm.relationship("Builder", foreign_keys=[builder_id], lazy="selectin") - def set_builder(self, builder): - self._set_attribute("builder_id", builder.id) + # User ID - builder = lazy_property(get_builder, set_builder) + user_id = Column(Integer, ForeignKey("users.id")) # User - def get_user(self): - if self.data.user_id: - return self.backend.users.get_by_id(self.data.user_id) + user = sqlalchemy.orm.relationship("User", foreign_keys=[user_id], lazy="selectin") - def set_user(self, user): - self._set_attribute("user_id", user.id) - - user = lazy_property(get_user, set_user) + # Has Perms? def has_perm(self, who): """ @@ -208,23 +195,29 @@ class Upload(base.DataObject): # No permission return False + # Delete! + async def delete(self): log.info("Deleting upload %s (%s)" % (self, self.filename)) # Remove the uploaded data - if self.path: + if await self.has_payload(): await self.backend.unlink(self.path) # Delete the upload from the database - await self.db.execute("DELETE FROM uploads WHERE id = %s", self.id) + await self.db.delete(self) + + # Created At - @property - def created_at(self): - return self.data.created_at + created_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.func.current_timestamp()) - @property - def expires_at(self): - return self.data.expires_at + # Expires At + + expires_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.text("CURRENT_TIMESTAMP + INTERVAL '24 hours'")) + + # Has Payload? async def has_payload(self): """ @@ -237,6 +230,8 @@ class Upload(base.DataObject): # The data must exist on disk return await self.backend.exists(self.path) + # Copy the payload from somewhere + async def copyfrom(self, src): """ Copies the content of this upload from the source file descriptor @@ -289,7 +284,9 @@ class Upload(base.DataObject): raise e # Store the path - self._set_attribute("path", f.name) + self.path = f.name + + # Copy the payload to somewhere else async def copyinto(self, dst): """ diff --git a/src/buildservice/users.py b/src/buildservice/users.py index 75773067..0e84e934 100644 --- a/src/buildservice/users.py +++ b/src/buildservice/users.py @@ -13,6 +13,7 @@ import cryptography.hazmat.primitives.kdf.hkdf import cryptography.hazmat.primitives.serialization import datetime import email.utils +import functools import json import ldap import logging @@ -25,9 +26,19 @@ import urllib.parse import tornado.locale +import sqlalchemy +from sqlalchemy import BigInteger, Boolean, Column, DateTime, ForeignKey, Integer +from sqlalchemy import Interval, LargeBinary, Text, UUID + from . import base from . import bugtracker +from . import builds +from . import database from . import httpclient +from . 
import jobs +from . import packages +from . import repository +from . import uploads from .decorators import * @@ -52,50 +63,6 @@ LDAP_ATTRS = ( "mailAlternateAddress", ) -WITH_USED_BUILD_TIME_CTE = """ - user_build_times AS ( - SELECT - users.id AS user_id, - SUM(jobs.finished_at - jobs.started_at) AS used - FROM - users - LEFT JOIN - builds ON users.id = builds.owner_id - LEFT JOIN - jobs ON builds.id = jobs.build_id - WHERE - users.deleted_at IS NULL - AND - users.daily_build_quota IS NOT NULL - AND - jobs.started_at IS NOT NULL - AND - jobs.finished_at IS NOT NULL - AND - jobs.finished_at >= CURRENT_TIMESTAMP - INTERVAL '24 hours' - GROUP BY - users.id - ) -""" - -WITH_EXCEEDED_QUOTAS_CTE = """ - -- Include used build time - %s, - - users_with_exceeded_quotas AS ( - SELECT - * - FROM - user_build_times build_times - LEFT JOIN - users ON build_times.user_id = users.id - WHERE - users.daily_build_quota IS NOT NULL - AND - build_times.used >= users.daily_build_quota - ) -""" % WITH_USED_BUILD_TIME_CTE - class QuotaExceededError(Exception): pass @@ -117,12 +84,6 @@ class Users(base.Object): return self.local.ldap - def _get_users(self, *args, **kwargs): - return self.db.fetch_many(User, *args, **kwargs) - - def _get_user(self, *args, **kwargs): - return self.db.fetch_one(User, *args, **kwargs) - async def __aiter__(self): users = await self._get_users(""" SELECT @@ -210,21 +171,11 @@ class Users(base.Object): storage_quota = DEFAULT_STORAGE_QUOTA # Insert into database - user = await self._get_user(""" - INSERT INTO - users - ( - name, - storage_quota, - _attrs - ) - VALUES - ( - %s, %s, %s - ) - RETURNING - * - """, name, storage_quota, _attrs, + user = await self.db.insert( + User, + name = name, + storage_quota = storage_quota, + _attrs = _attrs, ) log.debug("Created user %s" % user) @@ -235,25 +186,21 @@ class Users(base.Object): return user - async def get_by_id(self, id): - return await self._get_user("SELECT * FROM users WHERE id = %s", id) - async def get_by_name(self, name): """ Fetch a user by its username """ - # Try to find a local user - user = await self._get_user(""" - SELECT - * - FROM - users - WHERE - deleted_at IS NULL - AND - name = %s - """, name, + stmt = ( + sqlalchemy + .select(User) + .where( + User.deleted_at == None, + User.name == name, + ) ) + + # Fetch the user from the database + user = await self.db.fetch_one(stmt) if user: return user @@ -277,7 +224,7 @@ class Users(base.Object): uid = res.get("uid")[0].decode() # Create a new user - return self.create(uid) + return await self.create(uid) async def get_by_email(self, mail): # Strip any excess stuff from the email address @@ -355,19 +302,20 @@ class Users(base.Object): ) # Fetch users - users = await self._get_users(""" - SELECT - * - FROM - users - WHERE - deleted_at IS NULL - AND - name = ANY(%s) - """, [row.get("uid")[0].decode() for row in res], + stmt = ( + sqlalchemy + .select(User) + .where( + User.deleted_at == None, + User.name in [row.get("uid")[0].decode() for row in res], + ) + .order_by( + User.name, + ) ) - return sorted(users) + # Return as list + return await self.db.fetch_as_list(stmt) # Pakfire @@ -382,31 +330,119 @@ class Users(base.Object): return user - @property - async def top(self): + @functools.cached_property + def build_counts(self): """ - Returns the top users (with the most builds in the last year) + Returns a CTE that maps the user ID and the total number of builds """ - users = await self._get_users(""" - SELECT - DISTINCT users.*, - COUNT(builds.id) AS _sort - FROM - users - 
LEFT JOIN - builds ON users.id = builds.owner_id - WHERE - builds.test IS FALSE - GROUP BY - users.id - ORDER BY - _sort DESC - LIMIT - 30 - """, + return ( + sqlalchemy + .select( + # User ID + builds.Build.owner_id.label("user_id"), + + # Count all builds + sqlalchemy.func.count( + builds.Build.id + ).label("count"), + ) + .where( + builds.Build.deleted_at == None, + builds.Build.owner_id != None, + builds.Build.test == False, + ) + .group_by( + builds.Build.owner_id, + ) + .cte("build_counts") + ) + + async def get_top(self, limit=50): + """ + Returns the top users (with the most builds) + """ + stmt = ( + sqlalchemy + .select(User) + .join( + self.build_counts, + self.build_counts.c.user_id == User.id, + ) + .where( + User.deleted_at == None, + ) + .order_by( + self.build_counts.c.count.desc(), + ) + .limit(50) + ) + + # Run the query + return await self.db.fetch_as_list(stmt) + + @functools.cached_property + def build_times(self): + """ + This is a CTE to easily access a user's consumed build time in the last 24 hours + """ + return ( + sqlalchemy + + .select( + # Fetch the user by its ID + User.id.label("user_id"), + + # Sum up the total build time + sqlalchemy.func.sum( + sqlalchemy.func.coalesce( + jobs.Job.finished_at, + sqlalchemy.func.current_timestamp() + ) + - jobs.Job.started_at, + ).label("used_build_time"), + ) + + # Filter out some things + .where( + User.deleted_at == None, + User.daily_build_quota != None, + + # Jobs must have been started + jobs.Job.started_at != None, + + sqlalchemy.or_( + jobs.Job.finished_at == None, + jobs.Job.finished_at == + sqlalchemy.func.current_timestamp() - sqlalchemy.text("INTERVAL '24 hours'"), + ), + ) + + # Group by user + .group_by( + User.id, + ) + + # Make this into a CTE + .cte("user_build_times") ) - return list(users) + @functools.cached_property + def exceeded_quotas(self): + return ( + sqlalchemy + + .select( + User.id, + self.build_times.c.used_build_time, + ) + .where( + #User.daily_build_quota != None, + self.build_times.c.used_build_time >= User.daily_build_quota, + ) + + # Make this into a CTE + .cte("user_exceeded_quotas") + ) # Push Notifications @@ -483,11 +519,8 @@ class Users(base.Object): return key -class User(base.DataObject): - table = "users" - - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, self.realname) +class User(database.Base, database.BackendMixin, database.SoftDeleteMixin): + __tablename__ = "users" def __str__(self): return self.realname or self.name @@ -509,9 +542,13 @@ class User(base.DataObject): "name" : self.name, } - @property - def name(self): - return self.data.name + # ID + + id = Column(Integer, primary_key=True) + + # Name + + name = Column(Text, nullable=False) async def delete(self): await self._set_attribute("deleted", True) @@ -525,9 +562,9 @@ class User(base.DataObject): @lazy_property def attrs(self): # Use the stored attributes (only used in the test environment) - if self.data._attrs: - return pickle.loads(self.data._attrs) - + #if self.data._attrs: + # return pickle.loads(self.data._attrs) + # return self.backend.users._ldap_get("(uid=%s)" % self.name, attrlist=LDAP_ATTRS) def _get_attrs(self, key): @@ -574,8 +611,14 @@ class User(base.DataObject): """ await self.send_email("users/messages/welcome.txt") + # Admin + + admin = Column(Boolean, nullable=False, default=False) + + # Admin? 
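
build_counts and build_times above are ordinary selectables, so later queries can attach them like tables: get_top() joins the CTE against User and orders by the aggregated column. The same mechanics in isolation, again with placeholder tables rather than the real schema:

import sqlalchemy

metadata = sqlalchemy.MetaData()

users = sqlalchemy.Table(
	"users", metadata,
	sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
	sqlalchemy.Column("name", sqlalchemy.Text),
)
builds = sqlalchemy.Table(
	"builds", metadata,
	sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
	sqlalchemy.Column("owner_id", sqlalchemy.ForeignKey("users.id")),
)

# CTE: one row per owner with the number of builds they own
build_counts = (
	sqlalchemy
	.select(
		builds.c.owner_id.label("user_id"),
		sqlalchemy.func.count(builds.c.id).label("count"),
	)
	.group_by(builds.c.owner_id)
	.cte("build_counts")
)

# Join the CTE like any other table and sort by its aggregated column
stmt = (
	sqlalchemy
	.select(users)
	.join(build_counts, build_counts.c.user_id == users.c.id)
	.order_by(build_counts.c.count.desc())
	.limit(10)
)

print(stmt)
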
+ def is_admin(self): - return self.data.admin is True + return self.admin is True # Locale @@ -583,10 +626,6 @@ class User(base.DataObject): def locale(self): return tornado.locale.get() - @property - def deleted(self): - return self.data.deleted - # Avatar def avatar(self, size=512): @@ -597,14 +636,6 @@ class User(base.DataObject): # Permissions - def get_perms(self): - return self.data.perms - - def set_perms(self, perms): - self._set_attribute("perms", perms or []) - - perms = property(get_perms, set_perms) - def has_perm(self, user): """ Check, if the given user has the right to perform administrative @@ -625,6 +656,14 @@ class User(base.DataObject): # No permission return False + # Sessions + + sessions = sqlalchemy.orm.relationship("Session", back_populates="user") + + # Bugzilla API Key + + bugzilla_api_key = Column(Text) + # Bugzilla async def connect_to_bugzilla(self, api_key): @@ -634,79 +673,49 @@ class User(base.DataObject): if not self.email == await bz.whoami(): raise ValueError("The API key does not belong to %s" % self) - self._set_attribute("bugzilla_api_key", api_key) + # Store the API key + self.bugzilla_api_key = api_key - @lazy_property + @functools.cached_property def bugzilla(self): """ Connection to Bugzilla as this user """ - if self.data.bugzilla_api_key: - return bugtracker.Bugzilla(self.backend, self.data.bugzilla_api_key) + if self.bugzilla_api_key: + return bugtracker.Bugzilla(self.backend, self.bugzilla_api_key) # Build Quota - def get_daily_build_quota(self): - return self.data.daily_build_quota - - def set_daily_build_quota(self, quota): - self._set_attribute("daily_build_quota", quota) - - daily_build_quota = property(get_daily_build_quota, set_daily_build_quota) - - @property - def _build_times(self): - return self.db.get(""" - WITH %s - - SELECT - * - FROM - user_build_times - WHERE - user_build_times.user_id = %%s - """ % WITH_BUILD_TIMES_CTE, self.id, - ) + daily_build_quota = Column(Interval) - @property - def used_daily_build_quota(self): - res = self.db.get(""" - WITH %s + # Build Times - SELECT - user_build_times.used AS used - FROM - user_build_times - WHERE - user_build_times.user_id = %%s - """ % WITH_USED_BUILD_TIME_CTE, self.id, + async def get_used_daily_build_quota(self): + # Fetch the build time from the CTE + stmt = ( + sqlalchemy + .select( + self.backend.users.build_times.c.used_build_time, + ) + .where( + self.backend.users.build_times.c.user_id == self.id, + ) ) - if res: - return res.used - - return 0 + # Fetch the result + return await self.db.select_one(stmt, "used_build_time") or datetime.timedelta(0) - def has_exceeded_build_quota(self): + async def has_exceeded_build_quota(self): if not self.daily_build_quota: return False - if not self.used_daily_build_quota: - return False - - return self.used_daily_build_quota >= self.daily_build_quota + return await self.get_used_daily_build_quota() >= self.daily_build_quota # Storage Quota - def get_storage_quota(self): - return self.data.storage_quota + storage_quota = Column(BigInteger) - def set_storage_quota(self, quota): - self._set_attribute("storage_quota", quota) - - storage_quota = property(get_storage_quota, set_storage_quota) - - def has_exceeded_storage_quota(self, size=None): + async def has_exceeded_storage_quota(self, size=None): """ Returns True if this user has exceeded their quota """ @@ -714,9 +723,9 @@ class User(base.DataObject): if not self.storage_quota: return - return self.disk_usage + (size or 0) >= self.storage_quota + return await self.get_disk_usage() + (size 
or 0) >= self.storage_quota - def check_storage_quota(self, size=None): + async def check_storage_quota(self, size=None): """ Determines the user's disk usage and raises an exception when the user is over quota. @@ -725,177 +734,126 @@ class User(base.DataObject): if self.has_exceeded_storage_quota(size=size): raise QuotaExceededError - @lazy_property - def disk_usage(self): + async def get_disk_usage(self): """ Returns the total disk usage of this user """ - res = self.db.get(""" - WITH objects AS ( - -- Uploads - SELECT - uploads.size AS size - FROM - uploads - WHERE - uploads.user_id = %s - AND - uploads.expires_at > CURRENT_TIMESTAMP - - UNION ALL - - -- Source Packages - SELECT - packages.filesize AS size - FROM - builds - LEFT JOIN - packages ON builds.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - builds.owner_id = %s - AND - builds.test IS FALSE - AND - packages.deleted_at IS NULL - - UNION ALL - - -- Binary Packages - SELECT - packages.filesize AS size - FROM - builds - LEFT JOIN - jobs ON builds.id = jobs.build_id - LEFT JOIN - job_packages ON jobs.id = job_packages.job_id - LEFT JOIN - packages ON job_packages.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - builds.owner_id = %s - AND - builds.test IS FALSE - AND - jobs.deleted_at IS NULL - AND - packages.deleted_at IS NULL - - UNION ALL - - -- Build Logs - SELECT - jobs.log_size AS size - FROM - jobs - LEFT JOIN - builds ON builds.id = jobs.build_id - WHERE - builds.deleted_at IS NULL - AND - jobs.deleted_at IS NULL - AND - builds.owner_id = %s - AND - jobs.log_size IS NOT NULL + # Uploads + upload_disk_usage = ( + sqlalchemy + .select( + uploads.Upload.size + ) + .where( + uploads.Upload.user == self, + uploads.Upload.expires_at > sqlalchemy.func.current_timestamp(), ) - - SELECT - SUM(size) AS disk_usage - FROM - objects - """, self.id, self.id, self.id, self.id, ) - if res: - return res.disk_usage - - return 0 - - # Builds + # Source Packages + source_package_disk_usage = ( + sqlalchemy + .select( + packages.Package.filesize + ) + .select_from(builds.Build) + .join(builds.Build.pkg) + .where( + # All objects must exist + packages.Package.deleted_at == None, + builds.Build.deleted_at == None, + jobs.Job.deleted_at == None, + + # Don't consider test builds + builds.Build.test == False, + ) + ) - async def get_builds(self, name=None, limit=None, offset=None): - """ - Returns builds by a certain user - """ - if name: - return await self.get_builds_by_name(name, limit=limit, offset=offset) + # Binary Packages + binary_package_disk_usage = ( + sqlalchemy + .select( + packages.Package.filesize, + ) + .select_from(builds.Build) + .join(jobs.Job) + #.join(jobs.JobPackages) + .where( + # All objects must exist + packages.Package.deleted_at == None, + builds.Build.deleted_at == None, + jobs.Job.deleted_at == None, + + # Don't consider test builds + builds.Build.test == False, + + # The build must be owned by the user + builds.Build.owner == self, + ) + ) - builds = await self.backend.builds._get_builds(""" - SELECT - * - FROM - builds - WHERE - deleted_at IS NULL - AND - test IS FALSE - AND - owner_id = %s - ORDER BY - created_at DESC - LIMIT - %s - OFFSET - %s - """, self.id, limit, offset, + # Build Logs + build_log_disk_usage = ( + sqlalchemy + .select( + jobs.Job.log_size + ) + .select_from(builds.Build) + .join(jobs.Job) + .where( + # All objects must exist + builds.Build.deleted_at == None, + jobs.Job.deleted_at == None, + + # Don't consider test builds + builds.Build.test == False, + + # The 
build must be owned by the user + builds.Build.owner == self, + ) ) - return list(builds) + # Pull everything together + disk_usage = ( + sqlalchemy + .union_all( + upload_disk_usage, + source_package_disk_usage, + binary_package_disk_usage, + build_log_disk_usage, + ) + .cte("disk_usage") + ) - async def get_builds_by_name(self, name, limit=None, offset=None): - """ - Fetches all builds matching name - """ - builds = await self.backend.builds._get_builds(""" - SELECT - builds.* - FROM - builds - JOIN - packages ON builds.pkg_id = packages.id - WHERE - builds.deleted_at IS NULL - AND - builds.test IS FALSE - AND - builds.owner_id = %s - AND - packages.deleted_at IS NULL - AND - packages.name = %s - LIMIT - %s - OFFSET - %s - """, self.id, name, limit, offset, + # Add it all up + stmt = ( + sqlalchemy + .select( + sqlalchemy.func.sum( + disk_usage.c.size + ).label("disk_usage"), + ) ) - return list(builds) + # Run the query + return await self.db.select_one(stmt, "disk_usage") # Stats - @lazy_property - def total_builds(self): - res = self.db.get(""" - SELECT - COUNT(*) AS builds - FROM - builds - WHERE - test IS FALSE - AND - owner_id = %s - """, self.id, + async def get_total_builds(self): + stmt = ( + sqlalchemy + .select( + self.backend.users.build_counts.c.count.label("count"), + ) + .select_from(self.backend.users.build_counts) + .where( + self.backend.users.build_counts.c.user_id == self.id, + ) ) - if res: - return res.builds - - return 0 + # Run the query + return await self.db.select_one(stmt, "count") @lazy_property def total_build_time(self): @@ -920,94 +878,67 @@ class User(base.DataObject): # Custom repositories - @property - def repos(self): + async def get_repos(self): """ Returns all custom repositories """ - repos = self.backend.repos._get_repositories(""" - SELECT - * - FROM - repositories - WHERE - deleted_at IS NULL - AND - owner_id = %s - ORDER BY - name""", - self.id, + stmt = ( + sqlalchemy + .select(repository.Repo) + .where( + repository.Repo.deleted_at == None, + repository.Repo.owner == self, + ) + .order_by( + repository.Repo.name, + ) ) - distros = {} + return await self.db.fetch_as_list(stmt) - # Group by distro - for repo in repos: - try: - distros[repo.distro].append(repo) - except KeyError: - distros[repo.distro] = [repo] - - return distros - - def get_repo(self, distro, slug): + async def get_repo(self, distro, slug=None): + """ + Fetches a single repository + """ # Return the "home" repository if slug is empty - if not slug: + if slug is None: slug = self.name - return self.backend.repos._get_repository(""" - SELECT - * - FROM - repositories - WHERE - deleted_at IS NULL - AND - owner_id = %s - AND - distro_id = %s - AND - slug = %s""", - self.id, - distro, - slug, + stmt = ( + sqlalchemy + .select(repository.Repo) + .where( + repository.Repo.deleted_at == None, + repository.Repo.owner == self, + repository.Repo.distro == distro, + repository.Repo.slug == slug, + ) ) - @property - def uploads(self): + return await self.db.fetch_one(stmt) + + # Uploads + + def get_uploads(self): """ Returns all uploads that belong to this user """ - uploads = self.backend.uploads._get_uploads(""" - SELECT - * - FROM - uploads - WHERE - user_id = %s - AND - expires_at > CURRENT_TIMESTAMP - ORDER BY - created_at DESC - """, self.id, + stmt = ( + sqlalchemy + .select(uploads.Upload) + .where( + uploads.Upload.user == self, + uploads.Upload.expires_at > sqlalchemy.func.current_timestamp(), + ) + .order_by( + uploads.Upload.created_at.desc(), + ) ) - return list(uploads) + 
return self.db.fetch(stmt) # Push Subscriptions - def _get_subscriptions(self, query, *args): - res = self.db.query(query, *args) - - for row in res: - yield UserPushSubscription(self.backend, row.id, data=row) - - def _get_subscription(self, query, *args): - res = self.db.get(query, *args) - - if res: - return UserPushSubscription(self.backend, res.id, data=res) - @lazy_property def subscriptions(self): subscriptions = self._get_subscriptions(""" @@ -1099,33 +1030,37 @@ class User(base.DataObject): return message -class UserPushSubscription(base.DataObject): - table = "user_push_subscriptions" +class UserPushSubscription(database.Base): + __tablename__ = "user_push_subscriptions" - @property - def uuid(self): - """ - UUID - """ - return self.data.uuid + # ID - @property - def created_at(self): - return self.data.created_at + id = Column(Integer, primary_key=True) - @property - def deleted_at(self): - return self.data.deleted_at + # User ID - async def delete(self): - """ - Deletes this subscription - """ - await self._set_attribute_now("deleted_at") + user_id = Column(Integer, ForeignKey("users.id"), nullable=False) - @property - def endpoint(self): - return self.data.endpoint + # User + + user = sqlalchemy.orm.relationship("User", lazy="selectin") + + # UUID + + uuid = Column(UUID, unique=True, nullable=False) + + # Created At + + created_at = Column(DateTime(timezone=False), nullable=False, + server_default=sqlalchemy.func.current_timestamp()) + + # User Agent + + user_agent = Column(Text) + + # Endpoint + + endpoint = Column(Text, nullable=False) @lazy_property def p256dh(self): @@ -1138,9 +1073,9 @@ class UserPushSubscription(base.DataObject): return p - @property - def auth(self): - return bytes(self.data.auth) + # Auth + + auth = Column(LargeBinary, nullable=False) @property def vapid_private_key(self): diff --git a/src/scripts/pakfire-web b/src/scripts/pakfire-web index 194c3e8a..035c8ef3 100644 --- a/src/scripts/pakfire-web +++ b/src/scripts/pakfire-web @@ -5,14 +5,17 @@ import tornado.options import pakfire.buildservice.web -tornado.options.define("debug", type=bool, default=False, help="Enable debug mode") tornado.options.define("port", type=int, default=9000, help="Port to listen on") async def main(): tornado.options.parse_command_line() # Initialise application - app = pakfire.buildservice.web.Application(debug=tornado.options.options.debug) + app = pakfire.buildservice.web.Application() + + # Check the database schema + #await app.backend.db.check_schema() + app.listen( tornado.options.options.port, xheaders=True, diff --git a/src/templates/base.html b/src/templates/base.html index 208fac87..0407c8bf 100644 --- a/src/templates/base.html +++ b/src/templates/base.html @@ -10,7 +10,7 @@ {{ hostname }} - {% block title %}{{ _("No title given") }}{% endblock %} - + @@ -135,14 +135,14 @@

- © {{ year }} - Pakfire Build Service {{ version }} + © {{ now.year }} - Pakfire Build Service {{ version }}

- - + + diff --git a/src/web/events.py b/src/templates/bugs/macros.html similarity index 52% rename from src/web/events.py rename to src/templates/bugs/macros.html index 3d9f2ed4..90760ed9 100644 --- a/src/web/events.py +++ b/src/templates/bugs/macros.html @@ -1,8 +1,7 @@ -#!/usr/bin/python3 -############################################################################### +{############################################################################## # # # Pakfire - The IPFire package management system # -# Copyright (C) 2022 Pakfire development team # +# Copyright (C) 2025 Pakfire development team # # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU General Public License as published by # @@ -17,35 +16,54 @@ # You should have received a copy of the GNU General Public License # # along with this program. If not, see . # # # -############################################################################### +##############################################################################} -import tornado.web +{% from "users/macros.html" import Avatar, LinkToUser with context %} -from . import ui_modules +{% macro BugList(bugs) %} + {% for bug in bugs %} +
+
+

+ {% if bug.creator %} + {{ Avatar(bug.creator, size=96) }} + {% endif %} +

+
-class ListModule(ui_modules.UIModule): - def render(self, *args, show_build=True, show_builder=True, **kwargs): - # Fetch all events - events = self.backend.events(*args, **kwargs) +
+

+ + {{ bug }} + - return self.render_string("events/modules/list.html", - events=events, show_build=show_build, show_builder=show_builder) + ‐ + + {{ bug.summary }} + -class BuildCommentModule(ui_modules.UIModule): - def render(self, event, show_build=False, show_builder=True): - return self.render_string("events/modules/build-comment.html", - event=event, comment=event.build_comment, - show_build=show_build, show_builder=show_builder) + + {{ bug.creator }} + + + {{ bug.created_at | format_date(shorter=True) }} + -class UserMessageModule(ui_modules.UIModule): - def render(self, event, show_build=False, show_builder=True): - return self.render_string("events/modules/user-message.html", - event=event, show_build=show_build, show_builder=show_builder) +
+ {{ bug.status }} -class SystemMessageModule(ui_modules.UIModule): - def render(self, event, show_build=True, show_builder=True): - return self.render_string("events/modules/system-message.html", - event=event, show_build=show_build, show_builder=show_builder) + {% if bug.resolution %} + {{ bug.resolution }} + {% endif %} + + {% if bug.assignee %} + ‐ {{ LinkToUser(bug.assignee) }} + {% endif %} +

+
+
+ {% endfor %} +{% endmacro %} diff --git a/src/templates/bugs/modules/list.html b/src/templates/bugs/modules/list.html deleted file mode 100644 index bfb8b474..00000000 --- a/src/templates/bugs/modules/list.html +++ /dev/null @@ -1,28 +0,0 @@ -{% for bug in bugs %} -
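
The macros above format values with filters such as format_date, which are not Jinja built-ins and have to be registered on the template environment. A rough, self-contained sketch of how such a filter can be wired up on an async-enabled environment (the filter body here is made up for illustration):

import asyncio
import datetime
import jinja2

def format_date(value, shorter=False):
	# Illustrative implementation only: render a datetime more or less verbosely
	if shorter:
		return value.strftime("%b %d, %Y")
	return value.strftime("%B %d, %Y %H:%M")

env = jinja2.Environment(
	loader=jinja2.DictLoader({
		"example.html" : "{{ created_at | format_date(shorter=True) }}",
	}),

	# Allows templates to await coroutines (e.g. async model methods)
	enable_async=True,
	autoescape=True,
)

# Custom filters are plain callables registered by name
env.filters["format_date"] = format_date

async def main():
	template = env.get_template("example.html")
	print(await template.render_async(created_at=datetime.datetime(2025, 1, 21)))

asyncio.run(main())
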
-
-

- {% if bug.creator %} - - {% end %} -

-
- -
-

- {{ bug }} ‐ - {{ bug.summary }} - {{ bug.creator }} - {{ locale.format_date(bug.created_at, shorter=True) }} - -
- - {{ bug.status }} {% if bug.resolution %}{{ bug.resolution }}{% end %} - - {% if bug.assignee %} - ‐ {% module LinkToUser(bug.assignee) %} - {% end %} -

-
-
-{% end %} diff --git a/src/templates/builders/index.html b/src/templates/builders/index.html index c839b813..1c8139e0 100644 --- a/src/templates/builders/index.html +++ b/src/templates/builders/index.html @@ -1,6 +1,6 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Builders") }}{% end block %} +{% block title %}{{ _("Builders") }}{% endblock %} {% block body %}
@@ -25,16 +25,25 @@
- {{ builder }} + + {{ builder }} + {% if builder.is_online() %}
- {{ _("Online") }} - {{ len(builder.jobs) }} + + {{ _("Online") }} + + + + {{ builder.jobs | count }} +
{% else %} - {{ _("Offline") }} - {% end %} + + {{ _("Offline") }} + + {% endif %}
@@ -42,13 +51,13 @@
- {% end %} + {% endfor %} {% if current_user and current_user.is_admin() %} {{ _("Create Builder") }} - {% end %} + {% endif %}
@@ -59,26 +68,36 @@
{{ _("Total Build Time By Architecture") }}
- {% set arches = backend.builders.total_build_time_by_arch %} + {% set arches = backend.builders.get_total_build_time_by_arch() %} -{% end block %} +{% endblock %} diff --git a/src/templates/builders/macros.html b/src/templates/builders/macros.html new file mode 100644 index 00000000..b1f23551 --- /dev/null +++ b/src/templates/builders/macros.html @@ -0,0 +1,54 @@ +{############################################################################## +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +##############################################################################} + +{% macro BuilderStats(builder) %} + {# XXX Not sure if this is a good place to load the JS #} + + +
+
+
+ {{ _("Processor") }} +
+ +
+ +
+
+ +
+
+ {{ _("Memory") }} +
+ +
+ +
+ +
+ {{ _("Swap Usage") }} +
+ +
+ +
+
+
+{% endmacro %} diff --git a/src/templates/builders/modules/stats.html b/src/templates/builders/modules/stats.html deleted file mode 100644 index 689478aa..00000000 --- a/src/templates/builders/modules/stats.html +++ /dev/null @@ -1,31 +0,0 @@ -
- {% if builder.is_online() %} -
-
- {{ _("Processor") }} -
- -
- -
-
- -
-
- {{ _("Memory") }} -
- -
- -
- -
- {{ _("Swap Usage") }} -
- -
- -
-
- {% end %} -
diff --git a/src/templates/builders/show.html b/src/templates/builders/show.html index 65484485..64b66a6e 100644 --- a/src/templates/builders/show.html +++ b/src/templates/builders/show.html @@ -1,6 +1,11 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Builders") }} - {{ builder.name }}{% end block %} +{% from "macros.html" import Text with context %} +{% from "builders/macros.html" import BuilderStats with context %} +{% from "events/macros.html" import EventList with context %} +{% from "jobs/macros.html" import JobList with context %} + +{% block title %}{{ _("Builders") }} - {{ builder.name }}{% endblock %} {% block body %}
@@ -22,7 +27,7 @@
{% if builder.maintenance %} {{ _("Maintenance") }} - {% end %} + {% endif %} {# Status #} {% if is_running %} @@ -31,13 +36,13 @@ {{ _("Shutting Down") }} {% elif is_shut_down %} {{ _("Stopped") }} - {% end %} + {% endif %} {% if builder.is_online() %} {{ _("Online") }} {% else %} {{ _("Offline") }} - {% end %} + {% endif %}
@@ -49,22 +54,22 @@ {{ builder.cpu_model or _("Unknown CPU Model") }} {% if builder.cpu_count > 1 %} × {{ builder.cpu_count }} - {% end %} + {% endif %}

- {% end %} + {% endif %} {% if builder.mem_total %}

{{ _("Memory") }}

- {{ format_size(builder.mem_total) }} + {{ builder.mem_total | filesizeformat(binary=True) }}

- {% end %} + {% endif %} {% if builder.arch %}
@@ -75,7 +80,7 @@

- {% end %} + {% endif %} {% if builder.os_name %}
@@ -86,7 +91,7 @@

- {% end %} + {% endif %} {% if builder.pakfire_version %}
@@ -97,7 +102,7 @@

- {% end %} + {% endif %} {% if builder.total_build_time %}
@@ -108,19 +113,19 @@

- {% end %} + {% endif %} {# Builder Stats #} {% if builder.is_online() %}
- {% module BuilderStats(builder) %} + {{ BuilderStats(builder) }}
- {% end %} + {% endif %} {% if builder.description %} - {% module Text(builder.description) %} - {% end %} + {{ Text(builder.description) }} + {% endif %}
@@ -146,27 +151,27 @@ {{ _("Start") }} - {% end %} - {% end %} + {% endif %} + {% endif %} - {% end %} + {% endif %} {% if builder.jobs %}
{{ _("Running Jobs") }}
-					{% module JobsList(builder.jobs) %}
+					{{ JobList(builder.jobs) }}
- {% end %} + {% endif %}
{{ _("Log") }}
- {% module EventsList(builder=builder, show_builder=False, limit=10) %} + {{ EventList(builder=builder, show_builder=False, limit=10) }}
-{% end block %} +{% endblock %} diff --git a/src/templates/builds/groups/macros.html b/src/templates/builds/groups/macros.html new file mode 100644 index 00000000..97e49a3a --- /dev/null +++ b/src/templates/builds/groups/macros.html @@ -0,0 +1,88 @@ +{% macro BuildGroupList(group) %} + +{% endmacro %} diff --git a/src/templates/builds/groups/modules/list.html b/src/templates/builds/groups/modules/list.html deleted file mode 100644 index 0a8040a5..00000000 --- a/src/templates/builds/groups/modules/list.html +++ /dev/null @@ -1,76 +0,0 @@ - diff --git a/src/templates/builds/groups/show.html b/src/templates/builds/groups/show.html index 98ed01bd..37bc9116 100644 --- a/src/templates/builds/groups/show.html +++ b/src/templates/builds/groups/show.html @@ -1,6 +1,8 @@ -{% extends "../../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Build Group %s") % group }}{% end block %} +{% from "builds/macros.html" import BuildList with context %} + +{% block title %}{{ _("Build Group %s") % group }}{% endblock %} {% block body %} {% set build = group.tested_build %} @@ -38,23 +40,23 @@ - {% end %} + {% endif %}

{% if group.is_test() %} {{ _("Test Builds for %s") % build }} {% else %} {{ _("Build Group %s") % group }} - {% end %} + {% endif %}

{% if group.builds %} - {% module BuildsList(group.builds) %} + {{ BuildList(group.builds) }} {% else %}
{{ _("This build group does not have any builds") }}
- {% end %} + {% endif %} -{% end block %} +{% endblock %} diff --git a/src/templates/builds/index.html b/src/templates/builds/index.html index 8c6456ca..a7babe03 100644 --- a/src/templates/builds/index.html +++ b/src/templates/builds/index.html @@ -1,6 +1,8 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Builds") }}{% end block %} +{% from "builds/macros.html" import BuildList with context %} + +{% block title %}{{ _("Builds") }}{% endblock %} {% block body %}
@@ -18,7 +20,7 @@

{% if user and name %} - {{ _("%(user)s's Builds Of '%(name)s'") \ + {{ _("%(user)s's Builds Of '%(name)s'") % { "user" : user, "name" : name } }} {% elif user %} {{ _("%s's Builds") % user }} @@ -26,7 +28,7 @@ {{ _("Builds Of '%s'") % name }} {% else %} {{ _("Recent Builds") }} - {% end %} + {% endif %}

@@ -35,17 +37,19 @@
{# Render all builds #} - {% for date in builds %} + {% for date, items in builds | groupby("date") %}
-

{{ locale.format_day(date) }}

+

+ {{ date | format_day }} +

- {% module BuildsList(builds[date]) %} + {{ BuildList(items) }}
- {% end %} + {% endfor %}
-{% end block %} +{% endblock %} diff --git a/src/templates/builds/macros.html b/src/templates/builds/macros.html new file mode 100644 index 00000000..6955c760 --- /dev/null +++ b/src/templates/builds/macros.html @@ -0,0 +1,209 @@ +{############################################################################## +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +##############################################################################} + +{% from "users/macros.html" import Avatar with context %} + +{% macro BuildList(builds, limit=None, more_url=None) %} + {% if builds %} + {% set rest = False %} + + + {% endif %} +{% endmacro %} + +{% macro BuildWatchers(build, watchers=None) %} + {% if watchers is none %} + {% set watchers = build.get_watchers() %} + {% endif %} + +
+
+
+ {# Watch/Unwatch #} +
+
+ {% if current_user in watchers %} +
+ {{ xsrf_form_html() | safe }} + + +
+ {% else %} +
+ {{ xsrf_form_html() | safe }} + + +
+ {% endif %} +
+
+ + {# List all watchers #} + {% for watcher in watchers | sort %} + + + {{ Avatar(watcher.user) }} + + + {% endfor %} +
+
+
+{% endmacro %} diff --git a/src/templates/builds/modules/list.html b/src/templates/builds/modules/list.html deleted file mode 100644 index 30ec7afe..00000000 --- a/src/templates/builds/modules/list.html +++ /dev/null @@ -1,107 +0,0 @@ -{% if builds %} - -{% end %} diff --git a/src/templates/builds/modules/watchers.html b/src/templates/builds/modules/watchers.html deleted file mode 100644 index 69e115d9..00000000 --- a/src/templates/builds/modules/watchers.html +++ /dev/null @@ -1,66 +0,0 @@ -
-
-
- {# Watch/Unwatch #} -
-
- {% if current_user in watchers %} -
- {% raw xsrf_form_html() %} - - -
- {% else %} -
- {% raw xsrf_form_html() %} - - -
- {% end %} -
-
- - {# List all watchers #} - {% for watcher in watchers %} - - -
- {{ watcher }} -
-
-
- {% end %} -
-
-
diff --git a/src/templates/builds/show.html b/src/templates/builds/show.html index 94fc27f4..9a5766f2 100644 --- a/src/templates/builds/show.html +++ b/src/templates/builds/show.html @@ -1,6 +1,15 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Build") }} - {{ build }}{% end block %} +{% from "macros.html" import Text with context %} +{% from "bugs/macros.html" import BugList with context %} +{% from "builds/macros.html" import BuildWatchers with context %} +{% from "builds/groups/macros.html" import BuildGroupList with context %} +{% from "events/macros.html" import EventList with context %} +{% from "jobs/macros.html" import JobList with context %} +{% from "repos/macros.html" import RepoList with context %} +{% from "sources/macros.html" import SourceCommitMessage with context %} + +{% block title %}{{ _("Build") }} - {{ build }}{% endblock %} {% block body %}
+ {% endif %}">
@@ -101,7 +110,7 @@
{# Watchers #} - {% module BuildWatchers(build) %} + {{ BuildWatchers(build) }}
@@ -123,14 +132,14 @@ {{ _("Clone") }} - {% end %} + {% endif %} {# Delete #} {% if build.can_be_deleted(current_user) %} {{ _("Delete Build") }} - {% end %} + {% endif %}
@@ -140,7 +149,7 @@
{{ _("Jobs")}}
- {% module JobsList(build.jobs, show_arch_only=True, show_packages=True) %} + {{ JobList(build.jobs, show_arch_only=True, show_packages=True) }} {# Bug? #} {% if build.has_failed() %} @@ -150,10 +159,10 @@ {{ _("File A Bug Report") }} - {% end %} + {% endif %}
- {% end %} + {% endif %} {# Repos #} {% if not build.is_test() %} @@ -162,15 +171,15 @@
{{ _("Repositories") }}
{% if build.repos %} - {% module ReposList(build.repos, build=build) %} - {% end %} + {{ RepoList(build.repos, build=build) }} + {% endif %}
{% if build.can_be_approved(current_user) %} {{ _("Approve") }} - {% end %} + {% endif %} {% if build.owner and build.has_perm(current_user) %} @@ -181,12 +190,12 @@ {{ _("Remove Build From Repository") }} - {% end %} - {% end %} + {% endif %} + {% endif %}
- {% end %} + {% endif %} {# Test Builds #} {% if build.disable_test_builds %} @@ -199,22 +208,22 @@ - {% elif build.test_builds %} + {% elif build.test_group %}
{{ _("Test Builds")}}
- {% module BuildGroupList(build.test_builds, limit=8) %} + {{ BuildGroupList(build.test_group, limit=8) }}
- {% end %} + {% endif %} {# Log #}
{{ _("Log") }}
- {% module EventsList(priority=4, build=build, show_build=False) %} + {{ EventList(priority=4, build=build, show_build=False) }}
@@ -222,7 +231,7 @@
- {% raw xsrf_form_html() %} + {{ xsrf_form_html() | safe }}
@@ -240,4 +249,4 @@
-{% end block %} +{% endblock %} diff --git a/src/templates/distros/index.html b/src/templates/distros/index.html index e0e7147b..26f7ad12 100644 --- a/src/templates/distros/index.html +++ b/src/templates/distros/index.html @@ -1,6 +1,8 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Distributions") }}{% end block %} +{% from "distros/macros.html" import DistroList with context %} + +{% block title %}{{ _("Distributions") }}{% endblock %} {% block body %}
@@ -23,7 +25,7 @@
- {% module DistrosList(distros) %} + {{ DistroList(distros) }} {% if current_user and current_user.is_admin() %}
@@ -31,7 +33,7 @@ {{ _("Create Distribution") }}
- {% end %} + {% endif %}
-{% end block %} +{% endblock %} diff --git a/src/templates/distros/macros.html b/src/templates/distros/macros.html new file mode 100644 index 00000000..6ee0c899 --- /dev/null +++ b/src/templates/distros/macros.html @@ -0,0 +1,37 @@ +{############################################################################## +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +##############################################################################} + +{% macro DistroList(distros) %} + +{% endmacro %} diff --git a/src/templates/distros/modules/list.html b/src/templates/distros/modules/list.html deleted file mode 100644 index 5dfb8b75..00000000 --- a/src/templates/distros/modules/list.html +++ /dev/null @@ -1,14 +0,0 @@ - diff --git a/src/templates/distros/releases/show.html b/src/templates/distros/releases/show.html index c5408f21..b36c5087 100644 --- a/src/templates/distros/releases/show.html +++ b/src/templates/distros/releases/show.html @@ -1,6 +1,8 @@ -{% extends "../../base.html" %} +{% extends "base.html" %} -{% block title %}{{ distro }} - {{ release }}{% end block %} +{% from "macros.html" import Text with context %} + +{% block title %}{{ distro }} - {{ release }}{% endblock %} {% block body %}
@@ -30,10 +32,14 @@

{{ release }}

{% if release.stable %} - {{ _("Stable Release") }} + + {{ _("Stable Release") }} + {% else %} - {{ _("Development Release") }} - {% end %} + + {{ _("Development Release") }} + + {% endif %}

@@ -47,7 +53,7 @@ {{ _("Publish") }} - {% end %} + {% endif %} {{ _("Edit") }} @@ -59,14 +65,14 @@
- {% end %} + {% endif %} {# Announcement #} {% if release.announcement %}
- {% module Text(release.announcement) %} + {{ Text(release.announcement) }}
- {% end %} -{% end block %} + {% endif %} +{% endblock %} diff --git a/src/templates/distros/show.html b/src/templates/distros/show.html index 7566cd6e..d697841e 100644 --- a/src/templates/distros/show.html +++ b/src/templates/distros/show.html @@ -1,6 +1,11 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Distributions") }} - {{ distro }}{% end block %} +{% from "macros.html" import Text with context %} +{% from "releases/macros.html" import ReleaseList with context %} +{% from "repos/macros.html" import RepoList with context %} +{% from "sources/macros.html" import SourceList with context %} + +{% block title %}{{ _("Distributions") }} - {{ distro }}{% endblock %} {% block body %}
@@ -18,16 +23,16 @@

- {{ distro }} {% if distro.codename %}‐ {{ distro.codename }}{% end %} + {{ distro }} {% if distro.codename %}‐ {{ distro.codename }}{% endif %}

{% if distro.slogan %}

{{ distro.slogan }}

- {% end %} + {% endif %} {% if distro.description %}
- {% module Text(distro.description) %} + {{ Text(distro.description) }}
@@ -39,14 +44,14 @@ {% for arch in distro.arches %} {{ arch }} - {% end %} + {% endfor %}

- {% end %} + {% endif %}
@@ -58,7 +63,7 @@
{{ _("Edit") }} - {% end %} + {% endif %}
@@ -68,13 +73,14 @@

{{ _("Latest Release") }}

- {% if latest_release %} - {% module ReleasesList([latest_release]) %} + {% set release = distro.get_latest_release() %} + {% if release %} + {{ ReleaseList([release]) }} {% else %}

{{ _("No release, yet") }}

- {% end %} + {% endif %}
@@ -86,25 +92,27 @@ {# Repositories #} + {% set repos = distro.get_repos() %} {% if repos %}

{{ _("Repositories") }}

- {% module ReposList(repos) %} + {{ RepoList(repos) }}
- {% end %} + {% endif %} {# Sources #} + {% set sources = distro.get_sources() %} {% if sources %}

{{ _("Sources") }}

- {% module SourcesList(sources) %} + {{ SourceList(sources) }}
- {% end %} -{% end block %} + {% endif %} +{% endblock %} diff --git a/src/templates/events/macros.html b/src/templates/events/macros.html new file mode 100644 index 00000000..3cab11ad --- /dev/null +++ b/src/templates/events/macros.html @@ -0,0 +1,321 @@ +{############################################################################## +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +##############################################################################} + +{% from "users/macros.html" import Avatar with context %} + +{# EventList #} +{% macro EventList(show_build=True, show_builder=True) %} + {# Fetch events #} + {% set events = backend.events(**kwargs) %} + + {% for event in events %} + {{ Event(event, show_build=show_build, show_builder=show_builder) }} + {% endfor %} +{% endmacro %} + +{# Event #} +{% macro Event(event, show_build=True, show_builder=True) %} +
+ {% if event.user %} +
+

+ {{ Avatar(event.user) }} +

+
+ {% else %} +
+ {% if event.type == "job-created" %} +

+ +

+ {% elif event.type == "job-dispatched" %} +

+ +

+ {% elif event.type == "job-retry" %} +

+ +

+ {% elif event.type == "build-finished" %} +

+ +

+ {% elif event.type == "job-finished" %} +

+ +

+ {% elif event.type in ("build-failed", "job-failed") %} +

+ +

+ {% elif event.type == "build-points" and event.points > 0 %} +

+ +

+ {% elif event.type == "build-points" and event.points < 0 %} +

+ +

+ {% elif event.type == "build-bug-added" %} +

+ +

+ {% elif event.type == "build-bug-removed" %} +

+ +

+ {% elif event.type == "test-builds-succeeded" %} +

+ +

+ {% elif event.type == "test-builds-failed" %} +

+ +

+ {% elif event.type == "repository-build-added" %} +

+ +

+ {% elif event.type == "repository-build-moved" %} +

+ +

+ {% elif event.type == "repository-build-removed" %} +

+ +

+ {% elif event.type == "builder-created" %} +

+ +

+ {% elif event.type == "builder-deleted" %} +

+ +

+ {% elif event.type == "mirror-created" %} +

+ +

+ {% elif event.type == "mirror-deleted" %} +

+ +

+ {% elif event.type == "mirror-online" %} +

+ +

+ {% elif event.type == "mirror-offline" %} +

+ +

+ {% elif event.type == "release-monitoring-created" %} +

+ +

+ {% elif event.type == "release-monitoring-deleted" %} +

+ +

+ {% elif event.type == "release-created" %} +

+ +

+ {% elif event.type == "release-deleted" %} +

+ +

+ {% elif event.type == "release-published" %} +

+ +

+ {% else %} +

+ +

+ {% endif %} +
+ {% endif %} + +
+

+ + {% if event.type == "build-comment" %} + {{ event.by_user }} + {% elif event.type == "build-created" %} + {{ _("Build Created") }} + {% elif event.type == "build-deleted" %} + {{ _("Build Deleted") }} + {% elif event.type == "build-failed" %} + {{ _("Build Failed") }} + {% elif event.type == "build-finished" %} + {{ _("Build Finished") }} + {% elif event.type == "build-deprecated" %} + {{ _("This build was deprecated") }} + {% elif event.type == "build-watcher-added" %} + {{ _("%s started watching this build") % event.user }} + {% elif event.type == "build-watcher-removed" %} + {{ _("%s stopped watching this build") % event.user }} + {% elif event.type == "build-bug-added" %} + {{ _("Bug #%s has been added") % event.bug }} + {% elif event.type == "build-bug-removed" %} + {{ _("Bug #%s has been removed") % event.bug }} + {% elif event.type == "build-points" %} + {% if event.points > 0 %} + {{ _("This build has gained one point", "This build has gained %(points)s points", event.points) % { "points" : event.points } }} + {% elif event.points < 0 %} + {{ _("This build has lost one point", "This build has lost %(points)s points", -event.points) % { "points" : -event.points } }} + {% endif %} + {% elif event.type == "test-builds-succeeded" %} + {{ _("All Test Builds Succeeded") }} + {% elif event.type == "test-builds-failed" %} + {{ _("Test Builds Failed") }} + {% elif event.type == "job-created" %} + {{ _("Job Created") }} + {% elif event.type == "job-failed" %} + {{ _("Job Failed") }} + {% elif event.type == "job-finished" %} + {{ _("Job Finished") }} + {% elif event.type == "job-aborted" %} + {{ _("Job Aborted") }} + {% elif event.type == "job-dispatched" %} + {{ _("Job Dispatched") }} + {% elif event.type == "job-retry" %} + {{ _("Job Restarted") }} + {% elif event.type == "builder-created" %} + {{ _("Builder Created") }} + {% elif event.type == "builder-deleted" %} + {{ _("Builder Deleted") }} + {% elif event.type == "mirror-created" %} + {{ _("Mirror Created") }} + {% elif event.type == "mirror-deleted" %} + {{ _("Mirror Deleted") }} + {% elif event.type == "mirror-online" %} + {{ _("Mirror Came Online") }} + {% elif event.type == "mirror-offline" %} + {{ _("Mirror Went Offline") }} + {% elif event.type == "repository-build-added" %} + {{ _("Build has been added to repository %s") % event.repository }} + {% elif event.type == "repository-build-moved" %} + {{ _("Build has been moved to repository %s") % event.repository }} + {% elif event.type == "repository-build-removed" %} + {{ _("Build has been removed from repository %s") % event.repository }} + {% elif event.type == "release-monitoring-created" %} + {{ _("Release Monitoring has been enabled for %s") % event.package_name }} + {% elif event.type == "release-monitoring-deleted" %} + {{ _("Release Monitoring has been disabled for %s") % event.package_name }} + {% elif event.type == "release-created" %} + {{ _("Release Created")}} + {% elif event.type == "release-deleted" %} + {{ _("Release Deleted") }} + {% elif event.type == "release-published" %} + {{ _("Release of %s") % event.release }} + {% else %} + {{ _("- Unknown Event %s -") % event.type }} + {% endif %} + + + {{ event.t | format_date(shorter=True) }} +

+ + {# Show the error message #} + {% if event.error %} +

+ {{ event.error }} +

+ {% endif %} + + {% block content %}{% endblock %} + +
+
+
+{% endmacro %} diff --git a/src/templates/events/modules/build-comment.html b/src/templates/events/modules/build-comment.html deleted file mode 100644 index 00c6fd47..00000000 --- a/src/templates/events/modules/build-comment.html +++ /dev/null @@ -1,5 +0,0 @@ -{% extends "user-message.html" %} - -{% block content %} - {% module Text(comment.text) %} -{% end %} diff --git a/src/templates/events/modules/list.html b/src/templates/events/modules/list.html deleted file mode 100644 index 6de54e76..00000000 --- a/src/templates/events/modules/list.html +++ /dev/null @@ -1,9 +0,0 @@ -{% for event in events %} - {% if event.build_comment %} - {% module EventBuildComment(event, show_build=show_build, show_builder=show_builder) %} - {% elif event.user %} - {% module EventUserMessage(event, show_build=show_build, show_builder=show_builder) %} - {% else %} - {% module EventSystemMessage(event, show_build=show_build, show_builder=show_builder) %} - {% end %} -{% end %} diff --git a/src/templates/events/modules/system-message.html b/src/templates/events/modules/system-message.html deleted file mode 100644 index 382320b3..00000000 --- a/src/templates/events/modules/system-message.html +++ /dev/null @@ -1,280 +0,0 @@ -
- {% block thumbnail %} -
- {% if event.type == "job-created" %} -

- -

- {% elif event.type == "job-dispatched" %} -

- -

- {% elif event.type == "job-retry" %} -

- -

- {% elif event.type == "build-finished" %} -

- -

- {% elif event.type == "job-finished" %} -

- -

- {% elif event.type in ("build-failed", "job-failed") %} -

- -

- {% elif event.type == "build-points" and event.points > 0 %} -

- -

- {% elif event.type == "build-points" and event.points < 0 %} -

- -

- {% elif event.type == "build-bug-added" %} -

- -

- {% elif event.type == "build-bug-removed" %} -

- -

- {% elif event.type == "test-builds-succeeded" %} -

- -

- {% elif event.type == "test-builds-failed" %} -

- -

- {% elif event.type == "repository-build-added" %} -

- -

- {% elif event.type == "repository-build-moved" %} -

- -

- {% elif event.type == "repository-build-removed" %} -

- -

- {% elif event.type == "builder-created" %} -

- -

- {% elif event.type == "builder-deleted" %} -

- -

- {% elif event.type == "mirror-created" %} -

- -

- {% elif event.type == "mirror-deleted" %} -

- -

- {% elif event.type == "mirror-online" %} -

- -

- {% elif event.type == "mirror-offline" %} -

- -

- {% elif event.type == "release-monitoring-created" %} -

- -

- {% elif event.type == "release-monitoring-deleted" %} -

- -

- {% elif event.type == "release-created" %} -

- -

- {% elif event.type == "release-deleted" %} -

- -

- {% elif event.type == "release-published" %} -

- -

- {% else %} -

- -

- {% end %} -
- {% end block %} - -
-

- - {% if event.type == "build-comment" %} - {{ event.by_user }} - {% elif event.type == "build-created" %} - {{ _("Build Created") }} - {% elif event.type == "build-deleted" %} - {{ _("Build Deleted") }} - {% elif event.type == "build-failed" %} - {{ _("Build Failed") }} - {% elif event.type == "build-finished" %} - {{ _("Build Finished") }} - {% elif event.type == "build-deprecated" %} - {{ _("This build was deprecated") }} - {% elif event.type == "build-watcher-added" %} - {{ _("%s started watching this build") % event.user }} - {% elif event.type == "build-watcher-removed" %} - {{ _("%s stopped watching this build") % event.user }} - {% elif event.type == "build-bug-added" %} - {{ _("Bug #%s has been added") % event.bug }} - {% elif event.type == "build-bug-removed" %} - {{ _("Bug #%s has been removed") % event.bug }} - {% elif event.type == "build-points" %} - {% if event.points > 0 %} - {{ _("This build has gained one point", "This build has gained %(points)s points", event.points) % { "points" : event.points } }} - {% elif event.points < 0 %} - {{ _("This build has lost one point", "This build has lost %(points)s points", -event.points) % { "points" : -event.points } }} - {% end %} - {% elif event.type == "test-builds-succeeded" %} - {{ _("All Test Builds Succeeded") }} - {% elif event.type == "test-builds-failed" %} - {{ _("Test Builds Failed") }} - {% elif event.type == "job-created" %} - {{ _("Job Created") }} - {% elif event.type == "job-failed" %} - {{ _("Job Failed") }} - {% elif event.type == "job-finished" %} - {{ _("Job Finished") }} - {% elif event.type == "job-aborted" %} - {{ _("Job Aborted") }} - {% elif event.type == "job-dispatched" %} - {{ _("Job Dispatched") }} - {% elif event.type == "job-retry" %} - {{ _("Job Restarted") }} - {% elif event.type == "builder-created" %} - {{ _("Builder Created") }} - {% elif event.type == "builder-deleted" %} - {{ _("Builder Deleted") }} - {% elif event.type == "mirror-created" %} - {{ _("Mirror Created") }} - {% elif event.type == "mirror-deleted" %} - {{ _("Mirror Deleted") }} - {% elif event.type == "mirror-online" %} - {{ _("Mirror Came Online") }} - {% elif event.type == "mirror-offline" %} - {{ _("Mirror Went Offline") }} - {% elif event.type == "repository-build-added" %} - {{ _("Build has been added to repository %s") % event.repository }} - {% elif event.type == "repository-build-moved" %} - {{ _("Build has been moved to repository %s") % event.repository }} - {% elif event.type == "repository-build-removed" %} - {{ _("Build has been removed from repository %s") % event.repository }} - {% elif event.type == "release-monitoring-created" %} - {{ _("Release Monitoring has been enabled for %s") % event.package_name }} - {% elif event.type == "release-monitoring-deleted" %} - {{ _("Release Monitoring has been disabled for %s") % event.package_name }} - {% elif event.type == "release-created" %} - {{ _("Release Created")}} - {% elif event.type == "release-deleted" %} - {{ _("Release Deleted") }} - {% elif event.type == "release-published" %} - {{ _("Release of %s") % event.release }} - {% else %} - {{ _("- Unknown Event %s -") % event.type }} - {% end %} - - - {{ locale.format_date(event.t, shorter=True) }} -

- - {# Show the error message #} - {% if event.error %} -

- {{ event.error }} -

- {% end %} - - {% block content %}{% end %} - - -
-
diff --git a/src/templates/events/modules/user-message.html b/src/templates/events/modules/user-message.html deleted file mode 100644 index b288d276..00000000 --- a/src/templates/events/modules/user-message.html +++ /dev/null @@ -1,12 +0,0 @@ -{% extends "system-message.html" %} - -{% block thumbnail %} - {% set user = event.user or event.by_user %} - -
-

- {{ user }} -

-
-{% end block %} diff --git a/src/templates/index.html b/src/templates/index.html index 5a57c4e6..10b8afbf 100644 --- a/src/templates/index.html +++ b/src/templates/index.html @@ -1,6 +1,8 @@ {% extends "base.html" %} -{% block title %}{{ _("Welcome!") }}{% end block %} +{% from "jobs/macros.html" import JobQueue with context %} + +{% block title %}{{ _("Welcome!") }}{% endblock %} {% block body %}
@@ -17,15 +19,15 @@
{# Show a status bar with running/finished jobs #} - {% if running_jobs or finished_jobs %} + {% if jobs %}
- {% end %} -{% end block %} + {% endif %} +{% endblock %} diff --git a/src/templates/jobs/macros.html b/src/templates/jobs/macros.html new file mode 100644 index 00000000..a5c34a83 --- /dev/null +++ b/src/templates/jobs/macros.html @@ -0,0 +1,231 @@ +{############################################################################## +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +##############################################################################} + +{% macro JobList(jobs, show_arch_only=False, show_packages=False) %} + {% for job in jobs | sort %} + {% set build = job.build %} + +
+
+
+
+ + +
+ {% if job.is_halted() %} + {{ _("Halted") }} + {% elif job.is_queued() %} + {{ _("Queued") }} + {% elif job.is_pending(installcheck=False) %} + {{ _("Dependency Problems") }} + {% elif job.is_pending() %} + {{ _("Pending") }} + {% elif job.is_running() %} + {{ _("Running...") }} + {% elif job.is_aborted() %} + {{ _("Aborted") }} + {% elif job.has_failed() %} + {{ _("Failed") }} + {% elif job.has_finished() %} + {{ _("Finished") }} + {% endif %} +
+
+
+ + {# Log #} + {% if job.is_running() %} +
+ {# {% module JobsLogStream(job, limit=5) %} #} +
+ + {# Dependency Issues #} + {% elif job.is_pending() and job.installcheck_succeeded is false %} +
+
    + {% for line in job.message.splitlines() %} +
  • {{ line }}
  • + {% endfor %} +
+
+ {% endif %} + + {# Show all packages that have been built #} + {% if show_packages and job.packages %} + {% for package in job.packages | sort %} + + + + + + + {{ package.name }} + + + + {{ package.size | filesizeformat(binary=True) }} + + + {% endfor %} + {% endif %} + + {% if job.is_running() or job.has_finished() %} +
+
+
+
+ {{ job.duration | format_time }} + + {# If the job is approaching its timeout, we will show a warning #} + {% if job.times_out_in and job.times_out_in <= datetime.timedelta(hours=1) %} + / {{ job.timeout | format_time }} + {% endif %} +
+
+ + {% set ns = namespace(has_log=false) %} + + {# Does at least one job have a log? #} + {% for j in job.all_jobs %} + {% if j.has_log() %} + {% set ns.has_log = true %} + {% endif %} + {% endfor %} + +
+
+
+ {% if ns.has_log or job.is_running() %} + {% if job.preceeding_jobs %} + + {% else %} + + {{ _("View Log") }} + + {% endif %} + {% endif %} + + {% if job.can_be_retried() %} + + {{ _("Retry") }} + + {% elif job.is_running() %} + + {{ _("Abort") }} + + {% endif %} +
+
+
+
+
+ {% endif %} +
+
+ {% endfor %} +{% endmacro %} + +{% macro JobQueue(jobs) %} + +{% endmacro %} diff --git a/src/templates/jobs/modules/list.html b/src/templates/jobs/modules/list.html deleted file mode 100644 index c11c0cf7..00000000 --- a/src/templates/jobs/modules/list.html +++ /dev/null @@ -1,160 +0,0 @@ -{% import datetime %} - -{% for job in sorted(jobs) %} - {% set build = job.build %} - -
-
-
-
-
-
- {% if show_arch_only %} - {{ job.arch }} - {% else %} - - {{ job }} - - {% end %} -
-
- -
- {% if job.is_halted() %} - {{ _("Halted") }} - {% elif job.is_queued() %} - {{ _("Queued") }} - {% elif job.is_pending(installcheck=False) %} - {{ _("Dependency Problems") }} - {% elif job.is_pending() %} - {{ _("Pending") }} - {% elif job.is_running() %} - {{ _("Running...") }} - {% elif job.is_aborted() %} - {{ _("Aborted") }} - {% elif job.has_failed() %} - {{ _("Failed") }} - {% elif job.has_finished() %} - {{ _("Finished") }} - {% end %} -
-
-
- - {# Log #} - {% if job.is_running() %} -
- {% module JobsLogStream(job, limit=5) %} -
- - {# Dependency Issues #} - {% elif job.is_pending() and job.installcheck_succeeded is False %} -
-
    - {% for line in job.message.splitlines() %} -
  • {{ line }}
  • - {% end %} -
-
- {% end %} - - {# Show all packages that have been built #} - {% if show_packages and job.packages %} - {% for package in job.packages %} - - - - - - - {{ package.name }} - - - - {{ format_size(package.size) }} - - - {% end %} - {% end %} - - {% if job.is_running() or job.has_finished() %} -
-
-
-
- {{ format_time(job.duration) }} - - {# If the job is approaching its timeout, we will show a warning #} - {% if job.times_out_in and job.times_out_in <= datetime.timedelta(hours=1) %} - / {{ format_time(job.timeout) }} - {% end %} -
-
- -
-
-
- {% if any((j.has_log() for j in job.all_jobs)) or job.is_running() %} - {% if job.preceeding_jobs %} - - {% else %} - - {{ _("View Log") }} - - {% end %} - {% end %} - - {% if job.can_be_retried() %} - - {{ _("Retry") }} - - {% elif job.is_running() %} - - {{ _("Abort") }} - - {% end %} -
-
-
-
-
- {% end %} -
-
-{% end %} diff --git a/src/templates/jobs/modules/queue.html b/src/templates/jobs/modules/queue.html deleted file mode 100644 index 2f6e6440..00000000 --- a/src/templates/jobs/modules/queue.html +++ /dev/null @@ -1,38 +0,0 @@ - diff --git a/src/templates/log.html b/src/templates/log.html index 6d5162f4..38dd2182 100644 --- a/src/templates/log.html +++ b/src/templates/log.html @@ -1,6 +1,9 @@ {% extends "base.html" %} -{% block title %}{{ _("Log") }}{% end block %} +{# Load events macros #} +{% from "events/macros.html" import EventList with context %} + +{% block title %}{{ _("Log") }}{% endblock %} {% block body %}
@@ -16,13 +19,13 @@

{{ _("Log") }}

- {% module EventsList(priority=priority, offset=offset, limit=limit, - builder=builder, user=user) %} + {{ EventList(priority=priority, offset=offset, limit=limit, + builder=builder, user=user) }}
-{% end %} +{% endblock %} diff --git a/src/web/bugs.py b/src/templates/macros.html similarity index 74% rename from src/web/bugs.py rename to src/templates/macros.html index 4b065a7a..7a40613c 100644 --- a/src/web/bugs.py +++ b/src/templates/macros.html @@ -1,8 +1,7 @@ -#!/usr/bin/python3 -############################################################################### +{############################################################################## # # # Pakfire - The IPFire package management system # -# Copyright (C) 2022 Pakfire development team # +# Copyright (C) 2025 Pakfire development team # # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU General Public License as published by # @@ -17,12 +16,20 @@ # You should have received a copy of the GNU General Public License # # along with this program. If not, see . # # # -############################################################################### +##############################################################################} -import tornado.web +{% macro Text(text, pre=False) %} + {% if text %} +
+ {% if pre %} +
{{ text }}
+ {% else %} + {{ text | markdown | safe }} + {% endif %} +
+ {% endif %} +{% endmacro %} -from . import ui_modules - -class ListModule(ui_modules.UIModule): - def render(self, bugs): - return self.render_string("bugs/modules/list.html", bugs=bugs) +{% macro Highlight(text, filename=None) %} + {{ text | highlight(filename=filename) | safe }} +{% endmacro %} diff --git a/src/templates/mirrors/index.html b/src/templates/mirrors/index.html index 96e82217..f6ac4362 100644 --- a/src/templates/mirrors/index.html +++ b/src/templates/mirrors/index.html @@ -1,6 +1,8 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Mirrors") }}{% end block %} +{% from "mirrors/macros.html" import MirrorList with context %} + +{% block title %}{{ _("Mirrors") }}{% endblock %} {% block body %}
@@ -23,7 +25,9 @@
- {% module MirrorsList(mirrors) %} +
+ {{ MirrorList(mirrors) }} +
{% if current_user and current_user.is_admin() %}
@@ -31,7 +35,7 @@ {{ _("Create Mirror") }}
- {% end %} + {% endif %}
-{% end block %} +{% endblock %} diff --git a/src/templates/mirrors/macros.html b/src/templates/mirrors/macros.html new file mode 100644 index 00000000..c7a2d5b6 --- /dev/null +++ b/src/templates/mirrors/macros.html @@ -0,0 +1,35 @@ +{############################################################################## +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +##############################################################################} + +{% macro MirrorList(mirrors) %} + {% for mirror in mirrors %} +
+
+ + {{ mirror }} + +
+ + {% if mirror.owner %} +
{{ mirror.owner }}
+ {% endif %} +
+ {% endfor %} +{% endmacro %} diff --git a/src/templates/mirrors/modules/list.html b/src/templates/mirrors/modules/list.html deleted file mode 100644 index edccfd4f..00000000 --- a/src/templates/mirrors/modules/list.html +++ /dev/null @@ -1,15 +0,0 @@ -
- {% for mirror in mirrors %} -
-
- - {{ mirror }} - -
- - {% if mirror.owner %} -
{{ mirror.owner }}
- {% end %} -
- {% end %} -
diff --git a/src/templates/mirrors/show.html b/src/templates/mirrors/show.html index 7e70ac6d..3c1f29ec 100644 --- a/src/templates/mirrors/show.html +++ b/src/templates/mirrors/show.html @@ -1,6 +1,9 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Mirrors") }} - {{ mirror }}{% end block %} +{% from "macros.html" import Text with context %} +{% from "events/macros.html" import EventList with context %} + +{% block title %}{{ _("Mirrors") }} - {{ mirror }}{% endblock %} {% block body %}
@@ -21,20 +24,20 @@ {% if mirror.owner %}

{{ mirror.owner }}

- {% end %} + {% endif %}

{{ _("Status") }}

- {% if mirror.last_check_success is True %} + {% if mirror.last_check_success is true %} {{ _("Online") }} - {% elif mirror.last_check_success is False %} + {% elif mirror.last_check_success is false %} {{ _("Offline") }} {% else %} {{ _("Pending") }} - {% end %} + {% endif %}

@@ -49,7 +52,7 @@

- {% end %} + {% endif %} {# Country Code #} {% if mirror.country_code %} @@ -61,7 +64,7 @@

- {% end %} + {% endif %} {# Last Check #} {% if mirror.last_check_at %} @@ -73,11 +76,11 @@

- {% end %} + {% endif %} {# Uptime #} {% set uptime = mirror.get_uptime_since(datetime.timedelta(days=30)) %} - {% if uptime is not None %} + {% if uptime is not none %}

{{ _("Uptime In The Last 30 Days") }}

@@ -88,11 +91,11 @@ {{ "%.4f%%" % (uptime * 100) }} {% else %} {{ "%.4f%%" % (uptime * 100) }} - {% end %} + {% endif %}

- {% end %} + {% endif %} @@ -102,7 +105,7 @@
{# Errors #} - {% if mirror.last_check_success is False %} + {% if mirror.last_check_success is false %}
@@ -113,20 +116,20 @@
- {% end %} + {% endif %} {# Notes #} {% if mirror.notes %}
- {% module Text(mirror.notes) %} + {{ Text(mirror.notes) }}
- {% end %} + {% endif %}
- {% raw xsrf_form_html() %} + {{ xsrf_form_html() | safe }}
- {% end %} + {% endif %} {# Log #}
{{ _("Log") }}
- {% module EventsList(priority=4, mirror=mirror) %} + {{ EventList(priority=4, mirror=mirror) }}
-{% end block %} +{% endblock %} diff --git a/src/templates/modules/commit-message.html b/src/templates/modules/commit-message.html deleted file mode 100644 index 8b905a19..00000000 --- a/src/templates/modules/commit-message.html +++ /dev/null @@ -1,3 +0,0 @@ -
{{ commit.subject }}
- -{% module Text(commit.message) %} diff --git a/src/templates/modules/link-to-user.html b/src/templates/modules/link-to-user.html deleted file mode 100644 index 08460a19..00000000 --- a/src/templates/modules/link-to-user.html +++ /dev/null @@ -1,16 +0,0 @@ -{% if isinstance(user, users.User) %} - {% if user.is_admin() %} - - {% else %} - - {% end %} -
- {{ user.realname }} - -{% elif user %} - {% import email.utils %} - {% set name, email_address = email.utils.parseaddr(user) %} - - - {{ name or email_address }} -{% end %} \ No newline at end of file diff --git a/src/templates/modules/packages-files-table.html b/src/templates/modules/packages-files-table.html deleted file mode 100644 index 76085f82..00000000 --- a/src/templates/modules/packages-files-table.html +++ /dev/null @@ -1,47 +0,0 @@ -{% import stat %} - - - - {% for file in filelist %} - {% set mode = stat.filemode(file.mode) %} - {% set owner = "%6s:%-6s" % (file.uname, file.gname) %} - {% set size = "%6s" % ("-" if file.size is None else format_size(file.size)) %} - - - - - - - - - - - {% end %} - -
- {{ mode }} - - {{ owner }} - - {{ size }} - - {{ file.path }} - -
- {% if file.is_viewable() %} - - - - - - {% end %} - - {% if file.is_downloadable() %} - - - - - - {% end %} - -
diff --git a/src/templates/modules/text.html b/src/templates/modules/text.html deleted file mode 100644 index 1be46353..00000000 --- a/src/templates/modules/text.html +++ /dev/null @@ -1,9 +0,0 @@ -{% if text %} -
- {% if pre %} -
{{ text }}
- {% else %} - {% raw text %} - {% end %} -
-{% end %} diff --git a/src/templates/monitorings/macros.html b/src/templates/monitorings/macros.html new file mode 100644 index 00000000..6e87708b --- /dev/null +++ b/src/templates/monitorings/macros.html @@ -0,0 +1,47 @@ +{############################################################################## +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +##############################################################################} + +{% macro MonitoringReleaseList(releases, show_empty=True) %} + {# Show a message if we don't have any releases, yet #} + {% if show_empty and not releases %} +

+ {{ _("No new releases have been found, yet") }} +

+ + {# Show releases #} + {% elif releases %} + {% for release in releases %} +
+
+
+ {{ release }} + + + {{ _("Bug #%s") % release.bug_id }} + +
+
{{ release.created_at | format_date(shorter=True) }}
+ + {# XXX Show build and build status #} +
+
+ {% endfor %} + {% endif %} +{% endmacro %} diff --git a/src/templates/monitorings/modules/releases-list.html b/src/templates/monitorings/modules/releases-list.html deleted file mode 100644 index a18f69fb..00000000 --- a/src/templates/monitorings/modules/releases-list.html +++ /dev/null @@ -1,25 +0,0 @@ -{# Show a message if we don't have any releases, yet #} -{% if show_empty and not releases %} -

- {{ _("No new releases have been found, yet") }} -

- -{# Show releases #} -{% elif releases %} - {% for release in releases %} -
-
-
- {{ release }} - - - {{ _("Bug #%s") % release.bug_id }} - -
-
{{ locale.format_date(release.created_at, shorter=True) }}
- - {# XXX Show build and build status #} -
-
- {% end %} -{% end %} diff --git a/src/templates/monitorings/show.html b/src/templates/monitorings/show.html index eaeed8a4..8d4c3b17 100644 --- a/src/templates/monitorings/show.html +++ b/src/templates/monitorings/show.html @@ -1,6 +1,8 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Release Monitoring") }} - {{ monitoring }}{% end block %} +{% from "monitorings/macros.html" import MonitoringReleaseList with context %} + +{% block title %}{{ _("Release Monitoring") }} - {{ monitoring }}{% endblock %} {% block body %}
@@ -48,11 +50,11 @@ {{ _("Unknown: %s") % monitoring.follow }} - {% end %} + {% endif %}

- {% end %} + {% endif %} {% if monitoring.latest_release %}
@@ -63,7 +65,7 @@

- {% end %} + {% endif %} {% if monitoring.latest_build %}
@@ -76,7 +78,7 @@

- {% end %} + {% endif %}
@@ -86,7 +88,7 @@ {{ locale.format_date(monitoring.last_check_at, shorter=True) }} {% else %} {{ _("N/A") }} - {% end %} + {% endif %}

@@ -100,7 +102,7 @@
- {% raw xsrf_form_html() %} + {{ xsrf_form_html() | safe }}
- {% end %} + {% endif %} {# List Releases #}
- {% module MonitoringsReleasesList(monitoring.releases) %} + {{ MonitoringReleaseList(monitoring.get_releases()) }}
-{% end block %} +{% endblock %} diff --git a/src/templates/packages/index.html b/src/templates/packages/index.html index fd649e83..ae658786 100644 --- a/src/templates/packages/index.html +++ b/src/templates/packages/index.html @@ -1,6 +1,6 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Packages") }}{% end block %} +{% block title %}{{ _("Packages") }}{% endblock %} {% block body %}
@@ -19,28 +19,20 @@ - {% for letter, pkgs in sorted(packages.items()) %} + {% for pkg in packages %} - + + - - {% for package in pkgs %} - - - - - {% end %} - {% end %} + {% endfor %}
- {{ letter.upper() }} - + + {{ pkg.name }} + + + {{ pkg.summary }} +
- - {{ package.name }} - - - {{ package.summary }} -
-{% end block %} +{% endblock %} diff --git a/src/templates/packages/macros.html b/src/templates/packages/macros.html new file mode 100644 index 00000000..7868e461 --- /dev/null +++ b/src/templates/packages/macros.html @@ -0,0 +1,205 @@ +{############################################################################## +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +##############################################################################} + +{% from "macros.html" import Text with context %} + +{% macro PackageInfo(package, show_evr=False) %} +
+

+ {% if show_evr %} + {{ package }} + {% else %} + {{ package.name }} + {% endif %} +

+ + {% if package.summary %} +
+ {{ package.summary }} +
+ {% endif %} + +
+ {{ Text(package.description) }} +
+ +
+ +
+
+{% endmacro %} + +{% macro _PackageDeps(title, deps) %} +
+

{{ title }}

+ +
    + {% for dep in deps %} +
  • + {{ dep }} +
  • + {% endfor %} +
+
+{% endmacro %} + +{% macro PackageDependencies(package) %} +
{{ _("Dependencies") }}
+ +
+ {% if package.provides %} + {{ _PackageDeps(_("Provides"), package.provides) }} + {% endif %} + + {% if package.prerequires %} + {{ _PackageDeps(_("Pre-Requires"), package.prerequires) }} + {% endif %} + + {% if package.requires %} + {{ _PackageDeps(_("Requires"), package.requires) }} + {% endif %} + + {% if package.conflicts %} + {{ _PackageDeps(_("Conflicts"), package.conflicts) }} + {% endif %} + + {% if package.obsoletes %} + {{ _PackageDeps(_("Obsoletes"), package.obsoletes) }} + {% endif %} + + {% if package.recommends %} + {{ _PackageDeps(_("Recommends"), package.recommends) }} + {% endif %} + + {% if package.suggests %} + {{ _PackageDeps(_("Suggests"), package.suggests) }} + {% endif %} +
+{% endmacro %} + +{% macro PackageFilelist(package, filelist=None) %} + {% if filelist is none %} + {% set filelist = package.get_files() %} + {% endif %} + + + + {% for file in filelist %} + + + + + + + + + + {% endfor %} + +
+ {{ file.mode | file_mode }} + + {{ "%6s:%-6s" % (file.uname, file.gname) }} + + {% if file.size %} + {{ file.size | filesizeformat(binary=True) }} + {% else %} + ‐ + {% endif %} + + {{ file.path }} + +
+ {% if file.is_viewable() %} + + + + + + {% endif %} + + {% if file.is_downloadable() %} + + + + + + {% endif %} + +
+{% endmacro %} diff --git a/src/templates/packages/modules/dependencies.html b/src/templates/packages/modules/dependencies.html deleted file mode 100644 index 44f57467..00000000 --- a/src/templates/packages/modules/dependencies.html +++ /dev/null @@ -1,19 +0,0 @@ -
{{ _("Dependencies") }}
- -
- {% for dep in deps %} - {% if deps[dep] %} -
-

{{ dep }}

- -
    - {% for line in deps[dep] %} -
  • - {{ line }} -
  • - {% end %} -
-
- {% end %} - {% end %} -
diff --git a/src/templates/packages/modules/info.html b/src/templates/packages/modules/info.html deleted file mode 100644 index 7bbd4217..00000000 --- a/src/templates/packages/modules/info.html +++ /dev/null @@ -1,83 +0,0 @@ -
-

- {% if show_evr %} - {{ package }} - {% else %} - {{ package.name }} - {% end %} -

- - {% if package.summary %} -
- {{ package.summary }} -
- {% end %} - -
- {% module Text(package.description) %} -
- -
- -
-
diff --git a/src/templates/packages/name/builds.html b/src/templates/packages/name/builds.html index e79108c1..8a53df2d 100644 --- a/src/templates/packages/name/builds.html +++ b/src/templates/packages/name/builds.html @@ -1,6 +1,8 @@ -{% extends "../../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Package") }} - {{ package.name }}{% end block %} +{% from "builds/macros.html" import BuildList with context %} + +{% block title %}{{ _("Package") }} - {{ package.name }}{% endblock %} {% block body %}
@@ -32,15 +34,15 @@

{{ _("Release Builds") }}

- {% for distro in sorted(distros, reverse=True) %} + {% for distro in distros | sort(reverse=True) %}
{{ distro }}
- {% module BuildsList(distros[distro], limit=limit, - more_url=make_url("/builds", name=package.name)) %} - {% end %} + {{ BuildList(distros[distro], limit=limit, + more_url=make_url("/builds", name=package.name)) }} + {% endfor %}
- {% end %} + {% endif %} {# Scratch Builds #} {% if users %} @@ -51,10 +53,10 @@ {% for user in users %}
{{ user }}
- {% module BuildsList(users[user], limit=limit, - more_url=make_url("/builds", name=package.name, user=user.name)) %} - {% end %} + {{ BuildList(users[user], limit=limit, + more_url=make_url("/builds", name=package.name, user=user.name)) }} + {% endfor %} - {% end %} -{% end block %} + {% endif %} +{% endblock %} diff --git a/src/templates/packages/name/index.html b/src/templates/packages/name/index.html index e349e5e0..1a9e563e 100644 --- a/src/templates/packages/name/index.html +++ b/src/templates/packages/name/index.html @@ -1,6 +1,9 @@ -{% extends "../../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Package") }} - {{ package.name }}{% end block %} +{% from "bugs/macros.html" import BugList with context %} +{% from "packages/macros.html" import PackageInfo with context %} + +{% block title %}{{ _("Package") }} - {{ package.name }}{% endblock %} {% block body %}
@@ -17,7 +20,7 @@ - {% module PackageInfo(package) %} + {{ PackageInfo(package) }}
@@ -32,7 +35,7 @@

{{ distro }}

{% for repo in distro.repos %} - {% set builds = repo.get_builds_by_name(package.name) %} + {% set builds = repo.get_builds(name=package.name) %} {% if builds %}

{{ repo }}

@@ -46,10 +49,10 @@ {{ build }} - {% end %} + {% endfor %} - {% end %} - {% end %} + {% endif %} + {% endfor %} {% if distro in scratch_builds %}

{{ _("My Scratch Builds") }}

@@ -63,9 +66,9 @@ {{ build }} - {% end %} + {% endfor %} - {% end %} + {% endif %} {# Release Monitoring #} @@ -83,10 +86,10 @@ {{ _("Enable Release Monitoring") }} - {% end %} + {% endif %} - {% end %} - {% end %} + {% endif %} + {% endfor %} + {# Bugs #} {% if bugs %}
- {% end %} -{% end block %} + {% endif %} +{% endblock %} diff --git a/src/templates/packages/show.html b/src/templates/packages/show.html index e35ae271..96047007 100644 --- a/src/templates/packages/show.html +++ b/src/templates/packages/show.html @@ -1,6 +1,8 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Packages") }} - {{ package }}{% end block %} +{% from "packages/macros.html" import PackageInfo, PackageDependencies, PackageFilelist with context %} + +{% block title %}{{ _("Packages") }} - {{ package }}{% endblock %} {% block body %}
@@ -20,12 +22,12 @@ - {% module PackageInfo(package, show_evr=True) %} + {{ PackageInfo(package, show_evr=True) }} {# XXX add reference to commit for source packages #}
- {% module PackageDependencies(package) %} + {{ PackageDependencies(package) }}
@@ -56,23 +58,21 @@ {{ _("Build from %s") % locale.format_date(build.created_at) }} - {% end %} + {% endfor %} - {% end %} + {% endif %}
{# Filelist #} - {% if package.files %} -
-
-
{{ _("Filelist") }}
+
+
+
{{ _("Filelist") }}
- {% module PackageFilesTable(package, package.files) %} -
-
- {% end %} -{% end block %} + {{ PackageFilelist(package) }} +
+
+{% endblock %} diff --git a/src/templates/packages/view-file.html b/src/templates/packages/view-file.html index a3a6eafa..7aa6cdfe 100644 --- a/src/templates/packages/view-file.html +++ b/src/templates/packages/view-file.html @@ -1,6 +1,8 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ package }} - {{ file }}{% end block %} +{% from "macros.html" import Highlight with context %} + +{% block title %}{{ package }} - {{ file }}{% endblock %} {% block body %}
@@ -22,14 +24,14 @@

{{ file }}

- {% module Highlight(payload, filename=file.path) %} + {{ Highlight(payload, filename=file.path) }}
-{% end block %} +{% endblock %} diff --git a/src/templates/releases/macros.html b/src/templates/releases/macros.html new file mode 100644 index 00000000..793ea846 --- /dev/null +++ b/src/templates/releases/macros.html @@ -0,0 +1,36 @@ +{% macro ReleaseList(releases) %} + +{% endmacro %} diff --git a/src/templates/repos/builds.html b/src/templates/repos/builds.html index f9875d3b..40a9a442 100644 --- a/src/templates/repos/builds.html +++ b/src/templates/repos/builds.html @@ -1,6 +1,6 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Repository") }} - {{ repo }} - {{ _("Builds") }}{% end block %} +{% block title %}{{ _("Repository") }} - {{ repo }} - {{ _("Builds") }}{% endblock %} {% block body %}
@@ -27,7 +27,7 @@
  • {{ distro }}
  • - {% end %} + {% endif %}
  • {{ _("Repositories") }}
  • @@ -42,28 +42,27 @@

    {{ distro }} - {{ repo }} - {{ _("Builds") }}

    - {% set builds = group(repo.builds, lambda build: build.pkg.name) %} - - {% for name in sorted(builds) %} + {% for name, items in repo.get_builds() | sort | groupby("pkg.name") %} + - {% end %} + {% endfor %}
    {{ name }} - {% for build in reversed(builds[name]) %} + {% for build in items | reverse %}

    {{ build.pkg.evr }}

    - {% end %} + {% endfor %}
    -{% end block %} +{% endblock %} diff --git a/src/templates/repos/macros.html b/src/templates/repos/macros.html new file mode 100644 index 00000000..438ea525 --- /dev/null +++ b/src/templates/repos/macros.html @@ -0,0 +1,56 @@ +{############################################################################## +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +##############################################################################} + +{% macro RepoList(repos, build) %} + +{% endmacro %} diff --git a/src/templates/repos/modules/list.html b/src/templates/repos/modules/list.html deleted file mode 100644 index a8c70082..00000000 --- a/src/templates/repos/modules/list.html +++ /dev/null @@ -1,34 +0,0 @@ - diff --git a/src/templates/repos/show.html b/src/templates/repos/show.html index 2830ea3a..9365009c 100644 --- a/src/templates/repos/show.html +++ b/src/templates/repos/show.html @@ -1,6 +1,9 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Repository") }} - {{ repo }}{% end block %} +{% from "macros.html" import Text with context %} +{% from "builds/macros.html" import BuildList with context %} + +{% block title %}{{ _("Repository") }} - {{ repo }}{% endblock %} {% block body %}
    @@ -28,7 +31,7 @@
  • {{ distro }}
  • - {% end %} + {% endif %}
  • {{ _("Repositories") }}
  • @@ -43,9 +46,9 @@ {# Description #} {% if repo.description %}
    - {% module Text(repo.description) %} + {{ Text(repo.description) }}
    - {% end %} + {% endif %}
    @@ -96,21 +99,23 @@ {{ _("Delete") }} - {% end %} + {% endif %}
    - {% set builds = repo.get_recent_builds(limit=6) %} - {# Builds #} + {% set builds = repo.get_builds(limit=10) %} + {% if builds %}
    -

    {{ _("Recently Added Builds") }}

    +

    + {{ _("Recently Added Builds") }} +

    - {% module BuildsList(builds) %} + {{ BuildList(builds) }}
    - {% end %} -{% end block %} + {% endif %} +{% endblock %} diff --git a/src/templates/search.html b/src/templates/search.html index af9afa1a..9bed0f1e 100644 --- a/src/templates/search.html +++ b/src/templates/search.html @@ -1,6 +1,6 @@ {% extends "base.html" %} -{% block title %}{{ _("Search") }}{% if q %} - {{ q }}{% end %}{% end block %} +{% block title %}{{ _("Search") }}{% if q %} - {{ q }}{% endif %}{% endblock %} {% block body %}
    @@ -22,13 +22,13 @@
    {{ _("We could not find anything for '%s'") % q }}
    - {% end %} + {% endif %}
    @@ -48,7 +48,7 @@

    - {{ _("Packages") }} {{ len(packages) }} + {{ _("Packages") }} {{ packages | count }}

    @@ -62,19 +62,19 @@ {{ package.summary }} - {% end %} + {% endfor %}
    - {% end %} + {% endif %} {# Files #} {% if files %}

    - {{ _("Files") }} {{ len(files) }} + {{ _("Files") }} {{ files | count }}

    @@ -88,19 +88,19 @@ {{ file }} - {% end %} + {% endfor %}
    - {% end %} + {% endif %} {# Users #} {% if users %}

    - {{ _("Users") }} {{ len(users) }} + {{ _("Users") }} {{ users | count }}

    @@ -111,10 +111,10 @@ {{ user }} - {% end %} + {% endfor %}
    - {% end %} -{% end block %} + {% endif %} +{% endblock %} diff --git a/src/templates/sources/commit.html b/src/templates/sources/commit.html index 20cd8e2d..ccafe427 100644 --- a/src/templates/sources/commit.html +++ b/src/templates/sources/commit.html @@ -1,10 +1,13 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Source") }} - {{ source }}{% end block %} +{% from "macros.html" import Text with context %} +{% from "bugs/macros.html" import BugList with context %} +{% from "builds/macros.html" import BuildList with context %} +{% from "users/macros.html" import LinkToUser with context %} -{% block body %} - {% from pakfire.buildservice.users import User %} +{% block title %}{{ _("Source") }} - {{ source }}{% endblock %} +{% block body %}
    @@ -30,7 +33,7 @@
  • {{ source.distro }}
  • - {% end %} + {% endif %}
  • {{ _("Repositories") }}
  • @@ -54,27 +57,25 @@
    - {{ locale.format_date(commit.date, shorter=True) }} + {{ commit.date | format_date(shorter=True) }}
    - {% if isinstance(commit.author, User) %} - - {{ commit.author }} - - {% end %} + {{ LinkToUser(email=commit.author) }}
    - {% module Text(commit.message, pre=True) %} + {{ Text(commit.message, pre=True) }} {% if commit.tags %}
      {% for tag in commit.tags %} {# Skip Fixes: #} - {% if tag == "Fixes" %}{% continue %}{% end %} + {% if tag == "Fixes" %} + {% continue %} + {% endif %}
    • @@ -84,28 +85,20 @@ {{ tag }} {% else %} {{ tag }} - {% end %} + {% endif %}
      {% for user in commit.tags[tag] %} - {% set user = backend.users.get_by_email(user) or user %} -
      - {% if isinstance(user, User) %} - - {{ user }} - - {% else %} - {{ user }} - {% end %} + {{ LinkToUser(email=user) }}
      - {% end %} + {% endfor %}
    - {% end %} + {% endfor %} - {% end %} + {% endif %}
    @@ -115,18 +108,18 @@

    {{ _("Fixed Bugs") }}

    - {% module BugsList(fixed_bugs) %} + {{ BugList(fixed_bugs) }}
    - {% end %} + {% endif %} {% if commit.builds %}

    {{ _("Builds") }}

    - {% module BuildsList(commit.builds, shorter=True) %} + {{ BuildList(commit.builds) }}
    - {% end %} -{% end block %} + {% endif %} +{% endblock %} diff --git a/src/templates/sources/macros.html b/src/templates/sources/macros.html new file mode 100644 index 00000000..f1f45858 --- /dev/null +++ b/src/templates/sources/macros.html @@ -0,0 +1,58 @@ +{############################################################################## +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +##############################################################################} + +{% from "macros.html" import Text with context %} + +{% macro SourceList(sources) %} +
    + +
    +{% endmacro %} + +{% macro SourceCommitList(commits) %} + {% for commit in commits %} + {% set source = commit.source %} + + + {% endfor %} +{% endmacro %} + +{% macro SourceCommitMessage(commit) %} +
    + {{ commit.subject }} +
    + + {{ Text(commit.message, pre=True) }} +{% endmacro %} diff --git a/src/templates/sources/modules/commits.html b/src/templates/sources/modules/commits.html deleted file mode 100644 index 3d3973e2..00000000 --- a/src/templates/sources/modules/commits.html +++ /dev/null @@ -1,12 +0,0 @@ -{% for commit in commits %} - {% set source = commit.source %} - - -{% end %} diff --git a/src/templates/sources/modules/list.html b/src/templates/sources/modules/list.html deleted file mode 100644 index f160051f..00000000 --- a/src/templates/sources/modules/list.html +++ /dev/null @@ -1,11 +0,0 @@ -
    - -
    diff --git a/src/templates/sources/show.html b/src/templates/sources/show.html index 3e729ff0..476db8f1 100644 --- a/src/templates/sources/show.html +++ b/src/templates/sources/show.html @@ -1,6 +1,8 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Source") }} - {{ source }}{% end block %} +{% from "sources/macros.html" import SourceCommitList with context %} + +{% block title %}{{ _("Source") }} - {{ source }}{% endblock %} {% block body %}
    @@ -28,7 +30,7 @@
  • {{ source.distro }}
  • - {% end %} + {% endif %}
  • {{ _("Repositories") }}
  • @@ -50,13 +52,16 @@ {% if source.last_fetched_at %}
    -

    {{ _("Last Check") }}

    +

    + {{ _("Last Check") }} +

    +

    - {{ locale.format_date(source.last_fetched_at, shorter=True) }} + {{ source.last_fetched_at | format_date(shorter=True) }}

    - {% end %} + {% endif %} @@ -64,11 +69,13 @@ {% set commits = source.get_commits(limit=10) %} -
    -
    -

    {{ _("Commits") }}

    + {% if commits %} +
    +
    +

    {{ _("Commits") }}

    - {% module CommitsList(commits) %} -
    -
    -{% end block %} + {{ SourceCommitList(commits) }} +
    +
    + {% endif %} +{% endblock %} diff --git a/src/templates/users/index.html b/src/templates/users/index.html index 270381cf..eaa09a38 100644 --- a/src/templates/users/index.html +++ b/src/templates/users/index.html @@ -1,6 +1,8 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Users") }}{% end block %} +{% from "users/macros.html" import UserList with context %} + +{% block title %}{{ _("Users") }}{% endblock %} {% block body %}
    @@ -17,7 +19,7 @@

    {{ _("Users") }}

    - {% module UsersList(users) %} + {{ UserList(users) }}
    -{% end block %} +{% endblock %} diff --git a/src/templates/users/macros.html b/src/templates/users/macros.html new file mode 100644 index 00000000..6c5b6db7 --- /dev/null +++ b/src/templates/users/macros.html @@ -0,0 +1,112 @@ +{############################################################################## +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +##############################################################################} + +{% macro Avatar(user, size=None) %} + {{ user }} +{% endmacro %} + +{% macro LinkToUser(user=None, email=None) %} + {# Find the user if we don't have one #} + {% if user is none %} + {% set user = backend.users.get_by_email(email) %} + {% endif %} + + {# If we have a user, we link to the profile #} + {% if user %} + {% if user.is_admin() %} + + {% else %} + + {% endif %} + + + {{ user.realname }} + + + {# Otherwise we add a link to the email address #} + {% else %} + + + + {{ email | email_name }} + + {% endif %} +{% endmacro %} + +{% macro UserList(users) %} + {% for rank, user in users | enumerate %} +
+			{{ Avatar(user, size=256) }}
+			{{ user }}
+			#{{ rank + 1 }}
+			{{ user.name }}
    + {% endfor %} +{% endmacro %} diff --git a/src/templates/users/modules/list.html b/src/templates/users/modules/list.html deleted file mode 100644 index e9996cdb..00000000 --- a/src/templates/users/modules/list.html +++ /dev/null @@ -1,46 +0,0 @@ -{% for rank, user in enumerate(users) %} -
-		{{ user }}
-		{{ user }}
-		#{{ rank + 1 }}
-		{{ user.name }}
    -{% end %} diff --git a/src/templates/users/show.html b/src/templates/users/show.html index 0fff9e4c..a70311ba 100644 --- a/src/templates/users/show.html +++ b/src/templates/users/show.html @@ -1,6 +1,8 @@ -{% extends "../base.html" %} +{% extends "base.html" %} -{% block title %}{{ _("Users") }} - {{ user }}{% end block %} +{% from "repos/macros.html" import RepoList with context %} + +{% block title %}{{ _("Users") }} - {{ user }}{% endblock %} {% block body %}
@@ -36,11 +38,11 @@
 			{{ _("Total Build Time") }}
-			{{ format_time(user.total_build_time) }}
+			{{ user.total_build_time | format_time }}
-	{% end %}
+	{% endif %}
 	{# Quotas #}
 	{% if user.has_perm(current_user) %}
@@ -49,36 +51,35 @@
 		{% if user.daily_build_quota %}
 				{{ _("Daily Build Quota") }}
-				{{ format_time(user.used_daily_build_quota) }}/{{ format_time(user.daily_build_quota) }}
+				{{ user.get_used_daily_build_quota() | format_time }}/{{ user.daily_build_quota | format_time }}
 		{% else %}
 				{{ _("Daily Build Usage") }}
-				{{ format_time(user.used_daily_build_quota) }}
+				{{ user.get_used_daily_build_quota() | format_time }}
-		{% end %}
+		{% endif %}
-		{# Disk Usage #}
 		{% if user.storage_quota %}
 				{{ _("Disk Quota") }}
-				{{ format_size(user.disk_usage) }}/{{ format_size(user.storage_quota) }}
+				{{ user.get_disk_usage() | filesizeformat(binary=True) }}/{{ user.storage_quota | filesizeformat(binary=True) }}
 		{% else %}
 				{{ _("Disk Usage") }}
-				{{ format_size(user.disk_usage) }}
+				{{ user.get_disk_usage() | filesizeformat(binary=True) }}
-		{% end %}
+		{% endif %}
-	{% end %}
+	{% endif %}
@@ -93,13 +94,13 @@
 			{{ _("Edit") }}
-		{% end %}
+		{% endif %}
 		{% if current_user and current_user.is_admin() %}
 			{{ _("Email") }}
-		{% end %}
+		{% endif %}
@@ -109,19 +110,22 @@
 			{{ _("Repositories") }}
-			{% if user.repos %}
-				{% for distro in sorted(user.repos) %}
+			{# Fetch all repositories #}
+			{% set repos = user.get_repos() %}
+
+			{% if repos %}
+				{% for distro, items in repos | sort | groupby("distro") %}
 					{{ distro }}
-					{% module ReposList(user.repos[distro]) %}
+					{{ RepoList(items) }}
-				{% end %}
+				{% endfor %}
 			{% else %}
 				{{ _("No Repositories, Yet") }}
-			{% end %}
+			{% endif %}
 			{% if user.has_perm(current_user) %}
@@ -129,7 +133,7 @@
 				{{ _("Create Repository") }}
-			{% end %}
+			{% endif %}
    -{% end block %} +{% endblock %} diff --git a/src/web/__init__.py b/src/web/__init__.py index 05651a40..2dfd37e4 100644 --- a/src/web/__init__.py +++ b/src/web/__init__.py @@ -13,13 +13,11 @@ from .. import misc # Import all handlers from . import auth -from . import bugs from . import builders from . import builds from . import debuginfo from . import distributions from . import errors -from . import events from . import jobs from . import mirrors from . import monitorings @@ -31,8 +29,6 @@ from . import uploads from . import users from .handlers import * -from . import ui_modules - class Application(tornado.web.Application): def __init__(self, **kwargs): settings = dict( @@ -41,74 +37,23 @@ class Application(tornado.web.Application): static_path = STATICDIR, ui_modules = { - "Highlight" : ui_modules.HighlightModule, - "Text" : ui_modules.TextModule, - - # Bugs - "BugsList" : bugs.ListModule, - - # Builds - "BuildsList" : builds.ListModule, - "BuildWatchers" : builds.WatchersModule, - - # BuildGroups - "BuildGroupList" : builds.GroupListModule, - - # Builders - "BuilderStats" : builders.StatsModule, - - # Distros - "DistrosList" : distributions.ListModule, - - # Events - "EventsList" : events.ListModule, - "EventBuildComment" : events.BuildCommentModule, - "EventSystemMessage" : events.SystemMessageModule, - "EventUserMessage" : events.UserMessageModule, - # Jobs - "JobsList" : jobs.ListModule, "JobsLogStream" : jobs.LogStreamModule, - "JobsQueue" : jobs.QueueModule, - - # Mirrors - "MirrorsList" : mirrors.ListModule, - - # Monitorings - "MonitoringsReleasesList" : monitorings.ReleasesListModule, - - # Packages - "PackageInfo" : packages.InfoModule, - "PackageDependencies": packages.DependenciesModule, # Releases "ReleasesList" : distributions.ReleasesListModule, - # Repositories - "ReposList" : repos.ListModule, - - # Sources - "SourcesList" : sources.ListModule, - "CommitsList" : sources.CommitsListModule, - # Users - "UsersList" : users.ListModule, "UserPushSubscribeButton" : users.PushSubscribeButton, - - "CommitMessage" : ui_modules.CommitMessageModule, - "LinkToUser" : ui_modules.LinkToUserModule, - "PackageFilesTable" : ui_modules.PackageFilesTableModule, }, ui_methods = { - "extract_hostname" : self.extract_hostname, - "format_time" : self.format_time, "group" : self.group, "make_url" : self.make_url, }, xsrf_cookies = True, - xsrf_cookie_kwargs = dict( - secure = True, - ), + xsrf_cookie_kwargs = { + "secure" : True, + }, # WebSocket websocket_ping_interval = 15, @@ -275,44 +220,10 @@ class Application(tornado.web.Application): ## UI methods - def extract_hostname(self, handler, url): - url = urllib.parse.urlparse(url) - - return url.hostname - - def format_time(self, handler, s, shorter=False): - _ = handler.locale.translate - - if isinstance(s, datetime.timedelta): - s = s.total_seconds() - - args = { - "s" : round(s % 60), - "m" : round(s / 60 % 60), - "h" : round(s / 3600 % 3600), - "d" : round(s / 86400), - } - - # Less than one minute - if s < 60: - return _("%(s)d s") % args - - # Less than one hour - elif s < 3600: - return _("%(m)d:%(s)02d m") % args - - # Less than one day - elif s < 86400: - return _("%(h)d:%(m)02d h") % args - - # More than one day - else: - return _("%(d)d:%(h)02d d") % args - def group(self, handler, *args, **kwargs): return misc.group(*args, **kwargs) - def make_url(self, handler, url, **kwargs): + def make_url(self, url, **kwargs): # Format any query arguments and append them to the URL if kwargs: # Filter out None diff --git a/src/web/auth.py 
b/src/web/auth.py index 3612d453..af89e772 100644 --- a/src/web/auth.py +++ b/src/web/auth.py @@ -10,7 +10,8 @@ log = logging.getLogger("pbs.web.auth") class LoginHandler(base.KerberosAuthMixin, base.BaseHandler): async def get(self, username=None, failed=False): - if self.current_user: + current_user = await self.get_current_user() + if current_user: raise tornado.web.HTTPError(403, "Already logged in") await self.render("login.html", username=username, failed=failed) @@ -26,7 +27,7 @@ class LoginHandler(base.KerberosAuthMixin, base.BaseHandler): return self.get(username=username, failed=True) # If the authentication was successful, we create a new session - async with self.db.transaction(): + async with await self.db.transaction(): # Fetch the authenticated user user = await self.backend.users.get_by_name(username) if not user: diff --git a/src/web/base.py b/src/web/base.py index 83f85f59..357922d7 100644 --- a/src/web/base.py +++ b/src/web/base.py @@ -2,6 +2,8 @@ import asyncio import base64 +import binascii +import datetime import functools import http.client import jinja2 @@ -14,9 +16,11 @@ import socket import sys import time import tornado.concurrent +import tornado.escape import tornado.locale import tornado.web import tornado.websocket +import tornado.util import traceback import urllib.parse @@ -26,6 +30,8 @@ from .. import misc from .. import users from ..decorators import * +from . import filters + # Setup logging log = logging.getLogger("pbs.web.base") @@ -39,11 +45,14 @@ class KerberosAuthMixin(object): @property def kerberos_service(self): - return self.settings.get("krb5-service", "HTTP") + return self.backend.config.get("krb5", "service", fallback="HTTP") + @property def kerberos_principal(self): - return self.settings.get("krb5-principal", "pakfire/%s" % socket.getfqdn()) + return self.backend.config.get( + "krb5", "principal", fallback="pakfire/%s" % socket.getfqdn(), + ) def authenticate_redirect(self): """ @@ -86,7 +95,7 @@ class KerberosAuthMixin(object): auth_value = auth_header.removeprefix("Negotiate ") # Set keytab to use - os.environ["KRB5_KTNAME"] = self.backend.settings.get("krb5-keytab") + os.environ["KRB5_KTNAME"] = self.backend.config.get("krb5", "keytab") try: # Initialise the server session @@ -143,7 +152,7 @@ class KerberosAuthMixin(object): def _auth_with_credentials(self, username, password): # Set keytab to use - os.environ["KRB5_KTNAME"] = self.backend.settings.get("krb5-keytab") + os.environ["KRB5_KTNAME"] = self.backend.config.get("krb5", "keytab") # Check the credentials against the Kerberos database try: @@ -172,24 +181,41 @@ class BaseHandler(tornado.web.RequestHandler): def db(self): return self.backend.db - async def _get_current_user(self): - # Get the session from the cookie - session_id = self.get_cookie("session_id", None) - if not session_id: - return + async def get_session(self): + """ + Returns the user session (if logged in) + """ + if not hasattr(self, "_session"): + # Get the session from the cookie + session_id = self.get_cookie("session_id", None) - # Fetch the session - session = await self.backend.sessions.get(session_id) - if not session: - return + # Fetch the session + if session_id: + session = await self.backend.sessions.get(session_id) + else: + session = None + + # Store the session + self._session = session + + return self._session + + async def get_current_user(self): + session = await self.get_session() - # Return the user - return await session.get_user() + # If logged in, return the user + if session: + return 
session.user - def get_user_locale(self): + @property + def current_user(self): + raise NotImplementedError("We don't use this any more") + + async def get_user_locale(self): # Get the locale from the user settings - if self.current_user: - return self.current_user.locale + current_user = await self.get_current_user() + if current_user: + return current_user.locale # If no locale was provided, we take what ever the browser requested return self.get_browser_locale() @@ -243,25 +269,54 @@ class BaseHandler(tornado.web.RequestHandler): env.globals |= { "backend" : self.backend, "version" : __version__, + + # Python modules + "datetime" : datetime, + + # Functions + "make_url" : self.application.make_url, + } + + # Custom Filters + env.filters |= { + "avatar_url" : filters.avatar_url, + "email_address" : filters.email_address, + "email_name" : filters.email_name, + "enumerate" : filters._enumerate, + "file_mode" : filters.file_mode, + "format_date" : filters.format_date, + "format_day" : filters.format_day, + "format_time" : filters.format_time, + "highlight" : filters.highlight, + "hostname" : filters.hostname, + "markdown" : filters._markdown, + "static_url" : filters.static_url, } return JinjaTemplateLoader(env) - def get_template_namespace(self): - ns = tornado.web.RequestHandler.get_template_namespace(self) + async def get_template_namespace(self): + # Fetch the current user + current_user = await self.get_current_user() + + # Fetch the locale + locale = await self.get_user_locale() - ns.update({ + ns = { + "handler" : self, + "current_user" : current_user, "hostname" : self.request.host, - "format_date" : self.format_date, - "format_size" : misc.format_size, - "xsrf_token" : self.xsrf_token, - "year" : time.strftime("%Y"), + "now" : datetime.datetime.now(), # i18n - "gettext" : self.locale.translate, - "ngettext" : self.locale.translate, - "pgettext" : self.locale.pgettext, - }) + "locale" : locale, + "gettext" : locale.translate, + "ngettext" : locale.translate, + "pgettext" : locale.pgettext, + + # XSRF Stuff + "xsrf_form_html" : self.xsrf_form_html, + } return ns @@ -282,7 +337,7 @@ class BaseHandler(tornado.web.RequestHandler): template = loader.load(template_name) # Make the namespace - namespace = self.get_template_namespace() + namespace = await self.get_template_namespace() namespace.update(kwargs) return await template.render_async(**namespace) @@ -423,21 +478,66 @@ class BaseHandler(tornado.web.RequestHandler): self.finish() async def write_error(self, code, exc_info=None, **kwargs): + # Fetch the current user + current_user = await self.get_current_user() + + # Translate the HTTP status code try: message = http.client.responses[code] except KeyError: message = None - _traceback = [] + tb = [] # Collect more information about the exception if possible. 
if exc_info: - if self.current_user and isinstance(self.current_user, users.User): - if self.current_user.is_admin(): - _traceback += traceback.format_exception(*exc_info) + if current_user and isinstance(current_user, users.User): + if current_user.is_admin(): + tb += traceback.format_exception(*exc_info) await self.render("errors/error.html", - code=code, message=message, traceback="".join(_traceback), **kwargs) + code=code, message=message, traceback="".join(tb), **kwargs) + + # XSRF Token Stuff + + @property + def xsrf_token(self): + raise NotImplementedError("We don't use this any more") + + async def _make_xsrf_token(self): + if not hasattr(self, "_xsrf_token"): + version, token, timestamp = self._get_raw_xsrf_token() + + output_version = self.settings.get("xsrf_cookie_version", 2) + cookie_kwargs = self.settings.get("xsrf_cookie_kwargs", {}) + + mask = os.urandom(4) + self._xsrf_token = b"|".join( + [ + b"2", + binascii.b2a_hex(mask), + binascii.b2a_hex(tornado.util._websocket_mask(mask, token)), + tornado.escape.utf8(str(int(timestamp))), + ] + ) + + if version is None: + current_user = await self.get_current_user() + if current_user and "expires_days" not in cookie_kwargs: + cookie_kwargs["expires_days"] = 30 + cookie_name = self.settings.get("xsrf_cookie_name", "_xsrf") + self.set_cookie(cookie_name, self._xsrf_token, **cookie_kwargs) + + return self._xsrf_token + + async def xsrf_form_html(self): + # Fetch the token + xsrf_token = await self._make_xsrf_token() + + # Escape the token + xsrf_token = tornado.escape.xhtml_escape(xsrf_token) + + return "" % xsrf_token # Typed Arguments @@ -473,11 +573,11 @@ class BaseHandler(tornado.web.RequestHandler): except (TypeError, ValueError): raise tornado.web.HTTPError(400, "%s is not an float" % arg) - def get_argument_builder(self, *args, **kwargs): + async def get_argument_builder(self, *args, **kwargs): name = self.get_argument(*args, **kwargs) if name: - return self.backend.builders.get_by_name(name) + return await self.backend.builders.get_by_name(name) def get_argument_distro(self, *args, **kwargs): slug = self.get_argument(*args, **kwargs) @@ -514,11 +614,11 @@ class BaseHandler(tornado.web.RequestHandler): # Return all uploads return [self._get_upload(uuid) for uuid in uuids] - def get_argument_user(self, *args, **kwargs): + async def get_argument_user(self, *args, **kwargs): name = self.get_argument(*args, **kwargs) if name: - return self.backend.users.get_by_name(name) + return await self.backend.users.get_by_name(name) # XXX TODO BackendMixin = BaseHandler @@ -548,7 +648,7 @@ class APIMixin(KerberosAuthMixin): def check_xsrf_cookie(self): pass - def get_current_user(self): + async def get_current_user(self): """ Authenticates a user or builder """ @@ -568,13 +668,13 @@ class APIMixin(KerberosAuthMixin): if self.allow_builders and principal.startswith("host/"): hostname = principal.removeprefix("host/") - return self.backend.builders.get_by_name(hostname) + return await self.backend.builders.get_by_name(hostname) # Return any users if self.allow_users: - return self.backend.users.get_by_name(principal) + return await self.backend.users.get_by_name(principal) - def get_user_locale(self): + async def get_user_locale(self): return self.get_browser_locale() @property @@ -659,9 +759,9 @@ def authenticated(method): """ @functools.wraps(method) async def wrapper(self, *args, **kwargs): - self.current_user = await self._get_current_user() + current_user = await self.get_current_user() - if not self.current_user: + if not current_user: if 
self.request.method in ("GET", "HEAD"): url = self.get_login_url() if "?" not in url: @@ -694,8 +794,10 @@ def negotiate(method): Requires clients to use SPNEGO """ @functools.wraps(method) - def wrapper(self, *args, **kwargs): - if not self.current_user: + async def wrapper(self, *args, **kwargs): + current_user = await self.get_current_user() + + if not current_user: # Send the Negotiate header self.add_header("WWW-Authenticate", "Negotiate") @@ -705,7 +807,12 @@ def negotiate(method): return None - return method(self, *args, **kwargs) + # Call the wrapped method + result = method(self, *args, **kwargs) + + # Await it if it is a coroutine + if asyncio.iscoroutine(result): + return await result return wrapper diff --git a/src/web/builders.py b/src/web/builders.py index b10e4e36..837ffc49 100644 --- a/src/web/builders.py +++ b/src/web/builders.py @@ -80,13 +80,13 @@ class StatsHandler(base.BaseHandler, tornado.websocket.WebSocketHandler): class IndexHandler(base.BaseHandler): - def get(self): - self.render("builders/index.html", builders=self.backend.builders) + async def get(self): + await self.render("builders/index.html", builders=self.backend.builders) class ShowHandler(base.BaseHandler): async def get(self, hostname): - builder = self.backend.builders.get_by_name(hostname) + builder = await self.backend.builders.get_by_name(hostname) if not builder: raise tornado.web.HTTPError(404, "Could not find builder %s" % hostname) @@ -97,7 +97,7 @@ class ShowHandler(base.BaseHandler): "is_shut_down" : await builder.is_shut_down(), } - self.render("builders/show.html", builder=builder, **args) + await self.render("builders/show.html", builder=builder, **args) class CreateHandler(base.BaseHandler): @@ -270,13 +270,3 @@ class StopHandler(base.BaseHandler): raise self.redirect("/builders/%s" % builder.hostname) - - -class StatsModule(ui_modules.UIModule): - def render(self, builder): - return self.render_string("builders/modules/stats.html", builder=builder) - - def javascript_files(self): - return ( - "js/builders-stats.min.js", - ) diff --git a/src/web/builds.py b/src/web/builds.py index 2860d82b..a4f0246f 100644 --- a/src/web/builds.py +++ b/src/web/builds.py @@ -77,31 +77,30 @@ class IndexHandler(base.BaseHandler): # Filters name = self.get_argument("name", None) - user = self.get_argument_user("user", None) + user = await self.get_argument_user("user", None) # Fetch the most recent builds - if user: - builds = user.get_builds(name, limit=limit, offset=offset) - else: - builds = self.backend.builds.get_recent(name=name, limit=limit, offset=offset) + builds = await self.backend.builds.get( + name = name, + user = user, + limit = limit, + offset = offset, + ) - # Group builds by date - builds = await misc.group(builds, lambda build: build.created_at.date()) - - self.render("builds/index.html", builds=builds, name=name, user=user, - limit=limit, offset=offset) + await self.render("builds/index.html", builds=builds, + name=name, user=user, limit=limit, offset=offset) class ShowHandler(base.BaseHandler): async def get(self, uuid): - build = self.backend.builds.get_by_uuid(uuid) + build = await self.backend.builds.get_by_uuid(uuid) if not build: raise tornado.web.HTTPError(404, "Could not find build %s" % uuid) # Fetch any fixed Bugs - bugs = await build.get_bugs() + bugs = [] # XXX await build.get_bugs() - self.render("builds/show.html", build=build, pkg=build.pkg, + await self.render("builds/show.html", build=build, pkg=build.pkg, distro=build.distro, bugs=bugs) @@ -370,36 +369,9 @@ class 
ReposRemoveHandler(base.BaseHandler): class GroupShowHandler(base.BaseHandler): - def get(self, uuid): - group = self.backend.builds.groups.get_by_uuid(uuid) + async def get(self, uuid): + group = await self.backend.builds.get_group_by_uuid(uuid) if not group: raise tornado.web.HTTPError(404, "Could not find build group %s" % uuid) - self.render("builds/groups/show.html", group=group) - - -class ListModule(ui_modules.UIModule): - def render(self, builds, limit=None, shorter=False, more_url=None): - rest = None - - # Limit builds - if limit: - builds, rest = builds[:limit], builds[limit:] - - return self.render_string("builds/modules/list.html", builds=builds, - rest=rest, shorter=shorter, more_url=more_url) - - -class GroupListModule(ui_modules.UIModule): - def render(self, group, limit=None): - return self.render_string("builds/groups/modules/list.html", - group=group, limit=limit) - - -class WatchersModule(ui_modules.UIModule): - def render(self, build, watchers=None): - if watchers is None: - watchers = build.watchers - - return self.render_string("builds/modules/watchers.html", - build=build, watchers=watchers) + await self.render("builds/groups/show.html", group=group) diff --git a/src/web/distributions.py b/src/web/distributions.py index 0dd2fd6d..2e46c3f3 100644 --- a/src/web/distributions.py +++ b/src/web/distributions.py @@ -8,10 +8,7 @@ from . import ui_modules class IndexHandler(base.BaseHandler): async def get(self): - # Fetch all distributions - distros = [distro async for distro in self.backend.distros] - - self.render("distros/index.html", distros=distros) + await self.render("distros/index.html", distros=self.backend.distros) class ShowHandler(base.BaseHandler): @@ -20,17 +17,7 @@ class ShowHandler(base.BaseHandler): if not distro: raise tornado.web.HTTPError(404, "Could not find distro: %s" % slug) - # Fetch the latest release - latest_release = await distro.get_latest_release() - - # Fetch all repos - repos = [repo async for repo in distro.get_repos()] - - # Fetch all sources - sources = [source async for source in distro.get_sources()] - - self.render("distros/show.html", distro=distro, - latest_release=latest_release, repos=repos, sources=sources) + await self.render("distros/show.html", distro=distro) class EditHandler(base.BaseHandler): @@ -268,11 +255,6 @@ class ReleasesPublishHandler(base.BaseHandler): self.redirect(release.url) -class ListModule(ui_modules.UIModule): - def render(self, distros): - return self.render_string("distros/modules/list.html", distros=distros) - - class ReleasesListModule(ui_modules.UIModule): def render(self, releases): return self.render_string("distros/releases/modules/list.html", releases=releases) diff --git a/src/web/filters.py b/src/web/filters.py new file mode 100644 index 00000000..8bcf3082 --- /dev/null +++ b/src/web/filters.py @@ -0,0 +1,181 @@ +############################################################################### +# # +# Pakfire - The IPFire package management system # +# Copyright (C) 2025 Pakfire development team # +# # +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +############################################################################### + +import datetime +import email.utils +import jinja2 +import markdown +import pygments +import re +import stat +import urllib.parse + +def avatar_url(user, size=None): + """ + Returns the avatar URL + """ + return user.avatar(size) + +def email_address(e): + """ + Extracts the raw email address + """ + name, address = email.utils.parseaddr(e) + + return address + +def email_name(e): + """ + Shows the name of the email address (of if there is none the plain email address) + """ + name, address = email.utils.parseaddr(e) + + return name or address + +def _enumerate(*args, **kwargs): + """ + Wraps enumerate() + """ + return enumerate(*args, **kwargs) + +def file_mode(mode): + """ + Converts the file mode into a string + """ + return stat.filemode(mode) + +@jinja2.pass_context +def format_date(ctx, *args, **kwargs): + # Fetch locale + locale = ctx.get("locale") + + return locale.format_date(*args, **kwargs) + +@jinja2.pass_context +def format_day(ctx, *args, **kwargs): + # Fetch locale + locale = ctx.get("locale") + + return locale.format_day(*args, **kwargs) + +@jinja2.pass_context +def format_time(ctx, s, shorter=False): + # Fetch locale + locale = ctx.get("locale") + + # Fetch the translation function + _ = locale.translate + + # Convert into seconds + if isinstance(s, datetime.timedelta): + s = s.total_seconds() + + args = { + "s" : round(s % 60), + "m" : round(s / 60 % 60), + "h" : round(s / 3600 % 3600), + "d" : round(s / 86400), + } + + # Less than one minute + if s < 60: + return _("%(s)d s") % args + + # Less than one hour + elif s < 3600: + return _("%(m)d:%(s)02d m") % args + + # Less than one day + elif s < 86400: + return _("%(h)d:%(m)02d h") % args + + # More than one day + else: + return _("%(d)d:%(h)02d d") % args + +def highlight(text, filename=None): + # Find a lexer + try: + if filename: + lexer = pygments.lexers.guess_lexer_for_filename(filename, text) + else: + lexer = pygments.lexers.guess_lexer(text) + except pygments.util.ClassNotFound as e: + lexer = pygments.lexers.special.TextLexer() + + # Format to HTML + formatter = pygments.formatters.HtmlFormatter() + + return pygments.highlight(text, lexer, formatter) + +def hostname(url): + # Parse the URL + url = urllib.parse.urlparse(url) + + # Return only the hostname + return url.hostname + +class PrettyLinksExtension(markdown.extensions.Extension): + def extendMarkdown(self, md): + md.preprocessors.register(BugzillaLinksPreprocessor(md), "bugzilla", 10) + md.preprocessors.register(CVELinksPreprocessor(md), "cve", 10) + + +class BugzillaLinksPreprocessor(markdown.preprocessors.Preprocessor): + regex = re.compile(r"(?:#(\d{5,}))", re.I) + + def run(self, lines): + for line in lines: + yield self.regex.sub( + r"[#\1](https://bugzilla.ipfire.org/show_bug.cgi?id=\1)", line) + + +class CVELinksPreprocessor(markdown.preprocessors.Preprocessor): + regex = re.compile(r"(?:CVE)[\s\-](\d{4}\-\d+)") + + def run(self, lines): + for line in lines: + yield self.regex.sub( + r"[CVE-\1](https://cve.mitre.org/cgi-bin/cvename.cgi?name=\1)", line) + +# Create the renderer +markdown_processor = markdown.Markdown( + extensions=[ + PrettyLinksExtension(), + "codehilite", + "fenced_code", + "sane_lists", + ], +) + +def _markdown(text): + """ + Implements a simple markdown processor + 
""" + # Pass the text through a markdown processor + return markdown_processor.convert(text) + + +@jinja2.pass_context +def static_url(ctx, *args, **kwargs): + # Fetch the handler + handler = ctx.get("handler") + + return handler.static_url(*args, **kwargs) diff --git a/src/web/handlers.py b/src/web/handlers.py index f2b51889..53654e9c 100644 --- a/src/web/handlers.py +++ b/src/web/handlers.py @@ -8,16 +8,15 @@ class IndexHandler(base.BaseHandler): async def get(self): async with await self.db.transaction(): # Fetch all running jobs - running_jobs = [job async for job in self.backend.jobs.get_running()] + running_jobs = self.backend.jobs.get_running() # Fetch finished jobs - finished_jobs = [job async for job in self.backend.jobs.get_finished(limit=8)] + finished_jobs = self.backend.jobs.get_finished(limit=8) - # Fetch the total length of the queue - queue_length = await self.backend.jobs.queue.get_length() + # Concactenate all jobs + jobs = [job async for job in running_jobs] + [job async for job in finished_jobs] - await self.render("index.html", running_jobs=running_jobs, - finished_jobs=finished_jobs, queue_length=queue_length) + await self.render("index.html", jobs=jobs, queue=self.backend.jobs.queue) class LogHandler(base.BaseHandler): @@ -31,8 +30,8 @@ class LogHandler(base.BaseHandler): "priority" : self.get_argument_int("priority", None) or 5, # Filters - "builder" : self.get_argument_builder("builder", None), - "user" : self.get_argument_user("user", None), + "builder" : await self.get_argument_builder("builder", None), + "user" : await self.get_argument_user("user", None), } await self.render("log.html", **kwargs) diff --git a/src/web/jobs.py b/src/web/jobs.py index 0412151c..a2e8f9a9 100644 --- a/src/web/jobs.py +++ b/src/web/jobs.py @@ -250,17 +250,6 @@ class RetryHandler(base.BaseHandler): self.redirect("/builds/%s" % job.build.uuid) -class ListModule(ui_modules.UIModule): - def render(self, jobs, show_arch_only=False, show_packages=False): - return self.render_string("jobs/modules/list.html", jobs=jobs, - show_arch_only=show_arch_only, show_packages=show_packages) - - -class QueueModule(ui_modules.UIModule): - def render(self, jobs): - return self.render_string("jobs/modules/queue.html", jobs=jobs) - - class LogStreamModule(ui_modules.UIModule): def render(self, job, limit=None, small=False): return self.render_string("jobs/modules/log-stream.html", diff --git a/src/web/mirrors.py b/src/web/mirrors.py index 7419bbda..272fd53f 100644 --- a/src/web/mirrors.py +++ b/src/web/mirrors.py @@ -6,17 +6,17 @@ from . import base from . 
import ui_modules class IndexHandler(base.BaseHandler): - def get(self): - self.render("mirrors/index.html", mirrors=self.backend.mirrors) + async def get(self): + await self.render("mirrors/index.html", mirrors=self.backend.mirrors) class ShowHandler(base.BaseHandler): - def get(self, hostname): - mirror = self.backend.mirrors.get_by_hostname(hostname) + async def get(self, hostname): + mirror = await self.backend.mirrors.get_by_hostname(hostname) if not mirror: raise tornado.web.HTTPError(404, "Could not find mirror %s" % hostname) - self.render("mirrors/show.html", mirror=mirror) + await self.render("mirrors/show.html", mirror=mirror) class CheckHandler(base.BaseHandler): @@ -130,8 +130,3 @@ class DeleteHandler(base.BaseHandler): # Redirect back to all mirrors self.redirect("/mirrors") - - -class ListModule(ui_modules.UIModule): - def render(self, mirrors): - return self.render_string("mirrors/modules/list.html", mirrors=mirrors) diff --git a/src/web/monitorings.py b/src/web/monitorings.py index fe618950..f535450f 100644 --- a/src/web/monitorings.py +++ b/src/web/monitorings.py @@ -24,18 +24,18 @@ from . import base from . import ui_modules class ShowHandler(base.BaseHandler): - def get(self, slug, name): + async def get(self, slug, name): # Fetch the distribution - distro = self.backend.distros.get_by_slug(slug) + distro = await self.backend.distros.get_by_slug(slug) if not distro: raise tornado.web.HTTPError(404, "Could not find distro %s" % slug) # Fetch the monitoring - monitoring = self.backend.monitorings.get_by_distro_and_name(distro, name) + monitoring = await self.backend.monitorings.get_by_distro_and_name(distro, name) if not monitoring: raise tornado.web.HTTPError(404, "Could not find monitoring for %s in %s" % (name, distro)) - self.render("monitorings/show.html", monitoring=monitoring) + await self.render("monitorings/show.html", monitoring=monitoring) class CreateHandler(base.BaseHandler): @@ -208,9 +208,3 @@ class CheckHandler(base.BaseHandler): # Redirect back self.redirect(monitoring.url) - - -class ReleasesListModule(ui_modules.UIModule): - def render(self, releases, show_empty=True): - return self.render_string("monitorings/modules/releases-list.html", - releases=releases, show_empty=show_empty) diff --git a/src/web/packages.py b/src/web/packages.py index 6918b4f2..752d11a3 100644 --- a/src/web/packages.py +++ b/src/web/packages.py @@ -11,48 +11,54 @@ from . import ui_modules class IndexHandler(base.BaseHandler): async def get(self): - packages = self.backend.packages.get_list() + packages = await self.backend.packages.list() - # Sort all packages in an array like "" --> [packages, ...] - # to print them in a table for each letter of the alphabet. 
- packages = await misc.group(packages, lambda pkg: pkg.name[0].lower()) - - self.render("packages/index.html", packages=packages) + await self.render("packages/index.html", packages=packages) class NameHandler(base.BaseHandler): async def get(self, name): - build = self.backend.builds.get_latest_by_name(name) + build = await self.backend.builds.get_latest_by_name(name) if not build: raise tornado.web.HTTPError(404, "Package '%s' was not found" % name) + # Fetch the current user + current_user = await self.get_current_user() + # Fetch all distributions distros = {} - # Get the latest bugs from Bugzilla + # Collect all scratch builds by distro + scratch_builds = {} + + # Collect data async with asyncio.TaskGroup() as tasks: async for distro in self.backend.distros: + # Fetch bugs distros[distro] = tasks.create_task( self.backend.bugzilla.search(component=name, **distro.bugzilla_fields), ) + # Fetch scratch builds + if current_user: + scratch_builds[distro] = tasks.create_task( + self.backend.builds.get( + user=current_user, scratch=True, name=name, distro=distro), + ) + + # Map all bugs bugs = { distro : await distros[distro] for distro in distros } - # Fetch my own builds - if self.current_user: - scratch_builds = await misc.group( - self.current_user.get_builds_by_name(name), lambda build: build.distro, - ) - else: - scratch_builds = [] + # Map all scratch builds + scratch_builds = { distro : await scratch_builds[distro] for distro in distros } - self.render("packages/name/index.html", package=build.pkg, distros=distros, - scratch_builds=scratch_builds, bugs=bugs) + await self.render("packages/name/index.html", + package=build.pkg, distros=distros, scratch_builds=scratch_builds, bugs=bugs) class NameBuildsHandler(base.BaseHandler): async def get(self, name): - build = self.backend.builds.get_latest_by_name(name) + build = await self.backend.builds.get_latest_by_name(name) if not build: raise tornado.web.HTTPError(404, "Package '%s' was not found" % name) @@ -71,7 +77,7 @@ class NameBuildsHandler(base.BaseHandler): # Group them by user users = await misc.group(scratch_builds, lambda build: build.owner) - self.render("packages/name/builds.html", limit=limit, + await self.render("packages/name/builds.html", limit=limit, package=build.pkg, distros=distros, users=users) @@ -81,7 +87,7 @@ class ShowHandler(base.BaseHandler): if not package: raise tornado.web.HTTPError(404, "Could not find package: %s" % uuid) - self.render("packages/show.html", package=package) + await self.render("packages/show.html", package=package) class FileDownloadHandler(base.BaseHandler): @@ -103,7 +109,8 @@ class FileDownloadHandler(base.BaseHandler): self.set_header("Content-Length", file.size) # Send MIME type - self.set_header("Content-Type", file.mimetype) + if file.mimetype: + self.set_header("Content-Type", file.mimetype) # Send the filename self.set_header("Content-Disposition", @@ -140,26 +147,4 @@ class FileViewHandler(base.BaseHandler): # These pages should not be indexed self.add_header("X-Robots-Tag", "noindex") - self.render("packages/view-file.html", package=package, file=file, payload=payload) - - -class DependenciesModule(ui_modules.UIModule): - def render(self, package): - _ = self.locale.translate - - deps = { - _("Provides") : package.provides, - _("Requires") : package.requires, - _("Conflicts") : package.conflicts, - _("Obsoletes") : package.obsoletes, - _("Recommends") : package.recommends, - _("Suggests") : package.suggests, - } - - return self.render_string("packages/modules/dependencies.html", 
deps=deps) - - -class InfoModule(ui_modules.UIModule): - def render(self, package, show_evr=False, show_size=True): - return self.render_string("packages/modules/info.html", - package=package, show_evr=show_evr, show_size=show_size) + await self.render("packages/view-file.html", package=package, file=file, payload=payload) diff --git a/src/web/repos.py b/src/web/repos.py index 7f539d42..e2e50d74 100644 --- a/src/web/repos.py +++ b/src/web/repos.py @@ -109,25 +109,25 @@ class APIv1ShowHandler(base.APIMixin, base.BaseHandler): class BaseHandler(base.BaseHandler): - def _get_repo(self, distro_slug, repo_slug, user_slug=None): + async def _get_repo(self, distro_slug, repo_slug, user_slug=None): user = None # Find the user if user_slug: - user = self.backend.users.get_by_name(user_slug) + user = await self.backend.users.get_by_name(user_slug) if not user: raise tornado.web.HTTPError(404, "Could not find user: %s" % user_slug) # Find the distribution - distro = self.backend.distros.get_by_slug(distro_slug) + distro = await self.backend.distros.get_by_slug(distro_slug) if not distro: raise tornado.web.HTTPError(404, "Could not find distro: %s" % distro_slug) # Find the repository if user: - repo = user.get_repo(distro, repo_slug) + repo = await user.get_repo(distro, repo_slug) else: - repo = distro.get_repo(repo_slug) + repo = await distro.get_repo(repo_slug) if not repo: raise tornado.web.HTTPError(404, "Could not find repo: %s" % repo_slug) @@ -135,19 +135,19 @@ class BaseHandler(base.BaseHandler): class ShowHandler(BaseHandler): - def get(self, **kwargs): + async def get(self, **kwargs): # Fetch the repository - repo = self._get_repo(**kwargs) + repo = await self._get_repo(**kwargs) - self.render("repos/show.html", repo=repo, distro=repo.distro) + await self.render("repos/show.html", repo=repo, distro=repo.distro) class BuildsHandler(BaseHandler): - def get(self, **kwargs): + async def get(self, **kwargs): # Fetch the repository - repo = self._get_repo(**kwargs) + repo = await self._get_repo(**kwargs) - self.render("repos/builds.html", repo=repo, distro=repo.distro) + await self.render("repos/builds.html", repo=repo, distro=repo.distro) class CreateCustomHandler(BaseHandler): @@ -187,15 +187,13 @@ class CreateCustomHandler(BaseHandler): class ConfigHandler(BaseHandler): - def get(self, **kwargs): + async def get(self, **kwargs): # Fetch the repository - repo = self._get_repo(**kwargs) + repo = await self._get_repo(**kwargs) # Generate configuration config = configparser.ConfigParser(interpolation=None) - - with self.db.transaction(): - repo.write_config(config) + repo.write_config(config) # This is plain text self.set_header("Content-Type", "text/plain") @@ -302,8 +300,3 @@ class MirrorlistHandler(BaseHandler): "version" : 1, "mirrors" : mirrors, }) - - -class ListModule(ui_modules.UIModule): - def render(self, repos, build=None): - return self.render_string("repos/modules/list.html", repos=repos, build=build) diff --git a/src/web/search.py b/src/web/search.py index bd09df0f..b5bbe10f 100644 --- a/src/web/search.py +++ b/src/web/search.py @@ -5,10 +5,10 @@ import re from . 
import base class SearchHandler(base.BaseHandler): - def get(self): + async def get(self): q = self.get_argument("q", None) if not q: - self.render("search.html", q=None, packages=None, files=None, users=None) + await self.render("search.html", q=None, packages=None, files=None, users=None) return # Check if the given search pattern is a UUID @@ -16,19 +16,19 @@ class SearchHandler(base.BaseHandler): # Search for a matching object and redirect to it # Search in packages - package = self.backend.packages.get_by_uuid(q) + package = await self.backend.packages.get_by_uuid(q) if package: self.redirect("/packages/%s" % package.uuid) return # Search in builds. - build = self.backend.builds.get_by_uuid(q) + build = await self.backend.builds.get_by_uuid(q) if build: self.redirect("/builds/%s" % build.uuid) return # Search in jobs. - job = self.backend.jobs.get_by_uuid(q) + job = await self.backend.jobs.get_by_uuid(q) if job: self.redirect("/builds/%s" % job.build.uuid) return @@ -37,12 +37,12 @@ class SearchHandler(base.BaseHandler): # If the query starting starts with / we are searching for a file if q.startswith("/"): - files = self.backend.packages.search_by_filename(q, limit=50) + files = await self.backend.packages.search_by_filename(q, limit=50) # Otherwise we are performing a search for packages & users else: - packages = self.backend.packages.search(q, limit=50) - users = self.backend.users.search(q, limit=50) + packages = await self.backend.packages.search(q, limit=50) + users = await self.backend.users.search(q, limit=50) # Redirect if we have an exact match for a package if len(packages) == 1 and not files and not users: @@ -51,4 +51,4 @@ class SearchHandler(base.BaseHandler): return # Render results - self.render("search.html", q=q, packages=packages, files=files, users=users) + await self.render("search.html", q=q, packages=packages, files=files, users=users) diff --git a/src/web/sources.py b/src/web/sources.py index 315073e0..9dfbdaf6 100644 --- a/src/web/sources.py +++ b/src/web/sources.py @@ -24,91 +24,81 @@ from . import base from . 
import ui_modules class ShowHandler(base.BaseHandler): - def _get_source(self, distro_slug, repo_slug, source_slug, user_slug=None): + async def _get_source(self, distro_slug, repo_slug, source_slug, user_slug=None): user = None # Find the user if user_slug: - user = self.backend.users.get_by_name(user_slug) + user = await self.backend.users.get_by_name(user_slug) if not user: raise tornado.web.HTTPError(404, "Could not find user: %s" % user_slug) # Find the distribution - distro = self.backend.distros.get_by_slug(distro_slug) + distro = await self.backend.distros.get_by_slug(distro_slug) if not distro: raise tornado.web.HTTPError(404, "Could not find distro: %s" % distro_slug) # Find the repository if user: - repo = user.get_repo(distro, repo_slug) + repo = await user.get_repo(distro, repo_slug) else: - repo = distro.get_repo(repo_slug) + repo = await distro.get_repo(repo_slug) if not repo: raise tornado.web.HTTPError(404, "Could not find repo: %s" % repo_slug) # Find the source - source = repo.get_source_by_slug(source_slug) + source = await repo.get_source_by_slug(source_slug) if not source: raise tornado.web.HTTPError(404, "Could not find source: %s" % source_slug) return source - def get(self, **kwargs): - source = self._get_source(**kwargs) + async def get(self, **kwargs): + source = await self._get_source(**kwargs) - self.render("sources/show.html", source=source) + await self.render("sources/show.html", source=source) class ShowCommitHandler(base.BaseHandler): - def _get_commit(self, distro_slug, repo_slug, source_slug, commit_slug, user_slug=None): + async def _get_commit(self, distro_slug, repo_slug, source_slug, commit_slug, user_slug=None): user = None # Find the user if user_slug: - user = self.backend.users.get_by_name(user_slug) + user = await self.backend.users.get_by_name(user_slug) if not user: raise tornado.web.HTTPError(404, "Could not find user: %s" % user_slug) # Find the distribution - distro = self.backend.distros.get_by_slug(distro_slug) + distro = await self.backend.distros.get_by_slug(distro_slug) if not distro: raise tornado.web.HTTPError(404, "Could not find distro: %s" % distro_slug) # Find the repository if user: - repo = user.get_repo(distro, repo_slug) + repo = await user.get_repo(distro, repo_slug) else: - repo = distro.get_repo(repo_slug) + repo = await distro.get_repo(repo_slug) if not repo: raise tornado.web.HTTPError(404, "Could not find repo: %s" % repo_slug) # Find the source - source = repo.get_source_by_slug(source_slug) + source = await repo.get_source_by_slug(source_slug) if not source: raise tornado.web.HTTPError(404, "Could not find source: %s" % source_slug) # Find the commit - commit = source.get_commit(commit_slug) + commit = await source.get_commit(commit_slug) if not commit: raise tornado.web.HTTPError(404, "Could not find commit %s in %s" % (commit_slug, source)) return commit async def get(self, **kwargs): - commit = self._get_commit(**kwargs) + commit = await self._get_commit(**kwargs) # Fetch any fixed bugs fixed_bugs = await commit.get_fixed_bugs() - self.render("sources/commit.html", source=commit.source, commit=commit, + await self.render("sources/commit.html", source=commit.source, commit=commit, fixed_bugs=fixed_bugs) - - -class ListModule(ui_modules.UIModule): - def render(self, sources): - return self.render_string("sources/modules/list.html", sources=sources) - - -class CommitsListModule(ui_modules.UIModule): - def render(self, commits): - return self.render_string("sources/modules/commits.html", commits=commits) diff --git 
a/src/web/ui_modules.py b/src/web/ui_modules.py deleted file mode 100644 index eb6626af..00000000 --- a/src/web/ui_modules.py +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/python - -import markdown -import pygments -import pygments.formatters -import pygments.lexers -import re -import tornado.web - -from .. import users -from ..constants import * - -class UIModule(tornado.web.UIModule): - @property - def backend(self): - return self.handler.application.backend - - -class TextModule(UIModule): - """ - Renders the text through the Markdown processor - """ - def render(self, text, pre=False): - # Do nothing for no input - if text is None: - text = "" - - # Pass the text through a markdown processor - if not pre and text: - text = markdown.markdown(text, - extensions=[ - PrettyLinksExtension(), - "codehilite", - "fenced_code", - "sane_lists", - ]) - - return self.render_string("modules/text.html", text=text, pre=pre) - - -class PrettyLinksExtension(markdown.extensions.Extension): - def extendMarkdown(self, md): - md.preprocessors.register(BugzillaLinksPreprocessor(md), "bugzilla", 10) - md.preprocessors.register(CVELinksPreprocessor(md), "cve", 10) - - -class BugzillaLinksPreprocessor(markdown.preprocessors.Preprocessor): - regex = re.compile(r"(?:#(\d{5,}))", re.I) - - def run(self, lines): - for line in lines: - yield self.regex.sub( - r"[#\1](https://bugzilla.ipfire.org/show_bug.cgi?id=\1)", line) - - -class CVELinksPreprocessor(markdown.preprocessors.Preprocessor): - regex = re.compile(r"(?:CVE)[\s\-](\d{4}\-\d+)") - - def run(self, lines): - for line in lines: - yield self.regex.sub( - r"[CVE-\1](https://cve.mitre.org/cgi-bin/cvename.cgi?name=\1)", line) - - -class HighlightModule(UIModule): - def render(self, text, filename=None): - # Find a lexer - try: - if filename: - lexer = pygments.lexers.guess_lexer_for_filename(filename, text) - else: - lexer = pygments.lexers.guess_lexer(text) - except pygments.util.ClassNotFound as e: - lexer = pygments.lexers.special.TextLexer() - - # Find a formatter - formatter = pygments.formatters.HtmlFormatter() - - return pygments.highlight(text, lexer, formatter) - - -class CommitMessageModule(UIModule): - def render(self, commit): - return self.render_string("modules/commit-message.html", commit=commit) - - -class PackageFilesTableModule(UIModule): - def render(self, pkg, filelist): - return self.render_string("modules/packages-files-table.html", - pkg=pkg, filelist=filelist) - - -class LinkToUserModule(UIModule): - def render(self, user): - return self.render_string("modules/link-to-user.html", user=user, users=users) diff --git a/src/web/uploads.py b/src/web/uploads.py index 5220fa3e..06abd932 100644 --- a/src/web/uploads.py +++ b/src/web/uploads.py @@ -35,7 +35,11 @@ class APIv1IndexHandler(base.APIMixin, base.BaseHandler): async def get(self): uploads = [] - for upload in self.current_user.uploads: + # Fetch the current user + current_user = await self.get_current_user() + + # Send information about all uploads + async for upload in current_user.get_uploads(): uploads.append({ "id" : "%s" % upload.uuid, "filename" : upload.filename, @@ -57,6 +61,9 @@ class APIv1IndexHandler(base.APIMixin, base.BaseHandler): """ Creates a new upload and returns its UUID """ + # Fetch the current user + current_user = await self.get_current_user() + # Fetch the filename filename = self.get_argument("filename") @@ -76,21 +83,21 @@ class APIv1IndexHandler(base.APIMixin, base.BaseHandler): raise tornado.web.HTTPError(400, "Invalid hexdigest") from e # Create a new upload - 
with self.db.transaction(): + async with await self.db.transaction(): try: upload = await self.backend.uploads.create( - filename, - size=size, - owner=self.current_user, - digest_algo=digest_algo, - digest=digest, + filename = filename, + size = size, + owner = current_user, + digest_algo = digest_algo, + digest = digest, ) except uploads.UnsupportedDigestException as e: raise base.APIError(errno.ENOTSUP, "Unsupported digest %s" % digest_algo) from e except users.QuotaExceededError as e: - raise base.APIError(errno.EDQUOT, "Quota exceeded for %s" % self.current_user) from e + raise base.APIError(errno.EDQUOT, "Quota exceeded for %s" % current_user) from e except ValueError as e: raise base.APIError(errno.EINVAL, "%s" % e) from e @@ -123,16 +130,18 @@ class APIv1DetailHandler(base.APIMixin, base.BaseHandler): Called to store the received payload """ # Fetch the upload - upload = self.backend.uploads.get_by_uuid(uuid) + upload = await self.backend.uploads.get_by_uuid(uuid) if not upload: - raise tornado.web.HTTPError(400, "Could not find upload %s" % uuid) + raise tornado.web.HTTPError(404, "Could not find upload %s" % uuid) + + # XXX has perm? # Fail if we did not receive anything if not self.buffer.tell(): raise base.APIError(errno.ENODATA, "No data received") # Import the payload from the buffer - with self.db.transaction(): + async with await self.db.transaction(): try: await upload.copyfrom(self.buffer) diff --git a/src/web/users.py b/src/web/users.py index 888c0de9..1d436200 100644 --- a/src/web/users.py +++ b/src/web/users.py @@ -8,17 +8,20 @@ from . import base from . import ui_modules class IndexHandler(base.BaseHandler): - def get(self): - self.render("users/index.html", users=self.backend.users.top) + async def get(self): + # Fetch the top users + users = await self.backend.users.get_top() + + await self.render("users/index.html", users=users) class ShowHandler(base.BaseHandler): - def get(self, name): - user = self.backend.users.get_by_name(name) + async def get(self, name): + user = await self.backend.users.get_by_name(name) if not user: raise tornado.web.HTTPError(404, "Could not find user: %s" % name) - self.render("users/show.html", user=user) + await self.render("users/show.html", user=user) class DeleteHandler(base.BaseHandler): @@ -122,11 +125,6 @@ class PushSubscribeHandler(base.BaseHandler): await self.current_user.subscribe(**args) -class ListModule(ui_modules.UIModule): - def render(self, users): - return self.render_string("users/modules/list.html", users=users) - - class PushSubscribeButton(ui_modules.UIModule): def render(self): # Fetch the application server key -- 2.47.2
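
A note for readers following the template changes above: the old Tornado ui_methods and ui_modules (format_time, ReposList, UsersList, ...) are replaced by plain Jinja2 filters and macros, registered once on the Jinja2 environment in src/web/base.py and implemented in src/web/filters.py, with rendering going through template.render_async(). The sketch below is a minimal, self-contained illustration of that pattern only; the template string, the example values and the simplified day/hour arithmetic are assumptions made for this example and are not taken from the Pakfire sources.

    import asyncio
    import datetime
    import jinja2

    @jinja2.pass_context
    def format_time(ctx, s, shorter=False):
        # The real filter in src/web/filters.py pulls the locale out of the
        # template context for translation; this sketch stays standalone.
        if isinstance(s, datetime.timedelta):
            s = s.total_seconds()
        if s < 60:
            return "%d s" % round(s % 60)
        elif s < 3600:
            return "%d:%02d m" % (s / 60 % 60, s % 60)
        elif s < 86400:
            return "%d:%02d h" % (s / 3600, s / 60 % 60)
        return "%d:%02d d" % (s / 86400, s / 3600 % 24)

    # Async-enabled environment, mirroring the switch to render_async()
    env = jinja2.Environment(enable_async=True)

    # Filters are registered once on the environment instead of being passed
    # through Tornado's ui_methods on every request
    env.filters |= {
        "format_time" : format_time,
    }

    async def main():
        # Hypothetical template string, just to exercise the filter
        template = env.from_string(
            "{{ _('Total Build Time') }}: {{ build_time | format_time }}")

        print(await template.render_async(
            _ = lambda s: s,  # stand-in for the gettext callable in the namespace
            build_time = datetime.timedelta(hours=1, minutes=30),
        ))

    if __name__ == "__main__":
        asyncio.run(main())

Rendering the hypothetical template prints "Total Build Time: 1:30 h"; in the patch itself the same filter is invoked from templates such as users/show.html via "{{ user.total_build_time | format_time }}".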