#!/usr/bin/python
+import asyncio
+import datetime
+import fnmatch
import logging
import os
import pakfire
+import re
import tempfile
from . import base
-from . import git
from . import misc
from .constants import *
+
+# Setup logging
+log = logging.getLogger("pbs.sources")
if commit:
return Commit(self.pakfire, commit.id)
- def pull(self):
- for source in self:
- with git.Repo(self.backend, source, mode="mirror") as repo:
- # Fetch the latest updates
- repo.fetch()
-
- # Import all new revisions
- repo.import_revisions()
-
- def dist(self):
- # Walk through all source repositories
- for source in self:
- # Get access to the git repo
- with git.Repo(self.pakfire, source) as repo:
- # Walk through all pending commits
- for commit in source.pending_commits:
- commit.state = "running"
-
- logging.debug("Processing commit %s: %s" % (commit.revision, commit.subject))
-
- # Navigate to the right revision.
- repo.checkout(commit.revision)
-
- # Get all changed makefiles.
- deleted_files = []
- updated_files = []
-
- for file in repo.changed_files(commit.revision):
- # Don't care about files that are not a makefile.
- if not file.endswith(".%s" % MAKEFILE_EXTENSION):
- continue
-
- if os.path.exists(file):
- updated_files.append(file)
- else:
- deleted_files.append(file)
-
- if updated_files:
- # Create a temporary directory where to put all the files
- # that are generated here.
- pkg_dir = tempfile.mkdtemp()
-
- try:
- config = pakfire.config.Config(["general.conf",])
- config.parse(source.distro.get_config())
-
- p = pakfire.PakfireServer(config=config)
-
- pkgs = []
- for file in updated_files:
- try:
- pkg_file = p.dist(file, pkg_dir)
- pkgs.append(pkg_file)
- except:
- raise
-
- # Import all packages in one swoop.
- for pkg in pkgs:
- with self.db.transaction():
- self.backend.builds.create_from_source_package(pkg,
- source.distro, commit=commit, type="release")
-
- except:
- if commit:
- commit.state = "failed"
-
- raise
-
- finally:
- if os.path.exists(pkg_dir):
- shutil.rmtree(pkg_dir)
-
- for file in deleted_files:
- # Determine the name of the package.
- name = os.path.basename(file)
- name = name[:len(MAKEFILE_EXTENSION) + 1]
-
- source.distro.delete_package(name)
-
- if commit:
- commit.state = "finished"
-
class Source(base.DataObject):
table = "sources"
# Commits
- def create_commit(self, revision, author, committer, subject, body, date):
- commit = self.backend.sources._get_commit("INSERT INTO source_commits(source_id, \
- revision, author, committer, subject, body, date) VALUES(%s, %s, %s, %s, %s, %s, %s) \
- RETURNING *", self.id, revision, author, committer, subject, body, date)
+ async def _import_commit(self, git, revision):
+ """
+ Imports the commit with the given revision
+ """
+ # Fetch the author's name and email address
+ author = await git.show_attribute(revision, r"%an <%ae>")
+
+ # Fetch the committer's name and email address
+ committer = await git.show_attribute(revision, r"%cn <%ce>")
- # Commit
+ # Subject
+ subject = await git.show_attribute(revision, r"%s")
+
+ # Body
+ body = await git.show_attribute(revision, r"%b")
+
+ # Date
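+ # (%aI is the author date in strict ISO 8601, which fromisoformat() can parse)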
+ date = datetime.datetime.fromisoformat(
+ await git.show_attribute(revision, r"%aI"),
+ )
+
+ # Insert into the database
+ commit = self.backend.sources._get_commit("""
+ INSERT INTO
+ source_commits(
+ source_id,
+ revision,
+ author,
+ committer,
+ subject,
+ body,
+ date
+ )
+ VALUES(
+ %s, %s, %s, %s, %s, %s, %s
+ )
+ RETURNING
+ *
+ """, self.id, revision, author, committer, subject, body, date,
+ )
+
+ # Populate the cache
commit.source = self
return commit
else:
await git.clone()
+ # Determine which commits there are to process
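+ # (for now, that is only the current head of the configured branch)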
+ revisions = [
+ await git.show_ref(self.branch),
+ ]
+
+ # Process all revisions one after the other
+ for revision in revisions:
+ with self.db.transaction():
+ await self._process_revision(git, revision)
+
+ async def _process_revision(self, git, revision):
+ """
+ Processes the given revision
+ """
+ # Create the commit metadata
+ commit = await self._import_commit(git, revision)
+
+ # Find changed files
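+ # ("*/*.nm" matches package makefiles one directory level deep)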
+ changed_files = await git.changed_files(revision, filter="*/*.nm")
+
+ # Exit if there have not been any changes
+ if not changed_files:
+ log.debug("%s has no changes" % revision)
+ return
+
+ deleted_packages = []
+ dist_files = []
+
+ # Run through all changed files
+ for status, filename in changed_files:
+ # Collect the names of all deleted packages
+ if status == "D":
+ deleted_packages.append(
+ self._filename_to_package(filename),
+ )
+
+ # For any other changed makefiles, we collect their paths
+ else:
+ dist_files.append(filename)
+
+ # Obsolete any deleted packages
+ if deleted_packages:
+ pass # XXX TODO
+
+ # Create builds from all other changed files
+ if dist_files:
+ await self._dist_files(git, revision, dist_files)
+
+ def _filename_to_package(self, filename):
+ """
+ Maps a filename to a package name
+ """
+ name = os.path.dirname(filename)
+
+ # Check that the file part matches
+ if not filename.endswith("/%s.nm" % name):
+ raise ValueError("Invalid package name")
+
+ return name
+
+ async def _dist_files(self, git, revision, files):
+ """
+ Runs "pakfire dist" on all given files at the given revision
+ """
+ with tempfile.TemporaryDirectory() as path:
+ # Checkout the revision into a new temporary directory
+ await git.checkout(revision, path)
+
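+ # Keep track of everything we upload so it can be cleaned up later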
+ uploads = []
+
+ # Launch a Pakfire instance with the repository configuration
+ # XXX vendor needs to be set from the repository metadata
+ try:
+ with self.backend.pakfire(distro=self.repo.distro) as p:
+ # Walk through all files one by one
+ for file in files:
+ # Run dist()
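+ # (this builds a source package from the makefile and returns its path)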
+ file = p.dist(file)
+
+ # Upload the file
+ uploads.append(
+ await self.backend.uploads.create_from_file(file),
+ )
+
+ # Remove the source file
+ os.unlink(file)
+
+ finally:
+ # Delete any uploads
+ await asyncio.gather(
+ *(upload.delete() for upload in uploads)
+ )
class Commit(base.DataObject):
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.path)
- async def command(self, *args):
+ async def command(self, *args, **kwargs):
"""
Executes a Git command
"""
if self.is_cloned():
cwd = self.path
- return await self.backend.command("git", *args, cwd=cwd)
+ return await self.backend.command("git", *args, cwd=cwd, **kwargs)
def is_cloned(self):
"""
Fetches any changes
"""
await self.command("fetch", self.url, self.branch)
+
+ async def show_attribute(self, revision, format):
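+ """
+ Returns a single attribute of the given revision, using a
+ placeholder from "PRETTY FORMATS" in git-log(1) (e.g. %an = author name)
+ """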
+ return await self.command(
+ "show",
+ "--no-patch",
+ "--format=format:%s" % format,
+ revision,
+ return_output=True,
+ )
+
+ async def show_ref(self, branch):
+ """
+ Resolves a ref into a hash
+ """
+ return await self.command("show-ref", "--hash", branch, return_output=True)
+
+ async def changed_files(self, revision, filter=None):
+ """
+ Returns a list of files that have been changed
+ """
+ changed_files = []
+
+ output = await self.command(
+ "show",
+
+ # Don't show anything else
+ "--format=format:",
+
+ # Show the status of each changed file
+ "--name-status",
+
+ # Revision
+ revision,
+
+ # Ask for the output to be returned
+ return_output=True,
+ )
+
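+ # Each output line looks like "M<TAB>path/to/file" (status letter, then filename)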
+ for line in output.splitlines():
+ # Skip any empty lines
+ if not line:
+ continue
+
+ m = re.match(r"^([A-Z])\s+(.*)$", line)
+ if not m:
+ raise ValueError("Could not parse line: %s" % line)
+
+ # Extract values
+ status, filename = m.groups()
+
+ # Filter out anything unwanted
+ if filter and not fnmatch.fnmatch(filename, filter):
+ continue
+
+ # Append the file to the list
+ changed_files.append((status, filename))
+
+ return changed_files
+
+ async def checkout(self, revision, path):
+ """
+ Checks out the given revision into a working directory at path
+ """
+ log.debug("Checking out %s into %s" % (revision, path))
+
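+ # "checkout <revision> -- ." copies the files into the given work tree
+ # without moving HEAD in the repository itself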
+ await self.command(
+ # Set the destination path as work tree
+ "--work-tree", path,
+
+ # Perform a checkout of the specified revision
+ "checkout", revision, ".",
+ )