from . import updates
from . import users
+log = logging.getLogger("builds")
+log.propagate = 1
+
from .constants import *
from .decorators import *
-def import_from_package(_pakfire, filename, distro=None, commit=None, type="release",
- arches=None, check_for_duplicates=True, owner=None):
-
- if distro is None:
- distro = commit.source.distro
-
- assert distro
-
- # Open the package file to read some basic information.
- pkg = pakfire.packages.open(None, None, filename)
-
- if check_for_duplicates:
- if distro.has_package(pkg.name, pkg.epoch, pkg.version, pkg.release):
- logging.warning("Duplicate package detected: %s. Skipping." % pkg)
- return
-
- # Open the package and add it to the database.
- pkg = packages.Package.open(_pakfire, filename)
- logging.debug("Created new package: %s" % pkg)
-
- # Associate the package to the processed commit.
- if commit:
- pkg.commit = commit
-
- # Create a new build object from the package which
- # is always a release build.
- build = Build.create(_pakfire, pkg, type=type, owner=owner, distro=distro)
- logging.debug("Created new build job: %s" % build)
-
- # Create all automatic jobs.
- build.create_autojobs(arches=arches)
-
- return pkg, build
-
-
class Builds(base.Object):
def _get_build(self, query, *args):
res = self.db.get(query, *args)
return builds
+ def create(self, pkg, type="release", owner=None, distro=None):
+ assert type in ("release", "scratch", "test")
+ assert distro, "You need to specify the distribution of this build."
+
+ # Check if scratch build has an owner.
+ if type == "scratch" and not owner:
+ raise Exception, "Scratch builds require an owner"
+
+ # Set the default priority of this build.
+ if type == "release":
+ priority = 0
+
+ elif type == "scratch":
+ priority = 1
+
+ elif type == "test":
+ priority = -1
+
+ # Create build in database
+ build = self._get_build("INSERT INTO builds(uuid, pkg_id, type, distro_id, priority) \
+ VALUES(%s, %s, %s, %s, %s) RETURNING *", "%s" % uuid.uuid4(), pkg.id, type, distro.id, priority)
+
+ # Set the owner of this build
+ if owner:
+ build.owner = owner
+
+ # Log that the build has been created.
+ build.log("created", user=owner)
+
+ # Create directory where the files live
+ if not os.path.exists(build.path):
+ os.makedirs(build.path)
+
+ # Move package file to the directory of the build.
+ build.pkg.move(os.path.join(build.path, "src"))
+
+ # Generate an update id.
+ build.generate_update_id()
+
+ # Obsolete all other builds with the same name to track updates.
+ build.obsolete_others()
+
+ # Search for possible bug IDs in the commit message.
+ build.search_for_bugs()
+
+ return build
+
+	def create_from_source_package(self, filename, distro, commit=None, type="release",
+			arches=None, check_for_duplicates=True, owner=None):
+		"""
+		Imports the source package at "filename" and creates a build for it.
+
+		filename             - path to the source package file on disk
+		distro               - the distribution the build is created for (required)
+		commit               - optional commit object to associate with the package
+		type                 - build type, passed through to create()
+		arches               - architectures to create automatic jobs for
+		check_for_duplicates - skip the import if an identical NEVR already exists
+		owner                - optional owner of the build
+
+		Returns the new build object, or None when a duplicate was skipped.
+		"""
+		assert distro
+
+		# Open the package file to read some basic information.
+		pkg = pakfire.packages.open(None, None, filename)
+
+		if check_for_duplicates:
+			# NOTE(review): on a duplicate this returns None while the
+			# normal path returns a build — callers must handle both.
+			if distro.has_package(pkg.name, pkg.epoch, pkg.version, pkg.release):
+				log.warning("Duplicate package detected: %s. Skipping." % pkg)
+				return
+
+		# Open the package and add it to the database
+		pkg = self.backend.packages.create(filename)
+
+		# Associate the package to the processed commit
+		if commit:
+			pkg.commit = commit
+
+		# Create a new build object from the package
+		build = self.create(pkg, type=type, owner=owner, distro=distro)
+
+		# Create all automatic jobs
+		build.create_autojobs(arches=arches)
+
+		return build
+
+
def get_changelog(self, name, public=None, limit=5, offset=0):
query = "SELECT builds.* FROM builds \
JOIN packages ON builds.pkg_id = packages.id \
class Build(base.Object):
+ table = "builds"
+
def __init__(self, pakfire, id, data=None):
base.Object.__init__(self, pakfire)
self._depends_on = None
self._pkg = None
self._credits = None
- self._owner = None
self._update = None
self._repo = None
self._distro = None
return iter(sorted(jobs))
- @classmethod
- def create(cls, pakfire, pkg, type="release", owner=None, distro=None, public=True):
- assert type in ("release", "scratch", "test")
- assert distro, "You need to specify the distribution of this build."
-
- if public:
- public = "Y"
- else:
- public = "N"
-
- # Check if scratch build has an owner.
- if type == "scratch" and not owner:
- raise Exception, "Scratch builds require an owner"
-
- # Set the default priority of this build.
- if type == "release":
- priority = 0
-
- elif type == "scratch":
- priority = 1
-
- elif type == "test":
- priority = -1
-
- id = pakfire.db.execute("""
- INSERT INTO builds(uuid, pkg_id, type, distro_id, time_created, public, priority)
- VALUES(%s, %s, %s, %s, NOW(), %s, %s)""", "%s" % uuid.uuid4(), pkg.id,
- type, distro.id, public, priority)
-
- # Set the owner of this buildgroup.
- if owner:
- pakfire.db.execute("UPDATE builds SET owner_id = %s WHERE id = %s",
- owner.id, id)
-
- build = cls(pakfire, id)
-
- # Log that the build has been created.
- build.log("created", user=owner)
-
- # Create directory where the files live.
- if not os.path.exists(build.path):
- os.makedirs(build.path)
-
- # Move package file to the directory of the build.
- source_path = os.path.join(build.path, "src")
- build.pkg.move(source_path)
-
- # Generate an update id.
- build.generate_update_id()
-
- # Obsolete all other builds with the same name to track updates.
- build.obsolete_others()
-
- # Search for possible bug IDs in the commit message.
- build.search_for_bugs()
-
- return build
-
def delete(self):
"""
Deletes this build including all jobs, packages and the source
"""
return self.data.type
- @property
- def owner_id(self):
- """
- The ID of the owner of this build.
- """
- return self.data.owner_id
-
- @property
- def owner(self):
+ def get_owner(self):
"""
The owner of this build.
"""
- if not self.owner_id:
- return
+ if self.data.owner_id:
+ return self.backend.users.get_by_id(self.data.owner_id)
- if self._owner is None:
- self._owner = self.pakfire.users.get_by_id(self.owner_id)
- assert self._owner
+ def set_owner(self, owner):
+ if owner:
+ self._set_attribute("owner_id", owner.id)
+ else:
+ self._set_attribute("owner_id", None)
- return self._owner
+ owner = lazy_property(get_owner, set_owner)
@property
def distro_id(self):
# Create a new job for every given archirecture.
for arch in self.pakfire.arches.expand(arches):
- # Don't create jobs for src.
- if arch.name == "src":
+ # Don't create jobs for src
+ if arch == "src":
continue
job = self.add_job(arch, type=type)
return
update = self.db.get("SELECT update_num AS num FROM builds \
- WHERE update_year = YEAR(NOW()) ORDER BY update_num DESC LIMIT 1")
+ WHERE update_year = EXTRACT(year FROM NOW()) ORDER BY update_num DESC LIMIT 1")
if update:
update_num = update.num + 1
else:
update_num = 1
- self.db.execute("UPDATE builds SET update_year = YEAR(NOW()), update_num = %s \
+ self.db.execute("UPDATE builds SET update_year = EXTRACT(year FROM NOW()), update_num = %s \
WHERE id = %s", update_num, self.id)
## Comment stuff
except ValueError:
pass
else:
- where.append("(DATE(time_created) = %s OR \
- DATE(time_started) = %s OR DATE(time_finished) = %s)")
+ where.append("(time_created::date = %s OR \
+ time_started::date = %s OR time_finished::date = %s)")
args += (date, date, date)
if age:
return Package(self.pakfire, pkg.id, pkg)
+	def create(self, path):
+		"""
+		Creates a database entry for the package file at "path".
+
+		Reads the package metadata, inserts a row into the packages
+		table and stores all dependency and filelist information.
+		Returns the newly created package object.
+		"""
+		# Just check if the file really exist
+		assert os.path.exists(path)
+
+		# Read the package metadata from the file on disk.
+		_pkg = packages.open(pakfire.PakfireServer(), None, path)
+
+		hash_sha512 = misc.calc_hash(path, "sha512")
+		assert hash_sha512
+
+		# Column/value pairs for the INSERT statement below.
+		query = [
+			("name", _pkg.name),
+			("epoch", _pkg.epoch),
+			("version", _pkg.version),
+			("release", _pkg.release),
+			("type", _pkg.type),
+			("arch", _pkg.arch),
+
+			("groups", " ".join(_pkg.groups)),
+			("maintainer", _pkg.maintainer),
+			("license", _pkg.license),
+			("url", _pkg.url),
+			("summary", _pkg.summary),
+			("description", _pkg.description),
+			("size", _pkg.inst_size),
+			("uuid", _pkg.uuid),
+
+			# Build information.
+			("build_id", _pkg.build_id),
+			("build_host", _pkg.build_host),
+			# NOTE(review): assumes build_time is a Unix timestamp — confirm.
+			("build_time", datetime.datetime.utcfromtimestamp(_pkg.build_time)),
+
+			# File "metadata".
+			("path", path),
+			("filesize", os.path.getsize(path)),
+			("hash_sha512", hash_sha512),
+		]
+
+		# Only source packages carry the list of supported architectures.
+		if _pkg.type == "source":
+			query.append(("supported_arches", _pkg.supported_arches))
+
+		keys = []
+		vals = []
+		for key, val in query:
+			keys.append(key)
+			vals.append(val)
+
+		# Build a parameterized INSERT with one %s placeholder per value.
+		_query = "INSERT INTO packages(%s)" % ", ".join(keys)
+		_query += " VALUES(%s) RETURNING *" % ", ".join("%s" for v in vals)
+
+		# Create package entry in the database.
+		pkg = self._get_package(_query, *vals)
+
+		# Dependency information.
+		for d in _pkg.prerequires:
+			pkg.add_dependency("prerequires", d)
+
+		for d in _pkg.requires:
+			pkg.add_dependency("requires", d)
+
+		for d in _pkg.provides:
+			pkg.add_dependency("provides", d)
+
+		for d in _pkg.conflicts:
+			pkg.add_dependency("conflicts", d)
+
+		for d in _pkg.obsoletes:
+			pkg.add_dependency("obsoletes", d)
+
+		# Add all files to filelists table
+		for f in _pkg.filelist:
+			pkg.add_file(f.name, f.size, f.hash1, f.type, f.config, f.mode,
+				f.user, f.group, f.mtime, f.capabilities)
+
+		# Return the newly created object
+		return pkg
+
+
def search(self, pattern, limit=None):
"""
Searches for packages that do match the query.
return pakfire.util.version_compare(self.pakfire,
self.friendly_name, other.friendly_name)
- @classmethod
- def open(cls, _pakfire, path):
- # Just check if the file really does exist.
- assert os.path.exists(path)
-
- p = pakfire.PakfireServer()
- file = packages.open(p, None, path)
-
- hash_sha512 = misc.calc_hash(path, "sha512")
- assert hash_sha512
-
- query = [
- ("name", file.name),
- ("epoch", file.epoch),
- ("version", file.version),
- ("release", file.release),
- ("type", file.type),
- ("arch", file.arch),
-
- ("groups", " ".join(file.groups)),
- ("maintainer", file.maintainer),
- ("license", file.license),
- ("url", file.url),
- ("summary", file.summary),
- ("description", file.description),
- ("size", file.inst_size),
- ("uuid", file.uuid),
-
- # Build information.
- ("build_id", file.build_id),
- ("build_host", file.build_host),
- ("build_time", datetime.datetime.utcfromtimestamp(file.build_time)),
-
- # File "metadata".
- ("path", path),
- ("filesize", os.path.getsize(path)),
- ("hash_sha512", hash_sha512),
- ]
-
- if file.type == "source":
- query.append(("supported_arches", file.supported_arches))
-
- keys = []
- vals = []
- for key, val in query:
- keys.append(key)
- vals.append(val)
-
- _query = "INSERT INTO packages(%s)" % ", ".join(keys)
- _query += " VALUES(%s)" % ", ".join("%s" for v in vals)
-
- # Create package entry in the database.
- id = _pakfire.db.execute(_query, *vals)
-
- # Dependency information.
- deps = []
- for d in file.prerequires:
- deps.append((id, "prerequires", d))
-
- for d in file.requires:
- deps.append((id, "requires", d))
-
- for d in file.provides:
- deps.append((id, "provides", d))
-
- for d in file.conflicts:
- deps.append((id, "conflicts", d))
-
- for d in file.obsoletes:
- deps.append((id, "obsoletes", d))
-
- if deps:
- _pakfire.db.executemany("INSERT INTO packages_deps(pkg_id, type, what) \
- VALUES(%s, %s, %s)", deps)
-
- # Add all files to filelists table.
- filelist = []
- for f in file.filelist:
- if f.config:
- config = "Y"
- else:
- config = "N"
-
- # Convert mtime to integer.
- try:
- mtime = int(f.mtime)
- except ValueError:
- mtime = 0
-
- filelist.append((id, f.name, f.size, f.hash1, f.type, config, f.mode,
- f.user, f.group, datetime.datetime.utcfromtimestamp(mtime),
- f.capabilities))
-
- _pakfire.db.executemany("INSERT INTO filelists(pkg_id, name, size, hash_sha512, \
- type, config, mode, user, group, mtime, capabilities) \
- VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)", filelist)
-
- # Return the newly created object.
- return cls(_pakfire, id)
-
def delete(self):
self.db.execute("INSERT INTO queue_delete(path) VALUES(%s)", self.path)
def size(self):
return self.data.size
+	def add_dependency(self, type, what):
+		"""
+		Stores one dependency of the given type (e.g. "requires",
+		"provides", ...) for this package in the packages_deps table.
+		"""
+		self.db.execute("INSERT INTO packages_deps(pkg_id, type, what) \
+			VALUES(%s, %s, %s)", self.id, type, what)
+
def has_deps(self):
"""
Returns True if the package has got dependencies.
return self._filelist
+ def add_file(self, name, size, hash_sha512, type, config, mode, user, group, mtime, capabilities):
+ # Convert mtime from seconds since epoch to datetime
+ mtime = datetime.datetime.utcfromtimestamp(float(mtime))
+
+ self.db.execute("INSERT INTO filelists(pkg_id, name, size, hash_sha512, type, config, mode, \
+ \"user\", \"group\", mtime, capabilities) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
+ self.id, name, size, hash_sha512, type, config, mode, user, group, mtime, capabilities)
+
def get_file(self):
path = os.path.join(PACKAGES_DIR, self.path)