;
;gpgkey = /not/yet/existant
-[ipfire-development]
-description = IPFire Development Repository
+;[ipfire-development]
+;description = IPFire Development Repository
#url = http://mirror0.ipfire.org/pakfire3/$name/$arch
#url = file:///ipfire-3.x/build/packages/i686
-url = file:///tmp/pakfire3
+;url = file:///tmp/pakfire3
-gpgkey = /not/yet/existant
+;gpgkey = /not/yet/existant
[testingbay]
description = IPFire Testing Repository
		# Update all indexes of the repositories (not forced) so that we
		# always work with valid data.
- self.repos.update_indexes()
+ self.repos.update()
def check_root_user(self):
if not os.getuid() == 0 or not os.getgid() == 0:
pkgs = []
for pattern in patterns:
- pkgs += self.repos.get_by_provides(pattern)
+ requires = depsolve.Requires(None, pattern)
+ pkgs += self.repos.get_by_provides(requires)
pkgs = packages.PackageListing(pkgs)
#pkgs.unique()
path=path,
)
- # Create a new temporary repository.
- repo.index.create_database()
-
for input_path in input_paths:
repo._collect_packages(input_path)
repo.save()
-
- # Destroy the temporary database.
- repo.index.destroy_database()
BUFFER_SIZE = 1024**2
+MIRRORLIST_MAXSIZE = 1024**2
+
METADATA_FORMAT = 0
METADATA_DOWNLOAD_LIMIT = 1024**2
METADATA_DOWNLOAD_PATH = "repodata"
class Package(object):
type = None # either "bin", "src" or "virt"
- def __init__(self, pakfire, repo):
+ def __init__(self, pakfire, repo=None):
self.pakfire = pakfire
self._repo = repo
ret = util.version_compare(self.version_tuple, other.version_tuple)
# Compare the build times if we have a rebuilt package.
- if not ret:
+ if not ret and self.build_time and other.build_time:
ret = cmp(self.build_time, other.build_time)
#if ret == 0:
@property
def repo(self):
- return self._repo
+ if self._repo:
+ return self._repo
+
+ # By default, every package is connected to a dummy repository
+ return self.pakfire.repos.dummy
@property
def name(self):
A faster version to find a file in the database.
"""
c = self.db.cursor()
- c.execute("SELECT pkg FROM files WHERE name = ?", (requires.requires,))
+ c.execute("SELECT * FROM files WHERE name = ? AND pkg = ?",
+ (requires.requires, self.id))
ret = False
for pkg in c:
- if self.id == pkg[0]:
- ret = True
- break
+ ret = True
+ break
c.close()
from urlgrabber.progress import TextMeter
import packager
-import pakfire.repository as repository
from base import Package
from source import SourcePackage
class Makefile(Package):
def __init__(self, pakfire, filename):
- repo = repository.DummyRepository(pakfire)
-
- Package.__init__(self, pakfire, repo)
+ Package.__init__(self, pakfire)
self.filename = filename
@property
+++ /dev/null
-#!/usr/bin/python
-
-import fnmatch
-import logging
-import os
-import shutil
-import stat
-import time
-
-from ConfigParser import ConfigParser
-
-import base
-import database
-import downloader
-import index
-import packages
-import util
-
-from constants import *
-
-class Repositories(object):
- """
- Class that loads all repositories from the configuration files.
-
- This is the place where repositories can be activated or deactivated.
- """
-
- def __init__(self, pakfire):
- self.pakfire = pakfire
-
- self.config = pakfire.config
- self.distro = pakfire.distro
-
- # Place to store the repositories
- self._repos = []
-
- # Create the local repository
- self.local = InstalledRepository(self.pakfire)
- self.add_repo(self.local)
-
- # If we running in build mode, we include our local build repository.
- #if self.pakfire.builder:
- if True:
- self.local_build = LocalBuildRepository(self.pakfire)
- self.add_repo(self.local_build)
-
- for repo_name, repo_args in self.config.get_repos():
- self._parse(repo_name, repo_args)
-
- def __len__(self):
- """
- Return the count of enabled repositories.
- """
- i = 0
- for repo in self.enabled:
- i += 1
-
- return i
-
- def _parse(self, name, args):
- # XXX need to make variable expansion
-
- _args = {
- "name" : name,
- "enabled" : True,
- "gpgkey" : None,
- "mirrorlist" : None,
- }
- _args.update(args)
-
- repo = RemoteRepository(self.pakfire, **_args)
-
- self.add_repo(repo)
-
- def add_repo(self, repo):
- self._repos.append(repo)
- self._repos.sort()
-
- @property
- def enabled(self):
- for repo in self._repos:
- if not repo.enabled:
- continue
-
- yield repo
-
- def disable_repo(self, name):
- for repo in self.enabled:
- if repo.name == name:
- logging.debug("Disabled repository '%s'" % repo.name)
- repo.enabled = False
- continue
-
- def update_indexes(self, force=False):
- logging.debug("Updating all repository indexes (force=%s)" % force)
-
- # XXX update all indexes if necessary or forced
- for repo in self.enabled:
- repo.update_index(force=force)
-
- def get_all(self):
- for repo in self.enabled:
- for pkg in repo.get_all():
- yield pkg
-
- def get_by_name(self, name):
- for repo in self.enabled:
- for pkg in repo.get_by_name(name):
- yield pkg
-
- def get_by_glob(self, pattern):
- for repo in self.enabled:
- for pkg in repo.get_by_glob(pattern):
- yield pkg
-
- def get_by_provides(self, requires):
- for repo in self.enabled:
- for pkg in repo.get_by_provides(requires):
- yield pkg
-
- def get_by_file(self, filename):
- for repo in self.enabled:
- for pkg in repo.get_by_file(filename):
- yield pkg
-
- def search(self, pattern):
- pkg_names = []
-
- for repo in self.enabled:
- for pkg in repo.search(pattern):
- if pkg.name in pkg_names:
- continue
-
- pkg_names.append(pkg.name)
- yield pkg
-
-
-class RepositoryFactory(object):
- def __init__(self, pakfire, name, description):
- self.pakfire = pakfire
-
- self.name, self.description = name, description
-
- # All repositories are enabled by default
- self.enabled = True
-
- # Add link to distro object
- self.distro = pakfire.distro #distro.Distribution()
-
- def __repr__(self):
- return "<%s %s>" % (self.__class__.__name__, self.name)
-
- def __cmp__(self, other):
- return cmp(self.priority * -1, other.priority * -1) or \
- cmp(self.name, other.name)
-
- @property
- def priority(self):
- raise NotImplementedError
-
- @property
- def local(self):
- """
- Say if a repository is a local one or remotely located.
-
- Used to check if we need to download files.
- """
- return False
-
- def update_index(self, force=False):
- """
- A function that is called to update the local data of
- the repository.
- """
- pass
-
- def get_all(self):
- """
- Simply returns an instance of every package in this repository.
- """
- for pkg in self.packages:
- yield pkg
-
- def get_by_name(self, name):
- for pkg in self.packages:
- if pkg.name == name:
- yield pkg
-
- def get_by_glob(self, pattern):
- """
- Returns a list of all packages that names match the glob pattern
- that is provided.
- """
- for pkg in self.packages:
- if fnmatch.fnmatch(pkg.name, pattern):
- yield pkg
-
- def get_by_provides(self, requires):
- """
- Returns a list of all packages that offer a matching "provides"
- of the given "requires".
- """
- for pkg in self.packages:
- if pkg.does_provide(requires):
- yield pkg
-
- def get_by_file(self, filename):
- for pkg in self.packages:
- if filename in pkg.filelist:
- yield pkg
-
- def search(self, pattern):
- """
- Returns a list of packages, that match the given pattern,
- which can be either a part of the name, summary or description
- or can be a glob pattern that matches one of these.
- """
- for pkg in self.packages:
- for item in (pkg.name, pkg.summary, pkg.description):
- if pattern.lower() in item.lower() or \
- fnmatch.fnmatch(item, pattern):
- yield pkg
-
- @property
- def packages(self):
- """
- Returns all packages.
- """
- return self.index.packages
-
-
-class DummyRepository(RepositoryFactory):
- """
- Just a dummy repository that actually does nothing.
- """
- def __init__(self, pakfire):
- RepositoryFactory.__init__(self, pakfire, "dummy",
- "This is a dummy repository.")
-
-
-class FileSystemRepository(RepositoryFactory):
- """
- Dummy repository to indicate that a specific package came from the
- filesystem.
- """
- def __init__(self, pakfire):
- RepositoryFactory.__init__(self, pakfire, "filesystem",
- "Filesystem repository")
-
-
-class LocalRepository(RepositoryFactory):
- def __init__(self, pakfire, name, description, path):
- RepositoryFactory.__init__(self, pakfire, name, description)
-
- # Save location of the repository and create it if not existant.
- self.path = path
- if not os.path.exists(self.path):
- os.makedirs(self.path)
-
- self.index = index.DatabaseIndex(self.pakfire, self)
-
- @property
- def local(self):
- # This is obviously local.
- return True
-
- @property
- def priority(self):
- """
- The local repository has always a high priority.
- """
- return 10
-
- # XXX need to implement better get_by_name
-
- def _collect_packages(self, path):
- logging.info("Collecting packages from %s." % path)
-
- for dir, subdirs, files in os.walk(path):
- for file in files:
- if not file.endswith(".%s" % PACKAGE_EXTENSION):
- continue
-
- file = os.path.join(dir, file)
-
- pkg = packages.BinaryPackage(self.pakfire, self, file)
- self._add_package(pkg)
-
- def _add_package(self, pkg):
- # XXX gets an instance of binary package and puts it into the
- # repo location if not done yet
- # then: the package gets added to the index
-
- if not isinstance(pkg, packages.BinaryPackage):
- raise Exception
-
- repo_filename = os.path.join(self.path, os.path.basename(pkg.filename))
-
- # Do we need to copy the package files?
- copy = True
-
- pkg_exists = None
- if os.path.exists(repo_filename):
- pkg_exists = packages.BinaryPackage(self.pakfire, self, repo_filename)
-
- # If package in the repo is equivalent to the given one, we can
- # skip any further processing.
- if pkg == pkg_exists:
- logging.debug("The package does already exist in this repo: %s" % pkg.friendly_name)
- copy = False
-
- if copy:
- logging.debug("Copying package '%s' to repository." % pkg.friendly_name)
- repo_dirname = os.path.dirname(repo_filename)
- if not os.path.exists(repo_dirname):
- os.makedirs(repo_dirname)
-
- # Try to use a hard link if possible, if we cannot do that we simply
- # copy the file.
- try:
- os.link(pkg.filename, repo_filename)
- except OSError:
- shutil.copy2(pkg.filename, repo_filename)
-
- # Create new package object, that is connected to this repository
- # and so we can do stuff.
- pkg = packages.BinaryPackage(self.pakfire, self, repo_filename)
-
- logging.info("Adding package '%s' to repository." % pkg.friendly_name)
- self.index.add_package(pkg)
-
- def save(self, path=None):
- """
- Save the index information to path.
- """
- self.index.save(path)
-
-
-class InstalledRepository(RepositoryFactory):
- def __init__(self, pakfire):
- RepositoryFactory.__init__(self, pakfire, "installed", "Installed packages")
-
- self.index = index.InstalledIndex(self.pakfire, self)
-
- @property
- def local(self):
- # This is obviously local.
- return True
-
- @property
- def priority(self):
- """
- The installed repository has always the highest priority.
- """
- return 0
-
-
-class LocalBuildRepository(LocalRepository):
- def __init__(self, pakfire):
- RepositoryFactory.__init__(self, pakfire, "build", "Locally built packages")
-
- self.path = self.pakfire.config.get("local_build_repo_path")
- if not os.path.exists(self.path):
- os.makedirs(self.path)
-
- self.index = index.DirectoryIndex(self.pakfire, self, self.path)
-
- @property
- def priority(self):
- return 20000
-
-
-class RepositoryCache(object):
- """
- An object that is able to cache all data that is loaded from a
- remote repository.
- """
-
- def __init__(self, pakfire, repo):
- self.pakfire = pakfire
- self.repo = repo
-
- self.create()
-
- @property
- def path(self):
- return os.path.join(REPO_CACHE_DIR, self.pakfire.distro.release, \
- self.repo.name, self.repo.arch)
-
- def abspath(self, path):
- return os.path.join(self.path, path)
-
- def create(self):
- """
- Create all necessary directories.
- """
- for path in ("mirrors", "packages", "repodata"):
- path = self.abspath(path)
-
- if not os.path.exists(path):
- os.makedirs(path)
-
- def exists(self, filename):
- """
- Returns True if a file exists and False if it doesn't.
- """
- return os.path.exists(self.abspath(filename))
-
- def age(self, filename):
- """
- Returns the age of a downloaded file in minutes.
- i.e. the time from download until now.
- """
- if not self.exists(filename):
- return None
-
- filename = self.abspath(filename)
-
- # Creation time of the file
- ctime = os.stat(filename)[stat.ST_CTIME]
-
- return (time.time() - ctime) / 60
-
- def open(self, filename, *args, **kwargs):
- filename = self.abspath(filename)
-
- return open(filename, *args, **kwargs)
-
- def verify(self, filename, hash1):
- """
- Return a bool that indicates if a file matches the given hash.
- """
- return util.calc_hash1(self.abspath(filename)) == hash1
-
- def remove(self, filename):
- """
- Remove a file from cache.
- """
- if not self.exists(filename):
- return
-
- filename = self.abspath(filename)
- os.unlink(filename)
-
-
-class RemoteRepository(RepositoryFactory):
- def __init__(self, pakfire, name, description, url, mirrorlist, gpgkey, enabled):
- RepositoryFactory.__init__(self, pakfire, name, description)
-
- self.url, self.gpgkey = url, gpgkey
- self.mirrorlist = mirrorlist
-
- if enabled in (True, 1, "1", "yes", "y"):
- self.enabled = True
- else:
- self.enabled = False
-
- # A place to cache the download grabber for this repo.
- self.__grabber = None
-
- # Create a cache for the repository where we can keep all temporary data.
- self.cache = RepositoryCache(self.pakfire, self)
-
- # Initialize mirror servers.
- self.mirrors = downloader.MirrorList(self.pakfire, self)
-
- if self.local:
- self.index = index.DirectoryIndex(self.pakfire, self, self.url)
- else:
- self.index = index.DatabaseIndex(self.pakfire, self)
-
- logging.debug("Created new repository(name='%s', url='%s', enabled='%s')" % \
- (self.name, self.url, self.enabled))
-
- def __repr__(self):
- return "<%s %s>" % (self.__class__.__name__, self.url)
-
- @property
- def local(self):
- # If files are located somewhere in the filesystem we assume it is
- # local.
- if self.url.startswith("file://"):
- return True
-
- # Otherwise not.
- return False
-
- @property
- def arch(self):
- return self.pakfire.distro.arch
-
- @property
- def path(self):
- if self.local:
- return self.url[7:]
-
- return self.cache.path
-
- @property
- def priority(self):
- priority = 100
-
- url2priority = {
- "file://" : 50,
- "http://" : 75,
- }
-
- for url, prio in url2priority.items():
- if self.url.startswith(url):
- priority = prio
- break
-
- return priority
-
- def update_index(self, force=False):
- if self.index:
- self.index.update(force=force)
-
- def save_index(self, path=None):
- self.index.save(path)
-
- #def get_all(self, requires):
- # for pkg in self.index.get_all():
- # if pkg.does_provide(requires):
- # yield pkg
-
- def get_by_file(self, filename):
- return self.index.get_by_file(filename)
-
--- /dev/null
+#!/usr/bin/python
+
+import logging
+
+from installed import InstalledRepository
+from local import LocalRepository, LocalBuildRepository
+from oddments import DummyRepository
+from remote import RemoteRepository
+
+class Repositories(object):
+ """
+ Class that loads all repositories from the configuration files.
+
+ This is the place where repositories can be activated or deactivated.
+ """
+
+ def __init__(self, pakfire):
+ self.pakfire = pakfire
+
+ self.config = pakfire.config
+ self.distro = pakfire.distro
+
+ # Place to store the repositories
+ self._repos = []
+
+ # Create a dummy repository
+ self.dummy = DummyRepository(self.pakfire)
+
+ # Create the local repository
+ self.local = InstalledRepository(self.pakfire)
+ self.add_repo(self.local)
+
+		# If we are running in build mode, we include our local build repository.
+ if self.pakfire.builder:
+ self.local_build = LocalBuildRepository(self.pakfire)
+ self.add_repo(self.local_build)
+
+ for repo_name, repo_args in self.config.get_repos():
+ self._parse(repo_name, repo_args)
+
+ def __len__(self):
+ """
+ Return the count of enabled repositories.
+ """
+ i = 0
+ for repo in self.enabled:
+ i += 1
+
+ return i
+
+ def _parse(self, name, args):
+ # XXX need to make variable expansion
+
+ _args = {
+ "name" : name,
+ "enabled" : True,
+ "gpgkey" : None,
+ "mirrorlist" : None,
+ }
+ _args.update(args)
+
+ repo = RemoteRepository(self.pakfire, **_args)
+
+ self.add_repo(repo)
+
+ def add_repo(self, repo):
+ self._repos.append(repo)
+ self._repos.sort()
+
+ @property
+ def enabled(self):
+ for repo in self._repos:
+ if not repo.enabled:
+ continue
+
+ yield repo
+
+ def disable_repo(self, name):
+ for repo in self.enabled:
+ if repo.name == name:
+ logging.debug("Disabled repository '%s'" % repo.name)
+ repo.enabled = False
+ continue
+
+ def update(self, force=False):
+ logging.debug("Updating all repository indexes (force=%s)" % force)
+
+ # XXX update all indexes if necessary or forced
+ for repo in self.enabled:
+ repo.update(force=force)
+
+ #def get_all(self):
+ # for repo in self.enabled:
+ # for pkg in repo.get_all():
+ # yield pkg
+
+ def get_by_name(self, name):
+ for repo in self.enabled:
+ for pkg in repo.get_by_name(name):
+ yield pkg
+
+ def get_by_glob(self, pattern):
+ for repo in self.enabled:
+ for pkg in repo.get_by_glob(pattern):
+ yield pkg
+
+ def get_by_provides(self, requires):
+ if requires.type == "file":
+ for pkg in self.get_by_file(requires.requires):
+ yield pkg
+
+ else:
+ for repo in self.enabled:
+ for pkg in repo.get_by_provides(requires):
+ yield pkg
+
+ def get_by_file(self, filename):
+ for repo in self.enabled:
+ for pkg in repo.get_by_file(filename):
+ yield pkg
+
+ def search(self, pattern):
+ pkg_names = []
+
+ for repo in self.enabled:
+ for pkg in repo.search(pattern):
+ if pkg.name in pkg_names:
+ continue
+
+ pkg_names.append(pkg.name)
+ yield pkg
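
A usage sketch, not part of the patch: assuming `pakfire` is an already initialized Pakfire instance, the new Repositories front-end refreshes every enabled index and routes dependency lookups across them (file-type requirements are answered through get_by_file()). Whether the example string below is treated as a file requirement depends on the Requires implementation, which is not shown here.

import pakfire.depsolve as depsolve
import pakfire.repository as repository

repos = repository.Repositories(pakfire)
repos.update(force=False)                  # refresh every enabled index

# Find all packages that satisfy a requirement.
requires = depsolve.Requires(None, "/usr/bin/python")
for pkg in repos.get_by_provides(requires):
	print pkg.friendly_name
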
--- /dev/null
+#!/usr/bin/python
+
+import fnmatch
+import glob
+
+class RepositoryFactory(object):
+ def __init__(self, pakfire, name, description):
+ self.pakfire = pakfire
+
+ self.name, self.description = name, description
+
+ # All repositories are enabled by default
+ self.enabled = True
+
+ def __repr__(self):
+ return "<%s %s>" % (self.__class__.__name__, self.name)
+
+ def __cmp__(self, other):
+ return cmp(self.priority * -1, other.priority * -1) or \
+ cmp(self.name, other.name)
+
+ @property
+ def distro(self):
+ """
+ Link to distro object.
+ """
+ return self.pakfire.distro
+
+ @property
+ def priority(self):
+ raise NotImplementedError
+
+ @property
+ def local(self):
+ """
+ Say if a repository is a local one or remotely located.
+
+ Used to check if we need to download files.
+ """
+ return False
+
+ def update(self, force=False):
+ """
+ A function that is called to update the local data of
+ the repository.
+ """
+ if hasattr(self, "index"):
+ self.index.update(force)
+
+ def get_all(self):
+ """
+ Simply returns an instance of every package in this repository.
+ """
+ for pkg in self.packages:
+ yield pkg
+
+ def get_by_name(self, name):
+ for pkg in self.packages:
+ if pkg.name == name:
+ yield pkg
+
+ def get_by_glob(self, pattern):
+ """
+			Returns a list of all packages whose names match the given
+			glob pattern.
+ """
+ for pkg in self.packages:
+ if fnmatch.fnmatch(pkg.name, pattern):
+ yield pkg
+
+ def get_by_provides(self, requires):
+ """
+			Returns a list of all packages whose "provides" matches the
+			given "requires".
+ """
+ for pkg in self.packages:
+ if pkg.does_provide(requires):
+ yield pkg
+
+ def get_by_file(self, filename):
+ for pkg in self.packages:
+ if filename in pkg.filelist:
+ yield pkg
+
+ def search(self, pattern):
+ """
+			Returns a list of packages that match the given pattern, which
+			can be a part of the name, summary or description, or a glob
+			pattern that matches one of these.
+ """
+ for pkg in self.packages:
+ for item in (pkg.name, pkg.summary, pkg.description):
+ if pattern.lower() in item.lower() or \
+ fnmatch.fnmatch(item, pattern):
+ yield pkg
+
+ @property
+ def packages(self):
+ """
+ Returns all packages.
+ """
+ return self.index.packages
--- /dev/null
+#!/usr/bin/python
+
+import os
+import stat
+import time
+
+import index
+
+import pakfire.util as util
+from pakfire.constants import *
+
+class RepositoryCache(object):
+ """
+ An object that is able to cache all data that is loaded from a
+ remote repository.
+ """
+
+ def __init__(self, pakfire, repo):
+ self.pakfire = pakfire
+ self.repo = repo
+
+ self.create()
+
+ # Initialize index of cache.
+ self.index = index.DirectoryIndex(self.pakfire, self.repo,
+ os.path.join(self.path, "packages"))
+
+ @property
+ def packages(self):
+ return self.index.packages
+
+ @property
+ def path(self):
+ return os.path.join(REPO_CACHE_DIR, self.pakfire.distro.release, \
+ self.repo.name, self.repo.arch)
+
+ def abspath(self, path):
+ return os.path.join(self.path, path)
+
+ def create(self):
+ """
+ Create all necessary directories.
+ """
+ for path in ("mirrors", "packages", "repodata"):
+ path = self.abspath(path)
+
+ if not os.path.exists(path):
+ os.makedirs(path)
+
+ def exists(self, filename):
+ """
+ Returns True if a file exists and False if it doesn't.
+ """
+ return os.path.exists(self.abspath(filename))
+
+ def age(self, filename):
+ """
+			Returns the age of a downloaded file in minutes, i.e. the time
+			that has passed since it was downloaded.
+ """
+ if not self.exists(filename):
+ return None
+
+ filename = self.abspath(filename)
+
+ # Creation time of the file
+ ctime = os.stat(filename)[stat.ST_CTIME]
+
+ return (time.time() - ctime) / 60
+
+ def open(self, filename, *args, **kwargs):
+ filename = self.abspath(filename)
+
+ return open(filename, *args, **kwargs)
+
+ def verify(self, filename, hash1):
+ """
+ Return a bool that indicates if a file matches the given hash.
+ """
+ return util.calc_hash1(self.abspath(filename)) == hash1
+
+ def remove(self, filename):
+ """
+ Remove a file from cache.
+ """
+ if not self.exists(filename):
+ return
+
+ filename = self.abspath(filename)
+ os.unlink(filename)
+
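
For illustration only, not part of the patch: how the cache helpers above might be used to decide whether a cached file is still usable. `cache` is assumed to be a RepositoryCache instance; the filename, the 60-minute limit and `expected_hash1` are made-up placeholders.

filename = "repodata/metadata.json"        # hypothetical cached file

age = cache.age(filename)                  # minutes since download, or None if missing
if age is None or age > 60:
	pass                                   # (re)download the file into the cache here
elif not cache.verify(filename, expected_hash1):
	cache.remove(filename)                 # drop a corrupted download
else:
	data = cache.open(filename).read()
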
import sqlite3
import time
-import packages
-
-from constants import *
+from pakfire.constants import *
class Cursor(sqlite3.Cursor):
def execute(self, *args, **kwargs):
if not self._db:
logging.debug("Open database %s" % self.filename)
- dirname = os.path.dirname(self.filename)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
+ database_exists = False
+
+ if not self.filename == ":memory:":
+ dirname = os.path.dirname(self.filename)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
- database_exists = os.path.exists(self.filename)
+ database_exists = os.path.exists(self.filename)
# Make a connection to the database.
self._db = sqlite3.connect(self.filename)
def cursor(self):
return self._db.cursor(Cursor)
+ def executescript(self, *args, **kwargs):
+ return self._db.executescript(*args, **kwargs)
+
def save(self, path):
"""
- Save (means copy) the database to path.
+ Save a copy of this database to a new one located at path.
"""
- # Commit all data in memory to the database.
- self.commit()
+ db2 = Database(self.pakfire, path)
+
+ script = ""
+ for line in self._db.iterdump():
+ script += "%s\n" % line
+
+ db2.executescript(script)
+ db2.commit()
- # Copy the file.
- shutil.copy(self.filename, path)
+ db2.close()
class PackageDatabase(Database):
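
The new save() above copies a database by replaying its SQL dump into a second connection, which also works for ":memory:" databases. A stand-alone sketch of the same technique using plain sqlite3, independent of the pakfire Database wrapper:

import sqlite3

src = sqlite3.connect(":memory:")
src.execute("CREATE TABLE packages(name TEXT)")
src.execute("INSERT INTO packages VALUES ('beep')")
src.commit()

dst = sqlite3.connect("/tmp/packages-copy.db")
dst.executescript("\n".join(src.iterdump()))    # replay the full SQL dump
dst.commit()
dst.close()
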
from urlgrabber.mirror import MirrorGroup
from urlgrabber.progress import TextMeter
-from constants import *
-
-MIRRORLIST_MAXSIZE = 1024**2
+from pakfire.constants import *
class PakfireGrabber(URLGrabber):
"""
import os
import random
import shutil
+import time
import zlib
import database
import downloader
import metadata
-import packages
-import repository
-import util
-from constants import *
-from i18n import _
+import pakfire.packages as packages
+import pakfire.util as util
+
+from pakfire.constants import *
+from pakfire.i18n import _
class Index(object):
def __init__(self, pakfire, repo):
self.pakfire = pakfire
self.repo = repo
- self.arch = self.pakfire.distro.arch # XXX ???
-
self._packages = []
- def get_all(self):
- for package in self.packages:
- yield package
+ @property
+ def arch(self):
+ return self.pakfire.distro.arch
def get_all_by_name(self, name):
for package in self.packages:
if package.name == name:
yield package
- def get_latest_by_name(self, name):
- p = [p for p in self.get_all_by_name(name)]
- if not p:
- return
-
- # Get latest version of the package to the bottom of
- # the list.
- p.sort()
-
- # Return the last one.
- return p[-1]
-
def get_by_file(self, filename):
for pkg in self.packages:
if filename in pkg.filelist:
if pkg.uuid == uuid:
return pkg
+ def get_by_provides(self, requires):
+ for pkg in self.packages:
+ if pkg.does_provide(requires):
+ yield pkg
+
@property
def packages(self):
for pkg in self._packages:
yield pkg
def update(self, force=False):
- raise NotImplementedError
+ pass
def add_package(self, pkg):
raise NotImplementedError
- def tag_db(self):
- raise NotImplementedError
-
class DirectoryIndex(Index):
def __init__(self, pakfire, repo, path):
db.close()
-class InstalledIndex(Index):
+class DatabaseIndexFactory(Index):
def __init__(self, pakfire, repo):
Index.__init__(self, pakfire, repo)
- # Open the database.
- self.db = database.LocalPackageDatabase(self.pakfire)
+ # Add empty reference to a fictional database.
+ self.db = None
- def _get_from_cache(self, pkg):
- """
- Check if package is already in cache and return an instance of
- BinaryPackage instead.
- """
- if hasattr(self.repo, "cache"):
- filename = os.path.join("packages", os.path.basename(pkg.filename))
+ self.open_database()
- if self.repo.cache.exists(filename):
- filename = self.repo.cache.abspath(filename)
+ def open_database(self):
+ raise NotImplementedError
+
+ @property
+ def packages(self):
+ c = self.db.cursor()
+ c.execute("SELECT * FROM packages")
- pkg = packages.BinaryPackage(self.pakfire, self.repo, filename)
+ for pkg in c:
+ yield packages.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
- return pkg
+ c.close()
def add_package(self, pkg, reason=None):
return self.db.add_package(pkg, reason)
def get_by_id(self, id):
c = self.db.cursor()
- c.execute("SELECT uuid FROM packages WHERE id = ?", (id,))
+ c.execute("SELECT * FROM packages WHERE id = ? LIMIT 1", (id,))
+ ret = None
for pkg in c:
- break
+ ret = packages.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
c.close()
- return self.get_by_uuid(pkg["uuid"])
+ return ret
def get_by_file(self, filename):
c = self.db.cursor()
c.execute("SELECT pkg FROM files WHERE name = ?", (filename,))
- for file in c:
- pkg = self.get_by_id(file["pkg"])
- if pkg:
- yield pkg
+ for pkg in c:
+ yield self.get_by_id(pkg["pkg"])
c.close()
-class DatabaseIndex(InstalledIndex):
- def __init__(self, pakfire, repo):
- Index.__init__(self, pakfire, repo)
+class InstalledIndex(DatabaseIndexFactory):
+ def open_database(self):
+ # Open the local package database.
+ self.db = database.LocalPackageDatabase(self.pakfire)
+
+
+class LocalIndex(DatabaseIndexFactory):
+ def open_database(self):
+ self.db = database.RemotePackageDatabase(self.pakfire, ":memory:")
+
+ def save(self, path=None, compress="xz"):
+ """
+ This function saves the database and metadata to path so it can
+ be exported to a remote repository.
+ """
+ if not path:
+ path = self.repo.path
- # Initialize with no content.
- self.db, self.metadata = None, None
+ # Create filenames
+ metapath = os.path.join(path, METADATA_DOWNLOAD_PATH)
+ db_path = os.path.join(metapath, METADATA_DATABASE_FILE)
+ md_path = os.path.join(metapath, METADATA_DOWNLOAD_FILE)
- def create_database(self):
- filename = "/tmp/.%s-%s" % (random.randint(0, 1024**2), METADATA_DATABASE_FILE)
+ if not os.path.exists(metapath):
+ os.makedirs(metapath)
- self.db = database.RemotePackageDatabase(self.pakfire, filename)
+ else:
+ # If a database is present, we remove it because we want to start
+ # with a clean environment.
+ if os.path.exists(db_path):
+ os.unlink(db_path)
- def destroy_database(self):
- if self.db:
- self.db.close()
+ # Save the database to path and get the filename.
+ self.db.save(db_path)
- os.unlink(self.db.filename)
+		# Give the database file a unique, hash-based name so that we won't
+		# get into any trouble with caching proxies.
+ db_hash = util.calc_hash1(db_path)
- def load_database(self):
- """
- Read all packages into RAM.
- """
- self._packages = []
+ db_path2 = os.path.join(os.path.dirname(db_path),
+ "%s-%s" % (db_hash, os.path.basename(db_path)))
- c = self.db.cursor()
- c.execute("SELECT * FROM packages")
+ # Compress the database.
+ if compress:
+ i = open(db_path)
+ os.unlink(db_path)
- for pkg in c:
- pkg = packages.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
+ o = open(db_path, "w")
- # Try to get package from cache.
- self._packages.append(self._get_from_cache(pkg))
+ # Choose a compressor.
+ if compress == "xz":
+ comp = lzma.LZMACompressor()
+ elif compress == "zlib":
+ comp = zlib.compressobj(9)
- c.close()
+ buf = i.read(BUFFER_SIZE)
+ while buf:
+ o.write(comp.compress(buf))
+
+ buf = i.read(BUFFER_SIZE)
+
+ o.write(comp.flush())
+
+ i.close()
+ o.close()
+
+ if not os.path.exists(db_path2):
+ shutil.move(db_path, db_path2)
+
+		# Create a new metadata object and add our information to it.
+ md = metadata.Metadata(self.pakfire, self)
+
+ # Save name of the hashed database to the metadata.
+ md.database = os.path.basename(db_path2)
+ md.database_hash1 = db_hash
+ md.database_compression = compress
+
+		# Save the metadata to the repository.
+ md.save(md_path)
+
+
+class RemoteIndex(DatabaseIndexFactory):
+ def open_database(self):
+ self.update(force=False)
def _update_metadata(self, force):
# Shortcut to repository cache.
# (Re-)open the database.
self.db = database.RemotePackageDatabase(self.pakfire,
cache.abspath(filename))
- self.load_database()
def update(self, force=False):
"""
# XXX this code needs lots of work:
# XXX * check the metadata content
-
- def save(self, path=None, compress="xz"):
- """
- This function saves the database and metadata to path so it can
- be exported to a remote repository.
- """
- if not path:
- path = self.repo.path
-
- # Create filenames
- metapath = os.path.join(path, METADATA_DOWNLOAD_PATH)
- db_path = os.path.join(metapath, METADATA_DATABASE_FILE)
- md_path = os.path.join(metapath, METADATA_DOWNLOAD_FILE)
-
- if not os.path.exists(metapath):
- os.makedirs(metapath)
-
- # Save the database to path and get the filename.
- self.db.save(db_path)
-
- # Make a reference to the database file that it will get a unique name
- # so we won't get into any trouble with caching proxies.
- db_hash = util.calc_hash1(db_path)
-
- db_path2 = os.path.join(os.path.dirname(db_path),
- "%s-%s" % (db_hash, os.path.basename(db_path)))
-
- # Compress the database.
- if compress:
- i = open(db_path)
- os.unlink(db_path)
-
- o = open(db_path, "w")
-
- # Choose a compressor.
- if compress == "xz":
- comp = lzma.LZMACompressor()
- elif compress == "zlib":
- comp = zlib.compressobj(9)
-
- buf = i.read(BUFFER_SIZE)
- while buf:
- o.write(comp.compress(buf))
-
- buf = i.read(BUFFER_SIZE)
-
- o.write(comp.flush())
-
- i.close()
- o.close()
-
- if not os.path.exists(db_path2):
- shutil.move(db_path, db_path2)
-
- # Create a new metadata object and add out information to it.
- md = metadata.Metadata(self.pakfire, self)
-
- # Save name of the hashed database to the metadata.
- md.database = os.path.basename(db_path2)
- md.database_hash1 = db_hash
- md.database_compression = compress
-
- # Save metdata to repository.
- md.save(md_path)
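
LocalIndex.save() streams the exported database through a compressor object in BUFFER_SIZE chunks. The same pattern, reduced to plain zlib so it runs on its own; the paths are placeholders, and the real code prefers xz via lzma.LZMACompressor:

import zlib

BUFFER_SIZE = 1024**2

comp = zlib.compressobj(9)
i = open("/tmp/packages.db", "rb")
o = open("/tmp/packages.db.z", "wb")

buf = i.read(BUFFER_SIZE)
while buf:
	o.write(comp.compress(buf))
	buf = i.read(BUFFER_SIZE)

o.write(comp.flush())                      # emit the final compressed block
i.close()
o.close()
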
--- /dev/null
+#!/usr/bin/python
+
+import index
+
+from base import RepositoryFactory
+
+class InstalledRepository(RepositoryFactory):
+ def __init__(self, pakfire):
+ RepositoryFactory.__init__(self, pakfire, "installed", "Installed packages")
+
+ self.index = index.InstalledIndex(self.pakfire, self)
+
+ @property
+ def local(self):
+ # This is obviously local.
+ return True
+
+ @property
+ def priority(self):
+ """
+			The installed repository always has the highest priority.
+ """
+ return 0
--- /dev/null
+#!/usr/bin/python
+
+import logging
+import os
+import shutil
+
+import pakfire.packages as packages
+
+import index
+
+from base import RepositoryFactory
+
+from pakfire.constants import *
+
+class LocalRepository(RepositoryFactory):
+ def __init__(self, pakfire, name, description, path):
+ RepositoryFactory.__init__(self, pakfire, name, description)
+
+		# Save the location of the repository and create it if it does not exist.
+ self.path = path
+ if not os.path.exists(self.path):
+ os.makedirs(self.path)
+
+ self.index = index.LocalIndex(self.pakfire, self)
+
+ @property
+ def local(self):
+ # This is obviously local.
+ return True
+
+ @property
+ def priority(self):
+ """
+			The local repository always has a high priority.
+ """
+ return 10
+
+ def _collect_packages(self, path):
+ logging.info("Collecting packages from %s." % path)
+
+ for dir, subdirs, files in os.walk(path):
+ for file in files:
+ if not file.endswith(".%s" % PACKAGE_EXTENSION):
+ continue
+
+ file = os.path.join(dir, file)
+
+ pkg = packages.BinaryPackage(self.pakfire, self, file)
+ self._add_package(pkg)
+
+ def _add_package(self, pkg):
+ # XXX gets an instance of binary package and puts it into the
+ # repo location if not done yet
+ # then: the package gets added to the index
+
+ if not isinstance(pkg, packages.BinaryPackage):
+ raise Exception
+
+ repo_filename = os.path.join(self.path, os.path.basename(pkg.filename))
+
+ # Do we need to copy the package files?
+ copy = True
+
+ pkg_exists = None
+ if os.path.exists(repo_filename):
+ pkg_exists = packages.BinaryPackage(self.pakfire, self, repo_filename)
+
+ # If package in the repo is equivalent to the given one, we can
+ # skip any further processing.
+ if pkg == pkg_exists:
+				logging.debug("The package already exists in this repo: %s" % pkg.friendly_name)
+ copy = False
+
+ if copy:
+ logging.debug("Copying package '%s' to repository." % pkg.friendly_name)
+ repo_dirname = os.path.dirname(repo_filename)
+ if not os.path.exists(repo_dirname):
+ os.makedirs(repo_dirname)
+
+			# Try to use a hard link if possible; if that fails, we simply
+			# copy the file.
+ try:
+ os.link(pkg.filename, repo_filename)
+ except OSError:
+ shutil.copy2(pkg.filename, repo_filename)
+
+			# Create a new package object that is connected to this repository,
+			# so we can work with it.
+ pkg = packages.BinaryPackage(self.pakfire, self, repo_filename)
+
+ logging.info("Adding package '%s' to repository." % pkg.friendly_name)
+ self.index.add_package(pkg)
+
+ def save(self, path=None):
+ """
+ Save the index information to path.
+ """
+ self.index.save(path)
+
+
+class LocalBuildRepository(LocalRepository):
+ def __init__(self, pakfire):
+ RepositoryFactory.__init__(self, pakfire, "build", "Locally built packages")
+
+ self.path = self.pakfire.config.get("local_build_repo_path")
+ if not os.path.exists(self.path):
+ os.makedirs(self.path)
+
+ self.index = index.DirectoryIndex(self.pakfire, self, self.path)
+
+ @property
+ def priority(self):
+ return 20000
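
_add_package() above places files with a hard link where possible and falls back to a plain copy. As a self-contained helper the pattern looks like this; the function name is illustrative and not part of the patch:

import os
import shutil

def place_file(src, dst):
	try:
		os.link(src, dst)          # cheap when both paths are on the same filesystem
	except OSError:
		shutil.copy2(src, dst)     # cross-device or unsupported: fall back to a copy
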
import json
import time
-from constants import *
+from pakfire.constants import *
class Metadata(object):
def __init__(self, pakfire, index, metafile=None, metadata=None):
--- /dev/null
+#!/usr/bin/python
+
+from base import RepositoryFactory
+
+class DummyRepository(RepositoryFactory):
+ """
+ Just a dummy repository that actually does nothing.
+ """
+ def __init__(self, pakfire):
+ RepositoryFactory.__init__(self, pakfire, "dummy",
+ "This is a dummy repository.")
+
+
+class FileSystemRepository(RepositoryFactory):
+ """
+ Dummy repository to indicate that a specific package came from the
+ filesystem.
+ """
+ def __init__(self, pakfire):
+ RepositoryFactory.__init__(self, pakfire, "filesystem",
+ "Filesystem repository")
+
--- /dev/null
+#!/usr/bin/python
+
+import logging
+import os
+
+import cache
+import downloader
+import index
+
+from base import RepositoryFactory
+
+class RemoteRepository(RepositoryFactory):
+ def __init__(self, pakfire, name, description, url, mirrorlist, gpgkey, enabled):
+ RepositoryFactory.__init__(self, pakfire, name, description)
+
+ # Parse arguments.
+ self.url = url
+ self.gpgkey = gpgkey
+ self.mirrorlist = mirrorlist
+
+ if enabled:
+ self.enabled = True
+ else:
+ self.enabled = False
+
+ # Create a cache for the repository where we can keep all temporary data.
+ self.cache = cache.RepositoryCache(self.pakfire, self)
+
+ # Initialize mirror servers.
+ self.mirrors = downloader.MirrorList(self.pakfire, self)
+
+ # Initialize index.
+ self.index = index.RemoteIndex(self.pakfire, self)
+
+ logging.debug("Created new repository(name='%s', url='%s', enabled='%s')" % \
+ (self.name, self.url, self.enabled))
+
+ def __repr__(self):
+ return "<%s %s>" % (self.__class__.__name__, self.url)
+
+ @property
+ def local(self):
+ # If files are located somewhere in the filesystem we assume it is
+ # local.
+ if self.url.startswith("file://"):
+ return True
+
+ # Otherwise not.
+ return False
+
+ @property
+ def arch(self):
+ return self.pakfire.distro.arch
+
+ @property
+ def path(self):
+ if self.local:
+ return self.url[7:]
+
+ return self.cache.path
+
+ @property
+ def priority(self):
+ priority = 100
+
+ url2priority = {
+ "file://" : 50,
+ "http://" : 75,
+ }
+
+ for url, prio in url2priority.items():
+ if self.url.startswith(url):
+ priority = prio
+ break
+
+ return priority
+
+ def update(self, force=False):
+ if self.index:
+ self.index.update(force=force)
+
+ def _replace_from_cache(self, pkg):
+ for _pkg in self.cache.packages:
+ if pkg == _pkg:
+ pkg = _pkg
+ break
+
+ return pkg
+
+ @property
+ def packages(self):
+ for pkg in self.index.packages:
+ yield self._replace_from_cache(pkg)
+
+ def get_by_provides(self, requires):
+ for pkg in self.index.get_by_provides(requires):
+ yield self._replace_from_cache(pkg)
+
+ def get_by_file(self, filename):
+ return self.index.get_by_file(filename)
pakfire/cli.py
pakfire/config.py
pakfire/constants.py
-pakfire/database.py
pakfire/depsolve.py
pakfire/distro.py
-pakfire/downloader.py
pakfire/errors.py
pakfire/i18n.py
-pakfire/index.py
pakfire/__init__.py
pakfire/logger.py
-pakfire/metadata.py
pakfire/packages/base.py
pakfire/packages/binary.py
+pakfire/packages/file.py
pakfire/packages/__init__.py
pakfire/packages/installed.py
pakfire/packages/listing.py
pakfire/packages/source.py
pakfire/packages/util.py
pakfire/packages/virtual.py
+pakfire/plugins/base.py
pakfire/plugins/__init__.py
-pakfire/repository.py
+pakfire/repository/base.py
+pakfire/repository/cache.py
+pakfire/repository/database.py
+pakfire/repository/downloader.py
+pakfire/repository/index.py
+pakfire/repository/__init__.py
+pakfire/repository/installed.py
+pakfire/repository/local.py
+pakfire/repository/metadata.py
+pakfire/repository/oddments.py
+pakfire/repository/remote.py
pakfire/transaction.py
pakfire/util.py
scripts/pakfire
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2011-02-25 20:34+0100\n"
+"POT-Creation-Date: 2011-02-27 12:47+0100\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Content-Type: text/plain; charset=CHARSET\n"
"Content-Transfer-Encoding: 8bit\n"
-#: ../pakfire/cli.py:26
+#: ../pakfire/cli.py:27
#, python-format
msgid "%s [y/N]"
msgstr ""
-#: ../pakfire/cli.py:37
+#: ../pakfire/cli.py:36
msgid "Pakfire command line interface."
msgstr ""
-#: ../pakfire/cli.py:44
+#: ../pakfire/cli.py:43
msgid "The path where pakfire should operate in."
msgstr ""
-#: ../pakfire/cli.py:77
+#: ../pakfire/cli.py:76
msgid "Enable verbose output."
msgstr ""
-#: ../pakfire/cli.py:80
+#: ../pakfire/cli.py:79
msgid "Path to a configuration file to load."
msgstr ""
-#: ../pakfire/cli.py:83
+#: ../pakfire/cli.py:82
msgid "Disable a repository temporarily."
msgstr ""
-#: ../pakfire/cli.py:88
+#: ../pakfire/cli.py:87
msgid "Install one or more packages to the system."
msgstr ""
-#: ../pakfire/cli.py:90
+#: ../pakfire/cli.py:89
msgid "Give name of at least one package to install."
msgstr ""
-#: ../pakfire/cli.py:96
+#: ../pakfire/cli.py:95
msgid "Install one or more packages from the filesystem."
msgstr ""
-#: ../pakfire/cli.py:98
+#: ../pakfire/cli.py:97
msgid "Give filename of at least one package."
msgstr ""
-#: ../pakfire/cli.py:104
+#: ../pakfire/cli.py:103
msgid "Update the whole system or one specific package."
msgstr ""
-#: ../pakfire/cli.py:106
+#: ../pakfire/cli.py:105
msgid "Give a name of a package to update or leave emtpy for all."
msgstr ""
-#: ../pakfire/cli.py:112
+#: ../pakfire/cli.py:111
msgid "Print some information about the given package(s)."
msgstr ""
-#: ../pakfire/cli.py:114
+#: ../pakfire/cli.py:113
msgid "Give at least the name of one package."
msgstr ""
-#: ../pakfire/cli.py:120
+#: ../pakfire/cli.py:119
msgid "Search for a given pattern."
msgstr ""
-#: ../pakfire/cli.py:122
+#: ../pakfire/cli.py:121
msgid "A pattern to search for."
msgstr ""
-#: ../pakfire/cli.py:128
+#: ../pakfire/cli.py:127
msgid "Get a list of packages that provide a given file or feature."
msgstr ""
-#: ../pakfire/cli.py:130
+#: ../pakfire/cli.py:129
msgid "File or feature to search for."
msgstr ""
-#: ../pakfire/cli.py:192
+#: ../pakfire/cli.py:191
msgid "Pakfire builder command line interface."
msgstr ""
-#: ../pakfire/cli.py:230
+#: ../pakfire/cli.py:229
msgid "Update the package indexes."
msgstr ""
-#: ../pakfire/cli.py:236
+#: ../pakfire/cli.py:235
msgid "Build one or more packages."
msgstr ""
-#: ../pakfire/cli.py:238
+#: ../pakfire/cli.py:237
msgid "Give name of at least one package to build."
msgstr ""
-#: ../pakfire/cli.py:242
+#: ../pakfire/cli.py:241
msgid "Build the package for the given architecture."
msgstr ""
-#: ../pakfire/cli.py:244 ../pakfire/cli.py:266
+#: ../pakfire/cli.py:243 ../pakfire/cli.py:265
msgid "Path were the output files should be copied to."
msgstr ""
-#: ../pakfire/cli.py:249
+#: ../pakfire/cli.py:248
msgid "Go into a shell."
msgstr ""
-#: ../pakfire/cli.py:251 ../pakfire/cli.py:262
+#: ../pakfire/cli.py:250 ../pakfire/cli.py:261
msgid "Give name of a package."
msgstr ""
-#: ../pakfire/cli.py:255
+#: ../pakfire/cli.py:254
msgid "Emulated architecture in the shell."
msgstr ""
-#: ../pakfire/cli.py:260
+#: ../pakfire/cli.py:259
msgid "Generate a source package."
msgstr ""
-#: ../pakfire/cli.py:336
+#: ../pakfire/cli.py:335
msgid "Pakfire server command line interface."
msgstr ""
-#: ../pakfire/cli.py:361
+#: ../pakfire/cli.py:360
msgid "Repository management commands."
msgstr ""
-#: ../pakfire/cli.py:369
+#: ../pakfire/cli.py:368
msgid "Create a new repository index."
msgstr ""
-#: ../pakfire/cli.py:370
+#: ../pakfire/cli.py:369
msgid "Path to the packages."
msgstr ""
-#: ../pakfire/cli.py:371
+#: ../pakfire/cli.py:370
msgid "Path to input packages."
msgstr ""
-#: ../pakfire/depsolve.py:211
+#: ../pakfire/depsolve.py:217
msgid "Package"
msgstr ""
-#: ../pakfire/depsolve.py:211 ../pakfire/packages/base.py:51
+#: ../pakfire/depsolve.py:217 ../pakfire/packages/base.py:55
msgid "Arch"
msgstr ""
-#: ../pakfire/depsolve.py:211 ../pakfire/packages/base.py:52
+#: ../pakfire/depsolve.py:217 ../pakfire/packages/base.py:56
msgid "Version"
msgstr ""
-#: ../pakfire/depsolve.py:211
+#: ../pakfire/depsolve.py:217
msgid "Repository"
msgstr ""
-#: ../pakfire/depsolve.py:211 ../pakfire/packages/base.py:54
+#: ../pakfire/depsolve.py:217 ../pakfire/packages/base.py:58
msgid "Size"
msgstr ""
-#: ../pakfire/depsolve.py:214
+#: ../pakfire/depsolve.py:220
msgid "Installing:"
msgstr ""
-#: ../pakfire/depsolve.py:215
+#: ../pakfire/depsolve.py:221
msgid "Installing for dependencies:"
msgstr ""
-#: ../pakfire/depsolve.py:216
+#: ../pakfire/depsolve.py:222
msgid "Updating:"
msgstr ""
-#: ../pakfire/depsolve.py:217
+#: ../pakfire/depsolve.py:223
msgid "Updating for dependencies:"
msgstr ""
-#: ../pakfire/depsolve.py:218
+#: ../pakfire/depsolve.py:224
msgid "Removing:"
msgstr ""
-#: ../pakfire/depsolve.py:219
+#: ../pakfire/depsolve.py:225
msgid "Removing for dependencies:"
msgstr ""
-#: ../pakfire/depsolve.py:221
+#: ../pakfire/depsolve.py:227
msgid "Transaction Summary"
msgstr ""
-#: ../pakfire/depsolve.py:227
+#: ../pakfire/depsolve.py:233
msgid "Install"
msgstr ""
-#: ../pakfire/depsolve.py:228 ../pakfire/depsolve.py:232
-#: ../pakfire/depsolve.py:236
+#: ../pakfire/depsolve.py:234 ../pakfire/depsolve.py:238
+#: ../pakfire/depsolve.py:242
msgid "Package(s)"
msgstr ""
-#: ../pakfire/depsolve.py:231
+#: ../pakfire/depsolve.py:237
msgid "Updates"
msgstr ""
-#: ../pakfire/depsolve.py:235
+#: ../pakfire/depsolve.py:241
msgid "Remove"
msgstr ""
-#: ../pakfire/depsolve.py:242
+#: ../pakfire/depsolve.py:248
#, python-format
msgid "Total download size: %s"
msgstr ""
-#: ../pakfire/index.py:278
-#, python-format
-msgid "%s: package database"
-msgstr ""
-
-#: ../pakfire/__init__.py:172
+#: ../pakfire/__init__.py:191
msgid "Is this okay?"
msgstr ""
-#: ../pakfire/packages/base.py:50
+#: ../pakfire/packages/base.py:54
msgid "Name"
msgstr ""
-#: ../pakfire/packages/base.py:53
+#: ../pakfire/packages/base.py:57
msgid "Release"
msgstr ""
-#: ../pakfire/packages/base.py:55
+#: ../pakfire/packages/base.py:59
msgid "Repo"
msgstr ""
-#: ../pakfire/packages/base.py:56
+#: ../pakfire/packages/base.py:60
msgid "Summary"
msgstr ""
-#: ../pakfire/packages/base.py:57
+#: ../pakfire/packages/base.py:61
msgid "URL"
msgstr ""
-#: ../pakfire/packages/base.py:58
+#: ../pakfire/packages/base.py:62
msgid "License"
msgstr ""
-#: ../pakfire/packages/base.py:61
+#: ../pakfire/packages/base.py:65
msgid "Description"
msgstr ""
-#: ../pakfire/packages/base.py:67
+#: ../pakfire/packages/base.py:71
+msgid "UUID"
+msgstr ""
+
+#: ../pakfire/packages/base.py:72
msgid "Build ID"
msgstr ""
-#: ../pakfire/packages/base.py:68
+#: ../pakfire/packages/base.py:73
msgid "Build date"
msgstr ""
-#: ../pakfire/packages/base.py:69
+#: ../pakfire/packages/base.py:74
msgid "Build host"
msgstr ""
-#: ../pakfire/packages/packager.py:70
+#: ../pakfire/packages/packager.py:75
msgid "Extracting"
msgstr ""
-#: ../pakfire/packages/packager.py:125
+#: ../pakfire/packages/packager.py:130
msgid "Extracting:"
msgstr ""
+
+#: ../pakfire/repository/index.py:312
+#, python-format
+msgid "%s: package database"
+msgstr ""
author = "IPFire.org Team",
author_email = "info@ipfire.org",
url = "http://redmine.ipfire.org/projects/buildsystem3",
- packages = ["pakfire", "pakfire.packages", "pakfire.plugins"],
+ packages = ["pakfire", "pakfire.packages", "pakfire.plugins", "pakfire.repository",],
scripts = ["scripts/pakfire", "scripts/pakfire-build", "scripts/pakfire-server"],
cmdclass = { "build" : build_extra.build_extra,
"build_i18n" : build_i18n.build_i18n },