]> git.ipfire.org Git - pakfire.git/commitdiff
Introduce the C solver module.
authorMichael Tremer <michael.tremer@ipfire.org>
Sat, 16 Jul 2011 15:11:59 +0000 (17:11 +0200)
committerMichael Tremer <michael.tremer@ipfire.org>
Sat, 16 Jul 2011 15:11:59 +0000 (17:11 +0200)
49 files changed:
Makefile
examples/pakfire.repos.d/ipfire.repo
pakfire/base.py
pakfire/builder.py
pakfire/cli.py
pakfire/constants.py
pakfire/packages/__init__.py
pakfire/packages/base.py
pakfire/packages/installed.py
pakfire/packages/solv.py [new file with mode: 0644]
pakfire/repository/__init__.py
pakfire/repository/base.py
pakfire/repository/cache.py
pakfire/repository/database.py
pakfire/repository/database_old.py [new file with mode: 0644]
pakfire/repository/index.py
pakfire/repository/index_old.py [new file with mode: 0644]
pakfire/repository/local.py
pakfire/repository/local_old.py [new file with mode: 0644]
pakfire/repository/oddments.py
pakfire/repository/remote.py
pakfire/repository/remote_old.py [new file with mode: 0644]
pakfire/repository/solver.py [deleted file]
pakfire/repository/transaction.py
pakfire/satsolver.py [new file with mode: 0644]
po/POTFILES.in
po/pakfire.pot
setup.py
src/_pakfiremodule.c [new file with mode: 0644]
src/config.h [new file with mode: 0644]
src/pool.c [new file with mode: 0644]
src/pool.h [new file with mode: 0644]
src/problem.c [new file with mode: 0644]
src/problem.h [new file with mode: 0644]
src/relation.c [new file with mode: 0644]
src/relation.h [new file with mode: 0644]
src/repo.c [new file with mode: 0644]
src/repo.h [new file with mode: 0644]
src/request.c [new file with mode: 0644]
src/request.h [new file with mode: 0644]
src/solvable.c [new file with mode: 0644]
src/solvable.h [new file with mode: 0644]
src/solver.c [new file with mode: 0644]
src/solver.h [new file with mode: 0644]
src/step.c [new file with mode: 0644]
src/step.h [new file with mode: 0644]
src/test.py [new file with mode: 0644]
src/transaction.c [new file with mode: 0644]
src/transaction.h [new file with mode: 0644]

index 2e69c825dc0c2d74bc2ed3fbfbe0b1e09113dbf1..5b9c98b1b7058641714858daad03d45891e8363d 100644 (file)
--- a/Makefile
+++ b/Makefile
@@ -10,6 +10,7 @@ build:
 .PHONY: clean
 clean:
        python setup.py clean
+       -rm -rfv build
 
 .PHONY: dist
 dist:
index 337f09fe351d90b2d9552c6f4afff0c5176e8c05..2fe3264b6ba6103ced60ccb8ce299ffa0350f9ad 100644 (file)
 
 ;gpgkey = /not/yet/existant
 
-[testingbay]
+[testingbay2]
 description = IPFire Testing Repository
 
-mirrorlist = http://people.ipfire.org/~ms/testingbay/mirrors
+;mirrorlist = http://people.ipfire.org/~ms/testingbay/mirrors
+mirrorlist = file:///build/pakfire/testingbay/mirrors
 
-url = http://people.ipfire.org/~ms/testingbay
+;url = http://people.ipfire.org/~ms/testingbay
+url = file:///build/pakfire/testingbay
 
+enabled = 1
index ef4ebbfe1143ab927ef86fa0674254a8c1644606..2d2562131d8eaec8c998bea2169d96def69853d7 100644 (file)
@@ -11,6 +11,7 @@ import distro
 import logger
 import repository
 import packages
+import satsolver
 import util
 
 from constants import *
@@ -47,11 +48,19 @@ class Pakfire(object):
                # Get more information about the distribution we are running
                # or building
                self.distro = distro.Distribution(self, distro_config)
+               self.pool   = satsolver.Pool(self.distro.arch)
                self.repos  = repository.Repositories(self,
                        enable_repos=enable_repos, disable_repos=disable_repos)
 
-               # Create a short reference to the solver of this pakfire instance.
-               self.solver = self.repos.solver
+               # Create the solver of this pakfire instance.
+               # XXX maybe we can only create it when we need it?
+               #self.solver = satsolver.Solver(self, self.pool)
+
+       def create_solver(self):
+               return satsolver.Solver(self, self.pool)
+
+       def create_request(self):
+               return satsolver.Request(self.pool)
 
        def destroy(self):
                if not self.path == "/":
@@ -298,7 +307,7 @@ class Pakfire(object):
        def provides(self, patterns):
                pkgs = []
                for pattern in patterns:
-                       pkgs += self.repos.get_by_provides(pattern)
+                       pkgs += self.repos.whatprovides(pattern)
 
                pkgs = packages.PackageListing(pkgs)
                #pkgs.unique()
@@ -316,7 +325,7 @@ class Pakfire(object):
                return pkgs
 
        def repo_create(self, path, input_paths):
-               repo = repository.LocalBinaryRepository(
+               repo = repository.RepositoryDir(
                        self,
                        name="new",
                        description="New repository.",
@@ -324,9 +333,9 @@ class Pakfire(object):
                )
 
                for input_path in input_paths:
-                       repo._collect_packages(input_path)
+                       repo.collect_packages(input_path)
 
                repo.save()
 
        def repo_list(self):
-               return self.repos.all
+               return [r for r in self.repos]
index 0d61104cf97ff34b5d48b50890f7136b22c9b492..f23336010010b4e626b70cf9d41432099e0d01db 100644 (file)
@@ -272,7 +272,7 @@ class Builder(object):
                        return
 
                # Create a request and fill it with what we need.
-               request = self.solver.create_request()
+               request = self.pakfire.create_request()
 
                for req in requires:
                        if isinstance(req, packages.BinaryPackage):
@@ -283,10 +283,15 @@ class Builder(object):
 
                        request.install(req)
 
+               # Create a new solver instance.
+               solver = self.pakfire.create_solver()
+
                # Do the solving.
-               transaction = self.solver.solve(request, allow_downgrade=True)
+               transaction = solver.solve(request, allow_downgrade=True)
 
                # XXX check for errors
+               if not transaction:
+                       raise DependencyError, "Could not resolve dependencies"
 
                # Show the user what is going to be done.
                transaction.dump(logger=self.log)
@@ -630,12 +635,9 @@ class Builder(object):
                        k, v = m.groups()
                        pkg[k] = v.strip("\"")
 
-               # Create a dummy repository to link the virtual packages to
-               repo = repository.DummyRepository(self.pakfire)
-
                self._packages = []
                for pkg in pkgs:
-                       pkg = packages.VirtualPackage(self.pakfire, pkg) # XXX had to remove repo here?!
+                       pkg = packages.VirtualPackage(self.pakfire, pkg)
                        self._packages.append(pkg)
 
                return self._packages
index 6b9035bfb8e3a53d9097598e8b89605b273e4c18..97a996919449a24d7cfdc44e003bb65c91305f3a 100644 (file)
@@ -216,11 +216,10 @@ class Cli(object):
 
        def handle_repolist(self):
                repos = pakfire.repo_list(**self.pakfire_args)
-               repos.sort()
 
-               FORMAT = " %-20s %8s %12s "
+               FORMAT = " %-20s %8s %12s %12s "
 
-               title = FORMAT % (_("Repository"), _("Enabled"), _("Priority"))
+               title = FORMAT % (_("Repository"), _("Enabled"), _("Priority"), _("Packages"))
                print title
                print "=" * len(title) # spacing line
 
@@ -229,7 +228,7 @@ class Cli(object):
                        if repo.name == "installed":
                                continue
 
-                       print FORMAT % (repo.name, repo.enabled, repo.priority)
+                       print FORMAT % (repo.name, repo.enabled, repo.priority, len(repo))
 
 
 class CliBuilder(Cli):
@@ -374,6 +373,12 @@ class CliBuilder(Cli):
                pakfire.dist(pkgs, resultdirs=[self.args.resultdir,],
                        **self.pakfire_args)
 
+       def handle_provides(self):
+               pkgs = pakfire.provides(self.args.pattern, **self.pakfire_args)
+
+               for pkg in pkgs:
+                       print pkg.dump(long=True)
+
 
 class CliRepo(Cli):
        def __init__(self):
index b8fc1bf45766c7dcc7a94fae202ff8f21c3abaa0..df11281e72c7d6f85360551e378e322d451a5d4f 100644 (file)
@@ -30,7 +30,7 @@ METADATA_FORMAT = 0
 METADATA_DOWNLOAD_LIMIT = 1024**2
 METADATA_DOWNLOAD_PATH  = "repodata"
 METADATA_DOWNLOAD_FILE  = "repomd.json"
-METADATA_DATABASE_FILE  = "packages.db"
+METADATA_DATABASE_FILE  = "packages.solv"
 
 PACKAGE_FORMAT = 0
 PACKAGE_EXTENSION = "pfm"
@@ -39,7 +39,7 @@ MAKEFILE_EXTENSION = "nm"
 PACKAGE_FILENAME_FMT = "%(name)s-%(version)s-%(release)s.%(arch)s.%(ext)s"
 
 BUILD_PACKAGES = ["build-essentials",]
-SHELL_PACKAGES = ["elinks", "less", "pakfire", "vim",]
+SHELL_PACKAGES = ["elinks", "less", "vim",]
 BUILD_ROOT = "/var/lib/pakfire/build"
 
 SOURCE_DOWNLOAD_URL = "http://source.ipfire.org/source-3.x/"
index 2826bd9553c95cdd1805d3f3e3df038617a3b774..e8959f607cbca5e866b405e0808a88f4dab7e0f9 100644 (file)
@@ -3,6 +3,7 @@
 from binary import BinaryPackage
 from file import InnerTarFile
 from installed import DatabasePackage, InstalledPackage
+from solv import SolvPackage
 from source import SourcePackage
 from virtual import VirtualPackage
 
index 51dc3b7aba728a82b82078dc72737b6d6213285f..8f6f467e483cc18784dd1c1aa17af40c58c883ec 100644 (file)
@@ -1,5 +1,6 @@
 #!/usr/bin/python
 
+import datetime
 import logging
 import xml.sax.saxutils
 
@@ -279,17 +280,15 @@ class Package(object):
 
        @property
        def build_date(self):
-               return self.metadata.get("BUILD_DATE")
+               """
+                       Automatically convert the UNIX timestamp from self.build_time to
+                       a humanly readable format.
+               """
+               return "%s UTC" % datetime.datetime.utcfromtimestamp(self.build_time)
 
        @property
        def build_host(self):
-               host = self.metadata.get("BUILD_HOST")
-
-               # XXX Workaround tripple X as hostname.
-               if host == "X"*3:
-                       host = ""
-
-               return host
+               return self.metadata.get("BUILD_HOST")
 
        @property
        def build_id(self):
@@ -355,68 +354,3 @@ class Package(object):
 
        def extract(self, path, prefix=None):
                raise NotImplementedError, "%s" % repr(self)
-
-       def export_xml_string(self):
-               info = self.info
-               info["groups"] = " ".join(info["groups"])
-
-               # Escape everything to conform to XML.
-               for key, value in info.items():
-                       if not type(value) in (type(a) for a in ("a", u"a")):
-                               continue
-
-                       info[key] = xml.sax.saxutils.escape(value, {'"': "&quot;"})
-
-               s = """\
-                       <package type="rpm">
-                               <name>%(name)s</name>
-                               <arch>%(arch)s</arch>
-                               <version epoch="%(epoch)s" ver="%(version)s" rel="%(release)s"/>
-                               <checksum type="sha" pkgid="YES">%(hash1)s</checksum>
-                               <summary>%(summary)s</summary>
-                               <description>%(description)s</description>
-                               <packager>%(maintainer)s</packager>
-                               <url>%(url)s</url>
-                               <time file="0" build="%(build_time)s"/>
-                               <size package="%(size)s" installed="%(inst_size)s" />
-                               <format>
-                                       <rpm:license>%(license)s</rpm:license>
-                                       <rpm:vendor>%(vendor)s</rpm:vendor>
-                                       <rpm:group>%(groups)s</rpm:group>
-                                       <rpm:buildhost>%(build_host)s</rpm:buildhost>\n""" \
-                       % info
-
-               if self.provides:
-                       s += "<rpm:provides>"
-                       for provides in self.provides:
-                               s += "<rpm:entry name=\"%s\" />" % provides
-                       s += "</rpm:provides>"
-
-               if self.requires or self.pre_requires:
-                       s += "<rpm:requires>"
-                       for requires in self.requires:
-                               s += "<rpm:entry name=\"%s\" />" % requires
-
-                       for requires in self.pre_requires:
-                               s += "<rpm:entry name=\"%s\" pre=\"1\" />" % requires
-                       s += "</rpm:requires>"
-
-               if self.conflicts:
-                       s += "<rpm:conflicts>"
-                       for conflict in self.conflicts:
-                               s += "<rpm:entry name=\"%s\" />" % conflict
-                       s += "</rpm:conflicts>"
-
-               if self.obsoletes:
-                       s += "<rpm:obsoletes>"
-                       for obsolete in self.obsoletes:
-                               s += "<rpm:entry name=\"%s\" />" % obsolete
-                       s += "</rpm:obsoletes>"
-
-               for file in self.filelist:
-                       # XXX what about type="dir"?
-                       s += "<file>%s</file>" % file
-
-               s += "</format></package>"
-
-               return s
index f31c8f21322b0582daefc1c863a91c459daaad66..0a784fe6330f60410808338cbe6839687d2a7318 100644 (file)
@@ -110,27 +110,19 @@ class DatabasePackage(Package):
 
        @property
        def provides(self):
-               provides = self.metadata.get("provides", "").split()
-
-               return set(provides)
+               return self.metadata.get("provides", "").split()
 
        @property
        def requires(self):
-               requires = self.metadata.get("requires", "").split()
-
-               return set(requires)
+               return self.metadata.get("requires", "").split()
 
        @property
        def conflicts(self):
-               conflicts = self.metadata.get("conflicts", "").split()
-
-               return set(conflicts)
+               return self.metadata.get("conflicts", "").split()
 
        @property
        def obsoletes(self):
-               obsoletes = self.metadata.get("obsoletes", "").split()
-
-               return set(obsoletes)
+               return self.metadata.get("obsoletes", "").split()
 
        @property
        def hash1(self):
diff --git a/pakfire/packages/solv.py b/pakfire/packages/solv.py
new file mode 100644 (file)
index 0000000..d78b805
--- /dev/null
@@ -0,0 +1,145 @@
+#!/usr/bin/python
+
+import re
+
+import base
+import binary
+
+class SolvPackage(base.Package):
+       def __init__(self, pakfire, solvable):
+               base.Package.__init__(self, pakfire)
+
+               # Save solvable object
+               self.solvable = solvable
+
+               self.__evr = None
+
+       @property
+       def uuid(self):
+               return self.solvable.get_uuid()
+
+       @property
+       def hash1(self):
+               return self.solvable.get_hash1()
+
+       @property
+       def name(self):
+               return self.solvable.get_name()
+
+       @property
+       def evr(self):
+               if self.__evr is None:
+                       m = re.match("([0-9]+\:)?([0-9A-Za-z\.]+)-?([0-9]+\.?[a-z0-9]+|[0-9]+)?",
+                               self.solvable.get_evr())
+
+                       if m:
+                               (e, v, r) = m.groups()
+
+                               if e:
+                                       e = e.replace(":", "")
+                                       e = int(e)
+
+                               self.__evr = (e, v, r)
+
+               return self.__evr
+
+       @property
+       def epoch(self):
+               return self.evr[0]
+
+       @property
+       def version(self):
+               return self.evr[1]
+
+       @property
+       def release(self):
+               return self.evr[2]
+
+       @property
+       def arch(self):
+               return self.solvable.get_arch()
+
+       @property
+       def repo(self):
+               repo_name = self.solvable.get_repo_name()
+
+               return self.pakfire.repos.get_repo(repo_name)
+
+       @property
+       def summary(self):
+               return self.solvable.get_summary()
+
+       @property
+       def description(self):
+               return self.solvable.get_description()
+
+       @property
+       def groups(self):
+               return self.solvable.get_groups().split()
+
+       @property
+       def license(self):
+               return self.solvable.get_license()
+
+       @property
+       def maintainer(self):
+               return self.solvable.get_maintainer()
+
+       @property
+       def url(self):
+               return self.solvable.get_url()
+
+       @property
+       def size(self):
+               return self.solvable.get_downloadsize()
+
+       @property
+       def uuid(self):
+               return self.solvable.get_uuid()
+
+       @property
+       def build_host(self):
+               return self.solvable.get_buildhost()
+
+       @property
+       def build_time(self):
+               return self.solvable.get_buildtime()
+
+       @property
+       def build_id(self):
+               return "XXX CURRENTLY NOT IMPLEMENTED"
+
+       @property
+       def provides(self):
+               return self.solvable.get_provides()
+
+       @property
+       def requires(self):
+               return self.solvable.get_requires()
+
+       @property
+       def obsoletes(self):
+               return self.solvable.get_obsoletes()
+
+       @property
+       def conflicts(self):
+               return self.solvable.get_conflicts()
+
+       @property
+       def filename(self):
+               return self.solvable.get_filename()
+
+       @property
+       def is_in_cache(self):
+               return self.repo.cache.exists("packages/%s" % self.filename)
+
+       def get_from_cache(self):
+               filename = "packages/%s" % self.filename
+
+               if self.repo.cache.exists(filename):
+                       return binary.BinaryPackage(self.pakfire, self.repo, self.repo.cache.abspath(filename))
+
+       def download(self, text=""):
+               self.repo.download(self.filename, text=text, hash1=self.hash1)
+
+               return self.get_from_cache()
index 21557e5bd6dec2928cfd5f7e581af83cdf4f643d..7c742946c01f5757bb8d3854514607b772bc35ef 100644 (file)
@@ -3,11 +3,13 @@
 import logging
 
 import solver
+import satsolver
 
-from installed import InstalledRepository
-from local import LocalRepository, LocalBuildRepository, LocalSourceRepository
-from oddments import DummyRepository, FileSystemRepository
-from remote import RemoteRepository
+import pakfire.packages as packages
+
+from local import RepositoryDir, RepositoryBuild, RepositoryLocal
+from oddments import RepositoryDummy
+from remote import RepositorySolv
 
 class Repositories(object):
        """
@@ -23,41 +25,52 @@ class Repositories(object):
                self.distro = pakfire.distro
 
                # Place to store the repositories
-               self._repos = []
+               self.__repos = {}
 
                # Create a dummy repository
-               self.dummy = DummyRepository(self.pakfire)
+               self.dummy = RepositoryDummy(self.pakfire)
 
                # Create the local repository
-               self.local = InstalledRepository(self.pakfire)
+               self.local = RepositoryLocal(self.pakfire)
                self.add_repo(self.local)
 
                # If we running in build mode, we include our local build repository.
                if self.pakfire.builder:
-                       self.local_build = LocalBuildRepository(self.pakfire)
+                       self.local_build = RepositoryBuild(self.pakfire)
                        self.add_repo(self.local_build)
 
                for repo_name, repo_args in self.config.get_repos():
                        self._parse(repo_name, repo_args)
 
-               # XXX need to process enable_repos and disable_repos here
+               # Enable all repositories here as demanded on commandline
+               if enable_repos:
+                       for repo in enable_repos:
+                               self.enable_repo(repo)
+
+               # Disable all repositories here as demanded on commandline
+               if disable_repos:
+                       for repo in disable_repos:
+                               self.disable_repo(repo)
 
                # Update all indexes of the repositories (not force) so that we will
                # always work with valid data.
                self.update()
 
-               # Initialize the solver.
-               self.solver = solver.Solver(self.pakfire, self)
+       def __iter__(self):
+               repositories = self.__repos.values()
+               repositories.sort()
+
+               return iter(repositories)
 
        def __len__(self):
                """
                        Return the count of enabled repositories.
                """
-               i = 0
-               for repo in self.enabled:
-                       i += 1
+               return len([r for r in self if r.enabled])
 
-               return i
+       @property
+       def pool(self):
+               return self.pakfire.pool
 
        def _parse(self, name, args):
                # XXX need to make variable expansion
@@ -70,95 +83,49 @@ class Repositories(object):
                }
                _args.update(args)
 
-               repo = RemoteRepository(self.pakfire, **_args)
+               repo = RepositorySolv(self.pakfire, **_args)
 
                self.add_repo(repo)
 
        def add_repo(self, repo):
-               self._repos.append(repo)
-               self._repos.sort()
+               if self.__repos.has_key(repo.name):
+                       raise Exception, "Repository with that name does already exist."
 
-       @property
-       def all(self):
-               return self._repos[:]
+               self.__repos[repo.name] = repo
 
-       @property
-       def enabled(self):
-               for repo in self._repos:
-                       if not repo.enabled:
-                               continue
+       def get_repo(self, name):
+               """
+                       Get the repository with the given name, if not available, return
+                       the dummy repository.
+               """
+               try:
+                       return self.__repos[name]
+               except KeyError:
+                       return self.dummy
 
-                       yield repo
+       def enable_repo(self, name):
+               try:
+                       self.__repos[name].enabled = True
+               except KeyError:
+                       pass
 
        def disable_repo(self, name):
-               for repo in self.enabled:
-                       if repo.name == name:
-                               logging.debug("Disabled repository '%s'" % repo.name)
-                               repo.enabled = False
-                               continue
+               try:
+                       self.__repos[name].enabled = False
+               except KeyError:
+                       pass
 
        def update(self, force=False):
                logging.debug("Updating all repository indexes (force=%s)" % force)
 
-               # XXX update all indexes if necessary or forced
-               for repo in self.enabled:
+               # update all indexes if necessary or forced
+               for repo in self:
                        repo.update(force=force)
 
-       def get_repo_by_name(self, name):
-               for repo in self.enabled:
-                       if repo.name == name:
-                               return repo
-
-       def get_all(self):
-               for repo in self.enabled:
-                       for pkg in repo.get_all():
-                               yield pkg
-
-       def get_by_name(self, name):
-               #for repo in self.enabled:
-               #       for pkg in repo.get_by_name(name):
-               #               yield pkg
-               return self.solver.get_by_name(name)
-
-       def get_by_glob(self, pattern):
-               for repo in self.enabled:
-                       for pkg in repo.get_by_glob(pattern):
-                               yield pkg
-
-       #def get_by_provides(self, requires):
-       #       if requires.type == "file":
-       #               for pkg in self.get_by_file(requires.requires):
-       #                       yield pkg
-       #
-       #       else:
-       #               for repo in self.enabled:
-       #                       for pkg in repo.get_by_provides(requires):
-       #                               yield pkg
-       get_by_provides = get_by_name
-
-       def get_by_requires(self, requires):
-               for repo in self.enabled:
-                       for pkg in repo.get_by_requires(requires):
-                               yield pkg
-
-       #def get_by_file(self, filename):
-       #       for repo in self.enabled:
-       #               for pkg in repo.get_by_file(filename):
-       #                       yield pkg
-       get_by_file = get_by_name
-
-       def get_by_group(self, group):
-               for repo in self.enabled:
-                       for pkg in repo.get_by_group(group):
-                               yield pkg
-
-       def search(self, pattern):
-               pkg_names = []
-
-               for repo in self.enabled:
-                       for pkg in repo.search(pattern):
-                               if pkg.name in pkg_names:
-                                       continue
-
-                               pkg_names.append(pkg.name)
-                               yield pkg
+       def whatprovides(self, what):
+               for solv in self.pool.providers(what):
+                       yield packages.SolvPackage(self.pakfire, solv)
+
+       def search(self, what):
+               raise NotImplementedError
+
index 50f06ca0e300bfb0fb3632da92fea0ac8c16fafd..51a27557c6b50659a81533c6e1601f7fbc639087 100644 (file)
@@ -5,17 +5,25 @@ import glob
 import logging
 import re
 
+import cache
+import satsolver
+
 class RepositoryFactory(object):
        def __init__(self, pakfire, name, description):
                self.pakfire = pakfire
+               self.name = name
+               self.description = description
+
+               # Reference to corresponding Repo object in the solver.
+               self.solver_repo = satsolver.Repo(self.pool, self.name)
 
-               self.name, self.description = name, description
+               logging.debug("Initialized new repository: %s" % self)
 
-               # All repositories are enabled by default
-               self.enabled = True
+               # Create an cache object
+               self.cache = cache.RepositoryCache(self.pakfire, self)
 
-               # Reference to corresponding Repo object in the solver.
-               self.solver_repo = None
+               # The index MUST be set by an inheriting class.
+               self.index = None
 
        def __repr__(self):
                return "<%s %s>" % (self.__class__.__name__, self.name)
@@ -24,6 +32,30 @@ class RepositoryFactory(object):
                return cmp(self.priority * -1, other.priority * -1) or \
                        cmp(self.name, other.name)
 
+       def __len__(self):
+               return self.solver_repo.size()
+
+       @property
+       def pool(self):
+               return self.pakfire.pool
+
+       def get_enabled(self):
+               return self.solver_repo.get_enabled()
+
+       def set_enabled(self, val):
+               self.solver_repo.set_enabled(val)
+
+               if val:
+                       logging.debug("Enabled repository '%s'." % self.name)
+               else:
+                       logging.debug("Disabled repository '%s'." % self.name)
+
+       enabled = property(get_enabled, set_enabled)
+
+       @property
+       def arch(self):
+               return self.pakfire.distro.arch
+
        @property
        def distro(self):
                """
@@ -49,91 +81,97 @@ class RepositoryFactory(object):
                        A function that is called to update the local data of
                        the repository.
                """
-               if hasattr(self, "index"):
-                       self.index.update(force)
-
-       def get_all(self):
-               """
-                       Simply returns an instance of every package in this repository.
-               """
-               for pkg in self.packages:
-                       yield pkg
-
-       def get_by_name(self, name):
-               for pkg in self.packages:
-                       if pkg.name == name:
-                               yield pkg
-
-       def get_by_evr(self, name, evr):
-               m = re.match(r"([0-9]+\:)?([0-9A-Za-z\.\-]+)-([0-9]+\.?[a-z0-9]+|[0-9]+)", evr)
-
-               if not m:
-                       raise Exception, "Invalid input: %s" % evr
-
-               (epoch, version, release) = m.groups()
-               if epoch and epoch.endswith(":"):
-                       epoch = epoch[:-1]
-
-               pkgs = [p for p in self.index.get_by_evr(name, epoch, version, release)]
-
-               if not pkgs:
-                       return
-
-               if not len(pkgs) == 1:
-                       raise Exception
-
-               return pkgs[0]
-
-       def get_by_glob(self, pattern):
-               """
-                       Returns a list of all packages that names match the glob pattern
-                       that is provided.
-               """
-               for pkg in self.packages:
-                       if fnmatch.fnmatch(pkg.name, pattern):
-                               yield pkg
-
-       def get_by_provides(self, requires):
-               """
-                       Returns a list of all packages that offer a matching "provides"
-                       of the given "requires".
-               """
-               for pkg in self.packages:
-                       if pkg.does_provide(requires):
-                               yield pkg
-
-       def get_by_requires(self, requires):
-               """
-                       Returns a list of all packages that require the given requirement.
-               """
-               for pkg in self.packages:
-                       # XXX does not use the cmp() function of Requires.
-                       if requires.requires in pkg.requires:
-                               yield pkg
-
-       def get_by_file(self, filename):
-               for pkg in self.packages:
-                       match = False
-                       for pkg_filename in pkg.filelist:
-                               if fnmatch.fnmatch(pkg_filename, filename):
-                                       match = True
-                                       break
-
-                       if match:
-                               yield pkg
-
-       def get_by_group(self, group):
-               """
-                       Get all packages that belong to a specific group.
-               """
-               for pkg in self.packages:
-                       if group in pkg.groups:
-                               yield pkg
-
-       def get_by_friendly_name(self, name):
-               for pkg in self.packages:
-                       if pkg.friendly_name == name:
-                               return pkg
+               assert self.index
+
+               self.index.update(force)
+
+       #def get_all(self):
+       #       """
+       #               Simply returns an instance of every package in this repository.
+       #       """
+       #       for pkg in self.packages:
+       #               yield pkg
+
+       #def get_by_name(self, name):
+       #       for pkg in self.packages:
+       #               if pkg.name == name:
+       #                       yield pkg
+
+       #def get_by_uuid(self, uuid):
+       #       for pkg in self.packages:
+       #               if pkg.uuid == uuid:
+       #                       return pkg
+
+       #def get_by_evr(self, name, evr):
+       #       m = re.match(r"([0-9]+\:)?([0-9A-Za-z\.\-]+)-([0-9]+\.?[a-z0-9]+|[0-9]+)", evr)
+
+       #       if not m:
+       #               raise Exception, "Invalid input: %s" % evr
+
+       #       (epoch, version, release) = m.groups()
+       #       if epoch and epoch.endswith(":"):
+       #               epoch = epoch[:-1]
+
+       #       pkgs = [p for p in self.index.get_by_evr(name, epoch, version, release)]
+
+       #       if not pkgs:
+       #               return
+
+       #       if not len(pkgs) == 1:
+       #               raise Exception
+
+       #       return pkgs[0]
+
+       #def get_by_glob(self, pattern):
+       #       """
+       #               Returns a list of all packages that names match the glob pattern
+       #               that is provided.
+       #       """
+       #       for pkg in self.packages:
+       #               if fnmatch.fnmatch(pkg.name, pattern):
+       #                       yield pkg
+
+       #def get_by_provides(self, requires):
+       #       """
+       #               Returns a list of all packages that offer a matching "provides"
+       #               of the given "requires".
+       #       """
+       #       for pkg in self.packages:
+       #               if pkg.does_provide(requires):
+       #                       yield pkg
+
+       #def get_by_requires(self, requires):
+       #       """
+       #               Returns a list of all packages that require the given requirement.
+       #       """
+       #       for pkg in self.packages:
+       #               # XXX does not use the cmp() function of Requires.
+       #               if requires.requires in pkg.requires:
+       #                       yield pkg
+
+       #def get_by_file(self, filename):
+       #       for pkg in self.packages:
+       #               match = False
+       #               for pkg_filename in pkg.filelist:
+       #                       if fnmatch.fnmatch(pkg_filename, filename):
+       #                               match = True
+       #                               break
+
+       #               if match:
+       #                       yield pkg
+
+       #def get_by_group(self, group):
+       #       """
+       #               Get all packages that belong to a specific group.
+       #       """
+       #       for pkg in self.packages:
+       #               if group in pkg.groups:
+       #                       yield pkg
+
+       #def get_by_friendly_name(self, name):
+       #       for pkg in self.packages:
+       #               if pkg.friendly_name == name:
+       #                       return pkg
 
        def search(self, pattern):
                """
@@ -146,30 +184,3 @@ class RepositoryFactory(object):
                                if pattern.lower() in item.lower() or \
                                                fnmatch.fnmatch(item, pattern):
                                        yield pkg
-
-       @property
-       def packages(self):
-               """
-                       Returns all packages.
-               """
-               return self.index.packages
-
-       @property
-       def size(self):
-               """
-                       Return the number of packages.
-               """
-               return self.index.size
-
-       @property
-       def filelist(self):
-               if hasattr(self.index, "filelist"):
-                       return self.index.filelist
-
-               return {}
-
-       def import_to_solver(self, solver, repo):
-               if hasattr(self, "index"):
-                       self.solver_repo = repo
-
-                       self.index.import_to_solver(solver, repo)
index 78d5d347dacee23cf9e424c633fc02cdf6d6c5fb..f6b044cab21416b0f542b37a1dfdb141e42b771d 100644 (file)
@@ -4,8 +4,6 @@ import os
 import stat
 import time
 
-import index
-
 import pakfire.util as util
 from pakfire.constants import *
 
@@ -19,34 +17,45 @@ class RepositoryCache(object):
                self.pakfire = pakfire
                self.repo = repo
 
-               self.create()
-
-               # Initialize index of cache.
-               self.index = index.DirectoryIndex(self.pakfire, self.repo,
-                       os.path.join(self.path, "packages"))
+               self.__created = None
 
        @property
-       def packages(self):
-               return self.index.packages
+       def created(self):
+               """
+                       Tells us, if the cache was already created.
+               """
+               if self.__created is None:
+                       self.__created = os.path.exists(self.path)
+
+               return self.__created
 
        @property
        def path(self):
                return os.path.join(REPO_CACHE_DIR, self.pakfire.distro.release, \
                        self.repo.name, self.repo.arch)
 
-       def abspath(self, path):
+       def abspath(self, path, create=True):
+               if create:
+                       self.create()
+
                return os.path.join(self.path, path)
 
        def create(self):
                """
                        Create all necessary directories.
                """
+               # Do nothing, if the cache has already been created.
+               if self.created:
+                       return
+
                for path in ("mirrors", "packages", "repodata"):
-                       path = self.abspath(path)
+                       path = self.abspath(path, create=False)
 
                        if not os.path.exists(path):
                                os.makedirs(path)
 
+               self.__created = True
+
        def exists(self, filename):
                """
                        Returns True if a file exists and False if it doesn't.
index e360e20798c535c1d127cbc0ce703b8051e4503e..89579152a8fd104ba520678d3ee88925a0b72d7c 100644 (file)
@@ -7,6 +7,8 @@ import shutil
 import sqlite3
 import time
 
+import pakfire.packages as packages
+
 from pakfire.constants import *
 
 class Cursor(sqlite3.Cursor):
@@ -20,29 +22,20 @@ class Cursor(sqlite3.Cursor):
 class Database(object):
        def __init__(self, pakfire, filename):
                self.pakfire = pakfire
-               self._db = None
-
-               self._tmp = False
-
-               if filename == ":memory:":
-                       self._tmp = True
-
-                       filename = "/tmp/.%s-db" % random.randint(0, 1024**2)
-
                self.filename = filename
 
-               self.open()
+               self._db = None
 
        def __del__(self):
                if self._db:
-                       #self._db.commit()
                        self._db.close()
+                       self._db = None
 
        def create(self):
                pass
 
        def open(self):
-               if not self._db:
+               if self._db is None:
                        logging.debug("Open database %s" % self.filename)
 
                        dirname = os.path.dirname(self.filename)
@@ -60,35 +53,50 @@ class Database(object):
                                self.create()
 
        def close(self):
-               self._db.close()
-               self._db = None
-
-               if self._tmp:
-                       os.unlink(self.filename)
+               self.__del__()
 
        def commit(self):
+               self.open()
                self._db.commit()
 
        def cursor(self):
+               self.open()
                return self._db.cursor(Cursor)
 
        def executescript(self, *args, **kwargs):
+               self.open()
                return self._db.executescript(*args, **kwargs)
 
-       def save(self, path):
-               """
-                       Save a copy of this database to a new one located at path.
-               """
-               self.commit()
 
-               shutil.copy2(self.filename, path)
+class DatabaseLocal(Database):
+       def __init__(self, pakfire, repo):
+               self.repo = repo
 
+               # Generate filename for package database
+               filename = os.path.join(pakfire.path, PACKAGES_DB)
+
+               Database.__init__(self, pakfire, filename)
+
+       def __len__(self):
+               count = 0
 
-class PackageDatabase(Database):
-       def create(self):
                c = self.cursor()
+               c.execute("SELECT COUNT(*) AS count FROM packages")
+               for row in c:
+                       count = row["count"]
+               c.close()
 
+               return count
+
+       def create(self):
+               c = self.cursor()
                c.executescript("""
+                       CREATE TABLE settings(
+                               key                     TEXT,
+                               val                     TEXT
+                       );
+                       INSERT INTO settings(key, val) VALUES('version', '0');
+
                        CREATE TABLE files(
                                name            TEXT,
                                pkg                     INTEGER,
@@ -119,174 +127,103 @@ class PackageDatabase(Database):
                                build_id        TEXT,
                                build_host      TEXT,
                                build_date      TEXT,
-                               build_time      INTEGER
+                               build_time      INTEGER,
+                               installed       INT,
+                               reason          TEXT,
+                               repository      TEXT,
+                               scriptlet       TEXT,
+                               triggers        TEXT
                        );
                """)
                # XXX add some indexes here
-
                self.commit()
                c.close()
 
-       def list_packages(self):
-               c = self.cursor()
-               c.execute("SELECT DISTINCT name FROM packages ORDER BY name")
-
-               for pkg in c:
-                       yield pkg["name"]
-
-               c.close()
-
-       def package_exists(self, pkg):
-               return not self.get_id_by_pkg(pkg) is None
-
-       def get_id_by_pkg(self, pkg):
-               c = self.cursor()
-
-               c.execute("SELECT id FROM packages WHERE name = ? AND version = ? AND \
-                       release = ? AND epoch = ? LIMIT 1", (pkg.name, pkg.version, pkg.release, pkg.epoch))
-
-               ret = None
-               for i in c:
-                       ret = i["id"]
-                       break
-
-               c.close()
-
-               return ret
-
-       def add_package(self, pkg):
-               raise NotImplementedError
-
-
-class RemotePackageDatabase(PackageDatabase):
        def add_package(self, pkg, reason=None):
-               if self.package_exists(pkg):
-                       logging.debug("Skipping package which already exists in database: %s" % pkg.friendly_name)
-                       return
-
                logging.debug("Adding package to database: %s" % pkg.friendly_name)
 
-               filename = ""
-               if pkg.repo.local:
-                       # Get the path relatively to the repository.
-                       filename = pkg.filename[len(pkg.repo.path):]
-                       # Strip leading / if any.
-                       if filename.startswith("/"):
-                               filename = filename[1:]
-
                c = self.cursor()
-               c.execute("""
-                       INSERT INTO packages(
-                               name,
-                               epoch,
-                               version,
-                               release,
-                               arch,
-                               groups,
-                               filename,
-                               size,
-                               hash1,
-                               provides,
-                               requires,
-                               conflicts,
-                               obsoletes,
-                               license,
-                               summary,
-                               description,
-                               uuid,
-                               build_id,
-                               build_host,
-                               build_date,
-                               build_time
-                       ) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
-                       (
-                               pkg.name,
-                               pkg.epoch,
-                               pkg.version,
-                               pkg.release,
-                               pkg.arch,
-                               " ".join(pkg.groups),
-                               filename,
-                               pkg.size,
-                               pkg.hash1,
-                               " ".join(pkg.provides),
-                               " ".join(pkg.requires),
-                               " ".join(pkg.conflicts),
-                               " ".join(pkg.obsoletes),
-                               pkg.license,
-                               pkg.summary,
-                               pkg.description,
-                               pkg.uuid,
-                               pkg.build_id,
-                               pkg.build_host,
-                               pkg.build_date,
-                               pkg.build_time,
-                       )
-               )
-               self.commit()
-               c.close()
-
-               pkg_id = self.get_id_by_pkg(pkg)
 
-               c = self.cursor()
-               for file in pkg.filelist:
-                       c.execute("INSERT INTO files(name, pkg) VALUES(?, ?)", (file, pkg_id))
-
-               self.commit()
-               c.close()
-
-               return pkg_id
+               try:
+                       c.execute("""
+                               INSERT INTO packages(
+                                       name,
+                                       epoch,
+                                       version,
+                                       release,
+                                       arch,
+                                       groups,
+                                       filename,
+                                       size,
+                                       hash1,
+                                       provides,
+                                       requires,
+                                       conflicts,
+                                       obsoletes,
+                                       license,
+                                       summary,
+                                       description,
+                                       uuid,
+                                       build_id,
+                                       build_host,
+                                       build_date,
+                                       build_time,
+                                       installed,
+                                       repository,
+                                       reason,
+                                       scriptlet,
+                                       triggers
+                               ) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
+                               (
+                                       pkg.name,
+                                       pkg.epoch,
+                                       pkg.version,
+                                       pkg.release,
+                                       pkg.arch,
+                                       " ".join(pkg.groups),
+                                       pkg.filename,
+                                       pkg.size,
+                                       pkg.hash1,
+                                       " ".join(pkg.provides),
+                                       " ".join(pkg.requires),
+                                       " ".join(pkg.conflicts),
+                                       " ".join(pkg.obsoletes),
+                                       pkg.license,
+                                       pkg.summary,
+                                       pkg.description,
+                                       pkg.uuid,
+                                       pkg.build_id,
+                                       pkg.build_host,
+                                       pkg.build_date,
+                                       pkg.build_time,
+                                       time.time(),
+                                       pkg.repo.name,
+                                       reason or "",
+                                       pkg.scriptlet,
+                                       " ".join(pkg.triggers)
+                               )
+                       )
 
+                       pkg_id = c.lastrowid
 
-class LocalPackageDatabase(RemotePackageDatabase):
-       def __init__(self, pakfire):
-               # Generate filename for package database
-               filename = os.path.join(pakfire.path, PACKAGES_DB)
+                       c.executemany("INSERT INTO files(name, pkg) VALUES(?, ?)",
+                               ((file, pkg_id) for file in pkg.filelist))
 
-               RemotePackageDatabase.__init__(self, pakfire, filename)
+               except:
+                       raise
 
-       def create(self):
-               RemotePackageDatabase.create(self)
+               else:
+                       self.commit()
 
-               # Alter the database layout to store additional local information.
-               logging.debug("Altering database table for local information.")
-               c = self.cursor()
-               c.executescript("""
-                       ALTER TABLE packages ADD COLUMN installed INT;
-                       ALTER TABLE packages ADD COLUMN reason TEXT;
-                       ALTER TABLE packages ADD COLUMN repository TEXT;
-                       ALTER TABLE packages ADD COLUMN scriptlet TEXT;
-                       ALTER TABLE packages ADD COLUMN triggers TEXT;
-               """)
-               self.commit()
                c.close()
 
-       def add_package(self, pkg, reason=None):
-               # Insert all the information to the database we have in the remote database
-               pkg_id = RemotePackageDatabase.add_package(self, pkg)
-
-               # then: add some more information
+       @property
+       def packages(self):
                c = self.cursor()
 
-               # Save timestamp when the package was installed.
-               c.execute("UPDATE packages SET installed = ? WHERE id = ?", (time.time(), pkg_id))
-
-               # Add repository information.
-               c.execute("UPDATE packages SET repository = ? WHERE id = ?", (pkg.repo.name, pkg_id))
+               c.execute("SELECT * FROM packages ORDER BY name")
 
-               # Save reason of installation (if any).
-               if reason:
-                       c.execute("UPDATE packages SET reason = ? WHERE id = ?", (reason, pkg_id))
+               for row in c:
+                       yield packages.DatabasePackage(self.pakfire, self.repo, self, row)
 
-               # Update the filename information.
-               c.execute("UPDATE packages SET filename = ? WHERE id = ?", (pkg.filename, pkg_id))
-
-               # Add the scriptlet to database (needed to update or uninstall packages).
-               c.execute("UPDATE packages SET scriptlet = ? WHERE id = ?", (pkg.scriptlet, pkg_id))
-
-               # Add triggers to the database.
-               triggers = " ".join(pkg.triggers)
-               c.execute("UPDATE packages SET triggers = ? WHERE id = ?", (triggers, pkg_id))
-
-               self.commit()
                c.close()
diff --git a/pakfire/repository/database_old.py b/pakfire/repository/database_old.py
new file mode 100644 (file)
index 0000000..e360e20
--- /dev/null
@@ -0,0 +1,292 @@
+#!/usr/bin/python
+
+import logging
+import os
+import random
+import shutil
+import sqlite3
+import time
+
+from pakfire.constants import *
+
+class Cursor(sqlite3.Cursor):
+       def execute(self, *args, **kwargs):
+               # For debugging of SQL queries.
+               #print args, kwargs
+
+               return sqlite3.Cursor.execute(self, *args, **kwargs)
+
+
+class Database(object):
+       def __init__(self, pakfire, filename):
+               self.pakfire = pakfire
+               self._db = None
+
+               self._tmp = False
+
+               if filename == ":memory:":
+                       self._tmp = True
+
+                       filename = "/tmp/.%s-db" % random.randint(0, 1024**2)
+
+               self.filename = filename
+
+               self.open()
+
+       def __del__(self):
+               if self._db:
+                       #self._db.commit()
+                       self._db.close()
+
+       def create(self):
+               pass
+
+       def open(self):
+               if not self._db:
+                       logging.debug("Open database %s" % self.filename)
+
+                       dirname = os.path.dirname(self.filename)
+                       if not os.path.exists(dirname):
+                               os.makedirs(dirname)
+
+                       database_exists = os.path.exists(self.filename)
+
+                       # Make a connection to the database.
+                       self._db = sqlite3.connect(self.filename)
+                       self._db.row_factory = sqlite3.Row
+
+                       # Create the database if it was not there, yet.
+                       if not database_exists:
+                               self.create()
+
+       def close(self):
+               self._db.close()
+               self._db = None
+
+               if self._tmp:
+                       os.unlink(self.filename)
+
+       def commit(self):
+               self._db.commit()
+
+       def cursor(self):
+               return self._db.cursor(Cursor)
+
+       def executescript(self, *args, **kwargs):
+               return self._db.executescript(*args, **kwargs)
+
+       def save(self, path):
+               """
+                       Save a copy of this database to a new one located at path.
+               """
+               self.commit()
+
+               shutil.copy2(self.filename, path)
+
+
+class PackageDatabase(Database):
+       def create(self):
+               c = self.cursor()
+
+               c.executescript("""
+                       CREATE TABLE files(
+                               name            TEXT,
+                               pkg                     INTEGER,
+                               size            INTEGER,
+                               type            INTEGER,
+                               hash1           TEXT
+                       );
+
+                       CREATE TABLE packages(
+                               id                      INTEGER PRIMARY KEY,
+                               name            TEXT,
+                               epoch           INTEGER,
+                               version         TEXT,
+                               release         TEXT,
+                               arch            TEXT,
+                               groups          TEXT,
+                               filename        TEXT,
+                               size            INTEGER,
+                               hash1           TEXT,
+                               provides        TEXT,
+                               requires        TEXT,
+                               conflicts       TEXT,
+                               obsoletes       TEXT,
+                               license         TEXT,
+                               summary         TEXT,
+                               description     TEXT,
+                               uuid            TEXT,
+                               build_id        TEXT,
+                               build_host      TEXT,
+                               build_date      TEXT,
+                               build_time      INTEGER
+                       );
+               """)
+               # XXX add some indexes here
+
+               self.commit()
+               c.close()
+
+       def list_packages(self):
+               c = self.cursor()
+               c.execute("SELECT DISTINCT name FROM packages ORDER BY name")
+
+               for pkg in c:
+                       yield pkg["name"]
+
+               c.close()
+
+       def package_exists(self, pkg):
+               return not self.get_id_by_pkg(pkg) is None
+
+       def get_id_by_pkg(self, pkg):
+               c = self.cursor()
+
+               c.execute("SELECT id FROM packages WHERE name = ? AND version = ? AND \
+                       release = ? AND epoch = ? LIMIT 1", (pkg.name, pkg.version, pkg.release, pkg.epoch))
+
+               ret = None
+               for i in c:
+                       ret = i["id"]
+                       break
+
+               c.close()
+
+               return ret
+
+       def add_package(self, pkg):
+               raise NotImplementedError
+
+
+class RemotePackageDatabase(PackageDatabase):
+       def add_package(self, pkg, reason=None):
+               if self.package_exists(pkg):
+                       logging.debug("Skipping package which already exists in database: %s" % pkg.friendly_name)
+                       return
+
+               logging.debug("Adding package to database: %s" % pkg.friendly_name)
+
+               filename = ""
+               if pkg.repo.local:
+                       # Get the path relatively to the repository.
+                       filename = pkg.filename[len(pkg.repo.path):]
+                       # Strip leading / if any.
+                       if filename.startswith("/"):
+                               filename = filename[1:]
+
+               c = self.cursor()
+               c.execute("""
+                       INSERT INTO packages(
+                               name,
+                               epoch,
+                               version,
+                               release,
+                               arch,
+                               groups,
+                               filename,
+                               size,
+                               hash1,
+                               provides,
+                               requires,
+                               conflicts,
+                               obsoletes,
+                               license,
+                               summary,
+                               description,
+                               uuid,
+                               build_id,
+                               build_host,
+                               build_date,
+                               build_time
+                       ) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
+                       (
+                               pkg.name,
+                               pkg.epoch,
+                               pkg.version,
+                               pkg.release,
+                               pkg.arch,
+                               " ".join(pkg.groups),
+                               filename,
+                               pkg.size,
+                               pkg.hash1,
+                               " ".join(pkg.provides),
+                               " ".join(pkg.requires),
+                               " ".join(pkg.conflicts),
+                               " ".join(pkg.obsoletes),
+                               pkg.license,
+                               pkg.summary,
+                               pkg.description,
+                               pkg.uuid,
+                               pkg.build_id,
+                               pkg.build_host,
+                               pkg.build_date,
+                               pkg.build_time,
+                       )
+               )
+               self.commit()
+               c.close()
+
+               pkg_id = self.get_id_by_pkg(pkg)
+
+               c = self.cursor()
+               for file in pkg.filelist:
+                       c.execute("INSERT INTO files(name, pkg) VALUES(?, ?)", (file, pkg_id))
+
+               self.commit()
+               c.close()
+
+               return pkg_id
+
+
+class LocalPackageDatabase(RemotePackageDatabase):
+       def __init__(self, pakfire):
+               # Generate filename for package database
+               filename = os.path.join(pakfire.path, PACKAGES_DB)
+
+               RemotePackageDatabase.__init__(self, pakfire, filename)
+
+       def create(self):
+               RemotePackageDatabase.create(self)
+
+               # Alter the database layout to store additional local information.
+               logging.debug("Altering database table for local information.")
+               c = self.cursor()
+               c.executescript("""
+                       ALTER TABLE packages ADD COLUMN installed INT;
+                       ALTER TABLE packages ADD COLUMN reason TEXT;
+                       ALTER TABLE packages ADD COLUMN repository TEXT;
+                       ALTER TABLE packages ADD COLUMN scriptlet TEXT;
+                       ALTER TABLE packages ADD COLUMN triggers TEXT;
+               """)
+               self.commit()
+               c.close()
+
+       def add_package(self, pkg, reason=None):
+               # Insert all the information to the database we have in the remote database
+               pkg_id = RemotePackageDatabase.add_package(self, pkg)
+
+               # then: add some more information
+               c = self.cursor()
+
+               # Save timestamp when the package was installed.
+               c.execute("UPDATE packages SET installed = ? WHERE id = ?", (time.time(), pkg_id))
+
+               # Add repository information.
+               c.execute("UPDATE packages SET repository = ? WHERE id = ?", (pkg.repo.name, pkg_id))
+
+               # Save reason of installation (if any).
+               if reason:
+                       c.execute("UPDATE packages SET reason = ? WHERE id = ?", (reason, pkg_id))
+
+               # Update the filename information.
+               c.execute("UPDATE packages SET filename = ? WHERE id = ?", (pkg.filename, pkg_id))
+
+               # Add the scriptlet to database (needed to update or uninstall packages).
+               c.execute("UPDATE packages SET scriptlet = ? WHERE id = ?", (pkg.scriptlet, pkg_id))
+
+               # Add triggers to the database.
+               triggers = " ".join(pkg.triggers)
+               c.execute("UPDATE packages SET triggers = ? WHERE id = ?", (triggers, pkg_id))
+
+               self.commit()
+               c.close()
index 79c412c22012eb589b37ebbfce84fb5b5a297d5e..81fee0490f3fb628375efb8f884be4bba069137b 100644 (file)
@@ -1,16 +1,11 @@
 #!/usr/bin/python
 
-import fnmatch
-import json
 import logging
 import os
-import random
-import shutil
-import subprocess
-import time
 
 import database
 import metadata
+import satsolver
 
 import pakfire.compress as compress
 import pakfire.downloader as downloader
@@ -21,324 +16,151 @@ from pakfire.constants import *
 from pakfire.i18n import _
 
 class Index(object):
+       RELATIONS = (
+               (">=", satsolver.REL_GE,),
+               ("<=", satsolver.REL_LE,),
+               ("=" , satsolver.REL_EQ,),
+               ("<" , satsolver.REL_LT,),
+               (">" , satsolver.REL_GT,),
+       )
+
        def __init__(self, pakfire, repo):
                self.pakfire = pakfire
-               self.repo = repo
-
-               self._packages = []
-
-       @property
-       def arch(self):
-               return self.pakfire.distro.arch
-
-       def get_all_by_name(self, name):
-               for package in self.packages:
-                       if package.name == name:
-                               yield package
-
-       def get_by_file(self, filename):
-               for pkg in self.packages:
-                       match = False
-                       for pkg_filename in pkg.filelist:
-                               if fnmatch.fnmatch(pkg_filename, filename):
-                                       match = True
-                                       break
-
-                       if match:
-                               yield pkg
-
-       def get_by_evr(self, name, epoch, version, release):
-               try:
-                       epoch = int(epoch)
-               except TypeError:
-                       epoch = 0
-
-               for pkg in self.packages:
-                       if pkg.type == "source":
-                               continue
 
-                       if pkg.name == name and pkg.epoch == epoch \
-                                       and pkg.version == version and pkg.release == release:
-                               yield pkg
+               # Create reference to repository and the solver repo.
+               self.repo = repo
+               self.solver_repo = repo.solver_repo
 
-       def get_by_id(self, id):
-               raise NotImplementedError
+               self.init()
 
-       def get_by_uuid(self, uuid):
-               for pkg in self.packages:
-                       if pkg.uuid == uuid:
-                               return pkg
+               # Check, if initialization was okay.
+               self.check()
 
-       def get_by_provides(self, requires):
-               for pkg in self.packages:
-                       if pkg.does_provide(requires):
-                               yield pkg
+       def __repr__(self):
+               return "<%s %s>" % (self.__class__.__name__, self.repo)
 
-       @property
-       def packages(self):
-               for pkg in self._packages:
-                       yield pkg
+       def __len(self):
+               return len(self.repo)
 
        @property
-       def size(self):
-               i = 0
-               for pkg in self.packages:
-                       i += 1
+       def cache(self):
+               return self.repo.cache
 
-               return i
-
-       def update(self, force=False):
+       def init(self):
                pass
 
-       def add_package(self, pkg):
+       def check(self):
+               """
+                       Check if everything was correctly initialized.
+               """
                raise NotImplementedError
 
-       @property
-       def cachefile(self):
-               return None
-
-       def import_to_solver(self, solver, repo):
-               if self.cachefile:
-                       if not os.path.exists(self.cachefile):
-                               self.create_solver_cache()
-
-                       logging.debug("Importing repository cache data from %s" % self.cachefile)
-                       repo.add_solv(self.cachefile)
-
-               else:
-                       for pkg in self.packages:
-                               solver.add_package(pkg, repo.name())
-
-               logging.debug("Initialized new repo '%s' with %s packages." % \
-                       (repo.name(), repo.size()))
-
-       def create_solver_cache(self):
-               cachedir = os.path.dirname(self.cachefile)
-               if not os.path.exists(cachedir):
-                       os.makedirs(cachedir)
-
-               f = open(self.cachefile, "w")
-
-               # Write metadata header.
-               xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
-               xml += "<metadata xmlns=\"http://linux.duke.edu/metadata/common\""
-               xml += " xmlns:rpm=\"http://linux.duke.edu/metadata/rpm\">\n"
-
-               # We dump an XML string for every package in this repository and
-               # write it to the XML file.
-               for pkg in self.packages:
-                       xml += pkg.export_xml_string()
-
-               # Write footer.
-               xml += "</metadata>"
-
-               p = subprocess.Popen("rpmmd2solv", stdin=subprocess.PIPE,
-                       stdout=subprocess.PIPE)
-               stdout, stderr = p.communicate(xml)
-
-               f.write(stdout)
-               f.close()
-
-
-class DirectoryIndex(Index):
-       def __init__(self, pakfire, repo, path):
-               if path.startswith("file://"):
-                       path = path[7:]
-               self.path = path
-
-               Index.__init__(self, pakfire, repo)
-
-               # Always update this because it will otherwise contain no data
-               self.update(force=True)
-
        def update(self, force=False):
-               logging.debug("Updating repository index '%s' (force=%s)" % (self.path, force))
-
-               # Do nothing if the update is not forced but populate the database
-               # if no packages are present.
-               if not force and self._packages:
-                       return
-
-               # If we update the cache, we clear it first.
-               self._packages = []
-
-               for dir, subdirs, files in os.walk(self.path):
-                       for file in files:
-                               # Skip files that do not have the right extension
-                               if not file.endswith(".%s" % PACKAGE_EXTENSION):
-                                       continue
-
-                               file = os.path.join(dir, file)
-
-                               package = packages.open(self.pakfire, self.repo, file)
-
-                               logging.debug("Found package: %s" % package)
-
-                               if isinstance(package, packages.BinaryPackage):
-                                       if not package.arch in (self.arch, "noarch"):
-                                               logging.warning("Skipped package with wrong architecture: %s (%s)" \
-                                                       % (package.filename, package.arch))
-                                               print package.type
-                                               continue
-
-                               # XXX this is disabled because we could also have source
-                               # repositories. But we should not mix them.     
-                               #if package.type == "source":
-                               #       # Silently skip source packages.
-                               #       continue
-
-                               self._packages.append(package)
-
-       def save(self, path=None):
-               if not path:
-                       path = self.path
-
-               path = os.path.join(path, "index.db")
-
-               db = database.PackageDatabase(self.pakfire, path)
-
-               for pkg in self.packages:
-                       db.add_package(pkg)
-
-               db.close()
-
-
-class DatabaseIndexFactory(Index):
-       def __init__(self, pakfire, repo):
-               Index.__init__(self, pakfire, repo)
-
-               # Add empty reference to a fictional database.
-               self.db = None
-
-               self.open_database()
-
-       def open_database(self):
                raise NotImplementedError
 
-       @property
-       def packages(self):
-               c = self.db.cursor()
-               c.execute("SELECT * FROM packages")
-
-               for pkg in c:
-                       yield packages.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
-
-               c.close()
-
-       def add_package(self, pkg, reason=None):
-               return self.db.add_package(pkg, reason)
-
-       def get_by_id(self, id):
-               c = self.db.cursor()
-               c.execute("SELECT * FROM packages WHERE id = ? LIMIT 1", (id,))
-
-               ret = None
-               for pkg in c:
-                       ret = packages.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
-
-               c.close()
-
-               return ret
-
-       def get_by_file(self, filename):
-               c = self.db.cursor()
-               c.execute("SELECT pkg FROM files WHERE name GLOB ?", (filename,))
-
-               for pkg in c:
-                       yield self.get_by_id(pkg["pkg"])
-
-               c.close()
-
-       @property
-       def filelist(self):
-               c = self.db.cursor()
-               c.execute("SELECT pkg, name FROM files")
-
-               files = {}
-
-               for entry in c:
-                       file = entry["name"]
-                       try:
-                               files[pkg_id].append(file)
-                       except KeyError:
-                               files[pkg_id] = [file,]
-
-               c.close()
-
-               return files
-
-
-class InstalledIndex(DatabaseIndexFactory):
-       def open_database(self):
-               # Open the local package database.
-               self.db = database.LocalPackageDatabase(self.pakfire)
-
-
-class LocalIndex(DatabaseIndexFactory):
-       def open_database(self):
-               self.db = database.RemotePackageDatabase(self.pakfire, ":memory:")
-
-       def save(self, path=None, algo="xz"):
+       def read(self, filename):
                """
-                       This function saves the database and metadata to path so it can
-                       be exported to a remote repository.
+                       Read file in SOLV format from filename.
                """
-               if not path:
-                       path = self.repo.path
-
-               # Create filenames
-               metapath = os.path.join(path, METADATA_DOWNLOAD_PATH)
-               db_path  = os.path.join(metapath, METADATA_DATABASE_FILE)
-               md_path  = os.path.join(metapath, METADATA_DOWNLOAD_FILE)
-
-               if not os.path.exists(metapath):
-                       os.makedirs(metapath)
+               self.solver_repo.read(filename)
 
-               else:
-                       # If a database is present, we remove it because we want to start
-                       # with a clean environment.
-                       if os.path.exists(db_path):
-                               os.unlink(db_path)
+       def write(self, filename):
+               """
+                       Write content to filename in SOLV format.
+               """
+               self.solver_repo.write(filename)
 
-               # Save the database to path and get the filename.
-               self.db.save(db_path)
+       def create_relation(self, s):
+               assert s
 
-               # Make a reference to the database file so that it gets a unique name
-               # and we won't get into any trouble with caching proxies.
-               db_hash = util.calc_hash1(db_path)
+               pool = self.pakfire.pool
 
-               db_path2 = os.path.join(os.path.dirname(db_path),
-                       "%s-%s" % (db_hash, os.path.basename(db_path)))
+               if s.startswith("/"):
+                       return satsolver.Relation(pool, s)
 
-               # Compress the database.
-               if algo:
-                       compress.compress(db_path, algo=algo, progress=True)
+               for pattern, type in self.RELATIONS:
+                       if not pattern in s:
+                               continue
 
-               if not os.path.exists(db_path2):
-                       shutil.move(db_path, db_path2)
-               else:
-                       os.unlink(db_path)
+                       name, version = s.split(pattern, 1)
 
-               # Create a new metadata object and add our information to it.
-               md = metadata.Metadata(self.pakfire, self)
+                       return satsolver.Relation(pool, name, version, type)
 
-               # Save name of the hashed database to the metadata.
-               md.database = os.path.basename(db_path2)
-               md.database_hash1 = db_hash
-               md.database_compression = algo
+               return satsolver.Relation(pool, s)
 
-               # Save metdata to repository.
-               md.save(md_path)
+       def add_package(self, pkg):
+               # XXX Skip packages without a UUID
+               #if not pkg.uuid:
+               #       logging.warning("Skipping package which lacks UUID: %s" % pkg)
+               #       return
+               if not pkg.build_time:
+                       return
 
+               logging.debug("Adding package to index %s: %s" % (self, pkg))
+
+               solvable = satsolver.Solvable(self.solver_repo, pkg.name,
+                       pkg.friendly_version, pkg.arch)
+
+               # Save metadata.
+               solvable.set_vendor(pkg.vendor)
+               solvable.set_hash1(pkg.hash1)
+               solvable.set_uuid(pkg.uuid)
+               solvable.set_maintainer(pkg.maintainer)
+               solvable.set_groups(" ".join(pkg.groups))
+
+               # Save upstream information (summary, description, license, url).
+               solvable.set_summary(pkg.summary)
+               solvable.set_description(pkg.description)
+               solvable.set_license(pkg.license)
+               solvable.set_url(pkg.url)
+
+               # Save build information.
+               solvable.set_buildhost(pkg.build_host)
+               solvable.set_buildtime(pkg.build_time)
+
+               # Save filename.
+               filename = os.path.basename(pkg.filename)
+               solvable.set_filename(filename)
+               solvable.set_downloadsize(pkg.size)
+               solvable.set_installsize(pkg.inst_size)
+
+               # Import all requires.
+               for req in pkg.requires:
+                       rel = self.create_relation(req)
+                       solvable.add_requires(rel)
+
+               # Import all provides.
+               for prov in pkg.provides:
+                       rel = self.create_relation(prov)
+                       solvable.add_provides(rel)
+
+               # Import all conflicts.
+               for conf in pkg.conflicts:
+                       rel = self.create_relation(conf)
+                       solvable.add_conflicts(rel)
+
+               # Import all obsoletes.
+               for obso in pkg.obsoletes:
+                       rel = self.create_relation(obso)
+                       solvable.add_obsoletes(rel)
+
+               # Import all files that are in the package.
+               rel = self.create_relation("solvable:filemarker")
+               solvable.add_provides(rel)
+               for file in pkg.filelist:
+                       rel = self.create_relation(file)
+                       solvable.add_provides(rel)
+
+
+class IndexSolv(Index):
+       def check(self):
+               pass # XXX to be done
 
-class RemoteIndex(DatabaseIndexFactory):
-       def open_database(self):
-               self.update(force=False)
+       def update(self, force=False):
+               self._update_metadata(force)
+               self._update_database(force)
 
        def _update_metadata(self, force):
-               # Shortcut to repository cache.
-               cache = self.repo.cache
-
                filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)
 
                # Marker if we need to do the download.
@@ -349,15 +171,15 @@ class RemoteIndex(DatabaseIndexFactory):
 
                if not force:
                        # Check if file does exists and is not too old.
-                       if cache.exists(filename):
-                               age = cache.age(filename)
+                       if self.cache.exists(filename):
+                               age = self.cache.age(filename)
                                if age and age < TIME_10M:
                                        download = False
                                        logging.debug("Metadata is recent enough. I don't download it again.")
 
                                # Open old metadata for comparison.
                                old_metadata = metadata.Metadata(self.pakfire, self,
-                                       cache.abspath(filename))
+                                       self.cache.abspath(filename))
 
                if download:
                        logging.debug("Going to (re-)download the repository metadata.")
@@ -377,21 +199,18 @@ class RemoteIndex(DatabaseIndexFactory):
                        else:
                                # We explicitly rewrite the metadata if it is equal to have
                                # a new timestamp and do not download it over and over again.
-                               with cache.open(filename, "w") as o:
+                               with self.cache.open(filename, "w") as o:
                                        o.write(data)
 
                # Parse the metadata that we just downloaded or load it from cache.
                self.metadata = metadata.Metadata(self.pakfire, self,
-                       cache.abspath(filename))
+                       self.cache.abspath(filename))
 
        def _update_database(self, force):
-               # Shortcut to repository cache.
-               cache = self.repo.cache
-
                # Construct cache and download filename.
                filename = os.path.join(METADATA_DOWNLOAD_PATH, self.metadata.database)
 
-               if not cache.exists(filename):
+               if not self.cache.exists(filename):
                        # Initialize a grabber for download.
                        grabber = downloader.DatabaseDownloader(
                                text = _("%s: package database") % self.repo.name,
@@ -400,7 +219,7 @@ class RemoteIndex(DatabaseIndexFactory):
 
                        data = grabber.urlread(filename)
 
-                       with cache.open(filename, "w") as o:
+                       with self.cache.open(filename, "w") as o:
                                o.write(data)
 
                        # decompress the database
@@ -408,41 +227,103 @@ class RemoteIndex(DatabaseIndexFactory):
                                # Open input file and remove the file immediately.
                                # The fileobj is still open and the data will be removed
                                # when it is closed.
-                               compress.decompress(cache.abspath(filename),
+                               compress.decompress(self.cache.abspath(filename),
                                        algo=self.metadata.database_compression)
 
                        # check the hashsum of the downloaded file
-                       if not util.calc_hash1(cache.abspath(filename)) == self.metadata.database_hash1:
+                       if not util.calc_hash1(self.cache.abspath(filename)) == self.metadata.database_hash1:
                                # XXX an exception is not a very good idea because this file could
                                # be downloaded from another mirror. need a better way to handle this.
 
                                # Remove bad file from cache.
-                               cache.remove(filename)
+                               self.cache.remove(filename)
 
                                raise Exception, "Downloaded file did not match the hashsum. Need to re-download it."
 
                # (Re-)open the database.
-               self.db = database.RemotePackageDatabase(self.pakfire,
-                       cache.abspath(filename))
+               self.read(self.cache.abspath(filename))
+
+
+class IndexDir(Index):
+       def check(self):
+               pass # XXX to be done
+
+       @property
+       def path(self):
+               path = self.repo.path
+
+               if path.startswith("file://"):
+                       path = path[7:]
+
+               return path
 
        def update(self, force=False):
-               """
-                       Download the repository metadata and the package database.
-               """
+               logging.debug("Updating repository index '%s' (force=%s)" % (self.path, force))
 
-               # Skip the download for local repositories.
-               if self.repo.local:
+               # Do nothing if the update is not forced but populate the database
+               # if no packages are present.
+               if not force and len(self.repo):
                        return
 
-               # At first, update the metadata.
-               self._update_metadata(force)
+               # Collect all packages from default path.
+               self.collect_packages(self.path)
 
-               # Then, we download the database eventually.
-               self._update_database(force)
+       def collect_packages(self, path):
+               # XXX make progress bar for that
+               for dir, subdirs, files in os.walk(path):
+                       for file in sorted(files):
+                               # Skip files that do not have the right extension
+                               if not file.endswith(".%s" % PACKAGE_EXTENSION):
+                                       continue
 
-               # XXX this code needs lots of work:
-               # XXX   * check the metadata content
+                               package = packages.open(self.pakfire, self.repo, os.path.join(dir, file))
 
-       @property
-       def cachefile(self):
-               return "%s.cache" % self.db.filename
+                               if isinstance(package, packages.BinaryPackage):
+                                       if not package.arch in (self.repo.arch, "noarch"):
+                                               logging.warning("Skipped package with wrong architecture: %s (%s)" \
+                                                       % (package.filename, package.arch))
+                                               print package.type
+                                               continue
+
+                               # Skip all source packages.
+                               elif isinstance(package, packages.SourcePackage):
+                                       continue
+
+                               self.add_package(package)
+
+                               yield package
+
+
+class IndexLocal(Index):
+       def init(self):
+               self.db = database.DatabaseLocal(self.pakfire, self.repo)
+
+       def check(self):
+               # XXX Create the database and lock it or something.
+               pass
+
+       def update(self, force=True):
+               if self.solver_repo.size() == 0:
+                       force = True
+
+               if force:
+                       package_count = len(self.db)
+
+                       # Nothing to do here, if there are no packages in the database.
+                       if not package_count:
+                               return
+
+                       # Add all packages from the database to the index.
+                       pb = util.make_progress(_("Loading installed packages"), package_count)
+
+                       i = 0
+                       for pkg in self.db.packages:
+                               if pb:
+                                       i += 1
+                                       pb.update(i)
+
+                               # XXX currently broken
+                               #self.add_package(pkg)
+
+                       if pb:
+                               pb.finish()
diff --git a/pakfire/repository/index_old.py b/pakfire/repository/index_old.py
new file mode 100644 (file)
index 0000000..79c412c
--- /dev/null
@@ -0,0 +1,448 @@
+#!/usr/bin/python
+
+import fnmatch
+import json
+import logging
+import os
+import random
+import shutil
+import subprocess
+import time
+
+import database
+import metadata
+
+import pakfire.compress as compress
+import pakfire.downloader as downloader
+import pakfire.packages as packages
+import pakfire.util as util
+
+from pakfire.constants import *
+from pakfire.i18n import _
+
+class Index(object):
+       def __init__(self, pakfire, repo):
+               self.pakfire = pakfire
+               self.repo = repo
+
+               self._packages = []
+
+       @property
+       def arch(self):
+               return self.pakfire.distro.arch
+
+       def get_all_by_name(self, name):
+               for package in self.packages:
+                       if package.name == name:
+                               yield package
+
+       def get_by_file(self, filename):
+               for pkg in self.packages:
+                       match = False
+                       for pkg_filename in pkg.filelist:
+                               if fnmatch.fnmatch(pkg_filename, filename):
+                                       match = True
+                                       break
+
+                       if match:
+                               yield pkg
+
+       def get_by_evr(self, name, epoch, version, release):
+               try:
+                       epoch = int(epoch)
+               except TypeError:
+                       epoch = 0
+
+               for pkg in self.packages:
+                       if pkg.type == "source":
+                               continue
+
+                       if pkg.name == name and pkg.epoch == epoch \
+                                       and pkg.version == version and pkg.release == release:
+                               yield pkg
+
+       def get_by_id(self, id):
+               raise NotImplementedError
+
+       def get_by_uuid(self, uuid):
+               for pkg in self.packages:
+                       if pkg.uuid == uuid:
+                               return pkg
+
+       def get_by_provides(self, requires):
+               for pkg in self.packages:
+                       if pkg.does_provide(requires):
+                               yield pkg
+
+       @property
+       def packages(self):
+               for pkg in self._packages:
+                       yield pkg
+
+       @property
+       def size(self):
+               i = 0
+               for pkg in self.packages:
+                       i += 1
+
+               return i
+
+       def update(self, force=False):
+               pass
+
+       def add_package(self, pkg):
+               raise NotImplementedError
+
+       @property
+       def cachefile(self):
+               return None
+
+       def import_to_solver(self, solver, repo):
+               if self.cachefile:
+                       if not os.path.exists(self.cachefile):
+                               self.create_solver_cache()
+
+                       logging.debug("Importing repository cache data from %s" % self.cachefile)
+                       repo.add_solv(self.cachefile)
+
+               else:
+                       for pkg in self.packages:
+                               solver.add_package(pkg, repo.name())
+
+               logging.debug("Initialized new repo '%s' with %s packages." % \
+                       (repo.name(), repo.size()))
+
+       def create_solver_cache(self):
+               cachedir = os.path.dirname(self.cachefile)
+               if not os.path.exists(cachedir):
+                       os.makedirs(cachedir)
+
+               f = open(self.cachefile, "w")
+
+               # Write metadata header.
+               xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+               xml += "<metadata xmlns=\"http://linux.duke.edu/metadata/common\""
+               xml += " xmlns:rpm=\"http://linux.duke.edu/metadata/rpm\">\n"
+
+               # We dump an XML string for every package in this repository and
+               # write it to the XML file.
+               for pkg in self.packages:
+                       xml += pkg.export_xml_string()
+
+               # Write footer.
+               xml += "</metadata>"
+
+               p = subprocess.Popen("rpmmd2solv", stdin=subprocess.PIPE,
+                       stdout=subprocess.PIPE)
+               stdout, stderr = p.communicate(xml)
+
+               f.write(stdout)
+               f.close()
+
+
+class DirectoryIndex(Index):
+       def __init__(self, pakfire, repo, path):
+               if path.startswith("file://"):
+                       path = path[7:]
+               self.path = path
+
+               Index.__init__(self, pakfire, repo)
+
+               # Always update this because it will otherwise contain no data
+               self.update(force=True)
+
+       def update(self, force=False):
+               logging.debug("Updating repository index '%s' (force=%s)" % (self.path, force))
+
+               # Do nothing if the update is not forced but populate the database
+               # if no packages are present.
+               if not force and self._packages:
+                       return
+
+               # If we update the cache, we clear it first.
+               self._packages = []
+
+               for dir, subdirs, files in os.walk(self.path):
+                       for file in files:
+                               # Skip files that do not have the right extension
+                               if not file.endswith(".%s" % PACKAGE_EXTENSION):
+                                       continue
+
+                               file = os.path.join(dir, file)
+
+                               package = packages.open(self.pakfire, self.repo, file)
+
+                               logging.debug("Found package: %s" % package)
+
+                               if isinstance(package, packages.BinaryPackage):
+                                       if not package.arch in (self.arch, "noarch"):
+                                               logging.warning("Skipped package with wrong architecture: %s (%s)" \
+                                                       % (package.filename, package.arch))
+                                               print package.type
+                                               continue
+
+                               # XXX this is disabled because we could also have source
+                               # repositories. But we should not mix them.     
+                               #if package.type == "source":
+                               #       # Silently skip source packages.
+                               #       continue
+
+                               self._packages.append(package)
+
+       def save(self, path=None):
+               if not path:
+                       path = self.path
+
+               path = os.path.join(path, "index.db")
+
+               db = database.PackageDatabase(self.pakfire, path)
+
+               for pkg in self.packages:
+                       db.add_package(pkg)
+
+               db.close()
+
+
+class DatabaseIndexFactory(Index):
+       def __init__(self, pakfire, repo):
+               Index.__init__(self, pakfire, repo)
+
+               # Add empty reference to a fictional database.
+               self.db = None
+
+               self.open_database()
+
+       def open_database(self):
+               raise NotImplementedError
+
+       @property
+       def packages(self):
+               c = self.db.cursor()
+               c.execute("SELECT * FROM packages")
+
+               for pkg in c:
+                       yield packages.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
+
+               c.close()
+
+       def add_package(self, pkg, reason=None):
+               return self.db.add_package(pkg, reason)
+
+       def get_by_id(self, id):
+               c = self.db.cursor()
+               c.execute("SELECT * FROM packages WHERE id = ? LIMIT 1", (id,))
+
+               ret = None
+               for pkg in c:
+                       ret = packages.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
+
+               c.close()
+
+               return ret
+
+       def get_by_file(self, filename):
+               c = self.db.cursor()
+               c.execute("SELECT pkg FROM files WHERE name GLOB ?", (filename,))
+
+               for pkg in c:
+                       yield self.get_by_id(pkg["pkg"])
+
+               c.close()
+
+       @property
+       def filelist(self):
+               c = self.db.cursor()
+               c.execute("SELECT pkg, name FROM files")
+
+               files = {}
+
+               for entry in c:
+                       file = entry["name"]
+                       try:
+                               files[pkg_id].append(file)
+                       except KeyError:
+                               files[pkg_id] = [file,]
+
+               c.close()
+
+               return files
+
+
+class InstalledIndex(DatabaseIndexFactory):
+       def open_database(self):
+               # Open the local package database.
+               self.db = database.LocalPackageDatabase(self.pakfire)
+
+
+class LocalIndex(DatabaseIndexFactory):
+       def open_database(self):
+               self.db = database.RemotePackageDatabase(self.pakfire, ":memory:")
+
+       def save(self, path=None, algo="xz"):
+               """
+                       This function saves the database and metadata to path so it can
+                       be exported to a remote repository.
+               """
+               if not path:
+                       path = self.repo.path
+
+               # Create filenames
+               metapath = os.path.join(path, METADATA_DOWNLOAD_PATH)
+               db_path  = os.path.join(metapath, METADATA_DATABASE_FILE)
+               md_path  = os.path.join(metapath, METADATA_DOWNLOAD_FILE)
+
+               if not os.path.exists(metapath):
+                       os.makedirs(metapath)
+
+               else:
+                       # If a database is present, we remove it because we want to start
+                       # with a clean environment.
+                       if os.path.exists(db_path):
+                               os.unlink(db_path)
+
+               # Save the database to path and get the filename.
+               self.db.save(db_path)
+
+               # Give the database file a unique, hash-prefixed name so that
+               # we won't get into any trouble with caching proxies.
+               db_hash = util.calc_hash1(db_path)
+
+               db_path2 = os.path.join(os.path.dirname(db_path),
+                       "%s-%s" % (db_hash, os.path.basename(db_path)))
+
+               # Compress the database.
+               if algo:
+                       compress.compress(db_path, algo=algo, progress=True)
+
+               if not os.path.exists(db_path2):
+                       shutil.move(db_path, db_path2)
+               else:
+                       os.unlink(db_path)
+
+               # Create a new metadata object and add our information to it.
+               md = metadata.Metadata(self.pakfire, self)
+
+               # Save name of the hashed database to the metadata.
+               md.database = os.path.basename(db_path2)
+               md.database_hash1 = db_hash
+               md.database_compression = algo
+
+               # Save metadata to repository.
+               md.save(md_path)
+
+
+class RemoteIndex(DatabaseIndexFactory):
+       def open_database(self):
+               self.update(force=False)
+
+       def _update_metadata(self, force):
+               # Shortcut to repository cache.
+               cache = self.repo.cache
+
+               filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)
+
+               # Marker if we need to do the download.
+               download = True
+
+               # Marker for the current metadata.
+               old_metadata = None
+
+               if not force:
+                       # Check if file does exists and is not too old.
+                       if cache.exists(filename):
+                               age = cache.age(filename)
+                               if age and age < TIME_10M:
+                                       download = False
+                                       logging.debug("Metadata is recent enough. I don't download it again.")
+
+                               # Open old metadata for comparison.
+                               old_metadata = metadata.Metadata(self.pakfire, self,
+                                       cache.abspath(filename))
+
+               if download:
+                       logging.debug("Going to (re-)download the repository metadata.")
+
+                       # Initialize a grabber for download.
+                       grabber = downloader.MetadataDownloader()
+                       grabber = self.repo.mirrors.group(grabber)
+
+                       data = grabber.urlread(filename, limit=METADATA_DOWNLOAD_LIMIT)
+
+                       # Parse new metadata for comparison.
+                       new_metadata = metadata.Metadata(self.pakfire, self, metadata=data)
+
+                       if old_metadata and new_metadata < old_metadata:
+                               logging.warning("The downloaded metadata was less recent than the current one. Trashing that.")
+
+                       else:
+                               # We explicitly rewrite the metadata if it is equal to have
+                               # a new timestamp and do not download it over and over again.
+                               with cache.open(filename, "w") as o:
+                                       o.write(data)
+
+               # Parse the metadata that we just downloaded or load it from cache.
+               self.metadata = metadata.Metadata(self.pakfire, self,
+                       cache.abspath(filename))
+
+       def _update_database(self, force):
+               # Shortcut to repository cache.
+               cache = self.repo.cache
+
+               # Construct cache and download filename.
+               filename = os.path.join(METADATA_DOWNLOAD_PATH, self.metadata.database)
+
+               if not cache.exists(filename):
+                       # Initialize a grabber for download.
+                       grabber = downloader.DatabaseDownloader(
+                               text = _("%s: package database") % self.repo.name,
+                       )
+                       grabber = self.repo.mirrors.group(grabber)
+
+                       data = grabber.urlread(filename)
+
+                       with cache.open(filename, "w") as o:
+                               o.write(data)
+
+                       # decompress the database
+                       if self.metadata.database_compression:
+                               # Open input file and remove the file immediately.
+                               # The fileobj is still open and the data will be removed
+                               # when it is closed.
+                               compress.decompress(cache.abspath(filename),
+                                       algo=self.metadata.database_compression)
+
+                       # check the hashsum of the downloaded file
+                       if not util.calc_hash1(cache.abspath(filename)) == self.metadata.database_hash1:
+                               # XXX an exception is not a very good idea because this file could
+                               # be downloaded from another mirror. need a better way to handle this.
+
+                               # Remove bad file from cache.
+                               cache.remove(filename)
+
+                               raise Exception, "Downloaded file did not match the hashsum. Need to re-download it."
+
+               # (Re-)open the database.
+               self.db = database.RemotePackageDatabase(self.pakfire,
+                       cache.abspath(filename))
+
+       def update(self, force=False):
+               """
+                       Download the repository metadata and the package database.
+               """
+
+               # Skip the download for local repositories.
+               if self.repo.local:
+                       return
+
+               # At first, update the metadata.
+               self._update_metadata(force)
+
+               # Then, we download the database eventually.
+               self._update_database(force)
+
+               # XXX this code needs lots of work:
+               # XXX   * check the metadata content
+
+       @property
+       def cachefile(self):
+               return "%s.cache" % self.db.filename
index 917f7c589c466815f4d5b9422d90821dd7403acf..c9303a346885ac753319c71e99c0d1cf2922f4ce 100644 (file)
@@ -4,38 +4,25 @@ import logging
 import os
 import shutil
 
-import pakfire.packages as packages
-import pakfire.util as util
-
+import base
 import index
+import metadata
 
-from base import RepositoryFactory
+import pakfire.compress as compress
+import pakfire.packages as packages
+import pakfire.util as util
 
 from pakfire.constants import *
 
-class LocalRepository(RepositoryFactory):
-       def __init__(self, pakfire, name, description, path, idx="db"):
-               RepositoryFactory.__init__(self, pakfire, name, description)
+class RepositoryDir(base.RepositoryFactory):
+       def __init__(self, pakfire, name, description, path):
+               base.RepositoryFactory.__init__(self, pakfire, name, description)
 
-               # Save location of the repository and create it if not existant.
+               # Path to files.
                self.path = path
-               if not os.path.exists(self.path):
-                       os.makedirs(self.path)
 
-               if idx == "db":
-                       self.index = index.LocalIndex(self.pakfire, self)
-
-               elif idx == "directory":
-                       self.index = index.DirectoryIndex(self.pakfire, self, self.path)
-
-       def remove(self):
-               if os.path.exists(self.path):
-                       util.rm(self.path)
-
-       @property
-       def local(self):
-               # This is obviously local.
-               return True
+               # Create index
+               self.index = index.IndexDir(self.pakfire, self)
 
        @property
        def priority(self):
@@ -44,110 +31,134 @@ class LocalRepository(RepositoryFactory):
                """
                return 10
 
-       def _collect_packages(self, path):
-               logging.info("Collecting packages from %s." % path)
-
-               for dir, subdirs, files in os.walk(path):
-                       for file in files:
-                               if not file.endswith(".%s" % PACKAGE_EXTENSION):
-                                       continue
-
-                               file = os.path.join(dir, file)
-
-                               pkg = packages.open(self.pakfire, self, file)
-                               self._add_package(pkg)
-
-       def _add_package(self, pkg):
-               # XXX gets an instance of binary package and puts it into the
-               # repo location if not done yet
-               # then: the package gets added to the index
-
-               if not isinstance(pkg, packages.BinaryPackage):
-                       raise Exception
+       def collect_packages(self, *args, **kwargs):
+               """
+                       Proxy function to add packages to the index.
+               """
 
-               # Skip everything but binary packages.
-               if pkg.type == "source":
-                       return
+               for pkg in self.index.collect_packages(*args, **kwargs):
+                       # The path of the package in the repository
+                       repo_filename = os.path.join(self.path, os.path.basename(pkg.filename))
+
+                       # Do we need to copy the package files?
+                       copy = True
+
+                       # Check if the package already exists and check if the
+                       # files are really equal.
+                       if os.path.exists(repo_filename):
+                               pkg_exists = packages.open(self.pakfire, self, repo_filename)
+
+                               # Check UUID at first (faster) and check the file hash to be
+                               # absolutely sure.
+                               if pkg.uuid == pkg_exists.uuid and pkg.hash1 and pkg_exists.hash1:
+                                       # Do not copy the file if it is already okay.
+                                       copy = False
+
+                               # Otherwise, unlink the existing file and replace it with the
+                               # new one.
+                               else:
+                                       os.unlink(repo_filename)
+
+                       if copy:
+                               logging.debug("Copying package '%s' to repository." % pkg.friendly_name)
+                               repo_dirname = os.path.dirname(repo_filename)
+                               if not os.path.exists(repo_dirname):
+                                       os.makedirs(repo_dirname)
+
+                               # Try to use a hard link if possible, if we cannot do that we simply
+                               # copy the file.
+                               try:
+                                       os.link(pkg.filename, repo_filename)
+                               except OSError:
+                                       shutil.copy2(pkg.filename, repo_filename)
+
+       def save(self, path=None, algo="xz"):
+               """
+                       This function saves the database and metadata to path so it can
+                       be exported to a remote repository.
+               """
+               if not path:
+                       path = self.path
 
-               repo_filename = os.path.join(self.path, os.path.basename(pkg.filename))
+               # Create filenames
+               metapath = os.path.join(path, METADATA_DOWNLOAD_PATH)
+               db_path = os.path.join(metapath, METADATA_DATABASE_FILE)
+               md_path = os.path.join(metapath, METADATA_DOWNLOAD_FILE)
 
-               # Do we need to copy the package files?
-               copy = True
+               if not os.path.exists(metapath):
+                       os.makedirs(metapath)
 
-               pkg_exists = None
-               if os.path.exists(repo_filename):
-                       pkg_exists = packages.open(self.pakfire, self, repo_filename)
+               else:
+                       # If a database is present, we remove it because we want to start
+                       # with a clean environment.
+                       if os.path.exists(db_path):
+                               os.unlink(db_path)
 
-                       # If package in the repo is equivalent to the given one, we can
-                       # skip any further processing.
-                       if pkg.hash1 == pkg_exists.hash1:
-                               logging.debug("The package does already exist in this repo: %s" % pkg.friendly_name)
-                               copy = False
+               # Save the database to path and get the filename.
+               self.index.write(db_path)
 
-                       else:
-                               logging.warning("The package is going to be replaced: %s -> %s" % (pkg_exists, pkg))
-                               os.unlink(repo_filename)
+               # Give the database file a unique, hash-prefixed name so that
+               # we won't get into any trouble with caching proxies.
+               db_hash = util.calc_hash1(db_path)
 
-                       del pkg_exists
+               db_path2 = os.path.join(os.path.dirname(db_path),
+                       "%s-%s" % (db_hash, os.path.basename(db_path)))
 
-               if copy:
-                       logging.debug("Copying package '%s' to repository." % pkg.friendly_name)
-                       repo_dirname = os.path.dirname(repo_filename)
-                       if not os.path.exists(repo_dirname):
-                               os.makedirs(repo_dirname)
+               # Compress the database.
+               if algo:
+                       compress.compress(db_path, algo=algo, progress=True)
 
-                       # Try to use a hard link if possible, if we cannot do that we simply
-                       # copy the file.
-                       try:
-                               os.link(pkg.filename, repo_filename)
-                       except OSError:
-                               shutil.copy2(pkg.filename, repo_filename)
+               if not os.path.exists(db_path2):
+                       shutil.move(db_path, db_path2)
+               else:
+                       os.unlink(db_path)
 
-               # Create new package object, that is connected to this repository
-               # and so we can do stuff.
-               pkg = packages.open(self.pakfire, self, repo_filename)
+               # Create a new metadata object and add our information to it.
+               md = metadata.Metadata(self.pakfire, self)
 
-               logging.info("Adding package '%s' to repository." % pkg.friendly_name)
-               self.index.add_package(pkg)
+               # Save name of the hashed database to the metadata.
+               md.database = os.path.basename(db_path2)
+               md.database_hash1 = db_hash
+               md.database_compression = algo
 
-       def save(self, path=None):
-               """
-                       Save the index information to path.
-               """
-               self.index.save(path)
+               # Save metadata to repository.
+               md.save(md_path)
 
 
-class LocalBinaryRepository(LocalRepository):
-       @property
-       def packages(self):
-               for pkg in self.index.packages:
-                       # XXX should be changed to "binary" if all packages do support this.
-                       if pkg.type == "source":
-                               continue
+class RepositoryBuild(RepositoryDir):
+       def __init__(self, pakfire):
+               # XXX need to add distro information to this path
+               path = pakfire.config.get("local_build_repo_path")
 
-                       yield pkg
+               # Create path if it does not exist.
+               if not os.path.exists(path):
+                       os.makedirs(path)
 
+               RepositoryDir.__init__(self, pakfire, "build", "Locally built packages", path)
 
-class LocalSourceRepository(LocalRepository):
        @property
-       def packages(self):
-               for pkg in self.index.packages:
-                       if not pkg.type == "source":
-                               continue
-
-                       yield pkg
+       def priority(self):
+               return 20000
 
 
-class LocalBuildRepository(LocalBinaryRepository):
+class RepositoryLocal(base.RepositoryFactory):
        def __init__(self, pakfire):
-               RepositoryFactory.__init__(self, pakfire, "build", "Locally built packages")
+               base.RepositoryFactory.__init__(self, pakfire, "@system", "Local repository")
 
-               self.path = self.pakfire.config.get("local_build_repo_path")
-               if not os.path.exists(self.path):
-                       os.makedirs(self.path)
+               self.index = index.IndexLocal(self.pakfire, self)
 
-               self.index = index.DirectoryIndex(self.pakfire, self, self.path)
+               # Tell the solver, that these are the installed packages.
+               self.pool.set_installed(self.solver_repo)
 
        @property
        def priority(self):
-               return 20000
+               """
+                       The local repository has always a high priority.
+               """
+               return 10
+
+       def add_package(self, pkg):
+               # Add package to the database.
+               self.index.db.add_package(pkg)
+
+               self.index.add_package(pkg)
diff --git a/pakfire/repository/local_old.py b/pakfire/repository/local_old.py
new file mode 100644 (file)
index 0000000..917f7c5
--- /dev/null
@@ -0,0 +1,153 @@
+#!/usr/bin/python
+
+import logging
+import os
+import shutil
+
+import pakfire.packages as packages
+import pakfire.util as util
+
+import index
+
+from base import RepositoryFactory
+
+from pakfire.constants import *
+
+class LocalRepository(RepositoryFactory):
+       def __init__(self, pakfire, name, description, path, idx="db"):
+               RepositoryFactory.__init__(self, pakfire, name, description)
+
+               # Save location of the repository and create it if not existent.
+               self.path = path
+               if not os.path.exists(self.path):
+                       os.makedirs(self.path)
+
+               if idx == "db":
+                       self.index = index.LocalIndex(self.pakfire, self)
+
+               elif idx == "directory":
+                       self.index = index.DirectoryIndex(self.pakfire, self, self.path)
+
+       def remove(self):
+               if os.path.exists(self.path):
+                       util.rm(self.path)
+
+       @property
+       def local(self):
+               # This is obviously local.
+               return True
+
+       @property
+       def priority(self):
+               """
+                       The local repository has always a high priority.
+               """
+               return 10
+
+       def _collect_packages(self, path):
+               logging.info("Collecting packages from %s." % path)
+
+               for dir, subdirs, files in os.walk(path):
+                       for file in files:
+                               if not file.endswith(".%s" % PACKAGE_EXTENSION):
+                                       continue
+
+                               file = os.path.join(dir, file)
+
+                               pkg = packages.open(self.pakfire, self, file)
+                               self._add_package(pkg)
+
+       def _add_package(self, pkg):
+               # XXX gets an instance of binary package and puts it into the
+               # repo location if not done yet
+               # then: the package gets added to the index
+
+               if not isinstance(pkg, packages.BinaryPackage):
+                       raise Exception
+
+               # Skip everything but binary packages.
+               if pkg.type == "source":
+                       return
+
+               repo_filename = os.path.join(self.path, os.path.basename(pkg.filename))
+
+               # Do we need to copy the package files?
+               copy = True
+
+               pkg_exists = None
+               if os.path.exists(repo_filename):
+                       pkg_exists = packages.open(self.pakfire, self, repo_filename)
+
+                       # If package in the repo is equivalent to the given one, we can
+                       # skip any further processing.
+                       if pkg.hash1 == pkg_exists.hash1:
+                               logging.debug("The package does already exist in this repo: %s" % pkg.friendly_name)
+                               copy = False
+
+                       else:
+                               logging.warning("The package is going to be replaced: %s -> %s" % (pkg_exists, pkg))
+                               os.unlink(repo_filename)
+
+                       del pkg_exists
+
+               if copy:
+                       logging.debug("Copying package '%s' to repository." % pkg.friendly_name)
+                       repo_dirname = os.path.dirname(repo_filename)
+                       if not os.path.exists(repo_dirname):
+                               os.makedirs(repo_dirname)
+
+                       # Try to use a hard link if possible, if we cannot do that we simply
+                       # copy the file.
+                       try:
+                               os.link(pkg.filename, repo_filename)
+                       except OSError:
+                               shutil.copy2(pkg.filename, repo_filename)
+
+               # Create new package object, that is connected to this repository
+               # and so we can do stuff.
+               pkg = packages.open(self.pakfire, self, repo_filename)
+
+               logging.info("Adding package '%s' to repository." % pkg.friendly_name)
+               self.index.add_package(pkg)
+
+       def save(self, path=None):
+               """
+                       Save the index information to path.
+               """
+               self.index.save(path)
+
+
+class LocalBinaryRepository(LocalRepository):
+       @property
+       def packages(self):
+               for pkg in self.index.packages:
+                       # XXX should be changed to "binary" if all packages do support this.
+                       if pkg.type == "source":
+                               continue
+
+                       yield pkg
+
+
+class LocalSourceRepository(LocalRepository):
+       @property
+       def packages(self):
+               for pkg in self.index.packages:
+                       if not pkg.type == "source":
+                               continue
+
+                       yield pkg
+
+
+class LocalBuildRepository(LocalBinaryRepository):
+       def __init__(self, pakfire):
+               RepositoryFactory.__init__(self, pakfire, "build", "Locally built packages")
+
+               self.path = self.pakfire.config.get("local_build_repo_path")
+               if not os.path.exists(self.path):
+                       os.makedirs(self.path)
+
+               self.index = index.DirectoryIndex(self.pakfire, self, self.path)
+
+       @property
+       def priority(self):
+               return 20000
index b0b074fb12e0e935846ccca62df07bb40cba614b..0da004a1664d4857e9a9c52d325a4bf98b6b2466 100644 (file)
@@ -2,7 +2,7 @@
 
 from base import RepositoryFactory
 
-class DummyRepository(RepositoryFactory):
+class RepositoryDummy(RepositoryFactory):
        """
                Just a dummy repository that actually does nothing.
        """
index 52c348472aaae14ed173bba4a0c265595f29f09e..af2bba99f7822270cc65f8e849df7fe1f9ffbd76 100644 (file)
@@ -3,64 +3,33 @@
 import logging
 import os
 
-import cache
+import base
 import index
 
 import pakfire.downloader as downloader
 
-from base import RepositoryFactory
+from pakfire.constants import *
 
-class RemoteRepository(RepositoryFactory):
-       cacheable = True
-
-       def __init__(self, pakfire, name, description, url, mirrorlist, gpgkey, enabled):
-               RepositoryFactory.__init__(self, pakfire, name, description)
+class RepositorySolv(base.RepositoryFactory):
+       def __init__(self, pakfire, name, description, url, mirrorlist, gpgkey, enabled=True):
+               base.RepositoryFactory.__init__(self, pakfire, name, description)
 
                # Parse arguments.
                self.url = url
                self.gpgkey = gpgkey
                self.mirrorlist = mirrorlist
 
-               if enabled:
-                       self.enabled = True
-               else:
-                       self.enabled = False
-
-               # Create a cache for the repository where we can keep all temporary data.
-               self.cache = cache.RepositoryCache(self.pakfire, self)
-
                # Initialize mirror servers.
                self.mirrors = downloader.MirrorList(self.pakfire, self)
 
-               # Initialize index.
-               self.index = index.RemoteIndex(self.pakfire, self)
-
-               logging.debug("Created new repository(name='%s', url='%s', enabled='%s')" % \
-                       (self.name, self.url, self.enabled))
-
-       def __repr__(self):
-               return "<%s %s>" % (self.__class__.__name__, self.url)
-
-       @property
-       def local(self):
-               # If files are located somewhere in the filesystem we assume it is
-               # local.
-               if self.url.startswith("file://"):
-                       return True
-
-               # Otherwise not.
-               return False
-
-       @property
-       def arch(self):
-               return self.pakfire.distro.arch
+               # Create index, which is always SOLV.
+               self.index = index.IndexSolv(self.pakfire, self)
 
-       @property
-       def path(self):
-               if self.local:
-                       return self.url[7:]
-
-               return self.cache.path
+               # Save enabled/disabled flag at the end.
+               if enabled in ("1", "yes", "on", True, 1):
+                       self.enabled = True
+               else:
+                       self.enabled = False
 
        @property
        def priority(self):
@@ -78,26 +47,62 @@ class RemoteRepository(RepositoryFactory):
 
                return priority
 
-       #def update(self, force=False):
-       #       if self.index:
-       #               self.index.update(force=force)
-
-       def _replace_from_cache(self, pkg):
-               for _pkg in self.cache.packages:
-                       if pkg == _pkg:
-                               pkg = _pkg
-                               break
-
-               return pkg
-
-       @property
-       def packages(self):
-               for pkg in self.index.packages:
-                       yield self._replace_from_cache(pkg)
-
-       def get_by_provides(self, requires):
-               for pkg in self.index.get_by_provides(requires):
-                       yield self._replace_from_cache(pkg)
-
-       def get_by_file(self, filename):
-               return self.index.get_by_file(filename)
+       def download(self, filename, hash1=None, text=""):
+               """
+                       Downloads 'filename' from repository and returns the local filename.
+               """
+               # Marker, if we need to download the package.
+               download = True
+
+               cache_prefix = ""
+               if filename.endswith(PACKAGE_EXTENSION):
+                       cache_prefix = "packages"
+               elif filename == METADATA_DOWNLOAD_FILE:
+                       cache_prefix = "repodata"
+               elif filename.endswith(METADATA_DATABASE_FILE):
+                       cache_prefix = "repodata"
+
+               cache_filename = os.path.join(cache_prefix, os.path.basename(filename))
+
+               # Check if file already exists in cache.
+               if self.cache.exists(cache_filename):
+                       logging.debug("File exists in cache: %s" % filename)
+
+                       # If the file does already exist, we check if the hash1 matches.
+                       if hash1 and self.cache.verify(cache_filename, hash1):
+                               # We already got the right file. Skip download.
+                               download = False
+                       else:
+                               # The file in cache has a wrong hash. Remove it and repeat download.
+                               cache.remove(cache_filename)
+
+               if download:
+                       logging.debug("Going to download %s" % filename)
+
+                       # Make sure filename is of type string (and not unicode)
+                       filename = str(filename)
+
+                       # Get a package grabber and add mirror download capabilities to it.
+                       grabber = downloader.PackageDownloader(
+                               text=text + os.path.basename(filename),
+                       )
+                       grabber = self.mirrors.group(grabber)
+
+                       i = grabber.urlopen(filename)
+
+                       # Open input and output files and download the file.
+                       o = self.cache.open(cache_filename, "w")
+
+                       buf = i.read(BUFFER_SIZE)
+                       while buf:
+                               o.write(buf)
+                               buf = i.read(BUFFER_SIZE)
+
+                       i.close()
+                       o.close()
+
+                       # Verify if the download was okay.
+                       if hash1 and not self.cache.verify(cache_filename, hash1):
+                               raise Exception, "XXX this should never happen..."
+
+               return os.path.join(self.cache.path, cache_filename)
diff --git a/pakfire/repository/remote_old.py b/pakfire/repository/remote_old.py
new file mode 100644 (file)
index 0000000..52c3484
--- /dev/null
@@ -0,0 +1,103 @@
+#!/usr/bin/python
+
+import logging
+import os
+
+import cache
+import index
+
+import pakfire.downloader as downloader
+
+from base import RepositoryFactory
+
class RemoteRepository(RepositoryFactory):
    """A repository whose packages live behind a URL (possibly mirrored)."""

    # Remote repositories may keep temporary data on the local disk.
    cacheable = True

    def __init__(self, pakfire, name, description, url, mirrorlist, gpgkey, enabled):
        """Set up the repository and initialize its cache, mirrors and index."""
        RepositoryFactory.__init__(self, pakfire, name, description)

        # Store the connection settings.
        self.url = url
        self.gpgkey = gpgkey
        self.mirrorlist = mirrorlist

        # Coerce the enabled flag into a real boolean.
        self.enabled = True if enabled else False

        # A cache for the repository where all temporary data is kept.
        self.cache = cache.RepositoryCache(self.pakfire, self)

        # The list of mirror servers we may download from.
        self.mirrors = downloader.MirrorList(self.pakfire, self)

        # The package index of this repository.
        self.index = index.RemoteIndex(self.pakfire, self)

        logging.debug("Created new repository(name='%s', url='%s', enabled='%s')" % \
            (self.name, self.url, self.enabled))

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.url)

    @property
    def local(self):
        # Repositories whose files are located somewhere in the local
        # filesystem are considered local; everything else is not.
        return self.url.startswith("file://")

    @property
    def arch(self):
        # A remote repository always serves the distribution architecture.
        return self.pakfire.distro.arch

    @property
    def path(self):
        # For local repositories, strip the "file://" scheme prefix and
        # use the path directly; otherwise go through the cache.
        if self.local:
            return self.url[7:]

        return self.cache.path

    @property
    def priority(self):
        # Local repositories are preferred over plain HTTP ones; URLs
        # with an unknown scheme get the lowest priority (100).
        url2priority = {
            "file://" : 50,
            "http://" : 75,
        }

        for prefix, prio in url2priority.items():
            if self.url.startswith(prefix):
                return prio

        return 100

    #def update(self, force=False):
    #	if self.index:
    #		self.index.update(force=force)

    def _replace_from_cache(self, pkg):
        # Prefer the cached (downloaded) copy of a package if we have one.
        for cached_pkg in self.cache.packages:
            if pkg == cached_pkg:
                return cached_pkg

        return pkg

    @property
    def packages(self):
        for pkg in self.index.packages:
            yield self._replace_from_cache(pkg)

    def get_by_provides(self, requires):
        for pkg in self.index.get_by_provides(requires):
            yield self._replace_from_cache(pkg)

    def get_by_file(self, filename):
        return self.index.get_by_file(filename)
diff --git a/pakfire/repository/solver.py b/pakfire/repository/solver.py
deleted file mode 100644 (file)
index 07d0f26..0000000
+++ /dev/null
@@ -1,245 +0,0 @@
-#!/usr/bin/python
-
-import logging
-import satsolver
-import time
-
-from transaction import Transaction
-
-import pakfire.util as util
-
-from pakfire.constants import *
-from pakfire.i18n import _
-
-class Solver(object):
-       RELATIONS = (
-               (">=", satsolver.REL_GE,),
-               ("<=", satsolver.REL_LE,),
-               ("=" , satsolver.REL_EQ,),
-               ("<" , satsolver.REL_LT,),
-               (">" , satsolver.REL_GT,),
-       )
-
-       def __init__(self, pakfire, repos, arch=None):
-               self.pakfire = pakfire
-               self.repos = repos
-
-               if not arch:
-                       arch = self.pakfire.distro.arch
-
-               # Mapping from solver ID to a package.
-               self.id2pkg = {}
-
-               # Initialize the pool and set the architecture.
-               self.pool = satsolver.Pool()
-               self.pool.set_arch(arch)
-
-               # Initialize all repositories.
-               for repo in self.repos.enabled:
-                       self.init_repo(repo)
-
-#              self.init_repos()
-
-               self.pool.prepare()
-
-               logging.debug("Solver pool has %s solvables." % self.pool.size())
-
-       def create_repo(self, *args, **kwargs):
-               return self.pool.create_repo(*args, **kwargs)
-
-       def create_relation(self, s):
-               s = str(s)
-
-               if s.startswith("/"):
-                       return satsolver.Relation(self.pool, s)
-
-               for pattern, type in self.RELATIONS:
-                       if not pattern in s:
-                               continue
-
-                       name, version = s.split(pattern, 1)
-
-                       return satsolver.Relation(self.pool, name, type, version)
-
-               return satsolver.Relation(self.pool, s)
-
-       def init_repo(self, repo):
-               solvrepo = self.pool.create_repo(repo.name)
-               if repo.name == "installed":
-                       self.pool.set_installed(solvrepo)
-
-               repo.import_to_solver(self, solvrepo)
-               return
-
-               # XXX dead code
-
-               solvrepo = self.pool.create_repo(repo.name)
-               if repo.name == "installed":
-                       self.pool.set_installed(solvrepo)
-
-               pb = util.make_progress(_("Loading %s") % repo.name, repo.size)
-               i = 0
-
-               # Let's see if this repository has a cache and use it if possible.
-               cachefile = repo.metadata_cachefile
-               print cachefile
-               if cachefile and os.path.exists(cachefile):
-                       solvrepo.add_solv(cachefile)
-
-               else:
-                       for pkg in repo.get_all():
-                               if pb:
-                                       i += 1
-                                       pb.update(i)
-
-                               self.add_package(pkg)
-
-               logging.debug("Initialized new repo '%s' with %s packages." % \
-                       (solvrepo.name(), solvrepo.size()))
-
-               if pb:
-                       pb.finish()
-
-               repos.append(solvrepo)
-
-       def add_package(self, pkg, reponame):
-               repo = self.repos.get_repo_by_name(reponame)
-
-               solvable = satsolver.Solvable(repo.solver_repo, str(pkg.name),
-                       str(pkg.friendly_version), str(pkg.arch))
-
-               # Set vendor.
-               solvable.set_vendor(pkg.vendor)
-
-               # Import all requires.
-               for req in pkg.requires:
-                       rel = self.create_relation(req)
-                       solvable.requires().add(rel)
-
-               # Import all provides.
-               for prov in pkg.provides:
-                       rel = self.create_relation(prov)
-                       solvable.provides().add(rel)
-
-               # Import all conflicts.
-               for conf in pkg.conflicts:
-                       rel = self.create_relation(conf)
-                       solvable.conflicts().add(rel)
-
-               # Import all obsoletes.
-               for obso in pkg.obsoletes:
-                       rel = self.create_relation(obso)
-                       solvable.obsoletes().add(rel)
-
-               # Import all files that are in the package.
-               rel = self.create_relation("solvable:filemarker")
-               solvable.provides().add(rel)
-               for file in pkg.filelist:
-                       rel = self.create_relation(file)
-                       solvable.provides().add(rel)
-
-       def create_request(self):
-               return self.pool.create_request()
-
-       def solve(self, request, update=False, allow_downgrade=False, interactive=False):
-               solver = self.pool.create_solver()
-               #solver.set_allow_uninstall(True)
-
-               solver.set_allow_downgrade(allow_downgrade)
-
-               # Configure the solver for an update.
-               if update:
-                       solver.set_update_system(True)
-                       solver.set_do_split_provides(True)
-
-               while True:
-                       # Save start time.
-                       time_start = time.time()
-
-                       # Acutally run the solver.
-                       res = solver.solve(request)
-
-                       # Log time and status of the solver.
-                       logging.debug("Solving took %s" % (time.time() - time_start))
-                       logging.debug("Solver status: %s" % res)
-
-                       # If the solver succeeded, we return the transaction and return.
-                       if res:
-                               # Return a resulting Transaction.
-                               return Transaction.from_solver(self.pakfire, self, solver)
-
-                       # Solver had an error and we now see what we can do:
-                       logging.info("The solver returned %s problems." % solver.problems_count())
-
-                       jobactions = {
-                               satsolver.INSTALL_SOLVABLE : "install",
-                               satsolver.UPDATE_SOLVABLE  : "update",
-                               satsolver.REMOVE_SOLVABLE  : "remove",
-                       }
-
-                       problem_count = 0
-                       for problem in solver.problems(request):
-                               problem_count += 1
-
-                               # A data structure to store the solution to the key that is
-                               # the user supposed to press.
-                               solutionmap = {}
-
-                               logging.warning(" Problem %s: %s" % (problem_count, problem))
-
-                               solution_count = 0
-                               for solution in problem.solutions():
-                                       solution_count += 1
-                                       solutionmap[solution_count] = solution
-
-                                       logging.info("  [%2d]: %s" % (solution_count, solution))
-
-                               if not interactive:
-                                       continue
-
-                               continue # XXX
-
-                               logging.info("  - %s -" % _("Empty to abort."))
-
-                               while True:
-                                       print _("Choose a solution:"),
-
-                                       ret = raw_input()
-                                       # If the user has entered nothing, we abort the operation.
-                                       if not ret:
-                                               return
-
-                                       try:
-                                               ret = int(ret)
-                                       except ValueError:
-                                               ret = None
-
-                                       # Get the selected solution from the map.
-                                       solution = solutionmap.get(ret, None)
-
-                                       if not solution:
-                                               print _("You have entered an invalid solution. Try again.")
-                                               continue
-
-                                       else:
-                                               jobs = [e.job() for e in solution.elements()]
-                                               for job in jobs:
-                                                       try:
-                                                               print jobactions[job.cmd()]
-                                                       except KeyError:
-                                                               raise Exception, "Unknown action called."
-                                               break
-
-       def solv2pkg(self, solv):
-               repo = self.repos.get_repo_by_name(solv.repo().name())
-
-               return repo.get_by_evr(solv.name(), solv.evr())
-
-       def get_by_provides(self, provides):
-               provides = self.create_relation(provides)
-
-               pkgs = self.solvables2packages(self.pool.providers(provides))
-
-               return pkgs
-
-       get_by_name = get_by_provides
index 7efbb690eccf6a26f201c73cac48d11cf906607e..9c210ab73f602403a1bed6b603b3e4827b2a90f4 100644 (file)
@@ -22,6 +22,12 @@ class Action(object):
                self.pakfire = pakfire
                self.pkg = pkg
 
+               # Try to get the binary version of the package from the cache if
+               # any.
+               binary_package = self.pkg.get_from_cache()
+               if binary_package:
+                       self.pkg = binary_package
+
        def __cmp__(self, other):
                # XXX ugly
                return cmp(self.__repr__(), other.__repr__())
@@ -85,14 +91,14 @@ class ActionInstall(Action):
        def extract(self, message, prefix=None):
                logging.debug("Extracting package %s" % self.pkg.friendly_name)
 
+               # Create package in the database
+               self.local.add_package(self.pkg)
+
                if prefix is None:
                        prefix = self.pakfire.path
 
                self.pkg.extract(message, prefix=prefix)
 
-               # Create package in the database
-               self.local.index.add_package(self.pkg)
-
        def run(self):
                msg = _("Extracting: %s")
 
@@ -107,8 +113,6 @@ class ActionInstall(Action):
 
                self.extract(msg % self.pkg.name)
 
-               self.pakfire.solver.add_package(self.pkg, "installed")
-
 
 class ActionUpdate(ActionInstall):
        type = "upgrade"
@@ -156,20 +160,13 @@ class Transaction(object):
                self.actions = []
 
        @classmethod
-       def from_solver(cls, pakfire, solver1, solver2):
-               # Grab the original transaction object from the solver.
-               _transaction = solver2.transaction()
-
-               # Order the objects in the transaction in that way we will run the
-               # installation.
-               _transaction.order()
-
+       def from_solver(cls, pakfire, solver, _transaction):
                # Create a new instance of our own transaction class.
                transaction = cls(pakfire)
 
                for step in _transaction.steps():
-                       action = step.type_s(satsolver.TRANSACTION_MODE_ACTIVE)
-                       pkg = solver1.solv2pkg(step.solvable())
+                       action = step.get_type()
+                       pkg = packages.SolvPackage(pakfire, step.get_solvable())
 
                        for action_cls in cls.action_classes:
                                if action_cls.type == action:
diff --git a/pakfire/satsolver.py b/pakfire/satsolver.py
new file mode 100644 (file)
index 0000000..5894bfe
--- /dev/null
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+
+import logging
+
+import _pakfire
+from _pakfire import *
+
+# XXX maybe we can move this to the root
+import pakfire.repository.transaction
+
class Request(_pakfire.Request):
    """Thin wrapper around the C Request that dispatches install() by type."""

    def install(self, what):
        """Queue *what* for installation.

        Accepts a Solvable, a Relation or a plain package name as a
        string; raises an Exception for any other type.
        """
        if isinstance(what, Solvable):
            self.install_solvable(what)
            return

        if isinstance(what, Relation):
            self.install_relation(what)
            return

        # isinstance() also accepts str subclasses, which the former
        # exact type(...) comparison rejected.
        if isinstance(what, str):
            self.install_name(what)
            return

        # Function-call raise syntax works on both Python 2 and 3
        # (the former "raise Exception, ..." form is Python-2-only).
        raise Exception("Unknown type")
+
+
class Solver(object):
    """High-level frontend for the C solver bound to a package pool."""

    def __init__(self, pakfire, pool):
        self.pakfire = pakfire
        self.pool = pool

        # The low-level solver object from the C extension module.
        self._solver = _pakfire.Solver(self.pool)

    def solve(self, request, update=False, allow_downgrade=False):
        """Run the solver on *request*.

        Returns a high-level Transaction on success, or the (falsy)
        solver status on failure.
        """
        #self._solver.set_allow_uninstall(True)
        self._solver.set_allow_downgrade(allow_downgrade)

        if update:
            # Put the solver into "update the whole system" mode.
            self._solver.set_update_system(True)
            self._solver.set_do_split_provides(True)

        status = self._solver.solve(request)

        logging.debug("Solver status: %s" % status)

        if not status:
            # The solver could not find a solution; hand the status
            # back to the caller.
            return status

        # Wrap the low-level result and convert it into our own
        # Transaction object.
        transaction = Transaction(self._solver)

        return pakfire.repository.transaction.Transaction.from_solver(
            self.pakfire, self, transaction)
index 0c65413961426d42b5cdc5f9d1a2167370928f08..3b06d67a1a2a9a7eb8bb50a83deeb56e36ec9a3e 100644 (file)
@@ -1,40 +1,45 @@
-pakfire/util.py
 pakfire/api.py
+pakfire/base.py
+pakfire/builder.py
+pakfire/chroot.py
+pakfire/cli.py
+pakfire/compress.py
+pakfire/config.py
+pakfire/constants.py
 pakfire/distro.py
-pakfire/packages/util.py
+pakfire/downloader.py
+pakfire/errors.py
+pakfire/i18n.py
+pakfire/__init__.py
+pakfire/logger.py
 pakfire/packages/base.py
-pakfire/packages/make.py
-pakfire/packages/__init__.py
-pakfire/packages/source.py
-pakfire/packages/packager.py
+pakfire/packages/binary.py
 pakfire/packages/file.py
+pakfire/packages/__init__.py
 pakfire/packages/installed.py
-pakfire/packages/virtual.py
 pakfire/packages/listing.py
-pakfire/packages/binary.py
-pakfire/chroot.py
-pakfire/downloader.py
-pakfire/compress.py
-pakfire/base.py
-pakfire/i18n.py
-pakfire/__init__.py
-pakfire/server.py
+pakfire/packages/make.py
+pakfire/packages/packager.py
+pakfire/packages/solv.py
+pakfire/packages/source.py
+pakfire/packages/util.py
+pakfire/packages/virtual.py
+pakfire/repository/actions.py
 pakfire/repository/base.py
+pakfire/repository/cache.py
+pakfire/repository/database_old.py
 pakfire/repository/database.py
+pakfire/repository/index_old.py
+pakfire/repository/index.py
 pakfire/repository/__init__.py
-pakfire/repository/solver.py
-pakfire/repository/transaction.py
+pakfire/repository/installed.py
+pakfire/repository/local_old.py
 pakfire/repository/local.py
-pakfire/repository/remote.py
-pakfire/repository/oddments.py
 pakfire/repository/metadata.py
-pakfire/repository/installed.py
-pakfire/repository/actions.py
-pakfire/repository/cache.py
-pakfire/repository/index.py
-pakfire/config.py
-pakfire/logger.py
-pakfire/builder.py
-pakfire/cli.py
-pakfire/errors.py
-pakfire/constants.py
+pakfire/repository/oddments.py
+pakfire/repository/remote_old.py
+pakfire/repository/remote.py
+pakfire/repository/transaction.py
+pakfire/satsolver.py
+pakfire/server.py
+pakfire/util.py
index 8b074300c2491335d74a60e7d7b18f0bbe319036..37a5155178535893ad7d6aa3d66b2e6532051bb3 100644 (file)
@@ -8,7 +8,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2011-05-21 17:26+0200\n"
+"POT-Creation-Date: 2011-07-16 16:56+0200\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@li.org>\n"
@@ -17,370 +17,361 @@ msgstr ""
 "Content-Type: text/plain; charset=CHARSET\n"
 "Content-Transfer-Encoding: 8bit\n"
 
-#: ../pakfire/util.py:36
+#: ../pakfire/builder.py:254
 #, python-format
-msgid "%s [y/N]"
-msgstr ""
-
-#: ../pakfire/packages/base.py:68
-msgid "Name"
-msgstr ""
-
-#: ../pakfire/packages/base.py:69 ../pakfire/repository/transaction.py:242
-msgid "Arch"
+msgid "Extracting: %s (source)"
 msgstr ""
 
-#: ../pakfire/packages/base.py:70 ../pakfire/repository/transaction.py:242
-msgid "Version"
+#: ../pakfire/cli.py:18
+msgid "Pakfire command line interface."
 msgstr ""
 
-#: ../pakfire/packages/base.py:71
-msgid "Release"
+#: ../pakfire/cli.py:25
+msgid "The path where pakfire should operate in."
 msgstr ""
 
-#: ../pakfire/packages/base.py:72 ../pakfire/repository/transaction.py:243
-msgid "Size"
+#: ../pakfire/cli.py:65
+msgid "Enable verbose output."
 msgstr ""
 
-#: ../pakfire/packages/base.py:73
-msgid "Repo"
+#: ../pakfire/cli.py:68
+msgid "Path to a configuration file to load."
 msgstr ""
 
-#: ../pakfire/packages/base.py:74
-msgid "Summary"
+#: ../pakfire/cli.py:71
+msgid "Disable a repository temporarily."
 msgstr ""
 
-#: ../pakfire/packages/base.py:75
-msgid "Groups"
+#: ../pakfire/cli.py:76
+msgid "Install one or more packages to the system."
 msgstr ""
 
-#: ../pakfire/packages/base.py:76
-msgid "URL"
+#: ../pakfire/cli.py:78
+msgid "Give name of at least one package to install."
 msgstr ""
 
-#: ../pakfire/packages/base.py:77
-msgid "License"
+#: ../pakfire/cli.py:84
+msgid "Install one or more packages from the filesystem."
 msgstr ""
 
-#: ../pakfire/packages/base.py:80
-msgid "Description"
+#: ../pakfire/cli.py:86
+msgid "Give filename of at least one package."
 msgstr ""
 
-#: ../pakfire/packages/base.py:86
-msgid "UUID"
+#: ../pakfire/cli.py:92
+msgid "Remove one or more packages from the system."
 msgstr ""
 
-#: ../pakfire/packages/base.py:87
-msgid "Build ID"
+#: ../pakfire/cli.py:94
+msgid "Give name of at least one package to remove."
 msgstr ""
 
-#: ../pakfire/packages/base.py:88
-msgid "Build date"
+#: ../pakfire/cli.py:100
+msgid "Update the whole system or one specific package."
 msgstr ""
 
-#: ../pakfire/packages/base.py:89
-msgid "Build host"
+#: ../pakfire/cli.py:102
+msgid "Give a name of a package to update or leave emtpy for all."
 msgstr ""
 
-#: ../pakfire/packages/base.py:91
-msgid "Provides"
+#: ../pakfire/cli.py:108
+msgid "Print some information about the given package(s)."
 msgstr ""
 
-#: ../pakfire/packages/base.py:96
-msgid "Requires"
+#: ../pakfire/cli.py:110
+msgid "Give at least the name of one package."
 msgstr ""
 
-#: ../pakfire/repository/solver.py:80
-#, python-format
-msgid "Loading %s"
+#: ../pakfire/cli.py:116
+msgid "Search for a given pattern."
 msgstr ""
 
-#. XXX
-#: ../pakfire/repository/solver.py:202
-msgid "Empty to abort."
+#: ../pakfire/cli.py:118
+msgid "A pattern to search for."
 msgstr ""
 
-#: ../pakfire/repository/solver.py:205
-msgid "Choose a solution:"
+#: ../pakfire/cli.py:124
+msgid "Get a list of packages that provide a given file or feature."
 msgstr ""
 
-#: ../pakfire/repository/solver.py:221
-msgid "You have entered an invalid solution. Try again."
+#: ../pakfire/cli.py:126 ../pakfire/cli.py:134
+msgid "File or feature to search for."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:97
-#, python-format
-msgid "Extracting: %s"
+#: ../pakfire/cli.py:132
+msgid "Get a list of packages that require a given file or feature."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:100
-#, python-format
-msgid "Installing: %s"
+#: ../pakfire/cli.py:140
+msgid "Get list of packages that belong to the given group."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:102
-#, python-format
-msgid "Reinstalling: %s"
+#: ../pakfire/cli.py:142
+msgid "Group name to search for."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:104
-#, python-format
-msgid "Updating: %s"
+#: ../pakfire/cli.py:148
+msgid "Install all packages that belong to the given group."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:106
-#, python-format
-msgid "Downgrading: %s"
+#: ../pakfire/cli.py:150
+msgid "Group name."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:125
-#, python-format
-msgid "Removing: %s"
+#: ../pakfire/cli.py:156
+msgid "List all currently enabled repositories."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:242
-msgid "Package"
+#: ../pakfire/cli.py:222 ../pakfire/repository/transaction.py:248
+msgid "Repository"
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:243 ../pakfire/cli.py:223
-msgid "Repository"
+#: ../pakfire/cli.py:222
+msgid "Enabled"
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:247
-msgid "Installing:"
+#: ../pakfire/cli.py:222
+msgid "Priority"
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:248
-msgid "Reinstalling:"
+#: ../pakfire/cli.py:222
+msgid "Packages"
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:249
-msgid "Updating:"
+#: ../pakfire/cli.py:237
+msgid "Pakfire builder command line interface."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:250
-msgid "Downgrading:"
+#: ../pakfire/cli.py:279
+msgid "Update the package indexes."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:251
-msgid "Removing:"
+#: ../pakfire/cli.py:285
+msgid "Build one or more packages."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:257
-msgid "Transaction Summary"
+#: ../pakfire/cli.py:287
+msgid "Give name of at least one package to build."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:264
-msgid "package"
+#: ../pakfire/cli.py:291
+msgid "Build the package for the given architecture."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:270
-#, python-format
-msgid "Total download size: %s"
+#: ../pakfire/cli.py:293 ../pakfire/cli.py:315
+msgid "Path were the output files should be copied to."
 msgstr ""
 
-#: ../pakfire/repository/transaction.py:279
-msgid "Is this okay?"
+#: ../pakfire/cli.py:298
+msgid "Go into a shell."
 msgstr ""
 
-#: ../pakfire/repository/index.py:397
-#, python-format
-msgid "%s: package database"
+#: ../pakfire/cli.py:300
+msgid "Give name of a package."
 msgstr ""
 
-#: ../pakfire/builder.py:254
-#, python-format
-msgid "Extracting: %s (source)"
+#: ../pakfire/cli.py:304
+msgid "Emulated architecture in the shell."
 msgstr ""
 
-#: ../pakfire/cli.py:18
-msgid "Pakfire command line interface."
+#: ../pakfire/cli.py:309
+msgid "Generate a source package."
 msgstr ""
 
-#: ../pakfire/cli.py:25
-msgid "The path where pakfire should operate in."
+#: ../pakfire/cli.py:311
+msgid "Give name(s) of a package(s)."
 msgstr ""
 
-#: ../pakfire/cli.py:65
-msgid "Enable verbose output."
+#: ../pakfire/cli.py:386
+msgid "Pakfire repo command line interface."
 msgstr ""
 
-#: ../pakfire/cli.py:68
-msgid "Path to a configuration file to load."
+#: ../pakfire/cli.py:405
+msgid "Repository management commands."
 msgstr ""
 
-#: ../pakfire/cli.py:71
-msgid "Disable a repository temporarily."
+#: ../pakfire/cli.py:413
+msgid "Create a new repository index."
 msgstr ""
 
-#: ../pakfire/cli.py:76
-msgid "Install one or more packages to the system."
+#: ../pakfire/cli.py:414
+msgid "Path to the packages."
 msgstr ""
 
-#: ../pakfire/cli.py:78
-msgid "Give name of at least one package to install."
+#: ../pakfire/cli.py:415
+msgid "Path to input packages."
 msgstr ""
 
-#: ../pakfire/cli.py:84
-msgid "Install one or more packages from the filesystem."
+#: ../pakfire/cli.py:427
+msgid "Pakfire master command line interface."
 msgstr ""
 
-#: ../pakfire/cli.py:86
-msgid "Give filename of at least one package."
+#: ../pakfire/cli.py:449
+msgid "Update the sources."
 msgstr ""
 
-#: ../pakfire/cli.py:92
-msgid "Remove one or more packages from the system."
+#: ../pakfire/cli.py:459
+msgid "Pakfire server command line interface."
 msgstr ""
 
-#: ../pakfire/cli.py:94
-msgid "Give name of at least one package to remove."
+#: ../pakfire/cli.py:483
+msgid "Request a build job from the server."
 msgstr ""
 
-#: ../pakfire/cli.py:100
-msgid "Update the whole system or one specific package."
+#: ../pakfire/cli.py:489
+msgid "Send a keepalive to the server."
 msgstr ""
 
-#: ../pakfire/cli.py:102
-msgid "Give a name of a package to update or leave emtpy for all."
+#: ../pakfire/packages/base.py:69
+msgid "Name"
 msgstr ""
 
-#: ../pakfire/cli.py:108
-msgid "Print some information about the given package(s)."
+#: ../pakfire/packages/base.py:70 ../pakfire/repository/transaction.py:247
+msgid "Arch"
 msgstr ""
 
-#: ../pakfire/cli.py:110
-msgid "Give at least the name of one package."
+#: ../pakfire/packages/base.py:71 ../pakfire/repository/transaction.py:247
+msgid "Version"
 msgstr ""
 
-#: ../pakfire/cli.py:116
-msgid "Search for a given pattern."
+#: ../pakfire/packages/base.py:72
+msgid "Release"
 msgstr ""
 
-#: ../pakfire/cli.py:118
-msgid "A pattern to search for."
+#: ../pakfire/packages/base.py:73 ../pakfire/repository/transaction.py:248
+msgid "Size"
 msgstr ""
 
-#: ../pakfire/cli.py:124
-msgid "Get a list of packages that provide a given file or feature."
+#: ../pakfire/packages/base.py:74
+msgid "Repo"
 msgstr ""
 
-#: ../pakfire/cli.py:126 ../pakfire/cli.py:134
-msgid "File or feature to search for."
+#: ../pakfire/packages/base.py:75
+msgid "Summary"
 msgstr ""
 
-#: ../pakfire/cli.py:132
-msgid "Get a list of packages that require a given file or feature."
+#: ../pakfire/packages/base.py:76
+msgid "Groups"
 msgstr ""
 
-#: ../pakfire/cli.py:140
-msgid "Get list of packages that belong to the given group."
+#: ../pakfire/packages/base.py:77
+msgid "URL"
 msgstr ""
 
-#: ../pakfire/cli.py:142
-msgid "Group name to search for."
+#: ../pakfire/packages/base.py:78
+msgid "License"
 msgstr ""
 
-#: ../pakfire/cli.py:148
-msgid "Install all packages that belong to the given group."
+#: ../pakfire/packages/base.py:81
+msgid "Description"
 msgstr ""
 
-#: ../pakfire/cli.py:150
-msgid "Group name."
+#: ../pakfire/packages/base.py:87
+msgid "UUID"
 msgstr ""
 
-#: ../pakfire/cli.py:156
-msgid "List all currently enabled repositories."
+#: ../pakfire/packages/base.py:88
+msgid "Build ID"
 msgstr ""
 
-#: ../pakfire/cli.py:223
-msgid "Enabled"
+#: ../pakfire/packages/base.py:89
+msgid "Build date"
 msgstr ""
 
-#: ../pakfire/cli.py:223
-msgid "Priority"
+#: ../pakfire/packages/base.py:90
+msgid "Build host"
 msgstr ""
 
-#: ../pakfire/cli.py:238
-msgid "Pakfire builder command line interface."
+#: ../pakfire/packages/base.py:92
+msgid "Provides"
 msgstr ""
 
-#: ../pakfire/cli.py:280
-msgid "Update the package indexes."
+#: ../pakfire/packages/base.py:97
+msgid "Requires"
 msgstr ""
 
-#: ../pakfire/cli.py:286
-msgid "Build one or more packages."
+#: ../pakfire/repository/index_old.py:397 ../pakfire/repository/index.py:216
+#, python-format
+msgid "%s: package database"
 msgstr ""
 
-#: ../pakfire/cli.py:288
-msgid "Give name of at least one package to build."
+#. Add all packages from the database to the index.
+#: ../pakfire/repository/index.py:317
+msgid "Loading installed packages"
 msgstr ""
 
-#: ../pakfire/cli.py:292
-msgid "Build the package for the given architecture."
+#: ../pakfire/repository/transaction.py:103
+#, python-format
+msgid "Extracting: %s"
 msgstr ""
 
-#: ../pakfire/cli.py:294 ../pakfire/cli.py:316
-msgid "Path were the output files should be copied to."
+#: ../pakfire/repository/transaction.py:106
+#, python-format
+msgid "Installing: %s"
 msgstr ""
 
-#: ../pakfire/cli.py:299
-msgid "Go into a shell."
+#: ../pakfire/repository/transaction.py:108
+#, python-format
+msgid "Reinstalling: %s"
 msgstr ""
 
-#: ../pakfire/cli.py:301
-msgid "Give name of a package."
+#: ../pakfire/repository/transaction.py:110
+#, python-format
+msgid "Updating: %s"
 msgstr ""
 
-#: ../pakfire/cli.py:305
-msgid "Emulated architecture in the shell."
+#: ../pakfire/repository/transaction.py:112
+#, python-format
+msgid "Downgrading: %s"
 msgstr ""
 
-#: ../pakfire/cli.py:310
-msgid "Generate a source package."
+#: ../pakfire/repository/transaction.py:129
+#, python-format
+msgid "Removing: %s"
 msgstr ""
 
-#: ../pakfire/cli.py:312
-msgid "Give name(s) of a package(s)."
+#: ../pakfire/repository/transaction.py:247
+msgid "Package"
 msgstr ""
 
-#: ../pakfire/cli.py:381
-msgid "Pakfire repo command line interface."
+#: ../pakfire/repository/transaction.py:252
+msgid "Installing:"
 msgstr ""
 
-#: ../pakfire/cli.py:400
-msgid "Repository management commands."
+#: ../pakfire/repository/transaction.py:253
+msgid "Reinstalling:"
 msgstr ""
 
-#: ../pakfire/cli.py:408
-msgid "Create a new repository index."
+#: ../pakfire/repository/transaction.py:254
+msgid "Updating:"
 msgstr ""
 
-#: ../pakfire/cli.py:409
-msgid "Path to the packages."
+#: ../pakfire/repository/transaction.py:255
+msgid "Downgrading:"
 msgstr ""
 
-#: ../pakfire/cli.py:410
-msgid "Path to input packages."
+#: ../pakfire/repository/transaction.py:256
+msgid "Removing:"
 msgstr ""
 
-#: ../pakfire/cli.py:422
-msgid "Pakfire master command line interface."
+#: ../pakfire/repository/transaction.py:262
+msgid "Transaction Summary"
 msgstr ""
 
-#: ../pakfire/cli.py:444
-msgid "Update the sources."
+#: ../pakfire/repository/transaction.py:269
+msgid "package"
 msgstr ""
 
-#: ../pakfire/cli.py:454
-msgid "Pakfire server command line interface."
+#: ../pakfire/repository/transaction.py:275
+#, python-format
+msgid "Total download size: %s"
 msgstr ""
 
-#: ../pakfire/cli.py:478
-msgid "Request a build job from the server."
+#: ../pakfire/repository/transaction.py:284
+msgid "Is this okay?"
 msgstr ""
 
-#: ../pakfire/cli.py:484
-msgid "Send a keepalive to the server."
+#: ../pakfire/util.py:36
+#, python-format
+msgid "%s [y/N]"
 msgstr ""
index bcb967a319ab98d3284d68ad70f57db5d8e1ac17..412be67bb5a0a5ba4c4612c641d928273f251f37 100644 (file)
--- a/setup.py
+++ b/setup.py
@@ -1,9 +1,14 @@
 
-from distutils.core import setup
+import os
+
+from distutils.core import Extension, setup
 
 from DistUtilsExtra.command import *
 
-from pakfire.constants import PAKFIRE_VERSION
+#from pakfire.constants import PAKFIRE_VERSION
+PAKFIRE_VERSION = "0.9.2"
+
+_pakfire_module_files = [os.path.join("src", f) for f in os.listdir("src") if f.endswith(".c")]
 
 setup(
        name = "pakfire",
@@ -23,6 +28,10 @@ setup(
                "scripts/pakfire-repo",
                "scripts/pakfire-server",
        ],
+       ext_modules = [
+               Extension("_pakfire", _pakfire_module_files,
+                       extra_link_args = ["-lsatsolver", "-lsatsolverext"])
+       ],
        cmdclass = { "build" : build_extra.build_extra,
                     "build_i18n" :  build_i18n.build_i18n },
 )
diff --git a/src/_pakfiremodule.c b/src/_pakfiremodule.c
new file mode 100644 (file)
index 0000000..4fc576e
--- /dev/null
@@ -0,0 +1,199 @@
+
+#include <Python.h>
+
+#include "pool.h"
+#include "problem.h"
+#include "relation.h"
+#include "repo.h"
+#include "request.h"
+#include "solvable.h"
+#include "solver.h"
+#include "step.h"
+#include "transaction.h"
+
+// Module-level functions of the _pakfire extension (none yet; the table
+// must still end with a NULL sentinel entry).
+static PyMethodDef pakfireModuleMethods[] = {
+       { NULL, NULL, 0, NULL }
+};
+
+// Methods exposed on _pakfire.Pool.
+// NOTE(review): Pool_add_repo and Pool_search exist in pool.c but are not
+// listed here, so they are unreachable from Python — confirm intent.
+static PyMethodDef Pool_methods[] = {
+       {"prepare", (PyCFunction)Pool_prepare, METH_NOARGS, NULL},
+       {"size", (PyCFunction)Pool_size, METH_NOARGS, NULL},
+       {"set_installed", (PyCFunction)Pool_set_installed, METH_VARARGS, NULL},
+       {"providers", (PyCFunction)Pool_providers, METH_VARARGS, NULL},
+       { NULL, NULL, 0, NULL }
+};
+
+// Methods exposed on _pakfire.Problem (none yet beyond construction).
+static PyMethodDef Problem_methods[] = {
+       { NULL, NULL, 0, NULL }
+};
+
+// Methods exposed on _pakfire.Request: three ways to queue an install job.
+static PyMethodDef Request_methods[] = {
+       {"install_solvable", (PyCFunction)Request_install_solvable, METH_VARARGS, NULL},
+       {"install_relation", (PyCFunction)Request_install_relation, METH_VARARGS, NULL},
+       {"install_name", (PyCFunction)Request_install_name, METH_VARARGS, NULL},
+       { NULL, NULL, 0, NULL }
+};
+
+// Methods exposed on _pakfire.Relation (construction only).
+static PyMethodDef Relation_methods[] = {
+       { NULL, NULL, 0, NULL }
+};
+
+// Methods exposed on _pakfire.Repo.
+static PyMethodDef Repo_methods[] = {
+       {"name", (PyCFunction)Repo_name, METH_NOARGS, NULL},
+       {"size", (PyCFunction)Repo_size, METH_NOARGS, NULL},
+       {"get_enabled", (PyCFunction)Repo_get_enabled, METH_NOARGS, NULL},
+       {"set_enabled", (PyCFunction)Repo_set_enabled, METH_VARARGS, NULL},
+       {"write", (PyCFunction)Repo_write, METH_VARARGS, NULL},
+       {"read", (PyCFunction)Repo_read, METH_VARARGS, NULL},
+       { NULL, NULL, 0, NULL }
+};
+
+// Methods exposed on _pakfire.Solvable: getter/setter pairs for package
+// metadata plus the four dependency lists (provides/requires/obsoletes/
+// conflicts).
+static PyMethodDef Solvable_methods[] = {
+       {"get_name", (PyCFunction)Solvable_get_name, METH_NOARGS, NULL},
+       {"get_evr", (PyCFunction)Solvable_get_evr, METH_NOARGS, NULL},
+       {"get_arch", (PyCFunction)Solvable_get_arch, METH_NOARGS, NULL},
+       {"get_vendor", (PyCFunction)Solvable_get_vendor, METH_NOARGS, NULL},
+       {"set_vendor", (PyCFunction)Solvable_set_vendor, METH_VARARGS, NULL},
+       {"get_repo_name", (PyCFunction)Solvable_get_repo_name, METH_NOARGS, NULL},
+       {"get_uuid", (PyCFunction)Solvable_get_uuid, METH_NOARGS, NULL},
+       {"set_uuid", (PyCFunction)Solvable_set_uuid, METH_VARARGS, NULL},
+       {"get_hash1", (PyCFunction)Solvable_get_hash1, METH_NOARGS, NULL},
+       {"set_hash1", (PyCFunction)Solvable_set_hash1, METH_VARARGS, NULL},
+       {"get_summary", (PyCFunction)Solvable_get_summary, METH_NOARGS, NULL},
+       {"set_summary", (PyCFunction)Solvable_set_summary, METH_VARARGS, NULL},
+       {"get_description", (PyCFunction)Solvable_get_description, METH_NOARGS, NULL},
+       {"set_description", (PyCFunction)Solvable_set_description, METH_VARARGS, NULL},
+       {"get_groups", (PyCFunction)Solvable_get_groups, METH_NOARGS, NULL},
+       {"set_groups", (PyCFunction)Solvable_set_groups, METH_VARARGS, NULL},
+       {"get_url", (PyCFunction)Solvable_get_url, METH_NOARGS, NULL},
+       {"set_url", (PyCFunction)Solvable_set_url, METH_VARARGS, NULL},
+       {"get_filename", (PyCFunction)Solvable_get_filename, METH_NOARGS, NULL},
+       {"set_filename", (PyCFunction)Solvable_set_filename, METH_VARARGS, NULL},
+       {"get_license", (PyCFunction)Solvable_get_license, METH_NOARGS, NULL},
+       {"set_license", (PyCFunction)Solvable_set_license, METH_VARARGS, NULL},
+       {"get_buildhost", (PyCFunction)Solvable_get_buildhost, METH_NOARGS, NULL},
+       {"set_buildhost", (PyCFunction)Solvable_set_buildhost, METH_VARARGS, NULL},
+       {"get_maintainer", (PyCFunction)Solvable_get_maintainer, METH_NOARGS, NULL},
+       {"set_maintainer", (PyCFunction)Solvable_set_maintainer, METH_VARARGS, NULL},
+       {"get_downloadsize", (PyCFunction)Solvable_get_downloadsize, METH_NOARGS, NULL},
+       {"set_downloadsize", (PyCFunction)Solvable_set_downloadsize, METH_VARARGS, NULL},
+       {"get_installsize", (PyCFunction)Solvable_get_installsize, METH_NOARGS, NULL},
+       {"set_installsize", (PyCFunction)Solvable_set_installsize, METH_VARARGS, NULL},
+       {"get_buildtime", (PyCFunction)Solvable_get_buildtime, METH_NOARGS, NULL},
+       {"set_buildtime", (PyCFunction)Solvable_set_buildtime, METH_VARARGS, NULL},
+       {"add_provides", (PyCFunction)Solvable_add_provides, METH_VARARGS, NULL},
+       {"get_provides", (PyCFunction)Solvable_get_provides, METH_NOARGS, NULL},
+       {"add_requires", (PyCFunction)Solvable_add_requires, METH_VARARGS, NULL},
+       {"get_requires", (PyCFunction)Solvable_get_requires, METH_NOARGS, NULL},
+       {"add_obsoletes", (PyCFunction)Solvable_add_obsoletes, METH_VARARGS, NULL},
+       {"get_obsoletes", (PyCFunction)Solvable_get_obsoletes, METH_NOARGS, NULL},
+       {"add_conflicts", (PyCFunction)Solvable_add_conflicts, METH_VARARGS, NULL},
+       {"get_conflicts", (PyCFunction)Solvable_get_conflicts, METH_NOARGS, NULL},
+       { NULL, NULL, 0, NULL }
+};
+
+// Methods exposed on _pakfire.Solver: run the solver and toggle its policy
+// flags (downgrade/archchange/vendorchange/uninstall/updatesystem).
+static PyMethodDef Solver_methods[] = {
+       {"solve", (PyCFunction)Solver_solve, METH_VARARGS, NULL},
+       {"get_allow_downgrade", (PyCFunction)Solver_get_allow_downgrade, METH_NOARGS, NULL},
+       {"set_allow_downgrade", (PyCFunction)Solver_set_allow_downgrade, METH_VARARGS, NULL},
+       {"get_allow_archchange", (PyCFunction)Solver_get_allow_archchange, METH_NOARGS, NULL},
+       {"set_allow_archchange", (PyCFunction)Solver_set_allow_archchange, METH_VARARGS, NULL},
+       {"get_allow_vendorchange", (PyCFunction)Solver_get_allow_vendorchange, METH_NOARGS, NULL},
+       {"set_allow_vendorchange", (PyCFunction)Solver_set_allow_vendorchange, METH_VARARGS, NULL},
+       {"get_allow_uninstall", (PyCFunction)Solver_get_allow_uninstall, METH_NOARGS, NULL},
+       {"set_allow_uninstall", (PyCFunction)Solver_set_allow_uninstall, METH_VARARGS, NULL},
+       {"get_updatesystem", (PyCFunction)Solver_get_updatesystem, METH_NOARGS, NULL},
+       {"set_updatesystem", (PyCFunction)Solver_set_updatesystem, METH_VARARGS, NULL},
+       {"get_problems", (PyCFunction)Solver_get_problems, METH_VARARGS, NULL},
+       { NULL, NULL, 0, NULL }
+};
+
+// Methods exposed on _pakfire.Step (one step of a computed transaction).
+static PyMethodDef Step_methods[] = {
+       {"get_solvable", (PyCFunction)Step_get_solvable, METH_NOARGS, NULL},
+       {"get_type", (PyCFunction)Step_get_type, METH_NOARGS, NULL},
+       { NULL, NULL, 0, NULL }
+};
+
+// Methods exposed on _pakfire.Transaction.
+static PyMethodDef Transaction_methods[] = {
+       {"steps", (PyCFunction)Transaction_steps, METH_NOARGS, NULL},
+       { NULL, NULL, 0, NULL }
+};
+
+// Module initialization entry point (Python 2: the name must be
+// init<modulename> for a module called _pakfire).
+// Registers all extension types on the module and adds the REL_* flag
+// constants used to build versioned relations.
+// NOTE(review): Py_InitModule is Python 2 only; a Python 3 port needs
+// PyModule_Create plus a PyModuleDef.
+void init_pakfire(void) {
+       PyObject *m, *d;
+
+       m = Py_InitModule("_pakfire", pakfireModuleMethods);  // NOTE(review): return value not checked for NULL
+
+       // Pool
+       // For each type: attach the method table, finalize the type, and
+       // publish it on the module. A PyType_Ready failure silently aborts
+       // the rest of the initialization (no error is reported to Python).
+       PoolType.tp_methods = Pool_methods;
+       if (PyType_Ready(&PoolType) < 0)
+               return;
+       Py_INCREF(&PoolType);
+       PyModule_AddObject(m, "Pool", (PyObject *)&PoolType);
+
+       // Problem
+       ProblemType.tp_methods = Problem_methods;
+       if (PyType_Ready(&ProblemType) < 0)
+               return;
+       Py_INCREF(&ProblemType);
+       PyModule_AddObject(m, "Problem", (PyObject *)&ProblemType);
+
+       // Repo
+       RepoType.tp_methods = Repo_methods;
+       if (PyType_Ready(&RepoType) < 0)
+               return;
+       Py_INCREF(&RepoType);
+       PyModule_AddObject(m, "Repo", (PyObject *)&RepoType);
+
+       // Solvable
+       SolvableType.tp_methods = Solvable_methods;
+       if (PyType_Ready(&SolvableType) < 0)
+               return;
+       Py_INCREF(&SolvableType);
+       PyModule_AddObject(m, "Solvable", (PyObject *)&SolvableType);
+
+       // Relation
+       RelationType.tp_methods = Relation_methods;
+       if (PyType_Ready(&RelationType) < 0)
+               return;
+       Py_INCREF(&RelationType);
+       PyModule_AddObject(m, "Relation", (PyObject *)&RelationType);
+
+       // Request
+       RequestType.tp_methods = Request_methods;
+       if (PyType_Ready(&RequestType) < 0)
+               return;
+       Py_INCREF(&RequestType);
+       PyModule_AddObject(m, "Request", (PyObject *)&RequestType);
+
+       // Solver
+       SolverType.tp_methods = Solver_methods;
+       if (PyType_Ready(&SolverType) < 0)
+               return;
+       Py_INCREF(&SolverType);
+       PyModule_AddObject(m, "Solver", (PyObject *)&SolverType);
+
+       // Step
+       StepType.tp_methods = Step_methods;
+       if (PyType_Ready(&StepType) < 0)
+               return;
+       Py_INCREF(&StepType);
+       PyModule_AddObject(m, "Step", (PyObject *)&StepType);
+
+       // Transaction
+       TransactionType.tp_methods = Transaction_methods;
+       if (PyType_Ready(&TransactionType) < 0)
+               return;
+       Py_INCREF(&TransactionType);
+       PyModule_AddObject(m, "Transaction", (PyObject *)&TransactionType);
+
+       // Add constants
+       d = PyModule_GetDict(m);
+
+       // Add constants for relations
+       // REL_LE/REL_GE are composed from the satsolver REL_LT/REL_GT/REL_EQ
+       // flag bits. NOTE(review): PyDict_SetItemString does not steal the
+       // reference returned by Py_BuildValue, so each int leaks one
+       // reference (harmless one-time cost at import).
+       PyDict_SetItemString(d, "REL_EQ", Py_BuildValue("i", REL_EQ));
+       PyDict_SetItemString(d, "REL_LT", Py_BuildValue("i", REL_LT));
+       PyDict_SetItemString(d, "REL_GT", Py_BuildValue("i", REL_GT));
+       PyDict_SetItemString(d, "REL_LE", Py_BuildValue("i", REL_LT|REL_EQ));
+       PyDict_SetItemString(d, "REL_GE", Py_BuildValue("i", REL_GT|REL_EQ));
+}
diff --git a/src/config.h b/src/config.h
new file mode 100644 (file)
index 0000000..d367e8c
--- /dev/null
@@ -0,0 +1,7 @@
+
+/*
+       This is just a small configuration file for the build time configuration
+       of the satsolver module.
+*/
+
+#define DEBUG
diff --git a/src/pool.c b/src/pool.c
new file mode 100644 (file)
index 0000000..d803234
--- /dev/null
@@ -0,0 +1,132 @@
+
+#include <satsolver/poolarch.h>
+
+#include "config.h"
+#include "pool.h"
+#include "repo.h"
+#include "solvable.h"
+
+// Type object for _pakfire.Pool — wraps a satsolver Pool.
+// NOTE(review): designated struct initializers with the "label:" GNU syntax;
+// portable code would use ".tp_name =" etc.
+PyTypeObject PoolType = {
+       PyObject_HEAD_INIT(NULL)
+       tp_name: "_pakfire.Pool",
+       tp_basicsize: sizeof(PoolObject),
+       tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+       tp_new : Pool_new,
+       tp_dealloc: (destructor) Pool_dealloc,
+       tp_doc: "Sat Pool objects",
+};
+
+// Pool
+// Constructor: Pool(arch) — creates a satsolver pool configured for the
+// given architecture string.
+PyObject* Pool_new(PyTypeObject *type, PyObject *args, PyObject *kwds) {
+       PoolObject *self;
+       const char *arch;
+
+       if (!PyArg_ParseTuple(args, "s", &arch)) {
+               /* XXX raise exception */
+               return NULL;
+       }
+
+       self = (PoolObject *)type->tp_alloc(type, 0);
+       if (self != NULL) {
+               self->_pool = pool_create();
+
+#ifdef DEBUG
+               // Enable debug messages when DEBUG is defined.
+               pool_setdebuglevel(self->_pool, 1);
+#endif
+
+               pool_setarch(self->_pool, arch);
+               if (self->_pool == NULL) {  // NOTE(review): check comes too late — _pool was already dereferenced above if pool_create() had failed
+                       Py_DECREF(self);
+                       return NULL;
+               }
+       }
+
+       return (PyObject *)self;
+}
+
+// Destructor. NOTE(review): declared as returning PyObject* but installed
+// as tp_dealloc, which must be void — no value is ever returned. The
+// underlying satsolver pool is deliberately not freed (pool_free commented
+// out), so every Pool leaks its native memory.
+PyObject *Pool_dealloc(PoolObject *self) {
+       // pool_free(self->_pool);
+       self->ob_type->tp_free((PyObject *)self);
+}
+
+// NOTE(review): incomplete stub — `name` is parsed but unused, the parse
+// failure branch falls through without returning NULL, and the returned
+// RepoObject's _repo field is never initialized (no repo_create call).
+// Not exposed in Pool_methods, so currently unreachable from Python.
+PyObject *Pool_add_repo(PoolObject *self, PyObject *args) {
+       const char *name;
+       if (!PyArg_ParseTuple(args, "s", &name)) {
+               /* XXX raise exception */
+       }
+
+       RepoObject *repo;
+
+       repo = PyObject_New(RepoObject, &RepoType);
+       if (repo == NULL)
+               return NULL;
+
+       return (PyObject *)repo;
+}
+
+// Python-visible wrapper around _Pool_prepare().
+PyObject *Pool_prepare(PoolObject *self) {
+       _Pool_prepare(self->_pool);
+
+       Py_RETURN_NONE;
+}
+
+// Make the pool ready for solving: add file provides, build the
+// "whatprovides" index, and internalize all repositories.
+void _Pool_prepare(Pool *pool) {
+       pool_addfileprovides(pool);
+       pool_createwhatprovides(pool);
+
+       Id r;  // NOTE(review): FOR_REPOS assigns a Repo* to its second argument in satsolver — declaring r as Id looks like a type mismatch; confirm against the satsolver headers
+       int idx;
+       FOR_REPOS(idx, r) {
+               repo_internalize(r);
+       }
+}
+
+// Return the total number of solvables in the pool.
+PyObject *Pool_size(PoolObject *self) {
+       Pool *pool = self->_pool;
+
+       return Py_BuildValue("i", pool->nsolvables);
+}
+
+// Placeholder: search is not implemented yet and always returns None.
+// Not exposed in Pool_methods.
+PyObject *Pool_search(PoolObject *self, PyObject *args) {
+       Py_RETURN_NONE; /* XXX to be done */
+}
+
+// Mark the given Repo as the pool's "installed" repository.
+// NOTE(review): the "O" format does no type check, and the parse-failure
+// branch falls through without returning NULL — repo would then be used
+// uninitialized while a Python exception is pending.
+PyObject *Pool_set_installed(PoolObject *self, PyObject *args) {
+       RepoObject *repo;
+
+       if (!PyArg_ParseTuple(args, "O", &repo)) {
+               /* XXX raise exception */
+       }
+
+       pool_set_installed(self->_pool, repo->_repo);
+
+       Py_RETURN_NONE;
+}
+
+// Return a list of Solvable objects that provide the given dependency name.
+// NOTE(review): same parse-failure fall-through as above. pool_str2id is
+// called with create=0, so an unknown name yields Id 0 — confirm
+// FOR_PROVIDES handles that as "no providers".
+PyObject *Pool_providers(PoolObject *self, PyObject *args) {
+       const char *name;
+
+       if (!PyArg_ParseTuple(args, "s", &name)) {
+               /* XXX raise exception */
+       }
+
+       Id id = pool_str2id(self->_pool, name, 0);
+
+       Pool *pool = self->_pool;
+       _Pool_prepare(pool);  // rebuild whatprovides so the lookup below is current
+
+       PyObject *list = PyList_New(0);  // NOTE(review): allocation failure not checked
+
+       Id p, pp;
+       SolvableObject *solvable;
+       FOR_PROVIDES(p, pp, id) {
+               solvable = PyObject_New(SolvableObject, &SolvableType);
+               solvable->_pool = self->_pool;
+               solvable->_id = p;
+
+               PyList_Append(list, (PyObject *)solvable);  // NOTE(review): PyList_Append does not steal the reference — each solvable leaks one ref; Py_DECREF(solvable) is missing
+       }
+
+       return list;
+}
diff --git a/src/pool.h b/src/pool.h
new file mode 100644 (file)
index 0000000..0da7193
--- /dev/null
@@ -0,0 +1,27 @@
+
+#ifndef PAKFIRE_POOL_H
+#define PAKFIRE_POOL_H
+
+#include <Python.h>
+
+#include <satsolver/pool.h>
+
+// Sat Pool object
+// Thin Python wrapper holding a borrowed/owned satsolver Pool pointer.
+typedef struct {
+    PyObject_HEAD
+    Pool *_pool;
+} PoolObject;
+
+// NOTE(review): Pool_dealloc is declared as returning PyObject* but is
+// installed as tp_dealloc, whose slot type is void (destructor).
+extern PyObject* Pool_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+extern PyObject *Pool_dealloc(PoolObject *self);
+extern PyObject *Pool_add_repo(PoolObject *self, PyObject *args);
+extern PyObject *Pool_prepare(PoolObject *self);
+extern void _Pool_prepare(Pool *pool);
+extern PyObject *Pool_search(PoolObject *self, PyObject *args);
+extern PyObject *Pool_set_installed(PoolObject *self, PyObject *args);
+extern PyObject *Pool_providers(PoolObject *self, PyObject *args);
+extern PyObject *Pool_size(PoolObject *self);
+
+extern PyTypeObject PoolType;
+
+#endif
diff --git a/src/problem.c b/src/problem.c
new file mode 100644 (file)
index 0000000..f1d9ebb
--- /dev/null
@@ -0,0 +1,39 @@
+
+#include "problem.h"
+#include "request.h"
+#include "solver.h"
+
+// Type object for _pakfire.Problem — identifies one solver problem by
+// (solver, problem id).
+PyTypeObject ProblemType = {
+       PyObject_HEAD_INIT(NULL)
+       tp_name: "_pakfire.Problem",
+       tp_basicsize: sizeof(ProblemObject),
+       tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+       tp_new : Problem_new,
+       tp_dealloc: (destructor) Problem_dealloc,
+       tp_doc: "Sat Problem objects",
+};
+
+// Constructor: Problem(solver, request, problem_id).
+// NOTE(review): the parse-failure branch falls through without returning
+// NULL, and `request` is parsed but unused (its storage in the struct is
+// commented out) — presumably kept for a later revision.
+PyObject* Problem_new(PyTypeObject *type, PyObject *args, PyObject *kwds) {
+       ProblemObject *self;
+
+       SolverObject *solver;
+       RequestObject *request;
+       Id problem_id;
+
+       if (!PyArg_ParseTuple(args, "OOi", &solver, &request, &problem_id)) {
+               /* XXX raise exception */
+       }
+
+       self = (ProblemObject *)type->tp_alloc(type, 0);
+       if (self != NULL) {
+               self->_solver = solver->_solver;
+//             self->_request = request->_request;
+               self->_id = problem_id;
+       }
+
+       return (PyObject *)self;
+}
+
+// Destructor. NOTE(review): tp_free is commented out, so every Problem
+// object leaks; also declared PyObject* for a void tp_dealloc slot and
+// returns no value.
+PyObject *Problem_dealloc(ProblemObject *self) {
+       //self->ob_type->tp_free((PyObject *)self);
+}
diff --git a/src/problem.h b/src/problem.h
new file mode 100644 (file)
index 0000000..28d495e
--- /dev/null
@@ -0,0 +1,23 @@
+
+#ifndef PAKFIRE_PROBLEM_H
+#define PAKFIRE_PROBLEM_H
+
+#include <Python.h>
+
+#include <satsolver/pool.h>
+#include <satsolver/solver.h>
+
+// Sat Step object
+// NOTE(review): comment says "Step" but this declares the Problem object —
+// a (solver, problem id) pair.
+typedef struct {
+    PyObject_HEAD
+    Solver *_solver;
+//    Request *_request;
+    Id _id;
+} ProblemObject;
+
+extern PyObject* Problem_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+extern PyObject *Problem_dealloc(ProblemObject *self);
+
+extern PyTypeObject ProblemType;
+
+#endif
diff --git a/src/relation.c b/src/relation.c
new file mode 100644 (file)
index 0000000..779aac7
--- /dev/null
@@ -0,0 +1,47 @@
+
+#include "pool.h"
+#include "relation.h"
+
+// Sentinel meaning "no relation flags": Relation(pool, name) without
+// flags is a plain name dependency rather than a versioned relation.
+#define REL_NONE 0
+
+// Type object for _pakfire.Relation — a (possibly versioned) dependency.
+PyTypeObject RelationType = {
+       PyObject_HEAD_INIT(NULL)
+       tp_name: "_pakfire.Relation",
+       tp_basicsize: sizeof(RelationObject),
+       tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+       tp_new : Relation_new,
+       tp_dealloc: (destructor) Relation_dealloc,
+       tp_doc: "Sat Relation objects",
+};
+
+// Constructor: Relation(pool, name[, evr[, flags]]).
+// NOTE(review): parse failure falls through without returning NULL; and
+// because both evr and flags are optional ("Os|si"), calling
+// Relation(pool, name, flags=REL_EQ) via positional omission of evr would
+// read an uninitialized evr pointer below — confirm callers always pass
+// evr whenever flags != 0.
+PyObject* Relation_new(PyTypeObject *type, PyObject *args, PyObject *kwds) {
+       RelationObject *self;
+       PoolObject *pool;
+       const char *name;
+       const char *evr;
+       int flags = 0;
+
+       if (!PyArg_ParseTuple(args, "Os|si", &pool, &name, &evr, &flags)) {
+               /* XXX raise exception */
+       }
+
+       Id _name = pool_str2id(pool->_pool, name, 1);
+
+       self = (RelationObject *)type->tp_alloc(type, 0);
+       if (self != NULL) {
+               if (flags == REL_NONE) {
+                       // Plain name dependency.
+                       self->_id = _name;
+               } else {
+                       // Versioned relation: name <flags> evr.
+                       Id _evr = pool_str2id(pool->_pool, evr, 1);
+                       self->_id = pool_rel2id(pool->_pool, _name, _evr, flags, 1);
+               }
+
+               self->_pool = pool->_pool;
+       }
+
+       return (PyObject *)self;
+}
+
+// Destructor. NOTE(review): declared PyObject* for a void tp_dealloc slot;
+// no value is returned.
+PyObject *Relation_dealloc(RelationObject *self) {
+       self->ob_type->tp_free((PyObject *)self);
+}
diff --git a/src/relation.h b/src/relation.h
new file mode 100644 (file)
index 0000000..9ae4e3b
--- /dev/null
@@ -0,0 +1,21 @@
+
+#ifndef PAKFIRE_RELATION_H
+#define PAKFIRE_RELATION_H
+
+#include <Python.h>
+
+#include <satsolver/pool.h>
+
+// Sat Relation object
+// Holds the pool plus the satsolver Id of the (possibly versioned)
+// dependency relation.
+typedef struct {
+    PyObject_HEAD
+    Pool *_pool;
+    Id _id;
+} RelationObject;
+
+extern PyObject* Relation_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+extern PyObject *Relation_dealloc(RelationObject *self);
+
+extern PyTypeObject RelationType;
+
+#endif
diff --git a/src/repo.c b/src/repo.c
new file mode 100644 (file)
index 0000000..fdbc4c0
--- /dev/null
@@ -0,0 +1,121 @@
+
+#include <stdbool.h>
+#include <satsolver/repo_write.h>
+
+#include "pool.h"
+#include "repo.h"
+
+// Type object for _pakfire.Repo — wraps a satsolver Repo inside a Pool.
+PyTypeObject RepoType = {
+       PyObject_HEAD_INIT(NULL)
+       tp_name: "_pakfire.Repo",
+       tp_basicsize: sizeof(RepoObject),
+       tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+       tp_new : Repo_new,
+       tp_dealloc: (destructor) Repo_dealloc,
+       tp_doc: "Sat Repo objects",
+};
+
+// Constructor: Repo(pool, name) — creates a new repository in the pool.
+PyObject* Repo_new(PyTypeObject *type, PyObject *args, PyObject *kwds) {
+       RepoObject *self;
+
+       PoolObject *pool;
+       const char *name;
+
+       if (!PyArg_ParseTuple(args, "Os", &pool, &name)) {
+               /* XXX raise exception */
+               return NULL;
+       }
+
+       // NOTE(review): assert() is compiled out under NDEBUG — these are
+       // not real argument checks, and "O" did not verify pool's type.
+       assert(pool);
+       assert(name);
+
+       self = (RepoObject *)type->tp_alloc(type, 0);
+       if (self != NULL) {
+               self->_repo = repo_create(pool->_pool, name);
+               if (self->_repo == NULL) {
+                       Py_DECREF(self);
+                       return NULL;
+               }
+       }
+
+       return (PyObject *)self;
+}
+
+// Destructor. The native repo is deliberately not freed (repo_free
+// commented out) — presumably because the pool still references it.
+// NOTE(review): declared PyObject* for a void tp_dealloc slot.
+PyObject *Repo_dealloc(RepoObject *self) {
+       // repo_free(self->_repo, 0);
+       self->ob_type->tp_free((PyObject *)self);
+}
+
+// Return the repository's name as a Python string.
+PyObject *Repo_name(RepoObject *self) {
+       Repo *repo = self->_repo;
+
+       return Py_BuildValue("s", repo->name);
+}
+
+// Return the number of solvables in this repository.
+PyObject *Repo_size(RepoObject *self) {
+       Repo *repo = self->_repo;
+
+       return Py_BuildValue("i", repo->nsolvables);
+}
+
+// True if the repository is enabled (satsolver's `disabled` flag is 0).
+PyObject *Repo_get_enabled(RepoObject *self) {
+       if (self->_repo->disabled == 0) {
+               Py_RETURN_TRUE;
+       }
+
+       Py_RETURN_FALSE;
+}
+
+// Enable/disable the repository: set_enabled(bool).
+PyObject *Repo_set_enabled(RepoObject *self, PyObject *args) {
+       bool enabled;
+
+       if (!PyArg_ParseTuple(args, "b", &enabled)) {
+               /* XXX raise exception */
+               return NULL;
+       }
+
+       if (enabled == true) {
+               self->_repo->disabled = 0;
+       } else {
+               self->_repo->disabled = 1;
+       }
+
+       Py_RETURN_NONE;
+}
+
+// Serialize the repository to a .solv file at `filename`.
+// NOTE(review): parse failure falls through without returning NULL, and
+// fopen() may return NULL (the XXX below) — repo_write would then be
+// handed a NULL FILE*.
+PyObject *Repo_write(RepoObject *self, PyObject *args) {
+       const char *filename;
+
+       if (!PyArg_ParseTuple(args, "s", &filename)) {
+               /* XXX raise exception */
+       }
+
+       // Prepare the pool and internalize all attributes.
+       _Pool_prepare(self->_repo->pool);
+
+       // XXX catch if file cannot be opened
+       FILE *fp = fopen(filename, "wb");
+
+       repo_write(self->_repo, fp, NULL, NULL, 0);
+
+       fclose(fp);
+
+       Py_RETURN_NONE;
+}
+
+// Load solvables from a .solv file at `filename` into this repository.
+// NOTE(review): same parse-failure fall-through and unchecked fopen() as
+// Repo_write above.
+PyObject *Repo_read(RepoObject *self, PyObject *args) {
+       const char *filename;
+
+       if (!PyArg_ParseTuple(args, "s", &filename)) {
+               /* XXX raise exception */
+       }
+
+       // XXX catch if file cannot be opened
+       FILE *fp = fopen(filename, "rb");
+
+       repo_add_solv(self->_repo, fp);
+
+       fclose(fp);
+
+       Py_RETURN_NONE;
+}
diff --git a/src/repo.h b/src/repo.h
new file mode 100644 (file)
index 0000000..4ada997
--- /dev/null
@@ -0,0 +1,26 @@
+
+#ifndef PAKFIRE_REPO_H
+#define PAKFIRE_REPO_H
+
+#include <Python.h>
+
+#include <satsolver/repo.h>
+
+// Sat Repo object
+// Python wrapper around a satsolver Repo; the repo is owned by its Pool.
+typedef struct {
+    PyObject_HEAD
+    Repo *_repo;
+} RepoObject;
+
+extern PyObject *Repo_dealloc(RepoObject *self);
+extern PyObject* Repo_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+extern PyObject *Repo_name(RepoObject *self);
+extern PyObject *Repo_size(RepoObject *self);
+extern PyObject *Repo_get_enabled(RepoObject *self);
+extern PyObject *Repo_set_enabled(RepoObject *self, PyObject *args);
+extern PyObject *Repo_write(RepoObject *self, PyObject *args);
+extern PyObject *Repo_read(RepoObject *self, PyObject *args);
+
+extern PyTypeObject RepoType;
+
+#endif
diff --git a/src/request.c b/src/request.c
new file mode 100644 (file)
index 0000000..08ea57f
--- /dev/null
@@ -0,0 +1,82 @@
+
+#include "pool.h"
+#include "relation.h"
+#include "request.h"
+#include "solvable.h"
+
+#include <satsolver/solver.h>
+
+// Type object for _pakfire.Request — a queue of solver jobs to be
+// handed to the solver.
+PyTypeObject RequestType = {
+       PyObject_HEAD_INIT(NULL)
+       tp_name: "_pakfire.Request",
+       tp_basicsize: sizeof(RequestObject),
+       tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+       tp_new : Request_new,
+       tp_dealloc: (destructor) Request_dealloc,
+       tp_doc: "Sat Request objects",
+};
+
+// Constructor: Request(pool) — binds the request to a pool and
+// initializes its empty job queue.
+// NOTE(review): parse failure falls through without returning NULL, so
+// `pool` would be used uninitialized with an exception pending.
+PyObject* Request_new(PyTypeObject *type, PyObject *args, PyObject *kwds) {
+       RequestObject *self;
+       PoolObject *pool;
+
+       if (!PyArg_ParseTuple(args, "O", &pool)) {
+               /* XXX raise exception */
+       }
+
+       self = (RequestObject *)type->tp_alloc(type, 0);
+       if (self != NULL) {
+               self->_pool = pool->_pool;
+               if (self->_pool == NULL) {
+                       Py_DECREF(self);
+                       return NULL;
+               }
+
+               queue_init(&self->_queue);
+       }
+
+       return (PyObject *)self;
+}
+
+// Destructor. NOTE(review): queue_free(&self->_queue) is never called, so
+// any heap storage the queue grew into leaks; also PyObject* return type
+// for a void tp_dealloc slot.
+PyObject *Request_dealloc(RequestObject *self) {
+       self->ob_type->tp_free((PyObject *)self);
+}
+
+// Queue an install job for one specific solvable.
+// NOTE(review): all three install_* methods below fall through on
+// PyArg_ParseTuple failure without returning NULL.
+PyObject *Request_install_solvable(RequestObject *self, PyObject *args) {
+       SolvableObject *solv;
+
+       if (!PyArg_ParseTuple(args, "O", &solv)) {
+               /* XXX raise exception */
+       }
+
+       queue_push(&self->_queue, SOLVER_INSTALL|SOLVER_SOLVABLE);
+       queue_push(&self->_queue, solv->_id);
+
+       Py_RETURN_NONE;
+}
+
+// Queue an install job for anything providing the given relation.
+PyObject *Request_install_relation(RequestObject *self, PyObject *args) {
+       RelationObject *rel;
+
+       if (!PyArg_ParseTuple(args, "O", &rel)) {
+               /* XXX raise exception */
+       }
+
+       queue_push(&self->_queue, SOLVER_INSTALL|SOLVER_SOLVABLE_PROVIDES);
+       queue_push(&self->_queue, rel->_id);
+
+       Py_RETURN_NONE;
+}
+
+// Queue an install job by package name.
+PyObject *Request_install_name(RequestObject *self, PyObject *args) {
+       const char *name;
+
+       if (!PyArg_ParseTuple(args, "s", &name)) {
+               /* XXX raise exception */
+       }
+
+       queue_push(&self->_queue, SOLVER_INSTALL|SOLVER_SOLVABLE_NAME);
+       queue_push(&self->_queue, pool_str2id(self->_pool, name, 1));
+
+       Py_RETURN_NONE;
+}
diff --git a/src/request.h b/src/request.h
new file mode 100644 (file)
index 0000000..2ad6474
--- /dev/null
@@ -0,0 +1,24 @@
+
+#ifndef PAKFIRE_REQUEST_H
+#define PAKFIRE_REQUEST_H
+
+#include <Python.h>
+
+#include <satsolver/queue.h>
+
+// Sat Request object
+// Holds the target pool and the job queue (flag/id pairs) to solve.
+typedef struct {
+    PyObject_HEAD
+    Pool *_pool;
+    Queue _queue;
+} RequestObject;
+
+extern PyObject* Request_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+extern PyObject *Request_dealloc(RequestObject *self);
+extern PyObject *Request_install_solvable(RequestObject *self, PyObject *args);
+extern PyObject *Request_install_relation(RequestObject *self, PyObject *args);
+extern PyObject *Request_install_name(RequestObject *self, PyObject *args);
+
+extern PyTypeObject RequestType;
+
+#endif
diff --git a/src/solvable.c b/src/solvable.c
new file mode 100644 (file)
index 0000000..c00bf65
--- /dev/null
@@ -0,0 +1,502 @@
+
+#include "relation.h"
+#include "repo.h"
+#include "solvable.h"
+
+// Type object for _pakfire.Solvable — one package record, addressed by
+// (pool, solvable id).
+PyTypeObject SolvableType = {
+       PyObject_HEAD_INIT(NULL)
+       tp_name: "_pakfire.Solvable",
+       tp_basicsize: sizeof(SolvableObject),
+       tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+       tp_new : Solvable_new,
+       tp_dealloc: (destructor) Solvable_dealloc,
+       tp_doc: "Sat Solvable objects",
+};
+
+// Solvable
+// Constructor: Solvable(repo, name, evr[, arch="noarch"]) — adds a new
+// solvable to the repo and records its self-provides (name = evr).
+// NOTE(review): parse failure falls through without returning NULL.
+PyObject* Solvable_new(PyTypeObject *type, PyObject *args, PyObject *kwds) {
+       SolvableObject *self;
+
+       RepoObject *repo;
+       const char *name;
+       const char *evr;
+       const char *arch = "noarch";
+
+       if (!PyArg_ParseTuple(args, "Oss|s", &repo, &name, &evr, &arch)) {
+               /* XXX raise exception */
+       }
+
+       self = (SolvableObject *)type->tp_alloc(type, 0);
+       if (self != NULL) {
+               self->_id = repo_add_solvable(repo->_repo);
+               self->_pool = repo->_repo->pool;
+
+               /* Fill solvable with data. */
+               Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+               solv->name = pool_str2id(self->_pool, name, 1);
+               solv->evr = pool_str2id(self->_pool, evr, 1);
+               solv->arch = pool_str2id(self->_pool, arch, 1);
+
+               /* add self-provides */
+               Id rel = rel2id(self->_pool, solv->name, solv->evr, REL_EQ, 1);  // NOTE(review): rel2id is the old satsolver spelling; pool_rel2id is used elsewhere in this change — confirm both are available
+               solv->provides = repo_addid_dep(repo->_repo, solv->provides, rel, 0);
+       }
+
+       return (PyObject *)self;
+}
+
+// Destructor. The native solvable stays in its repo; only the Python
+// wrapper is freed. NOTE(review): PyObject* return type for a void
+// tp_dealloc slot.
+PyObject *Solvable_dealloc(SolvableObject *self) {
+       self->ob_type->tp_free((PyObject *)self);
+}
+
+// Return the solvable's name as a Python string.
+PyObject *Solvable_get_name(SolvableObject *self) {
+       Solvable *solvable = pool_id2solvable(self->_pool, self->_id);
+
+       const char *name = pool_id2str(solvable->repo->pool, solvable->name);
+
+       return Py_BuildValue("s", name);
+}
+
+// Return the solvable's epoch-version-release string.
+PyObject *Solvable_get_evr(SolvableObject *self) {
+       Solvable *solvable = pool_id2solvable(self->_pool, self->_id);
+
+       const char *evr = pool_id2str(solvable->repo->pool, solvable->evr);
+
+       return Py_BuildValue("s", evr);
+}
+
+// Return the solvable's architecture string.
+PyObject *Solvable_get_arch(SolvableObject *self) {
+       Solvable *solvable = pool_id2solvable(self->_pool, self->_id);
+
+       const char *arch = pool_id2str(solvable->repo->pool, solvable->arch);
+
+       return Py_BuildValue("s", arch);
+}
+
+// Return the solvable's vendor string.
+PyObject *Solvable_get_vendor(SolvableObject *self) {
+       Solvable *solvable = pool_id2solvable(self->_pool, self->_id);
+
+       const char *vendor = pool_id2str(solvable->repo->pool, solvable->vendor);
+
+       return Py_BuildValue("s", vendor);
+}
+
+// Set the solvable's vendor: set_vendor(str).
+// NOTE(review): parse failure falls through without returning NULL —
+// `vendor` would then be read uninitialized.
+PyObject *Solvable_set_vendor(SolvableObject *self, PyObject *args) {
+       Solvable *solvable = pool_id2solvable(self->_pool, self->_id);
+
+       const char *vendor;
+       if (!PyArg_ParseTuple(args, "s", &vendor)) {
+               /* XXX raise exception */
+       }
+
+       solvable->vendor = pool_str2id(self->_pool, vendor, 1);
+
+       Py_RETURN_NONE;
+}
+
+// Return the name of the repository this solvable belongs to.
+PyObject *Solvable_get_repo_name(SolvableObject *self) {
+       Solvable *solvable = pool_id2solvable(self->_pool, self->_id);
+
+       return Py_BuildValue("s", solvable->repo->name);
+}
+
+// Append a Relation to the solvable's provides list.
+// NOTE(review): same parse-failure fall-through as set_vendor; "O" does
+// not type-check that the argument is a Relation.
+PyObject *Solvable_add_provides(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       RelationObject *rel;
+       if (!PyArg_ParseTuple(args, "O", &rel)) {
+               /* XXX raise exception */
+       }
+
+       solv->provides = repo_addid_dep(solv->repo, solv->provides, rel->_id, 0);
+
+       Py_RETURN_NONE;
+}
+
+// Shared helper: walk a dependency array (an Offset into the repo's id
+// array) and return its entries as a Python list of strings. Stops at the
+// "solvable:filemarker" entry so the file list is not included.
+PyObject *_Solvable_get_dependencies(Solvable *solv, Offset deps) {
+       Repo *repo = solv->repo;
+       Pool *pool = repo->pool;
+
+       PyObject *dep;  // NOTE(review): unused variable
+       const char *dep_str;
+
+       Id id, *ids;
+
+       PyObject *list = PyList_New(0);
+
+       ids = repo->idarraydata + deps;
+       while((id = *ids++) != 0) {
+               dep_str = pool_dep2str(pool, id);
+
+               // Do not include the filelist.
+               if (strcmp(dep_str, "solvable:filemarker") == 0)
+                       break;
+
+               PyList_Append(list, Py_BuildValue("s", dep_str));  // NOTE(review): PyList_Append does not steal — the Py_BuildValue reference leaks per item
+       }
+
+       Py_INCREF(list);  // NOTE(review): PyList_New already returned a new reference; this extra INCREF makes the list immortal (leak)
+       return list;
+}
+
+// Return the provides list as Python strings.
+PyObject *Solvable_get_provides(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       return _Solvable_get_dependencies(solv, solv->provides);
+}
+
+// Append a Relation to the requires list.
+// NOTE(review): like the other add_* methods, falls through on parse
+// failure without returning NULL and does not type-check the "O" argument.
+PyObject *Solvable_add_requires(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       RelationObject *rel;
+       if (!PyArg_ParseTuple(args, "O", &rel)) {
+               /* XXX raise exception */
+       }
+
+       solv->requires = repo_addid_dep(solv->repo, solv->requires, rel->_id, 0);
+
+       Py_RETURN_NONE;
+}
+
+// Return the requires list as Python strings.
+PyObject *Solvable_get_requires(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       return _Solvable_get_dependencies(solv, solv->requires);
+}
+
+// Append a Relation to the obsoletes list.
+PyObject *Solvable_add_obsoletes(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       RelationObject *rel;
+       if (!PyArg_ParseTuple(args, "O", &rel)) {
+               /* XXX raise exception */
+       }
+
+       solv->obsoletes = repo_addid_dep(solv->repo, solv->obsoletes, rel->_id, 0);
+
+       Py_RETURN_NONE;
+}
+
+// Return the obsoletes list as Python strings.
+PyObject *Solvable_get_obsoletes(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       return _Solvable_get_dependencies(solv, solv->obsoletes);
+}
+
+// Append a Relation to the conflicts list.
+PyObject *Solvable_add_conflicts(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       RelationObject *rel;
+       if (!PyArg_ParseTuple(args, "O", &rel)) {
+               /* XXX raise exception */
+       }
+
+       solv->conflicts = repo_addid_dep(solv->repo, solv->conflicts, rel->_id, 0);
+
+       Py_RETURN_NONE;
+}
+
+// Return the conflicts list as Python strings.
+PyObject *Solvable_get_conflicts(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       return _Solvable_get_dependencies(solv, solv->conflicts);
+}
+
+// Store the package UUID under the SOLVABLE_PKGID attribute.
+// NOTE(review): all set_* methods in this group fall through on parse
+// failure without returning NULL; the matching get_* methods return None
+// for unset attributes because Py_BuildValue("s", NULL) yields None.
+PyObject *Solvable_set_uuid(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *uuid;
+
+       if (!PyArg_ParseTuple(args, "s", &uuid)) {
+               /* XXX raise exception */
+       }
+
+       repo_set_str(solv->repo, self->_id, SOLVABLE_PKGID, uuid);
+
+       Py_RETURN_NONE;
+}
+
+// Return the package UUID (SOLVABLE_PKGID) or None.
+PyObject *Solvable_get_uuid(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *uuid = repo_lookup_str(solv->repo, self->_id, SOLVABLE_PKGID);
+
+       return Py_BuildValue("s", uuid);
+}
+
+// Store the package checksum under SOLVABLE_CHECKSUM.
+PyObject *Solvable_set_hash1(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *hash1;
+
+       if (!PyArg_ParseTuple(args, "s", &hash1)) {
+               /* XXX raise exception */
+       }
+
+       repo_set_str(solv->repo, self->_id, SOLVABLE_CHECKSUM, hash1);
+
+       Py_RETURN_NONE;
+}
+
+// Return the package checksum (SOLVABLE_CHECKSUM) or None.
+PyObject *Solvable_get_hash1(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *hash1 = repo_lookup_str(solv->repo, self->_id, SOLVABLE_CHECKSUM);
+
+       return Py_BuildValue("s", hash1);
+}
+
+// Store the one-line summary under SOLVABLE_SUMMARY.
+PyObject *Solvable_set_summary(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *summary;
+
+       if (!PyArg_ParseTuple(args, "s", &summary)) {
+               /* XXX raise exception */
+       }
+
+       repo_set_str(solv->repo, self->_id, SOLVABLE_SUMMARY, summary);
+
+       Py_RETURN_NONE;
+}
+
+// Return the one-line summary (SOLVABLE_SUMMARY) or None.
+PyObject *Solvable_get_summary(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *summary = repo_lookup_str(solv->repo, self->_id, SOLVABLE_SUMMARY);
+
+       return Py_BuildValue("s", summary);
+}
+
+PyObject *Solvable_set_description(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *desc;
+
+       if (!PyArg_ParseTuple(args, "s", &desc)) {
+               /* XXX raise exception */
+       }
+
+       repo_set_str(solv->repo, self->_id, SOLVABLE_DESCRIPTION, desc);
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solvable_get_description(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *desc = repo_lookup_str(solv->repo, self->_id,
+               SOLVABLE_DESCRIPTION);
+
+       return Py_BuildValue("s", desc);
+}
+
+PyObject *Solvable_set_url(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *url;
+
+       if (!PyArg_ParseTuple(args, "s", &url)) {
+               /* XXX raise exception */
+       }
+
+       repo_set_str(solv->repo, self->_id, SOLVABLE_URL, url);
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solvable_get_url(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *url = repo_lookup_str(solv->repo, self->_id, SOLVABLE_URL);
+
+       return Py_BuildValue("s", url);
+}
+
+PyObject *Solvable_set_groups(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *groups;
+
+       if (!PyArg_ParseTuple(args, "s", &groups)) {
+               /* XXX raise exception */
+       }
+
+       repo_set_str(solv->repo, self->_id, SOLVABLE_GROUP, groups);
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solvable_get_groups(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *groups = repo_lookup_str(solv->repo, self->_id, SOLVABLE_GROUP);
+
+       return Py_BuildValue("s", groups);
+}
+
+PyObject *Solvable_set_filename(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *filename;
+
+       if (!PyArg_ParseTuple(args, "s", &filename)) {
+               /* XXX raise exception */
+       }
+
+       repo_set_str(solv->repo, self->_id, SOLVABLE_MEDIAFILE, filename);
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solvable_get_filename(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *filename = repo_lookup_str(solv->repo, self->_id,
+               SOLVABLE_MEDIAFILE);
+
+       return Py_BuildValue("s", filename);
+}
+
+PyObject *Solvable_set_license(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *license;
+
+       if (!PyArg_ParseTuple(args, "s", &license)) {
+               /* XXX raise exception */
+       }
+
+       repo_set_str(solv->repo, self->_id, SOLVABLE_LICENSE, license);
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solvable_get_license(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *license = repo_lookup_str(solv->repo, self->_id,
+               SOLVABLE_LICENSE);
+
+       return Py_BuildValue("s", license);
+}
+
+PyObject *Solvable_set_buildhost(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *buildhost;
+
+       if (!PyArg_ParseTuple(args, "s", &buildhost)) {
+               /* XXX raise exception */
+       }
+
+       repo_set_str(solv->repo, self->_id, SOLVABLE_BUILDHOST, buildhost);
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solvable_get_buildhost(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *buildhost = repo_lookup_str(solv->repo, self->_id,
+               SOLVABLE_BUILDHOST);
+
+       return Py_BuildValue("s", buildhost);
+}
+
+PyObject *Solvable_set_maintainer(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *maintainer;
+
+       if (!PyArg_ParseTuple(args, "s", &maintainer)) {
+               /* XXX raise exception */
+       }
+
+       repo_set_str(solv->repo, self->_id, SOLVABLE_PACKAGER, maintainer);
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solvable_get_maintainer(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       const char *maintainer = repo_lookup_str(solv->repo, self->_id,
+               SOLVABLE_PACKAGER);
+
+       return Py_BuildValue("s", maintainer);
+}
+
+/* Numeric attribute accessors. The "I" converter matches the declared
+   unsigned int variables (the previous "i" converter was a signed/unsigned
+   format mismatch). Setters return NULL on parsing failure instead of
+   storing an uninitialized value. */
+
+PyObject *Solvable_set_downloadsize(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       unsigned int downloadsize;
+       if (!PyArg_ParseTuple(args, "I", &downloadsize))
+               return NULL;
+
+       repo_set_num(solv->repo, self->_id, SOLVABLE_DOWNLOADSIZE, downloadsize);
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solvable_get_downloadsize(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       // Third argument of repo_lookup_num is the default for unset attributes.
+       unsigned int downloadsize = repo_lookup_num(solv->repo, self->_id,
+               SOLVABLE_DOWNLOADSIZE, 0);
+
+       return Py_BuildValue("I", downloadsize);
+}
+
+PyObject *Solvable_set_installsize(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       unsigned int installedsize;
+       if (!PyArg_ParseTuple(args, "I", &installedsize))
+               return NULL;
+
+       repo_set_num(solv->repo, self->_id, SOLVABLE_INSTALLSIZE, installedsize);
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solvable_get_installsize(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       unsigned int installedsize = repo_lookup_num(solv->repo, self->_id,
+               SOLVABLE_INSTALLSIZE, 0);
+
+       return Py_BuildValue("I", installedsize);
+}
+
+PyObject *Solvable_set_buildtime(SolvableObject *self, PyObject *args) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       unsigned int buildtime;
+       if (!PyArg_ParseTuple(args, "I", &buildtime))
+               return NULL;
+
+       repo_set_num(solv->repo, self->_id, SOLVABLE_BUILDTIME, buildtime);
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solvable_get_buildtime(SolvableObject *self) {
+       Solvable *solv = pool_id2solvable(self->_pool, self->_id);
+
+       unsigned int buildtime = repo_lookup_num(solv->repo, self->_id,
+               SOLVABLE_BUILDTIME, 0);
+
+       // A build time of zero means the attribute was never set.
+       if (buildtime == 0)
+               Py_RETURN_NONE;
+
+       return Py_BuildValue("I", buildtime);
+}
+
diff --git a/src/solvable.h b/src/solvable.h
new file mode 100644 (file)
index 0000000..ac23315
--- /dev/null
@@ -0,0 +1,83 @@
+
+#ifndef PAKFIRE_SOLVABLE_H
+#define PAKFIRE_SOLVABLE_H
+
+#include <Python.h>
+
+#include <satsolver/solvable.h>
+
+// Sat Solvable object
+// Thin wrapper around a libsatsolver solvable. It does not own any
+// memory itself: it only stores the pool and the solvable id, and all
+// accessors resolve the Solvable on demand via pool_id2solvable().
+typedef struct {
+    PyObject_HEAD
+    Pool *_pool;
+    Id _id;
+} SolvableObject;
+
+// Constructor/destructor (wired up in the type object).
+extern PyObject* Solvable_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+extern PyObject *Solvable_dealloc(SolvableObject *self);
+
+// Identity accessors (name/evr/arch are fixed at creation time).
+extern PyObject *Solvable_get_name(SolvableObject *self);
+extern PyObject *Solvable_get_evr(SolvableObject *self);
+extern PyObject *Solvable_get_arch(SolvableObject *self);
+
+extern PyObject *Solvable_get_vendor(SolvableObject *self);
+extern PyObject *Solvable_set_vendor(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_repo_name(SolvableObject *self);
+
+// String/numeric attribute accessors, stored as repo data.
+extern PyObject *Solvable_get_uuid(SolvableObject *self);
+extern PyObject *Solvable_set_uuid(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_hash1(SolvableObject *self);
+extern PyObject *Solvable_set_hash1(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_summary(SolvableObject *self);
+extern PyObject *Solvable_set_summary(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_description(SolvableObject *self);
+extern PyObject *Solvable_set_description(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_groups(SolvableObject *self);
+extern PyObject *Solvable_set_groups(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_url(SolvableObject *self);
+extern PyObject *Solvable_set_url(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_filename(SolvableObject *self);
+extern PyObject *Solvable_set_filename(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_license(SolvableObject *self);
+extern PyObject *Solvable_set_license(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_buildhost(SolvableObject *self);
+extern PyObject *Solvable_set_buildhost(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_maintainer(SolvableObject *self);
+extern PyObject *Solvable_set_maintainer(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_downloadsize(SolvableObject *self);
+extern PyObject *Solvable_set_downloadsize(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_installsize(SolvableObject *self);
+extern PyObject *Solvable_set_installsize(SolvableObject *self, PyObject *args);
+
+extern PyObject *Solvable_get_buildtime(SolvableObject *self);
+extern PyObject *Solvable_set_buildtime(SolvableObject *self, PyObject *args);
+
+// internal use only
+extern PyObject *_Solvable_get_dependencies(Solvable *solv, Offset deps);
+
+// Dependency list accessors (provides/requires/obsoletes/conflicts).
+extern PyObject *Solvable_add_provides(SolvableObject *self, PyObject *args);
+extern PyObject *Solvable_get_provides(SolvableObject *self);
+
+extern PyObject *Solvable_add_requires(SolvableObject *self, PyObject *args);
+extern PyObject *Solvable_get_requires(SolvableObject *self);
+
+extern PyObject *Solvable_add_obsoletes(SolvableObject *self, PyObject *args);
+extern PyObject *Solvable_get_obsoletes(SolvableObject *self);
+
+extern PyObject *Solvable_add_conflicts(SolvableObject *self, PyObject *args);
+extern PyObject *Solvable_get_conflicts(SolvableObject *self);
+
+extern PyTypeObject SolvableType;
+
+#endif
diff --git a/src/solver.c b/src/solver.c
new file mode 100644 (file)
index 0000000..10a6f26
--- /dev/null
@@ -0,0 +1,168 @@
+
+#include "pool.h"
+#include "problem.h"
+#include "request.h"
+#include "solver.h"
+
+#include <satsolver/solverdebug.h>
+
+// Type object for _pakfire.Solver. Written with GNU C designated
+// initializer (label:) syntax; all slots not listed default to 0/NULL.
+PyTypeObject SolverType = {
+       PyObject_HEAD_INIT(NULL)
+       tp_name: "_pakfire.Solver",
+       tp_basicsize: sizeof(SolverObject),
+       tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+       tp_new : Solver_new,
+       tp_dealloc: (destructor) Solver_dealloc,
+       tp_doc: "Sat Solver objects",
+};
+
+/* tp_new: create a Solver bound to the given Pool.
+   NOTE(review): the "O" converter does not type-check the argument;
+   passing a non-Pool object reads garbage from pool->_pool — confirm. */
+PyObject* Solver_new(PyTypeObject *type, PyObject *args, PyObject *kwds) {
+       SolverObject *self;
+
+       PoolObject *pool;
+
+       if (!PyArg_ParseTuple(args, "O", &pool))
+               return NULL; // propagate the TypeError set by PyArg_ParseTuple
+
+       self = (SolverObject *)type->tp_alloc(type, 0);
+       if (self != NULL) {
+               self->_solver = solver_create(pool->_pool);
+               if (self->_solver == NULL) {
+                       Py_DECREF(self);
+                       return NULL;
+               }
+       }
+
+       return (PyObject *)self;
+}
+
+/* tp_dealloc: release the underlying sat solver and the object itself.
+   NOTE(review): declared with a PyObject * return type but never returns
+   a value; it is only invoked through the (destructor) cast in SolverType,
+   so the return value is unused — consider declaring it void. */
+PyObject *Solver_dealloc(SolverObject *self) {
+       solver_free(self->_solver);
+       self->ob_type->tp_free((PyObject *)self);
+}
+
+/* Getter/setter pairs for the solver policy flags. Each setter takes a
+   single integer (boolean) argument and returns None; each getter returns
+   the current value as an int. Setters return NULL on parsing failure
+   instead of storing an uninitialized value. */
+
+PyObject *Solver_get_allow_downgrade(SolverObject *self, PyObject *args) {
+       return Py_BuildValue("i", self->_solver->allowdowngrade);
+}
+
+PyObject *Solver_set_allow_downgrade(SolverObject *self, PyObject *args) {
+       int val;
+
+       if (!PyArg_ParseTuple(args, "i", &val))
+               return NULL;
+
+       self->_solver->allowdowngrade = val;
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solver_get_allow_archchange(SolverObject *self, PyObject *args) {
+       return Py_BuildValue("i", self->_solver->allowarchchange);
+}
+
+PyObject *Solver_set_allow_archchange(SolverObject *self, PyObject *args) {
+       int val;
+
+       if (!PyArg_ParseTuple(args, "i", &val))
+               return NULL;
+
+       self->_solver->allowarchchange = val;
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solver_get_allow_vendorchange(SolverObject *self, PyObject *args) {
+       return Py_BuildValue("i", self->_solver->allowvendorchange);
+}
+
+PyObject *Solver_set_allow_vendorchange(SolverObject *self, PyObject *args) {
+       int val;
+
+       if (!PyArg_ParseTuple(args, "i", &val))
+               return NULL;
+
+       self->_solver->allowvendorchange = val;
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solver_get_allow_uninstall(SolverObject *self, PyObject *args) {
+       return Py_BuildValue("i", self->_solver->allowuninstall);
+}
+
+PyObject *Solver_set_allow_uninstall(SolverObject *self, PyObject *args) {
+       int val;
+
+       if (!PyArg_ParseTuple(args, "i", &val))
+               return NULL;
+
+       self->_solver->allowuninstall = val;
+
+       Py_RETURN_NONE;
+}
+
+PyObject *Solver_get_updatesystem(SolverObject *self, PyObject *args) {
+       return Py_BuildValue("i", self->_solver->updatesystem);
+}
+
+PyObject *Solver_set_updatesystem(SolverObject *self, PyObject *args) {
+       int val;
+
+       if (!PyArg_ParseTuple(args, "i", &val))
+               return NULL;
+
+       self->_solver->updatesystem = val;
+
+       Py_RETURN_NONE;
+}
+
+/* Run the solver on the given Request. Returns True when a solution was
+   found (no problems were reported), False otherwise. */
+PyObject *Solver_solve(SolverObject *self, PyObject *args) {
+       RequestObject *request;
+
+       if (!PyArg_ParseTuple(args, "O", &request))
+               return NULL; // would otherwise dereference an uninitialized pointer
+
+       // Make sure, the pool is prepared.
+       _Pool_prepare(self->_solver->pool);
+
+       solver_solve(self->_solver, &request->_queue);
+
+       // XXX debug output on stdout; remove once the bindings stabilize.
+       solver_printallsolutions(self->_solver);
+
+       if (self->_solver->problems.count == 0) {
+               Py_RETURN_TRUE;
+       }
+
+       Py_RETURN_FALSE;
+}
+
+/* Return a list of Problem objects found by the last solve() run.
+   The request argument is currently unused but kept for the interface. */
+PyObject *Solver_get_problems(SolverObject *self, PyObject *args) {
+       RequestObject *request;
+
+       if (!PyArg_ParseTuple(args, "O", &request))
+               return NULL;
+
+       PyObject *list = PyList_New(0);
+       if (list == NULL)
+               return NULL;
+
+       ProblemObject *problem;
+       int i = 0;
+       for(; i < self->_solver->problems.count; i++) {
+               problem = PyObject_New(ProblemObject, &ProblemType);
+               if (problem == NULL) {
+                       Py_DECREF(list);
+                       return NULL;
+               }
+               problem->_solver = self->_solver;
+               problem->_id = self->_solver->problems.elements[i];
+
+               PyList_Append(list, (PyObject *)problem);
+               // PyList_Append took its own reference; drop ours or it leaks.
+               Py_DECREF(problem);
+       }
+
+       // PyList_New already returned a new reference; the previous extra
+       // Py_INCREF(list) leaked the list on every call.
+       return list;
+}
diff --git a/src/solver.h b/src/solver.h
new file mode 100644 (file)
index 0000000..89b3397
--- /dev/null
@@ -0,0 +1,34 @@
+
+#ifndef PAKFIRE_SOLVER_H
+#define PAKFIRE_SOLVER_H
+
+#include <Python.h>
+
+#include <satsolver/solver.h>
+
+// Sat Solver object
+// Owns the wrapped Solver: created in Solver_new, freed in Solver_dealloc.
+typedef struct {
+    PyObject_HEAD
+    Solver *_solver;
+} SolverObject;
+
+extern PyObject* Solver_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+extern PyObject *Solver_dealloc(SolverObject *self);
+
+// Policy flag accessors (booleans passed/returned as ints).
+extern PyObject *Solver_get_allow_downgrade(SolverObject *self, PyObject *args);
+extern PyObject *Solver_set_allow_downgrade(SolverObject *self, PyObject *args);
+extern PyObject *Solver_get_allow_archchange(SolverObject *self, PyObject *args);
+extern PyObject *Solver_set_allow_archchange(SolverObject *self, PyObject *args);
+extern PyObject *Solver_get_allow_vendorchange(SolverObject *self, PyObject *args);
+extern PyObject *Solver_set_allow_vendorchange(SolverObject *self, PyObject *args);
+extern PyObject *Solver_get_allow_uninstall(SolverObject *self, PyObject *args);
+extern PyObject *Solver_set_allow_uninstall(SolverObject *self, PyObject *args);
+extern PyObject *Solver_get_updatesystem(SolverObject *self, PyObject *args);
+extern PyObject *Solver_set_updatesystem(SolverObject *self, PyObject *args);
+
+// Solving: solve() returns True/False; get_problems() lists Problem objects.
+extern PyObject *Solver_solve(SolverObject *self, PyObject *args);
+extern PyObject *Solver_get_problems(SolverObject *self, PyObject *args);
+
+extern PyTypeObject SolverType;
+
+#endif
diff --git a/src/step.c b/src/step.c
new file mode 100644 (file)
index 0000000..a2b0c36
--- /dev/null
@@ -0,0 +1,129 @@
+
+#include "solvable.h"
+#include "step.h"
+#include "transaction.h"
+
+// Type object for _pakfire.Step. Written with GNU C designated
+// initializer (label:) syntax; all slots not listed default to 0/NULL.
+PyTypeObject StepType = {
+       PyObject_HEAD_INIT(NULL)
+       tp_name: "_pakfire.Step",
+       tp_basicsize: sizeof(StepObject),
+       tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+       tp_new : Step_new,
+       tp_dealloc: (destructor) Step_dealloc,
+       tp_doc: "Sat Step objects",
+};
+
+/* tp_new: create the Step with index num of the given Transaction.
+   Raises IndexError when num is out of range of the step queue. */
+PyObject* Step_new(PyTypeObject *type, PyObject *args, PyObject *kwds) {
+       StepObject *self;
+       TransactionObject *transaction;
+       int num;
+
+       if (!PyArg_ParseTuple(args, "Oi", &transaction, &num))
+               return NULL; // propagate the TypeError set by PyArg_ParseTuple
+
+       self = (StepObject *)type->tp_alloc(type, 0);
+       if (self != NULL) {
+               self->_transaction = transaction->_transaction;
+
+               // Also reject negative indexes; previously only the upper bound
+               // was checked, and NULL was returned without an exception set.
+               if (num < 0 || num >= transaction->_transaction->steps.count) {
+                       Py_DECREF(self);
+                       PyErr_SetString(PyExc_IndexError, "step number out of range");
+                       return NULL;
+               }
+
+               self->_id = transaction->_transaction->steps.elements[num];
+       }
+
+       return (PyObject *)self;
+}
+
+/* tp_dealloc: release the object. The Transaction pointer is borrowed
+   (owned by the TransactionObject), so nothing else is freed here.
+   NOTE(review): declared PyObject * but never returns a value; only used
+   through the (destructor) cast in StepType. */
+PyObject *Step_dealloc(StepObject *self) {
+       self->ob_type->tp_free((PyObject *)self);
+}
+
+/* Return a new Solvable object for the package this step operates on. */
+PyObject *Step_get_solvable(StepObject *self, PyObject *args) {
+       SolvableObject *result = PyObject_New(SolvableObject, &SolvableType);
+       if (result == NULL)
+               return NULL;
+
+       result->_pool = self->_transaction->pool;
+       result->_id = self->_id;
+
+       return (PyObject *)result;
+}
+
+/* Return the type of this transaction step as a human-readable string.
+   Unrecognized transaction types map to "unknown". */
+PyObject *Step_get_type(StepObject *self, PyObject *args) {
+       int trans_type = transaction_type(self->_transaction, self->_id,
+               SOLVER_TRANSACTION_SHOW_ACTIVE);
+
+       const char *name;
+       switch (trans_type) {
+               case SOLVER_TRANSACTION_IGNORE:
+                       name = "ignore"; break;
+               case SOLVER_TRANSACTION_ERASE:
+                       name = "erase"; break;
+               case SOLVER_TRANSACTION_REINSTALLED:
+                       name = "reinstalled"; break;
+               case SOLVER_TRANSACTION_DOWNGRADED:
+                       name = "downgraded"; break;
+               case SOLVER_TRANSACTION_CHANGED:
+                       name = "changed"; break;
+               case SOLVER_TRANSACTION_UPGRADED:
+                       name = "upgraded"; break;
+               case SOLVER_TRANSACTION_OBSOLETED:
+                       name = "obsoleted"; break;
+               case SOLVER_TRANSACTION_INSTALL:
+                       name = "install"; break;
+               case SOLVER_TRANSACTION_REINSTALL:
+                       name = "reinstall"; break;
+               case SOLVER_TRANSACTION_DOWNGRADE:
+                       name = "downgrade"; break;
+               case SOLVER_TRANSACTION_CHANGE:
+                       name = "change"; break;
+               case SOLVER_TRANSACTION_UPGRADE:
+                       name = "upgrade"; break;
+               case SOLVER_TRANSACTION_OBSOLETES:
+                       name = "obsoletes"; break;
+               case SOLVER_TRANSACTION_MULTIINSTALL:
+                       name = "multiinstall"; break;
+               case SOLVER_TRANSACTION_MULTIREINSTALL:
+                       name = "multireinstall"; break;
+               default:
+                       name = "unknown"; break;
+       }
+
+       return Py_BuildValue("s", name);
+}
diff --git a/src/step.h b/src/step.h
new file mode 100644 (file)
index 0000000..b5a016d
--- /dev/null
@@ -0,0 +1,24 @@
+
+#ifndef PAKFIRE_STEP_H
+#define PAKFIRE_STEP_H
+
+#include <Python.h>
+
+#include <satsolver/pool.h>
+#include <satsolver/transaction.h>
+
+// Sat Step object
+// One entry of a transaction's step queue. The Transaction pointer is
+// borrowed from the owning TransactionObject and must outlive the step.
+typedef struct {
+    PyObject_HEAD
+    Transaction *_transaction;
+    Id _id;
+} StepObject;
+
+extern PyObject* Step_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+extern PyObject *Step_dealloc(StepObject *self);
+// get_type() returns a string such as "install" or "erase";
+// get_solvable() returns the Solvable the step operates on.
+extern PyObject *Step_get_type(StepObject *self, PyObject *args);
+extern PyObject *Step_get_solvable(StepObject *self, PyObject *args);
+
+extern PyTypeObject StepType;
+
+#endif
diff --git a/src/test.py b/src/test.py
new file mode 100644 (file)
index 0000000..4d2f144
--- /dev/null
@@ -0,0 +1,71 @@
+
+# Smoke test for the _pakfire C extension module (Python 2 syntax).
+# Builds a tiny pool with a "test" and an "installed" repository,
+# resolves an install request and prints the resulting transaction.
+import _pakfire
+print dir(_pakfire)
+
+# Pool for the i686 architecture.
+pool = _pakfire.Pool("i686")
+print pool
+
+repo1 = _pakfire.Repo(pool, "test")
+repo2 = _pakfire.Repo(pool, "installed")
+print repo1, repo2
+
+# Mark repo2 as the repository of installed packages.
+pool.set_installed(repo2)
+
+# Package "a" is available, "b" is installed in 2.0-2 and available in 2.0-3.
+solv1 = _pakfire.Solvable(repo1, "a", "1.0-2", "i686")
+print solv1, solv1.get_name(), solv1.get_evr(), solv1.get_arch()
+print dir(solv1)
+
+solv2 = _pakfire.Solvable(repo2, "b", "2.0-2", "i686")
+print solv2, solv2.get_name(), solv2.get_evr(), solv2.get_arch()
+
+solv3 = _pakfire.Solvable(repo1, "b", "2.0-3", "i686")
+print solv3, solv3.get_name(), solv3.get_evr(), solv3.get_arch()
+
+# "a" requires b >= 2.0-3, which forces an upgrade of "b".
+relation1 = _pakfire.Relation(pool, "b", "2.0-3", _pakfire.REL_GE)
+print relation1
+
+relation2 = _pakfire.Relation(pool, "/bin/laden")
+print relation2
+
+solv1.add_requires(relation1)
+solv1.add_provides(relation2)
+
+relation3 = _pakfire.Relation(pool, "a")
+print relation3
+
+#solv2.add_conflicts(relation3)
+#solv3.add_conflicts(relation3)
+
+# Build the whatprovides index before solving.
+pool.prepare()
+
+solver = _pakfire.Solver(pool)
+print solver
+
+solver.set_allow_uninstall(True)
+print "allow_uninstall", solver.get_allow_uninstall()
+print "allow_downgrade", solver.get_allow_downgrade()
+
+
+request = _pakfire.Request(pool)
+print request
+
+request.install_solvable(solv1)
+#request.install_solvable_name("a")
+
+# solve() returns True when no problems were found.
+res = solver.solve(request)
+print "Result:", res
+
+if res:
+	transaction = _pakfire.Transaction(solver)
+	print transaction, transaction.steps()
+
+	# Dump every step with its solvable and step type.
+	for step in transaction.steps():
+		print "Step %s:" % step
+		solvable = step.get_solvable()
+		print "  solv: %s" % solvable, solvable.get_name(), solvable.get_evr()
+		print "  type: %s" % step.get_type()
+		print
+
+print pool.providers("b")
+
+print "Pool size: %d" % pool.size()
diff --git a/src/transaction.c b/src/transaction.c
new file mode 100644 (file)
index 0000000..cec1d62
--- /dev/null
@@ -0,0 +1,64 @@
+
+#include "solver.h"
+#include "step.h"
+#include "transaction.h"
+
+// Type object for _pakfire.Transaction. Written with GNU C designated
+// initializer (label:) syntax; all slots not listed default to 0/NULL.
+PyTypeObject TransactionType = {
+       PyObject_HEAD_INIT(NULL)
+       tp_name: "_pakfire.Transaction",
+       tp_basicsize: sizeof(TransactionObject),
+       tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+       tp_new : Transaction_new,
+       tp_dealloc: (destructor) Transaction_dealloc,
+       tp_doc: "Sat Transaction objects",
+};
+
+/* tp_new: create a Transaction from a solved Solver. The solver's
+   transaction is shallow-copied so the object survives the solver. */
+PyObject* Transaction_new(PyTypeObject *type, PyObject *args, PyObject *kwds) {
+       TransactionObject *self;
+       SolverObject *solver;
+
+       if (!PyArg_ParseTuple(args, "O", &solver))
+               return NULL; // propagate the TypeError set by PyArg_ParseTuple
+
+       self = (TransactionObject *)type->tp_alloc(type, 0);
+       if (self != NULL) {
+               self->_pool = solver->_solver->pool;
+               if (self->_pool == NULL) {
+                       Py_DECREF(self);
+                       return NULL;
+               }
+
+               /* When the solver is freed we still need the transaction. For that,
+                  we copy it to be independent. */
+               self->_transaction = malloc(sizeof(Transaction));
+               if (self->_transaction == NULL) {
+                       Py_DECREF(self);
+                       return PyErr_NoMemory(); // malloc was previously unchecked
+               }
+               memcpy(self->_transaction, &solver->_solver->trans, sizeof(Transaction));
+
+               // order the transaction right from the start.
+               transaction_order(self->_transaction, 0);
+       }
+
+       return (PyObject *)self;
+}
+
+/* tp_dealloc: release the object.
+   XXX need to free self->_transaction — the struct malloc'd in
+   Transaction_new is currently leaked. NOTE(review): freeing it here is
+   not straightforward: StepObjects keep a raw pointer to it without
+   holding a reference to this object, and its internals were shallow-
+   copied from the solver; resolve the ownership model before fixing. */
+PyObject *Transaction_dealloc(TransactionObject *self) {
+       self->ob_type->tp_free((PyObject *)self);
+}
+
+/* Return a list of Step objects, one for every step of this transaction,
+   in transaction order. */
+PyObject *Transaction_steps(TransactionObject *self, PyObject *args) {
+       PyObject *list = PyList_New(0);
+       if (list == NULL)
+               return NULL;
+
+       StepObject *step;
+       int i = 0;
+       for(; i < self->_transaction->steps.count; i++) {
+               step = PyObject_New(StepObject, &StepType);
+               if (step == NULL) {
+                       Py_DECREF(list);
+                       return NULL;
+               }
+               step->_transaction = self->_transaction;
+               step->_id = self->_transaction->steps.elements[i];
+
+               PyList_Append(list, (PyObject *)step);
+               // PyList_Append took its own reference; drop ours or it leaks.
+               Py_DECREF(step);
+       }
+
+       // PyList_New already returned a new reference; the previous extra
+       // Py_INCREF(list) leaked the list on every call.
+       return list;
+}
diff --git a/src/transaction.h b/src/transaction.h
new file mode 100644 (file)
index 0000000..6c26a81
--- /dev/null
@@ -0,0 +1,22 @@
+
+#ifndef PAKFIRE_TRANSACTION_H
+#define PAKFIRE_TRANSACTION_H
+
+#include <Python.h>
+
+#include <satsolver/transaction.h>
+
+// Sat Transaction object
+// Holds a malloc'd shallow copy of the solver's transaction (made in
+// Transaction_new) so it stays usable after the solver is freed.
+typedef struct {
+    PyObject_HEAD
+    Pool *_pool;
+    Transaction *_transaction;
+} TransactionObject;
+
+extern PyObject* Transaction_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+extern PyObject *Transaction_dealloc(TransactionObject *self);
+// Returns a list of Step objects in transaction order.
+extern PyObject *Transaction_steps(TransactionObject *self, PyObject *args);
+
+extern PyTypeObject TransactionType;
+
+#endif