git.ipfire.org Git - people/stevee/pakfire.git/commitdiff
Improve the repository code.
author Michael Tremer <michael.tremer@ipfire.org>
Thu, 29 Mar 2012 16:04:31 +0000 (18:04 +0200)
committer Michael Tremer <michael.tremer@ipfire.org>
Thu, 29 Mar 2012 16:04:31 +0000 (18:04 +0200)
Got rid of all these different Index types and
cleaned up the code a lot.
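The many Index subclasses (IndexSolv, IndexDir, IndexLocal) are folded into a single in-memory Index, and their behaviour moves into the repository classes themselves: RepositoryDir, RepositoryBuild, the new RepositoryRemote (formerly RepositorySolv) and RepositorySystem (formerly RepositoryLocal). A minimal sketch of the reworked layout, based on the hunks below; the Pakfire construction and the repository settings are assumptions:

# Sketch only -- class names and signatures are taken from the diff below;
# the Pakfire() construction and all settings are hypothetical.
from pakfire.base import Pakfire
from pakfire.repository import RepositoryDir, RepositoryRemote, RepositorySystem

pakfire = Pakfire()  # real construction may need configuration arguments

# The installed-package repository (replaces RepositoryLocal/IndexLocal).
system = RepositorySystem(pakfire)

# A remote repository (replaces RepositorySolv/IndexSolv); configuration
# values are passed as keyword arguments and read lazily from self.settings.
remote = RepositoryRemote(pakfire, "example", description="Example repository",
        baseurl="http://pakfire.example.org/repo", enabled="1")

# Every repository now owns a plain in-memory index; update() refreshes the
# metadata and package database and re-reads it into that index.
remote.update(force=False, offline=False)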

15 files changed:
po/pakfire.pot
python/pakfire/actions.py
python/pakfire/api.py
python/pakfire/base.py
python/pakfire/downloader.py
python/pakfire/packages/installed.py
python/pakfire/packages/solv.py
python/pakfire/repository/__init__.py
python/pakfire/repository/base.py
python/pakfire/repository/database.py
python/pakfire/repository/index.py
python/pakfire/repository/local.py
python/pakfire/repository/metadata.py
python/pakfire/repository/remote.py
python/pakfire/repository/system.py [moved from python/pakfire/repository/installed.py with 61% similarity]

index b645d2d0bcf3a9b6e0f541a643466a52f8192ee4..1d83e6d3cb2585e90487ddd284757bcb2dd968bd 100644 (file)
@@ -8,7 +8,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2012-03-23 16:42+0100\n"
+"POT-Creation-Date: 2012-03-29 18:01+0200\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@li.org>\n"
@@ -17,60 +17,70 @@ msgstr ""
 "Content-Type: text/plain; charset=CHARSET\n"
 "Content-Transfer-Encoding: 8bit\n"
 
-#: ../python/pakfire/actions.py:150
+#: ../python/pakfire/actions.py:71
+#, python-format
+msgid "%s has got no signatures"
+msgstr ""
+
+#: ../python/pakfire/actions.py:77
+#, python-format
+msgid "%s has got no valid signatures"
+msgstr ""
+
+#: ../python/pakfire/actions.py:168
 #, python-format
 msgid "Cannot run scriptlet because no interpreter is available: %s"
 msgstr ""
 
-#: ../python/pakfire/actions.py:154
+#: ../python/pakfire/actions.py:172
 #, python-format
 msgid "Cannot run scriptlet because the interpreter is not executable: %s"
 msgstr ""
 
-#: ../python/pakfire/actions.py:193
+#: ../python/pakfire/actions.py:211
 #, python-format
 msgid ""
 "The scriptlet returned an error:\n"
 "%s"
 msgstr ""
 
-#: ../python/pakfire/actions.py:196
+#: ../python/pakfire/actions.py:214
 #, python-format
 msgid "The scriptlet ran more than %s seconds and was killed."
 msgstr ""
 
-#: ../python/pakfire/actions.py:200
+#: ../python/pakfire/actions.py:218
 #, python-format
 msgid ""
 "The scriptlet returned with an unhandled error:\n"
 "%s"
 msgstr ""
 
-#: ../python/pakfire/actions.py:254 ../python/pakfire/actions.py:292
-#: ../python/pakfire/actions.py:315 ../python/pakfire/actions.py:338
-#: ../python/pakfire/actions.py:355 ../python/pakfire/actions.py:374
+#: ../python/pakfire/actions.py:272 ../python/pakfire/actions.py:310
+#: ../python/pakfire/actions.py:333 ../python/pakfire/actions.py:356
+#: ../python/pakfire/actions.py:373 ../python/pakfire/actions.py:392
 #, python-format
 msgid "Running transaction test for %s"
 msgstr ""
 
-#: ../python/pakfire/actions.py:263 ../python/pakfire/actions.py:367
+#: ../python/pakfire/actions.py:281 ../python/pakfire/actions.py:385
 msgid "Installing"
 msgstr ""
 
-#: ../python/pakfire/actions.py:301
+#: ../python/pakfire/actions.py:319
 msgid "Updating"
 msgstr ""
 
-#: ../python/pakfire/actions.py:321
+#: ../python/pakfire/actions.py:339
 msgid "Removing"
 msgstr ""
 
 #. Cleaning up leftover files and stuff.
-#: ../python/pakfire/actions.py:345
+#: ../python/pakfire/actions.py:363
 msgid "Cleanup"
 msgstr ""
 
-#: ../python/pakfire/actions.py:383
+#: ../python/pakfire/actions.py:401
 msgid "Downgrading"
 msgstr ""
 
@@ -78,47 +88,51 @@ msgstr ""
 msgid "Ignored arguments:"
 msgstr ""
 
-#: ../python/pakfire/base.py:295 ../python/pakfire/base.py:344
-#: ../python/pakfire/base.py:398 ../python/pakfire/base.py:466
-#: ../python/pakfire/base.py:501 ../python/pakfire/base.py:559
-#: ../python/pakfire/base.py:579
+#: ../python/pakfire/base.py:295 ../python/pakfire/base.py:343
+#: ../python/pakfire/base.py:397 ../python/pakfire/base.py:465
+#: ../python/pakfire/base.py:500 ../python/pakfire/base.py:558
+#: ../python/pakfire/base.py:578
 msgid "Nothing to do"
 msgstr ""
 
-#: ../python/pakfire/base.py:333
+#: ../python/pakfire/base.py:332
 msgid "There are no packages to install."
 msgstr ""
 
-#: ../python/pakfire/base.py:388
+#: ../python/pakfire/base.py:387
 #, python-format
 msgid "Could not find any installed package providing \"%s\"."
 msgstr ""
 
-#: ../python/pakfire/base.py:394
+#: ../python/pakfire/base.py:393
 #, python-format
 msgid "Multiple reinstall candidates for \"%(pattern)s\": %(pkgs)s"
 msgstr ""
 
-#: ../python/pakfire/base.py:423
+#: ../python/pakfire/base.py:422
 #, python-format
 msgid "Could not find package %s in a remote repository."
 msgstr ""
 
-#: ../python/pakfire/base.py:493
+#: ../python/pakfire/base.py:492
 #, python-format
 msgid "Excluding %s."
 msgstr ""
 
-#: ../python/pakfire/base.py:545
+#: ../python/pakfire/base.py:544
 #, python-format
 msgid "\"%s\" package does not seem to be installed."
 msgstr ""
 
-#: ../python/pakfire/base.py:691
+#: ../python/pakfire/base.py:690
 msgid "Build command has failed."
 msgstr ""
 
-#: ../python/pakfire/base.py:765
+#: ../python/pakfire/base.py:725
+msgid "New repository"
+msgstr ""
+
+#: ../python/pakfire/base.py:761
 msgid "Everything is fine."
 msgstr ""
 
@@ -826,12 +840,12 @@ msgstr ""
 msgid "Could not check if this signature is valid."
 msgstr ""
 
-#: ../python/pakfire/cli.py:1337 ../python/pakfire/keyring.py:108
+#: ../python/pakfire/cli.py:1337 ../python/pakfire/keyring.py:114
 #, python-format
 msgid "Created: %s"
 msgstr ""
 
-#: ../python/pakfire/cli.py:1341 ../python/pakfire/keyring.py:111
+#: ../python/pakfire/cli.py:1341 ../python/pakfire/keyring.py:117
 #, python-format
 msgid "Expires: %s"
 msgstr ""
@@ -850,28 +864,28 @@ msgstr ""
 msgid "Given algorithm '%s' is not supported."
 msgstr ""
 
-#: ../python/pakfire/config.py:196
+#: ../python/pakfire/config.py:201
 msgid "Configuration:"
 msgstr ""
 
-#: ../python/pakfire/config.py:198
+#: ../python/pakfire/config.py:203
 #, python-format
 msgid "Section: %s"
 msgstr ""
 
-#: ../python/pakfire/config.py:203
+#: ../python/pakfire/config.py:208
 msgid "No settings in this section."
 msgstr ""
 
-#: ../python/pakfire/config.py:205
+#: ../python/pakfire/config.py:210
 msgid "Loaded from files:"
 msgstr ""
 
-#: ../python/pakfire/downloader.py:140
+#: ../python/pakfire/downloader.py:146
 msgid "Downloading source files:"
 msgstr ""
 
-#: ../python/pakfire/downloader.py:163
+#: ../python/pakfire/downloader.py:169
 #, python-format
 msgid "Downloaded empty file: %s"
 msgstr ""
@@ -903,7 +917,7 @@ msgstr ""
 msgid "Running pakfire-build in a pakfire container?"
 msgstr ""
 
-#: ../python/pakfire/errors.py:90 ../python/pakfire/transaction.py:427
+#: ../python/pakfire/errors.py:94 ../python/pakfire/transaction.py:427
 msgid "Transaction test was not successful"
 msgstr ""
 
@@ -912,55 +926,55 @@ msgstr ""
 msgid "%(commas)s and %(last)s"
 msgstr ""
 
-#: ../python/pakfire/keyring.py:79
+#: ../python/pakfire/keyring.py:86
 msgid "The local keyring is already initialized. Aborting."
 msgstr ""
 
-#: ../python/pakfire/keyring.py:82
+#: ../python/pakfire/keyring.py:89
 msgid "Initializing local keyring..."
 msgstr ""
 
-#: ../python/pakfire/keyring.py:96
+#: ../python/pakfire/keyring.py:102
 #, python-format
 msgid "Fingerprint: %s"
 msgstr ""
 
-#: ../python/pakfire/keyring.py:100
+#: ../python/pakfire/keyring.py:106
 #, python-format
 msgid "Subkey: %s"
 msgstr ""
 
-#: ../python/pakfire/keyring.py:102
+#: ../python/pakfire/keyring.py:108
 msgid "This key has expired!"
 msgstr ""
 
-#: ../python/pakfire/keyring.py:105
+#: ../python/pakfire/keyring.py:111
 msgid "This is a secret key."
 msgstr ""
 
-#: ../python/pakfire/keyring.py:113
+#: ../python/pakfire/keyring.py:119
 msgid "This key does not expire."
 msgstr ""
 
-#: ../python/pakfire/keyring.py:171
+#: ../python/pakfire/keyring.py:172
 #, python-format
 msgid "Generating new key for %(realname)s <%(email)s>..."
 msgstr ""
 
-#: ../python/pakfire/keyring.py:172
+#: ../python/pakfire/keyring.py:173
 msgid "This may take a while..."
 msgstr ""
 
-#: ../python/pakfire/keyring.py:196
+#: ../python/pakfire/keyring.py:192
 #, python-format
 msgid "Successfully import key %s."
 msgstr ""
 
-#: ../python/pakfire/keyring.py:221
+#: ../python/pakfire/keyring.py:212
 msgid "Host key:"
 msgstr ""
 
-#: ../python/pakfire/keyring.py:224
+#: ../python/pakfire/keyring.py:215
 msgid "No host key available."
 msgstr ""
 
@@ -1126,63 +1140,61 @@ msgstr ""
 msgid "Building source package %s:"
 msgstr ""
 
-#: ../python/pakfire/repository/database.py:114
+#: ../python/pakfire/repository/database.py:116
 msgid "The format of the database is not supported by this version of pakfire."
 msgstr ""
 
-#: ../python/pakfire/repository/database.py:222
+#: ../python/pakfire/repository/database.py:224
 #, python-format
 msgid "Cannot use database with version greater than %s."
 msgstr ""
 
-#: ../python/pakfire/repository/database.py:224
+#: ../python/pakfire/repository/database.py:226
 #, python-format
 msgid "Migrating database from format %(old)s to %(new)s."
 msgstr ""
 
-#: ../python/pakfire/repository/index.py:240
+#. Create progress bar.
+#: ../python/pakfire/repository/local.py:102
+#: ../python/pakfire/repository/local.py:272
 #, python-format
-msgid ""
-"I cannot be forced to re-download the metadata for the repository '%s' when "
-"running in offline mode."
+msgid "%s: Adding packages..."
 msgstr ""
 
-#: ../python/pakfire/repository/index.py:292
-#, python-format
-msgid "%s: package database"
+#. Make a nice progress bar.
+#: ../python/pakfire/repository/local.py:205
+msgid "Compressing database..."
 msgstr ""
 
-#. Create progress bar.
-#: ../python/pakfire/repository/index.py:400
+#: ../python/pakfire/repository/remote.py:154
 #, python-format
-msgid "Loading from %s"
+msgid "Cannot update repository metadata for %s when in offline mode."
 msgstr ""
 
-#. Add all packages from the database to the index.
-#: ../python/pakfire/repository/index.py:463
-msgid "Loading installed packages"
+#: ../python/pakfire/repository/remote.py:173
+msgid "The downloaded metadata was less recent than the current one."
 msgstr ""
 
-#. Create progressbar.
-#: ../python/pakfire/repository/local.py:104
-msgid "Signing packages..."
+#: ../python/pakfire/repository/remote.py:212
+#, python-format
+msgid "Cannot download package database for %s in offline mode."
 msgstr ""
 
-#. Make a nice progress bar.
-#: ../python/pakfire/repository/local.py:163
-msgid "Compressing database..."
+#: ../python/pakfire/repository/remote.py:220
+#, python-format
+msgid "%s: package database"
 msgstr ""
 
-#: ../python/pakfire/repository/remote.py:114
+#: ../python/pakfire/repository/remote.py:294
 #, python-format
 msgid "Cannot download this file in offline mode: %s"
 msgstr ""
 
-#: ../python/pakfire/repository/remote.py:134
+#: ../python/pakfire/repository/remote.py:314
 msgid "The checksum of the downloaded file did not match."
 msgstr ""
 
-#: ../python/pakfire/repository/remote.py:135
+#: ../python/pakfire/repository/remote.py:315
 msgid "Trying an other mirror."
 msgstr ""
 
@@ -1305,7 +1317,25 @@ msgstr ""
 msgid "Transaction Test Succeeded"
 msgstr ""
 
-#: ../python/pakfire/transaction.py:443
+#. Make a nice progressbar.
+#: ../python/pakfire/transaction.py:449
+msgid "Verifying signatures..."
+msgstr ""
+
+#: ../python/pakfire/transaction.py:481
+#, python-format
+msgid "Found %s signature error(s)!"
+msgstr ""
+
+#: ../python/pakfire/transaction.py:486
+msgid "Going on because we are running in permissive mode."
+msgstr ""
+
+#: ../python/pakfire/transaction.py:487
+msgid "This is dangerous!"
+msgstr ""
+
+#: ../python/pakfire/transaction.py:507
 msgid "Running transaction"
 msgstr ""
 
index 6f6f89a92b65b1999bfb0d2a88c5591a69de5d4c..18d9fc4f405ebd0f73f8cdfbd81ee5f5cb0e9e08 100644 (file)
@@ -59,6 +59,13 @@ class Action(object):
                return filelist
 
        def verify(self):
+               assert self.pkg, "No package! %s" % self.pkg_solv
+               assert self.pkg.repo, "Package has no repository? %s" % self.pkg
+
+               # Local packages need no verification.
+               if self.pkg.repo.local:
+                       return
+
                # Check if there are any signatures at all.
                if not self.pkg.signatures:
                        raise SignatureError, _("%s has got no signatures") % self.pkg.friendly_name
index 92daab582a7c0dc1a84e62b53471e450853f93dc..e4a278b232a9bd7b3ed2f0748bc77f78e96b4787 100644 (file)
@@ -113,10 +113,10 @@ def requires(patterns, **pakfire_args):
 
        return pakfire.requires(requires)
 
-def repo_create(path, input_paths, key_id=None, type="binary", **pakfire_args):
+def repo_create(path, input_paths, name=None, key_id=None, type="binary", **pakfire_args):
        pakfire = Pakfire(**pakfire_args)
 
-       return pakfire.repo_create(path, input_paths, key_id=key_id, type=type)
+       return pakfire.repo_create(path, input_paths, name=name, key_id=key_id, type=type)
 
 def repo_list(**pakfire_args):
        pakfire = Pakfire(**pakfire_args)
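A minimal usage sketch of the extended module-level call, assuming the module is importable as pakfire.api; the paths, repository name and key id are hypothetical:

# Sketch only: all paths, the repository name and the key id are made up.
import pakfire.api

repo = pakfire.api.repo_create(
        "/srv/pakfire/repo",       # where the repository is created
        ["/tmp/packages"],         # input paths that are searched for packages
        name="my-repo",            # new keyword; defaults to _("New repository")
        key_id="0x12345678",       # packages are now signed while being added
        type="binary",
)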
index 73cf237c84df12324852012f10f29eb3072941a6..80c398ae78dfff3868349da7752dfde097afdd30 100644 (file)
@@ -325,8 +325,7 @@ class Pakfire(object):
 
                try:
                        # Add all packages to the repository index.
-                       for file in files:
-                               repo.collect_packages(file)
+                       repo.add_packages(*files)
 
                        # Break if no packages were added at all.
                        if not len(repo):
@@ -719,26 +718,23 @@ class Pakfire(object):
 
                return sorted(pkgs)
 
-       def repo_create(self, path, input_paths, key_id=None, type="binary"):
+       def repo_create(self, path, input_paths, name=None, key_id=None, type="binary"):
                assert type in ("binary", "source",)
 
-               repo = repository.RepositoryDir(
-                       self,
-                       name="new",
-                       description="New repository.",
-                       path=path,
-                       type=type,
-               )
+               if not name:
+                       name = _("New repository")
 
-               for input_path in input_paths:
-                       repo.collect_packages(input_path)
+               # Create new repository.
+               repo = repository.RepositoryDir(self, name=name, description="New repository.",
+                       path=path, type=type, key_id=key_id)
 
-               # Sign the repository with the given key.
-               if key_id:
-                       repo.sign(key_id)
+               # Add all packages.
+               repo.add_packages(*input_paths)
 
+               # Write metadata to disk.
                repo.save()
 
+               # Return the new repository.
                return repo
 
        def repo_list(self):
index f89681f549f254bf4ba7bec0d7a90c4e37607142..d40f14c4a58b14d6508b5672378a73378c273b5e 100644 (file)
@@ -80,6 +80,12 @@ class PakfireGrabber(URLGrabber):
                # a unicode string.
                return URLGrabber.urlread(self, filename.encode("utf-8"), *args, **kwargs)
 
+       def urlopen(self, filename, *args, **kwargs):
+               # However, urlopen requires the filename to be an ordinary string object.
+               filename = str(filename)
+
+               return URLGrabber.urlopen(self, filename, *args, **kwargs)
+
 
 class PackageDownloader(PakfireGrabber):
        def __init__(self, pakfire, *args, **kwargs):
@@ -181,14 +187,21 @@ class Mirror(object):
 
 
 class MirrorList(object):
-       def __init__(self, pakfire, repo):
+       def __init__(self, pakfire, repo, mirrorlist):
                self.pakfire = pakfire
                self.repo = repo
 
                self.__mirrors = []
 
                # Save URL to more mirrors.
-               self.mirrorlist = repo._mirrors
+               self.mirrorlist = mirrorlist
+
+       @property
+       def base_mirror(self):
+               if not self.repo.baseurl:
+                       return
+
+               return Mirror(self.repo.baseurl, preferred=False)
 
        @property
        def distro(self):
@@ -261,6 +274,9 @@ class MirrorList(object):
        def forget_mirrors(self):
                self.__mirrors = []
 
+               if self.base_mirror:
+                       self.__mirrors.append(self.base_mirror)
+
        @property
        def preferred(self):
                """
index bf57229557efc2feef96b77c6553108b9814791a..5a7763b616b993cc07b9402dc00b1a415be11487 100644 (file)
@@ -298,6 +298,11 @@ class DatabasePackage(Package):
 
                self._remove_files(remove_files, message, prefix)
 
+       @property
+       def signatures(self):
+               # Database packages do not have any signatures.
+               return []
+
 
 # XXX maybe we can remove this later?
 class InstalledPackage(DatabasePackage):
index 9bbbc2a3d3b4463b75157cb75b3a9eb55cb96532..66d4f37c4d09b1fd9ebeab12c79f72f3e6ff0eb1 100644 (file)
@@ -205,7 +205,12 @@ class SolvPackage(base.Package):
                if self.repo.local:
                        return True
 
-               return self.repo.cache.exists(self.cache_filename)
+               # If the repository has got a cache, we check if the file
+               # is in there.
+               if self.repo.cache:
+                       return self.repo.cache.exists(self.cache_filename)
+
+               return False
 
        def get_from_cache(self):
                path = None
@@ -220,11 +225,18 @@ class SolvPackage(base.Package):
                                if os.path.exists(p):
                                        path = p
                                        break
-               else:
-                       if self.repo.cache.exists(self.cache_filename):
-                               path = self.repo.cache.abspath(self.cache_filename)
 
-               if path and self.repo.cache.verify(path, self.hash1):
+                       return file.BinaryPackage(self.pakfire, self.repo, path)
+
+               if not self.repo.cache:
+                       return
+
+               if self.repo.cache.exists(self.cache_filename):
+                       # Check if the checksum matches, too.
+                       if not self.repo.cache.verify(self.cache_filename, self.hash1):
+                               return
+
+                       path = self.repo.cache.abspath(self.cache_filename)
                        return file.BinaryPackage(self.pakfire, self.repo, path)
 
        def download(self, text="", logger=None):
@@ -236,3 +248,8 @@ class SolvPackage(base.Package):
        def get_scriptlet(self, type):
                # XXX TODO
                return None
+
+       #@property
+       #def signatures(self):
+       #       # Solv packages do not have any signatures.
+       #       return []
index 39872da4d5477e759e1babab8612b76b87c7143d..b2a316954568e19251a3e986aa9fb55f0e7033f8 100644 (file)
@@ -27,8 +27,9 @@ log = logging.getLogger("pakfire")
 import pakfire.packages as packages
 
 from base import RepositoryDummy
-from local import RepositoryDir, RepositoryBuild, RepositoryLocal
-from remote import RepositorySolv
+from local import RepositoryDir, RepositoryBuild
+from remote import RepositoryRemote
+from system import RepositorySystem
 
 class Repositories(object):
        """
@@ -49,8 +50,8 @@ class Repositories(object):
                # Create a dummy repository
                self.dummy = RepositoryDummy(self.pakfire)
 
-               # Create the local repository
-               self.local = RepositoryLocal(self.pakfire)
+               # Create the local repository.
+               self.local = RepositorySystem(self.pakfire)
                self.add_repo(self.local)
 
                # If we running in build mode, we include our local build repository.
@@ -58,6 +59,7 @@ class Repositories(object):
                        self.local_build = RepositoryBuild(self.pakfire)
                        self.add_repo(self.local_build)
 
+               # Fetch all repositories from the configuration files.
                for repo_name, repo_args in self.config.get_repos():
                        self._parse(repo_name, repo_args)
 
@@ -77,7 +79,7 @@ class Repositories(object):
 
                # Update all indexes of the repositories (not force) so that we will
                # always work with valid data.
-               self.update(offline=self.pakfire.offline)
+               self.update(force=False, offline=self.pakfire.offline)
 
        def __iter__(self):
                repositories = self.__repos.values()
@@ -129,7 +131,7 @@ class Repositories(object):
                                # Replace the variable with its value.
                                v = v.replace("%%{%s}" % var, replaces.get(var, ""))
 
-               repo = RepositorySolv(self.pakfire, **_args)
+               repo = RepositoryRemote(self.pakfire, **_args)
                self.add_repo(repo)
 
        def add_repo(self, repo):
index 59d8615b9971fbe7e521920e79af97e0cc5455a9..d7850da0df2206f4c1cf3174574a36772b74ffe4 100644 (file)
 #                                                                             #
 ###############################################################################
 
-import fnmatch
-import glob
-import re
-
 import logging
 log = logging.getLogger("pakfire")
 
-import cache
+import index
+
 import pakfire.packages as packages
 import pakfire.satsolver as satsolver
 
@@ -40,13 +37,13 @@ class RepositoryFactory(object):
                self.solver_repo = satsolver.Repo(self.pool, self.name)
                self.solver_repo.set_priority(self.priority)
 
-               log.debug("Initialized new repository: %s" % self)
+               # Some repositories may have a cache.
+               self.cache = None
 
-               # Create an cache object
-               self.cache = cache.RepositoryCache(self.pakfire, self)
+               log.debug("Initialized new repository: %s" % self)
 
-               # The index MUST be set by an inheriting class.
-               self.index = None
+               # Create an index (in memory).
+               self.index = index.Index(self.pakfire, self)
 
        def __repr__(self):
                return "<%s %s>" % (self.__class__.__name__, self.name)
@@ -113,27 +110,17 @@ class RepositoryFactory(object):
                        A function that is called to update the local data of
                        the repository.
                """
-               assert self.index
-
-               if force or self.enabled:
-                       self.index.update(force, offline=offline)
+               raise NotImplementedError, self
 
        def clean(self):
                """
                        Cleanup all temporary files of this repository.
                """
                log.info("Cleaning up repository '%s'..." % self.name)
-               self.cache.destroy()
 
-               assert self.index
+               # Clear all packages in the index.
                self.index.clear()
 
-       def commit(self):
-               """
-                       Commit repository data to disk.
-               """
-               self.index.commit()
-
        def dump(self, long=False, filelist=False):
                dumps = []
                # Dump all package information of the packages in this repository.
index 84f5a4ae5ce9c12d56dde3a17528d688e784bdcf..da145a03bf571e96e694d6b52553b70ff028c2bd 100644 (file)
@@ -82,11 +82,13 @@ class Database(object):
                                self.create()
 
        def close(self):
-               self.__del__()
+               if self._db:
+                       self._db.close()
+                       self._db = None
 
        def commit(self):
-               self.open()
-               self._db.commit()
+               if self._db:
+                       self._db.commit()
 
        def cursor(self):
                self.open()
@@ -371,6 +373,18 @@ class DatabaseLocal(Database):
 
                c.close()
 
+       def get_filelist(self):
+               c = self.cursor()
+               c.execute("SELECT DISTINCT name FROM files")
+
+               ret = []
+               for row in c:
+                       ret.append(row["name"])
+
+               c.close()
+
+               return ret
+
        def get_package_from_solv(self, solv_pkg):
                c = self.cursor()
                c.execute("SELECT * FROM packages WHERE uuid = ? LIMIT 1", (solv_pkg.uuid,))
index ec60f5d6b0716f0a3626dc41ccaf1a8b20a38e01..fac1c2f00d215e4b35a16007c30f6edd143a66e0 100644 (file)
@@ -24,19 +24,13 @@ import os
 import logging
 log = logging.getLogger("pakfire")
 
-import database
-import metadata
-
-import pakfire.compress as compress
-import pakfire.downloader as downloader
-import pakfire.packages as packages
 import pakfire.satsolver as satsolver
-import pakfire.util as util
-
-from pakfire.constants import *
-from pakfire.i18n import _
 
 class Index(object):
+       """
+               Wraps around the solvable index in the memory.
+       """
+
        def __init__(self, pakfire, repo):
                self.pakfire = pakfire
 
@@ -44,34 +38,9 @@ class Index(object):
                self.repo = repo
                self.solver_repo = repo.solver_repo
 
-               self.init()
-
-               # Check, if initialization was okay.
-               self.check()
-
        def __repr__(self):
                return "<%s %s>" % (self.__class__.__name__, self.repo)
 
-       @property
-       def distro(self):
-               return self.repo.distro
-
-       @property
-       def cache(self):
-               return self.repo.cache
-
-       def init(self):
-               pass
-
-       def check(self):
-               """
-                       Check if everything was correctly initialized.
-               """
-               raise NotImplementedError
-
-       def update(self, force=False, offline=False):
-               raise NotImplementedError
-
        def read(self, filename):
                """
                        Read file in SOLV format from filename.
@@ -84,38 +53,31 @@ class Index(object):
                """
                self.solver_repo.write(filename)
 
-       def commit(self):
+       def optimize(self):
                """
-                       Commit index data to disk.
+                       Optimize the index.
                """
-               pass
+               self.solver_repo.internalize()
 
        def create_relation(self, *args, **kwargs):
                return self.pakfire.create_relation(*args, **kwargs)
 
        def add_package(self, pkg):
-               # XXX Skip packages without a UUID
-               #if not pkg.uuid:
-               #       log.warning("Skipping package which lacks UUID: %s" % pkg)
-               #       return
-               if not pkg.build_time:
-                       return
-
                log.debug("Adding package to index %s: %s" % (self, pkg))
 
                solvable = satsolver.Solvable(self.solver_repo, pkg.name,
                        pkg.friendly_version, pkg.arch)
 
-               # Save metadata.
-               if pkg.vendor:
-                       solvable.set_vendor(pkg.vendor)
+               assert pkg.uuid
+               solvable.set_uuid(pkg.uuid)
 
                hash1 = pkg.hash1
                assert hash1
                solvable.set_hash1(hash1)
 
-               assert pkg.uuid
-               solvable.set_uuid(pkg.uuid)
+               # Save metadata.
+               if pkg.vendor:
+                       solvable.set_vendor(pkg.vendor)
 
                if pkg.maintainer:
                        solvable.set_maintainer(pkg.maintainer)
@@ -186,295 +148,10 @@ class Index(object):
 
        def rem_package(self, pkg):
                # XXX delete the solvable from the index.
-               self.db.rem_package(pkg)
+               pass # TODO
 
        def clear(self):
                """
                        Forget all packages from memory.
                """
                self.solver_repo.clear()
-
-
-class IndexSolv(Index):
-       def check(self):
-               pass # XXX to be done
-
-       def update(self, force=False, offline=False):
-               self._update_metadata(force, offline)
-               self._update_database(force, offline)
-
-       def _update_metadata(self, force, offline=False):
-               filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)
-               cache_filename = os.path.join("repodata", self.distro.sname, self.distro.release,
-                       self.repo.name, self.distro.arch, os.path.basename(filename))
-
-               # Marker if we need to do the download.
-               download = True
-
-               # Marker for the current metadata.
-               old_metadata = None
-
-               if not force:
-                       # Check if file does exists and is not too old.
-                       if self.cache.exists(cache_filename):
-                               age = self.cache.age(cache_filename)
-                               if age and age < TIME_10M:
-                                       download = False
-                                       log.debug("Metadata is recent enough. I don't download it again.")
-
-                               # Open old metadata for comparison.
-                               old_metadata = metadata.Metadata(self.pakfire, self,
-                                       self.cache.abspath(cache_filename))
-
-                       # If no metadata was downloaded and we are in offline mode.
-                       elif offline:
-                               # If we cannot download new metadata, we should skip this
-                               # repository.
-                               return
-
-                               #raise OfflineModeError, _("There is no metadata for the repository '%s' and"
-                               #       " we cannot download any because we are running in offline mode."
-                               #       " Connect to a network or disable this repository.") % self.repo.name
-
-               elif force and offline:
-                       raise OfflineModeError, _("I cannot be forced to re-download the metadata for"
-                               " the repository '%s' when running in offline mode.") % self.repo.name
-
-               if download:
-                       # We are supposed to download new metadata, but we are running in
-                       # offline mode. That's okay. Just doing nothing.
-                       if not offline:
-                               log.debug("Going to (re-)download the repository metadata.")
-
-                               # Initialize a grabber for download.
-                               grabber = downloader.MetadataDownloader(self.pakfire)
-                               grabber = self.repo.mirrors.group(grabber)
-
-                               data = grabber.urlread(filename, limit=METADATA_DOWNLOAD_LIMIT)
-
-                               # Parse new metadata for comparison.
-                               new_metadata = metadata.Metadata(self.pakfire, self, metadata=data)
-
-                               if old_metadata and new_metadata < old_metadata:
-                                       log.warning("The downloaded metadata was less recent than the current one. Trashing that.")
-
-                               else:
-                                       # We explicitely rewrite the metadata if it is equal to have
-                                       # a new timestamp and do not download it over and over again.
-                                       with self.cache.open(cache_filename, "w") as o:
-                                               o.write(data)
-
-               # Parse the metadata that we just downloaded or load it from cache.
-               self.metadata = metadata.Metadata(self.pakfire, self,
-                       self.cache.abspath(cache_filename))
-
-       def _update_database(self, force, offline=False):
-               if not hasattr(self, "metadata"):
-                       return
-
-               # Construct cache and download filename.
-               cache_filename = os.path.join("repodata", self.distro.sname, self.distro.release,
-                       self.repo.name, self.distro.arch, "database", self.metadata.database)
-               filename = os.path.join(METADATA_DOWNLOAD_PATH, self.metadata.database)
-
-               if not self.cache.exists(cache_filename):
-                       if offline:
-                               # If there is not database and we are in offline mode, we cannot
-                               # download anything so we just skip the rest of this function.
-                               return
-
-                               #raise OfflineModeError, _("Your repository metadata is outdated "
-                               #       " and a new version needs to be downloaded.")
-
-                       # Initialize a grabber for download.
-                       grabber = downloader.DatabaseDownloader(
-                               self.pakfire,
-                               text = _("%s: package database") % self.repo.name,
-                       )
-                       grabber = self.repo.mirrors.group(grabber)
-
-                       # Open file on server.
-                       filename = str(filename)
-                       urlobj = fileobj = grabber.urlopen(filename)
-
-                       if self.metadata.database_compression:
-                               fileobj = compress.decompressobj(fileobj=fileobj,
-                                       algo=self.metadata.database_compression)
-
-                       # Make a new file in the cache.
-                       cacheobj = self.cache.open(cache_filename, "w")
-
-                       try:
-                               while True:
-                                       buf = fileobj.read(BUFFER_SIZE)
-                                       if not buf:
-                                               break
-                                       cacheobj.write(buf)
-                       except:
-                               # XXX we should catch decompression errors
-
-                               # Close all file descriptors.
-                               cacheobj.close()
-                               fileobj.close()
-                               if not urlobj == fileobj:
-                                       urlobj.close()
-
-                               raise
-
-                       cacheobj.close()
-                       fileobj.close()
-                       if not urlobj == fileobj:
-                               urlobj.close()
-
-                       # check the hashsum of the downloaded file
-                       #if self.cache.verify(self.cache.abspath(cache_filename), self.metadata.database_hash1):
-                       #       # XXX an exception is not a very good idea because this file could
-                       #       # be downloaded from another mirror. need a better way to handle this.
-                       #
-                       #       # Remove bad file from cache.
-                       #       self.cache.remove(cache_filename)
-                       #
-                       #       raise Exception, "Downloaded file did not match the hashsum. Need to re-download it."
-
-               # (Re-)open the database.
-               self.read(self.cache.abspath(cache_filename))
-
-
-class IndexDir(Index):
-       def init(self):
-               self.pkg_type = None
-
-               if self.repo.type == "binary":
-                       self.pkg_type = packages.BinaryPackage
-               elif self.repo.type == "source":
-                       self.pkg_type = packages.SourcePackage
-
-               assert self.pkg_type
-
-       def check(self):
-               pass # XXX to be done
-
-       @property
-       def path(self):
-               path = self.repo.path
-
-               if path.startswith("file://"):
-                       path = path[7:]
-
-               return path
-
-       def update(self, force=False, offline=False):
-               log.debug("Updating repository index '%s' (force=%s)" % (self.path, force))
-
-               # Do nothing if the update is not forced but populate the database
-               # if no packages are present.
-               if not force and len(self.repo):
-                       return
-
-               # Collect all packages from default path.
-               self.collect_packages(self.path)
-
-       def collect_packages(self, path):
-               log.debug("Collecting all packages from %s" % path)
-               pkgs = []
-
-               # Get a filelist of all files that could possibly be packages.
-               files = []
-
-               if os.path.isdir(path):
-                       for dir, subdirs, _files in os.walk(path):
-                               for file in sorted(_files):
-                                       # Skip files that do not have the right extension
-                                       if not file.endswith(".%s" % PACKAGE_EXTENSION):
-                                               continue
-
-                                       file = os.path.join(dir, file)
-                                       files.append(file)
-               elif os.path.isfile(path) and path.endswith(".%s" % PACKAGE_EXTENSION):
-                       files.append(path)
-
-               if not files:
-                       return pkgs
-
-               # Create progress bar.
-               pb = util.make_progress(_("Loading from %s") % path, len(files))
-               i = 0
-
-               for file in files:
-                               if pb:
-                                       i += 1
-                                       pb.update(i)
-
-                               package = packages.open(self.pakfire, self.repo, file)
-
-                               # Find all packages with the given type and skip those of
-                               # the other type.
-                               if not isinstance(package, self.pkg_type):
-                                       continue
-
-                               self.add_package(package)
-                               pkgs.append(package)
-
-               if pb:
-                       pb.finish()
-
-               # Internalize the repository, that all imported information
-               # is available for access.
-               self.solver_repo.internalize()
-
-               return pkgs
-
-
-class IndexLocal(Index):
-       def init(self):
-               self.db = database.DatabaseLocal(self.pakfire, self.repo)
-
-               # Read SOLV cache file.
-               filename = os.path.join(self.pakfire.path, PACKAGES_SOLV)
-               if os.path.exists(filename):
-                       self.read(filename)
-
-       def commit(self):
-               # Write SOLV cache file.
-               filename = os.path.join(self.pakfire.path, PACKAGES_SOLV)
-
-               dirname = os.path.dirname(filename)
-               if not os.path.exists(dirname):
-                       os.makedirs(dirname)
-
-               self.write(filename)
-
-       def check(self):
-               # XXX Create the database and lock it or something.
-               pass
-
-       def update(self, force=True, offline=False):
-               if self.solver_repo.size() == 0:
-                       force = True
-
-               if force:
-                       package_count = len(self.db)
-
-                       # Nothing to do here, if there are no packages in the database.
-                       if not package_count:
-                               return
-
-                       # Add all packages from the database to the index.
-                       pb = util.make_progress(_("Loading installed packages"), package_count)
-
-                       i = 0
-                       for pkg in self.db.packages:
-                               if pb:
-                                       i += 1
-                                       pb.update(i)
-
-                               self.add_package(pkg)
-
-                       if pb:
-                               pb.finish()
-
-       @property
-       def filelist(self):
-               for pkg in self.db.packages:
-                       for file in pkg.filelist:
-                               yield file
index 0929c97ba10fd1905b22fa42d259c835fc82ced0..5ebced8878f02378c6e4380b39fd79dc99e040b3 100644 (file)
@@ -26,7 +26,6 @@ import logging
 log = logging.getLogger("pakfire")
 
 import base
-import index
 import metadata
 
 import pakfire.compress as compress
@@ -37,7 +36,7 @@ from pakfire.constants import *
 from pakfire.i18n import _
 
 class RepositoryDir(base.RepositoryFactory):
-       def __init__(self, pakfire, name, description, path, type="binary"):
+       def __init__(self, pakfire, name, description, path, type="binary", key_id=None):
                base.RepositoryFactory.__init__(self, pakfire, name, description)
 
                # Path to files.
@@ -47,8 +46,8 @@ class RepositoryDir(base.RepositoryFactory):
                assert type in ("binary", "source",)
                self.type = type
 
-               # Create index
-               self.index = index.IndexDir(self.pakfire, self)
+               # The key that is used to sign all added packages.
+               self.key_id = key_id
 
        def remove(self):
                self.index.clear()
@@ -66,62 +65,105 @@ class RepositoryDir(base.RepositoryFactory):
                # Yes, this is local.
                return True
 
-       def collect_packages(self, *args, **kwargs):
+       def search_files(self, *paths):
                """
-                       Proxy function to add packages to the index.
+                       Search for possible package files in the paths.
                """
+               files = []
 
-               for pkg in self.index.collect_packages(*args, **kwargs):
-                       # The path of the package in the repository
-                       repo_filename = os.path.join(self.path, os.path.basename(pkg.filename))
+               for path in paths:
+                       if not os.path.exists(path):
+                               continue
 
-                       # Check, if the package does already exists and check if the
-                       # files are really equal.
-                       if os.path.exists(repo_filename):
-                               pkg_exists = packages.open(self.pakfire, self, repo_filename)
+                       if os.path.isdir(path):
+                               for dir, subdirs, _files in os.walk(path):
+                                       for file in sorted(_files):
+                                               # Skip files that do not have the right extension
+                                               if not file.endswith(".%s" % PACKAGE_EXTENSION):
+                                                       continue
 
-                               # Check UUID to see if the file needs to be copied.
-                               if pkg.uuid == pkg_exists.uuid:
-                                       continue
+                                               file = os.path.join(dir, file)
+                                               files.append(file)
 
-                       log.debug("Copying package '%s' to repository." % pkg)
-                       repo_dirname = os.path.dirname(repo_filename)
-                       if not os.path.exists(repo_dirname):
-                               os.makedirs(repo_dirname)
+                       elif os.path.isfile(path) and path.endswith(".%s" % PACKAGE_EXTENSION):
+                               files.append(path)
 
-                       # Try to use a hard link if possible, if we cannot do that we simply
-                       # copy the file.
-                       try:
-                               os.link(pkg.filename, repo_filename)
-                       except OSError:
-                               shutil.copy2(pkg.filename, repo_filename)
+               return files
 
-       def sign(self, key_id):
-               """
-                       Sign all packages with the given key.
-               """
-               # Create progressbar.
-               pb = util.make_progress(_("Signing packages..."), len(self), eta=True)
+       def add_packages(self, *paths):
+               # Search for possible package files in the paths.
+               files = self.search_files(*paths)
+
+               # Give up if there are no files to process.
+               if not files:
+                       return
+
+               # Create progress bar.
+               pb = util.make_progress(_("%s: Adding packages...") % self.name, len(files))
                i = 0
 
-               # Create a new index (because package checksums will change).
-               for pkg in self:
+               for file in files:
                        if pb:
                                i += 1
                                pb.update(i)
 
-                       # Create the full path to the file.
-                       filename = os.path.join(self.path, pkg.filename)
-                       pkg = packages.open(self.pakfire, self, filename)
+                       # Open the package file we want to add.
+                       pkg = packages.open(self.pakfire, self, file)
 
-                       # Sign the package.
-                       pkg.sign(key_id)
+                       # Find all packages with the given type and skip those of
+                       # the other type.
+                       if not pkg.type == self.type:
+                               continue
+
+                       # Compute the local path.
+                       repo_filename = os.path.join(self.path, os.path.basename(pkg.filename))
+                       pkg2 = None
+
+                       # If the file is already located in the repository, we do not need to
+                       # copy it.
+                       if not pkg.filename == repo_filename:
+                               need_copy = True
+
+                               # Check if the file is already in the repository.
+                               if os.path.exists(repo_filename):
+                                       # Open it for comparison.
+                                       pkg2 = packages.open(self.pakfire, self, repo_filename)
+
+                                       if pkg.uuid == pkg2.uuid:
+                                               need_copy = False
+
+                               # If a copy is still needed, we do it.
+                               if need_copy:
+                                       # Create the directory.
+                                       repo_dirname = os.path.dirname(repo_filename)
+                                       if not os.path.exists(repo_dirname):
+                                               os.makedirs(repo_dirname)
+
+                                       # Try to use a hard link if possible, if we cannot do that we simply
+                                       # copy the file.
+                                       try:
+                                               os.link(pkg.filename, repo_filename)
+                                       except OSError:
+                                               shutil.copy2(pkg.filename, repo_filename)
+
+                       # Reopen the new package file (in case it needs to be changed).
+                       if pkg2:
+                               pkg = pkg2
+                       else:
+                               pkg = packages.open(self.pakfire, self, repo_filename)
+
+                       # Sign all packages.
+                       if self.key_id:
+                               pkg.sign(self.key_id)
+
+                       # Add the package to the index.
+                       self.index.add_package(pkg)
 
                if pb:
                        pb.finish()
 
-               # Recreate the index because file checksums may have changed.
-               self.index.update(force=True)
+               # Optimize the index.
+               self.index.optimize()
 
        def save(self, path=None, algo="xz"):
                """
@@ -206,56 +248,49 @@ class RepositoryDir(base.RepositoryFactory):
 
 class RepositoryBuild(RepositoryDir):
        def __init__(self, pakfire):
-               # XXX need to add distro information to this path
                # XXX it is also hardcoded
-               path = pakfire.config.get(None, "local_build_repo_path",
-                       "/var/lib/pakfire/local")
+               path = pakfire.config.get(None, "local_build_repo_path", "/var/lib/pakfire/local")
+               #path = os.path.join(path, pakfire.distro.sname)
                assert path
 
-               # Create path if it does not exist.
-               if not os.path.exists(path):
-                       os.makedirs(path)
-
                RepositoryDir.__init__(self, pakfire, "build", "Locally built packages", path)
 
-       @property
-       def local(self):
-               """
-                       Yes, this is local.
-               """
-               return True
+       def update(self, force=False, offline=False):
+               # If force is not given, but there are no files in the repository,
+               # we force an update anyway.
+               if not force:
+                       force = len(self) == 0
 
-       @property
-       def priority(self):
-               return 20000
+               if force:
+                       # Wipe the index.
+                       self.index.clear()
 
+                       # Find all files in the repository dir.
+                       files = self.search_files(self.path)
 
-class RepositoryLocal(base.RepositoryFactory):
-       def __init__(self, pakfire):
-               base.RepositoryFactory.__init__(self, pakfire, "@system", "Local repository")
+                       # Create progress bar.
+                       pb = util.make_progress(_("%s: Adding packages...") % self.name, len(files))
+                       i = 0
+
+                       # Add all files to the index.
+                       for file in files:
+                               if pb:
+                                       i += 1
+                                       pb.update(i)
 
-               self.index = index.IndexLocal(self.pakfire, self)
+                               pkg = packages.open(self.pakfire, self, file)
+                               self.index.add_package(pkg)
 
-               # Tell the solver, that these are the installed packages.
-               self.pool.set_installed(self.solver_repo)
+                       if pb:
+                               pb.finish()
 
        @property
-       def priority(self):
+       def local(self):
                """
-                       The local repository has always a high priority.
+                       Yes, this is local.
                """
-               return 10
-
-       def add_package(self, pkg):
-               # Add package to the database.
-               self.index.db.add_package(pkg)
-
-               self.index.add_package(pkg)
-
-       def rem_package(self, pkg):
-               # Remove package from the database.
-               self.index.rem_package(pkg)
+               return True
 
        @property
-       def filelist(self):
-               return self.index.filelist
+       def priority(self):
+               return 20000
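The add_packages() method above replaces the old collect_packages()/sign() pair: it searches the given paths, hard-links or copies the files into the repository, signs them if a key_id was passed to the constructor, and adds them to the in-memory index. A short sketch of driving RepositoryDir directly, mirroring repo_create() in base.py; the Pakfire instance, paths and key id are assumptions:

# Sketch only: "pakfire" is an already-initialized Pakfire instance; the
# paths and the key id are hypothetical.
from pakfire.repository import RepositoryDir

repo = RepositoryDir(pakfire, name="my-repo", description="New repository.",
        path="/srv/pakfire/repo", type="binary", key_id="0x12345678")

# Search, copy/hard-link, sign and index the packages in one pass.
repo.add_packages("/tmp/packages")

# Write the repository metadata and the compressed database to disk.
repo.save()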
index 9029460b0d8f70c621f9ab6a514406a0fb2308af..cd86d51109b67c2d4a680b66651881f0099fee87 100644 (file)
 ###############################################################################
 
 import json
+import os
 import time
 
 from pakfire.constants import *
 
 class Metadata(object):
-       def __init__(self, pakfire, index, metafile=None, metadata=None):
+       def __init__(self, pakfire, metafile=None, metadata=None):
                self.pakfire = pakfire
-               self.index = index
-
                self.filename = metafile
 
                # Place where we save the data.
@@ -66,6 +65,8 @@ class Metadata(object):
                if not filename:
                        filename = self.filename
 
+               assert os.path.exists(filename), "Metadata file does not exist."
+
                with open(filename) as f:
                        self.parse(f.read())
 
index 370be592142cabfdb30aa1513102d46aa2df623d..f9c5c71b882fa2e068d861750e9de07ee1c5af4c 100644 (file)
@@ -25,38 +25,68 @@ import logging
 log = logging.getLogger("pakfire")
 
 import base
-import index
+import cache
+import metadata
 
+import pakfire.compress as compress
 import pakfire.downloader as downloader
 
 from pakfire.constants import *
 from pakfire.i18n import _
 
-class RepositorySolv(base.RepositoryFactory):
-       def __init__(self, pakfire, name, description, baseurl, mirrors, gpgkey, priority=100, enabled=True):
-               # Parse arguments.
-               self.baseurl  = baseurl
-               self.gpgkey   = gpgkey
-               self._mirrors = mirrors
-               self._priority = priority
+class RepositoryRemote(base.RepositoryFactory):
+       # XXX TODO Make metadata age configurable.
 
-               base.RepositoryFactory.__init__(self, pakfire, name, description)
-
-               # Initialize mirror servers.
-               self.mirrors = downloader.MirrorList(self.pakfire, self)
+       def __init__(self, pakfire, name, description=None, **settings):
+               # Save the settings that come from the configuration file.
+               self.settings = settings
 
-               # Create index, which is always SOLV.
-               self.index = index.IndexSolv(self.pakfire, self)
+               base.RepositoryFactory.__init__(self, pakfire, name, description)
 
-               # Save enabled/disabled flag at the end.
+               # Enabled/disable the repository, based on the configuration setting.
+               enabled = self.settings.get("enabled", True)
                if enabled in ("1", "yes", "on", True, 1):
                        self.enabled = True
                else:
                        self.enabled = False
 
+               # Create a cache object
+               self.cache = cache.RepositoryCache(self.pakfire, self)
+
+               # Initialize mirror servers.
+               mirrorlist = self.settings.get("mirrors", None)
+               self.mirrors = downloader.MirrorList(self.pakfire, self, mirrorlist)
+
+               # Open metadata if any.
+               self.metadata = self.open_metadata()
+
+       @property
+       def baseurl(self):
+               return self.settings.get("baseurl")
+
+       @property
+       def keyfile(self):
+               keyfile = self.settings.get("keyfile", None)
+               if keyfile is None:
+                       keyfile = self.settings.get("gpgkey", None)
+
+               return keyfile
+
        @property
        def priority(self):
-               priority = self._priority
+               priority = self.settings.get("priority", None)
+               if not priority is None:
+                       # Try to convert the given input to an integer
+                       # and return the value if possible.
+                       try:
+                               priority = int(priority)
+                               return priority
+
+                       except ValueError:
+                               pass
+
+               # The default priority is 100.
+               priority = 100
 
                url2priority = {
                        "file://" : 50,
@@ -70,6 +100,156 @@ class RepositorySolv(base.RepositoryFactory):
 
                return priority
 
+       def cache_path(self, *paths):
+               return os.path.join(
+                       "repodata",
+                       self.distro.sname,
+                       self.distro.release,
+                       self.name,
+                       self.distro.arch,
+                       *paths
+               )
+
+       def clean(self):
+               RepositoryFactory.clean(self)
+
+               # Remove all files in the files cache.
+               self.cache.destroy()
+
+       def update(self, force=False, offline=False):
+               # First update the repository metadata.
+               self.update_metadata(force=force, offline=offline)
+               self.update_database(force=force, offline=offline)
+
+               # Read the database.
+               self.open_database()
+
+       def open_metadata(self, path=None):
+               if not path:
+                       path = self.cache_path(os.path.basename(METADATA_DOWNLOAD_FILE))
+                       path = self.cache.abspath(path)
+
+               if self.cache.exists(path):
+                       return metadata.Metadata(self.pakfire, path)
+
+       def update_metadata(self, force=False, offline=False):
+               filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)
+               cache_filename = self.cache_path(os.path.basename(filename))
+
+               # Check if the metadata is already recent enough...
+               if self.cache.exists(cache_filename):
+                       age = self.cache.age(cache_filename)
+                       if age and age < TIME_10M:
+                               log.debug("Metadata is recent enough. I don't download it again.")
+                       else:
+                               log.debug("Metadata needs an update.")
+                               force = True
+
+               # If no metadata exists yet, we need an update.
+               else:
+                       force = True
+
+               # Raise an exception when we are running in offline mode but an update is required.
+               if force and offline:
+                       raise OfflineModeError, _("Cannot update repository metadata for %s when in offline mode.") % self.name
+
+               # If no download is required, we exit here.
+               if not force:
+                       return
+
+               # Going to download metadata.
+               log.debug("Going to download repository metadata for %s..." % self.name)
+
+               grabber = downloader.MetadataDownloader(self.pakfire)
+               grabber = self.mirrors.group(grabber)
+
+               while True:
+                       data = grabber.urlread(filename, limit=METADATA_DOWNLOAD_LIMIT)
+
+                       # Parse new metadata for comparison.
+                       md = metadata.Metadata(self.pakfire, metadata=data)
+
+                       if self.metadata and md < self.metadata:
+                               log.warning(_("The downloaded metadata was less recent than the current one."))
+                               grabber.increment_mirror(grabber)
+                               continue
+
+                       # If the download went well, we write the downloaded data to disk
+                       # and break the loop.
+                       f = self.cache.open(cache_filename, "w")
+                       f.write(data)
+                       f.close()
+
+                       break
+
+               # Re-open metadata.
+               self.metadata = self.open_metadata()
+               assert self.metadata
+
+       def open_database(self):
+               assert self.metadata, "Metadata needs to be opened first."
+
+               filename = self.cache_path("database", self.metadata.database)
+               filename = self.cache.abspath(filename)
+
+               assert os.path.exists(filename)
+
+               self.index.clear()
+               self.index.read(filename)
+
+       def update_database(self, force=False, offline=False):
+               assert self.metadata
+
+               # Construct cache and download filename.
+               filename = os.path.join(METADATA_DOWNLOAD_PATH, self.metadata.database)
+               cache_filename = self.cache_path("database", self.metadata.database)
+
+               if not force:
+                       force = not self.cache.exists(cache_filename)
+
+               # Raise an exception when we are running in offline mode but an update is required.
+               if force and offline:
+                       raise OfflineModeError, _("Cannot download package database for %s in offline mode.") % self.name
+
+               elif not force:
+                       return
+
+               # Initialize a grabber for download.
+               grabber = downloader.DatabaseDownloader(
+                       self.pakfire,
+                       text = _("%s: package database") % self.name,
+               )
+               grabber = self.mirrors.group(grabber)
+
+               while True:
+                       # Open file on server.
+                       urlobj = fileobj = grabber.urlopen(filename)
+
+                       if self.metadata.database_compression:
+                               fileobj = compress.decompressobj(fileobj=fileobj,
+                                       algo=self.metadata.database_compression)
+
+                       # Make a new file in the cache.
+                       cacheobj = self.cache.open(cache_filename, "wb")
+
+                       try:
+                               while True:
+                                       buf = fileobj.read(BUFFER_SIZE)
+                                       if not buf:
+                                               break
+                                       cacheobj.write(buf)
+
+                       finally:
+                               # XXX we should catch decompression errors
+
+                               # Close all file descriptors.
+                               cacheobj.close()
+                               fileobj.close()
+                               if urlobj is not fileobj:
+                                       urlobj.close()
+
+                       break
+
        def download(self, pkg, text="", logger=None):
                """
                        Downloads 'filename' from the repository and returns the local filename.
@@ -150,9 +330,14 @@ class RepositorySolv(base.RepositoryFactory):
                        "description = %s" % self.description,
                        "enabled = %s" % enabled,
                        "baseurl = %s" % self.baseurl,
-                       "mirrors = %s" % self._mirrors,
-                       #"gpgkey = %s" % self.gpgkey,
-                       "priority = %s" % self._priority,
+               ]
+
+               if self.mirrors.mirrorlist:
+                       lines.append("mirrors = %s" % self.mirrors.mirrorlist)
+
+               lines += [
+                       #"gpgkey = %s" % self.keyfile,
+                       "priority = %s" % self.priority,
                ]
 
                return lines
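
The following is a hypothetical usage sketch of the settings-based constructor introduced above; the module path, the configuration values and the "pakfire" instance are assumptions for illustration, not part of this change:

    import pakfire.repository.remote as remote  # assumed module path

    # Assumed: "pakfire" is an already initialized Pakfire instance.
    # Settings as they would come from a repository configuration file.
    settings = {
        "baseurl"  : "http://pakfire.example.org/stable/x86_64",
        "mirrors"  : "http://pakfire.example.org/stable/x86_64/mirrorlist",
        "enabled"  : "1",
        "priority" : "80",
    }

    repo = remote.RepositoryRemote(pakfire, "stable", "Example repository", **settings)

    # Download the repository metadata and package database; this raises
    # OfflineModeError if an update is required while running in offline mode.
    repo.update(force=False, offline=False)
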
similarity index 61%
rename from python/pakfire/repository/installed.py
rename to python/pakfire/repository/system.py
index 95f764f4d62d8e9457171df793d35d807166bcf5..642c4bd19be182516a14e92c8894e5d95e2b798a 100644 (file)
 #                                                                             #
 ###############################################################################
 
-import index
+import base
+import database
 
-from base import RepositoryFactory
-
-class InstalledRepository(RepositoryFactory):
+class RepositorySystem(base.RepositoryFactory):
        def __init__(self, pakfire):
-               RepositoryFactory.__init__(self, pakfire, "installed", "Installed packages")
+               base.RepositoryFactory.__init__(self, pakfire, "@system", "Local repository")
 
-               self.index = index.InstalledIndex(self.pakfire, self)
+               # Open database connection.
+               self.db = database.DatabaseLocal(self.pakfire, self)
 
-       @property
-       def local(self):
-               # This is obviously local.
-               return True
+               # Tell the solver that these are the installed packages.
+               self.pool.set_installed(self.solver_repo)
 
        @property
        def priority(self):
                """
-                       The installed repository has always the highest priority.
+                       The local repository always has a high priority.
                """
-               return 0
+               return 10
+
+       def update(self, force=False, offline=False):
+               if not force:
+                       force = len(self) == 0
+
+               if force:
+                       self.index.clear()
+                       for pkg in self.db.packages:
+                               self.index.add_package(pkg)
+
+       def commit(self):
+               # Commit the database to disk.
+               self.db.commit()
+
+       def add_package(self, pkg):
+               # Add package to the database.
+               self.db.add_package(pkg)
+               self.index.add_package(pkg)
+
+       def rem_package(self, pkg):
+               # Remove the package from the index.
+               self.index.rem_package(pkg)
+
+       @property
+       def filelist(self):
+               # XXX ugly?
+
+               for pkg in self.db.packages:
+                       for f in pkg.filelist:
+                               yield f
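
Below is a short, hypothetical sketch of how the new RepositorySystem interface might be driven; the module path, the "pakfire" instance and the "pkg" package object are assumptions for illustration only:

    from pakfire.repository.system import RepositorySystem  # assumed module path

    # Assumed: "pakfire" is an initialized Pakfire instance and "pkg" is an
    # installed package object.
    repo = RepositorySystem(pakfire)

    # Populate the index from the local package database.
    repo.update(force=True)

    # Register a newly installed package and write the database to disk.
    repo.add_package(pkg)
    repo.commit()

    # Iterate over all files owned by installed packages.
    for f in repo.filelist:
        print f
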