git.ipfire.org Git - pakfire.git/commitdiff
Add experimental support for mirrors.
author     Michael Tremer <michael.tremer@ipfire.org>
           Tue, 22 Feb 2011 15:56:25 +0000 (16:56 +0100)
committer  Michael Tremer <michael.tremer@ipfire.org>
           Tue, 22 Feb 2011 15:56:25 +0000 (16:56 +0100)
pakfire/constants.py
pakfire/downloader.py
pakfire/index.py
pakfire/packages/installed.py
pakfire/repository.py
pakfire/transaction.py
po/pakfire.pot

pakfire/constants.py
index 6dfb7cfe88de1eacc26a236312a03f7bc4e6603e..1756c2af2973e1882067b91f4eec9387423feea4 100644
@@ -34,6 +34,7 @@ BUILD_ROOT = "/var/lib/pakfire/build"
 SOURCE_DOWNLOAD_URL = "http://source.ipfire.org/source-3.x/"
 SOURCE_CACHE_DIR = os.path.join(CACHE_DIR, "sources")
 
+TIME_10M = 60*10
 TIME_24H = 60*60*24
 
 SOURCE_PACKAGE_META = """\
pakfire/downloader.py
index 486b903205485137c7d772dc57724ea0211a023a..31a72652b33d551295283ba9b9f2e9534010ab6f 100644
@@ -2,10 +2,10 @@
 
 import json
 import logging
+import random
 
 from urlgrabber.grabber import URLGrabber, URLGrabError
-from urlgrabber.mirror import MGRandomOrder
-from urlgrabber.progress import TextMultiFileMeter
+from urlgrabber.mirror import MirrorGroup
 
 from constants import *
 
@@ -15,13 +15,15 @@ class PakfireGrabber(URLGrabber):
        """
                Class to make some modifications on the urlgrabber configuration.
        """
+       # XXX add proxy, user_agent, keep-alive, throttle things here
        pass
 
 
+
 class Mirror(object):
-       def __init__(self, mirror, location=None, preferred=False):
+       def __init__(self, url, location=None, preferred=False):
                # Save URL of the mirror in full format
-               self.mirror = mirror
+               self.url = url
 
                # Save the location (if given)
                self.location = location
@@ -110,6 +112,15 @@ class MirrorList(object):
                        if mirror.preferred:
                                yield mirror
 
+       @property
+       def non_preferred(self):
+               """
+                       Return a generator for all mirrors that are not preferred.
+               """
+               for mirror in self.__mirrors:
+                       if not mirror.preferred:
+                               yield mirror
+
        @property
        def all(self):
                """
@@ -118,3 +129,35 @@ class MirrorList(object):
                for mirror in self.__mirrors:
                        yield mirror
 
+       def group(self, grabber):
+               """
+                       Return a MirrorGroup object for the given grabber.
+               """
+               # A list of mirrors that is passed to MirrorGroup.
+               mirrors = []
+
+               # Add all preferred mirrors first and shuffle them, so that
+               # we start at a random place.
+               for mirror in self.preferred:
+                       mirrors.append(mirror.url)
+               random.shuffle(mirrors)
+
+               # All other mirrors are added as well and will only be used if all
+               # preferred mirrors did not work.
+               for mirror in self.all:
+                       if mirror.url in mirrors:
+                               continue
+
+                       mirrors.append(mirror.url)
+
+               return MirrorGroup(grabber, mirrors)
+
+
+
+class Downloader(object):
+       def __init__(self, mirrors, files):
+               self.grabber = PakfireGrabber()
+
+               self.mirrorgroup = mirrors.group(self.grabber)
+
+
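The Downloader class above is still a stub, but the MirrorGroup returned by MirrorList.group() is already usable on its own. A minimal sketch of the intended flow, assuming a populated MirrorList named mirrors (the path is only an example):

    from pakfire.downloader import PakfireGrabber

    grabber = PakfireGrabber()
    group = mirrors.group(grabber)   # preferred mirrors first, shuffled

    # MirrorGroup falls through to the next mirror when a download fails,
    # so callers only pass paths relative to the mirror root.
    metadata = group.urlread("repodata/repomd.json")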
pakfire/index.py
index 87532f1ea7f11307b4ff52ab639a6c25f110fc7f..dd7021c2e6682b262f7409330ea2fb8e64564575 100644
@@ -139,16 +139,58 @@ class DatabaseIndex(Index):
 
        def update(self, force=False):
                """
-                       Nothing to do here.
+                       Download the repository metadata and the package database.
                """
-               pass
+               # Shortcut to repository cache.
+               cache = self.repo.cache
+
+               cache_filename = "metadata/repomd.json"
+
+               # Marker if we need to do the download.
+               download = True
+
+               # Check if the file already exists and is not too old.
+               if cache.exists(cache_filename):
+                       age = cache.age(cache_filename)
+                       if age and age < TIME_10M:
+                               download = False
+
+               if download:
+                       # XXX do we need limit here for security reasons?
+                       metadata = self.repo.grabber.urlread("repodata/repomd.json")
+
+                       with cache.open(cache_filename, "w") as o:
+                               o.write(metadata)
+
+                       # XXX need to parse metadata here
+
+               # XXX split this into two functions
+
+               cache_filename = "metadata/packages.db" # XXX just for now
+
+               if not cache.exists(cache_filename):
+                       o = cache.open(cache_filename, "w")
+                       i = self.repo.grabber.urlopen("repodata/packages.db") # XXX just for now
+                       
+                       buf = i.read(BUFFER_SIZE)
+                       while buf:
+                               o.write(buf)
+                               buf = i.read(BUFFER_SIZE)
+
+                       i.close()
+                       o.close()
+
+                       # XXX possibly, the database needs to be decompressed
+
+               # Reopen the database
+               self.db = database.RemotePackageDatabase(self.pakfire, cache.abspath(cache_filename))
 
        def get_all_by_name(self, name):
                c = self.db.cursor()
                c.execute("SELECT * FROM packages WHERE name = ?", name)
 
                for pkg in c:
-                       yield package.DatabasePackage(self.pakfire, self.db, pkg)
+                       yield package.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
 
                c.close()
 
@@ -168,7 +210,7 @@ class DatabaseIndex(Index):
                c.execute("SELECT * FROM packages")
 
                for pkg in c:
-                       yield packages.DatabasePackage(self.pakfire, self.db, pkg)
+                       yield packages.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
 
                c.close()
 
pakfire/packages/installed.py
index 8b727baaa7a0d33e225d58213fa4822cc296d9e5..c304d70c9587c54224f02d336b8eb907540dcb2d 100644
@@ -1,12 +1,17 @@
 #!/usr/bin/python
 
+import os
+
 from base import Package
+from binary import BinaryPackage
+
+from pakfire.constants import *
 
 class DatabasePackage(Package):
        type = "db"
 
-       def __init__(self, pakfire, db, data):
-               Package.__init__(self, pakfire, pakfire.repos.local)
+       def __init__(self, pakfire, repo, db, data):
+               Package.__init__(self, pakfire, repo)
 
                self.db = db
 
@@ -113,6 +118,14 @@ class DatabasePackage(Package):
 
                return []
 
+       @property
+       def hash1(self):
+               return self.metadata.get("hash1")
+
+       @property
+       def filename(self):
+               return self.metadata.get("filename") # XXX basename?
+
        @property
        def filelist(self):
                c = self.db.cursor()
@@ -127,6 +140,55 @@ class DatabasePackage(Package):
 
                c.close()
 
+       def download(self):
+               """
+                       Downloads the package from the repository and returns a new
+                       instance of BinaryPackage.
+               """
+               # Marker, if we need to download the package.
+               download = True
+
+               # Add shortcut for cache.
+               cache = self.repo.cache
+
+               cache_filename = "packages/%s" % os.path.basename(self.filename)
+
+               # Check if file already exists in cache.
+               if cache.exists(cache_filename):
+                       # If the file does already exist, we check if the hash1 matches.
+                       if cache.verify(cache_filename, self.hash1):
+                               # We already got the right file. Skip download.
+                               download = False
+                       else:
+                               # The file in cache has a wrong hash. Remove it and repeat download.
+                               cache.remove(cache_filename)
+
+               if download:
+                       # Open input and output files and download the file.
+                       o = cache.open(cache_filename, "w")
+                       # Make sure filename is of type string (and not unicode)
+                       filename = str(self.filename)
+
+                       # XXX to be removed very soon
+                       if filename.startswith("686"):
+                               filename = "i%s" % filename
+
+                       i = self.repo.grabber.urlopen(filename)
+
+                       buf = i.read(BUFFER_SIZE)
+                       while buf:
+                               o.write(buf)
+                               buf = i.read(BUFFER_SIZE)
+
+                       i.close()
+                       o.close()
+
+                       # Verify if the download was okay.
+                       if not cache.verify(cache_filename, self.hash1):
+                               raise Exception, "XXX this should never happen..."
+
+               filename = os.path.join(cache.path, cache_filename)
+               return BinaryPackage(self.pakfire, self.repo, filename)
 
 # XXX maybe we can remove this later?
 class InstalledPackage(DatabasePackage):
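With download() in place, a package taken from a remote index can be turned into a local BinaryPackage before installation. Roughly, with pkg being a DatabasePackage that belongs to a RemoteRepository (variable names are illustrative):

    # Served straight from the cache if a file with a matching hash1 exists.
    binary = pkg.download()

    # The returned BinaryPackage wraps a verified file below the repository cache.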
pakfire/repository.py
index 599c3cb27d0e28471c1077799a3d2e25e9f3e959..9b04a2d5e1bfe7bc981ac317d66f9496be7a843a 100644
@@ -7,12 +7,14 @@ import stat
 import time
 
 from ConfigParser import ConfigParser
+from urlgrabber.progress import TextMeter, TextMultiFileMeter
 
 import base
 import database
 import downloader
 import index
 import packages
+import util
 
 from constants import *
 
@@ -360,12 +362,15 @@ class RepositoryCache(object):
        def path(self):
                return os.path.join(REPO_CACHE_DIR, self.repo.name, self.repo.arch)
 
+       def abspath(self, path):
+               return os.path.join(self.path, path)
+
        def create(self):
                """
                        Create all necessary directories.
                """
-               for d in ("mirrors", "packages", "metadata"):
-                       path = os.path.join(self.path, d)
+               for path in ("mirrors", "packages", "metadata"):
+                       path = self.abspath(path)
 
                        if not os.path.exists(path):
                                os.makedirs(path)
@@ -374,7 +379,7 @@ class RepositoryCache(object):
                """
                        Returns True if a file exists and False if it doesn't.
                """
-               return os.path.exists(os.path.join(self.path, filename))
+               return os.path.exists(self.abspath(filename))
 
        def age(self, filename):
                """
@@ -384,7 +389,7 @@ class RepositoryCache(object):
                if not self.exists(filename):
                        return None
 
-               filename = os.path.join(self.path, filename)
+               filename = self.abspath(filename)
 
                # Creation time of the file
                ctime = os.stat(filename)[stat.ST_CTIME]
@@ -392,10 +397,26 @@ class RepositoryCache(object):
                return (time.time() - ctime) / 60
 
        def open(self, filename, *args, **kwargs):
-               filename = os.path.join(self.path, filename)
+               filename = self.abspath(filename)
 
                return open(filename, *args, **kwargs)
 
+       def verify(self, filename, hash1):
+               """
+                       Return a bool that indicates if a file matches the given hash.
+               """
+               return util.calc_hash1(self.abspath(filename)) == hash1
+
+       def remove(self, filename):
+               """
+                       Remove a file from cache.
+               """
+               if not self.exists(filename):
+                       return
+
+               filename = self.abspath(filename)
+               os.unlink(filename)
+
 
 class RemoteRepository(RepositoryFactory):
        def __init__(self, pakfire, name, description, url, mirrorlist, gpgkey, enabled):
@@ -409,6 +430,9 @@ class RemoteRepository(RepositoryFactory):
                else:
                        self.enabled = False
 
+               # A place to cache the download grabber for this repo.
+               self.__grabber = None
+
                # Create a cache for the repository where we can keep all temporary data.
                self.cache = RepositoryCache(self.pakfire, self)
 
@@ -463,15 +487,17 @@ class RemoteRepository(RepositoryFactory):
 
                return priority
 
-       def fetch_file(self, filename):
-               grabber = URLGrabber(
-                       progress_obj = TextMultiFileMeter(),
-               )
-
-               mg = MGRandomOrder(grabber, self.mirrorlist)
-
-               # XXX Need to say destination here.
-               mg.urlgrab(filename)
+       @property
+       def grabber(self):
+               if not self.__grabber:
+                       grabber = downloader.PakfireGrabber(
+#                              progress_obj = TextMultiFileMeter(), # XXX broken?
+                               progress_obj = TextMeter(),
+                       )
+                       
+                       self.__grabber = self.mirrors.group(grabber)
+
+               return self.__grabber
 
        def update_index(self, force=False):
                if self.index:
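Because the MirrorGroup is kept in self.__grabber, all downloads from one repository reuse the same grabber and therefore the same mirror ordering and progress meter. A hedged usage sketch with a made-up destination:

    repo.grabber.urlgrab("repodata/repomd.json",
        filename=repo.cache.abspath("metadata/repomd.json"))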
pakfire/transaction.py
index 932119ea66491c0cfa7d6cb39d06515b7804d12c..9ff29b60caf74acb56f7ab6801d8a76b5b141912 100644
@@ -125,8 +125,26 @@ class TransactionSet(object):
                else:
                        self.updates.append(pkg)
 
+       def _download(self, pkgs):
+               """
+                       Download all given packages and return a list of BinaryPackages.
+               """
+               _pkgs = []
+               for pkg in pkgs:
+                       if not isinstance(pkg, packages.BinaryPackage):
+                               pkg = pkg.download()
+                       _pkgs.append(pkg)
+
+               return _pkgs
+
        def download(self):
-               pass
+               """
+                       Convert all packages to BinaryPackage.
+               """
+               self.installs = self._download(self.installs)
+               self.install_deps = self._download(self.install_deps)
+               self.updates = self._download(self.updates)
+               self.update_deps = self._download(self.update_deps)
 
 
 class Transaction(object):
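TransactionSet.download() now normalizes all four package lists, so by the time the transaction is run every entry is a BinaryPackage on disk. Roughly, with ts being a populated TransactionSet:

    ts.download()

    for pkg in ts.installs + ts.install_deps + ts.updates + ts.update_deps:
        assert isinstance(pkg, packages.BinaryPackage)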
po/pakfire.pot
index 5c286ab14034d97f05318b139441235b9dec0c38..125bf24e4b460148b53066cf3d0d4132c59afd61 100644
@@ -8,7 +8,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2011-02-21 02:10+0100\n"
+"POT-Creation-Date: 2011-02-21 21:53+0100\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@li.org>\n"