SOURCE_DOWNLOAD_URL = "http://source.ipfire.org/source-3.x/"
SOURCE_CACHE_DIR = os.path.join(CACHE_DIR, "sources")
+TIME_10M = 60*10
TIME_24H = 60*60*24
SOURCE_PACKAGE_META = """\
import json
import logging
+import random
from urlgrabber.grabber import URLGrabber, URLGrabError
-from urlgrabber.mirror import MGRandomOrder
-from urlgrabber.progress import TextMultiFileMeter
+from urlgrabber.mirror import MirrorGroup
from constants import *
"""
Class to make some modifications on the urlgrabber configuration.
"""
+ # XXX add proxy, user_agent, keep-alive, throttle things here
pass
+
class Mirror(object):
- def __init__(self, mirror, location=None, preferred=False):
+ def __init__(self, url, location=None, preferred=False):
# Save URL of the mirror in full format
- self.mirror = mirror
+ self.url = url
# Save the location (if given)
self.location = location
if mirror.preferred:
yield mirror
+ @property
+ def non_preferred(self):
+ """
+ Return a generator for all mirrors that are not preferred.
+ """
+ for mirror in self.__mirrors:
+ if not mirror.preferred:
+ yield mirror
+
@property
def all(self):
"""
for mirror in self.__mirrors:
yield mirror
+ def group(self, grabber):
+ """
+ Return a MirrorGroup object for the given grabber.
+ """
+ # A list of mirrors that is passed to MirrorGroup.
+ mirrors = []
+
+		# Add all preferred mirrors first, and shuffle them so that
+		# we start at a random one.
+ for mirror in self.preferred:
+ mirrors.append(mirror.url)
+ random.shuffle(mirrors)
+
+		# All other mirrors are added as well and will only be used
+		# when all preferred mirrors have failed.
+ for mirror in self.all:
+ if mirror.url in mirrors:
+ continue
+
+ mirrors.append(mirror.url)
+
+ return MirrorGroup(grabber, mirrors)
+
+
+
+class Downloader(object):
+ def __init__(self, mirrors, files):
+ self.grabber = PakfireGrabber()
+
+ self.mirrorgroup = mirrors.group(self.grabber)
+
+
def update(self, force=False):
"""
- Nothing to do here.
+ Download the repository metadata and the package database.
"""
- pass
+ # Shortcut to repository cache.
+ cache = self.repo.cache
+
+ cache_filename = "metadata/repomd.json"
+
+ # Marker if we need to do the download.
+ download = True
+
+ # Check if file does exists and is not too old.
+ if cache.exists(cache_filename):
+ age = cache.age(cache_filename)
+ if age and age < TIME_10M:
+ download = False
+
+ if download:
+ # XXX do we need limit here for security reasons?
+ metadata = self.repo.grabber.urlread("repodata/repomd.json")
+
+ with cache.open(cache_filename, "w") as o:
+ o.write(metadata)
+
+ # XXX need to parse metadata here
+
+ # XXX split this into two functions
+
+ cache_filename = "metadata/packages.db" # XXX just for now
+
+ if not cache.exists(cache_filename):
+ o = cache.open(cache_filename, "w")
+ i = self.repo.grabber.urlopen("repodata/packages.db") # XXX just for now
+
+ buf = i.read(BUFFER_SIZE)
+ while buf:
+ o.write(buf)
+ buf = i.read(BUFFER_SIZE)
+
+ i.close()
+ o.close()
+
+ # XXX possibly, the database needs to be decompressed
+
+ # Reopen the database
+ self.db = database.RemotePackageDatabase(self.pakfire, cache.abspath(cache_filename))
def get_all_by_name(self, name):
c = self.db.cursor()
c.execute("SELECT * FROM packages WHERE name = ?", name)
for pkg in c:
- yield package.DatabasePackage(self.pakfire, self.db, pkg)
+ yield package.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
c.close()
c.execute("SELECT * FROM packages")
for pkg in c:
- yield packages.DatabasePackage(self.pakfire, self.db, pkg)
+ yield packages.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
c.close()
#!/usr/bin/python
+import os
+
from base import Package
+from binary import BinaryPackage
+
+from pakfire.constants import *
class DatabasePackage(Package):
type = "db"
- def __init__(self, pakfire, db, data):
- Package.__init__(self, pakfire, pakfire.repos.local)
+ def __init__(self, pakfire, repo, db, data):
+ Package.__init__(self, pakfire, repo)
self.db = db
return []
+ @property
+ def hash1(self):
+ return self.metadata.get("hash1")
+
+ @property
+ def filename(self):
+ return self.metadata.get("filename") # XXX basename?
+
@property
def filelist(self):
c = self.db.cursor()
c.close()
+ def download(self):
+		"""
+			Download the package from the repository and return a new
+			instance of BinaryPackage.
+		"""
+ # Marker, if we need to download the package.
+ download = True
+
+ # Add shortcut for cache.
+ cache = self.repo.cache
+
+ cache_filename = "packages/%s" % os.path.basename(self.filename)
+
+ # Check if file already exists in cache.
+ if cache.exists(cache_filename):
+ # If the file does already exist, we check if the hash1 matches.
+ if cache.verify(cache_filename, self.hash1):
+ # We already got the right file. Skip download.
+ download = False
+ else:
+ # The file in cache has a wrong hash. Remove it and repeat download.
+ cache.remove(cache_filename)
+
+ if download:
+ # Open input and output files and download the file.
+ o = cache.open(cache_filename, "w")
+ # Make sure filename is of type string (and not unicode)
+ filename = str(self.filename)
+
+ # XXX to be removed very soon
+ if filename.startswith("686"):
+ filename = "i%s" % filename
+
+ i = self.repo.grabber.urlopen(filename)
+
+ buf = i.read(BUFFER_SIZE)
+ while buf:
+ o.write(buf)
+ buf = i.read(BUFFER_SIZE)
+
+ i.close()
+ o.close()
+
+ # Verify if the download was okay.
+ if not cache.verify(cache_filename, self.hash1):
+ raise Exception, "XXX this should never happen..."
+
+ filename = os.path.join(cache.path, cache_filename)
+ return BinaryPackage(self.pakfire, self.repo, filename)
# XXX maybe we can remove this later?
class InstalledPackage(DatabasePackage):
import time
from ConfigParser import ConfigParser
+from urlgrabber.progress import TextMeter, TextMultiFileMeter
import base
import database
import downloader
import index
import packages
+import util
from constants import *
def path(self):
return os.path.join(REPO_CACHE_DIR, self.repo.name, self.repo.arch)
+ def abspath(self, path):
+ return os.path.join(self.path, path)
+
def create(self):
"""
Create all necessary directories.
"""
- for d in ("mirrors", "packages", "metadata"):
- path = os.path.join(self.path, d)
+ for path in ("mirrors", "packages", "metadata"):
+ path = self.abspath(path)
if not os.path.exists(path):
os.makedirs(path)
"""
Returns True if a file exists and False if it doesn't.
"""
- return os.path.exists(os.path.join(self.path, filename))
+ return os.path.exists(self.abspath(filename))
def age(self, filename):
"""
if not self.exists(filename):
return None
- filename = os.path.join(self.path, filename)
+ filename = self.abspath(filename)
# Creation time of the file
ctime = os.stat(filename)[stat.ST_CTIME]
return (time.time() - ctime) / 60
def open(self, filename, *args, **kwargs):
- filename = os.path.join(self.path, filename)
+ filename = self.abspath(filename)
return open(filename, *args, **kwargs)
+ def verify(self, filename, hash1):
+ """
+ Return a bool that indicates if a file matches the given hash.
+ """
+ return util.calc_hash1(self.abspath(filename)) == hash1
+
+ def remove(self, filename):
+ """
+ Remove a file from cache.
+ """
+ if not self.exists(filename):
+ return
+
+ filename = self.abspath(filename)
+ os.unlink(filename)
+
class RemoteRepository(RepositoryFactory):
def __init__(self, pakfire, name, description, url, mirrorlist, gpgkey, enabled):
else:
self.enabled = False
+ # A place to cache the download grabber for this repo.
+ self.__grabber = None
+
# Create a cache for the repository where we can keep all temporary data.
self.cache = RepositoryCache(self.pakfire, self)
return priority
- def fetch_file(self, filename):
- grabber = URLGrabber(
- progress_obj = TextMultiFileMeter(),
- )
-
- mg = MGRandomOrder(grabber, self.mirrorlist)
-
- # XXX Need to say destination here.
- mg.urlgrab(filename)
+ @property
+ def grabber(self):
+ if not self.__grabber:
+ grabber = downloader.PakfireGrabber(
+# progress_obj = TextMultiFileMeter(), # XXX broken?
+ progress_obj = TextMeter(),
+ )
+
+ self.__grabber = self.mirrors.group(grabber)
+
+ return self.__grabber
def update_index(self, force=False):
if self.index:
else:
self.updates.append(pkg)
+ def _download(self, pkgs):
+ """
+ Download all given packages and return a list of BinaryPackages.
+ """
+ _pkgs = []
+ for pkg in pkgs:
+ if not isinstance(pkg, packages.BinaryPackage):
+ pkg = pkg.download()
+ _pkgs.append(pkg)
+
+ return _pkgs
+
def download(self):
- pass
+ """
+ Convert all packages to BinaryPackage.
+ """
+ self.installs = self._download(self.installs)
+ self.install_deps = self._download(self.install_deps)
+ self.updates = self._download(self.updates)
+ self.update_deps = self._download(self.update_deps)
class Transaction(object):
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2011-02-21 02:10+0100\n"
+"POT-Creation-Date: 2011-02-21 21:53+0100\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"