CACHE_DIR = "/var/cache/pakfire"
CCACHE_CACHE_DIR = os.path.join(CACHE_DIR, "ccache")
CACHE_ENVIRON_DIR = os.path.join(CACHE_DIR, "environments")
-REPO_CACHE_DIR = os.path.join(CACHE_DIR, "repos")
+REPO_CACHE_DIR = os.path.join(CACHE_DIR, "downloads")
LOCAL_BUILD_REPO_PATH = "/var/lib/pakfire/local"
LOCAL_TMP_PATH = "/var/tmp"
self.update(force=False)
+ @property
+ def distro(self):
+ return self.repo.distro
+
@property
def cache(self):
"""
return
log.debug("Updating mirrorlist for repository '%s' (force=%s)" % (self.repo.name, force))
-
- cache_filename = "mirrors/mirrorlist"
+ cache_filename = os.path.join("repodata", self.distro.sname, self.distro.release,
+ self.repo.name, self.distro.arch, "mirrors")
# Force the update if no mirrorlist is available.
if not self.cache.exists(cache_filename):
# XXX need to support filelist.
return ["%s does not support filelists, yet." % self.__class__.__name__,]
+ @property
+ def cache_filename(self):
+ """
+ The path to this file in the cache.
+ """
+ h = self.hash1
+
+ return os.path.join(h[0:2], h[2:], os.path.basename(self.filename))
+
@property
def is_in_cache(self):
# Local files are always kinda cached.
if self.repo.local:
return True
- return self.repo.cache.exists("package/%s" % self.filename)
+ return self.repo.cache.exists(self.cache_filename)
def get_from_cache(self):
path = None
# the root directory of the repository or in a subdirectory that
# is named by the architecture.
for i in ("", self.arch,):
- path = os.path.join(self.repo.path, i, self.filename)
+ p = os.path.join(self.repo.path, i, self.filename)
- if os.path.exists(path):
- return file.BinaryPackage(self.pakfire, self.repo, path)
+ if os.path.exists(p):
+ path = p
+ break
else:
- filename = "packages/%s" % self.filename
-
- if self.repo.cache.exists(filename):
- path = self.repo.cache.abspath(filename)
+ if self.repo.cache.exists(self.cache_filename):
+ path = self.repo.cache.abspath(self.cache_filename)
- if path:
+ if path and self.repo.cache.verify(path, self.hash1):
return file.BinaryPackage(self.pakfire, self.repo, path)
def download(self, text=""):
An object that is able to cache all data that is loaded from a
remote repository.
"""
+ path = REPO_CACHE_DIR
def __init__(self, pakfire, repo):
self.pakfire = pakfire
return self.__created
- @property
- def path(self):
- return os.path.join(REPO_CACHE_DIR, self.pakfire.distro.release, \
- self.repo.name, self.repo.arch)
-
def abspath(self, path, create=True):
if create:
self.create()
if self.created:
return
- for path in ("mirrors", "packages", "repodata"):
- path = self.abspath(path, create=False)
-
- if not os.path.exists(path):
- os.makedirs(path)
+ if not os.path.exists(self.path):
+ os.makedirs(self.path)
self.__created = True
def open(self, filename, *args, **kwargs):
filename = self.abspath(filename)
+ # Create directory if it does not exist.
+ dirname = os.path.dirname(filename)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+
return open(filename, *args, **kwargs)
def verify(self, filename, hash1):
def __len(self):
return len(self.repo)
+ @property
+ def distro(self):
+ return self.repo.distro
+
@property
def cache(self):
return self.repo.cache
def _update_metadata(self, force, offline=False):
filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)
+ cache_filename = os.path.join("repodata", self.distro.sname, self.distro.release,
+ self.repo.name, self.distro.arch, os.path.basename(filename))
# Marker if we need to do the download.
download = True
if not force:
# Check if file does exists and is not too old.
- if self.cache.exists(filename):
- age = self.cache.age(filename)
+ if self.cache.exists(cache_filename):
+ age = self.cache.age(cache_filename)
if age and age < TIME_10M:
download = False
log.debug("Metadata is recent enough. I don't download it again.")
# Open old metadata for comparison.
old_metadata = metadata.Metadata(self.pakfire, self,
- self.cache.abspath(filename))
+ self.cache.abspath(cache_filename))
# If no metadata was downloaded and we are in offline mode.
elif offline:
else:
# We explicitely rewrite the metadata if it is equal to have
# a new timestamp and do not download it over and over again.
- with self.cache.open(filename, "w") as o:
+ with self.cache.open(cache_filename, "w") as o:
o.write(data)
# Parse the metadata that we just downloaded or load it from cache.
self.metadata = metadata.Metadata(self.pakfire, self,
- self.cache.abspath(filename))
+ self.cache.abspath(cache_filename))
def _update_database(self, force, offline=False):
if not hasattr(self, "metadata"):
return
# Construct cache and download filename.
+ cache_filename = os.path.join("repodata", self.distro.sname, self.distro.release,
+ self.repo.name, self.distro.arch, "database", self.metadata.database)
filename = os.path.join(METADATA_DOWNLOAD_PATH, self.metadata.database)
- if not self.cache.exists(filename):
+ if not self.cache.exists(cache_filename):
if offline:
# If there is not database and we are in offline mode, we cannot
# download anything so we just skip the rest of this function.
algo=self.metadata.database_compression)
# Make a new file in the cache.
- cacheobj = self.cache.open(filename, "w")
+ cacheobj = self.cache.open(cache_filename, "w")
try:
while True:
urlobj.close()
# check the hashsum of the downloaded file
- if not util.calc_hash1(self.cache.abspath(filename)) == self.metadata.database_hash1:
- # XXX an exception is not a very good idea because this file could
- # be downloaded from another mirror. need a better way to handle this.
-
- # Remove bad file from cache.
- self.cache.remove(filename)
-
- raise Exception, "Downloaded file did not match the hashsum. Need to re-download it."
+ #if self.cache.verify(self.cache.abspath(cache_filename), self.metadata.database_hash1):
+ # # XXX an exception is not a very good idea because this file could
+ # # be downloaded from another mirror. need a better way to handle this.
+ #
+ # # Remove bad file from cache.
+ # self.cache.remove(cache_filename)
+ #
+ # raise Exception, "Downloaded file did not match the hashsum. Need to re-download it."
# (Re-)open the database.
- self.read(self.cache.abspath(filename))
+ self.read(self.cache.abspath(cache_filename))
class IndexDir(Index):
# Marker, if we need to download the package.
download = True
- cache_prefix = ""
- if filename.endswith(PACKAGE_EXTENSION):
- cache_prefix = "packages"
- elif filename == METADATA_DOWNLOAD_FILE:
- cache_prefix = "repodata"
- elif filename.endswith(METADATA_DATABASE_FILE):
- cache_prefix = "repodata"
-
- cache_filename = os.path.join(cache_prefix, os.path.basename(filename))
+ cache_filename = pkg.cache_filename
# Check if file already exists in cache.
if self.cache.exists(cache_filename):
# The file in cache has a wrong hash. Remove it and repeat download.
cache.remove(cache_filename)
- if download:
+ # Get a package grabber and add mirror download capabilities to it.
+ grabber = downloader.PackageDownloader(
+ self.pakfire,
+ text=text + os.path.basename(filename),
+ )
+ grabber = self.mirrors.group(grabber)
+
+ # Make sure filename is of type string (and not unicode)
+ filename = str(filename)
+
+ while download:
log.debug("Going to download %s" % filename)
# If we are in offline mode, we cannot download any files.
raise OfflineModeError, _("Cannot download this file in offline mode: %s") \
% filename
- # Make sure filename is of type string (and not unicode)
- filename = str(filename)
-
- # Get a package grabber and add mirror download capabilities to it.
- grabber = downloader.PackageDownloader(
- self.pakfire,
- text=text + os.path.basename(filename),
- )
- grabber = self.mirrors.group(grabber)
-
i = grabber.urlopen(filename)
# Open input and output files and download the file.
i.close()
o.close()
- # Verify if the download was okay.
- if hash1 and not self.cache.verify(cache_filename, hash1):
- raise Exception, "XXX this should never happen..."
+ if self.cache.verify(cache_filename, hash1):
+ log.debug("Successfully downloaded %s (%s)." % (filename, hash1))
+ break
+
+ log.warning(_("The checksum of the downloaded file did not match."))
+ log.warning(_("Trying another mirror."))
+
+ # Go to the next mirror.
+ grabber.increment_mirror()
return os.path.join(self.cache.path, cache_filename)