From e91c050bdfa29cddfce52e0e178c2b9e07f5071e Mon Sep 17 00:00:00 2001
From: Michael Tremer
Date: Sat, 16 Jul 2011 20:47:19 +0200
Subject: [PATCH] Remove some backup files.

---
 pakfire/repository/database_old.py | 292 -------------------
 pakfire/repository/index_old.py    | 448 -----------------------------
 pakfire/repository/local_old.py    | 153 ----------
 pakfire/repository/remote_old.py   | 103 -------
 po/POTFILES.in                     |   4 -
 po/pakfire.pot                     |   4 +-
 6 files changed, 2 insertions(+), 1002 deletions(-)
 delete mode 100644 pakfire/repository/database_old.py
 delete mode 100644 pakfire/repository/index_old.py
 delete mode 100644 pakfire/repository/local_old.py
 delete mode 100644 pakfire/repository/remote_old.py

diff --git a/pakfire/repository/database_old.py b/pakfire/repository/database_old.py
deleted file mode 100644
index e360e2079..000000000
--- a/pakfire/repository/database_old.py
+++ /dev/null
@@ -1,292 +0,0 @@
-#!/usr/bin/python
-
-import logging
-import os
-import random
-import shutil
-import sqlite3
-import time
-
-from pakfire.constants import *
-
-class Cursor(sqlite3.Cursor):
-	def execute(self, *args, **kwargs):
-		# For debugging of SQL queries.
-		#print args, kwargs
-
-		return sqlite3.Cursor.execute(self, *args, **kwargs)
-
-
-class Database(object):
-	def __init__(self, pakfire, filename):
-		self.pakfire = pakfire
-		self._db = None
-
-		self._tmp = False
-
-		if filename == ":memory:":
-			self._tmp = True
-
-			filename = "/tmp/.%s-db" % random.randint(0, 1024**2)
-
-		self.filename = filename
-
-		self.open()
-
-	def __del__(self):
-		if self._db:
-			#self._db.commit()
-			self._db.close()
-
-	def create(self):
-		pass
-
-	def open(self):
-		if not self._db:
-			logging.debug("Open database %s" % self.filename)
-
-			dirname = os.path.dirname(self.filename)
-			if not os.path.exists(dirname):
-				os.makedirs(dirname)
-
-			database_exists = os.path.exists(self.filename)
-
-			# Make a connection to the database.
-			self._db = sqlite3.connect(self.filename)
-			self._db.row_factory = sqlite3.Row
-
-			# Create the database if it was not there, yet.
-			if not database_exists:
-				self.create()
-
-	def close(self):
-		self._db.close()
-		self._db = None
-
-		if self._tmp:
-			os.unlink(self.filename)
-
-	def commit(self):
-		self._db.commit()
-
-	def cursor(self):
-		return self._db.cursor(Cursor)
-
-	def executescript(self, *args, **kwargs):
-		return self._db.executescript(*args, **kwargs)
-
-	def save(self, path):
-		"""
-			Save a copy of this database to a new one located at path.
-		"""
-		self.commit()
-
-		shutil.copy2(self.filename, path)
-
-
-class PackageDatabase(Database):
-	def create(self):
-		c = self.cursor()
-
-		c.executescript("""
-			CREATE TABLE files(
-				name TEXT,
-				pkg INTEGER,
-				size INTEGER,
-				type INTEGER,
-				hash1 TEXT
-			);
-
-			CREATE TABLE packages(
-				id INTEGER PRIMARY KEY,
-				name TEXT,
-				epoch INTEGER,
-				version TEXT,
-				release TEXT,
-				arch TEXT,
-				groups TEXT,
-				filename TEXT,
-				size INTEGER,
-				hash1 TEXT,
-				provides TEXT,
-				requires TEXT,
-				conflicts TEXT,
-				obsoletes TEXT,
-				license TEXT,
-				summary TEXT,
-				description TEXT,
-				uuid TEXT,
-				build_id TEXT,
-				build_host TEXT,
-				build_date TEXT,
-				build_time INTEGER
-			);
-		""")
-		# XXX add some indexes here
-
-		self.commit()
-		c.close()
-
-	def list_packages(self):
-		c = self.cursor()
-		c.execute("SELECT DISTINCT name FROM packages ORDER BY name")
-
-		for pkg in c:
-			yield pkg["name"]
-
-		c.close()
-
-	def package_exists(self, pkg):
-		return not self.get_id_by_pkg(pkg) is None
-
-	def get_id_by_pkg(self, pkg):
-		c = self.cursor()
-
-		c.execute("SELECT id FROM packages WHERE name = ? AND version = ? AND \
-			release = ? AND epoch = ? LIMIT 1", (pkg.name, pkg.version, pkg.release, pkg.epoch))
-
-		ret = None
-		for i in c:
-			ret = i["id"]
-			break
-
-		c.close()
-
-		return ret
-
-	def add_package(self, pkg):
-		raise NotImplementedError
-
-
-class RemotePackageDatabase(PackageDatabase):
-	def add_package(self, pkg, reason=None):
-		if self.package_exists(pkg):
-			logging.debug("Skipping package which already exists in database: %s" % pkg.friendly_name)
-			return
-
-		logging.debug("Adding package to database: %s" % pkg.friendly_name)
-
-		filename = ""
-		if pkg.repo.local:
-			# Get the path relatively to the repository.
-			filename = pkg.filename[len(pkg.repo.path):]
-			# Strip leading / if any.
-			if filename.startswith("/"):
-				filename = filename[1:]
-
-		c = self.cursor()
-		c.execute("""
-			INSERT INTO packages(
-				name,
-				epoch,
-				version,
-				release,
-				arch,
-				groups,
-				filename,
-				size,
-				hash1,
-				provides,
-				requires,
-				conflicts,
-				obsoletes,
-				license,
-				summary,
-				description,
-				uuid,
-				build_id,
-				build_host,
-				build_date,
-				build_time
-			) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
-			(
-				pkg.name,
-				pkg.epoch,
-				pkg.version,
-				pkg.release,
-				pkg.arch,
-				" ".join(pkg.groups),
-				filename,
-				pkg.size,
-				pkg.hash1,
-				" ".join(pkg.provides),
-				" ".join(pkg.requires),
-				" ".join(pkg.conflicts),
-				" ".join(pkg.obsoletes),
-				pkg.license,
-				pkg.summary,
-				pkg.description,
-				pkg.uuid,
-				pkg.build_id,
-				pkg.build_host,
-				pkg.build_date,
-				pkg.build_time,
-			)
-		)
-		self.commit()
-		c.close()
-
-		pkg_id = self.get_id_by_pkg(pkg)
-
-		c = self.cursor()
-		for file in pkg.filelist:
-			c.execute("INSERT INTO files(name, pkg) VALUES(?, ?)", (file, pkg_id))
-
-		self.commit()
-		c.close()
-
-		return pkg_id
-
-
-class LocalPackageDatabase(RemotePackageDatabase):
-	def __init__(self, pakfire):
-		# Generate filename for package database
-		filename = os.path.join(pakfire.path, PACKAGES_DB)
-
-		RemotePackageDatabase.__init__(self, pakfire, filename)
-
-	def create(self):
-		RemotePackageDatabase.create(self)
-
-		# Alter the database layout to store additional local information.
-		logging.debug("Altering database table for local information.")
-		c = self.cursor()
-		c.executescript("""
-			ALTER TABLE packages ADD COLUMN installed INT;
-			ALTER TABLE packages ADD COLUMN reason TEXT;
-			ALTER TABLE packages ADD COLUMN repository TEXT;
-			ALTER TABLE packages ADD COLUMN scriptlet TEXT;
-			ALTER TABLE packages ADD COLUMN triggers TEXT;
-		""")
-		self.commit()
-		c.close()
-
-	def add_package(self, pkg, reason=None):
-		# Insert all the information to the database we have in the remote database
-		pkg_id = RemotePackageDatabase.add_package(self, pkg)
-
-		# then: add some more information
-		c = self.cursor()
-
-		# Save timestamp when the package was installed.
-		c.execute("UPDATE packages SET installed = ? WHERE id = ?", (time.time(), pkg_id))
-
-		# Add repository information.
-		c.execute("UPDATE packages SET repository = ? WHERE id = ?", (pkg.repo.name, pkg_id))
-
-		# Save reason of installation (if any).
-		if reason:
-			c.execute("UPDATE packages SET reason = ? WHERE id = ?", (reason, pkg_id))
-
-		# Update the filename information.
-		c.execute("UPDATE packages SET filename = ? WHERE id = ?", (pkg.filename, pkg_id))
-
-		# Add the scriptlet to database (needed to update or uninstall packages).
-		c.execute("UPDATE packages SET scriptlet = ? WHERE id = ?", (pkg.scriptlet, pkg_id))
-
-		# Add triggers to the database.
-		triggers = " ".join(pkg.triggers)
-		c.execute("UPDATE packages SET triggers = ? WHERE id = ?", (triggers, pkg_id))
-
-		self.commit()
-		c.close()
diff --git a/pakfire/repository/index_old.py b/pakfire/repository/index_old.py
deleted file mode 100644
index 79c412c22..000000000
--- a/pakfire/repository/index_old.py
+++ /dev/null
@@ -1,448 +0,0 @@
-#!/usr/bin/python
-
-import fnmatch
-import json
-import logging
-import os
-import random
-import shutil
-import subprocess
-import time
-
-import database
-import metadata
-
-import pakfire.compress as compress
-import pakfire.downloader as downloader
-import pakfire.packages as packages
-import pakfire.util as util
-
-from pakfire.constants import *
-from pakfire.i18n import _
-
-class Index(object):
-	def __init__(self, pakfire, repo):
-		self.pakfire = pakfire
-		self.repo = repo
-
-		self._packages = []
-
-	@property
-	def arch(self):
-		return self.pakfire.distro.arch
-
-	def get_all_by_name(self, name):
-		for package in self.packages:
-			if package.name == name:
-				yield package
-
-	def get_by_file(self, filename):
-		for pkg in self.packages:
-			match = False
-			for pkg_filename in pkg.filelist:
-				if fnmatch.fnmatch(pkg_filename, filename):
-					match = True
-					break
-
-			if match:
-				yield pkg
-
-	def get_by_evr(self, name, epoch, version, release):
-		try:
-			epoch = int(epoch)
-		except TypeError:
-			epoch = 0
-
-		for pkg in self.packages:
-			if pkg.type == "source":
-				continue
-
-			if pkg.name == name and pkg.epoch == epoch \
-					and pkg.version == version and pkg.release == release:
-				yield pkg
-
-	def get_by_id(self, id):
-		raise NotImplementedError
-
-	def get_by_uuid(self, uuid):
-		for pkg in self.packages:
-			if pkg.uuid == uuid:
-				return pkg
-
-	def get_by_provides(self, requires):
-		for pkg in self.packages:
-			if pkg.does_provide(requires):
-				yield pkg
-
-	@property
-	def packages(self):
-		for pkg in self._packages:
-			yield pkg
-
-	@property
-	def size(self):
-		i = 0
-		for pkg in self.packages:
-			i += 1
-
-		return i
-
-	def update(self, force=False):
-		pass
-
-	def add_package(self, pkg):
-		raise NotImplementedError
-
-	@property
-	def cachefile(self):
-		return None
-
-	def import_to_solver(self, solver, repo):
-		if self.cachefile:
-			if not os.path.exists(self.cachefile):
-				self.create_solver_cache()
-
-			logging.debug("Importing repository cache data from %s" % self.cachefile)
-			repo.add_solv(self.cachefile)
-
-		else:
-			for pkg in self.packages:
-				solver.add_package(pkg, repo.name())
-
-		logging.debug("Initialized new repo '%s' with %s packages." % \
-			(repo.name(), repo.size()))
-
-	def create_solver_cache(self):
-		cachedir = os.path.dirname(self.cachefile)
-		if not os.path.exists(cachedir):
-			os.makedirs(cachedir)
-
-		f = open(self.cachefile, "w")
-
-		# Write metadata header.
-		xml = "\n"
-		xml += "\n"
-
-		# We dump an XML string for every package in this repository and
-		# write it to the XML file.
-		for pkg in self.packages:
-			xml += pkg.export_xml_string()
-
-		# Write footer.
-		xml += ""
-
-		p = subprocess.Popen("rpmmd2solv", stdin=subprocess.PIPE,
-			stdout=subprocess.PIPE)
-		stdout, stderr = p.communicate(xml)
-
-		f.write(stdout)
-		f.close()
-
-
-class DirectoryIndex(Index):
-	def __init__(self, pakfire, repo, path):
-		if path.startswith("file://"):
-			path = path[7:]
-		self.path = path
-
-		Index.__init__(self, pakfire, repo)
-
-		# Always update this because it will otherwise contain no data
-		self.update(force=True)
-
-	def update(self, force=False):
-		logging.debug("Updating repository index '%s' (force=%s)" % (self.path, force))
-
-		# Do nothing if the update is not forced but populate the database
-		# if no packages are present.
-		if not force and self._packages:
-			return
-
-		# If we update the cache, we clear it first.
-		self._packages = []
-
-		for dir, subdirs, files in os.walk(self.path):
-			for file in files:
-				# Skip files that do not have the right extension
-				if not file.endswith(".%s" % PACKAGE_EXTENSION):
-					continue
-
-				file = os.path.join(dir, file)
-
-				package = packages.open(self.pakfire, self.repo, file)
-
-				logging.debug("Found package: %s" % package)
-
-				if isinstance(package, packages.BinaryPackage):
-					if not package.arch in (self.arch, "noarch"):
-						logging.warning("Skipped package with wrong architecture: %s (%s)" \
-							% (package.filename, package.arch))
-						print package.type
-						continue
-
-				# XXX this is disabled because we could also have source
-				# repositories. But we should not mix them.
-				#if package.type == "source":
-				#	# Silently skip source packages.
-				#	continue
-
-				self._packages.append(package)
-
-	def save(self, path=None):
-		if not path:
-			path = self.path
-
-		path = os.path.join(path, "index.db")
-
-		db = database.PackageDatabase(self.pakfire, path)
-
-		for pkg in self.packages:
-			db.add_package(pkg)
-
-		db.close()
-
-
-class DatabaseIndexFactory(Index):
-	def __init__(self, pakfire, repo):
-		Index.__init__(self, pakfire, repo)
-
-		# Add empty reference to a fictional database.
-		self.db = None
-
-		self.open_database()
-
-	def open_database(self):
-		raise NotImplementedError
-
-	@property
-	def packages(self):
-		c = self.db.cursor()
-		c.execute("SELECT * FROM packages")
-
-		for pkg in c:
-			yield packages.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
-
-		c.close()
-
-	def add_package(self, pkg, reason=None):
-		return self.db.add_package(pkg, reason)
-
-	def get_by_id(self, id):
-		c = self.db.cursor()
-		c.execute("SELECT * FROM packages WHERE id = ? LIMIT 1", (id,))
-
-		ret = None
-		for pkg in c:
-			ret = packages.DatabasePackage(self.pakfire, self.repo, self.db, pkg)
-
-		c.close()
-
-		return ret
-
-	def get_by_file(self, filename):
-		c = self.db.cursor()
-		c.execute("SELECT pkg FROM files WHERE name GLOB ?", (filename,))
-
-		for pkg in c:
-			yield self.get_by_id(pkg["pkg"])
-
-		c.close()
-
-	@property
-	def filelist(self):
-		c = self.db.cursor()
-		c.execute("SELECT pkg, name FROM files")
-
-		files = {}
-
-		for entry in c:
-			file = entry["name"]
-			try:
-				files[pkg_id].append(file)
-			except KeyError:
-				files[pkg_id] = [file,]
-
-		c.close()
-
-		return files
-
-
-class InstalledIndex(DatabaseIndexFactory):
-	def open_database(self):
-		# Open the local package database.
-		self.db = database.LocalPackageDatabase(self.pakfire)
-
-
-class LocalIndex(DatabaseIndexFactory):
-	def open_database(self):
-		self.db = database.RemotePackageDatabase(self.pakfire, ":memory:")
-
-	def save(self, path=None, algo="xz"):
-		"""
-			This function saves the database and metadata to path so it can
-			be exported to a remote repository.
-		"""
-		if not path:
-			path = self.repo.path
-
-		# Create filenames
-		metapath = os.path.join(path, METADATA_DOWNLOAD_PATH)
-		db_path = os.path.join(metapath, METADATA_DATABASE_FILE)
-		md_path = os.path.join(metapath, METADATA_DOWNLOAD_FILE)
-
-		if not os.path.exists(metapath):
-			os.makedirs(metapath)
-
-		else:
-			# If a database is present, we remove it because we want to start
-			# with a clean environment.
-			if os.path.exists(db_path):
-				os.unlink(db_path)
-
-		# Save the database to path and get the filename.
-		self.db.save(db_path)
-
-		# Make a reference to the database file that it will get a unique name
-		# so we won't get into any trouble with caching proxies.
-		db_hash = util.calc_hash1(db_path)
-
-		db_path2 = os.path.join(os.path.dirname(db_path),
-			"%s-%s" % (db_hash, os.path.basename(db_path)))
-
-		# Compress the database.
-		if algo:
-			compress.compress(db_path, algo=algo, progress=True)
-
-		if not os.path.exists(db_path2):
-			shutil.move(db_path, db_path2)
-		else:
-			os.unlink(db_path)
-
-		# Create a new metadata object and add out information to it.
-		md = metadata.Metadata(self.pakfire, self)
-
-		# Save name of the hashed database to the metadata.
-		md.database = os.path.basename(db_path2)
-		md.database_hash1 = db_hash
-		md.database_compression = algo
-
-		# Save metdata to repository.
-		md.save(md_path)
-
-
-class RemoteIndex(DatabaseIndexFactory):
-	def open_database(self):
-		self.update(force=False)
-
-	def _update_metadata(self, force):
-		# Shortcut to repository cache.
-		cache = self.repo.cache
-
-		filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)
-
-		# Marker if we need to do the download.
-		download = True
-
-		# Marker for the current metadata.
-		old_metadata = None
-
-		if not force:
-			# Check if file does exists and is not too old.
-			if cache.exists(filename):
-				age = cache.age(filename)
-				if age and age < TIME_10M:
-					download = False
-					logging.debug("Metadata is recent enough. I don't download it again.")
-
-				# Open old metadata for comparison.
-				old_metadata = metadata.Metadata(self.pakfire, self,
-					cache.abspath(filename))
-
-		if download:
-			logging.debug("Going to (re-)download the repository metadata.")
-
-			# Initialize a grabber for download.
-			grabber = downloader.MetadataDownloader()
-			grabber = self.repo.mirrors.group(grabber)
-
-			data = grabber.urlread(filename, limit=METADATA_DOWNLOAD_LIMIT)
-
-			# Parse new metadata for comparison.
-			new_metadata = metadata.Metadata(self.pakfire, self, metadata=data)
-
-			if old_metadata and new_metadata < old_metadata:
-				logging.warning("The downloaded metadata was less recent than the current one. Trashing that.")
-
-			else:
-				# We explicitely rewrite the metadata if it is equal to have
-				# a new timestamp and do not download it over and over again.
-				with cache.open(filename, "w") as o:
-					o.write(data)
-
-		# Parse the metadata that we just downloaded or load it from cache.
-		self.metadata = metadata.Metadata(self.pakfire, self,
-			cache.abspath(filename))
-
-	def _update_database(self, force):
-		# Shortcut to repository cache.
-		cache = self.repo.cache
-
-		# Construct cache and download filename.
-		filename = os.path.join(METADATA_DOWNLOAD_PATH, self.metadata.database)
-
-		if not cache.exists(filename):
-			# Initialize a grabber for download.
-			grabber = downloader.DatabaseDownloader(
-				text = _("%s: package database") % self.repo.name,
-			)
-			grabber = self.repo.mirrors.group(grabber)
-
-			data = grabber.urlread(filename)
-
-			with cache.open(filename, "w") as o:
-				o.write(data)
-
-			# decompress the database
-			if self.metadata.database_compression:
-				# Open input file and remove the file immediately.
-				# The fileobj is still open and the data will be removed
-				# when it is closed.
-				compress.decompress(cache.abspath(filename),
-					algo=self.metadata.database_compression)
-
-		# check the hashsum of the downloaded file
-		if not util.calc_hash1(cache.abspath(filename)) == self.metadata.database_hash1:
-			# XXX an exception is not a very good idea because this file could
-			# be downloaded from another mirror. need a better way to handle this.
-
-			# Remove bad file from cache.
-			cache.remove(filename)
-
-			raise Exception, "Downloaded file did not match the hashsum. Need to re-download it."
-
-		# (Re-)open the database.
-		self.db = database.RemotePackageDatabase(self.pakfire,
-			cache.abspath(filename))
-
-	def update(self, force=False):
-		"""
-			Download the repository metadata and the package database.
-		"""
-
-		# Skip the download for local repositories.
-		if self.repo.local:
-			return
-
-		# At first, update the metadata.
-		self._update_metadata(force)
-
-		# Then, we download the database eventually.
-		self._update_database(force)
-
-		# XXX this code needs lots of work:
-		# XXX * check the metadata content
-
-	@property
-	def cachefile(self):
-		return "%s.cache" % self.db.filename
diff --git a/pakfire/repository/local_old.py b/pakfire/repository/local_old.py
deleted file mode 100644
index 917f7c589..000000000
--- a/pakfire/repository/local_old.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/python
-
-import logging
-import os
-import shutil
-
-import pakfire.packages as packages
-import pakfire.util as util
-
-import index
-
-from base import RepositoryFactory
-
-from pakfire.constants import *
-
-class LocalRepository(RepositoryFactory):
-	def __init__(self, pakfire, name, description, path, idx="db"):
-		RepositoryFactory.__init__(self, pakfire, name, description)
-
-		# Save location of the repository and create it if not existant.
-		self.path = path
-		if not os.path.exists(self.path):
-			os.makedirs(self.path)
-
-		if idx == "db":
-			self.index = index.LocalIndex(self.pakfire, self)
-
-		elif idx == "directory":
-			self.index = index.DirectoryIndex(self.pakfire, self, self.path)
-
-	def remove(self):
-		if os.path.exists(self.path):
-			util.rm(self.path)
-
-	@property
-	def local(self):
-		# This is obviously local.
-		return True
-
-	@property
-	def priority(self):
-		"""
-			The local repository has always a high priority.
-		"""
-		return 10
-
-	def _collect_packages(self, path):
-		logging.info("Collecting packages from %s." % path)
-
-		for dir, subdirs, files in os.walk(path):
-			for file in files:
-				if not file.endswith(".%s" % PACKAGE_EXTENSION):
-					continue
-
-				file = os.path.join(dir, file)
-
-				pkg = packages.open(self.pakfire, self, file)
-				self._add_package(pkg)
-
-	def _add_package(self, pkg):
-		# XXX gets an instance of binary package and puts it into the
-		# repo location if not done yet
-		# then: the package gets added to the index
-
-		if not isinstance(pkg, packages.BinaryPackage):
-			raise Exception
-
-		# Skip everything but binary packages.
-		if pkg.type == "source":
-			return
-
-		repo_filename = os.path.join(self.path, os.path.basename(pkg.filename))
-
-		# Do we need to copy the package files?
-		copy = True
-
-		pkg_exists = None
-		if os.path.exists(repo_filename):
-			pkg_exists = packages.open(self.pakfire, self, repo_filename)
-
-			# If package in the repo is equivalent to the given one, we can
-			# skip any further processing.
-			if pkg.hash1 == pkg_exists.hash1:
-				logging.debug("The package does already exist in this repo: %s" % pkg.friendly_name)
-				copy = False
-
-			else:
-				logging.warning("The package is going to be replaced: %s -> %s" % (pkg_exists, pkg))
-				os.unlink(repo_filename)
-
-			del pkg_exists
-
-		if copy:
-			logging.debug("Copying package '%s' to repository." % pkg.friendly_name)
-			repo_dirname = os.path.dirname(repo_filename)
-			if not os.path.exists(repo_dirname):
-				os.makedirs(repo_dirname)
-
-			# Try to use a hard link if possible, if we cannot do that we simply
-			# copy the file.
-			try:
-				os.link(pkg.filename, repo_filename)
-			except OSError:
-				shutil.copy2(pkg.filename, repo_filename)
-
-		# Create new package object, that is connected to this repository
-		# and so we can do stuff.
-		pkg = packages.open(self.pakfire, self, repo_filename)
-
-		logging.info("Adding package '%s' to repository." % pkg.friendly_name)
-		self.index.add_package(pkg)
-
-	def save(self, path=None):
-		"""
-			Save the index information to path.
-		"""
-		self.index.save(path)
-
-
-class LocalBinaryRepository(LocalRepository):
-	@property
-	def packages(self):
-		for pkg in self.index.packages:
-			# XXX should be changed to "binary" if all packages do support this.
-			if pkg.type == "source":
-				continue
-
-			yield pkg
-
-
-class LocalSourceRepository(LocalRepository):
-	@property
-	def packages(self):
-		for pkg in self.index.packages:
-			if not pkg.type == "source":
-				continue
-
-			yield pkg
-
-
-class LocalBuildRepository(LocalBinaryRepository):
-	def __init__(self, pakfire):
-		RepositoryFactory.__init__(self, pakfire, "build", "Locally built packages")
-
-		self.path = self.pakfire.config.get("local_build_repo_path")
-		if not os.path.exists(self.path):
-			os.makedirs(self.path)
-
-		self.index = index.DirectoryIndex(self.pakfire, self, self.path)
-
-	@property
-	def priority(self):
-		return 20000
diff --git a/pakfire/repository/remote_old.py b/pakfire/repository/remote_old.py
deleted file mode 100644
index 52c348472..000000000
--- a/pakfire/repository/remote_old.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/python
-
-import logging
-import os
-
-import cache
-import index
-
-import pakfire.downloader as downloader
-
-from base import RepositoryFactory
-
-class RemoteRepository(RepositoryFactory):
-	cacheable = True
-
-	def __init__(self, pakfire, name, description, url, mirrorlist, gpgkey, enabled):
-		RepositoryFactory.__init__(self, pakfire, name, description)
-
-		# Parse arguments.
-		self.url = url
-		self.gpgkey = gpgkey
-		self.mirrorlist = mirrorlist
-
-		if enabled:
-			self.enabled = True
-		else:
-			self.enabled = False
-
-		# Create a cache for the repository where we can keep all temporary data.
-		self.cache = cache.RepositoryCache(self.pakfire, self)
-
-		# Initialize mirror servers.
-		self.mirrors = downloader.MirrorList(self.pakfire, self)
-
-		# Initialize index.
-		self.index = index.RemoteIndex(self.pakfire, self)
-
-		logging.debug("Created new repository(name='%s', url='%s', enabled='%s')" % \
-			(self.name, self.url, self.enabled))
-
-	def __repr__(self):
-		return "<%s %s>" % (self.__class__.__name__, self.url)
-
-	@property
-	def local(self):
-		# If files are located somewhere in the filesystem we assume it is
-		# local.
-		if self.url.startswith("file://"):
-			return True
-
-		# Otherwise not.
-		return False
-
-	@property
-	def arch(self):
-		return self.pakfire.distro.arch
-
-	@property
-	def path(self):
-		if self.local:
-			return self.url[7:]
-
-		return self.cache.path
-
-	@property
-	def priority(self):
-		priority = 100
-
-		url2priority = {
-			"file://" : 50,
-			"http://" : 75,
-		}
-
-		for url, prio in url2priority.items():
-			if self.url.startswith(url):
-				priority = prio
-				break
-
-		return priority
-
-	#def update(self, force=False):
-	#	if self.index:
-	#		self.index.update(force=force)
-
-	def _replace_from_cache(self, pkg):
-		for _pkg in self.cache.packages:
-			if pkg == _pkg:
-				pkg = _pkg
-				break
-
-		return pkg
-
-	@property
-	def packages(self):
-		for pkg in self.index.packages:
-			yield self._replace_from_cache(pkg)
-
-	def get_by_provides(self, requires):
-		for pkg in self.index.get_by_provides(requires):
-			yield self._replace_from_cache(pkg)
-
-	def get_by_file(self, filename):
-		return self.index.get_by_file(filename)
diff --git a/po/POTFILES.in b/po/POTFILES.in
index a9b36c944..24106b414 100644
--- a/po/POTFILES.in
+++ b/po/POTFILES.in
@@ -27,16 +27,12 @@ pakfire/packages/virtual.py
 pakfire/repository/actions.py
 pakfire/repository/base.py
 pakfire/repository/cache.py
-pakfire/repository/database_old.py
 pakfire/repository/database.py
-pakfire/repository/index_old.py
 pakfire/repository/index.py
 pakfire/repository/__init__.py
 pakfire/repository/installed.py
-pakfire/repository/local_old.py
 pakfire/repository/local.py
 pakfire/repository/metadata.py
-pakfire/repository/remote_old.py
 pakfire/repository/remote.py
 pakfire/satsolver.py
 pakfire/server.py
diff --git a/po/pakfire.pot b/po/pakfire.pot
index 44ac7d3ff..dc6153068 100644
--- a/po/pakfire.pot
+++ b/po/pakfire.pot
@@ -8,7 +8,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2011-07-16 20:37+0200\n"
+"POT-Creation-Date: 2011-07-16 20:47+0200\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME \n"
 "Language-Team: LANGUAGE \n"
@@ -290,7 +290,7 @@ msgstr ""
 msgid "Requires"
 msgstr ""
 
-#: ../pakfire/repository/index_old.py:397 ../pakfire/repository/index.py:216
+#: ../pakfire/repository/index.py:216
 #, python-format
 msgid "%s: package database"
 msgstr ""
-- 
2.39.5
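
P.S. A quick sanity check for reviewers: after applying this patch, nothing
under pakfire/ should refer to the removed modules any more (po/POTFILES.in is
already updated above). This is only a sketch, not part of the patch itself;
it assumes it is run from the top of the source tree, and the module names are
taken from the diffstat above.

    #!/usr/bin/python
    # Report any lingering references to the modules deleted by this patch.
    import os
    import re

    # Modules removed by this patch (from the diffstat).
    REMOVED = ("database_old", "index_old", "local_old", "remote_old")
    PATTERN = re.compile(r"\b(%s)\b" % "|".join(REMOVED))

    for root, dirs, files in os.walk("pakfire"):
        for name in files:
            if not name.endswith(".py"):
                continue
            path = os.path.join(root, name)
            with open(path) as f:
                for lineno, line in enumerate(f, 1):
                    if PATTERN.search(line):
                        print("%s:%d: %s" % (path, lineno, line.rstrip()))

No output means the tree is clean.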