return pkgs
def repo_create(self, path, input_paths):
- if not os.path.exists(path) or not os.path.isdir(path):
- raise PakfireError, "Given path is not existant or not a directory: %s" % path
-
repo = repository.LocalRepository(
self,
name="new",
path=path,
)
+	# Create a temporary package database for the new repository's index.
+ repo.index.create_database()
+
for input_path in input_paths:
repo._collect_packages(input_path)
- repo.index.tag_db()
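+		# Write the index database and metadata out to the repository path.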
+ repo.save()
+
+ # Destroy the temporary database.
+ repo.index.destroy_database()
METADATA_FORMAT = 0
METADATA_DOWNLOAD_LIMIT = 1024**2
METADATA_DOWNLOAD_PATH = "repodata"
-METADATA_DOWNLOAD_FILE = os.path.join(METADATA_DOWNLOAD_PATH, "repomd.json")
+METADATA_DOWNLOAD_FILE = "repomd.json"
+METADATA_DATABASE_FILE = "packages.db"
PACKAGE_FORMAT = 0
PACKAGE_EXTENSION = "pfm"
import logging
import os
+import shutil
import sqlite3
import time
def close(self):
self._db.close()
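+		# Drop the reference so a closed connection cannot be reused by accident.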
+ self._db = None
def commit(self):
self._db.commit()
def cursor(self):
return self._db.cursor()
+ def save(self, path):
+ """
+		Save (i.e. copy) the database file to the given path.
+ """
+ # Commit all data in memory to the database.
+ self.commit()
+
+ # Copy the file.
+ shutil.copy(self.filename, path)
+
class PackageDatabase(Database):
def create(self):
# Initialize with no content.
self.db, self.metadata = None, None
+ def create_database(self):
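+		# Build the package database in a temporary file; the random component
+		# in the name keeps parallel runs from clashing (this assumes the
+		# random module is available in this file).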
+ filename = "/tmp/.%s-%s" % (random.randint(0, 1024**2), METADATA_DATABASE_FILE)
+
+ self.db = database.RemotePackageDatabase(self.pakfire, filename)
+
+ def destroy_database(self):
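+		# Close the temporary database and remove its file from disk.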
+		if self.db:
+			self.db.close()
+			os.unlink(self.db.filename)
+			self.db = None
+
def _update_metadata(self, force):
# Shortcut to repository cache.
cache = self.repo.cache
- filename = METADATA_DOWNLOAD_FILE
+ filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)
# Marker if we need to do the download.
download = True
data = grabber.urlread(filename)
+ # XXX check the hashsum of the downloaded file
+
with cache.open(filename, "w") as o:
o.write(data)
Download the repository metadata and the package database.
"""
+ # Skip the download for local repositories.
+ if self.repo.local:
+ return
+
# At first, update the metadata.
self._update_metadata(force)
# XXX * check the metadata content
# XXX * use compression
+ def save(self, path=None):
+ """
+		Save the database and metadata to the given path so they can be
+		exported to a remote repository.
+ """
+ if not path:
+ path = self.repo.path
+
+ # Create filenames
+ metapath = os.path.join(path, METADATA_DOWNLOAD_PATH)
+ db_path = os.path.join(metapath, METADATA_DATABASE_FILE)
+ md_path = os.path.join(metapath, METADATA_DOWNLOAD_FILE)
+
+ if not os.path.exists(metapath):
+ os.makedirs(metapath)
+
+ # Save the database to path and get the filename
+ self.db.save(db_path)
+
+		# Hard-link the database file under a unique, hash-prefixed name so
+		# that caching proxies never serve a stale copy of it.
+ db_hash = util.calc_hash1(db_path)
+
+ db_path2 = os.path.join(os.path.dirname(db_path),
+ "%s-%s" % (db_hash, os.path.basename(db_path)))
+
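+		# Note: os.link creates a hard link, so source and target must be
+		# on the same filesystem.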
+ if not os.path.exists(db_path2):
+ os.link(db_path, db_path2)
+
+		# Create a new metadata object and add our information to it.
+ md = metadata.Metadata(self.pakfire, self)
+
+ # Save name of the hashed database to the metadata.
+ md.database = os.path.basename(db_path2)
+ md.database_hash1 = db_hash
+
+		# Save the metadata to the repository.
+ md.save(md_path)
return self._data.get("database")
def set_database(self, val):
- self._data.set("database", val)
+ self._data["database"] = val
database = property(get_database, set_database)
+
+ def get_database_hash1(self):
+ return self._data.get("database_hash1", None)
+
+ def set_database_hash1(self, val):
+ self._data["database_hash1"] = val
+
+ database_hash1 = property(get_database_hash1, set_database_hash1)
def __init__(self, pakfire, name, description, path):
RepositoryFactory.__init__(self, pakfire, name, description)
- # Save location of the repository
+		# Save the location of the repository and create it if it does not exist.
self.path = path
+ if not os.path.exists(self.path):
+ os.makedirs(self.path)
self.index = index.DatabaseIndex(self.pakfire, self)
if not isinstance(pkg, packages.BinaryPackage):
raise Exception
- repo_filename = os.path.join(self.path, pkg.arch, os.path.basename(pkg.filename))
+ repo_filename = os.path.join(self.path, os.path.basename(pkg.filename))
# Do we need to copy the package files?
copy = True
logging.info("Adding package '%s' to repository." % pkg.friendly_name)
self.index.add_package(pkg)
+ def save(self, path=None):
+ """
+ Save the index information to path.
+ """
+ self.index.save(path)
+
class InstalledRepository(RepositoryFactory):
def __init__(self, pakfire):
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2011-02-25 19:21+0100\n"
+"POT-Creation-Date: 2011-02-25 20:34+0100\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
msgid "Total download size: %s"
msgstr ""
-#: ../pakfire/index.py:267
+#: ../pakfire/index.py:278
#, python-format
msgid "%s: package database"
msgstr ""