class Pakfire(object):
def __init__(self, path="/", builder=False, configs=[],
- disable_repos=None):
+ disable_repos=None, offline=False):
# Check if we are operating as the root user.
self.check_root_user()
# Update all indexes of the repositories (not force) so that we will
# always work with valid data.
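+ # In offline mode the indexes are refreshed from the local cache only
+ # and no network access is attempted.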
- self.repos.update()
+ self.repos.update(offline=offline)
def check_root_user(self):
if not os.getuid() == 0 or not os.getgid() == 0:
self.args.instroot,
configs = [self.args.config],
disable_repos = self.args.disable_repo,
+ offline = self.args.offline,
)
self.action2func = {
self.parser.add_argument("--disable-repo", nargs="*", metavar="REPO",
help=_("Disable a repository temporarily."))
+ self.parser.add_argument("--offline", action="store_true",
+ help=_("Run in offline mode."))
+
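# Illustrative sketch (not part of the patch): because the option uses
# action="store_true", args.offline defaults to False and becomes True only
# when the flag is given, e.g. "pakfire --offline install beep" (command name
# and package are placeholders).

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--offline", action="store_true",
    help="Run in offline mode.")

assert parser.parse_args([]).offline is False
assert parser.parse_args(["--offline"]).offline is True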
def parse_command_install(self):
# Implement the "install" command.
sub_install = self.sub_commands.add_parser("install",
builder = True,
configs = [self.args.config],
disable_repos = self.args.disable_repo,
+ offline = self.args.offline,
)
self.action2func = {
builder = True,
configs = [self.args.config],
disable_repos = self.args.disable_repo,
+ offline = self.args.offline,
)
self.action2func = {
repo.enabled = False
continue
- def update(self, force=False):
+ def update(self, force=False, offline=False):
logging.debug("Updating all repository indexes (force=%s)" % force)
# XXX update all indexes if necessary or forced
for repo in self.enabled:
- repo.update(force=force)
+ repo.update(force=force, offline=offline)
#def get_all(self):
# for repo in self.enabled:
"""
return False
- def update(self, force=False):
+ def update(self, force=False, offline=False):
"""
A function that is called to update the local data of
the repository.
"""
if hasattr(self, "index"):
- self.index.update(force)
+ self.index.update(force, offline=offline)
def get_all(self):
"""
for pkg in self._packages:
yield pkg
- def update(self, force=False):
+ def update(self, force=False, offline=False):
pass
def add_package(self, pkg):
# Always update this because it will otherwise contain no data
self.update(force=True)
- def update(self, force=False):
+ def update(self, force=False, offline=False):
logging.debug("Updating repository index '%s' (force=%s)" % (self.path, force))
# Do nothing if the update is not forced but populate the database
class RemoteIndex(DatabaseIndexFactory):
def open_database(self):
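+ # Open the database from the local cache only; fetching a fresh copy
+ # is left to an explicit update() call.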
- self.update(force=False)
+ self.update(force=False, offline=True)
- def _update_metadata(self, force):
+ def _update_metadata(self, force, offline):
# Shortcut to repository cache.
cache = self.repo.cache
filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)
# Marker if we need to do the download.
- download = True
+ download = not offline
# Marker for the current metadata.
old_metadata = None
self.metadata = metadata.Metadata(self.pakfire, self,
cache.abspath(filename))
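# Illustrative sketch (not part of the patch) of the gating expressed by the
# "download = not offline" marker above; cache_is_fresh is a hypothetical
# stand-in for the cache checks elided from this hunk.

def want_download(cache_is_fresh, force=False, offline=False):
    # Offline mode always wins: never download.
    download = not offline
    # A fresh cache is good enough unless the caller forces a refresh.
    if download and cache_is_fresh and not force:
        download = False
    return download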
- def _update_database(self, force):
+ def _update_database(self, force, offline):
# Shortcut to repository cache.
cache = self.repo.cache
filename = os.path.join(METADATA_DOWNLOAD_PATH, self.metadata.database)
if not cache.exists(filename):
+ if offline:
+ raise Exception("No package database in cache and cannot download one in offline mode.")
+
# Initialize a grabber for download.
grabber = downloader.DatabaseDownloader(
text = _("%s: package database") % self.repo.name,
self.db = database.RemotePackageDatabase(self.pakfire,
cache.abspath(filename))
- def update(self, force=False):
+ def update(self, force=False, offline=False):
"""
Download the repository metadata and the package database.
"""
return
# At first, update the metadata.
- self._update_metadata(force)
+ self._update_metadata(force, offline)
# Then, we download the database eventually.
- self._update_database(force)
+ self._update_database(force, offline)
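# Note (not part of the patch): with offline=True neither helper touches the
# network: _update_metadata() leaves its download marker off and reuses the
# cached metadata, and _update_database() raises instead of downloading when
# no cached package database exists.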
# XXX this code needs lots of work:
# XXX * check the metadata content
return priority
- def update(self, force=False):
+ def update(self, force=False, offline=False):
if self.index:
- self.index.update(force=force)
+ self.index.update(force=force, offline=offline)
def _replace_from_cache(self, pkg):
for _pkg in self.cache.packages: