]>
git.ipfire.org Git - pakfire.git/blob - python/pakfire/repository/index.py
9a398d240cbe77883631458141e00a5effb8013a
2 ###############################################################################
4 # Pakfire - The IPFire package management system #
5 # Copyright (C) 2011 Pakfire development team #
7 # This program is free software: you can redistribute it and/or modify #
8 # it under the terms of the GNU General Public License as published by #
9 # the Free Software Foundation, either version 3 of the License, or #
10 # (at your option) any later version. #
12 # This program is distributed in the hope that it will be useful, #
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of #
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
15 # GNU General Public License for more details. #
17 # You should have received a copy of the GNU General Public License #
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. #
20 ###############################################################################
28 import pakfire
.compress
as compress
29 import pakfire
.downloader
as downloader
30 import pakfire
.packages
as packages
31 import pakfire
.satsolver
as satsolver
32 import pakfire
.util
as util
34 from pakfire
.constants
import *
35 from pakfire
.i18n
import _
38 def __init__(self
, pakfire
, repo
):
39 self
.pakfire
= pakfire
41 # Create reference to repository and the solver repo.
43 self
.solver_repo
= repo
.solver_repo
47 # Check, if initialization was okay.
51 return "<%s %s>" % (self
.__class
__.__name
__, self
.repo
)
58 return self
.repo
.cache
65 Check if everything was correctly initialized.
67 raise NotImplementedError
def update(self, force=False, offline=False):
	"""
	Refresh this index.

	Abstract hook: concrete index classes (solv, dir, local database)
	each provide their own update strategy, so the base class only
	defines the contract and refuses to run.
	"""
	raise NotImplementedError
def read(self, filename):
	"""
	Read file in SOLV format from filename.

	Delegates straight to the underlying satsolver repository object.
	"""
	self.solver_repo.read(filename)
def write(self, filename):
	"""
	Write content to filename in SOLV format.

	Delegates straight to the underlying satsolver repository object.
	"""
	self.solver_repo.write(filename)
def create_relation(self, *args, **kwargs):
	"""
	Build a dependency relation object.

	Thin proxy: the actual relation factory lives on the Pakfire
	instance, so we forward all arguments unchanged and hand back
	whatever it returns.
	"""
	return self.pakfire.create_relation(*args, **kwargs)
87 def add_package(self
, pkg
):
88 # XXX Skip packages without a UUID
90 # logging.warning("Skipping package which lacks UUID: %s" % pkg)
92 if not pkg
.build_time
:
95 logging
.debug("Adding package to index %s: %s" % (self
, pkg
))
97 solvable
= satsolver
.Solvable(self
.solver_repo
, pkg
.name
,
98 pkg
.friendly_version
, pkg
.arch
)
102 solvable
.set_vendor(pkg
.vendor
)
106 solvable
.set_hash1(hash1
)
109 solvable
.set_uuid(pkg
.uuid
)
112 solvable
.set_maintainer(pkg
.maintainer
)
115 solvable
.set_groups(" ".join(pkg
.groups
))
117 # Save upstream information (summary, description, license, url).
119 solvable
.set_summary(pkg
.summary
)
122 solvable
.set_description(pkg
.description
)
125 solvable
.set_license(pkg
.license
)
128 solvable
.set_url(pkg
.url
)
130 # Save build information.
132 solvable
.set_buildhost(pkg
.build_host
)
135 solvable
.set_buildtime(pkg
.build_time
)
138 filename
= os
.path
.basename(pkg
.filename
)
140 solvable
.set_filename(filename
)
142 solvable
.set_downloadsize(pkg
.size
)
143 solvable
.set_installsize(pkg
.inst_size
)
145 # Import all requires.
146 requires
= pkg
.requires
147 prerequires
= pkg
.prerequires
149 requires
.append("solvable:prereqmarker")
150 requires
+= prerequires
152 for req
in pkg
.requires
:
153 rel
= self
.create_relation(req
)
154 solvable
.add_requires(rel
)
156 # Import all provides.
157 for prov
in pkg
.provides
:
158 rel
= self
.create_relation(prov
)
159 solvable
.add_provides(rel
)
161 # Import all conflicts.
162 for conf
in pkg
.conflicts
:
163 rel
= self
.create_relation(conf
)
164 solvable
.add_conflicts(rel
)
166 # Import all obsoletes.
167 for obso
in pkg
.obsoletes
:
168 rel
= self
.create_relation(obso
)
169 solvable
.add_obsoletes(rel
)
171 # Import all files that are in the package.
172 rel
= self
.create_relation("solvable:filemarker")
173 solvable
.add_provides(rel
)
174 for file in pkg
.filelist
:
175 rel
= self
.create_relation(file)
176 solvable
.add_provides(rel
)
def rem_package(self, pkg):
	"""
	Remove a package from the index.

	Currently only drops the package from the backing database;
	the corresponding solvable is not yet deleted from the solver
	repository (XXX in the original).
	"""
	self.db.rem_package(pkg)
184 Forget all packages from memory.
186 self
.solver_repo
.clear()
189 class IndexSolv(Index
):
191 pass # XXX to be done
def update(self, force=False, offline=False):
	"""
	Refresh the remote (solv) index.

	Two-phase update: first fetch/refresh the repository metadata,
	then — based on that metadata — refresh the package database.
	Both phases receive the same force/offline flags.
	"""
	self._update_metadata(force, offline)
	self._update_database(force, offline)
197 def _update_metadata(self
, force
, offline
=False):
198 filename
= os
.path
.join(METADATA_DOWNLOAD_PATH
, METADATA_DOWNLOAD_FILE
)
200 # Marker if we need to do the download.
203 # Marker for the current metadata.
207 # Check if file does exists and is not too old.
208 if self
.cache
.exists(filename
):
209 age
= self
.cache
.age(filename
)
210 if age
and age
< TIME_10M
:
212 logging
.debug("Metadata is recent enough. I don't download it again.")
214 # Open old metadata for comparison.
215 old_metadata
= metadata
.Metadata(self
.pakfire
, self
,
216 self
.cache
.abspath(filename
))
218 # If no metadata was downloaded and we are in offline mode.
220 # If we cannot download new metadata, we should skip this
224 #raise OfflineModeError, _("There is no metadata for the repository '%s' and"
225 # " we cannot download any because we are running in offline mode."
226 # " Connect to a network or disable this repository.") % self.repo.name
228 elif force
and offline
:
229 raise OfflineModeError
, _("I cannot be forced to re-download the metadata for"
230 " the repository '%s' when running in offline mode.") % self
.repo
.name
233 # We are supposed to download new metadata, but we are running in
234 # offline mode. That's okay. Just doing nothing.
236 logging
.debug("Going to (re-)download the repository metadata.")
238 # Initialize a grabber for download.
239 grabber
= downloader
.MetadataDownloader(self
.pakfire
)
240 grabber
= self
.repo
.mirrors
.group(grabber
)
242 data
= grabber
.urlread(filename
, limit
=METADATA_DOWNLOAD_LIMIT
)
244 # Parse new metadata for comparison.
245 new_metadata
= metadata
.Metadata(self
.pakfire
, self
, metadata
=data
)
247 if old_metadata
and new_metadata
< old_metadata
:
248 logging
.warning("The downloaded metadata was less recent than the current one. Trashing that.")
251 # We explicitely rewrite the metadata if it is equal to have
252 # a new timestamp and do not download it over and over again.
253 with self
.cache
.open(filename
, "w") as o
:
256 # Parse the metadata that we just downloaded or load it from cache.
257 self
.metadata
= metadata
.Metadata(self
.pakfire
, self
,
258 self
.cache
.abspath(filename
))
260 def _update_database(self
, force
, offline
=False):
261 if not hasattr(self
, "metadata"):
264 # Construct cache and download filename.
265 filename
= os
.path
.join(METADATA_DOWNLOAD_PATH
, self
.metadata
.database
)
267 if not self
.cache
.exists(filename
):
269 # If there is not database and we are in offline mode, we cannot
270 # download anything so we just skip the rest of this function.
273 #raise OfflineModeError, _("Your repository metadata is outdated "
274 # " and a new version needs to be downloaded.")
276 # Initialize a grabber for download.
277 grabber
= downloader
.DatabaseDownloader(
279 text
= _("%s: package database") % self
.repo
.name
,
281 grabber
= self
.repo
.mirrors
.group(grabber
)
283 data
= grabber
.urlread(filename
)
285 with self
.cache
.open(filename
, "w") as o
:
288 # decompress the database
289 if self
.metadata
.database_compression
:
290 # Open input file and remove the file immediately.
291 # The fileobj is still open and the data will be removed
293 compress
.decompress(self
.cache
.abspath(filename
),
294 algo
=self
.metadata
.database_compression
)
296 # check the hashsum of the downloaded file
297 if not util
.calc_hash1(self
.cache
.abspath(filename
)) == self
.metadata
.database_hash1
:
298 # XXX an exception is not a very good idea because this file could
299 # be downloaded from another mirror. need a better way to handle this.
301 # Remove bad file from cache.
302 self
.cache
.remove(filename
)
304 raise Exception, "Downloaded file did not match the hashsum. Need to re-download it."
306 # (Re-)open the database.
307 self
.read(self
.cache
.abspath(filename
))
310 class IndexDir(Index
):
314 if self
.repo
.type == "binary":
315 self
.pkg_type
= packages
.BinaryPackage
316 elif self
.repo
.type == "source":
317 self
.pkg_type
= packages
.SourcePackage
322 pass # XXX to be done
326 path
= self
.repo
.path
328 if path
.startswith("file://"):
333 def update(self
, force
=False, offline
=False):
334 logging
.debug("Updating repository index '%s' (force=%s)" % (self
.path
, force
))
336 # Do nothing if the update is not forced but populate the database
337 # if no packages are present.
338 if not force
and len(self
.repo
):
341 # Collect all packages from default path.
342 self
.collect_packages(self
.path
)
344 def collect_packages(self
, path
):
345 logging
.debug("Collecting all packages from %s" % path
)
348 # Get a filelist of all files that could possibly be packages.
351 if os
.path
.isdir(path
):
352 for dir, subdirs
, _files
in os
.walk(path
):
353 for file in sorted(_files
):
354 # Skip files that do not have the right extension
355 if not file.endswith(".%s" % PACKAGE_EXTENSION
):
358 file = os
.path
.join(dir, file)
360 elif os
.path
.isfile(path
) and path
.endswith(".%s" % PACKAGE_EXTENSION
):
366 # Create progress bar.
367 pb
= util
.make_progress(_("Loading from %s") % path
, len(files
))
375 package
= packages
.open(self
.pakfire
, self
.repo
, file)
377 # Find all packages with the given type and skip those of
379 if isinstance(package
, self
.pkg_type
):
380 # Check for binary packages if the architecture matches.
381 if isinstance(package
, packages
.BinaryPackage
) and \
382 not package
.arch
in (self
.repo
.arch
, "noarch"):
383 logging
.warning("Skipped package with wrong architecture: %s (%s)" \
384 % (package
.filename
, package
.arch
))
387 # Skip all source packages.
391 self
.add_package(package
)
397 # Internalize the repository, that all imported information
398 # is available for access.
399 self
.solver_repo
.internalize()
404 class IndexLocal(Index
):
406 self
.db
= database
.DatabaseLocal(self
.pakfire
, self
.repo
)
409 # XXX Create the database and lock it or something.
412 def update(self
, force
=True, offline
=False):
413 if self
.solver_repo
.size() == 0:
417 package_count
= len(self
.db
)
419 # Nothing to do here, if there are no packages in the database.
420 if not package_count
:
423 # Add all packages from the database to the index.
424 pb
= util
.make_progress(_("Loading installed packages"), package_count
)
427 for pkg
in self
.db
.packages
:
432 self
.add_package(pkg
)