#!/usr/bin/python
###############################################################################
#                                                                             #
# Pakfire - The IPFire package management system                              #
# Copyright (C) 2011 Pakfire development team                                 #
#                                                                             #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation, either version 3 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# This program is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the                #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with this program. If not, see <http://www.gnu.org/licenses/>.        #
#                                                                             #
###############################################################################

import logging
import os

import database
import metadata

import pakfire.compress as compress
import pakfire.downloader as downloader
import pakfire.packages as packages
import pakfire.satsolver as satsolver
import pakfire.util as util

from pakfire.constants import *
from pakfire.i18n import _

class Index(object):
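	"""
		Base class for all repository indexes.

		An index collects package information from some backend (remote
		metadata, a directory of package files or the local package
		database) and feeds it into the satsolver repository that belongs
		to self.repo.
	"""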
	def __init__(self, pakfire, repo):
		self.pakfire = pakfire

		# Create reference to repository and the solver repo.
		self.repo = repo
		self.solver_repo = repo.solver_repo

		self.init()

		# Check if initialization was okay.
		self.check()

	def __repr__(self):
		return "<%s %s>" % (self.__class__.__name__, self.repo)

	def __len__(self):
		return len(self.repo)

	@property
	def cache(self):
		return self.repo.cache

	def init(self):
		pass

	def check(self):
		"""
			Check if everything was correctly initialized.
		"""
		raise NotImplementedError

	def update(self, force=False, offline=False):
		raise NotImplementedError

	def read(self, filename):
		"""
			Read file in SOLV format from filename.
		"""
		self.solver_repo.read(filename)

	def write(self, filename):
		"""
			Write content to filename in SOLV format.
		"""
		self.solver_repo.write(filename)

	def create_relation(self, *args, **kwargs):
		return self.pakfire.create_relation(*args, **kwargs)

	def add_package(self, pkg):
		# XXX Skip packages without a UUID
		#if not pkg.uuid:
		#	logging.warning("Skipping package which lacks UUID: %s" % pkg)
		#	return
		if not pkg.build_time:
			return

		logging.debug("Adding package to index %s: %s" % (self, pkg))

		solvable = satsolver.Solvable(self.solver_repo, pkg.name,
			pkg.friendly_version, pkg.arch)

		# Save metadata.
		if pkg.vendor:
			solvable.set_vendor(pkg.vendor)

		hash1 = pkg.hash1
		assert hash1
		solvable.set_hash1(hash1)

		assert pkg.uuid
		solvable.set_uuid(pkg.uuid)

		if pkg.maintainer:
			solvable.set_maintainer(pkg.maintainer)

		if pkg.groups:
			solvable.set_groups(" ".join(pkg.groups))

		# Save upstream information (summary, description, license, url).
		if pkg.summary:
			solvable.set_summary(pkg.summary)

		if pkg.description:
			solvable.set_description(pkg.description)

		if pkg.license:
			solvable.set_license(pkg.license)

		if pkg.url:
			solvable.set_url(pkg.url)

		# Save build information.
		if pkg.build_host:
			solvable.set_buildhost(pkg.build_host)

		if pkg.build_time:
			solvable.set_buildtime(pkg.build_time)

		# Save filename.
		filename = os.path.basename(pkg.filename)
		assert filename
		solvable.set_filename(filename)

		solvable.set_downloadsize(pkg.size)
		solvable.set_installsize(pkg.inst_size)

		# Import all requires.
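		# Pre-requires are merged into the same list; the special
		# "solvable:prereqmarker" entry separates them from the ordinary
		# requires so the solver can tell both groups apart.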
		requires = pkg.requires
		prerequires = pkg.prerequires
		if prerequires:
			requires.append("solvable:prereqmarker")
			requires += prerequires

		for req in requires:
			rel = self.create_relation(req)
			solvable.add_requires(rel)

		# Import all provides.
		for prov in pkg.provides:
			rel = self.create_relation(prov)
			solvable.add_provides(rel)

		# Import all conflicts.
		for conf in pkg.conflicts:
			rel = self.create_relation(conf)
			solvable.add_conflicts(rel)

		# Import all obsoletes.
		for obso in pkg.obsoletes:
			rel = self.create_relation(obso)
			solvable.add_obsoletes(rel)

		# Import all files that are in the package.
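		# The "solvable:filemarker" entry separates the regular provides
		# from the file list that follows it.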
		rel = self.create_relation("solvable:filemarker")
		solvable.add_provides(rel)
		for file in pkg.filelist:
			rel = self.create_relation(file)
			solvable.add_provides(rel)

	def rem_package(self, pkg):
		# XXX delete the solvable from the index.
		self.db.rem_package(pkg)

	def clear(self):
		"""
			Forget all packages from memory.
		"""
		self.solver_repo.clear()


class IndexSolv(Index):
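	"""
		Index that is populated from pre-built SOLV metadata which is
		downloaded from a remote repository and cached locally.
	"""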
	def check(self):
		pass # XXX to be done

	def update(self, force=False, offline=False):
		self._update_metadata(force, offline)
		self._update_database(force, offline)

	def _update_metadata(self, force, offline=False):
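		"""
			Fetch the repository metadata unless a sufficiently recent
			copy is already in the cache, and keep it available as
			self.metadata.
		"""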
		filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)

		# Marker if we need to do the download.
		download = True

		# Marker for the current metadata.
		old_metadata = None

		if not force:
			# Check if the file exists and is not too old.
			if self.cache.exists(filename):
				age = self.cache.age(filename)
				if age and age < TIME_10M:
					download = False
					logging.debug("Metadata is recent enough. I don't download it again.")

				# Open old metadata for comparison.
				old_metadata = metadata.Metadata(self.pakfire, self,
					self.cache.abspath(filename))

			# If no metadata was downloaded and we are in offline mode.
			elif offline:
				# If we cannot download new metadata, we should skip this
				# repository.
				return

				#raise OfflineModeError(_("There is no metadata for the repository '%s' and"
				#	" we cannot download any because we are running in offline mode."
				#	" Connect to a network or disable this repository.") % self.repo.name)

		elif force and offline:
			raise OfflineModeError(_("I cannot be forced to re-download the metadata for"
				" the repository '%s' when running in offline mode.") % self.repo.name)

		if download:
			# We are supposed to download new metadata. If we are running
			# in offline mode, we silently do nothing instead.
			if not offline:
				logging.debug("Going to (re-)download the repository metadata.")

				# Initialize a grabber for download.
				grabber = downloader.MetadataDownloader(self.pakfire)
				grabber = self.repo.mirrors.group(grabber)

				data = grabber.urlread(filename, limit=METADATA_DOWNLOAD_LIMIT)

				# Parse new metadata for comparison.
				new_metadata = metadata.Metadata(self.pakfire, self, metadata=data)

				if old_metadata and new_metadata < old_metadata:
					logging.warning("The downloaded metadata is less recent than the current one. Trashing it.")

				else:
					# We explicitly rewrite the metadata even if it is equal, so
					# that it gets a new timestamp and is not downloaded over and
					# over again.
					with self.cache.open(filename, "w") as o:
						o.write(data)

		# Parse the metadata that we just downloaded or load it from cache.
		self.metadata = metadata.Metadata(self.pakfire, self,
			self.cache.abspath(filename))

	def _update_database(self, force, offline=False):
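		"""
			Download the package database that the metadata refers to,
			decompress it if necessary, verify its hash1 checksum and
			load it into the solver repo.
		"""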
		if not hasattr(self, "metadata"):
			return

		# Construct cache and download filename.
		filename = os.path.join(METADATA_DOWNLOAD_PATH, self.metadata.database)

		if not self.cache.exists(filename):
			if offline:
				# If there is no database and we are in offline mode, we cannot
				# download anything so we just skip the rest of this function.
				return

				#raise OfflineModeError(_("Your repository metadata is outdated"
				#	" and a new version needs to be downloaded."))

			# Initialize a grabber for download.
			grabber = downloader.DatabaseDownloader(
				self.pakfire,
				text = _("%s: package database") % self.repo.name,
			)
			grabber = self.repo.mirrors.group(grabber)

			data = grabber.urlread(filename)

			with self.cache.open(filename, "w") as o:
				o.write(data)

			# Decompress the database.
			if self.metadata.database_compression:
				# Open the input file and remove the file immediately.
				# The fileobj is still open and the data will be removed
				# when it is closed.
				compress.decompress(self.cache.abspath(filename),
					algo=self.metadata.database_compression)

			# Check the hashsum of the downloaded file.
			if not util.calc_hash1(self.cache.abspath(filename)) == self.metadata.database_hash1:
				# XXX an exception is not a very good idea because this file could
				# be downloaded from another mirror. need a better way to handle this.

				# Remove the bad file from the cache.
				self.cache.remove(filename)

				raise Exception("Downloaded file did not match the hashsum. Need to re-download it.")

		# (Re-)open the database.
		self.read(self.cache.abspath(filename))


class IndexDir(Index):
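	"""
		Index that is built by scanning a local directory (or a single
		file) for package files of the repository's type.
	"""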
	def init(self):
		self.pkg_type = None

		if self.repo.type == "binary":
			self.pkg_type = packages.BinaryPackage
		elif self.repo.type == "source":
			self.pkg_type = packages.SourcePackage

		assert self.pkg_type

	def check(self):
		pass # XXX to be done

	@property
	def path(self):
		path = self.repo.path

		if path.startswith("file://"):
			path = path[7:]

		return path

	def update(self, force=False, offline=False):
		logging.debug("Updating repository index '%s' (force=%s)" % (self.path, force))

		# Do nothing if the update is not forced but populate the database
		# if no packages are present.
		if not force and len(self.repo):
			return

		# Collect all packages from the default path.
		self.collect_packages(self.path)

	def collect_packages(self, path):
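		"""
			Scan path for package files, add every package that matches
			this repository's type (and, for binary packages, its
			architecture) to the index and return the list of added
			packages.
		"""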
		logging.debug("Collecting all packages from %s" % path)
		pkgs = []

		# Get a filelist of all files that could possibly be packages.
		files = []

		if os.path.isdir(path):
			for dir, subdirs, _files in os.walk(path):
				for file in sorted(_files):
					# Skip files that do not have the right extension.
					if not file.endswith(".%s" % PACKAGE_EXTENSION):
						continue

					file = os.path.join(dir, file)
					files.append(file)
		elif os.path.isfile(path) and path.endswith(".%s" % PACKAGE_EXTENSION):
			files.append(path)

		if not files:
			return pkgs

		# Create progress bar.
		pb = util.make_progress(_("Loading from %s") % path, len(files))
		i = 0

		for file in files:
			if pb:
				i += 1
				pb.update(i)

			package = packages.open(self.pakfire, self.repo, file)

			# Find all packages with the given type and skip those of
			# the other type.
			if isinstance(package, self.pkg_type):
				# For binary packages, check if the architecture matches.
				if isinstance(package, packages.BinaryPackage) and \
						not package.arch in (self.repo.arch, "noarch"):
					logging.warning("Skipped package with wrong architecture: %s (%s)" \
						% (package.filename, package.arch))
					continue

			# Skip packages of any other type.
			else:
				continue

			self.add_package(package)
			pkgs.append(package)

		if pb:
			pb.finish()

		# Internalize the repository, so that all imported information
		# is available for access.
		self.solver_repo.internalize()

		return pkgs


class IndexLocal(Index):
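	"""
		Index that is backed by the local database of installed packages.
	"""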
	def init(self):
		self.db = database.DatabaseLocal(self.pakfire, self.repo)

	def check(self):
		# XXX Create the database and lock it or something.
		pass

	def update(self, force=True, offline=False):
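		# If the solver repo does not hold any solvables yet, it has to be
		# (re-)populated from the local database regardless of the force flag.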
		if self.solver_repo.size() == 0:
			force = True

		if force:
			package_count = len(self.db)

			# Nothing to do here if there are no packages in the database.
			if not package_count:
				return

			# Add all packages from the database to the index.
			pb = util.make_progress(_("Loading installed packages"), package_count)

			i = 0
			for pkg in self.db.packages:
				if pb:
					i += 1
					pb.update(i)

				self.add_package(pkg)

			if pb:
				pb.finish()