#!/usr/bin/python

import logging
import os

import database
import metadata

import pakfire.compress as compress
import pakfire.downloader as downloader
import pakfire.packages as packages
import pakfire.satsolver as satsolver
import pakfire.util as util

from pakfire.constants import *
from pakfire.i18n import _

class Index(object):
    def __init__(self, pakfire, repo):
        self.pakfire = pakfire

        # Create references to the repository and the solver repo.
        self.repo = repo
        self.solver_repo = repo.solver_repo

        self.init()

        # Check if initialization was okay.
        self.check()

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.repo)

    def __len__(self):
        return len(self.repo)

    @property
    def cache(self):
        return self.repo.cache

    def init(self):
        pass

    def check(self):
        """
        Check if everything was correctly initialized.
        """
        raise NotImplementedError

    def update(self, force=False):
        raise NotImplementedError

    def read(self, filename):
        """
        Read file in SOLV format from filename.
        """
        self.solver_repo.read(filename)

    def write(self, filename):
        """
        Write content to filename in SOLV format.
        """
        self.solver_repo.write(filename)

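    # A minimal usage sketch (hypothetical path; "index" stands for any
    # concrete Index subclass): read() and write() allow dumping the solver
    # data to disk and loading it back later.
    #
    #   index.write("/tmp/repo.solv")
    #   index.clear()
    #   index.read("/tmp/repo.solv")
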
    def create_relation(self, *args, **kwargs):
        return self.pakfire.create_relation(*args, **kwargs)

    def add_package(self, pkg):
        # XXX Skip packages without a UUID
        #if not pkg.uuid:
        #    logging.warning("Skipping package which lacks UUID: %s" % pkg)
        #    return

        # Skip packages that do not carry a build time.
        if not pkg.build_time:
            return

        logging.debug("Adding package to index %s: %s" % (self, pkg))

        solvable = satsolver.Solvable(self.solver_repo, pkg.name,
            pkg.friendly_version, pkg.arch)

        # Save metadata.
        if pkg.vendor:
            solvable.set_vendor(pkg.vendor)

        hash1 = pkg.hash1
        assert hash1
        solvable.set_hash1(hash1)

        assert pkg.uuid
        solvable.set_uuid(pkg.uuid)

        if pkg.maintainer:
            solvable.set_maintainer(pkg.maintainer)

        if pkg.groups:
            solvable.set_groups(" ".join(pkg.groups))

        # Save upstream information (summary, description, license, url).
        if pkg.summary:
            solvable.set_summary(pkg.summary)

        if pkg.description:
            solvable.set_description(pkg.description)

        if pkg.license:
            solvable.set_license(pkg.license)

        if pkg.url:
            solvable.set_url(pkg.url)

        # Save build information.
        if pkg.build_host:
            solvable.set_buildhost(pkg.build_host)

        if pkg.build_time:
            solvable.set_buildtime(pkg.build_time)

        # Save filename.
        filename = os.path.basename(pkg.filename)
        assert filename
        solvable.set_filename(filename)

        solvable.set_downloadsize(pkg.size)
        solvable.set_installsize(pkg.inst_size)

        # Import all requires.
        for req in pkg.requires:
            rel = self.create_relation(req)
            solvable.add_requires(rel)

        # Import all provides.
        for prov in pkg.provides:
            rel = self.create_relation(prov)
            solvable.add_provides(rel)

        # Import all conflicts.
        for conf in pkg.conflicts:
            rel = self.create_relation(conf)
            solvable.add_conflicts(rel)

        # Import all obsoletes.
        for obso in pkg.obsoletes:
            rel = self.create_relation(obso)
            solvable.add_obsoletes(rel)

        # Import all files that are in the package. The filemarker
        # separates the regular provides from the file list.
        rel = self.create_relation("solvable:filemarker")
        solvable.add_provides(rel)
        for file in pkg.filelist:
            rel = self.create_relation(file)
            solvable.add_provides(rel)

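    # A minimal usage sketch (hypothetical filename): every package fed into
    # add_package() ends up as one solvable in the solver repository.
    #
    #   pkg = packages.open(self.pakfire, self.repo, "/path/to/foo.pfm")
    #   index.add_package(pkg)
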
    def clear(self):
        """
        Forget all packages from memory.
        """
        self.solver_repo.clear()


class IndexSolv(Index):
    def check(self):
        pass # XXX to be done

    def update(self, force=False):
        self._update_metadata(force)
        self._update_database(force)

    def _update_metadata(self, force):
        filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)

        # Marker if we need to do the download.
        download = True

        # Marker for the current metadata.
        old_metadata = None

        if not force:
            # Check if the file exists and is not too old.
            if self.cache.exists(filename):
                age = self.cache.age(filename)
                if age and age < TIME_10M:
                    download = False
                    logging.debug("Metadata is recent enough. Not downloading it again.")

                # Open old metadata for comparison.
                old_metadata = metadata.Metadata(self.pakfire, self,
                    self.cache.abspath(filename))

        if download:
            logging.debug("Going to (re-)download the repository metadata.")

            # Initialize a grabber for download.
            grabber = downloader.MetadataDownloader()
            grabber = self.repo.mirrors.group(grabber)

            data = grabber.urlread(filename, limit=METADATA_DOWNLOAD_LIMIT)

            # Parse new metadata for comparison.
            new_metadata = metadata.Metadata(self.pakfire, self, metadata=data)

            if old_metadata and new_metadata < old_metadata:
                logging.warning("The downloaded metadata is older than the cached copy. Discarding it.")

            else:
                # We explicitly rewrite the metadata even if it is unchanged,
                # so the file gets a fresh timestamp and is not downloaded
                # over and over again.
                with self.cache.open(filename, "w") as o:
                    o.write(data)

        # Parse the metadata that we just downloaded or load it from cache.
        self.metadata = metadata.Metadata(self.pakfire, self,
            self.cache.abspath(filename))

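    # Sketch of the age check used above, in plain Python (this assumes
    # cache.age() returns the file age in seconds and TIME_10M means ten
    # minutes; the actual constants live in pakfire.constants):
    #
    #   import time
    #   age = time.time() - os.path.getmtime(path)
    #   if age < 600:
    #       pass  # cached copy is recent enough, skip the download
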
    def _update_database(self, force):
        # Construct cache and download filename.
        filename = os.path.join(METADATA_DOWNLOAD_PATH, self.metadata.database)

        if not self.cache.exists(filename):
            # Initialize a grabber for download.
            grabber = downloader.DatabaseDownloader(
                text = _("%s: package database") % self.repo.name,
            )
            grabber = self.repo.mirrors.group(grabber)

            data = grabber.urlread(filename)

            with self.cache.open(filename, "w") as o:
                o.write(data)

            # Decompress the database in place, with the algorithm
            # stated in the metadata.
            if self.metadata.database_compression:
                compress.decompress(self.cache.abspath(filename),
                    algo=self.metadata.database_compression)

            # Check the hashsum of the downloaded file.
            if util.calc_hash1(self.cache.abspath(filename)) != self.metadata.database_hash1:
                # XXX an exception is not a very good idea because this file
                # could be downloaded from another mirror. need a better way
                # to handle this.

                # Remove the bad file from the cache.
                self.cache.remove(filename)

                raise Exception("Downloaded file did not match the hashsum. Need to re-download it.")

        # (Re-)open the database.
        self.read(self.cache.abspath(filename))

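# A minimal sketch of such a hash1 check in plain Python (assuming hash1 is
# a SHA-1 hex digest; the real implementation is pakfire.util.calc_hash1):
#
#   import hashlib
#   h = hashlib.sha1()
#   with open(path, "rb") as f:
#       for chunk in iter(lambda: f.read(65536), ""):
#           h.update(chunk)
#   if h.hexdigest() != expected_hash1:
#       raise Exception("Hash mismatch")
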

class IndexDir(Index):
    def check(self):
        pass # XXX to be done

    @property
    def path(self):
        path = self.repo.path

        # Strip the file:// scheme from local paths.
        if path.startswith("file://"):
            path = path[7:]

        return path

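    # For example, a repository path of "file:///var/pakfire/repo" becomes
    # "/var/pakfire/repo"; plain filesystem paths are returned unchanged.
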
    def update(self, force=False):
        logging.debug("Updating repository index '%s' (force=%s)" % (self.path, force))

        # Do nothing if the update is not forced, but populate the database
        # if no packages are present.
        if not force and len(self.repo):
            return

        # Collect all packages from the default path.
        self.collect_packages(self.path)

    def collect_packages(self, path):
        logging.debug("Collecting all packages from %s" % path)
        pkgs = []

        # Get a filelist of all files that could possibly be packages.
        files = []
        for dir, subdirs, _files in os.walk(path):
            for file in sorted(_files):
                # Skip files that do not have the right extension.
                if not file.endswith(".%s" % PACKAGE_EXTENSION):
                    continue

                file = os.path.join(dir, file)
                files.append(file)

        if not files:
            return pkgs

        # Create progress bar.
        pb = util.make_progress(_("Loading from %s") % path, len(files))
        i = 0

        for file in files:
            if pb:
                i += 1
                pb.update(i)

            package = packages.open(self.pakfire, self.repo, file)

            if isinstance(package, packages.BinaryPackage):
                if package.arch not in (self.repo.arch, "noarch"):
                    logging.warning("Skipped package with wrong architecture: %s (%s)"
                        % (package.filename, package.arch))
                    continue

            # Skip all source packages.
            elif isinstance(package, packages.SourcePackage):
                continue

            self.add_package(package)
            pkgs.append(package)

        if pb:
            pb.finish()

        return pkgs

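# Usage sketch (hypothetical setup): an IndexDir scans a directory tree for
# package files and adds every matching binary package to the solver index.
#
#   index = IndexDir(pakfire, repo)   # repo.path points at a directory
#   pkgs = index.collect_packages(index.path)
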

class IndexLocal(Index):
    def init(self):
        self.db = database.DatabaseLocal(self.pakfire, self.repo)

    def check(self):
        # XXX Create the database and lock it or something.
        pass

    def update(self, force=True):
        if self.solver_repo.size() == 0:
            force = True

        if force:
            package_count = len(self.db)

            # Nothing to do here if there are no packages in the database.
            if not package_count:
                return

            # Add all packages from the database to the index.
            pb = util.make_progress(_("Loading installed packages"), package_count)

            i = 0
            for pkg in self.db.packages:
                if pb:
                    i += 1
                    pb.update(i)

                self.add_package(pkg)

            if pb:
                pb.finish()
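
# Usage sketch (hypothetical setup): IndexLocal exposes the database of
# installed packages to the solver, so dependency resolution can take the
# current system state into account.
#
#   index = IndexLocal(pakfire, installed_repo)
#   index.update()  # loads every installed package into the solver repo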