]> git.ipfire.org Git - pakfire.git/blob - python/pakfire/repository/remote.py
410750acb936303572b3a21d716ce0583f9532ab
[pakfire.git] / python / pakfire / repository / remote.py
1 #!/usr/bin/python
2 ###############################################################################
3 # #
4 # Pakfire - The IPFire package management system #
5 # Copyright (C) 2011 Pakfire development team #
6 # #
7 # This program is free software: you can redistribute it and/or modify #
8 # it under the terms of the GNU General Public License as published by #
9 # the Free Software Foundation, either version 3 of the License, or #
10 # (at your option) any later version. #
11 # #
12 # This program is distributed in the hope that it will be useful, #
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of #
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
15 # GNU General Public License for more details. #
16 # #
17 # You should have received a copy of the GNU General Public License #
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. #
19 # #
20 ###############################################################################
21
22 import os
23 import urlgrabber
24
25 import logging
26 log = logging.getLogger("pakfire")
27
28 import base
29 import cache
30 import metadata
31
32 import pakfire.compress as compress
33 import pakfire.downloader as downloader
34
35 from pakfire.constants import *
36 from pakfire.i18n import _
37
class RepositoryRemote(base.RepositoryFactory):
	"""
		A package repository that lives behind a (possibly remote) base URL.

		Metadata and the package database are downloaded through a mirror
		list and kept in a local cache; packages are fetched on demand and
		verified against their hash before use.
	"""
	# XXX TODO Make metadata age configureable.

	def __init__(self, pakfire, name, description=None, **settings):
		"""
			Create a new remote repository.

			pakfire     -- the Pakfire instance this repository belongs to.
			name        -- the section name of the repository.
			description -- optional human-readable description.
			settings    -- key/value settings from the configuration file
			               (baseurl, mirrors, enabled, priority, keyfile, ...).
		"""
		# Save the settings that come from the configuration file.
		# NOTE(review): assigned before the parent constructor runs -
		# presumably the base class reads self.settings; confirm before
		# reordering.
		self.settings = settings

		base.RepositoryFactory.__init__(self, pakfire, name, description)

		# Enabled/disable the repository, based on the configuration setting.
		# Accepts the usual truthy spellings from INI files.
		enabled = self.settings.get("enabled", True)
		if enabled in ("1", "yes", "on", True, 1):
			self.enabled = True
		else:
			self.enabled = False

		# Create an cache object
		self.cache = cache.RepositoryCache(self.pakfire, self)

		# Initialize mirror servers.
		mirrorlist = self.settings.get("mirrors", None)
		self.mirrors = downloader.MirrorList(self.pakfire, self, mirrorlist)

		# Open metadata if any.
		self.metadata = self.open_metadata()
63
64 @property
65 def baseurl(self):
66 return self.settings.get("baseurl")
67
68 @property
69 def keyfile(self):
70 keyfile = self.settings.get("keyfile", None)
71 if keyfile is None:
72 keyfile = self.settings.get("gpgkey", None)
73
74 return keyfile
75
76 @property
77 def priority(self):
78 priority = self.settings.get("priority", None)
79 if not priority is None:
80 # Try to concert the given input to an integer
81 # and return the value if possible.
82 try:
83 priority = int(priority)
84 return priority
85
86 except ValueError:
87 pass
88
89 # The default priority is 100.
90 priority = 100
91
92 url2priority = {
93 "file://" : 50,
94 "http://" : 75,
95 }
96
97 for url, prio in url2priority.items():
98 if self.baseurl.startswith(url):
99 priority = prio
100 break
101
102 return priority
103
104 def cache_path(self, *paths):
105 return os.path.join(
106 "repodata",
107 self.distro.sname,
108 self.distro.release,
109 self.name,
110 self.distro.arch,
111 *paths
112 )
113
	def clean(self):
		"""
			Clean up this repository.

			Runs the generic cleanup of the base class and then drops
			everything this repository has cached on disk.
		"""
		base.RepositoryFactory.clean(self)

		# Remove all files in the files cache.
		self.cache.destroy()
119
	def open(self):
		"""
			Open this repository for use.

			Refreshes the metadata and the package database (downloading
			them if required), loads the database into the index and
			flags the repository as opened.
		"""
		# First update the repository metadata.
		self.update_metadata()
		self.update_database()

		# Read the database.
		self.open_database()

		# Mark the repository as open.
		self.opened = True
130
	def close(self):
		"""
			Close this repository.

			Only clears the "opened" flag; cached files stay on disk
			(use clean() to remove them).
		"""
		# Mark the repository as not open.
		self.opened = False
134
135 def open_metadata(self, path=None):
136 if not path:
137 path = self.cache_path(os.path.basename(METADATA_DOWNLOAD_FILE))
138 path = self.cache.abspath(path)
139
140 if self.cache.exists(path):
141 return metadata.Metadata(self.pakfire, path)
142
	def update_metadata(self, force=False, offline=False):
		"""
			Update the cached repository metadata file.

			force   -- download even when the cached copy is recent enough.
			offline -- never hit the network; raises OfflineModeError when
			           no cached metadata exists at all.

			Loops over the mirror list until a download succeeds that is at
			least as recent as the currently cached metadata.
		"""
		filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)
		cache_filename = self.cache_path(os.path.basename(filename))

		# Check if the metadata is already recent enough...
		exists = self.cache.exists(cache_filename)

		if not exists and offline:
			raise OfflineModeError, _("No metadata available for repository %s. Cannot download any.") \
				% self.name

		elif exists and offline:
			# Repository metadata exists. We cannot update anything because of the offline mode.
			return

		if not force and exists:
			# Skip the download while the cached copy is younger than TIME_10M.
			age = self.cache.age(cache_filename)
			if age and age < TIME_10M:
				log.debug("Metadata is recent enough. I don't download it again.")
				return

		# Going to download metadata.
		log.debug("Going to download repository metadata for %s..." % self.name)
		assert not offline

		grabber = downloader.MetadataDownloader(self.pakfire)
		grabber = self.mirrors.group(grabber)

		while True:
			try:
				data = grabber.urlread(filename, limit=METADATA_DOWNLOAD_LIMIT)
			except urlgrabber.grabber.URLGrabError, e:
				# errno 256 - presumably "no more mirrors to retry"
				# (urlgrabber mirror group); give up entirely then.
				if e.errno == 256:
					raise DownloadError, _("Could not update metadata for %s from any mirror server") % self.name

				# Any other error: move on to the next mirror and retry.
				grabber.increment_mirror(grabber)
				continue

			# Parse new metadata for comparison.
			md = metadata.Metadata(self.pakfire, metadata=data)

			if self.metadata and md < self.metadata:
				# A stale mirror handed us older metadata than we already
				# have - try another mirror instead.
				log.warning(_("The downloaded metadata was less recent than the current one."))
				grabber.increment_mirror(grabber)
				continue

			# If the download went well, we write the downloaded data to disk
			# and break the loop.
			f = self.cache.open(cache_filename, "w")
			f.write(data)
			f.close()

			break

		# Re-open metadata.
		self.metadata = self.open_metadata()
		assert self.metadata
200
201 def open_database(self):
202 assert self.metadata, "Metadata needs to be openend first."
203
204 filename = self.cache_path("database", self.metadata.database)
205 filename = self.cache.abspath(filename)
206
207 assert os.path.exists(filename)
208
209 self.index.clear()
210 self.index.read(filename)
211
	def update_database(self, force=False, offline=False):
		"""
			Download the package database referenced by the current metadata.

			force   -- re-download even when a cached copy exists.
			offline -- never hit the network; raises OfflineModeError when a
			           download would be required.

			The file is decompressed on the fly (when the metadata says it
			is compressed) and written to the repository cache.
		"""
		assert self.metadata, "Metadata needs to be openend first."

		# Construct cache and download filename.
		filename = os.path.join(METADATA_DOWNLOAD_PATH, self.metadata.database)
		cache_filename = self.cache_path("database", self.metadata.database)

		if not force:
			# A missing cached copy makes the download mandatory.
			force = not self.cache.exists(cache_filename)

		# Raise an exception when we are running in offline mode but an update is required.
		if force and offline:
			raise OfflineModeError, _("Cannot download package database for %s in offline mode.") % self.name

		elif not force:
			return

		# Just make sure we don't try to download anything in offline mode.
		assert not offline

		# Initialize a grabber for download.
		grabber = downloader.DatabaseDownloader(
			self.pakfire,
			text = _("%s: package database") % self.name,
		)
		grabber = self.mirrors.group(grabber)

		while True:
			# Open file on server.
			urlobj = fileobj = grabber.urlopen(filename)

			if self.metadata.database_compression:
				# Wrap the remote file in a decompressor so the database
				# is stored uncompressed in the cache.
				fileobj = compress.decompressobj(fileobj=fileobj,
					algo=self.metadata.database_compression)

			# Make a new file in the cache.
			cacheobj = self.cache.open(cache_filename, "wb")

			try:
				# Copy the remote file into the cache in chunks.
				while True:
					buf = fileobj.read(BUFFER_SIZE)
					if not buf:
						break
					cacheobj.write(buf)

			finally:
				# XXX we should catch decompression errors

				# Close all file descriptors.
				cacheobj.close()
				fileobj.close()
				if not urlobj == fileobj:
					urlobj.close()

			break
267
268 def download(self, pkg, text="", logger=None):
269 """
270 Downloads 'filename' from repository and returns the local filename.
271 """
272 if logger is None:
273 logger = log
274
275 filename, hash1 = pkg.filename, pkg.hash1
276
277 # Marker, if we need to download the package.
278 download = True
279
280 cache_filename = pkg.cache_filename
281
282 # Check if file already exists in cache.
283 if self.cache.exists(cache_filename):
284 logger.debug("File exists in cache: %s" % filename)
285
286 # If the file does already exist, we check if the hash1 matches.
287 if hash1 and self.cache.verify(cache_filename, hash1):
288 # We already got the right file. Skip download.
289 download = False
290 else:
291 # The file in cache has a wrong hash. Remove it and repeat download.
292 self.cache.remove(cache_filename)
293
294 # Get a package grabber and add mirror download capabilities to it.
295 grabber = downloader.PackageDownloader(
296 self.pakfire,
297 text=text + os.path.basename(filename),
298 )
299 grabber = self.mirrors.group(grabber)
300
301 # Make sure filename is of type string (and not unicode)
302 filename = str(filename)
303
304 while download:
305 logger.debug("Going to download %s" % filename)
306
307 # If we are in offline mode, we cannot download any files.
308 if self.pakfire.offline and not self.baseurl.startswith("file://"):
309 raise OfflineModeError, _("Cannot download this file in offline mode: %s") \
310 % filename
311
312 try:
313 i = grabber.urlopen(filename)
314 except urlgrabber.grabber.URLGrabError, e:
315 raise DownloadError, _("Could not download %s: %s") % (filename, e)
316
317 # Open input and output files and download the file.
318 o = self.cache.open(cache_filename, "w")
319
320 buf = i.read(BUFFER_SIZE)
321 while buf:
322 o.write(buf)
323 buf = i.read(BUFFER_SIZE)
324
325 i.close()
326 o.close()
327
328 # Calc the hash1 of the downloaded file.
329 calc_hash1 = self.cache.hash1(cache_filename)
330
331 if calc_hash1 == hash1:
332 logger.debug("Successfully downloaded %s (%s)." % (filename, hash1))
333 break
334
335 sums = {
336 "good" : hash1,
337 "bad" : calc_hash1,
338 }
339
340 logger.warning(_("The checksum of the downloaded file did not match."))
341 logger.warning(_("Expected %(good)s but got %(bad)s.") % sums)
342 logger.warning(_("Trying an other mirror."))
343
344 # Remove the bad file.
345 self.cache.remove(cache_filename)
346
347 # Go to the next mirror.
348 grabber.increment_mirror(grabber)
349
350 return os.path.join(self.cache.path, cache_filename)
351
352 def get_config(self):
353 if self.enabled:
354 enabled = "1"
355 else:
356 enabled = "0"
357
358 lines = [
359 "[repo:%s]" % self.name,
360 "description = %s" % self.description,
361 "enabled = %s" % enabled,
362 "baseurl = %s" % self.baseurl,
363 ]
364
365 if self.mirrors.mirrorlist:
366 lines.append("mirrors = %s" % self.mirrors.mirrorlist)
367
368 lines += [
369 #"gpgkey = %s" % self.keyfile,
370 "priority = %s" % self.priority,
371 ]
372
373 return lines