]>
git.ipfire.org Git - pakfire.git/blob - python/pakfire/repository/remote.py
410750acb936303572b3a21d716ce0583f9532ab
###############################################################################
#                                                                             #
# Pakfire - The IPFire package management system                              #
# Copyright (C) 2011 Pakfire development team                                 #
#                                                                             #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation, either version 3 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# This program is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with this program.  If not, see <http://www.gnu.org/licenses/>.       #
#                                                                             #
###############################################################################
# Module-level logger for this module, using the shared "pakfire" logger.
# NOTE(review): scrape artifact — the leading "26" is the gitweb viewer's
# line number, and the single statement `log = logging.getLogger("pakfire")`
# is split across three physical lines; the corresponding `import logging`
# line is not visible in this scrape.
26 log
= logging
.getLogger("pakfire")
32 import pakfire
.compress
as compress
33 import pakfire
.downloader
as downloader
35 from pakfire
.constants
import *
36 from pakfire
.i18n
import _
38 class RepositoryRemote(base
.RepositoryFactory
):
39 # XXX TODO Make metadata age configureable.
# RepositoryRemote.__init__ (fragment — lossy gitweb scrape: leading
# integers are the viewer's line numbers, statements are split across
# physical lines, and several original source lines are absent, e.g.
# 50-53 between the visible `if enabled in (...):` and the cache setup).
# Visible behavior: store the config-file **settings on self, chain up to
# base.RepositoryFactory.__init__(pakfire, name, description), read the
# "enabled" setting (default True; truthy spellings "1"/"yes"/"on"/True/1
# — the branch bodies are in the missing lines), create a
# cache.RepositoryCache and a downloader.MirrorList (from the optional
# "mirrors" setting) for this repository, then open any cached metadata
# via self.open_metadata().
41 def __init__(self
, pakfire
, name
, description
=None, **settings
):
42 # Save the settings that come from the configuration file.
43 self
.settings
= settings
45 base
.RepositoryFactory
.__init
__(self
, pakfire
, name
, description
)
47 # Enabled/disable the repository, based on the configuration setting.
48 enabled
= self
.settings
.get("enabled", True)
49 if enabled
in ("1", "yes", "on", True, 1):
# -- original lines 50-53 missing here (the enabled/else branch bodies;
#    cannot be reconstructed from this fragment) --
54 # Create an cache object
55 self
.cache
= cache
.RepositoryCache(self
.pakfire
, self
)
57 # Initialize mirror servers.
58 mirrorlist
= self
.settings
.get("mirrors", None)
59 self
.mirrors
= downloader
.MirrorList(self
.pakfire
, self
, mirrorlist
)
61 # Open metadata if any.
62 self
.metadata
= self
.open_metadata()
66 return self
.settings
.get("baseurl")
70 keyfile
= self
.settings
.get("keyfile", None)
72 keyfile
= self
.settings
.get("gpgkey", None)
78 priority
= self
.settings
.get("priority", None)
79 if not priority
is None:
80 # Try to concert the given input to an integer
81 # and return the value if possible.
83 priority
= int(priority
)
89 # The default priority is 100.
97 for url
, prio
in url2priority
.items():
98 if self
.baseurl
.startswith(url
):
104 def cache_path(self
, *paths
):
115 base
.RepositoryFactory
.clean(self
)
117 # Remove all files in the files cache.
121 # First update the repository metadata.
122 self
.update_metadata()
123 self
.update_database()
128 # Mark the repository as open.
132 # Mark the repository as not open.
# open_metadata (fragment) — return a metadata.Metadata object for the
# cached repository metadata file, if one exists in the cache.
# NOTE(review): original line 136 is missing from this scrape, so the
# nesting of the two `path = ...` statements (presumably guarded by the
# missing line when `path` is None — confirm against the full source)
# cannot be determined here. Visible behavior: derive the cache-relative
# path of METADATA_DOWNLOAD_FILE via self.cache_path(), resolve it with
# self.cache.abspath(), and if self.cache.exists(path) return
# metadata.Metadata(self.pakfire, path); otherwise there is no visible
# return (implicitly None — presumably; verify against callers).
135 def open_metadata(self
, path
=None):
137 path
= self
.cache_path(os
.path
.basename(METADATA_DOWNLOAD_FILE
))
138 path
= self
.cache
.abspath(path
)
140 if self
.cache
.exists(path
):
141 return metadata
.Metadata(self
.pakfire
, path
)
# update_metadata (fragment) — refresh the repository metadata file in the
# local cache, honouring `force` and `offline`.
# NOTE(review): heavily lossy scrape; numerous original lines are absent
# (gaps in the embedded numbering), including the retry loop and `try:`
# header that the visible `except` at original line 174 belongs to, the
# early returns, and the cache write/close around line 191. Visible
# behavior: build the download filename (METADATA_DOWNLOAD_PATH +
# METADATA_DOWNLOAD_FILE) and its cache filename; raise OfflineModeError
# (Python 2 `raise E, msg` syntax) when offline with no cached copy; when
# not forced and a cached copy younger than TIME_10M exists, skip the
# download; otherwise fetch via downloader.MetadataDownloader wrapped in
# self.mirrors.group(), reading at most METADATA_DOWNLOAD_LIMIT bytes;
# on urlgrabber URLGrabError raise DownloadError / advance to the next
# mirror via grabber.increment_mirror(); reject downloaded metadata that
# compares older than the current one (md < self.metadata) and try
# another mirror; finally write the data to the cache file and re-open it
# through self.open_metadata().
143 def update_metadata(self
, force
=False, offline
=False):
144 filename
= os
.path
.join(METADATA_DOWNLOAD_PATH
, METADATA_DOWNLOAD_FILE
)
145 cache_filename
= self
.cache_path(os
.path
.basename(filename
))
147 # Check if the metadata is already recent enough...
148 exists
= self
.cache
.exists(cache_filename
)
150 if not exists
and offline
:
151 raise OfflineModeError
, _("No metadata available for repository %s. Cannot download any.") \
154 elif exists
and offline
:
155 # Repository metadata exists. We cannot update anything because of the offline mode.
158 if not force
and exists
:
159 age
= self
.cache
.age(cache_filename
)
160 if age
and age
< TIME_10M
:
161 log
.debug("Metadata is recent enough. I don't download it again.")
164 # Going to download metada.
165 log
.debug("Going to download repository metadata for %s..." % self
.name
)
168 grabber
= downloader
.MetadataDownloader(self
.pakfire
)
169 grabber
= self
.mirrors
.group(grabber
)
# -- original lines 170-172 missing here (presumably the mirror-retry
#    loop and the `try:` that the next `except` belongs to) --
173 data
= grabber
.urlread(filename
, limit
=METADATA_DOWNLOAD_LIMIT
)
174 except urlgrabber
.grabber
.URLGrabError
, e
:
176 raise DownloadError
, _("Could not update metadata for %s from any mirror server") % self
.name
178 grabber
.increment_mirror(grabber
)
181 # Parse new metadata for comparison.
182 md
= metadata
.Metadata(self
.pakfire
, metadata
=data
)
184 if self
.metadata
and md
< self
.metadata
:
185 log
.warning(_("The downloaded metadata was less recent than the current one."))
186 grabber
.increment_mirror(grabber
)
189 # If the download went well, we write the downloaded data to disk
190 # and break the loop.
191 f
= self
.cache
.open(cache_filename
, "w")
# -- original lines 192-197 missing here (presumably the write/close of
#    `f` and loop exit) --
198 self
.metadata
= self
.open_metadata()
# open_database (fragment) — load the cached package database into
# self.index.
# NOTE(review): original lines 208-209 are missing from this scrape
# (between the existence assert and `self.index.read(...)` — presumably an
# index reset; cannot tell from here). Visible behavior: requires
# self.metadata to be open; resolves the cached "database" file named by
# self.metadata.database to an absolute path via self.cache.abspath();
# asserts the file exists on disk; reads it into self.index.
201 def open_database(self
):
202 assert self
.metadata
, "Metadata needs to be openend first."
204 filename
= self
.cache_path("database", self
.metadata
.database
)
205 filename
= self
.cache
.abspath(filename
)
207 assert os
.path
.exists(filename
)
210 self
.index
.read(filename
)
# update_database (fragment) — download the package database referenced by
# the repository metadata into the local cache.
# NOTE(review): lossy scrape; numerous original lines are absent,
# including the copy loop around the visible BUFFER_SIZE read at original
# line 252 and the close calls after line 263. Visible behavior: requires
# open metadata; forces a (re)download when the cache copy of
# self.metadata.database is absent; raises OfflineModeError (Python 2
# `raise E, msg` syntax) if an update is required while offline; downloads
# via downloader.DatabaseDownloader wrapped in self.mirrors.group(); wraps
# the server file object in compress.decompressobj(...) when
# self.metadata.database_compression is set; streams into a cache file
# opened "wb"; the trailing `if not urlobj == fileobj:` presumably guards
# closing the decompression wrapper separately from the URL object.
212 def update_database(self
, force
=False, offline
=False):
213 assert self
.metadata
, "Metadata needs to be openend first."
215 # Construct cache and download filename.
216 filename
= os
.path
.join(METADATA_DOWNLOAD_PATH
, self
.metadata
.database
)
217 cache_filename
= self
.cache_path("database", self
.metadata
.database
)
220 force
= not self
.cache
.exists(cache_filename
)
222 # Raise an exception when we are running in offline mode but an update is required.
223 if force
and offline
:
224 raise OfflineModeError
, _("Cannot download package database for %s in offline mode.") % self
.name
229 # Just make sure we don't try to download anything in offline mode.
232 # Initialize a grabber for download.
233 grabber
= downloader
.DatabaseDownloader(
235 text
= _("%s: package database") % self
.name
,
237 grabber
= self
.mirrors
.group(grabber
)
240 # Open file on server.
241 urlobj
= fileobj
= grabber
.urlopen(filename
)
243 if self
.metadata
.database_compression
:
244 fileobj
= compress
.decompressobj(fileobj
=fileobj
,
245 algo
=self
.metadata
.database_compression
)
247 # Make a new file in the cache.
248 cacheobj
= self
.cache
.open(cache_filename
, "wb")
# -- original lines 249-251 and 253-257 missing here (presumably the
#    read/write copy loop surrounding the next read) --
252 buf
= fileobj
.read(BUFFER_SIZE
)
258 # XXX we should catch decompression errors
260 # Close all file descriptors.
263 if not urlobj
== fileobj
:
# download (fragment) — fetch the package file for `pkg` into the local
# cache and return its local path.
# NOTE(review): lossy scrape; many original lines are absent, including
# the download/retry loop header, the hash-match early exit after the
# cache verify, and the body of the copy loop. Visible behavior: take
# pkg.filename / pkg.hash1 / pkg.cache_filename; if the file already
# exists in the cache and its hash1 verifies, skip the download (per the
# original comments), otherwise remove the stale copy; build a
# downloader.PackageDownloader wrapped in self.mirrors.group(); coerce
# filename to str (Python 2 unicode concern, per the original comment);
# raise OfflineModeError when self.pakfire.offline unless baseurl starts
# with "file://"; urlopen the file, mapping urlgrabber URLGrabError to
# DownloadError (Python 2 `raise E, msg` syntax); stream BUFFER_SIZE
# chunks into a cache file opened "w"; recompute hash1 of the download
# and, on mismatch, warn (`sums` is built in missing lines), drop the bad
# file and advance to the next mirror via grabber.increment_mirror();
# finally return os.path.join(self.cache.path, cache_filename).
268 def download(self
, pkg
, text
="", logger
=None):
270 Downloads 'filename' from repository and returns the local filename.
275 filename
, hash1
= pkg
.filename
, pkg
.hash1
277 # Marker, if we need to download the package.
280 cache_filename
= pkg
.cache_filename
282 # Check if file already exists in cache.
283 if self
.cache
.exists(cache_filename
):
284 logger
.debug("File exists in cache: %s" % filename
)
286 # If the file does already exist, we check if the hash1 matches.
287 if hash1
and self
.cache
.verify(cache_filename
, hash1
):
288 # We already got the right file. Skip download.
291 # The file in cache has a wrong hash. Remove it and repeat download.
292 self
.cache
.remove(cache_filename
)
294 # Get a package grabber and add mirror download capabilities to it.
295 grabber
= downloader
.PackageDownloader(
297 text
=text
+ os
.path
.basename(filename
),
299 grabber
= self
.mirrors
.group(grabber
)
301 # Make sure filename is of type string (and not unicode)
302 filename
= str(filename
)
305 logger
.debug("Going to download %s" % filename
)
307 # If we are in offline mode, we cannot download any files.
308 if self
.pakfire
.offline
and not self
.baseurl
.startswith("file://"):
309 raise OfflineModeError
, _("Cannot download this file in offline mode: %s") \
# -- original lines 310-312 missing here (presumably the retry loop and
#    `try:` that the next `except` belongs to) --
313 i
= grabber
.urlopen(filename
)
314 except urlgrabber
.grabber
.URLGrabError
, e
:
315 raise DownloadError
, _("Could not download %s: %s") % (filename
, e
)
317 # Open input and output files and download the file.
318 o
= self
.cache
.open(cache_filename
, "w")
320 buf
= i
.read(BUFFER_SIZE
)
323 buf
= i
.read(BUFFER_SIZE
)
328 # Calc the hash1 of the downloaded file.
329 calc_hash1
= self
.cache
.hash1(cache_filename
)
331 if calc_hash1
== hash1
:
332 logger
.debug("Successfully downloaded %s (%s)." % (filename
, hash1
))
# -- original lines 333-339 missing here (presumably the success exit and
#    the construction of `sums` used below) --
340 logger
.warning(_("The checksum of the downloaded file did not match."))
341 logger
.warning(_("Expected %(good)s but got %(bad)s.") % sums
)
342 logger
.warning(_("Trying an other mirror."))
344 # Remove the bad file.
345 self
.cache
.remove(cache_filename
)
347 # Go to the next mirror.
348 grabber
.increment_mirror(grabber
)
350 return os
.path
.join(self
.cache
.path
, cache_filename
)
352 def get_config(self
):
359 "[repo:%s]" % self
.name
,
360 "description = %s" % self
.description
,
361 "enabled = %s" % enabled
,
362 "baseurl = %s" % self
.baseurl
,
365 if self
.mirrors
.mirrorlist
:
366 lines
.append("mirrors = %s" % self
.mirrors
.mirrorlist
)
369 #"gpgkey = %s" % self.keyfile,
370 "priority = %s" % self
.priority
,