From: Michael Tremer Date: Sat, 5 Mar 2011 19:13:34 +0000 (+0100) Subject: Unify compressions. X-Git-Tag: 0.9.3~95^2~2 X-Git-Url: http://git.ipfire.org/gitweb.cgi?a=commitdiff_plain;h=c1fbb0b7f7246ed3bb16e027b7de7df6593b5594;p=pakfire.git Unify compressions. This makes it much simpler for the developers to compress or decompress files in place. --- diff --git a/pakfire/constants.py b/pakfire/constants.py index b766da497..284b9812d 100644 --- a/pakfire/constants.py +++ b/pakfire/constants.py @@ -21,7 +21,7 @@ PACKAGES_DB_DIR = "var/lib/pakfire" PACKAGES_DB = os.path.join(PACKAGES_DB_DIR, "packages.db") REPOSITORY_DB = "index.db" -BUFFER_SIZE = 1024**2 +BUFFER_SIZE = 102400 MIRRORLIST_MAXSIZE = 1024**2 diff --git a/pakfire/packages/packager.py b/pakfire/packages/packager.py index e2e4ef124..8fe64524d 100644 --- a/pakfire/packages/packager.py +++ b/pakfire/packages/packager.py @@ -13,6 +13,8 @@ import uuid import xattr import zlib +import pakfire.compress + from pakfire.constants import * from pakfire.i18n import _ @@ -354,28 +356,9 @@ class Packager(object): logging.debug("Compressing package with %s algorithm." % compress or "no") - filename = self.archive_files["data.img"] - i = open(filename) - os.unlink(filename) - - o = open(filename, "w") - - if compress == "xz": - comp = lzma.LZMACompressor() - - elif compress == "zlib": - comp = zlib.compressobj(9) - - buf = i.read(BUFFER_SIZE) - while buf: - o.write(comp.compress(buf)) - - buf = i.read(BUFFER_SIZE) - - o.write(comp.flush()) - - i.close() - o.close() + # Compress file (in place). 
+ pakfire.compress.compress(self.archive_files["data.img"], + algo=compress, progress=True) def create_info(self): f = open(self.archive_files["info"], "w") diff --git a/pakfire/repository/index.py b/pakfire/repository/index.py index a0c68b7e9..9ed863e10 100644 --- a/pakfire/repository/index.py +++ b/pakfire/repository/index.py @@ -3,17 +3,16 @@ import fnmatch import json import logging -import lzma import os import random import shutil import time -import zlib import database import downloader import metadata +import pakfire.compress as compress import pakfire.packages as packages import pakfire.util as util @@ -216,27 +215,7 @@ class LocalIndex(DatabaseIndexFactory): # Compress the database. if compress: - i = open(db_path) - os.unlink(db_path) - - o = open(db_path, "w") - - # Choose a compressor. - if compress == "xz": - comp = lzma.LZMACompressor() - elif compress == "zlib": - comp = zlib.compressobj(9) - - buf = i.read(BUFFER_SIZE) - while buf: - o.write(comp.compress(buf)) - - buf = i.read(BUFFER_SIZE) - - o.write(comp.flush()) - - i.close() - o.close() + compress.compress(db_path, algo=compress, progress=True) if not os.path.exists(db_path2): shutil.move(db_path, db_path2) @@ -332,29 +311,8 @@ class RemoteIndex(DatabaseIndexFactory): # Open input file and remove the file immediately. # The fileobj is still open and the data will be removed # when it is closed. - i = cache.open(filename) - cache.remove(filename) - - # Open output file. - o = cache.open(filename, "w") - - # Choose a decompessor. 
- if self.metadata.database_compression == "xz": - comp = lzma.LZMADecompressor() - - elif self.metadata.database_compression == "zlib": - comp = zlib.decompressobj() - - buf = i.read(BUFFER_SIZE) - while buf: - o.write(comp.decompress(buf)) - - buf = i.read(BUFFER_SIZE) - - o.write(comp.flush()) - - i.close() - o.close() + compress.decompress(cache.abspath(filename), + algo=self.metadata.database_compression) # check the hashsum of the downloaded file if not util.calc_hash1(cache.abspath(filename)) == self.metadata.database_hash1: