git.ipfire.org Git - pakfire.git/commitdiff
Unify compression handling.
author Michael Tremer <michael.tremer@ipfire.org>
Sat, 5 Mar 2011 19:13:34 +0000 (20:13 +0100)
committer Michael Tremer <michael.tremer@ipfire.org>
Sat, 5 Mar 2011 19:13:34 +0000 (20:13 +0100)
This makes it much simpler for developers to compress or decompress files in place.

pakfire/constants.py
pakfire/packages/packager.py
pakfire/repository/index.py
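
The new pakfire/compress.py module itself is not shown in this commit. Below is a minimal sketch of the in-place helpers the call sites in the diff rely on; the function names and keyword arguments come from the diff, while the helper name, the binary file modes and the (ignored) progress handling are assumptions:

# pakfire/compress.py -- sketch only, inferred from the call sites in this commit.
import lzma
import os
import zlib

from pakfire.constants import BUFFER_SIZE

def compress(filename, algo="xz", progress=False):
	# Pick a compressor for the requested algorithm.
	if algo == "xz":
		comp = lzma.LZMACompressor()
	elif algo == "zlib":
		comp = zlib.compressobj(9)
	else:
		return	# No (or unknown) algorithm: leave the file untouched.

	_rewrite(filename, comp.compress, comp.flush, progress)

def decompress(filename, algo="xz", progress=False):
	# Pick the matching decompressor.
	if algo == "xz":
		comp = lzma.LZMADecompressor()
	elif algo == "zlib":
		comp = zlib.decompressobj()
	else:
		return

	_rewrite(filename, comp.decompress, getattr(comp, "flush", None), progress)

def _rewrite(filename, process, flush, progress):
	# Same trick as the open-coded loops this commit removes: keep the input
	# handle open, unlink the path, and write the processed data back to it.
	i = open(filename, "rb")
	os.unlink(filename)
	o = open(filename, "wb")

	# progress is accepted but ignored in this sketch; the real module
	# presumably updates a progress bar while copying BUFFER_SIZE chunks.
	buf = i.read(BUFFER_SIZE)
	while buf:
		o.write(process(buf))
		buf = i.read(BUFFER_SIZE)

	# Compressors (and zlib decompressors) hold back data until flushed.
	if flush:
		o.write(flush())

	i.close()
	o.close()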

index b766da497afc50e1d99d3ab75be1eabcd67fc364..284b9812dc662d4f858ab3c0d5f323c1779d31f6 100644 (file)
@@ -21,7 +21,7 @@ PACKAGES_DB_DIR = "var/lib/pakfire"
 PACKAGES_DB = os.path.join(PACKAGES_DB_DIR, "packages.db")
 REPOSITORY_DB = "index.db"
 
-BUFFER_SIZE = 1024**2
+BUFFER_SIZE = 102400
 
 MIRRORLIST_MAXSIZE = 1024**2
 
index e2e4ef124bcde951f884dca8ae53b9f823f5b03a..8fe64524d332713c26b1dfa666c44033c0f14e08 100644 (file)
@@ -13,6 +13,8 @@ import uuid
 import xattr
 import zlib
 
+import pakfire.compress
+
 from pakfire.constants import *
 from pakfire.i18n import _
 
@@ -354,28 +356,9 @@ class Packager(object):
 
                        logging.debug("Compressing package with %s algorithm." % compress or "no")
 
-                       filename = self.archive_files["data.img"]
-                       i = open(filename)
-                       os.unlink(filename)
-
-                       o = open(filename, "w")
-
-                       if compress == "xz":
-                               comp = lzma.LZMACompressor()
-
-                       elif compress == "zlib":
-                               comp = zlib.compressobj(9)
-
-                       buf = i.read(BUFFER_SIZE)
-                       while buf:
-                               o.write(comp.compress(buf))
-
-                               buf = i.read(BUFFER_SIZE)
-
-                       o.write(comp.flush())
-
-                       i.close()
-                       o.close()
+                       # Compress file (in place).
+                       pakfire.compress.compress(self.archive_files["data.img"],
+                               algo=compress, progress=True)
 
        def create_info(self):
                f = open(self.archive_files["info"], "w")
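
With the helper in place, the open-coded compression loop in the packager collapses to a single call. A hypothetical invocation (the path is illustrative only):

import pakfire.compress

# Compress the package payload in place with the chosen algorithm
# ("xz" or "zlib"); progress=True presumably shows a progress bar.
pakfire.compress.compress("/tmp/pakfire/data.img", algo="xz", progress=True)
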
index a0c68b7e91e09e61016d4d073da7e643e0ea2036..9ed863e1022a65e724fd2a9598fe5ab19bb1c851 100644 (file)
@@ -3,17 +3,16 @@
 import fnmatch
 import json
 import logging
-import lzma
 import os
 import random
 import shutil
 import time
-import zlib
 
 import database
 import downloader
 import metadata
 
+import pakfire.compress as compress
 import pakfire.packages as packages
 import pakfire.util as util
 
@@ -216,27 +215,7 @@ class LocalIndex(DatabaseIndexFactory):
 
                # Compress the database.
                if compress:
-                       i = open(db_path)
-                       os.unlink(db_path)
-
-                       o = open(db_path, "w")
-
-                       # Choose a compressor.
-                       if compress == "xz":
-                               comp = lzma.LZMACompressor()
-                       elif compress == "zlib":
-                               comp = zlib.compressobj(9)
-
-                       buf = i.read(BUFFER_SIZE)
-                       while buf:
-                               o.write(comp.compress(buf))
-
-                               buf = i.read(BUFFER_SIZE)
-
-                       o.write(comp.flush())
-
-                       i.close()
-                       o.close()
+                       compress.compress(db_path, algo=compress, progress=True)
 
                if not os.path.exists(db_path2):
                        shutil.move(db_path, db_path2)
@@ -332,29 +311,8 @@ class RemoteIndex(DatabaseIndexFactory):
                                # Open input file and remove the file immediately.
                                # The fileobj is still open and the data will be removed
                                # when it is closed.
-                               i = cache.open(filename)
-                               cache.remove(filename)
-
-                               # Open output file.
-                               o = cache.open(filename, "w")
-
-                               # Choose a decompessor.
-                               if self.metadata.database_compression == "xz":
-                                       comp = lzma.LZMADecompressor()
-
-                               elif self.metadata.database_compression == "zlib":
-                                       comp = zlib.decompressobj()
-
-                               buf = i.read(BUFFER_SIZE)
-                               while buf:
-                                       o.write(comp.decompress(buf))
-
-                                       buf = i.read(BUFFER_SIZE)
-
-                               o.write(comp.flush())
-
-                               i.close()
-                               o.close()
+                               compress.decompress(cache.abspath(filename),
+                                       algo=self.metadata.database_compression)
 
                        # check the hashsum of the downloaded file
                        if not util.calc_hash1(cache.abspath(filename)) == self.metadata.database_hash1:
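
On the repository side, the downloaded index database is now expanded in place before its hash is verified. A rough sketch of that flow; the cache path, the placeholder hash and the error handling are assumptions, not part of the diff:

import pakfire.compress as compress
import pakfire.util as util

db_path = "/var/cache/pakfire/repodata/index.db"		# hypothetical cache location
expected_hash1 = "0123456789abcdef0123456789abcdef01234567"	# would come from metadata.database_hash1

# Decompress the downloaded database in place, using the algorithm
# announced in the repository metadata.
compress.decompress(db_path, algo="xz")

# Verify the decompressed file, as the code above does.
if not util.calc_hash1(db_path) == expected_hash1:
	raise Exception("Downloaded database is corrupted.")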