#!/usr/bin/python
-
+###############################################################################
+# #
+# Pakfire - The IPFire package management system #
+# Copyright (C) 2011 Pakfire development team #
+# #
+# This program is free software: you can redistribute it and/or modify #
+# it under the terms of the GNU General Public License as published by #
+# the Free Software Foundation, either version 3 of the License, or #
+# (at your option) any later version. #
+# #
+# This program is distributed in the hope that it will be useful, #
+# but WITHOUT ANY WARRANTY; without even the implied warranty of #
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
+# GNU General Public License for more details. #
+# #
+# You should have received a copy of the GNU General Public License #
+# along with this program. If not, see <http://www.gnu.org/licenses/>. #
+# #
+###############################################################################
+
+import collections
+import fnmatch
import glob
+import hashlib
import logging
import lzma
import os
import progressbar
+import re
import shutil
import sys
import tarfile
import tempfile
+import time
+import uuid
import xattr
+import zlib
+
+import pakfire.compress
+import pakfire.util as util
from pakfire.constants import *
from pakfire.i18n import _
-class Extractor(object):
+from file import BinaryPackage, InnerTarFile, SourcePackage
+
+class Packager(object):
def __init__(self, pakfire, pkg):
self.pakfire = pakfire
self.pkg = pkg
- self.data = pkg.get_file("data.img")
+ self.files = []
+ self.tmpfiles = []
- self.archive = None
- self._tempfile = None
-
- if pkg.payload_compression == "XXX":
- self.archive = tarfile.open(fileobj=self.data)
- else:
- self._uncompress_data()
+ def __del__(self):
+ for file in self.tmpfiles:
+ if not os.path.exists(file):
+ continue
- def cleanup(self):
- # XXX not called by anything
- if self._tempfile:
- os.unlink(self._tempfile)
+ logging.debug("Removing tmpfile: %s" % file)
- def _uncompress_data(self):
- # XXX this function uncompresses the data.img file
- # and saves the bare tarball to /tmp which takes a lot
- # of space.
+ if os.path.isdir(file):
+ util.rm(file)
+ else:
+ os.remove(file)
- self.data.seek(0)
+ def mktemp(self, directory=False):
+ # XXX use real mk(s)temp here
+ filename = os.path.join("/", LOCAL_TMP_PATH, util.random_string())
- # Create a temporary file to save the content in
- f, self._tempfile = tempfile.mkstemp()
- f = open(self._tempfile, "w")
+ if directory:
+ os.makedirs(filename)
- decompressor = lzma.LZMADecompressor()
+ self.tmpfiles.append(filename)
- buf = self.data.read(BUFFER_SIZE)
- while buf:
- f.write(decompressor.decompress(buf))
+ return filename
- buf = self.data.read(BUFFER_SIZE)
+ def save(self, filename):
+ # Create a new tar archive.
+ tar = tarfile.TarFile(filename, mode="w", format=tarfile.PAX_FORMAT)
- f.write(decompressor.flush())
- f.close()
+	# Add package format information.
+ # Must always be the first file in the archive.
+ formatfile = self.create_package_format()
+ tar.add(formatfile, arcname="pakfire-format")
- self.archive = tarfile.open(self._tempfile)
+ # XXX make sure all files belong to the root user
- @property
- def files(self):
- return self.archive.getnames()
+ # Create checksum file.
+ chksumsfile = self.mktemp()
+ chksums = open(chksumsfile, "w")
- def extractall(self, path="/", callback=None):
- pbar = self._make_progressbar()
+ # Add all files to tar file.
+ for arcname, filename in self.files:
+ tar.add(filename, arcname=arcname)
- if pbar:
- pbar.start()
- else:
- print " %s %-20s" % (_("Extracting"), self.pkg.name)
+ # Calculating the hash sum of the added file
+ # and store it in the chksums file.
+ f = open(filename)
+ h = hashlib.sha512()
+ while True:
+ buf = f.read(BUFFER_SIZE)
+ if not buf:
+ break
- i = 0
- for name in self.files:
- i += 1
- self.extract(name, path, callback=callback)
-
- if pbar:
- pbar.update(i)
+ h.update(buf)
+ f.close()
- if pbar:
- pbar.finish()
- #sys.stdout.write("\n")
+ chksums.write("%-10s %s\n" % (arcname, h.hexdigest()))
- def extract(self, filename, path="/", callback=None):
- member = self.archive.getmember(filename)
- target = os.path.join(path, filename)
+ # Close checksum file and attach it to the end.
+ chksums.close()
+ tar.add(chksumsfile, "chksums")
- # If the member is a directory and if it already exists, we
- # don't need to create it again.
- if member.isdir() and os.path.exists(target):
- return
+ # Close the tar file.
+ tar.close()
- #if self.pakfire.config.get("debug"):
- # msg = "Creating file (%s:%03d:%03d) " % \
- # (tarfile.filemode(member.mode), member.uid, member.gid)
- # if member.issym():
- # msg += "/%s -> %s" % (member.name, member.linkname)
- # elif member.islnk():
- # msg += "/%s link to /%s" % (member.name, member.linkname)
- # else:
- # msg += "/%s" % member.name
- # logging.debug(msg)
+ def add(self, filename, arcname=None):
+ if not arcname:
+ arcname = os.path.basename(filename)
- # Remove file if it has been existant
- if not member.isdir() and os.path.exists(target):
- os.unlink(target)
+ logging.debug("Adding %s (as %s) to tarball." % (filename, arcname))
+ self.files.append((arcname, filename))
- self.archive.extract(member, path=path)
+ def create_package_format(self):
+ filename = self.mktemp()
- # XXX implement setting of xattrs/acls here
+ f = open(filename, "w")
+ f.write("%s\n" % PACKAGE_FORMAT)
+ f.close()
- if callback and not member.isdir():
- callback(member.name, hash1="XXX", size=member.size)
+ return filename
- def _make_progressbar(self):
- # Don't display a progressbar if we are running in debug mode.
- if self.pakfire.config.get("debug"):
- return
+ def create_filelist(self, datafile):
+ filelist = self.mktemp()
- if not sys.stdout.isatty():
- return
+ f = open(filelist, "w")
+ datafile = InnerTarFile(datafile)
- widgets = [
- " ",
- "%s %-20s" % (_("Extracting:"), self.pkg.name),
- " ",
- progressbar.Bar(left="[", right="]"),
- " ",
-# progressbar.Percentage(),
-# " ",
- progressbar.ETA(),
- " ",
- ]
+ for m in datafile.getmembers():
+ logging.debug(" %s %-8s %-8s %s %6s %s" % \
+ (tarfile.filemode(m.mode), m.uname, m.gname,
+ "%d-%02d-%02d %02d:%02d:%02d" % time.localtime(m.mtime)[:6],
+ util.format_size(m.size), m.name))
- # maxval must be > 0 and so we assume that
- # empty packages have at least one file.
- maxval = len(self.files) or 1
+ f.write("%(name)-40s %(type)1s %(size)-10d %(uname)-10s %(gname)-10s %(mode)-6d %(mtime)-12d" \
+ % m.get_info(tarfile.ENCODING, "strict"))
- return progressbar.ProgressBar(
- widgets=widgets,
- maxval=maxval,
- term_width=80,
- )
+ # Calculate SHA512 hash of regular files.
+ if m.isreg():
+ mobj = datafile.extractfile(m)
+ h = hashlib.sha512()
+ while True:
+ buf = mobj.read(BUFFER_SIZE)
+ if not buf:
+ break
+ h.update(buf)
-class InnerTarFile(tarfile.TarFile):
- def __init__(self, *args, **kwargs):
- # Force the pax format
- kwargs["format"] = tarfile.PAX_FORMAT
+ mobj.close()
+ f.write(" %s\n" % h.hexdigest())
- if kwargs.has_key("env"):
- self.env = kwargs.pop("env")
+ # For other files, just finish the line.
+ else:
+ f.write(" -\n")
- tarfile.TarFile.__init__(self, *args, **kwargs)
+ logging.info("")
- def __filter_xattrs(self, tarinfo):
- logging.debug("Adding file: %s" % tarinfo.name)
+ datafile.close()
+ f.close()
- filename = self.env.chrootPath(self.env.buildroot, tarinfo.name)
+ return filelist
- # xattrs do only exists for regular files. If we don't have one,
- # simply skip.
- if os.path.isfile(filename):
- for attr, value in xattr.get_all(filename):
- tarinfo.pax_headers[attr] = value
+ def run(self):
+ raise NotImplementedError
- logging.debug(" xattr: %s=%s" % (attr, value))
- return tarinfo
+class BinaryPackager(Packager):
+ def __init__(self, pakfire, pkg, builder, buildroot):
+ Packager.__init__(self, pakfire, pkg)
- def add(self, *args, **kwargs):
- # Add filter for xattrs if no other filter is set.
- if not kwargs.has_key("filter") and len(args) < 5:
- kwargs["filter"] = self.__filter_xattrs
+ self.builder = builder
+ self.buildroot = buildroot
- tarfile.TarFile.add(self, *args, **kwargs)
+ def create_metafile(self, datafile):
+ info = collections.defaultdict(lambda: "")
+ # Extract datafile in temporary directory and scan for dependencies.
+ tmpdir = self.mktemp(directory=True)
-# XXX this is totally ugly and needs to be done right!
+ tarfile = InnerTarFile(datafile)
+ tarfile.extractall(path=tmpdir)
+ tarfile.close()
-class Packager(object):
- ARCHIVE_FILES = ("info", "filelist", "signature", "data.img")
+ # Run the dependency tracker.
+ self.pkg.track_dependencies(self.builder, tmpdir)
- def __init__(self, pakfire, pkg, env):
- self.pakfire = pakfire
- self.pkg = pkg
- self.env = env
-
- self.tarball = None
-
- # Store meta information
- self.info = {
- "package_format" : PACKAGE_FORMAT,
- }
- self.info.update(self.pkg.info)
- self.info.update(self.pakfire.distro.info)
- self.info.update(self.env.info)
-
- ### Create temporary files
- # Create temp directory to where we extract all files again and
- # gather some information about them like requirements and provides.
- self.tempdir = self.env.chrootPath("tmp", "%s_data" % self.pkg.friendly_name)
- if not os.path.exists(self.tempdir):
- os.makedirs(self.tempdir)
-
- # Create files that have the archive data
- self.archive_files = {}
- for i in self.ARCHIVE_FILES:
- self.archive_files[i] = \
- self.env.chrootPath("tmp", "%s_%s" % (self.pkg.friendly_name, i))
-
- def __call__(self):
- logging.debug("Packaging %s" % self.pkg.friendly_name)
-
- # Create the tarball and add all data to it.
- self.create_tarball()
-
- chroot_tempdir = self.tempdir[len(self.env.chrootPath()):]
- self.info.update({
- "requires" : self.env.do("/usr/lib/buildsystem-tools/dependency-tracker requires %s" % chroot_tempdir,
- returnOutput=True, env=self.pkg.env).strip(),
- "provides" : self.env.do("/usr/lib/buildsystem-tools/dependency-tracker provides %s" % chroot_tempdir,
- returnOutput=True, env=self.pkg.env).strip(),
+ # Generic package information including Pakfire information.
+ info.update({
+ "pakfire_version" : PAKFIRE_VERSION,
+ "uuid" : uuid.uuid4(),
})
- self.create_info()
- self.create_signature()
+ # Include distribution information.
+ info.update(self.pakfire.distro.info)
+ info.update(self.pkg.info)
+
+ # Update package information for string formatting.
+ info.update({
+ "groups" : " ".join(self.pkg.groups),
+ "prerequires" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
+ for d in self.pkg.prerequires]),
+ "requires" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
+ for d in self.pkg.requires]),
+ "provides" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
+ for d in self.pkg.provides]),
+ "conflicts" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
+ for d in self.pkg.conflicts]),
+ "obsoletes" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
+ for d in self.pkg.obsoletes]),
+ })
- # Create the outer tarball.
- resultdir = os.path.join(self.env.chrootPath("result", self.pkg.arch))
- if not os.path.exists(resultdir):
- os.makedirs(resultdir)
+ # Format description.
+ description = [PACKAGE_INFO_DESCRIPTION_LINE % l \
+ for l in util.text_wrap(self.pkg.description, length=80)]
+ info["description"] = "\n".join(description)
- filename = os.path.join(resultdir, self.pkg.filename)
+ # Build information.
+ info.update({
+			# Package is built right now.
+ "build_time" : int(time.time()),
+ "build_id" : uuid.uuid4(),
+ })
- tar = tarfile.TarFile(filename, mode="w", format=tarfile.PAX_FORMAT)
+ # Installed size (equals size of the uncompressed tarball).
+ info.update({
+ "inst_size" : os.path.getsize(datafile),
+ })
- for i in self.ARCHIVE_FILES:
- tar.add(self.archive_files[i], arcname=i)
+ metafile = self.mktemp()
- tar.close()
+ f = open(metafile, "w")
+ f.write(PACKAGE_INFO % info)
+ f.close()
- def create_tarball(self):
- tar = InnerTarFile(self.archive_files["data.img"], mode="w", env=self.env)
+ return metafile
+ def create_datafile(self):
includes = []
excludes = []
- for pattern in self.pkg.file_patterns:
+ # List of all patterns, which grows.
+ patterns = self.pkg.files
+
+ for pattern in patterns:
# Check if we are running in include or exclude mode.
if pattern.startswith("!"):
files = excludes
- # Strip the ! charater
+ # Strip the ! character.
pattern = pattern[1:]
-
else:
files = includes
+ # Expand file to point to chroot.
if pattern.startswith("/"):
pattern = pattern[1:]
- pattern = self.env.chrootPath(self.env.buildroot, pattern)
+ pattern = os.path.join(self.buildroot, pattern)
# Recognize the type of the pattern. Patterns could be a glob
# pattern that is expanded here or just a directory which will
# be included recursively.
- if "*" in pattern or "?" in pattern:
- files += glob.glob(pattern)
+ if "*" in pattern or "?" in pattern or ("[" in pattern and "]" in pattern):
+ _patterns = glob.glob(pattern)
+ else:
+ _patterns = [pattern,]
+
+ for pattern in _patterns:
+ if not os.path.exists(pattern):
+ continue
- elif os.path.exists(pattern):
# Add directories recursively...
if os.path.isdir(pattern):
+ # Add directory itself.
+ files.append(pattern)
+
for dir, subdirs, _files in os.walk(pattern):
+ for subdir in subdirs:
+ if subdir in ORPHAN_DIRECTORIES:
+ continue
+
+ subdir = os.path.join(dir, subdir)
+ files.append(subdir)
+
for file in _files:
file = os.path.join(dir, file)
files.append(file)
else:
files.append(pattern)
- else:
- logging.warning("Unrecognized pattern type: %s" % pattern)
-
files = []
for file in includes:
# Skip if file is already in the file set or
continue
files.append(file)
-
files.sort()
- filelist = open(self.archive_files["filelist"], mode="w")
+ # Load progressbar.
+ message = "%-10s : %s" % (_("Packaging"), self.pkg.friendly_name)
+ pb = util.make_progress(message, len(files), eta=False)
- for file_real in files:
- file_tar = file_real[len(self.env.chrootPath(self.env.buildroot)) + 1:]
- file_tmp = os.path.join(self.tempdir, file_tar)
+ datafile = self.mktemp()
+ tar = InnerTarFile(datafile, mode="w")
- tar.add(file_real, arcname=file_tar, recursive=False)
+ # All files in the tarball are relative to this directory.
+ basedir = self.buildroot
- # Record the packaged file to the filelist.
- filelist.write("/%s\n" % file_tar)
+ i = 0
+ for file in files:
+ if pb:
+ i += 1
+ pb.update(i)
- # "Copy" the file to the tmp path for later investigation.
- if os.path.isdir(file_real):
- file_dir = file_tmp
- else:
- file_dir = os.path.dirname(file_tmp)
+ # Never package /.
+ if os.path.normpath(file) == os.path.normpath(basedir):
+ continue
+
+ arcname = "/%s" % os.path.relpath(file, basedir)
- if not os.path.exists(file_dir):
- os.makedirs(file_dir)
+ # Special handling for directories.
+ if os.path.isdir(file):
+ # Empty directories that are in the list of ORPHAN_DIRECTORIES
+ # can be skipped and removed.
+ if arcname in ORPHAN_DIRECTORIES and not os.listdir(file):
+ logging.debug("Found an orphaned directory: %s" % arcname)
+ try:
+ os.unlink(file)
+ except OSError:
+ pass
- if os.path.isfile(file_real):
- os.link(file_real, file_tmp)
+ continue
+ # Add file to tarball.
+ tar.add(file, arcname=arcname, recursive=False)
+
+ # Remove all packaged files.
+ for file in reversed(files):
+ if not os.path.exists(file):
+ continue
+
+ # It's okay if we cannot remove directories,
+ # when they are not empty.
+ if os.path.isdir(file):
+ try:
+ os.rmdir(file)
+ except OSError:
+ continue
else:
- shutil.copy2(file_real, file_tmp)
+ os.unlink(file)
- # Unlink the file and remove empty directories.
- if not os.path.isdir(file_real):
- os.unlink(file_real)
+ while True:
+ file = os.path.dirname(file)
- elif os.path.isdir(file_real) and not os.listdir(file_real):
- os.rmdir(file_real)
+ if not file.startswith(basedir):
+ break
- # Dump all files that are in the archive.
- tar.list()
+ try:
+ os.rmdir(file)
+ except OSError:
+ break
- # Write all data to disk.
+ # Close the tarfile.
tar.close()
- filelist.close()
- # XXX compress the tarball here
+ # Finish progressbar.
+ if pb:
+ pb.finish()
+
+ return datafile
+
+ def create_scriptlets(self):
+ scriptlets = []
+
+ for scriptlet_name in SCRIPTS:
+ scriptlet = self.pkg.get_scriptlet(scriptlet_name)
+
+ if not scriptlet:
+ continue
+
+ # Write script to a file.
+ scriptlet_file = self.mktemp()
+
+			if scriptlet["lang"] == "bin":
+				path = scriptlet["path"]
+				try:
+					f = open(path, "rb")
+				except IOError:
+					raise Exception, "Cannot open script file: %s" % path
+
+ s = open(scriptlet_file, "wb")
+
+ while True:
+ buf = f.read(BUFFER_SIZE)
+ if not buf:
+ break
+
+ s.write(buf)
+
+ f.close()
+ s.close()
+
+ elif scriptlet["lang"] == "shell":
+ s = open(scriptlet_file, "w")
+
+ # Write shell script to file.
+ s.write("#!/bin/sh -e\n\n")
+ s.write(scriptlet["scriptlet"])
+ s.write("\n\nexit 0\n")
+
+ s.close()
+
+ else:
+ raise Exception, "Unknown scriptlet language: %s" % scriptlet["lang"]
+
+ scriptlets.append((scriptlet_name, scriptlet_file))
+
+ # XXX scan for script dependencies
+
+ return scriptlets
+
+ def create_configs(self, datafile):
+ datafile = InnerTarFile(datafile)
+
+ members = datafile.getmembers()
- def create_info(self):
- f = open(self.archive_files["info"], "w")
- f.write(BINARY_PACKAGE_META % self.info)
+ configfiles = []
+ configdirs = []
+
+ # Find all directories in the config file list.
+ for file in self.pkg.configfiles:
+ if file.startswith("/"):
+ file = file[1:]
+
+ for member in members:
+ if member.name == file and member.isdir():
+ configdirs.append(file)
+
+ for configdir in configdirs:
+ for member in members:
+ if not member.isdir() and member.name.startswith(configdir):
+ configfiles.append(member.name)
+
+ for pattern in self.pkg.configfiles:
+ if pattern.startswith("/"):
+ pattern = pattern[1:]
+
+ for member in members:
+ if not fnmatch.fnmatch(member.name, pattern):
+ continue
+
+ if member.name in configfiles:
+ continue
+
+ configfiles.append(member.name)
+
+ # Sort list alphabetically.
+ configfiles.sort()
+
+ configsfile = self.mktemp()
+
+ f = open(configsfile, "w")
+ for file in configfiles:
+ f.write("%s\n" % file)
f.close()
- def create_signature(self):
- # Create an empty signature.
- f = open(self.archive_files["signature"], "w")
+ return configsfile
+
+ def compress_datafile(self, datafile, algo="xz"):
+ outputfile = self.mktemp()
+
+		# Compress the datafile with the chosen algorithm.
+ pakfire.compress.compress_file(datafile, outputfile, algo=algo,
+ progress=True, message=_("Compressing %s") % self.pkg.friendly_name)
+
+ # We do not need the uncompressed output anymore.
+ os.unlink(datafile)
+
+		# The outputfile becomes our new datafile.
+ return outputfile
+
+ def run(self, resultdir):
+ # Add all files to this package.
+ datafile = self.create_datafile()
+
+ # Get filelist from datafile.
+ filelist = self.create_filelist(datafile)
+ configs = self.create_configs(datafile)
+
+ # Create script files.
+ scriptlets = self.create_scriptlets()
+
+ metafile = self.create_metafile(datafile)
+
+ # XXX make xz in variable
+ datafile = self.compress_datafile(datafile, algo="xz")
+
+ # Add files to the tar archive in correct order.
+ self.add(metafile, "info")
+ self.add(filelist, "filelist")
+ self.add(configs, "configs")
+ self.add(datafile, "data.img")
+
+ for scriptlet_name, scriptlet_file in scriptlets:
+ self.add(scriptlet_file, "scriptlets/%s" % scriptlet_name)
+
+ # Build the final package.
+ tempfile = self.mktemp()
+ self.save(tempfile)
+
+ # Add architecture information to path.
+ resultdir = "%s/%s" % (resultdir, self.pkg.arch)
+
+ if not os.path.exists(resultdir):
+ os.makedirs(resultdir)
+
+ resultfile = os.path.join(resultdir, self.pkg.package_filename)
+ logging.info("Saving package to %s" % resultfile)
+ try:
+ os.link(tempfile, resultfile)
+ except OSError:
+ shutil.copy2(tempfile, resultfile)
+
+ return BinaryPackage(self.pakfire, self.pakfire.repos.dummy, resultfile)
+
+
+class SourcePackager(Packager):
+ def create_metafile(self, datafile):
+ info = collections.defaultdict(lambda: "")
+
+ # Generic package information including Pakfire information.
+ info.update({
+ "pakfire_version" : PAKFIRE_VERSION,
+ })
+
+ # Include distribution information.
+ info.update(self.pakfire.distro.info)
+ info.update(self.pkg.info)
+
+ # Update package information for string formatting.
+ requires = [PACKAGE_INFO_DEPENDENCY_LINE % r for r in self.pkg.requires]
+ info.update({
+ "groups" : " ".join(self.pkg.groups),
+ "requires" : "\n".join(requires),
+ })
+
+ # Format description.
+ description = [PACKAGE_INFO_DESCRIPTION_LINE % l \
+ for l in util.text_wrap(self.pkg.description, length=80)]
+ info["description"] = "\n".join(description)
+
+ # Build information.
+ info.update({
+			# Package is built right now.
+ "build_time" : int(time.time()),
+ "build_id" : uuid.uuid4(),
+ })
+
+ # Set UUID
+ # XXX replace this by the payload hash
+ info.update({
+ "uuid" : uuid.uuid4(),
+ })
+
+ metafile = self.mktemp()
+
+ f = open(metafile, "w")
+ f.write(PACKAGE_INFO % info)
f.close()
+
+ return metafile
+
+ def create_datafile(self):
+ filename = self.mktemp()
+ datafile = InnerTarFile(filename, mode="w")
+
+ # Add all downloaded files to the package.
+ for file in self.pkg.download():
+ datafile.add(file, "files/%s" % os.path.basename(file))
+
+ # Add all files in the package directory.
+ for file in sorted(self.pkg.files):
+ arcname = os.path.relpath(file, self.pkg.path)
+ datafile.add(file, arcname)
+
+ datafile.close()
+
+ return filename
+
+ def run(self, resultdirs=[]):
+ assert resultdirs
+
+ logging.info(_("Building source package %s:") % self.pkg.package_filename)
+
+ # Add datafile to package.
+ datafile = self.create_datafile()
+
+ # Create filelist out of data.
+ filelist = self.create_filelist(datafile)
+
+ # Create metadata.
+ metafile = self.create_metafile(datafile)
+
+ # Add files to the tar archive in correct order.
+ self.add(metafile, "info")
+ self.add(filelist, "filelist")
+ self.add(datafile, "data.img")
+
+ # Build the final tarball.
+ tempfile = self.mktemp()
+ self.save(tempfile)
+
+ for resultdir in resultdirs:
+ # XXX sometimes, there has been a None in resultdirs
+ if not resultdir:
+ continue
+
+ resultdir = "%s/%s" % (resultdir, self.pkg.arch)
+
+ if not os.path.exists(resultdir):
+ os.makedirs(resultdir)
+
+ resultfile = os.path.join(resultdir, self.pkg.package_filename)
+ logging.info("Saving package to %s" % resultfile)
+ try:
+ os.link(tempfile, resultfile)
+ except OSError:
+ shutil.copy2(tempfile, resultfile)
+
+ # Dump package information.
+ pkg = SourcePackage(self.pakfire, self.pakfire.repos.dummy, tempfile)
+ for line in pkg.dump(long=True).splitlines():
+ logging.info(line)
+ logging.info("")