#!/usr/bin/python

import glob
import logging
import lzma
import os
import progressbar
import re
import shutil
import sys
import tarfile
import tempfile
import uuid
import xattr
import zlib

import pakfire.compress
import util

from pakfire.constants import *
from pakfire.i18n import _

from file import InnerTarFile

# XXX this is totally ugly and needs to be done right!

class Packager(object):
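	"""
	Creates a binary package from a build environment.

	The packager collects all files that match the package's file patterns
	into the inner tarball (data.img), copies them aside so the dependency
	tracker can compute requires/provides, writes the metadata (info) and
	the filelist, and bundles these three members into the outer package
	archive.
	"""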
	ARCHIVE_FILES = ("info", "filelist", "data.img")

	def __init__(self, pakfire, pkg, env):
		self.pakfire = pakfire
		self.pkg = pkg
		self.env = env

		self.tarball = None

		# Store meta information
		self.info = {
			"package_format" : PACKAGE_FORMAT,
			"package_type" : "binary",
			"package_uuid" : uuid.uuid4(),
			"payload_comp" : None,
		}
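		# Merge in the metadata of the package, the distribution and the
		# build environment. Keys set by later updates override earlier ones.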
		self.info.update(self.pkg.info)
		self.info.update(self.pakfire.distro.info)
		self.info.update(self.env.info)

		### Create temporary files
		# Create a temporary directory where all files are extracted again
		# so that information like requirements and provides can be
		# gathered about them.
		self.tempdir = self.env.chrootPath("tmp", "%s_data" % self.pkg.friendly_name)
		if not os.path.exists(self.tempdir):
			os.makedirs(self.tempdir)

		# Create the files that hold the archive data.
		self.archive_files = {}
		for i in self.ARCHIVE_FILES:
			self.archive_files[i] = \
				self.env.chrootPath("tmp", "%s_%s" % (self.pkg.friendly_name, i))

	def __call__(self):
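		"""
		Runs the whole packaging process: packs the payload, lets the
		dependency tracker fill in requires and provides, writes the
		metadata and assembles the outer package archive in the result
		directory.
		"""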
		logging.debug("Packaging %s" % self.pkg.friendly_name)

		# Create the tarball and add all data to it.
		self.create_tarball()

		e = self.env.do("/usr/lib/buildsystem-tools/dependency-tracker %s" % \
			self.tempdir[len(self.env.chrootPath()):], returnOutput=True,
			env=self.pkg.env)

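		# The dependency tracker is expected to print lines of the form
		# key="value value ...". Only the requires and provides keys are
		# picked up below; everything else is ignored.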
		for line in e.splitlines():
			m = re.match(r"^(\w+)=(.*)$", line)
			if m is None:
				continue

			key, val = m.groups()

			if key not in ("requires", "provides"):
				continue

			val = val.strip("\"")
			val = val.split()

			self.info[key] = " ".join(sorted(val))

		self.create_info()

		# Create the outer tarball.
		resultdir = self.env.chrootPath("result", self.pkg.arch)
		if not os.path.exists(resultdir):
			os.makedirs(resultdir)

		filename = os.path.join(resultdir, self.pkg.filename)

		tar = tarfile.TarFile(filename, mode="w", format=tarfile.PAX_FORMAT)

		for i in self.ARCHIVE_FILES:
			tar.add(self.archive_files[i], arcname=i)

		tar.close()

	def create_tarball(self, compress="xz"):
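		"""
		Packs the payload of the package.

		Expands the package's file patterns (honouring "!" exclude patterns)
		into a sorted file list, adds every file to the inner tarball and to
		the filelist, copies it into the temporary directory for the
		dependency tracker and removes it from the buildroot. Finally the
		payload is compressed (if requested) and its hashsum is stored in
		the metadata.
		"""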
		tar = InnerTarFile(self.archive_files["data.img"], mode="w")

		includes = []
		excludes = []

		for pattern in self.pkg.file_patterns:
			# Check if we are running in include or exclude mode.
			if pattern.startswith("!"):
				files = excludes

				# Strip the ! character.
				pattern = pattern[1:]

			else:
				files = includes

			if pattern.startswith("/"):
				pattern = pattern[1:]
			pattern = self.env.chrootPath(self.env.buildroot, pattern)

			# Recognize the type of the pattern. Patterns could be a glob
			# pattern that is expanded here or just a directory which will
			# be included recursively.
			if "*" in pattern or "?" in pattern:
				files += glob.glob(pattern)

			elif os.path.exists(pattern):
				# Add directories recursively...
				if os.path.isdir(pattern):
					for dir, subdirs, _files in os.walk(pattern):
						for file in _files:
							file = os.path.join(dir, file)
							files.append(file)

				# All other files are just added.
				else:
					files.append(pattern)

		files = []
		for file in includes:
			# Skip if file is already in the file set or
			# marked to be excluded from this archive.
			if file in excludes or file in files:
				continue

			files.append(file)

		files.sort()

		filelist = open(self.archive_files["filelist"], mode="w")

		for file_real in files:
			file_tar = file_real[len(self.env.chrootPath(self.env.buildroot)) + 1:]
			file_tmp = os.path.join(self.tempdir, file_tar)

			if file_tar in ORPHAN_DIRECTORIES and not os.listdir(file_real):
				logging.debug("Found an orphaned directory: %s" % file_tar)
				# Directories cannot be removed with os.unlink; the directory
				# is known to be empty here, so os.rmdir is safe.
				os.rmdir(file_real)
				continue

			tar.add(file_real, arcname=file_tar)

			# Record the packaged file to the filelist.
			filelist.write("/%s\n" % file_tar)

			# "Copy" the file to the tmp path for later investigation.
			if os.path.isdir(file_real):
				file_dir = file_tmp
			else:
				file_dir = os.path.dirname(file_tmp)

			if not os.path.exists(file_dir):
				os.makedirs(file_dir)

			if os.path.isfile(file_real):
				os.link(file_real, file_tmp)

			elif os.path.islink(file_real):
				# Dead symlinks cannot be copied by shutil.
				os.symlink(os.readlink(file_real), file_tmp)

			elif os.path.isdir(file_real):
				if not os.path.exists(file_tmp):
					os.makedirs(file_tmp)

			else:
				shutil.copy2(file_real, file_tmp)

			# Unlink the file and remove empty directories.
			if not os.path.isdir(file_real):
				os.unlink(file_real)

			elif os.path.isdir(file_real) and not os.listdir(file_real):
				os.rmdir(file_real)

		# Dump all files that are in the archive.
		tar.list()

		# Write all data to disk.
		tar.close()
		filelist.close()

		# Compress the tarball here.
		if compress:
			# Save algorithm to metadata.
			self.info["payload_comp"] = compress

			logging.debug("Compressing package with %s algorithm." % (compress or "no"))

			# Compress file (in place).
			pakfire.compress.compress(self.archive_files["data.img"],
				algo=compress, progress=True)

		# Calc hashsum of the payload of the package.
		self.info["payload_hash1"] = util.calc_hash1(self.archive_files["data.img"])

	def create_info(self):
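		"""
		Writes the collected metadata (self.info) as the "info" member of
		the package, using the BINARY_PACKAGE_META template.
		"""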
		f = open(self.archive_files["info"], "w")
		f.write(BINARY_PACKAGE_META % self.info)
		f.close()