# Source: git.ipfire.org / pakfire.git / pakfire / packages / packager.py
# Commit: "Enhance support for groups."
1 #!/usr/bin/python
2
3 import glob
4 import logging
5 import lzma
6 import os
7 import progressbar
8 import re
9 import shutil
10 import sys
11 import tarfile
12 import tempfile
13 import uuid
14 import xattr
15 import zlib
16
17 import pakfire.compress
18 import util
19
20 from pakfire.constants import *
21 from pakfire.i18n import _
22
23 from file import InnerTarFile
24
class Packager(object):
    """Pack a built package into a pakfire archive.

    A pakfire package is a plain (outer) tarball containing the members
    listed in ARCHIVE_FILES: package metadata ("info"), the list of
    packaged paths ("filelist") and the payload tarball ("data.img").
    Subclasses must implement the `type` property ("binary" or "source").
    """

    # Members of the outer tarball, in the order they are added.
    ARCHIVE_FILES = ("info", "filelist", "data.img")

    def __init__(self, pakfire, pkg, env):
        """Prepare temporary paths and the package metadata dictionary.

        pakfire: the pakfire instance (provides distro information).
        pkg:     the package object being packed (name, arch, patterns, ...).
        env:     the build environment (chroot path helpers, command runner).
        """
        self.pakfire = pakfire
        self.pkg = pkg
        self.env = env

        self.tarball = None

        # If True, packaged files are removed from the build root after
        # they have been archived (disabled for source packages).
        self.cleanup = True

        # Store meta information. Later update() calls deliberately let
        # package/distro/environment info override these defaults.
        self.info = {
            "package_format" : PACKAGE_FORMAT,
            "package_type" : self.type,
            "package_uuid" : uuid.uuid4(),
            "payload_comp" : "",

            "requires" : "",
            "provides" : "",
        }
        self.info.update(self.pkg.info)
        # Groups come in as a list; the metadata file stores them
        # space-separated.
        self.info["groups"] = " ".join(self.info["groups"])
        self.info.update(self.pakfire.distro.info)
        self.info.update(self.env.info)

        ### Create temporary files
        # Create temp directory to where we extract all files again and
        # gather some information about them like requirements and provides.
        self.tempdir = self.env.chrootPath("tmp", "%s_data" % self.pkg.friendly_name)
        if not os.path.exists(self.tempdir):
            os.makedirs(self.tempdir)

        # Paths (inside the chroot) of the files that make up the archive.
        self.archive_files = {}
        for i in self.ARCHIVE_FILES:
            self.archive_files[i] = \
                self.env.chrootPath("tmp", "%s_%s" % (self.pkg.friendly_name, i))

    def __call__(self):
        """Run the whole packaging process and write the package file."""
        logging.debug("Packaging %s" % self.pkg.friendly_name)

        # Create the payload tarball and add all data to it.
        self.create_tarball()

        # For binary packages, run the external dependency tracker over the
        # staged files to compute the "requires" and "provides" lists.
        if self.type == "binary":
            e = self.env.do("/usr/lib/buildsystem-tools/dependency-tracker %s" % \
                self.tempdir[len(self.env.chrootPath()):], returnOutput=True,
                env=self.pkg.env)

            # The tracker prints KEY="value ..." lines; keep only the
            # keys we care about.
            for line in e.splitlines():
                m = re.match(r"^(\w+)=(.*)$", line)
                if m is None:
                    continue

                key, val = m.groups()

                if key not in ("requires", "provides"):
                    continue

                val = val.strip("\"")
                val = val.split()

                self.info[key] = " ".join(sorted(val))

        self.create_info()

        # Create the outer tarball in the result directory for this arch.
        resultdir = os.path.join(self.env.chrootPath("result", self.pkg.arch))
        if not os.path.exists(resultdir):
            os.makedirs(resultdir)

        filename = os.path.join(resultdir, self.pkg.filename)

        tar = tarfile.TarFile(filename, mode="w", format=tarfile.PAX_FORMAT)

        for i in self.ARCHIVE_FILES:
            tar.add(self.archive_files[i], arcname=i)

        tar.close()

    def create_tarball(self, compress=None):
        """Collect the package's files into the payload tarball.

        Expands the package's file patterns (include and "!"-prefixed
        exclude patterns) against the build root, writes the matching
        files into data.img and the filelist, stages a copy of every
        file under self.tempdir for later dependency analysis, and
        optionally compresses the payload in place.

        compress: compression algorithm name; defaults to "xz" for
                  binary packages and no compression otherwise.
        """
        tar = InnerTarFile(self.archive_files["data.img"], mode="w")

        prefix = self.env.buildroot
        if self.type == "source":
            prefix = "build"

        # Binary packages are compressed with xz by default.
        if not compress and self.type == "binary":
            compress = "xz"

        includes = []
        excludes = []

        for pattern in self.pkg.file_patterns:
            # Check if we are running in include or exclude mode.
            if pattern.startswith("!"):
                files = excludes

                # Strip the ! character.
                pattern = pattern[1:]

            else:
                files = includes

            if pattern.startswith("/"):
                pattern = pattern[1:]
            pattern = self.env.chrootPath(prefix, pattern)

            # Recognize the type of the pattern. Patterns could be a glob
            # pattern that is expanded here or just a directory which will
            # be included recursively.
            if "*" in pattern or "?" in pattern:
                files += glob.glob(pattern)

            elif os.path.exists(pattern):
                # Add directories recursively...
                if os.path.isdir(pattern):
                    for dirpath, dirnames, filenames in os.walk(pattern):
                        for fn in filenames:
                            files.append(os.path.join(dirpath, fn))

                # all other files are just added.
                else:
                    files.append(pattern)

        # Merge the include list, dropping duplicates and anything that
        # was marked to be excluded (set lookups keep this O(n)).
        files = []
        seen = set(excludes)
        for path in includes:
            if path in seen:
                continue
            seen.add(path)
            files.append(path)

        files.sort()

        filelist = open(self.archive_files["filelist"], mode="w")

        # Length of the chroot prefix (plus the slash) that is stripped
        # from each path to get its in-archive name; loop-invariant.
        strip = len(self.env.chrootPath(prefix)) + 1

        for file_real in files:
            file_tar = file_real[strip:]
            file_tmp = os.path.join(self.tempdir, file_tar)

            if file_tar in ORPHAN_DIRECTORIES and not os.listdir(file_real):
                logging.debug("Found an orphaned directory: %s" % file_tar)
                # os.unlink() cannot remove a directory; rmdir is safe
                # here because the directory was just checked to be empty.
                os.rmdir(file_real)
                continue

            tar.add(file_real, arcname=file_tar)

            # Record the packaged file to the filelist.
            filelist.write("/%s\n" % file_tar)

            # "Copy" the file to the tmp path for later investigation.
            if os.path.isdir(file_real):
                file_dir = file_tmp
            else:
                file_dir = os.path.dirname(file_tmp)

            if not os.path.exists(file_dir):
                os.makedirs(file_dir)

            if os.path.isfile(file_real):
                # Regular files are hardlinked to avoid copying data.
                os.link(file_real, file_tmp)

            elif os.path.islink(file_real):
                # Dead symlinks cannot be copied by shutil.
                os.symlink(os.readlink(file_real), file_tmp)

            elif os.path.isdir(file_real):
                if not os.path.exists(file_tmp):
                    os.makedirs(file_tmp)

            else:
                shutil.copy2(file_real, file_tmp)

            # Unlink the file and remove empty directories.
            if self.cleanup:
                if not os.path.isdir(file_real):
                    os.unlink(file_real)

                elif os.path.isdir(file_real) and not os.listdir(file_real):
                    os.rmdir(file_real)

        # Dump all files that are in the archive.
        tar.list()

        # Write all data to disk.
        tar.close()
        filelist.close()

        # Compress the payload tarball here (in place).
        if compress:
            # Save algorithm to metadata.
            self.info["payload_comp"] = compress

            # NOTE: the original "... % compress or 'no'" was dead code
            # because % binds tighter than or; compress is truthy here.
            logging.debug("Compressing package with %s algorithm." % compress)

            pakfire.compress.compress(self.archive_files["data.img"],
                algo=compress, progress=True)

        # Calc hashsum of the payload of the package.
        self.info["payload_hash1"] = util.calc_hash1(self.archive_files["data.img"])

    def create_info(self):
        """Render the package metadata into the "info" archive member."""
        f = open(self.archive_files["info"], "w")
        f.write(BINARY_PACKAGE_META % self.info)
        f.close()

    @property
    def type(self):
        # Subclasses decide whether they build "binary" or "source"
        # packages.
        raise NotImplementedError
240
241
class BinaryPackager(Packager):
    """Packager for binary packages.

    Binary packages get their payload xz-compressed and are run through
    the dependency tracker by the base class.
    """

    @property
    def type(self):
        """Package type identifier stored in the metadata."""
        return "binary"
246
247
class SourcePackager(Packager):
    """Packager for source packages."""

    def __init__(self, *args, **kwargs):
        """Initialize like a regular packager, but keep the build files.

        Source packages must not remove the packaged files from the
        build directory, so cleanup is switched off.
        """
        super(SourcePackager, self).__init__(*args, **kwargs)

        self.cleanup = False

    @property
    def type(self):
        """Package type identifier stored in the metadata."""
        return "source"