]>
Commit | Line | Data |
---|---|---|
47a4cb89 MT |
1 | #!/usr/bin/python |
2 | ||
3 | import glob | |
4 | import logging | |
5 | import lzma | |
6 | import os | |
7 | import progressbar | |
8c617c20 | 8 | import re |
be4a3422 | 9 | import shutil |
47a4cb89 MT |
10 | import sys |
11 | import tarfile | |
12 | import tempfile | |
1317485d | 13 | import uuid |
47a4cb89 | 14 | import xattr |
ce9ffa40 | 15 | import zlib |
47a4cb89 | 16 | |
c1fbb0b7 | 17 | import pakfire.compress |
677ff42a | 18 | from pakfire.util import rm |
8c617c20 | 19 | import util |
c1fbb0b7 | 20 | |
47a4cb89 MT |
21 | from pakfire.constants import * |
22 | from pakfire.i18n import _ | |
23 | ||
114ac7ee | 24 | from file import InnerTarFile |
47a4cb89 | 25 | |
class Packager(object):
	"""Build a pakfire package from the files staged in the build environment.

	The finished package is an outer (uncompressed) tarball containing the
	members listed in ARCHIVE_FILES:

	  info     -- rendered package metadata (BINARY_PACKAGE_META template),
	  filelist -- one absolute path per packaged file,
	  data.img -- the inner payload tarball (optionally compressed).
	"""

	ARCHIVE_FILES = ("info", "filelist", "data.img")

	def __init__(self, pakfire, pkg, env):
		self.pakfire = pakfire
		self.pkg = pkg
		self.env = env

		self.tarball = None

		# When True, files are removed from the build root after they
		# have been archived (disabled by SourcePackager).
		self.cleanup = True

		# Store meta information.
		self.info = {
			"package_format" : PACKAGE_FORMAT,
			"package_type" : self.type,
			"package_uuid" : uuid.uuid4(),
			"payload_comp" : "",

			"prerequires" : "",
			"requires" : "",
			"provides" : "",
			"conflicts" : "",
			"obsoletes" : "",
		}
		self.info.update(self.pkg.info)
		self.info["groups"] = " ".join(self.info["groups"])
		self.info.update(self.pakfire.distro.info)
		self.info.update(self.env.info)

		### Create temporary files
		# Create temp directory to where we extract all files again and
		# gather some information about them like requirements and provides.
		self.tempdir = self.env.chrootPath("tmp", "%s_data" % self.pkg.friendly_name)
		if not os.path.exists(self.tempdir):
			os.makedirs(self.tempdir)

		# Paths (inside the chroot) of the files that become the members
		# of the outer archive.
		self.archive_files = {}
		for i in self.ARCHIVE_FILES:
			self.archive_files[i] = \
				self.env.chrootPath("tmp", "%s_%s" % (self.pkg.friendly_name, i))

	def __call__(self):
		"""Run the whole packaging process and write the package file."""
		logging.debug("Packaging %s" % self.pkg.friendly_name)

		# Create the tarball and add all data to it.
		self.create_tarball()

		if self.type == "binary":
			# Run the dependency tracker over the staged files to collect
			# automatic dependency information (KEY="val val ..." lines).
			e = self.env.do("/usr/lib/buildsystem-tools/dependency-tracker %s" % \
				self.tempdir[len(self.env.chrootPath()):], returnOutput=True,
				env=self.pkg.env)

			for line in e.splitlines():
				m = re.match(r"^(\w+)=(.*)$", line)
				if m is None:
					continue

				key, val = m.groups()

				if key not in ("prerequires", "requires", "provides", "conflicts", "obsoletes",):
					continue

				# Values come quoted and whitespace-separated; store them
				# back as a sorted, space-joined string.
				val = val.strip("\"")
				val = val.split()

				self.info[key] = " ".join(sorted(val))

		elif self.type == "source":
			# Save the build requirements.
			self.info["requires"] = " ".join(self.pkg.requires)

		self.create_info()

		# Create the outer tarball.
		resultdir = os.path.join(self.env.chrootPath("result", self.pkg.arch))
		if not os.path.exists(resultdir):
			os.makedirs(resultdir)

		filename = os.path.join(resultdir, self.pkg.filename)

		tar = tarfile.TarFile(filename, mode="w", format=tarfile.PAX_FORMAT)
		try:
			for i in self.ARCHIVE_FILES:
				tar.add(self.archive_files[i], arcname=i)
		finally:
			tar.close()

		rm(self.tempdir)

	def create_tarball(self, compress=None):
		"""Collect the package payload into the inner tarball (data.img).

		compress -- name of the compression algorithm to apply to the
		            payload in place; defaults to "xz" for binary packages
		            and to no compression otherwise.

		Also writes the "filelist" member and hard-links/copies every
		packaged file into self.tempdir for later dependency tracking.
		"""
		tar = InnerTarFile(self.archive_files["data.img"], mode="w")

		# Files of binary packages live below the buildroot, files of
		# source packages below "build".
		prefix = self.env.buildroot
		if self.type == "source":
			prefix = "build"

		if not compress and self.type == "binary":
			compress = "xz"

		includes = []
		excludes = []

		for pattern in self.pkg.file_patterns:
			# Check if we are running in include or exclude mode.
			if pattern.startswith("!"):
				files = excludes

				# Strip the ! character.
				pattern = pattern[1:]

			else:
				files = includes

			if pattern.startswith("/"):
				pattern = pattern[1:]
			pattern = self.env.chrootPath(prefix, pattern)

			# Recognize the type of the pattern. Patterns could be a glob
			# pattern that is expanded here or just a directory which will
			# be included recursively.
			if "*" in pattern or "?" in pattern:
				files += glob.glob(pattern)

			elif os.path.exists(pattern):
				# Add directories recursively...
				if os.path.isdir(pattern):
					for dir, subdirs, _files in os.walk(pattern):
						for file in _files:
							file = os.path.join(dir, file)
							files.append(file)

				# all other files are just added.
				else:
					files.append(pattern)

		# Build the final file set: includes minus excludes, de-duplicated.
		files = []
		for file in includes:
			# Skip if file is already in the file set or
			# marked to be excluded from this archive.
			if file in excludes or file in files:
				continue

			files.append(file)

		files.sort()

		filelist = open(self.archive_files["filelist"], mode="w")

		try:
			for file_real in files:
				file_tar = file_real[len(self.env.chrootPath(prefix)) + 1:]
				file_tmp = os.path.join(self.tempdir, file_tar)

				if file_tar in ORPHAN_DIRECTORIES and not os.listdir(file_real):
					logging.debug("Found an orphaned directory: %s" % file_tar)
					# BUGFIX: this is an (empty) directory -- os.unlink()
					# raises OSError on directories; os.rmdir() is correct.
					os.rmdir(file_real)
					continue

				tar.add(file_real, arcname=file_tar)

				# Record the packaged file to the filelist.
				filelist.write("/%s\n" % file_tar)

				# "Copy" the file to the tmp path for later investigation.
				if os.path.isdir(file_real):
					file_dir = file_tmp
				else:
					file_dir = os.path.dirname(file_tmp)

				if not os.path.exists(file_dir):
					os.makedirs(file_dir)

				if os.path.isfile(file_real):
					os.link(file_real, file_tmp)

				elif os.path.islink(file_real):
					# Dead symlinks cannot be copied by shutil.
					os.symlink(os.readlink(file_real), file_tmp)

				elif os.path.isdir(file_real):
					if not os.path.exists(file_tmp):
						os.makedirs(file_tmp)

				else:
					shutil.copy2(file_real, file_tmp)

				# Unlink the file and remove empty directories.
				if self.cleanup:
					if not os.path.isdir(file_real):
						os.unlink(file_real)

					elif os.path.isdir(file_real) and not os.listdir(file_real):
						os.rmdir(file_real)

			# Dump all files that are in the archive.
			tar.list()

		finally:
			# Write all data to disk (also on error, so no handle leaks).
			tar.close()
			filelist.close()

		# Compress the tarball here.
		if compress:
			# Save algorithm to metadata.
			self.info["payload_comp"] = compress

			# BUGFIX: "%" binds tighter than "or", so the original
			# ("..." % compress or "no") could never fall back to "no".
			logging.debug("Compressing package with %s algorithm." % (compress or "no"))

			# Compress file (in place).
			pakfire.compress.compress(self.archive_files["data.img"],
				algo=compress, progress=True)

		# Calc hashsum of the payload of the package.
		self.info["payload_hash1"] = util.calc_hash1(self.archive_files["data.img"])

	def create_info(self):
		"""Render the metadata template and write the "info" member."""
		f = open(self.archive_files["info"], "w")
		try:
			f.write(BINARY_PACKAGE_META % self.info)
		finally:
			f.close()

	@property
	def type(self):
		# Package type identifier ("binary" or "source");
		# must be implemented by subclasses.
		raise NotImplementedError
250 | ||
251 | ||
class BinaryPackager(Packager):
	"""Packager that produces binary packages."""

	@property
	def type(self):
		# Marks the produced archive as a binary package.
		return "binary"
256 | ||
257 | ||
class SourcePackager(Packager):
	"""Packager that produces source packages.

	Unlike binary packages, the packaged files must stay in the build
	root, so the base class cleanup step is switched off.
	"""

	def __init__(self, *args, **kwargs):
		Packager.__init__(self, *args, **kwargs)

		# Keep the packaged files in place.
		self.cleanup = False

	@property
	def type(self):
		# Marks the produced archive as a source package.
		return "source"