#!/usr/bin/python
###############################################################################
#                                                                             #
# Pakfire - The IPFire package management system                              #
# Copyright (C) 2011 Pakfire development team                                 #
#                                                                             #
# This program is free software: you can redistribute it and/or modify       #
# it under the terms of the GNU General Public License as published by       #
# the Free Software Foundation, either version 3 of the License, or          #
# (at your option) any later version.                                        #
#                                                                             #
# This program is distributed in the hope that it will be useful,            #
# but WITHOUT ANY WARRANTY; without even the implied warranty of             #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the               #
# GNU General Public License for more details.                               #
#                                                                             #
# You should have received a copy of the GNU General Public License          #
# along with this program. If not, see <http://www.gnu.org/licenses/>.       #
#                                                                             #
###############################################################################

import collections
import fnmatch
import glob
import hashlib
import os
import re
import shutil
import sys
import tarfile
import tempfile
import time
import uuid
import zlib

import logging
log = logging.getLogger("pakfire")

import pakfire.util as util

from pakfire.constants import *
from pakfire.i18n import _

from . import tar

class Packager(object):
	payload_compression = None

	def __init__(self, pakfire, pkg):
		self.pakfire = pakfire
		self.pkg = pkg

		self.files = []
		self.tmpfiles = []

	def __del__(self):
		for file in self.tmpfiles:
			if not os.path.exists(file):
				continue

			log.debug("Removing tmpfile: %s" % file)

			if os.path.isdir(file):
				util.rm(file)
			else:
				os.remove(file)

	def mktemp(self, directory=False):
		if directory:
			filename = os.path.join("/", LOCAL_TMP_PATH, util.random_string())
			os.makedirs(filename)
		else:
			f = tempfile.NamedTemporaryFile(mode="w", delete=False)
			f.close()

			filename = f.name

		self.tmpfiles.append(filename)

		return filename

	def save(self, filename):
		# Create a new tar archive.
		tar = tarfile.TarFile(filename, mode="w", format=tarfile.PAX_FORMAT)

		# Add package format information.
		# Must always be the first file in the archive.
		formatfile = self.create_package_format()
		tar.add(formatfile, arcname="pakfire-format")

		# XXX make sure all files belong to the root user

		# Create checksum file.
		chksumsfile = self.mktemp()
		chksums = open(chksumsfile, "w")

		# Add all files to tar file.
		for arcname, filename in self.files:
			tar.add(filename, arcname=arcname)

			# Calculate the hash sum of the added file
			# and store it in the chksums file.
			f = open(filename, "rb")
			h = hashlib.new("sha512")
			while True:
				buf = f.read(BUFFER_SIZE)
				if not buf:
					break

				h.update(buf)
			f.close()

			chksums.write("%-10s %s\n" % (arcname, h.hexdigest()))

		# Close checksum file and attach it to the end.
		chksums.close()
		tar.add(chksumsfile, "chksums")

		# Close the tar file.
		tar.close()

	def add(self, filename, arcname=None):
		if not arcname:
			arcname = os.path.basename(filename)

		log.debug("Adding %s (as %s) to tarball." % (filename, arcname))
		self.files.append((arcname, filename))

	def create_package_format(self):
		filename = self.mktemp()

		f = open(filename, "w")
		f.write("%s\n" % PACKAGE_FORMAT)
		f.close()

		return filename

	def create_filelist(self, datafile):
		filelist = self.mktemp()

		f = open(filelist, "w")

		if self.payload_compression == "xz":
			datafile = tar.InnerTarFileXz.open(datafile)
		else:
			datafile = tar.InnerTarFile.open(datafile)

		while True:
			m = datafile.next()
			if not m:
				break

			log.debug(" %s %-8s %-8s %s %6s %s" % \
				(tarfile.filemode(m.mode), m.uname, m.gname,
				"%d-%02d-%02d %02d:%02d:%02d" % time.localtime(m.mtime)[:6],
				util.format_size(m.size), m.name))

			f.write("%(type)1s %(size)-10d %(uname)-10s %(gname)-10s %(mode)-6d %(mtime)-12d" \
				% m.get_info())

			# Calculate SHA512 hash of regular files.
			if m.isreg():
				mobj = datafile.extractfile(m)
				h = hashlib.new("sha512")

				while True:
					buf = mobj.read(BUFFER_SIZE)
					if not buf:
						break
					h.update(buf)

				mobj.close()
				f.write(" %s" % h.hexdigest())
			else:
				f.write(" -")

			caps = m.pax_headers.get("PAKFIRE.capabilities", None)
			if caps:
				f.write(" %s" % caps)
			else:
				f.write(" -")

			# The file name must come last so that it may contain spaces.
			f.write(" %s" % m.name)

			f.write("\n")

		log.info("")

		datafile.close()
		f.close()

		return filelist

	def run(self):
		raise NotImplementedError

	def getsize(self, datafile):
		size = 0

		if self.payload_compression == "xz":
			t = tar.InnerTarFileXz.open(datafile)
		else:
			t = tar.InnerTarFile.open(datafile)

		while True:
			m = t.next()
			if not m:
				break

			size += m.size

		t.close()
		return size


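# Illustrative sketch, not part of upstream Pakfire: this shows how the
# "chksums" member that Packager.save() appends to a finished archive could be
# verified when the package is read back. Every line pairs an archive member
# name with the SHA512 hex digest of its contents. The helper name is an
# assumption for this example; it only relies on the module-level imports and
# on BUFFER_SIZE from pakfire.constants.
def _verify_package_chksums(path):
	with tarfile.open(path) as t:
		# Parse the "chksums" member into a dictionary.
		chksums = {}
		f = t.extractfile("chksums")
		for line in f.read().decode().splitlines():
			arcname, hexdigest = line.split()
			chksums[arcname] = hexdigest
		f.close()

		# Re-hash every member that has a recorded checksum.
		for arcname, hexdigest in chksums.items():
			h = hashlib.new("sha512")

			f = t.extractfile(arcname)
			while True:
				buf = f.read(BUFFER_SIZE)
				if not buf:
					break
				h.update(buf)
			f.close()

			if not h.hexdigest() == hexdigest:
				return False

	return True

# Example: _verify_package_chksums("/path/to/package.pfm") --> True

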
class BinaryPackager(Packager):
	payload_compression = "xz"

	def __init__(self, pakfire, pkg, builder, buildroot):
		Packager.__init__(self, pakfire, pkg)

		self.builder = builder
		self.buildroot = buildroot

	def create_metafile(self, datafile):
		info = collections.defaultdict(lambda: "")

		# Extract datafile in temporary directory and scan for dependencies.
		tmpdir = self.mktemp(directory=True)

		if self.payload_compression == "xz":
			tarfile = tar.InnerTarFileXz.open(datafile)
		else:
			tarfile = tar.InnerTarFile.open(datafile)

		tarfile.extractall(path=tmpdir)
		tarfile.close()

		# Run the dependency tracker.
		self.pkg.track_dependencies(self.builder, tmpdir)

		# Generic package information including Pakfire information.
		info.update({
			"pakfire_version" : PAKFIRE_VERSION,
			"uuid" : self.pkg.uuid,
			"type" : "binary",
		})

		# Include distribution information.
		info.update(self.pakfire.distro.info)
		info.update(self.pkg.info)

		# Update package information for string formatting.
		info.update({
			"groups" : " ".join(self.pkg.groups),
			"prerequires" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
				for d in self.pkg.prerequires]),
			"requires" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
				for d in self.pkg.requires]),
			"provides" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
				for d in self.pkg.provides]),
			"conflicts" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
				for d in self.pkg.conflicts]),
			"obsoletes" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
				for d in self.pkg.obsoletes]),
			"recommends" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
				for d in self.pkg.recommends]),
			"suggests" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
				for d in self.pkg.suggests]),
		})

		# Format description.
		description = [PACKAGE_INFO_DESCRIPTION_LINE % l \
			for l in util.text_wrap(self.pkg.description, length=80)]
		info["description"] = "\n".join(description)

		# Build information.
		info.update({
			# Package is built right now.
			"build_time" : int(time.time()),
			"build_id" : uuid.uuid4(),
		})

		# Installed size (equals size of the uncompressed tarball).
		info.update({
			"inst_size" : self.getsize(datafile),
		})

		metafile = self.mktemp()

		f = open(metafile, "w")
		f.write(PACKAGE_INFO % info)
		f.close()

		return metafile

	def create_datafile(self):
		includes = []
		excludes = []

		# List of all patterns, which grows.
		patterns = self.pkg.files

		# Collect the orphan directory candidates inside the buildroot.
		# Directories below usr/ are skipped when their counterpart in the
		# root of the buildroot (e.g. bin for usr/bin) is a symlink, and
		# directories that are symlinks themselves are never treated as orphans.
		orphan_directories = []
		for d in ORPHAN_DIRECTORIES:
			if d.startswith("usr/"):
				b = os.path.basename(d)
				b = os.path.join(self.buildroot, b)

				if os.path.islink(b):
					continue

			d = os.path.join(self.buildroot, d)
			if not os.path.islink(d):
				orphan_directories.append(d)

		for pattern in patterns:
			# Check if we are running in include or exclude mode.
			if pattern.startswith("!"):
				files = excludes

				# Strip the ! character.
				pattern = pattern[1:]
			else:
				files = includes

			# Expand file to point to chroot.
			if pattern.startswith("/"):
				pattern = pattern[1:]
			pattern = os.path.join(self.buildroot, pattern)

			# Recognize the type of the pattern. Patterns could be a glob
			# pattern that is expanded here or just a directory which will
			# be included recursively.
			if "*" in pattern or "?" in pattern or ("[" in pattern and "]" in pattern):
				_patterns = glob.glob(pattern)
			else:
				_patterns = [pattern,]

			for pattern in _patterns:
				# Try to stat the pattern. If that is not successful, we cannot go on.
				try:
					os.lstat(pattern)
				except OSError:
					continue

				# Add directories recursively but skip those symlinks
				# that point to a directory.
				if os.path.isdir(pattern) and not os.path.islink(pattern):
					# Add directory itself.
					files.append(pattern)

					for dir, subdirs, _files in os.walk(pattern):
						for subdir in subdirs:
							if subdir in orphan_directories:
								continue

							subdir = os.path.join(dir, subdir)
							files.append(subdir)

						for file in _files:
							file = os.path.join(dir, file)
							files.append(file)

				# All other files are just added.
				else:
					files.append(pattern)

		files = []
		for file in includes:
			# Skip if file is already in the file set or
			# marked to be excluded from this archive.
			if file in excludes or file in files:
				continue

			# Skip orphan directories.
			if file in orphan_directories and not os.listdir(file):
				log.debug("Found an orphaned directory: %s" % file)
				continue

			files.append(file)

			while True:
				file = os.path.dirname(file)

				if file == self.buildroot:
					break

				if not file in files:
					files.append(file)

		files.sort()

		# Load progressbar.
		message = "%-10s : %s" % (_("Packaging"), self.pkg.friendly_name)
		pb = util.make_progress(message, len(files), eta=False)

		datafile = self.mktemp()
		if self.payload_compression == "xz":
			t = tar.InnerTarFileXz.open(datafile, mode="w")
		else:
			t = tar.InnerTarFile.open(datafile, mode="w")

		# All files in the tarball are relative to this directory.
		basedir = self.buildroot

		i = 0
		for file in files:
			if pb:
				i += 1
				pb.update(i)

			# Never package /.
			if os.path.normpath(file) == os.path.normpath(basedir):
				continue

			# Name of the file in the archive.
			arcname = "/%s" % os.path.relpath(file, basedir)

			# Add file to tarball.
			t.add(file, arcname=arcname, recursive=False)

		# Remove all packaged files.
		for file in reversed(files):
			# It's okay if we cannot remove directories
			# when they are not empty.
			if os.path.isdir(file):
				try:
					os.rmdir(file)
				except OSError:
					continue
			else:
				try:
					os.unlink(file)
				except OSError:
					pass

			while True:
				file = os.path.dirname(file)

				if not file.startswith(basedir):
					break

				try:
					os.rmdir(file)
				except OSError:
					break

		# Close the tarfile.
		t.close()

		# Finish progressbar.
		if pb:
			pb.finish()

		return datafile

	def create_scriptlets(self):
		scriptlets = []

		# Collect all prerequires for the scriptlets.
		prerequires = []

		for scriptlet_name in SCRIPTS:
			scriptlet = self.pkg.get_scriptlet(scriptlet_name)

			if not scriptlet:
				continue

			# Write script to a file.
			scriptlet_file = self.mktemp()

			lang = scriptlet["lang"]

			if lang == "bin":
				# Binary scriptlets carry the path to the script.
				path = scriptlet["path"]
				try:
					f = open(path, "rb")
				except OSError:
					raise Exception("Cannot open script file: %s" % path)

				s = open(scriptlet_file, "wb")

				while True:
					buf = f.read(BUFFER_SIZE)
					if not buf:
						break

					s.write(buf)

				f.close()
				s.close()

			elif lang == "shell":
				s = open(scriptlet_file, "w")

				# Write shell script to file.
				s.write("#!/bin/sh -e\n\n")
				s.write(scriptlet["scriptlet"])
				s.write("\n\nexit 0\n")
				s.close()

				if scriptlet_name in SCRIPTS_PREREQUIRES:
					# Shell scripts require a shell to be executed.
					prerequires.append("/bin/sh")

					prerequires += self.builder.find_prerequires(scriptlet_file)

			elif lang == "python":
				# Write the code to the scriptlet file.
				s = open(scriptlet_file, "w")
				s.write(scriptlet["scriptlet"])
				s.close()

			else:
				raise Exception("Unknown scriptlet language: %s" % scriptlet["lang"])

			scriptlets.append((scriptlet_name, scriptlet_file))

		# Cleanup prerequires.
		self.pkg.update_prerequires(prerequires)

		return scriptlets

	def find_files(self, datafile, patterns):
		if self.payload_compression == "xz":
			datafile = tar.InnerTarFileXz.open(datafile)
		else:
			datafile = tar.InnerTarFile.open(datafile)

		members = datafile.getmembers()

		files = []
		dirs = []

		# Find all directories in the file list.
		for file in patterns:
			if file.startswith("/"):
				file = file[1:]

			for member in members:
				if member.name == file and member.isdir():
					dirs.append(file)

		for d in dirs:
			for member in members:
				if not member.isdir() and member.name.startswith(d):
					files.append(member.name)

		for pattern in patterns:
			if pattern.startswith("/"):
				pattern = pattern[1:]

			for member in members:
				if not fnmatch.fnmatch(member.name, pattern):
					continue

				if member.name in files:
					continue

				files.append(member.name)

		# Sort list alphabetically.
		files.sort()

		return files

	def create_configfiles(self, datafile):
		files = self.find_files(datafile, self.pkg.configfiles)

		configsfile = self.mktemp()

		f = open(configsfile, "w")
		for file in files:
			f.write("%s\n" % file)
		f.close()

		return configsfile

	def create_datafiles(self, datafile):
		files = self.find_files(datafile, self.pkg.datafiles)

		datafile = self.mktemp()

		f = open(datafile, "w")
		for file in files:
			f.write("%s\n" % file)
		f.close()

		return datafile

	def run(self, resultdir):
		# Add all files to this package.
		datafile = self.create_datafile()

		# Get filelist from datafile.
		filelist = self.create_filelist(datafile)
		configfiles = self.create_configfiles(datafile)
		datafiles = self.create_datafiles(datafile)

		# Create script files.
		scriptlets = self.create_scriptlets()

		metafile = self.create_metafile(datafile)

		# Add files to the tar archive in correct order.
		self.add(metafile, "info")
		self.add(filelist, "filelist")
		self.add(configfiles, "configfiles")
		self.add(datafiles, "datafiles")
		self.add(datafile, "data.img")

		for scriptlet_name, scriptlet_file in scriptlets:
			self.add(scriptlet_file, "scriptlets/%s" % scriptlet_name)

		# Build the final package.
		tempfile = self.mktemp()
		self.save(tempfile)

		# Add architecture information to path.
		resultdir = "%s/%s" % (resultdir, self.pkg.arch)

		if not os.path.exists(resultdir):
			os.makedirs(resultdir)

		resultfile = os.path.join(resultdir, self.pkg.package_filename)
		log.info("Saving package to %s" % resultfile)
		try:
			os.link(tempfile, resultfile)
		except OSError:
			shutil.copy2(tempfile, resultfile)


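# Illustrative sketch, not part of upstream Pakfire: this shows how one line of
# the "filelist" member written by Packager.create_filelist() above could be
# parsed again. The fields come in a fixed order and the file name is written
# last, precisely so that names containing spaces survive a split with a field
# limit. The helper name and the plain dictionary it returns are assumptions
# for this example only.
def _parse_filelist_line(line):
	# type, size, uname, gname, mode, mtime, sha512 (or "-"), capabilities (or "-"), name
	ftype, size, uname, gname, mode, mtime, chksum, caps, name = \
		line.rstrip("\n").split(None, 8)

	return {
		"type"         : ftype,
		"size"         : int(size),
		"uname"        : uname,
		"gname"        : gname,
		"mode"         : int(mode),
		"mtime"        : int(mtime),
		"chksum"       : None if chksum == "-" else chksum,
		"capabilities" : None if caps == "-" else caps,
		"name"         : name,
	}

# Example: _parse_filelist_line(line)["name"] keeps any spaces in the file name.

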
class SourcePackager(Packager):
	payload_compression = None

	def create_metafile(self, datafile):
		info = collections.defaultdict(lambda: "")

		# Generic package information including Pakfire information.
		info.update({
			"pakfire_version" : PAKFIRE_VERSION,
			"type" : "source",
		})

		# Include distribution information.
		info.update(self.pakfire.distro.info)
		info.update(self.pkg.info)

		# Size is the size of the (uncompressed) datafile.
		info["inst_size"] = self.getsize(datafile)

		# Update package information for string formatting.
		requires = [PACKAGE_INFO_DEPENDENCY_LINE % r for r in self.pkg.requires]
		info.update({
			"groups" : " ".join(self.pkg.groups),
			"requires" : "\n".join(requires),
		})

		# Format description.
		description = [PACKAGE_INFO_DESCRIPTION_LINE % l \
			for l in util.text_wrap(self.pkg.description, length=80)]
		info["description"] = "\n".join(description)

		# Build information.
		info.update({
			# Package is built right now.
			"build_time" : int(time.time()),
			"build_id" : uuid.uuid4(),
		})

		# Arches equals supported arches.
		info["arch"] = self.pkg.supported_arches

		# Set UUID.
		# XXX replace this by the payload hash
		info.update({
			"uuid" : uuid.uuid4(),
		})

		metafile = self.mktemp()

		f = open(metafile, "w")
		f.write(PACKAGE_INFO % info)
		f.close()

		return metafile

	def create_datafile(self):
		# Create a list of all files that have to be put into the
		# package.
		files = []

		# Download all files that go into the package.
		for file in self.pkg.download():
			assert os.path.getsize(file), "Don't package empty files"
			files.append(("files/%s" % os.path.basename(file), file))

		# Add all files in the package directory.
		for file in self.pkg.files:
			files.append((os.path.relpath(file, self.pkg.path), file))

		# Add files in alphabetical order.
		files.sort()

		# Load progressbar.
		message = "%-10s : %s" % (_("Packaging"), self.pkg.friendly_name)
		pb = util.make_progress(message, len(files), eta=False)

		filename = self.mktemp()
		if self.payload_compression == "xz":
			datafile = tar.InnerTarFileXz.open(filename, mode="w")
		else:
			datafile = tar.InnerTarFile.open(filename, mode="w")

		i = 0
		for arcname, file in files:
			if pb:
				i += 1
				pb.update(i)

			datafile.add(file, arcname)
		datafile.close()

		if pb:
			pb.finish()

		return filename

	def run(self, resultdir):
		# Create resultdir if it does not exist yet.
		if not os.path.exists(resultdir):
			os.makedirs(resultdir)

		log.info(_("Building source package %s:") % self.pkg.package_filename)

		# The filename this source package is saved to.
		target_filename = os.path.join(resultdir, self.pkg.package_filename)

		# Add datafile to package.
		datafile = self.create_datafile()

		# Create filelist out of data.
		filelist = self.create_filelist(datafile)

		# Create metadata.
		metafile = self.create_metafile(datafile)

		# Add files to the tar archive in correct order.
		self.add(metafile, "info")
		self.add(filelist, "filelist")
		self.add(datafile, "data.img")

		# Build the final tarball.
		try:
			self.save(target_filename)
		except:
			# Remove the target file when anything went wrong.
			os.unlink(target_filename)
			raise

		return target_filename
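

# Illustrative sketch, not part of upstream Pakfire: this lists the members of
# a finished package archive in the order they were written. Packager.save()
# stores "pakfire-format" first and appends "chksums" last, while the run()
# methods above add the payload members (info, filelist, data.img, ...) in
# between. The function name is an assumption for this example only.
def _list_package_members(path):
	with tarfile.open(path) as t:
		names = t.getnames()

	# The format marker must lead the archive so readers can check it first.
	assert names and names[0] == "pakfire-format"

	return names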