2 ###############################################################################
4 # Pakfire - The IPFire package management system #
5 # Copyright (C) 2011 Pakfire development team #
7 # This program is free software: you can redistribute it and/or modify #
8 # it under the terms of the GNU General Public License as published by #
9 # the Free Software Foundation, either version 3 of the License, or #
10 # (at your option) any later version. #
12 # This program is distributed in the hope that it will be useful, #
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of #
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
15 # GNU General Public License for more details. #
17 # You should have received a copy of the GNU General Public License #
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. #
20 ###############################################################################
37 log
= logging
.getLogger("pakfire")
import collections
import fnmatch
import glob
import hashlib
import logging
import os
import shutil
import tarfile
import tempfile
import time
import uuid

# NOTE(review): the payload-tar helper module ("tar", providing InnerTarFile /
# InnerTarFileXz, used throughout this file) is referenced below but its
# import was lost in this extraction -- restore it from the upstream source.

import pakfire.util as util

from pakfire.constants import *
from pakfire.i18n import _
class Packager(object):
    """
    Base class for all package writers.

    A packager collects a list of (arcname, filename) pairs via add() and
    finally writes them into a PAX-format tarball in save(). The first
    member of every archive is the "pakfire-format" marker, the last one is
    a "chksums" file carrying a SHA512 digest per member.

    NOTE(review): this block was reconstructed from a garbled extraction
    with missing lines -- verify against the upstream pakfire sources.
    """

    # Compression of the payload ("xz") or None for an uncompressed payload.
    payload_compression = None

    def __init__(self, pakfire, pkg):
        self.pakfire = pakfire
        self.pkg = pkg

        # (arcname, filename) tuples that save() packs into the archive.
        self.files = []

        # Temporary files/directories that are removed again on destruction.
        self.tmpfiles = []

    def __del__(self):
        # Best-effort cleanup of everything mktemp() created.
        for file in self.tmpfiles:
            if not os.path.exists(file):
                continue

            log.debug("Removing tmpfile: %s" % file)

            if os.path.isdir(file):
                shutil.rmtree(file)
            else:
                os.remove(file)

    def mktemp(self, directory=False):
        """
        Create a temporary file (or, with directory=True, a temporary
        directory below LOCAL_TMP_PATH), register it for cleanup and
        return its path.
        """
        if directory:
            filename = os.path.join("/", LOCAL_TMP_PATH, util.random_string())
            os.makedirs(filename)
        else:
            # delete=False: the file must survive the handle; it is removed
            # later in __del__().
            f = tempfile.NamedTemporaryFile(mode="w", delete=False)
            f.close()

            filename = f.name

        self.tmpfiles.append(filename)

        return filename

    def save(self, filename):
        """
        Write the final package tarball to `filename`.

        Member order matters: "pakfire-format" must be first, "chksums"
        is appended last after all payload members.
        """
        # Create a new tar archive.
        tar = tarfile.TarFile(filename, mode="w", format=tarfile.PAX_FORMAT)

        # Add package format information.
        # Must always be the first file in the archive.
        formatfile = self.create_package_format()
        tar.add(formatfile, arcname="pakfire-format")

        # XXX make sure all files belong to the root user

        # Create checksum file.
        chksumsfile = self.mktemp()
        chksums = open(chksumsfile, "w")

        # Add all files to tar file.
        for arcname, filename in self.files:
            tar.add(filename, arcname=arcname)

            # Calculate the hash sum of the added file
            # and store it in the chksums file.
            h = hashlib.new("sha512")
            # "rb" + context manager: hash raw bytes and never leak the handle.
            with open(filename, "rb") as f:
                while True:
                    buf = f.read(BUFFER_SIZE)
                    if not buf:
                        break
                    h.update(buf)

            chksums.write("%-10s %s\n" % (arcname, h.hexdigest()))

        # Close checksum file and attach it to the end.
        chksums.close()
        tar.add(chksumsfile, "chksums")

        # Close the tar file.
        tar.close()

    def add(self, filename, arcname=None):
        """
        Register `filename` for inclusion in the archive under `arcname`
        (defaults to the basename of `filename`).
        """
        if not arcname:
            arcname = os.path.basename(filename)

        log.debug("Adding %s (as %s) to tarball." % (filename, arcname))
        self.files.append((arcname, filename))

    def create_package_format(self):
        """Write the package format version marker file and return its path."""
        filename = self.mktemp()

        with open(filename, "w") as f:
            f.write("%s\n" % PACKAGE_FORMAT)

        return filename

    def create_filelist(self, datafile):
        """
        Walk the (inner) payload tarball and write one line of metadata per
        member: type, size, owner, group, mode, mtime, SHA512 (for regular
        files), capabilities and -- last, because it may contain spaces --
        the file name. Returns the path of the written file list.
        """
        filelist = self.mktemp()

        f = open(filelist, "w")

        # Open the payload with the matching compression.
        if self.payload_compression == "xz":
            datafile = tar.InnerTarFileXz.open(datafile)
        else:
            datafile = tar.InnerTarFile.open(datafile)

        while True:
            m = datafile.next()
            if not m:
                break

            log.debug("  %s %-8s %-8s %s %6s %s" % \
                (tarfile.filemode(m.mode), m.uname, m.gname,
                "%d-%02d-%02d %02d:%02d:%02d" % time.localtime(m.mtime)[:6],
                util.format_size(m.size), m.name))

            f.write("%(type)1s %(size)-10d %(uname)-10s %(gname)-10s %(mode)-6d %(mtime)-12d" \
                % m.get_info(tarfile.ENCODING, "strict"))

            # Calculate SHA512 hash of regular files.
            if m.isreg():
                mobj = datafile.extractfile(m)
                h = hashlib.new("sha512")

                while True:
                    buf = mobj.read(BUFFER_SIZE)
                    if not buf:
                        break
                    h.update(buf)

                mobj.close()
                f.write(" %s" % h.hexdigest())
            else:
                # Placeholder keeps the column count stable for non-regular files.
                f.write(" -")

            caps = m.pax_headers.get("PAKFIRE.capabilities", None)
            if caps:
                f.write(" %s" % caps)
            else:
                f.write(" -")

            # The file name must be the last argument to contain spaces.
            f.write(" %s" % m.name)
            f.write("\n")

        f.close()
        datafile.close()

        return filelist

    def run(self, resultdir):
        """Build the package into `resultdir`. Implemented by subclasses."""
        raise NotImplementedError

    def getsize(self, datafile):
        """
        Return the sum of the member sizes of the (uncompressed) payload
        tarball -- i.e. the installed size of the package.
        """
        size = 0

        if self.payload_compression == "xz":
            t = tar.InnerTarFileXz.open(datafile)
        else:
            t = tar.InnerTarFile.open(datafile)

        while True:
            m = t.next()
            if not m:
                break

            size += m.size

        t.close()

        return size
class BinaryPackager(Packager):
    """
    Packager for binary packages.

    Collects the files matched by the package's file patterns out of the
    build root, writes the xz-compressed payload, generates metadata,
    file list, config-/datafile lists and scriptlets, and assembles the
    final package archive.

    NOTE(review): reconstructed from a garbled extraction with missing
    lines -- verify against the upstream pakfire sources.
    """

    payload_compression = "xz"

    def __init__(self, pakfire, pkg, builder, buildroot):
        Packager.__init__(self, pakfire, pkg)

        self.builder = builder
        self.buildroot = buildroot

    def create_metafile(self, datafile):
        """
        Render the package "info" file from PACKAGE_INFO and return its path.
        Runs the dependency tracker over an extracted copy of `datafile`.
        """
        info = collections.defaultdict(lambda: "")

        # Extract datafile in temporary directory and scan for dependencies.
        tmpdir = self.mktemp(directory=True)

        if self.payload_compression == "xz":
            tarfile = tar.InnerTarFileXz.open(datafile)
        else:
            tarfile = tar.InnerTarFile.open(datafile)

        tarfile.extractall(path=tmpdir)
        tarfile.close()

        # Run the dependency tracker.
        self.pkg.track_dependencies(self.builder, tmpdir)

        # Generic package information including Pakfire information.
        info.update({
            "pakfire_version" : PAKFIRE_VERSION,
            "uuid"            : self.pkg.uuid,
        })

        # Include distribution information.
        info.update(self.pakfire.distro.info)
        info.update(self.pkg.info)

        # Update package information for string formatting.
        info.update({
            "groups"      : " ".join(self.pkg.groups),
            "prerequires" : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
                for d in self.pkg.prerequires]),
            "requires"    : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
                for d in self.pkg.requires]),
            "provides"    : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
                for d in self.pkg.provides]),
            "conflicts"   : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
                for d in self.pkg.conflicts]),
            "obsoletes"   : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
                for d in self.pkg.obsoletes]),
            "recommends"  : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
                for d in self.pkg.recommends]),
            "suggests"    : "\n".join([PACKAGE_INFO_DEPENDENCY_LINE % d \
                for d in self.pkg.suggests]),
        })

        # Format description.
        description = [PACKAGE_INFO_DESCRIPTION_LINE % l \
            for l in util.text_wrap(self.pkg.description, length=80)]
        info["description"] = "\n".join(description)

        info.update({
            # Package it built right now.
            "build_time" : int(time.time()),
            "build_id"   : uuid.uuid4(),
        })

        # Installed size (equals size of the uncompressed tarball).
        info.update({
            "inst_size" : self.getsize(datafile),
        })

        metafile = self.mktemp()

        with open(metafile, "w") as f:
            f.write(PACKAGE_INFO % info)

        return metafile

    def create_datafile(self):
        """
        Build the payload tarball out of the build root and return its path.
        Matched files are removed from the build root afterwards so later
        packages cannot pick them up again.
        """
        includes = []
        excludes = []

        # List of all patterns, which grows.
        patterns = self.pkg.files

        # Collect directories that are only packaged when non-empty.
        # NOTE(review): reconstructed -- verify against upstream.
        orphan_directories = []
        for d in ORPHAN_DIRECTORIES:
            if d.startswith("usr/"):
                b = os.path.basename(d)
                b = os.path.join(self.buildroot, b)

                # Skip when the corresponding top-level path is a symlink
                # (merged-/usr style layouts).
                if os.path.islink(b):
                    continue

            d = os.path.join(self.buildroot, d)
            if not os.path.islink(d):
                orphan_directories.append(d)

        for pattern in patterns:
            # Check if we are running in include or exclude mode.
            if pattern.startswith("!"):
                files = excludes

                # Strip the ! character.
                pattern = pattern[1:]
            else:
                files = includes

            # Expand file to point to chroot.
            if pattern.startswith("/"):
                pattern = pattern[1:]
            pattern = os.path.join(self.buildroot, pattern)

            # Recognize the type of the pattern. Patterns could be a glob
            # pattern that is expanded here or just a directory which will
            # be included recursively.
            if "*" in pattern or "?" in pattern or ("[" in pattern and "]" in pattern):
                _patterns = glob.glob(pattern)
            else:
                _patterns = [pattern,]

            for pattern in _patterns:
                # Try to stat the pattern. If that is not successful, we cannot go on.
                try:
                    os.lstat(pattern)
                except OSError:
                    continue

                # Add directories recursively but skip those symlinks
                # that point to a directory.
                if os.path.isdir(pattern) and not os.path.islink(pattern):
                    # Add directory itself.
                    files.append(pattern)

                    for dir, subdirs, _files in os.walk(pattern):
                        for subdir in subdirs:
                            if subdir in orphan_directories:
                                continue

                            subdir = os.path.join(dir, subdir)
                            files.append(subdir)

                        for file in _files:
                            file = os.path.join(dir, file)
                            files.append(file)

                # All other files are just added.
                else:
                    files.append(pattern)

        files = []
        for file in includes:
            # Skip if file is already in the file set or
            # marked to be excluded from this archive.
            if file in excludes or file in files:
                continue

            # Skip orphan directories.
            if file in orphan_directories and not os.listdir(file):
                log.debug("Found an orphaned directory: %s" % file)
                continue

            files.append(file)

            # Add all parent directories up to (but excluding) the buildroot.
            while True:
                file = os.path.dirname(file)

                if file == self.buildroot:
                    break

                if not file in files:
                    files.append(file)

        files.sort()

        # Create progress bar.
        message = "%-10s : %s" % (_("Packaging"), self.pkg.friendly_name)
        pb = util.make_progress(message, len(files), eta=False)

        datafile = self.mktemp()
        if self.payload_compression == "xz":
            t = tar.InnerTarFileXz.open(datafile, mode="w")
        else:
            t = tar.InnerTarFile.open(datafile, mode="w")

        # All files in the tarball are relative to this directory.
        basedir = self.buildroot

        i = 0
        for file in files:
            if pb:
                i += 1
                pb.update(i)

            # Never package the build root itself.
            if os.path.normpath(file) == os.path.normpath(basedir):
                continue

            # Name of the file in the archive.
            arcname = "/%s" % os.path.relpath(file, basedir)

            # Add file to tarball.
            t.add(file, arcname=arcname, recursive=False)

        # Remove all packaged files.
        for file in reversed(files):
            # It's okay if we cannot remove directories,
            # when they are not empty.
            if os.path.isdir(file):
                try:
                    os.rmdir(file)
                except OSError:
                    continue
            else:
                try:
                    os.unlink(file)
                except OSError:
                    pass

            # Prune now-empty parent directories inside the build root.
            while True:
                file = os.path.dirname(file)

                if not file.startswith(basedir):
                    break

                try:
                    os.rmdir(file)
                except OSError:
                    break

        # Close the tarfile.
        t.close()

        # Finish progressbar.
        if pb:
            pb.finish()

        return datafile

    def create_scriptlets(self):
        """
        Write every scriptlet of the package to its own temporary file and
        return a list of (name, path) tuples. Also updates the package's
        prerequires with what the scriptlets need at install time.
        """
        scriptlets = []

        # Collect all prerequires for the scriptlets.
        prerequires = []

        for scriptlet_name in SCRIPTS:
            scriptlet = self.pkg.get_scriptlet(scriptlet_name)

            if not scriptlet:
                continue

            # Write script to a file.
            scriptlet_file = self.mktemp()

            lang = scriptlet["lang"]

            if lang == "bin":
                # Pre-compiled scriptlet: copy the binary verbatim.
                try:
                    f = open(lang["path"], "rb")
                except OSError:
                    raise Exception("Cannot open script file: %s" % lang["path"])

                s = open(scriptlet_file, "wb")

                while True:
                    buf = f.read(BUFFER_SIZE)
                    if not buf:
                        break
                    s.write(buf)

                f.close()
                s.close()

            elif lang == "shell":
                s = open(scriptlet_file, "w")

                # Write shell script to file.
                s.write("#!/bin/sh -e\n\n")
                s.write(scriptlet["scriptlet"])
                s.write("\n\nexit 0\n")
                s.close()

                if scriptlet_name in SCRIPTS_PREREQUIRES:
                    # Shell scripts require a shell to be executed.
                    prerequires.append("/bin/sh")

                    prerequires += self.builder.find_prerequires(scriptlet_file)

            elif lang == "python":
                # Write the code to the scriptlet file.
                s = open(scriptlet_file, "w")
                s.write(scriptlet["scriptlet"])
                s.close()

            else:
                raise Exception("Unknown scriptlet language: %s" % scriptlet["lang"])

            scriptlets.append((scriptlet_name, scriptlet_file))

        # Cleanup prerequires.
        self.pkg.update_prerequires(prerequires)

        return scriptlets

    def find_files(self, datafile, patterns):
        """
        Return the sorted list of payload member names that match
        `patterns` (fnmatch globs; matched directories include all files
        below them). Leading slashes in patterns are stripped.
        """
        if self.payload_compression == "xz":
            datafile = tar.InnerTarFileXz.open(datafile)
        else:
            datafile = tar.InnerTarFile.open(datafile)

        members = datafile.getmembers()

        files = []
        dirs = []

        # Find all directories in the file list.
        for file in patterns:
            if file.startswith("/"):
                file = file[1:]

            for member in members:
                if member.name == file and member.isdir():
                    dirs.append(file)

        # Include every file below a matched directory.
        for d in dirs:
            for member in members:
                if not member.isdir() and member.name.startswith(d):
                    files.append(member.name)

        for pattern in patterns:
            if pattern.startswith("/"):
                pattern = pattern[1:]

            for member in members:
                if not fnmatch.fnmatch(member.name, pattern):
                    continue

                if member.name in files:
                    continue

                files.append(member.name)

        # Sort list alphabetically.
        files.sort()

        return files

    def create_configfiles(self, datafile):
        """Write the list of configuration files and return its path."""
        files = self.find_files(datafile, self.pkg.configfiles)

        configsfile = self.mktemp()

        with open(configsfile, "w") as f:
            for file in files:
                f.write("%s\n" % file)

        return configsfile

    def create_datafiles(self, datafile):
        """Write the list of data files and return its path."""
        files = self.find_files(datafile, self.pkg.datafiles)

        datafile = self.mktemp()

        with open(datafile, "w") as f:
            for file in files:
                f.write("%s\n" % file)

        return datafile

    def run(self, resultdir):
        """
        Build the binary package and store it below
        `resultdir/<arch>/<package_filename>`. Returns nothing; the
        result path is logged.
        """
        # Add all files to this package.
        datafile = self.create_datafile()

        # Get filelist from datafile.
        filelist = self.create_filelist(datafile)
        configfiles = self.create_configfiles(datafile)
        datafiles = self.create_datafiles(datafile)

        # Create script files.
        scriptlets = self.create_scriptlets()

        metafile = self.create_metafile(datafile)

        # Add files to the tar archive in correct order.
        self.add(metafile, "info")
        self.add(filelist, "filelist")
        self.add(configfiles, "configfiles")
        self.add(datafiles, "datafiles")
        self.add(datafile, "data.img")

        for scriptlet_name, scriptlet_file in scriptlets:
            self.add(scriptlet_file, "scriptlets/%s" % scriptlet_name)

        # Build the final package.
        # (Renamed from "tempfile" which shadowed the tempfile module.)
        packagefile = self.mktemp()
        self.save(packagefile)

        # Add architecture information to path.
        resultdir = "%s/%s" % (resultdir, self.pkg.arch)

        if not os.path.exists(resultdir):
            os.makedirs(resultdir)

        resultfile = os.path.join(resultdir, self.pkg.package_filename)
        log.info("Saving package to %s" % resultfile)

        # Hard-link when possible, fall back to a copy across filesystems.
        try:
            os.link(packagefile, resultfile)
        except OSError:
            shutil.copy2(packagefile, resultfile)
class SourcePackager(Packager):
    """
    Packager for source packages.

    Bundles the downloaded source files plus everything in the package
    directory into an uncompressed payload and writes the final source
    package archive.

    NOTE(review): reconstructed from a garbled extraction with missing
    lines -- verify against the upstream pakfire sources.
    """

    payload_compression = None

    def create_metafile(self, datafile):
        """
        Render the source package "info" file from PACKAGE_INFO and
        return its path.
        """
        info = collections.defaultdict(lambda: "")

        # Generic package information including Pakfire information.
        info.update({
            "pakfire_version" : PAKFIRE_VERSION,
        })

        # Include distribution information.
        info.update(self.pakfire.distro.info)
        info.update(self.pkg.info)

        # Size is the size of the (uncompressed) datafile.
        info["inst_size"] = self.getsize(datafile)

        # Update package information for string formatting.
        requires = [PACKAGE_INFO_DEPENDENCY_LINE % r for r in self.pkg.requires]
        info.update({
            "groups"   : " ".join(self.pkg.groups),
            "requires" : "\n".join(requires),
        })

        # Format description.
        description = [PACKAGE_INFO_DESCRIPTION_LINE % l \
            for l in util.text_wrap(self.pkg.description, length=80)]
        info["description"] = "\n".join(description)

        info.update({
            # Package it built right now.
            "build_time" : int(time.time()),
            "build_id"   : uuid.uuid4(),
        })

        # Arches equals supported arches.
        info["arch"] = self.pkg.supported_arches

        # XXX replace this by the payload hash
        info.update({
            "uuid" : uuid.uuid4(),
        })

        metafile = self.mktemp()

        with open(metafile, "w") as f:
            f.write(PACKAGE_INFO % info)

        return metafile

    def create_datafile(self):
        """
        Pack all source files into the payload tarball and return its path.
        Downloaded files land under "files/", package-directory files keep
        their relative names.
        """
        # Create a list of all files that have to be put into the package.
        files = []

        # Download all files that go into the package.
        for file in self.pkg.download():
            assert os.path.getsize(file), "Don't package empty files"
            files.append(("files/%s" % os.path.basename(file), file))

        # Add all files in the package directory.
        for file in self.pkg.files:
            files.append((os.path.relpath(file, self.pkg.path), file))

        # Add files in alphabetical order.
        files.sort()

        # Create progress bar.
        message = "%-10s : %s" % (_("Packaging"), self.pkg.friendly_name)
        pb = util.make_progress(message, len(files), eta=False)

        filename = self.mktemp()
        if self.payload_compression == "xz":
            datafile = tar.InnerTarFileXz.open(filename, mode="w")
        else:
            datafile = tar.InnerTarFile.open(filename, mode="w")

        i = 0
        for arcname, file in files:
            if pb:
                i += 1
                pb.update(i)

            datafile.add(file, arcname)
        datafile.close()

        if pb:
            pb.finish()

        return filename

    def run(self, resultdir):
        """
        Build the source package into `resultdir` and return the path of
        the written package file. A partially written target is removed
        when saving fails.
        """
        # Create resultdir if it does not exist yet.
        if not os.path.exists(resultdir):
            os.makedirs(resultdir)

        log.info(_("Building source package %s:") % self.pkg.package_filename)

        # The filename where this source package is saved at.
        target_filename = os.path.join(resultdir, self.pkg.package_filename)

        # Add datafile to package.
        datafile = self.create_datafile()

        # Create filelist out of data.
        filelist = self.create_filelist(datafile)

        metafile = self.create_metafile(datafile)

        # Add files to the tar archive in correct order.
        self.add(metafile, "info")
        self.add(filelist, "filelist")
        self.add(datafile, "data.img")

        # Build the final tarball.
        try:
            self.save(target_filename)
        except BaseException:
            # Remove the target file when anything went wrong.
            os.unlink(target_filename)
            raise

        return target_filename