git.ipfire.org Git - pakfire.git/commitdiff
One huge commit, that breaks pakfire.
authorMichael Tremer <michael.tremer@ipfire.org>
Thu, 25 Aug 2011 16:43:07 +0000 (18:43 +0200)
committerMichael Tremer <michael.tremer@ipfire.org>
Thu, 25 Aug 2011 16:43:07 +0000 (18:43 +0200)
This commit introduces the new format of the pakfire makefiles.

39 files changed:
MANIFEST.in
examples/pakfire.conf
macros/build.macro [new file with mode: 0644]
macros/constants.macro [new file with mode: 0644]
macros/package-default.macro [new file with mode: 0644]
macros/systemd.macro [new file with mode: 0644]
macros/templates.macro [new file with mode: 0644]
pakfire/actions.py
pakfire/api.py
pakfire/base.py
pakfire/builder.py
pakfire/chroot.py
pakfire/cli.py
pakfire/constants.py
pakfire/distro.py
pakfire/downloader.py
pakfire/errors.py
pakfire/i18n.py
pakfire/packages/__init__.py
pakfire/packages/base.py
pakfire/packages/binary.py
pakfire/packages/file.py
pakfire/packages/lexer.py [new file with mode: 0644]
pakfire/packages/make.py
pakfire/packages/packager.py
pakfire/packages/solv.py
pakfire/packages/source.py
pakfire/packages/virtual.py [deleted file]
pakfire/repository/__init__.py
pakfire/repository/base.py
pakfire/repository/database.py
pakfire/repository/index.py
pakfire/util.py
po/POTFILES.in
po/de_DE.po
po/es.po
po/pakfire.pot
scripts/pakfire
setup.py

index f510649b5d58a90f07382f12f540be73ea86427e..37baa01355283abe5cb00bd731f37e67ee5f623a 100644 (file)
@@ -1,2 +1,4 @@
 include INSTALL Makefile
 recursive-include examples *
+recursive-include po *
+recursive-include src *.c *.h
index 6e1b5e54ec54aff311f834dc199af142378a9b4e..5a07828dfff1aea5c3ba4e69f152f3c11462ef4e 100644 (file)
@@ -21,9 +21,12 @@ name = IPFire
 version = 3.0-prealpha2
 slogan = Gluttony
 
+maintainer = IPFire Development Team <developers@ipfire.org>
 vendor = ipfire
 arch = i686
 
+source_dl = http://source.ipfire.org/source-3.x/
+
 
 [master]
 server = http://172.28.1.250/api/master
diff --git a/macros/build.macro b/macros/build.macro
new file mode 100644 (file)
index 0000000..f4257d1
--- /dev/null
@@ -0,0 +1,146 @@
+
+
+def MACRO_EXTRACT_TARBALL
+       set +x
+       for source in %{sources}; do
+               %{MACRO_EXTRACT} %{DIR_DL}/${source} || exit 1
+       done
+       unset source
+
+       [ -d "%{DIR_APP}" ] && cd %{DIR_APP}
+       set -x
+end
+
+def MACRO_INSTALL_FILES
+       %{MACRO_INSTALL_DEFAULT_FILES}
+       %{MACRO_INSTALL_SYSTEMD_FILES}
+       %{MACRO_INSTALL_SYSTEMD_TMPFILES}
+       %{MACRO_INSTALL_PAM_FILES}
+       %{MACRO_INSTALL_LOGROTATE_FILES}
+end
+
+# XXX to be removed soon
+def MACRO_INSTALL_DEFAULT_FILES
+       for file in %{DIR_SOURCE}/default/*; do
+               [ -e "${file}" ] || continue
+               mkdir -p %{BUILDROOT}/etc/default
+               cp -vf ${file} %{BUILDROOT}/etc/default/$(basename ${file})
+       done
+       unset file
+end
+
+def MACRO_INSTALL_PAM_FILES
+       for file in %{DIR_SOURCE}/{*.pam,pam.d/*}; do
+               [ -e "${file}" ] || continue
+               mkdir -p %{BUILDROOT}/etc/pam.d
+               cp -vf ${file} %{BUILDROOT}/etc/pam.d/$(basename ${file%*.pam})
+       done
+       unset file
+end
+
+def MACRO_INSTALL_LOGROTATE_FILES
+       for file in %{DIR_SOURCE}/{*.logrotate,logrotate/*}; do
+               [ -e "${file}" ] || continue
+               mkdir -p %{BUILDROOT}/etc/logrotate.d
+               cp -vf ${file} %{BUILDROOT}/etc/logrotate.d/$(basename ${file%*.logrotate})
+       done
+       unset file
+end
+
+def MACRO_PYTHON_COMPILE
+       # XXX TODO
+end
+
+# These variables are used if you have to add some targets
+# directly to the make command.
+make_build_targets   =
+make_install_targets = install
+
+build_requires = gcc glibc-devel
+
+# Pre-defined build scripts.
+build
+       def _prepare
+               [ -d "%{DIR_SRC}" ] && cd %{DIR_SRC}
+
+               %{prepare}
+       end
+
+       def prepare
+               # Extract source tarball.
+               %{MACRO_EXTRACT_TARBALL}
+
+               # Apply all patches.
+               %{MACRO_PATCHES}
+
+               # Run custom commands.
+               %{prepare_cmds}
+       end
+
+       def prepare_cmds
+       end
+
+       def _build
+               [ -d "%{DIR_APP}" ] && cd %{DIR_APP}
+
+               %{build}
+       end
+
+       def build
+               if [ -e "%{CONFIGURE_SCRIPT}" ]; then
+                       ./%{CONFIGURE_SCRIPT} \
+                               %{configure_options}
+
+                       ${MACRO_FIX_LIBTOOL}
+               fi
+
+               # Run custom commands.
+               %{configure_cmds}
+
+               make %{PARALLELISMFLAGS} %{make_build_targets}
+
+               # Run more custom commands.
+               %{build_cmds}
+       end
+
+       def configure_cmds
+       end
+
+       def build_cmds
+       end
+
+       def _test
+               [ -d "%{DIR_APP}" ] && cd %{DIR_APP}
+
+               %{test}
+       end
+
+       def test
+       end
+
+       def _install
+               [ -d "%{DIR_APP}" ] && cd %{DIR_APP}
+
+               mkdir -pv %{BUILDROOT}
+
+               %{install}
+
+               %{MACRO_INSTALL_FILES}
+               %{MACRO_PYTHON_COMPILE}
+
+               %{install_post}
+
+               %{MACRO_QUALITY_AGENT}
+       end
+
+       def install
+               make DESTDIR=%{BUILDROOT} %{make_install_targets}
+
+               # Run custom commands.
+               %{install_cmds}
+       end
+
+       # XXX to be removed soon
+       def install_post
+       end
+end
diff --git a/macros/constants.macro b/macros/constants.macro
new file mode 100644 (file)
index 0000000..c54e3c5
--- /dev/null
@@ -0,0 +1,77 @@
+
+# XXX just for now
+PARALLELISMFLAGS = -j2
+
+BASEDIR     = /build
+BUILDROOT   = /buildroot
+
+DIR_APP     = %{DIR_SRC}/%{thisapp}
+DIR_DL      = %{BASEDIR}/files
+DIR_PATCHES = %{BASEDIR}/patches
+DIR_SRC     = /usr/src
+DIR_TMP     = /tmp
+DIR_SOURCE  = %{BASEDIR}
+
+CONFIGURE_SCRIPT = configure
+configure_options = \
+       --host=%{DISTRO_MACHINE} \
+       --build=%{DISTRO_MACHINE} \
+       --prefix=/usr
+
+patches =
+sources = %{tarball}
+tarball = %{thisapp}.tar.gz
+
+# Macro definitions
+
+# Macro to extract tarballs.
+# Guesses the compression type automatically.
+MACRO_EXTRACT = tar xvaf
+
+# Macro to define and start the quality agent.
+# Long term goal is to improve the committed code.
+MACRO_QUALITY_AGENT = quality-agent
+
+# Macro to strip debugging symbols.
+MACRO_STRIP = /usr/lib/buildsystem-tools/stripper %{BUILDROOT}
+
+def MACRO_PATCHES
+       patches="%{patches}"
+
+       if [ -n "${patches}" ]; then
+               _patches=""
+               for patch in ${patches}; do
+                       _patches="${_patches} %{DIR_PATCHES}/${patch}"
+               done
+               patches="${_patches}"
+               unset _patches
+       else
+               for patch in %{DIR_PATCHES}/*.{diff,patch{,0}}; do
+                       [ -e "${patch}" ] || continue
+                       patches="${patches} ${patch}"
+               done
+       fi
+       
+       for patch in ${patches}; do
+               case "${patch}" in
+                       *.patch0)
+                               cmd="patch -Np0"
+                               ;;
+                       *.patch|*.diff)
+                               cmd="patch -Np1"
+                               ;;
+               esac
+
+               ${cmd} -i ${patch}
+       done
+       unset cmd patch patches
+end
+
+# Remove rpath from libtool.
+def MACRO_FIX_LIBTOOL
+       if [ -e "%{DIR_APP}/libtool" ]; then
+               sed -e %{DIR_APP}/libtool \
+                       -e 's|^hardcode_libdir_flag_spec=.*|hardcode_libdir_flag_spec=""|g'
+                       -e 's|^runpath_var=LD_RUN_PATH|runpath_var=DIE_RPATH_DIE|g'
+       fi
+end
diff --git a/macros/package-default.macro b/macros/package-default.macro
new file mode 100644 (file)
index 0000000..56df52f
--- /dev/null
@@ -0,0 +1,16 @@
+
+# Epoch information should always be set, starting with zero.
+epoch      = 0
+
+# The default maintainer is the maintainer of the distribution.
+maintainer = %{DISTRO_MAINTAINER}
+
+# The default architecture is the architecture of the distribution.
+arch       = %{DISTRO_ARCH}
+
+# Place to add URLs for sources download.
+source_dl  =
+
+# A list of sources that need to be fetched in
+# order to build the package.
+sources    = %{thisapp}.tar.gz
diff --git a/macros/systemd.macro b/macros/systemd.macro
new file mode 100644 (file)
index 0000000..2ffb412
--- /dev/null
@@ -0,0 +1,23 @@
+
+
+SYSTEMD_UNIT_DIR = /lib/systemd/system
+
+SYSTEMD_TMPFILES_DIR = /usr/lib/tmpfiles.d
+
+def MACRO_INSTALL_SYSTEMD_FILES
+       for file in %{DIR_SOURCE}/systemd/*; do
+               [ -e "${file}" ] || continue
+               mkdir -p %{BUILDROOT}/%{SYSTEMD_UNIT_DIR}
+               cp -vf ${file} %{BUILDROOT}/%{SYSTEMD_UNIT_DIR}/$(basename ${file})
+       done
+       unset file
+end
+
+def MACRO_INSTALL_SYSTEMD_TMPFILES
+       for file in %{DIR_SOURCE}/*.tmpfiles; do
+               [ -e "${file}" ] || continue
+               mkdir -p %{BUILDROOT}/%{SYSTEMD_TMPFILES_DIR}
+               cp -vf ${file} %{BUILDROOT}/%{SYSTEMD_TMPFILES_DIR}/$(basename ${file})
+       done
+       unset file
+end
diff --git a/macros/templates.macro b/macros/templates.macro
new file mode 100644 (file)
index 0000000..d616b34
--- /dev/null
@@ -0,0 +1,53 @@
+
+# XXX fill in description
+_release = %{release}.%{DISTRO_DISTTAG}
+thisapp = %{name}-%{version}
+thisver = %{version}-%{_release}
+
+template MAIN
+       def files
+               /
+       end
+end
+
+template LIBS
+       summary = Library files of %{thisapp}.
+       description = Runtime library files of the package %{thisapp}.
+
+       def files
+               /lib/lib*.so.*
+               /usr/lib*/*.so.*
+       end
+
+       script postin
+               # Update linker cache.
+               /sbin/ldconfig 2>/dev/null || true
+       end
+
+       script postup
+               /sbin/ldconfig 2>/dev/null || true
+       end
+
+       script postun
+               /sbin/ldconfig 2>/dev/null || true
+       end
+end
+
+template DEVEL
+       summary = Development files of %{thisapp}.
+       description = %{summary}
+
+       def files
+               /usr/bin/*-config
+               /usr/include
+               /usr/lib/*.a
+               /usr/lib/pkgconfig
+               /usr/share/aclocal
+               */lib/*.so
+               /usr/share/*/cmake
+               /usr/share/man/man2
+               /usr/share/man/man3
+               /usr/share/pkgconfig
+               /usr/share/vala
+       end
+end
index 1518ae980eda44e98b736d7724a01d8aeb69d7a9..2efa609543ef8990171d168039646c7678c18ed8 100644 (file)
@@ -76,32 +76,22 @@ class Action(object):
 
 class ActionScript(Action):
        type = "script"
+       script_action = None
 
        def init(self):
                # Load the scriplet.
-               self.scriptlet = self.pkg.scriptlet
+               self.scriptlet = self.pkg.get_scriptlet(self.script_action)
 
        @property
        def interpreter(self):
                """
                        Get the interpreter of this scriptlet.
                """
-               # XXX check, how to handle elf files here.
-
-               # If nothing was found, we return the default interpreter.
-               interpreter = SCRIPTLET_INTERPRETER
-
-               for line in self.scriptlet.splitlines():
-                       if line.startswith("#!/"):
-                               interpreter = line[2:]
-                               interpreter = interpreter.split()[0]
-                       break
-
-               return interpreter
+               return util.scriptlet_interpreter(self.scriptlet)
 
        @property
        def args(self):
-               raise NotImplementedError
+               return []
 
        def run(self):
                # Exit immediately, if the scriptlet is empty.
@@ -112,14 +102,15 @@ class ActionScript(Action):
                logging.debug("Running scriptlet %s" % self)
 
                # Check if the interpreter does exist and is executable.
-               interpreter = "%s/%s" % (self.pakfire.path, self.interpreter)
-               if not os.path.exists(interpreter):
-                       raise ActionError, _("Cannot run scriptlet because no interpreter is available: %s" \
-                               % self.interpreter)
+               if self.interpreter:
+                       interpreter = "%s/%s" % (self.pakfire.path, self.interpreter)
+                       if not os.path.exists(interpreter):
+                               raise ActionError, _("Cannot run scriptlet because no interpreter is available: %s" \
+                                       % self.interpreter)
 
-               if not os.access(interpreter, os.X_OK):
-                       raise ActionError, _("Cannot run scriptlet because the interpreter is not executable: %s" \
-                               % self.interpreter)
+                       if not os.access(interpreter, os.X_OK):
+                               raise ActionError, _("Cannot run scriptlet because the interpreter is not executable: %s" \
+                                       % self.interpreter)
 
                # Create a name for the temporary script file.
                script_file_chroot = os.path.join("/", LOCAL_TMP_PATH,
@@ -150,12 +141,13 @@ class ActionScript(Action):
                        # XXX catch errors and return a beautiful message to the user
                        raise
 
-               command = [script_file_chroot,] + self.args
+               # Generate the script command.
+               command = [script_file_chroot] + self.args
 
                # If we are running in /, we do not need to chroot there.
-               chroot_dir = None
+               chroot_path = None
                if not self.pakfire.path == "/":
-                       chroot_dir = self.pakfire.path
+                       chroot_path = self.pakfire.path
 
                try:
                        ret = chroot.do(command, cwd="/tmp",
@@ -181,39 +173,27 @@ class ActionScript(Action):
 
 
 class ActionScriptPreIn(ActionScript):
-       @property
-       def args(self):
-               return ["prein",]
+       script_action = "prein"
 
 
 class ActionScriptPostIn(ActionScript):
-       @property
-       def args(self):
-               return ["postin",]
+       script_action = "postin"
 
 
 class ActionScriptPreUn(ActionScript):
-       @property
-       def args(self):
-               return ["preun",]
+       script_action = "preun"
 
 
 class ActionScriptPostUn(ActionScript):
-       @property
-       def args(self):
-               return ["postun",]
+       script_action = "postun"
 
 
 class ActionScriptPreUp(ActionScript):
-       @property
-       def args(self):
-               return ["preup",]
+       script_action = "preup"
 
 
 class ActionScriptPostUp(ActionScript):
-       @property
-       def args(self):
-               return ["postup",]
+       script_action = "postup"
 
 
 class ActionScriptPostTrans(ActionScript):
@@ -221,21 +201,15 @@ class ActionScriptPostTrans(ActionScript):
 
 
 class ActionScriptPostTransIn(ActionScriptPostTrans):
-       @property
-       def args(self):
-               return ["posttransin",]
+       script_action = "posttransin"
 
 
 class ActionScriptPostTransUn(ActionScriptPostTrans):
-       @property
-       def args(self):
-               return ["posttransun",]
+       script_action = "posttransun"
 
 
 class ActionScriptPostTransUp(ActionScriptPostTrans):
-       @property
-       def args(self):
-               return ["posttransup",]
+       script_action = "posttransup"
 
 
 class ActionInstall(Action):
index 601bb78cc703450ae89edbb84b3217a0737412c1..82cf0c420d95e821ee99a3c0b91e1f789f5cc571 100644 (file)
@@ -72,14 +72,21 @@ def grouplist(group, **pakfire_args):
 
        return pakfire.grouplist(group)
 
+def _build(pkg, resultdir, **kwargs):
+       pakfire = Pakfire(**kwargs)
+
+       return pakfire._build(pkg, resultdir, **kwargs)
+
 def build(pkg, **kwargs):
        return Pakfire.build(pkg, **kwargs)
 
 def shell(pkg, **kwargs):
        return Pakfire.shell(pkg, **kwargs)
 
-def dist(pkgs, **kwargs):
-       return Pakfire.dist(pkgs, **kwargs)
+def dist(pkgs, resultdirs=None, **pakfire_args):
+       pakfire = Pakfire(**pakfire_args)
+
+       return pakfire.dist(pkgs, resultdirs=resultdirs)
 
 def provides(patterns, **pakfire_args):
        # Create pakfire instance.
index e19e49edb02e57bc0c559132adf06d109233a5c8..9257f4844106166555064e29fcca64cbcdfef4ff 100644 (file)
@@ -123,6 +123,15 @@ class Pakfire(object):
                if not self.path == "/":
                        util.rm(self.path)
 
+       @property
+       def environ(self):
+               env = {}
+
+               # Get distribution information.
+               env.update(self.distro.environ)
+
+               return env
+
        @property
        def supported_arches(self):
                return self.config.supported_arches
@@ -161,6 +170,8 @@ class Pakfire(object):
                raise BuildError, arch
 
        def check_is_ipfire(self):
+               return # XXX disabled for now
+
                ret = os.path.exists("/etc/ipfire-release")
 
                if not ret:
@@ -187,24 +198,44 @@ class Pakfire(object):
                else:
                        logging.info(_("Nothing to do"))
 
-       def install(self, requires):
+       def install(self, requires, interactive=True, logger=None, **kwargs):
+               if not logger:
+                       logger = logging.getLogger()
+
                # Create a new request.
                request = self.create_request()
+
+               # Expand all groups.
                for req in requires:
-                       req = self.create_relation(req)
-                       request.install(req)
+                       if req.startswith("@"):
+                               reqs = self.grouplist(req[1:])
+                       else:
+                               reqs = [req,]
+
+                       for req in reqs:
+                               if not isinstance(req, packages.BinaryPackage):
+                                       req = self.create_relation(req)
+
+                               request.install(req)
 
                # Do the solving.
                solver = self.create_solver()
-               t = solver.solve(request)
+               t = solver.solve(request, **kwargs)
 
                if not t:
+                       if not interactive:
+                               raise DependencyError
+
                        logging.info(_("Nothing to do"))
                        return
 
-               # Ask if the user acknowledges the transaction.
-               if not t.cli_yesno():
-                       return
+               if interactive:
+                       # Ask if the user acknowledges the transaction.
+                       if not t.cli_yesno():
+                               return
+
+               else:
+                       t.dump(logger=logger)
 
                # Run the transaction.
                t.run()
@@ -330,14 +361,20 @@ class Pakfire(object):
                # For all patterns we run a single search which returns us a bunch
                # of solvables which are transformed into Package objects.
                for pattern in patterns:
-                       solvs = self.pool.search(pattern, satsolver.SEARCH_GLOB, "solvable:name")
+                       if os.path.exists(pattern):
+                               pkg = packages.open(self, self.repos.dummy, pattern)
+                               if pkg:
+                                       pkgs.append(pkg)
 
-                       for solv in solvs:
-                               pkg = packages.SolvPackage(self, solv)
-                               if pkg in pkgs:
-                                       continue
+                       else:
+                               solvs = self.pool.search(pattern, satsolver.SEARCH_GLOB, "solvable:name")
 
-                               pkgs.append(pkg)
+                               for solv in solvs:
+                                       pkg = packages.SolvPackage(self, solv)
+                                       if pkg in pkgs:
+                                               continue
+
+                                       pkgs.append(pkg)
 
                return sorted(pkgs)
 
@@ -358,10 +395,8 @@ class Pakfire(object):
                # Return a list of the packages, alphabetically sorted.
                return sorted(pkgs.values())
 
-       def groupinstall(self, group):
-               pkgs = self.grouplist(group)
-
-               self.install(pkgs)
+       def groupinstall(self, group, **kwargs):
+               self.install("@%s" % group, **kwargs)
 
        def grouplist(self, group):
                pkgs = []
@@ -407,6 +442,18 @@ class Pakfire(object):
                finally:
                        b.destroy()
 
+       def _build(self, pkg, resultdir, nodeps=False, **kwargs):
+               print kwargs
+
+               b = builder.Builder2(self, pkg, resultdir, **kwargs)
+
+               try:
+                       b.build()
+               except Error:
+                       raise BuildError, _("Build command has failed.")
+               finally:
+                       b.cleanup()
+
        @staticmethod
        def shell(pkg, **kwargs):
                b = builder.Builder(pkg, **kwargs)
@@ -418,40 +465,17 @@ class Pakfire(object):
                finally:
                        b.destroy()
 
-       @staticmethod
-       def dist(pkgs, resultdirs=None, **pakfire_args):
-               # Create a builder with empty package.
-               b = builder.Builder(None, **pakfire_args)
-               p = b.pakfire
-
+       def dist(self, pkgs, resultdirs=None):
                if not resultdirs:
                        resultdirs = []
 
                # Always include local repository
-               resultdirs.append(p.repos.local_build.path)
+               resultdirs.append(self.repos.local_build.path)
 
-               try:
-                       b.prepare()
-
-                       for pkg in pkgs:
-                               b.pkg = pkg
-
-                               b.extract(build_deps=False)
-
-                               # Run the actual dist.
-                               b.dist()
-
-                               # Copy-out all resultfiles
-                               for resultdir in resultdirs:
-                                       if not resultdir:
-                                               continue
-
-                                       b.copy_result(resultdir)
+               for pkg in pkgs:
+                       pkg = packages.Makefile(self, pkg)
 
-                               # Cleanup the stuff that the package left.
-                               b.cleanup()
-               finally:
-                       b.destroy()
+                       pkg.dist(resultdirs)
 
        def provides(self, patterns):
                pkgs = []
index 6e281d2f30838e8fb66bc16d664ddf81e0bc9ad9..dc30b263bf3337e256e8fad228ab2d45ed202fe6 100644 (file)
@@ -56,7 +56,7 @@ BUILD_LOG_HEADER = """
 
 """
 
-class Builder(object):
+class BuildEnviron(object):
        # The version of the kernel this machine is running.
        kernel_version = os.uname()[2]
 
@@ -117,8 +117,12 @@ class Builder(object):
                self.distro = self.pakfire.distro
                self.path = self.pakfire.path
 
-               # Open the package.
-               self.pkg = pkg
+               # Log the package information.
+               self.pkg = packages.Makefile(self.pakfire, pkg)
+               self.log.info(_("Package information:"))
+               for line in self.pkg.dump(long=True).splitlines():
+                       self.log.info("  %s" % line)
+               self.log.info("")
 
                # XXX need to make this configureable
                self.settings = {
@@ -137,27 +141,6 @@ class Builder(object):
                # Save the build time.
                self.build_time = int(time.time())
 
-       def get_pkg(self):
-               return getattr(self, "_pkg", None)
-
-       def set_pkg(self, pkg):
-               if pkg is None:
-                       self.__pkg = None
-                       return
-
-               self._pkg = packages.open(self.pakfire, None, pkg)
-
-               # Log the package information.
-               if not isinstance(self._pkg, packages.Makefile):
-                       self.log.info("Package information:")
-                       for line in self._pkg.dump(long=True).splitlines():
-                               self.log.info("  %s" % line)
-                       self.log.info("")
-
-               assert self.pkg
-
-       pkg = property(get_pkg, set_pkg)
-
        @property
        def arch(self):
                """
@@ -245,6 +228,52 @@ class Builder(object):
 
                                self.copyout(file_in, file_out)
 
+       def get_pwuid(self, uid):
+               users = {}
+
+               f = open(self.chrootPath("/etc/passwd"))
+               for line in f.readlines():
+                       m = re.match(r"^([a-z][a-z0-9_\-]{,30}):x:(\d+):(\d+):(.*):(.*)$", line)
+                       if not m:
+                               continue
+
+                       item = {
+                               "name"  : m.group(1),
+                               "uid"   : int(m.group(2)),
+                               "gid"   : int(m.group(3)),
+                               "home"  : m.group(4),
+                               "shell" : m.group(5),
+                       }
+
+                       assert not users.has_key(item["uid"])
+                       users[item["uid"]] = item
+
+               f.close()
+
+               return users.get(uid, None)
+
+       def get_grgid(self, gid):
+               groups = {}
+
+               f = open(self.chrootPath("/etc/group"))
+               for line in f.readlines():
+                       m = re.match(r"^([a-z][a-z0-9_\-]{,30}):x:(\d+):(.*)$", line)
+                       if not m:
+                               continue
+
+                       item = {
+                               "name"  : m.group(1),
+                               "gid"   : int(m.group(2)),
+                       }
+
+                       # XXX re-enable later
+                       #assert not groups.has_key(item["gid"])
+                       groups[item["gid"]] = item
+
+               f.close()
+
+               return groups.get(gid, None)
+
        def extract(self, requires=None, build_deps=True):
                """
                        Gets a dependency set and extracts all packages
@@ -267,29 +296,15 @@ class Builder(object):
                        requires.append("icecream")
 
                # Get build dependencies from source package.
-               if isinstance(self.pkg, packages.SourcePackage):
-                       for req in self.pkg.requires:
-                               requires.append(req)
+               for req in self.pkg.requires:
+                       requires.append(req)
 
                # Install all packages.
                self.install(requires)
 
                # Copy the makefile and load source tarballs.
-               if isinstance(self.pkg, packages.Makefile):
-                       self.pkg.extract(self)
-
-               elif isinstance(self.pkg, packages.SourcePackage):
-                       self.pkg.extract(_("Extracting: %s (source)") % self.pkg.name,
-                               prefix=os.path.join(self.path, "build"))
-
-               # If we have a makefile, we can only get the build dependencies
-               # after we have extracted all the rest.
-               if build_deps and isinstance(self.pkg, packages.Makefile):
-                       requires = self.make_requires()
-                       if not requires:
-                               return
-
-                       self.install(requires)
+               self.pkg.extract(_("Extracting"),
+                       prefix=os.path.join(self.path, "build"))
 
        def install(self, requires):
                """
@@ -299,33 +314,8 @@ class Builder(object):
                if not requires:
                        return
 
-               # Create a request and fill it with what we need.
-               request = self.pakfire.create_request()
-
-               for req in requires:
-                       if isinstance(req, packages.BinaryPackage):
-                               req = req.friendly_name
-
-                       if "<" in req or ">" in req or "=" in req or req.startswith("/"):
-                               req = self.pakfire.create_relation(req)
-
-                       request.install(req)
-
-               # Create a new solver instance.
-               solver = self.pakfire.create_solver()
-
-               # Do the solving.
-               transaction = solver.solve(request, allow_downgrade=True, logger=self.log)
-
-               # XXX check for errors
-               if not transaction:
-                       raise DependencyError, "Could not resolve dependencies"
-
-               # Show the user what is going to be done.
-               transaction.dump(logger=self.log)
-
-               # Run the transaction.
-               transaction.run()
+               self.pakfire.install(requires, interactive=False,
+                       allow_downgrade=True, logger=self.log)
 
        def install_test(self):
                pkgs = []
@@ -472,9 +462,6 @@ class Builder(object):
        def cleanup(self):
                logging.debug("Cleaning environemnt.")
 
-               # Run make clean and let it cleanup its stuff.
-               self.make("clean")
-
                # Remove the build directory and buildroot.
                dirs = ("build", self.buildroot, "result")
 
@@ -486,10 +473,6 @@ class Builder(object):
                        util.rm(d)
                        os.makedirs(d)
 
-               # Clear make_info cache.
-               if hasattr(self, "_make_info"):
-                       del self._make_info
-
        def _mountall(self):
                self.log.debug("Mounting environment")
                for cmd, mountpoint in self.mountpoints:
@@ -523,20 +506,6 @@ class Builder(object):
 
                return ret
 
-       @staticmethod
-       def calc_parallelism():
-               """
-                       Calculate how many processes to run
-                       at the same time.
-
-                       We take the log10(number of processors) * factor
-               """
-               num = os.sysconf("SC_NPROCESSORS_CONF")
-               if num == 1:
-                       return 2
-               else:
-                       return int(round(math.log10(num) * 26))
-
        @property
        def environ(self):
                env = {
@@ -547,14 +516,14 @@ class Builder(object):
                        "PS1"  : "\u:\w\$ ",
 
                        "BUILDROOT" : self.buildroot,
-                       "PARALLELISMFLAGS" : "-j%s" % self.calc_parallelism(),
+                       "PARALLELISMFLAGS" : "-j%s" % util.calc_parallelism(),
                }
 
                # Inherit environment from distro
                env.update(self.pakfire.distro.environ)
 
                # Icecream environment settings
-               if self.settings.get("enable_icecream", None):
+               if self.settings.get("enable_icecream", False):
                        # Set the toolchain path
                        if self.settings.get("icecream_toolchain", None):
                                env["ICECC_VERSION"] = self.settings.get("icecream_toolchain")
@@ -610,105 +579,40 @@ class Builder(object):
 
                return ret
 
-       def make(self, *args, **kwargs):
-               if isinstance(self.pkg, packages.Makefile):
-                       filename = os.path.basename(self.pkg.filename)
-               elif isinstance(self.pkg, packages.SourcePackage):
-                       filename = "%s.%s" % (self.pkg.name, MAKEFILE_EXTENSION)
-
-               return self.do("make -f /build/%s %s" % (filename, " ".join(args)),
-                       **kwargs)
-
-       @property
-       def make_info(self):
-               if not hasattr(self, "_make_info"):
-                       info = {}
-
-                       output = self.make("buildinfo", returnOutput=True)
-
-                       for line in output.splitlines():
-                               # XXX temporarily
-                               if not line:
-                                       break
-
-                               m = re.match(r"^(\w+)=(.*)$", line)
-                               if not m:
-                                       continue
-
-                               info[m.group(1)] = m.group(2).strip("\"")
-
-                       self._make_info = info
-
-               return self._make_info
-
-       @property
-       def packages(self):
-               if hasattr(self, "_packages"):
-                       return self._packages
-
-               pkgs = []
-               output = self.make("packageinfo", returnOutput=True)
-
-               pkg = {}
-               for line in output.splitlines():
-                       if not line:
-                               pkgs.append(pkg)
-                               pkg = {}
-
-                       m = re.match(r"^(\w+)=(.*)$", line)
-                       if not m:
-                               continue
-
-                       k, v = m.groups()
-                       pkg[k] = v.strip("\"")
-
-               self._packages = []
-               for pkg in pkgs:
-                       pkg = packages.VirtualPackage(self.pakfire, pkg)
-                       self._packages.append(pkg)
-
-               return self._packages
-
-       def make_requires(self):
-               return self.make_info.get("PKG_BUILD_DEPS", "").split()
-
-       def make_sources(self):
-               return self.make_info.get("PKG_FILES", "").split()
-
-       def create_icecream_toolchain(self):
-               if not self.settings.get("enable_icecream", None):
-                       return
-
-               out = self.do("icecc --build-native", returnOutput=True)
-
-               for line in out.splitlines():
-                       m = re.match(r"^creating ([a-z0-9]+\.tar\.gz)", line)
-                       if m:
-                               self.settings["icecream_toolchain"] = "/%s" % m.group(1)
-
        def build(self):
                assert self.pkg
 
                # Create icecream toolchain.
                self.create_icecream_toolchain()
 
+               # Create the build script and build command.
+               build_script = self.create_buildscript()
+               build_cmd = "/bin/sh -e -x %s" % build_script
+
                try:
-                       self.make("build", logger=self.log)
+                       self.do(build_cmd, logger=self.log)
 
                except Error:
                        raise BuildError, "The build command failed."
 
-               for pkg in reversed(self.packages):
-                       packager = packages.BinaryPackager(self.pakfire, pkg, self)
-                       packager()
-               self.log.info("")
+               # XXX clean up that mess after this line
 
-               self.log.info(_("Dumping created packages"))
+               # Create a temporary repository where we put in the just built packages.
                repo = repository.RepositoryDir(self.pakfire, "build-%s" % self.build_id,
                        "", self.chrootPath("result"), type="binary")
                self.pakfire.repos.add_repo(repo)
 
-               repo.update()
+               # Make all these little package from the build environment.
+               for pkg in reversed(self.pkg.packages):
+                       packager = packages.BinaryPackager(self.pakfire, pkg, self)
+                       packager.run([repo.path,])
+               self.log.info("")
+
+               # Update repository metadata.
+               repo.update(force=True)
+
+               self.log.info(_("Dumping created packages"))
+
                for line in repo.dump(long=True, filelist=True).splitlines():
                        self.log.info("  %s" % line)
                self.log.info("")
@@ -717,8 +621,22 @@ class Builder(object):
 
                return repo
 
-       def dist(self):
-               self.pkg.dist(self)
+       def build(self):
+               pkgfile = os.path.join("/build", os.path.basename(self.pkg.filename))
+               resultdir = self.chrootPath("/result")
+
+               # Create the build command, that is executed in the chroot.
+               build_command = ["pakfire-build2", "--offline", "build", pkgfile,
+                       "--nodeps",]
+
+               try:
+                       self.do(" ".join(build_command), logger=self.log)
+
+               except Error:
+                       raise BuildError, _("The build command failed. See logfile for details.")
+
+               # Copy the final packages and stuff.
+               # XXX TODO resultdir
 
        def shell(self, args=[]):
                if not util.cli_is_interactive():
@@ -752,3 +670,129 @@ class Builder(object):
 
                finally:
                        self._umountall()
+
+# XXX backwards compatibilty
+Builder = BuildEnviron
+
+class Builder2(object):
+       def __init__(self, pakfire, filename, resultdir, **kwargs):
+               self.pakfire = pakfire
+
+               self.filename = filename
+
+               self.resultdir = resultdir
+
+               # Open package file.
+               self.pkg = packages.Makefile(self.pakfire, self.filename)
+
+               #self.buildroot = "/tmp/pakfire_buildroot/%s" % util.random_string(20)
+               self.buildroot = "/buildroot"
+
+               self._environ = {
+                       "BUILDROOT" : self.buildroot,
+                       "LANG"      : "C",
+               }
+
+       @property
+       def distro(self):
+               return self.pakfire.distro
+
+       @property
+       def environ(self):
+               environ = os.environ
+               environ.update(self._environ)
+
+               return environ
+
+       def do(self, command, shell=True, personality=None, cwd=None, *args, **kwargs):
+               # Environment variables
+               logging.debug("Environment:")
+               for k, v in sorted(self.environ.items()):
+                       logging.debug("  %s=%s" % (k, v))
+
+               # Update personality it none was set
+               if not personality:
+                       personality = self.distro.personality
+
+               if not cwd:
+                       cwd = "/%s" % LOCAL_TMP_PATH
+
+               # Make every shell to a login shell because we set a lot of
+               # environment things there.
+               if shell:
+                       command = ["bash", "--login", "-c", command]
+
+               return chroot.do(
+                       command,
+                       personality=personality,
+                       shell=False,
+                       env=self.environ,
+                       logger=logging.getLogger(),
+                       cwd=cwd,
+                       *args,
+                       **kwargs
+               )
+
+       def create_icecream_toolchain(self):
+               try:
+                       out = self.do("icecc --build-native", returnOutput=True)
+               except Error:
+                       return
+
+               for line in out.splitlines():
+                       m = re.match(r"^creating ([a-z0-9]+\.tar\.gz)", line)
+                       if m:
+                               self._environ["icecream_toolchain"] = "/%s" % m.group(1)
+
+       def create_buildscript(self, stage):
+               file = "/tmp/build_%s" % util.random_string()
+
+               # Get buildscript from the package.
+               script = self.pkg.get_buildscript(stage)
+
+               # Write script to an empty file.
+               f = open(file, "w")
+               f.write("#!/bin/sh\n\n")
+               f.write("set -e\n")
+               f.write("set -x\n")
+               f.write("\n%s\n" % script)
+               f.write("exit 0\n")
+               f.close()
+               os.chmod(file, 700)
+
+               return file
+
+       def build(self):
+               # Create buildroot.
+               if not os.path.exists(self.buildroot):
+                       os.makedirs(self.buildroot)
+
+               # Build icecream toolchain if icecream is installed.
+               self.create_icecream_toolchain()
+
+               for stage in ("prepare", "build", "test", "install"):
+                       self.build_stage(stage)
+
+               # Package the result.
+               # Make all these little package from the build environment.
+               logging.info(_("Creating packages:"))
+               for pkg in reversed(self.pkg.packages):
+                       packager = packages.BinaryPackager(self.pakfire, pkg, self.buildroot)
+                       packager.run([self.resultdir,])
+               logging.info("")
+
+       def build_stage(self, stage):
+               # Get the buildscript for this stage.
+               buildscript = self.create_buildscript(stage)
+
+               # Execute the buildscript of this stage.
+               logging.info(_("Running stage %s:") % stage)
+               self.do(buildscript, shell=False)
+
+               # Remove the buildscript.
+               if os.path.exists(buildscript):
+                       os.unlink(buildscript)
+
+       def cleanup(self):
+               if os.path.exists(self.buildroot):
+                       util.rm(self.buildroot)
index 89af7cf76715484400362a6e773268d12ef87bdc..78543d1f6e5a00ff201a2b908ddd36e870c514a0 100644 (file)
@@ -91,7 +91,7 @@ def do(command, shell=False, chrootPath=None, cwd=None, timeout=0, raiseExc=True
 
                # Create new child process
                child = subprocess.Popen(
-                       command, 
+                       command,
                        shell=shell,
                        bufsize=0, close_fds=True, 
                        stdin=open("/dev/null", "r"), 
@@ -179,4 +179,7 @@ class ChildPreExec(object):
 
                # Change to cwd.
                if self.cwd:
+                       if not os.path.exists(self.cwd):
+                               os.makedirs(self.cwd)
+
                        os.chdir(self.cwd)
index 75efa08d4ea9ca0788758b16d92b5768a233ec7b..a0355651e6869be1566a64f742a3ca80ca171f37 100644 (file)
@@ -437,8 +437,9 @@ class CliBuilder(Cli):
                        "arch" : self.args.arch,
                }
 
-               pakfire.build(pkg, builder_mode=self.args.mode, distro_config=distro_config,
-                       resultdirs=[self.args.resultdir,], shell=True, **self.pakfire_args)
+               pakfire.build(pkg, builder_mode=self.args.mode,
+                       distro_config=distro_config, resultdirs=[self.args.resultdir,],
+                       shell=True, **self.pakfire_args)
 
        def handle_shell(self):
                pkg = None
@@ -570,3 +571,60 @@ class CliServer(Cli):
                path = self.args.path[0]
 
                pakfire.repo_create(path, self.args.inputs, **self.pakfire_args)
+
+
+class CliBuilder2(Cli):
+       def __init__(self):
+               self.parser = argparse.ArgumentParser(
+                       description = _("Pakfire builder command line interface."),
+               )
+
+               self.parse_common_arguments()
+
+               # Add sub-commands.
+               self.sub_commands = self.parser.add_subparsers()
+
+               self.parse_command_build()
+
+               # Finally parse all arguments from the command line and save them.
+               self.args = self.parser.parse_args()
+
+               self.action2func = {
+                       "build"       : self.handle_build,
+               }
+
+       def parse_command_build(self):
+               # Implement the "build" command.
+               sub_build = self.sub_commands.add_parser("build",
+                       help=_("Build one or more packages."))
+               sub_build.add_argument("package", nargs=1,
+                       help=_("Give name of at least one package to build."))
+               sub_build.add_argument("action", action="store_const", const="build")
+
+               sub_build.add_argument("-a", "--arch",
+                       help=_("Build the package for the given architecture."))
+               sub_build.add_argument("--resultdir", nargs="?",
+                       help=_("Path were the output files should be copied to."))
+               sub_build.add_argument("-m", "--mode", nargs="?", default="development",
+                       help=_("Mode to run in. Is either 'release' or 'development' (default)."))
+               sub_build.add_argument("--nodeps", action="store_true",
+                       help=_("Do not verify build dependencies."))
+
+       def handle_build(self):
+               # Get the package descriptor from the command line options
+               pkg = self.args.package[0]
+
+               # Check, if we got a regular file
+               if os.path.exists(pkg):
+                       pkg = os.path.abspath(pkg)
+               else:
+                       raise FileNotFoundError, pkg
+
+               # Create distribution configuration from command line.
+               distro_config = {
+                       "arch" : self.args.arch,
+               }
+
+               pakfire._build(pkg, builder_mode=self.args.mode,
+                       distro_config=distro_config, resultdir=self.args.resultdir,
+                       nodeps=self.args.nodeps, **self.pakfire_args)
index 02cdc5dd77c8cb9d6cc46348d4a63fcd771104de..06694036df98bf262a1cf30c145a649a469ebf4f 100644 (file)
@@ -25,6 +25,8 @@ from errors import *
 
 from __version__ import PAKFIRE_VERSION
 
+PAKFIRE_LEAST_COMPATIBLE_VERSION = "0.9.5"
+
 SYSCONFDIR = "/etc"
 
 CONFIG_DIR = os.path.join(SYSCONFDIR, "pakfire.repos.d")
@@ -45,20 +47,34 @@ BUFFER_SIZE = 102400
 
 MIRRORLIST_MAXSIZE = 1024**2
 
+MACRO_FILE_DIR = "/usr/lib/pakfire/macros"
+MACRO_FILES = \
+       (os.path.join(MACRO_FILE_DIR, f) for f in sorted(os.listdir(MACRO_FILE_DIR)) if f.endswith(".macro"))
+
 METADATA_FORMAT = 0
 METADATA_DOWNLOAD_LIMIT = 1024**2
 METADATA_DOWNLOAD_PATH  = "repodata"
 METADATA_DOWNLOAD_FILE  = "repomd.json"
 METADATA_DATABASE_FILE  = "packages.solv"
 
-PACKAGE_FORMAT = 0
+PACKAGE_FORMAT = 1
+# XXX implement this properly
+PACKAGE_FORMATS_SUPPORTED = [0, 1]
 PACKAGE_EXTENSION = "pfm"
 MAKEFILE_EXTENSION = "nm"
 
 PACKAGE_FILENAME_FMT = "%(name)s-%(version)s-%(release)s.%(arch)s.%(ext)s"
 
-BUILD_PACKAGES = ["build-essentials>=2:1.0-1.ip3",]
-SHELL_PACKAGES = ["elinks", "less", "pakfire", "vim",]
+BUILD_PACKAGES = [
+       "@Build",
+       "/bin/bash",
+       "build-essentials>=2:1.0-1.ip3",
+       "gcc",
+       "glibc-devel",
+       "shadow-utils>=4.1.4.3",
+       "pakfire-build>=%s" % PAKFIRE_LEAST_COMPATIBLE_VERSION,
+]
+SHELL_PACKAGES = ["elinks", "less", "vim",]
 BUILD_ROOT = "/var/lib/pakfire/build"
 
 SOURCE_DOWNLOAD_URL = "http://source.ipfire.org/source-3.x/"
@@ -75,53 +91,82 @@ ORPHAN_DIRECTORIES = [
        "usr/share/man/man5", "usr/share/man/man6", "usr/share/man/man7",
        "usr/share/man/man8", "usr/share/man/man9", "usr/lib/pkgconfig",
 ]
-ORPHAN_DIRECTORIES.sort(cmp=lambda x,y: cmp(len(x), len(y)), reverse=True)
+for i in ORPHAN_DIRECTORIES:
+       i = os.path.dirname(i)
 
-BINARY_PACKAGE_META = SOURCE_PACKAGE_META = """\
-### %(name)s package
+       if not i or i in ORPHAN_DIRECTORIES:
+               continue
 
-VERSION="%(package_format)s"
-TYPE="%(package_type)s"
+       ORPHAN_DIRECTORIES.append(i)
 
-# Build information
-BUILD_DATE="%(build_date)s"
-BUILD_HOST="%(build_host)s"
-BUILD_ID="%(build_id)s"
-BUILD_TIME="%(build_time)s"
+ORPHAN_DIRECTORIES.sort(cmp=lambda x,y: cmp(len(x), len(y)), reverse=True)
 
-# Distribution information
-DISTRO_NAME="%(distro_name)s"
-DISTRO_RELEASE="%(distro_release)s"
-DISTRO_VENDOR="%(distro_vendor)s"
+PACKAGE_INFO = """\
+# Pakfire %(pakfire_version)s
 
 # Package information
-PKG_NAME="%(name)s"
-PKG_VER="%(version)s"
-PKG_REL="%(release)s"
-PKG_EPOCH="%(epoch)s"
-PKG_UUID="%(package_uuid)s"
+package
+       name        = %(name)s
+       version     = %(version)s
+       release     = %(release)s
+       epoch       = %(epoch)s
+       arch        = %(arch)s
+
+       uuid        = %(uuid)s
+       groups      = %(groups)s
+       maintainer  = %(maintainer)s
+       url         = %(url)s
+       license     = %(license)s
+
+       summary     = %(summary)s
+       def description
+%(description)s
+       end
+
+       size        = %(inst_size)d
+end
 
-PKG_GROUPS="%(groups)s"
-PKG_ARCH="%(arch)s"
-
-PKG_MAINTAINER="%(maintainer)s"
-PKG_LICENSE="%(license)s"
-PKG_URL="%(url)s"
-
-PKG_SUMMARY="%(summary)s"
-PKG_DESCRIPTION="%(description)s"
-
-# Dependency info
-PKG_PREREQUIRES="%(prerequires)s"
-PKG_REQUIRES="%(requires)s"
-PKG_PROVIDES="%(provides)s"
-PKG_CONFLICTS="%(conflicts)s"
-PKG_OBSOLETES="%(obsoletes)s"
-
-PKG_PAYLOAD_COMP="%(payload_comp)s"
-PKG_PAYLOAD_HASH1="%(payload_hash1)s"
+# Build information
+build
+       host        = %(build_host)s
+       id          = %(build_id)s
+       time        = %(build_time)d
+end
 
+# Distribution information
+distribution
+       name        = %(distro_name)s
+       release     = %(distro_release)s
+       vendor      = %(distro_vendor)s
+       maintainer  = %(distro_maintainer)s
+end
+
+# Dependency information
+dependencies
+       def prerequires
+%(prerequires)s
+       end
+
+       def requires
+%(requires)s
+       end
+
+       def provides
+%(provides)s
+       end
+
+       def conflicts
+%(conflicts)s
+       end
+
+       def obsoletes
+%(obsoletes)s
+       end
+end
+
+# EOF
 """
+PACKAGE_INFO_DESCRIPTION_LINE = PACKAGE_INFO_DEPENDENCY_LINE = "\t\t%s"
 
 # XXX make this configurable in pakfire.conf
 PAKFIRE_MULTIINSTALL = ["kernel",]
@@ -129,60 +174,14 @@ PAKFIRE_MULTIINSTALL = ["kernel",]
 SCRIPTLET_INTERPRETER = "/bin/sh"
 SCRIPTLET_TIMEOUT = 60 * 15
 
-SCRIPTLET_TEMPLATE = """\
-#!/bin/sh
-
-function control_prein() {
-%(control_prein)s
-}
-
-function control_postin() {
-%(control_postin)s
-}
-
-function control_preun() {
-%(control_preun)s
-}
-
-function control_postun() {
-%(control_postun)s
-}
-
-function control_preup() {
-%(control_preup)s
-}
-
-function control_postup() {
-%(control_postup)s
-}
-
-function control_postransin() {
-%(control_posttransin)s
-}
-
-function control_posttransun() {
-%(control_posttransun)s
-}
-
-function control_posttransup() {
-%(control_posttransup)s
-}
-
-# Get right action from commandline.
-action=${1}
-shift
-
-case "${action}" in
-       prein|postin|preun|postun|preup|postup|posttransin|posttransun|posttransup)
-               control_${action} $@
-               ;;
-
-       *)
-               echo "Unknown action: ${action}" >&2
-               exit 2
-               ;;
-esac
-
-# Always exit with an okay status.
-exit 0
-"""
+SCRIPTS = (
+       "prein",
+       "postin",
+       "preun",
+       "postun",
+       "preup",
+       "postup",
+       "posttransin",
+       "posttransun",
+       "posttransup",
+)
index 04ed243cd4f5d7a9637ae31a34152f20ec79d96e..ce7467a92b27f35d837d7246f3be7e1e8643fa2f 100644 (file)
@@ -55,7 +55,7 @@ class Distribution(object):
                logging.debug("Distribution configuration:")
 
                attrs = ("name", "version", "release", "sname", "dist", "vendor",
-                       "arch", "machine",)
+                       "arch", "machine", "source_dl",)
 
                for attr in attrs:
                        logging.debug(" %s : %s" % (attr, getattr(self, attr)))
@@ -97,6 +97,10 @@ class Distribution(object):
        def vendor(self):
                return self._data.get("vendor")
 
+       @property
+       def maintainer(self):
+               return self._data.get("maintainer")
+
        def get_arch(self):
                return self._data.get("arch") or self.config.host_arch
        
@@ -119,6 +123,10 @@ class Distribution(object):
 
                return "%s-%s-linux-gnu" % (self.arch, vendor.lower())
 
+       @property
+       def source_dl(self):
+               return self._data.get("source_dl", None)
+
        @property
        def environ(self):
                """
@@ -126,15 +134,16 @@ class Distribution(object):
                        chroot environment.
                """
                env = {
-                       "DISTRO_NAME"    : self.name,
-                       "DISTRO_SNAME"   : self.sname,
-                       "DISTRO_VERSION" : self.version,
-                       "DISTRO_RELEASE" : self.release,
-                       "DISTRO_DISTTAG" : self.dist,
-                       "DISTRO_ARCH"    : self.arch,
-                       "DISTRO_MACHINE" : self.machine,
-                       "DISTRO_VENDOR"  : self.vendor,
-                       "DISTRO_SLOGAN"  : self.slogan,
+                       "DISTRO_NAME"       : self.name,
+                       "DISTRO_SNAME"      : self.sname,
+                       "DISTRO_VERSION"    : self.version,
+                       "DISTRO_RELEASE"    : self.release,
+                       "DISTRO_DISTTAG"    : self.dist,
+                       "DISTRO_ARCH"       : self.arch,
+                       "DISTRO_MACHINE"    : self.machine,
+                       "DISTRO_MAINTAINER" : self.maintainer,
+                       "DISTRO_VENDOR"     : self.vendor,
+                       "DISTRO_SLOGAN"     : self.slogan,
                }
 
                return env
index 5290e558dee94ee8925ce715c553d0c35721e3b1..84043c93dc7e3d8f01cb793becc6fe739b63d117 100644 (file)
@@ -47,7 +47,6 @@ class PakfireGrabber(URLGrabber):
                        config = pakfire.config
 
                if config.get("offline"):
-                       raise
                        raise OfflineModeError, "Cannot use %s in offline mode." % self.__class__.__name__
 
                # Set throttle setting.
@@ -132,6 +131,10 @@ class MirrorList(object):
                if not self.mirrorlist:
                        return 
 
+               # If the system is not online, we cannot download anything.
+               if self.pakfire.offline:
+                       return
+
                logging.debug("Updating mirrorlist for repository '%s' (force=%s)" % (self.repo.name, force))
 
                cache_filename = "mirrors/mirrorlist"
index c5574477d1b7cb2216ccc4effb27c1d0b2866b93..99998373ce7e47254c36b6de530d87c36c53f9d0 100644 (file)
@@ -67,6 +67,9 @@ class OfflineModeError(Error):
                "Please connect your system to the network, remove --offline from the"
                " command line and try again.")
 
+class PackageFormatUnsupportedError(Error):
+       pass
+
 class PakfireError(Error):
        pass
 
index 22bb9839c85beb5937e66b66a576bc65eb8eccf8..2980dce5e96a6f16b450c9ac81be8f8a6b68b92c 100644 (file)
@@ -38,6 +38,6 @@ def _(singular, plural=None, n=None):
        """
        if not plural is None:
                assert n is not None
-               return gettext.ldngettext("pakfire", singular, plural, n)
+               return gettext.dngettext("pakfire", singular, plural, n)
 
-       return gettext.ldgettext("pakfire", singular)
+       return gettext.dgettext("pakfire", singular)
index f890f26ff11ae6791412eb5980b9528ba049fc87..377a9553695438da8ae912f0c7e309a0332acfe6 100644 (file)
@@ -26,7 +26,6 @@ from file import InnerTarFile
 from installed import DatabasePackage, InstalledPackage
 from solv import SolvPackage
 from source import SourcePackage
-from virtual import VirtualPackage
 
 from make import Makefile
 from packager import BinaryPackager
index 32a2c9aed7f91eb0190762b62d67890fec6ad0a9..ece56c750332eabc105a0cef11ffa9dffa556a99 100644 (file)
@@ -90,8 +90,16 @@ class Package(object):
                        (_("Arch"), self.arch),
                        (_("Version"), self.version),
                        (_("Release"), self.release),
-                       (_("Size"), util.format_size(self.size)),
-                       (_("Repo"), self.repo.name),
+               ]
+
+               if self.size:
+                       items.append((_("Size"), util.format_size(self.size)))
+
+               # filter dummy repository
+               if not self.repo == self.pakfire.repos.dummy:
+                       items.append((_("Repo"), self.repo.name))
+
+               items += [
                        (_("Summary"), self.summary),
                        (_("Groups"), " ".join(self.groups)),
                        (_("URL"), self.url),
@@ -104,6 +112,9 @@ class Package(object):
                        caption = ""
 
                if long:
+                       if self.maintainer:
+                               items.append((_("Maintainer"), self.maintainer))
+
                        items.append((_("UUID"), self.uuid))
                        items.append((_("Build ID"), self.build_id))
                        items.append((_("Build date"), self.build_date))
@@ -114,11 +125,26 @@ class Package(object):
                                items.append((caption, prov))
                                caption = ""
 
+                       caption = _("Pre-requires")
+                       for req in sorted(self.prerequires):
+                               items.append((caption, req))
+                               caption = ""
+
                        caption = _("Requires")
                        for req in sorted(self.requires):
                                items.append((caption, req))
                                caption = ""
 
+                       caption = _("Conflicts")
+                       for req in sorted(self.conflicts):
+                               items.append((caption, req))
+                               caption = ""
+
+                       caption = _("Obsoletes")
+                       for req in sorted(self.obsoletes):
+                               items.append((caption, req))
+                               caption = ""
+
                # Append filelist if requested.
                if filelist:
                        for file in self.filelist:
@@ -151,7 +177,9 @@ class Package(object):
                        "license"     : self.license,
                        "hash1"       : self.hash1,
                        "vendor"      : self.vendor,
+                       "build_date"  : self.build_date,
                        "build_host"  : self.build_host,
+                       "build_id"    : self.build_id,
                        "build_time"  : self.build_time,
                        "size"        : self.size,
                        "inst_size"   : self.inst_size,
@@ -367,6 +395,10 @@ class Package(object):
        def obsoletes(self):
                return self.metadata.get("PKG_OBSOLETES", "").split()
 
+       @property
+       def scriptlets(self):
+               return self.metadata.get("PKG_SCRIPTLETS", "").split()
+
        @property
        def filelist(self):
                raise NotImplementedError
index bd751128dc99f75f09b8a2553f8197f60e12654b..b0b06ebbeca5801a30b47371004b8bc896c5a2ad 100644 (file)
 from file import FilePackage
 
 class BinaryPackage(FilePackage):
-       @property
-       def arch(self):
-               return self.metadata.get("PKG_ARCH")
+       def get_scriptlet(self, type):
+               a = self.open_archive()
+
+               # Path of the scriptlet in the tarball.
+               path = "scriptlets/%s" % type
+
+               try:
+                       f = a.extractfile(path)
+               except KeyError:
+                       # If the scriptlet is not available, we just return.
+                       return
+
+               scriptlet = f.read()
+
+               f.close()
+               a.close()
+
+               return scriptlet
index 3926f40bd6bb45cd58a22d0222c9729aef3b0e5e..2f322dc5b8813959a525b1b8248ed6addee07954 100644 (file)
@@ -28,10 +28,15 @@ import xattr
 
 import pakfire.util as util
 import pakfire.compress as compress
-from pakfire.errors import FileError
 from pakfire.constants import *
 
 from base import Package
+from lexer import FileLexer
+
+# XXX need to add zlib and stuff here.
+PAYLOAD_COMPRESSION_MAGIC = {
+       "xz" : "\xfd7zXZ",
+}
 
 class InnerTarFile(tarfile.TarFile):
        SUPPORTED_XATTRS = ("security.capability",)
@@ -118,8 +123,28 @@ class FilePackage(Package):
                # Place to cache the metadata
                self._metadata = {}
 
+               # Store the format of this package file.
+               self.format = self.get_format()
+
+               # XXX need to make this much better.
                self.check()
 
+               # Read the info file.
+               if self.format >= 1:
+                       a = self.open_archive()
+                       f = a.extractfile("info")
+
+                       self.lexer = FileLexer(f.readlines())
+
+                       f.close()
+                       a.close()
+
+               elif self.format == 0:
+                       pass
+
+               else:
+                       raise PackageFormatUnsupportedError, _("Filename: %s") % self.filename
+
        def check(self):
                """
                        Initially check if the given file is of the correct type and
@@ -128,6 +153,26 @@ class FilePackage(Package):
                if not tarfile.is_tarfile(self.filename):
                        raise FileError, "Given file is not of correct format: %s" % self.filename
 
+               assert self.format in PACKAGE_FORMATS_SUPPORTED
+
+       def get_format(self):
+               a = self.open_archive()
+               try:
+                       f = a.extractfile("pakfire-format")
+               except KeyError:
+                       return 0
+
+               format = f.read()
+               try:
+                       format = int(format)
+               except TypeError:
+                       format = 0
+
+               f.close()
+               a.close()
+
+               return format
+
        def __repr__(self):
                return "<%s %s>" % (self.__class__.__name__, self.filename)
 
@@ -233,18 +278,13 @@ class FilePackage(Package):
                if pb:
                        pb.finish()
 
-       @property
-       def file_version(self):
-               """
-                       Returns the version of the package metadata.
-               """
-               return self.metadata.get("VERSION")
-
        @property
        def metadata(self):
                """
                        Read-in the metadata from the "info" file and cache it in _metadata.
                """
+               assert self.format == 0, self
+
                if not self._metadata:
                        a = self.open_archive()
                        f = a.extractfile("info")
@@ -324,22 +364,42 @@ class FilePackage(Package):
 
        @property
        def configfiles(self):
-               return [] # XXX to be done
+               a = self.open_archive()
+
+               f = a.extractfile("configs")
+               for line in f.readlines():
+                       if not line.startswith("/"):
+                               line = "/%s" % line
+                       yield line
+
+               a.close()
 
        @property
        def payload_compression(self):
                """
-                       Return the compression type of the payload.
+                       Return the (guessed) compression type of the payload.
                """
-               comp = self.metadata.get("PKG_PAYLOAD_COMP", None)
+               # Get the max. length of the magic values.
+               max_length = max([len(v) for v in PAYLOAD_COMPRESSION_MAGIC.values()])
 
-               if comp == "XXX":
-                       return
+               a = self.open_archive()
+               f = a.extractfile("data.img")
 
-               return comp
+               # Read magic bytes from file.
+               magic = f.read(max_length)
+
+               f.close()
+               a.close()
+
+               for algo, m in PAYLOAD_COMPRESSION_MAGIC.items():
+                       if not magic.startswith(m):
+                               continue
+
+                       return algo
 
        @property
        def signature(self):
+               # XXX needs to be replaced
                """
                        Read the signature from the archive or return None if no
                        signature does exist.
@@ -366,25 +426,231 @@ class FilePackage(Package):
                        Calculate the hash1 of this package.
                """
                return util.calc_hash1(self.filename)
+       
+       @property
+       def name(self):
+               if self.format >= 1:
+                       name = self.lexer.package.get_var("name")
+               elif self.format == 0:
+                       name = self.metadata.get("PKG_NAME")
+
+               assert name, self
+               return name
 
        @property
-       def scriptlet(self):
-               """
-                       Read the scriptlet from the archive or return an empty string if no
-                       scriptlet does exist.
-               """
-               ret = None
+       def epoch(self):
+               if self.format >= 1:
+                       epoch = self.lexer.package.get_var("epoch", 0)
+               elif self.format == 0:
+                       epoch = self.metadata.get("PKG_EPOCH")
+
                try:
-                       a = self.open_archive()
-                       f = a.extractfile("control")
+                       epoch = int(epoch)
+               except TypeError:
+                       epoch = 0
 
-                       ret = f.read()
+               return epoch
 
-                       f.close()
-                       a.close()
+       @property
+       def version(self):
+               if self.format >= 1:
+                       version = self.lexer.package.get_var("version")
+               elif self.format == 0:
+                       version = self.metadata.get("PKG_VER")
 
-               except KeyError:
-                       # scriptlet file could not be found
-                       pass
+               assert version, self
+               return version
+
+       @property
+       def release(self):
+               if self.format >= 1:
+                       release = self.lexer.package.get_var("release")
+               elif self.format == 0:
+                       release = self.metadata.get("PKG_REL")
+
+               assert release, self
+               return release
+
+       @property
+       def arch(self):
+               if self.format >= 1:
+                       arch = self.lexer.package.get_var("arch")
+               elif self.format == 0:
+                       arch = self.metadata.get("PKG_ARCH")
+
+               assert arch, self
+               return arch
+
+       @property
+       def vendor(self):
+               if self.format >= 1:
+                       vendor = self.lexer.distro.get_var("vendor")
+               elif self.format == 0:
+                       vendor = self.metadata.get("PKG_VENDOR")
+
+               return vendor
+
+       @property
+       def summary(self):
+               if self.format >= 1:
+                       summary = self.lexer.package.get_var("summary")
+               elif self.format == 0:
+                       summary = self.metadata.get("PKG_SUMMARY")
+
+               assert summary, self
+               return summary
+
+       @property
+       def description(self):
+               if self.format >= 1:
+                       description = self.lexer.package.get_var("description")
+               elif self.format == 0:
+                       description = self.metadata.get("PKG_DESC")
+
+               return description
+
+       @property
+       def groups(self):
+               if self.format >= 1:
+                       groups = self.lexer.package.get_var("groups")
+               elif self.format == 0:
+                       groups = self.metadata.get("PKG_GROUPS")
+
+               if groups:
+                       return groups.split()
+
+               return []
+
+       @property
+       def license(self):
+               if self.format >= 1:
+                       license = self.lexer.package.get_var("license")
+               elif self.format == 0:
+                       license = self.metadata.get("PKG_LICENSE")
+
+               return license
+
+       @property
+       def url(self):
+               if self.format >= 1:
+                       url = self.lexer.package.get_var("url")
+               elif self.format == 0:
+                       url = self.metadata.get("PKG_URL")
+
+               return url
+
+       @property
+       def maintainer(self):
+               if self.format >= 1:
+                       maintainer = self.lexer.package.get_var("maintainer")
+               elif self.format == 0:
+                       maintainer = self.metadata.get("PKG_MAINTAINER")
+
+               return maintainer
+
+       @property
+       def uuid(self):
+               if self.format >= 1:
+                       uuid = self.lexer.package.get_var("uuid")
+               elif self.format == 0:
+                       uuid = self.metadata.get("PKG_UUID")
+
+               #assert uuid, self XXX re-enable this
+               return uuid
+
+       @property
+       def build_id(self):
+               if self.format >= 1:
+                       build_id = self.lexer.build.get_var("id")
+               elif self.format == 0:
+                       build_id = self.metadata.get("BUILD_ID")
+
+               assert build_id, self
+               return build_id
+
+       @property
+       def build_host(self):
+               if self.format >= 1:
+                       build_host = self.lexer.build.get_var("host")
+               elif self.format == 0:
+                       build_host = self.metadata.get("BUILD_HOST")
+
+               assert build_host, self
+               return build_host
+
+       @property
+       def build_time(self):
+               if self.format >= 1:
+                       build_time = self.lexer.build.get_var("time")
+               elif self.format == 0:
+                       build_time = self.metadata.get("BUILD_TIME")
+
+               # XXX re-enable this later
+               #assert build_time, self
+
+               try:
+                       build_time = int(build_time)
+               except TypeError:
+                       build_time = 0
+
+               return build_time
+
+       @property
+       def provides(self):
+               if self.format >= 1:
+                       provides = self.lexer.deps.get_var("provides")
+               elif self.format == 0:
+                       provides = self.metadata.get("PKG_PROVIDES")
+
+               if not provides:
+                       return []
+
+               return provides.split()
+
+       @property
+       def requires(self):
+               if self.format >= 1:
+                       requires = self.lexer.deps.get_var("requires")
+               elif self.format == 0:
+                       requires = self.metadata.get("PKG_REQUIRES")
+
+               if not requires:
+                       return []
+
+               return requires.split()
+
+       @property
+       def prerequires(self):
+               if self.format >= 1:
+                       prerequires = self.lexer.deps.get_var("prerequires")
+               elif self.format == 0:
+                       prerequires = self.metadata.get("PKG_PREREQUIRES")
+
+               if not prerequires:
+                       return []
+
+               return prerequires.split()
+
+       @property
+       def obsoletes(self):
+               if self.format >= 1:
+                       obsoletes = self.lexer.deps.get_var("obsoletes")
+               elif self.format == 0:
+                       obsoletes = self.metadata.get("PKG_OBSOLETES")
+
+               if not obsoletes:
+                       return []
+
+               return obsoletes.split()
+
+       @property
+       def conflicts(self):
+               if self.format >= 1:
+                       conflicts = self.lexer.deps.get_var("conflicts")
+               elif self.format == 0:
+                       conflicts = self.metadata.get("PKG_CONFLICTS")
+
+               if not conflicts:
+                       return []
 
-               return ret or ""
+               return conflicts.split()
diff --git a/pakfire/packages/lexer.py b/pakfire/packages/lexer.py
new file mode 100644 (file)
index 0000000..54572c4
--- /dev/null
@@ -0,0 +1,777 @@
+#!/usr/bin/python
+
+import logging
+import os
+import re
+
+from pakfire.constants import *
+
class LexerError(Exception):
	"""Base class for all errors raised by the lexer."""
	pass


class LexerUnhandledLine(LexerError):
	"""Raised when no parser is able to handle a line."""
	pass


class EndOfFileError(LexerError):
	"""Raised when a line beyond the end of the input is requested."""
	pass


class LexerUndefinedVariableError(LexerError):
	"""Raised when an undefined variable is referenced."""
	pass
+
+
# A valid package name starts with a letter, followed by any number of
# letters, digits, "_", "-" or "+". The original pattern had no
# quantifier, so it required exactly two valid characters and rejected
# one-letter package names.
# TODO consider anchoring with "$" once it is certain no existing
# package names carry other trailing characters.
LEXER_VALID_PACKAGE_NAME    = re.compile(r"^[A-Za-z][A-Za-z0-9\_\-\+]*")

# XXX need to build check
LEXER_VALID_SCRIPTLET_NAME  = re.compile(r"((pre|post|posttrans)(in|un|up))")

LEXER_COMMENT_CHAR    = "#"
LEXER_COMMENT         = re.compile(r"^\s*#")
LEXER_QUOTES          = "\"'"
LEXER_EMPTY_LINE      = re.compile(r"^\s*$")

LEXER_DEFINITION      = re.compile(r"^([A-Za-z0-9_\-]+)\s*(\+)?=\s*(.+)?")

LEXER_BLOCK_LINE_INDENT = "\t"
LEXER_BLOCK_LINE      = re.compile(r"^\t(.*)$")
LEXER_BLOCK_END       = re.compile(r"^end$")

LEXER_DEFINE_BEGIN    = re.compile(r"^def ([A-Za-z0-9_\-]+)$")
LEXER_DEFINE_LINE     = LEXER_BLOCK_LINE
LEXER_DEFINE_END      = LEXER_BLOCK_END

LEXER_PACKAGE_BEGIN   = re.compile(r"^package ([A-Za-z0-9_\-\+\%\{\}]+)$")
LEXER_PACKAGE_LINE    = LEXER_BLOCK_LINE
LEXER_PACKAGE_END     = LEXER_BLOCK_END
LEXER_PACKAGE_INHERIT = re.compile(r"^template ([A-Z]+)$")

LEXER_SCRIPTLET_BEGIN = re.compile(r"^script ([a-z]+)\s?(/[A-Za-z0-9\-\_/]+)?$")
LEXER_SCRIPTLET_LINE  = LEXER_BLOCK_LINE
LEXER_SCRIPTLET_END   = LEXER_BLOCK_END

LEXER_TEMPLATE_BEGIN  = re.compile(r"^template ([A-Z]+)$")
LEXER_TEMPLATE_LINE   = LEXER_BLOCK_LINE
LEXER_TEMPLATE_END    = LEXER_BLOCK_END

LEXER_BUILD_BEGIN     = re.compile(r"^build$")
LEXER_BUILD_LINE      = LEXER_BLOCK_LINE
LEXER_BUILD_END       = LEXER_BLOCK_END

LEXER_DEPS_BEGIN      = re.compile(r"^dependencies$")
LEXER_DEPS_LINE       = LEXER_BLOCK_LINE
LEXER_DEPS_END        = LEXER_BLOCK_END

LEXER_DISTRO_BEGIN    = re.compile(r"^distribution$")
LEXER_DISTRO_LINE     = LEXER_BLOCK_LINE
LEXER_DISTRO_END      = LEXER_BLOCK_END

LEXER_PACKAGE2_BEGIN  = re.compile(r"^package$")
LEXER_PACKAGE2_LINE   = LEXER_BLOCK_LINE
LEXER_PACKAGE2_END    = LEXER_BLOCK_END

# Statements:
# The key group needs a "+" quantifier - without it, only single-letter
# export keys could ever match (e.g. "export CFLAGS = ..." never did).
LEXER_EXPORT          = re.compile(r"^export ([A-Za-z0-9_\-]+)\s*(\+)?=\s*(.+)$")
LEXER_UNEXPORT        = re.compile(r"^unexport ([A-Za-z0-9_\-]+)$")
LEXER_INCLUDE         = re.compile(r"^include (.+)$")

LEXER_VARIABLE        = re.compile(r"\%\{([A-Za-z0-9_\-]+)\}")
+
+
class Lexer(object):
	"""
		Base lexer.

		Parses the given lines on construction, collecting simple
		"key = value" definitions and multi-line "def NAME ... end"
		blocks into self._definitions. Subclasses hook in via init()
		and get_parsers().
	"""

	def __init__(self, lines=None, parent=None, environ=None):
		# Avoid the shared mutable default argument of the original
		# implementation.
		self.lines = [] if lines is None else lines
		self.parent = parent

		self._lineno = 0

		# A place to store all definitions.
		self._definitions = {}

		# Init function that can be overwritten by child classes.
		self.init(environ)

		# Run the parser.
		self.run()

	def inherit(self, other):
		"""Copy all definitions from another lexer."""
		self._definitions.update(other._definitions)

	@property
	def definitions(self):
		return self._definitions

	@classmethod
	def open(cls, filename, *args, **kwargs):
		"""Alternate constructor reading the lines from a file."""
		f = open(filename)
		try:
			lines = f.readlines()
		finally:
			f.close()

		return cls(lines, *args, **kwargs)

	@property
	def lineno(self):
		"""1-based number of the line currently being parsed."""
		return self._lineno + 1

	@property
	def root(self):
		"""The topmost lexer in the parent chain."""
		if self.parent:
			return self.parent.root

		return self

	def get_line(self, no, raw=False):
		"""
			Return line `no` without its trailing newline.

			With raw=True the line is returned untouched; otherwise
			whole-line comments are replaced by an empty string.
			Raises EndOfFileError past the end of the input.
		"""
		try:
			line = self.lines[no]
		except IndexError:
			# self.lines is a list, so an out-of-range access raises
			# IndexError. The original code caught KeyError here and
			# therefore never actually raised EndOfFileError.
			raise EndOfFileError

		# Strip newline.
		line = line.rstrip("\n")

		if raw:
			return line

		# XXX comments are only stripped when they start the line;
		# inline comments (respecting quotes) are not handled yet.
		if line.startswith(LEXER_COMMENT_CHAR):
			return ""

		return line

	def line_is_empty(self):
		"""Check whether the current line contains only whitespace."""
		line = self.get_line(self._lineno)

		return re.match(LEXER_EMPTY_LINE, line) is not None

	def expand_string(self, s):
		"""Recursively expand all %{variable} references in s."""
		if s is None:
			return ""

		while s:
			m = re.search(LEXER_VARIABLE, s)
			if not m:
				break

			var = m.group(1)
			s = s.replace("%%{%s}" % var, self.get_var(var))

		return s

	def get_var(self, key, default=None):
		"""
			Look up a variable - local definitions override those of
			the root lexer - and return its expanded value.
		"""
		definitions = {}
		definitions.update(self.root.definitions)
		definitions.update(self.definitions)

		try:
			val = definitions[key]
		except KeyError:
			logging.warning("Undefined variable: %s" % key)
			val = None

		if val is None:
			val = default

		return self.expand_string(val)

	def init(self, environ):
		"""Hook for subclasses; called before parsing starts."""
		pass

	def get_default_parsers(self):
		return [
			(LEXER_COMMENT,                 self.parse_comment),
			(LEXER_DEFINITION,              self.parse_definition),
			(LEXER_DEFINE_BEGIN,    self.parse_define),
		]

	def get_parsers(self):
		"""Additional parsers provided by subclasses."""
		return []

	def parse_line(self):
		"""Dispatch the current line to the first matching parser."""
		# Skip empty lines.
		if self.line_is_empty():
			self._lineno += 1
			return

		line = self.get_line(self._lineno)

		for pattern, func in self.get_default_parsers() + self.get_parsers():
			if re.match(pattern, line):
				# Found a match; the subparser consumes the line(s).
				func()
				return

		raise LexerUnhandledLine("%d: %s" % (self.lineno, line))

	def read_block(self, pattern_start=None, pattern_line=None, pattern_end=None,
			raw=False):
		"""
			Read a "begin ... end" block. Returns a tuple of the
			groups matched by pattern_start and the block's lines.
		"""
		assert pattern_start
		assert pattern_line
		assert pattern_end

		line = self.get_line(self._lineno)

		m = re.match(pattern_start, line)
		if not m:
			raise LexerError

		# Go on to the next line.
		self._lineno += 1

		groups = m.groups()

		lines = []
		while True:
			line = self.get_line(self._lineno, raw=raw)

			if re.match(pattern_end, line):
				self._lineno += 1
				break

			m = re.match(pattern_line, line)
			if m:
				lines.append(m.group(1))
				self._lineno += 1
				continue

			if re.match(LEXER_EMPTY_LINE, line):
				lines.append("")
				self._lineno += 1
				continue

			if not line.startswith(LEXER_BLOCK_LINE_INDENT):
				raise LexerError("Line has not the right indentation: %d: %s" \
					% (self.lineno, line))

			raise LexerUnhandledLine("%d: %s" % (self.lineno, line))

		return (groups, lines)

	def run(self):
		"""Parse all lines."""
		while self._lineno < len(self.lines):
			self.parse_line()

	def parse_comment(self):
		"""Consume a comment line."""
		# The original implementation returned without advancing
		# _lineno, which made parse_line() loop forever on (indented)
		# comment lines that reach this parser. Simply skip the line.
		self._lineno += 1

	def parse_definition(self, pattern=LEXER_DEFINITION):
		"""
			Parse a "key = value" (or "key += value") definition,
			store it and return the (key, value) pair.
		"""
		line = self.get_line(self._lineno)

		m = re.match(pattern, line)
		if not m:
			raise LexerError("Not a definition: %s" % line)

		# Line was correctly parsed, can go on.
		self._lineno += 1

		k, o, v = m.groups()

		# "+=" appends to a previously defined value.
		if o == "+":
			prev = self.definitions.get(k, None)
			if prev:
				v = " ".join((prev, v))

		# A trailing backslash continues the value on the next line.
		while v and v.endswith("\\"):
			line = self.get_line(self._lineno)
			self._lineno += 1

			v = v[:-1] + line

		self._definitions[k] = v

		return k, v

	def parse_define(self):
		"""Parse a multi-line "def NAME ... end" block."""
		line = self.get_line(self._lineno)

		m = re.match(LEXER_DEFINE_BEGIN, line)
		if not m:
			raise LexerError("Not a define: %s" % line)

		# Go on to the next line.
		self._lineno += 1

		key = m.group(1)
		assert key

		value = []
		while True:
			line = self.get_line(self._lineno)

			if re.match(LEXER_DEFINE_END, line):
				self._lineno += 1
				break

			m = re.match(LEXER_DEFINE_LINE, line)
			if m:
				self._lineno += 1
				value.append(m.group(1))
				continue

			if re.match(LEXER_EMPTY_LINE, line):
				self._lineno += 1
				value.append("")
				continue

			raise LexerError("Unhandled line: %s" % line)

		self._definitions[key] = "\n".join(value)
+
+
class DefaultLexer(Lexer):
	"""
		A lexer which only knows about simple definitions and "def"
		blocks.
	"""
	pass
+
+
class TemplateLexer(DefaultLexer):
	"""
		Lexer for template blocks. Additionally collects scriptlets
		("script NAME ..." blocks).
	"""

	def init(self, environ):
		# A place to store the scriptlets.
		self.scriptlets = {}

	@property
	def definitions(self):
		definitions = {}

		# A template always lives inside a parent lexer whose
		# definitions it extends.
		assert self.parent
		definitions.update(self.parent.definitions)
		definitions.update(self._definitions)

		return definitions

	def get_parsers(self):
		return [
			(LEXER_SCRIPTLET_BEGIN, self.parse_scriptlet),
		]

	def parse_scriptlet(self):
		"""
			Parse a "script NAME [/path]" block. A path denotes a
			binary scriptlet; an inline body is stored as shell.
		"""
		line = self.get_line(self._lineno)

		m = re.match(LEXER_SCRIPTLET_BEGIN, line)
		if not m:
			raise LexerError("Not a scriptlet: %s" % line)

		self._lineno += 1

		name = m.group(1)

		# Check if the scriptlet was already defined. ("in" replaces
		# dict.has_key(), which no longer exists in Python 3.)
		if name in self.scriptlets:
			raise LexerError("Scriptlet %s is already defined" % name)

		path = m.group(2)
		if path:
			self.scriptlets[name] = {
				"lang" : "bin",
				"path" : self.expand_string(path),
			}
			return

		lines = []
		while True:
			line = self.get_line(self._lineno, raw=True)

			if re.match(LEXER_SCRIPTLET_END, line):
				self._lineno += 1
				break

			m = re.match(LEXER_SCRIPTLET_LINE, line)
			if m:
				lines.append(m.group(1))
				self._lineno += 1
				continue

			if re.match(LEXER_EMPTY_LINE, line):
				lines.append("")
				self._lineno += 1
				continue

			raise LexerUnhandledLine("%d: %s" % (self.lineno, line))

		self.scriptlets[name] = {
			"lang"      : "shell",
			"scriptlet" : "\n".join(lines),
		}
+
+
class PackageLexer(TemplateLexer):
	"""
		Lexer for a package block. A package may inherit definitions
		from a template; by default it inherits from "MAIN".
	"""

	def init(self, environ):
		TemplateLexer.init(self, environ)

		# Name of the template this package inherits from.
		self._template = "MAIN"

	@property
	def definitions(self):
		definitions = {}

		if self.template:
			definitions.update(self.template.definitions)

		definitions.update(self._definitions)

		return definitions

	@property
	def template(self):
		"""The template lexer this package inherits from (or None)."""
		if not self._template:
			return None

		# Get templates from root.
		assert self.root
		templates = self.root.templates

		try:
			return templates[self._template]
		except KeyError:
			# Python-3-compatible raise syntax (the original used the
			# removed "raise E, msg" form).
			raise LexerError("Template does not exist: %s" % self._template)

	def get_parsers(self):
		parsers = TemplateLexer.get_parsers(self)

		parsers += [
			(LEXER_PACKAGE_INHERIT,         self.parse_inherit),
		]

		return parsers

	def parse_inherit(self):
		"""Parse a "template NAME" inheritance statement."""
		line = self.get_line(self._lineno)

		m = re.match(LEXER_PACKAGE_INHERIT, line)
		if not m:
			raise LexerError("Not a template inheritance: %s" % line)

		self._lineno += 1

		self._template = m.group(1)

		# Check that the template actually exists.
		assert self.template
+
+
class BuildLexer(DefaultLexer):
	"""Lexer holding the build instructions (stages)."""

	@property
	def definitions(self):
		return self._definitions

	@property
	def stages(self):
		# The build stages are simply the collected definitions.
		return self.definitions

	def inherit(self, other):
		"""Merge all definitions from another build lexer."""
		self._definitions.update(other._definitions)
+
+
class RootLexer(DefaultLexer):
	"""
		Lexer for a whole makefile: handles include statements,
		template, package and build blocks.
	"""

	def init(self, environ):
		# A list of variables that should be exported in the build
		# environment.
		self.exports = []

		# Import all environment variables.
		if environ:
			for k, v in environ.items():
				self._definitions[k] = v

				self.exports.append(k)

		# A place to store all packages.
		self.packages = []

		# A place to store all templates.
		self.templates = {}

		# Place for build instructions.
		self.build = BuildLexer([], parent=self)

		# Include all macros - but only for the root file itself.
		if not self.parent:
			for macro in MACRO_FILES:
				self.include(macro)

	def include(self, file):
		"""Parse another file and merge everything it defines."""
		# Create a new lexer, and parse the whole file.
		include = RootLexer.open(file, parent=self)

		# Copy all data from the included file.
		self.inherit(include)

	def inherit(self, other):
		"""
			Inherit everything from other lexer.
		"""
		self._definitions.update(other._definitions)

		self.build.inherit(other.build)
		self.templates.update(other.templates)
		self.packages += other.packages

		for export in other.exports:
			if not export in self.exports:
				self.exports.append(export)

	def get_parsers(self):
		return [
			(LEXER_INCLUDE,                 self.parse_include),
			(LEXER_TEMPLATE_BEGIN,  self.parse_template),
			(LEXER_PACKAGE_BEGIN,   self.parse_package),
			(LEXER_BUILD_BEGIN,             self.parse_build),
		]

	def _read_body(self, pattern_end, pattern_line):
		"""
			Collect the lines of a block until pattern_end matches.

			Shared by parse_build, parse_template and parse_package.
			The original per-method loops were missing a "continue"
			after a block-line match (tab-only lines were appended
			twice and the following line skipped) and looped forever
			on lines matching neither pattern.
		"""
		lines = []

		while True:
			line = self.get_line(self._lineno)

			if re.match(pattern_end, line):
				self._lineno += 1
				break

			m = re.match(pattern_line, line)
			if m:
				lines.append(m.group(1))
				self._lineno += 1
				continue

			# Accept empty lines.
			if re.match(LEXER_EMPTY_LINE, line):
				lines.append(line)
				self._lineno += 1
				continue

			raise LexerUnhandledLine("%d: %s" % (self.lineno, line))

		return lines

	def parse_build(self):
		"""Parse a "build ... end" block."""
		line = self.get_line(self._lineno)

		m = re.match(LEXER_BUILD_BEGIN, line)
		if not m:
			raise LexerError("Not a build statement: %s" % line)

		self._lineno += 1

		lines = self._read_body(LEXER_BUILD_END, LEXER_BUILD_LINE)

		build = BuildLexer(lines, parent=self)
		self.build.inherit(build)

	def parse_include(self):
		"""Parse an "include FILE" statement."""
		line = self.get_line(self._lineno)

		m = re.match(LEXER_INCLUDE, line)
		if not m:
			raise LexerError("Not an include statement: %s" % line)

		# Get the filename from the line.
		file = m.group(1)
		file = self.expand_string(file)

		# Include the content of the file.
		self.include(file)

		# Go on to next line.
		self._lineno += 1

	def parse_export(self):
		"""Parse an "export KEY = VALUE" statement."""
		# The original code called parse_definition(pattern, ...)
		# with an undefined name "pattern"; the export pattern must
		# be passed as the parse_definition pattern argument.
		k, v = self.parse_definition(pattern=LEXER_EXPORT)

		if k and not k in self.exports:
			self.exports.append(k)

	def parse_unexport(self):
		"""Parse an "unexport KEY" statement."""
		line = self.get_line(self._lineno)
		self._lineno += 1

		m = re.match(LEXER_UNEXPORT, line)
		if m:
			k = m.group(1)
			if k and k in self.exports:
				self.exports.remove(k)

	def parse_template(self):
		"""Parse a "template NAME ... end" block."""
		line = self.get_line(self._lineno)

		m = re.match(LEXER_TEMPLATE_BEGIN, line)
		if not m:
			raise LexerError("Not a template: %s" % line)

		# Line was correctly parsed, can go on.
		self._lineno += 1

		name = m.group(1)
		lines = self._read_body(LEXER_TEMPLATE_END, LEXER_TEMPLATE_LINE)

		template = TemplateLexer(lines, parent=self)
		self.templates[name] = template

	def parse_package(self):
		"""Parse a "package NAME ... end" block."""
		line = self.get_line(self._lineno)

		m = re.match(LEXER_PACKAGE_BEGIN, line)
		if not m:
			raise LexerError("Not a package: %s" % line)

		self._lineno += 1

		name = m.group(1)
		name = self.expand_string(name)

		if not re.match(LEXER_VALID_PACKAGE_NAME, name):
			raise LexerError("Invalid package name: %s" % name)

		# Pass the package name down as a definition.
		lines = ["name = %s" % name]
		lines += self._read_body(LEXER_PACKAGE_END, LEXER_PACKAGE_LINE)

		package = PackageLexer(lines, parent=self)
		self.packages.append(package)
+
+
class FileLexer(DefaultLexer):
	"""
		Lexer for a whole makefile.

		Collects the build, distro, package and dependency sections into
		their own DefaultLexer instances.
	"""

	def init(self, environ):
		self.build = DefaultLexer()
		self.deps = DefaultLexer()
		self.distro = DefaultLexer()
		self.package = DefaultLexer()

	def get_parsers(self):
		return [
			(LEXER_BUILD_BEGIN,		self.parse_build),
			(LEXER_DISTRO_BEGIN,	self.parse_distro),
			(LEXER_PACKAGE2_BEGIN,	self.parse_package),
			(LEXER_DEPS_BEGIN,		self.parse_deps),
		]

	def _parse_section(self, target, pattern_start, pattern_line, pattern_end):
		# Read one raw block and merge its contents into the given
		# sub-lexer. Shared implementation for all parse_* methods, which
		# previously were four identical copies of this code.
		keys, lines = self.read_block(
			pattern_start=pattern_start,
			pattern_line=pattern_line,
			pattern_end=pattern_end,
			raw=True,
		)

		target.inherit(DefaultLexer(lines))

	def parse_build(self):
		self._parse_section(self.build,
			LEXER_BUILD_BEGIN, LEXER_BUILD_LINE, LEXER_BUILD_END)

	def parse_distro(self):
		self._parse_section(self.distro,
			LEXER_DISTRO_BEGIN, LEXER_DISTRO_LINE, LEXER_DISTRO_END)

	def parse_package(self):
		self._parse_section(self.package,
			LEXER_PACKAGE2_BEGIN, LEXER_PACKAGE2_LINE, LEXER_PACKAGE2_END)

	def parse_deps(self):
		self._parse_section(self.deps,
			LEXER_DEPS_BEGIN, LEXER_DEPS_LINE, LEXER_DEPS_END)
index 34f6c16a2c0e4dc6de92df776617e226a8a312b6..1b96b3b7384ad21cc1c57d73447010b8f744e252 100644 (file)
 #                                                                             #
 ###############################################################################
 
+import logging
 import os
+import shutil
+import socket
 import tarfile
 
 from urlgrabber.grabber import URLGrabber, URLGrabError
 from urlgrabber.progress import TextMeter
 
+import lexer
 import packager
 
+import pakfire.util as util
+
 from base import Package
 from source import SourcePackage
-from virtual import VirtualPackage
-from pakfire.errors import DownloadError
 from pakfire.constants import *
+from pakfire.i18n import _
 
+# XXX to be moved to pakfire.downloader
 class SourceDownloader(object):
-       def __init__(self, pakfire):
+       def __init__(self, pakfire, mirrors=None):
                self.pakfire = pakfire
+               self.mirrors = mirrors
 
                # XXX need to use downloader.py
                self.grabber = URLGrabber(
@@ -62,42 +69,17 @@ class SourceDownloader(object):
                return filename
 
 
-class MakeVirtualPackage(VirtualPackage):
-       """
-               A simple package that always overwrites the file_patterns.
-       """
-       @property
-       def file_patterns(self):
-               """
-                       All files that belong into a source package are located in /build.
-               """
-               return ["/",]
-
-class Makefile(Package):
+class MakefileBase(Package):
	def __init__(self, pakfire, filename):
		"""
			Open and parse the makefile at the given path.
		"""
		Package.__init__(self, pakfire)

		# Save the filename of the makefile.
		self.filename = os.path.abspath(filename)

		# Open and parse the makefile.
		# XXX pass environment to lexer
		self.lexer = lexer.RootLexer.open(self.filename,
			environ=self.pakfire.environ)
 
        @property
        def package_filename(self):
@@ -109,6 +91,112 @@ class Makefile(Package):
                        "version" : self.version,
                }
 
+       def lint(self):
+               errors = []
+
+               if not self.name:
+                       errors.append(_("Package name is undefined."))
+
+               if not self.version:
+                       errors.append(_("Package version is undefined."))
+
+               # XXX to do...
+
+               return errors
+
	@property
	def name(self):
		# The package name as set in the makefile.
		return self.lexer.get_var("name")
+
+       @property
+       def epoch(self):
+               epoch = self.lexer.get_var("epoch")
+               if not epoch:
+                       return 0
+
+               return int(epoch)
+
	@property
	def version(self):
		# The package version as set in the makefile.
		return self.lexer.get_var("version")
+
+       @property
+       def release(self):
+               release = self.lexer.get_var("release")
+               assert release
+
+               tag = self.lexer.get_var("DISTRO_DISTTAG")
+               assert tag
+
+               return ".".join((release, tag))
+
	@property
	def summary(self):
		# One-line package summary from the makefile.
		return self.lexer.get_var("summary")
+
	@property
	def description(self):
		# Long package description from the makefile.
		return self.lexer.get_var("description")
+
+       @property
+       def groups(self):
+               groups = self.lexer.get_var("groups").split()
+
+               return sorted(groups)
+
	@property
	def url(self):
		# Upstream project URL from the makefile.
		return self.lexer.get_var("url")
+
	@property
	def license(self):
		# License string from the makefile.
		return self.lexer.get_var("license")
+
+       @property
+       def maintainer(self):
+               maintainer = self.lexer.get_var("maintainer")
+
+               if not maintainer:
+                       maintainer = self.lexer.get_var("DISTRO_MAINTAINER")
+
+               return maintainer
+
	@property
	def vendor(self):
		# Vendor comes from the distribution configuration, not from the
		# makefile itself.
		return self.lexer.get_var("DISTRO_VENDOR")
+
	@property
	def build_host(self):
		# Hostname of the machine the package is being built on.
		return socket.gethostname()
+
+       # XXX build_id and build_time are used to create a source package
+
	@property
	def build_id(self):
		# XXX todo
		# Not existant for Makefiles
		return None
+
	@property
	def build_time(self):
		# XXX todo
		# Not existant for Makefiles
		return None
+
+
+class Makefile(MakefileBase):
+       @property
+       def uuid(self):
+               hash1 = util.calc_hash1(self.filename)
+
+               # Return UUID version 5 (SHA1 hash)
+               return "%8s-%4s-5%3s-%4s-%11s" % \
+                       (hash1[0:8], hash1[9:13], hash1[14:17], hash1[18:22], hash1[23:34])
+
	@property
	def path(self):
		# Directory the makefile lives in.
		return os.path.dirname(self.filename)
+
        @property
        def arch(self):
                """
@@ -116,14 +204,211 @@ class Makefile(Package):
                """
                return "src"
 
-       def dist(self, env):
+       @property
+       def packages(self):
+               pkgs = []
+
+               for lexer in self.lexer.packages:
+                       name = lexer.get_var("name")
+
+                       pkg = MakefilePackage(self.pakfire, name, lexer)
+                       pkgs.append(pkg)
+
+               return sorted(pkgs)
+
+       @property
+       def source_dl(self):
+               dls = []
+
+               if self.pakfire.distro.source_dl:
+                       dls.append(self.pakfire.distro.source_dl)
+
+               dl = self.lexer.get_var("source_dl")
+               if dl:
+                       dls.append(dl)
+
+               return dls
+
+       def download(self):
+               """
+                       Download all external sources and return a list with the local
+                       copies.
                """
-                       Create a source package in env.
+               # Download source files.
+               # XXX need to implement mirrors
+               downloader = SourceDownloader(self.pakfire, mirrors=self.source_dl)
+
+               files = []
+               for filename in self.sources:
+                       filename = downloader.download(filename)
+                       files.append(filename)
 
-                       We assume that all requires files are in /build.
+               return files
+
+       def dist(self, resultdirs):
                """
-               pkg = MakeVirtualPackage(self.pakfire, env.make_info)
+                       Create a source package.
+
+                       We assume that all required files are in /build.
+               """
+               #dump = self.dump()
+               #for line in dump.splitlines():
+               #       logging.info(line)
+
+               p = packager.SourcePackager(self.pakfire, self)
+               p.run(resultdirs)
+
+       def dump(self, *args, **kwargs):
+               dump = MakefileBase.dump(self, *args, **kwargs)
+               dump = dump.splitlines()
+
+               #dump += ["", _("Containing the following binary packages:"),]
+               #
+               #for pkg in self.packages:
+               #       _dump = pkg.dump(*args, **kwargs)
+               #
+               #       for line in _dump.splitlines():
+               #               dump.append("  %s" % line)
+               #       dump.append("")
+
+               return "\n".join(dump)
+
	def get_buildscript(self, stage):
		# Build scripts are stored as "_<stage>" in the build section.
		return self.lexer.build.get_var("_%s" % stage)
+
	@property
	def prerequires(self):
		# Source packages have no pre-requirements.
		return []
+
+       @property
+       def requires(self):
+               return self.lexer.get_var("build_requires").split()
+
	@property
	def provides(self):
		# Source packages provide nothing.
		return []
+
	@property
	def obsoletes(self):
		# Source packages obsolete nothing.
		return []
+
	@property
	def conflicts(self):
		# Source packages conflict with nothing.
		return []
+
+       @property
+       def files(self):
+               files = []
+               basedir = os.path.dirname(self.filename)
+
+               for dirs, subdirs, _files in os.walk(basedir):
+                       for f in _files:
+                               files.append(os.path.join(dirs, f))
+
+               return files
+
+       @property
+       def sources(self):
+               return self.lexer.get_var("sources").split()
+
	def extract(self, message=None, prefix=None):
		"""
			Copy the makefile directory and all downloaded source files
			into the directory given by prefix.

			message is an optional label for the progressbar; when it is
			None no progressbar is shown.
		"""
		# XXX neeed to make this waaaaaaaaaay better.

		files = self.files

		# Load progressbar.
		pb = None
		if message:
			message = "%-10s : %s" % (message, self.friendly_name)
			pb = util.make_progress(message, len(files), eta=False)

		# Length of the common prefix that is stripped from every file to
		# get its path relative to the makefile directory.
		dir_len = len(os.path.dirname(self.filename))

		# Copy all files that belong to the package
		i = 0
		for f in files:
			if pb:
				i += 1
				pb.update(i)

			_f = f[dir_len:]
			logging.debug("%s/%s" % (prefix, _f))

			path = "%s/%s" % (prefix, _f)

			# Create the leading directories on demand.
			path_dir = os.path.dirname(path)
			if not os.path.exists(path_dir):
				os.makedirs(path_dir)

			shutil.copy2(f, path)

		if pb:
			pb.finish()

		# Download source files.
		downloader = SourceDownloader(self.pakfire, mirrors=self.source_dl)
		for filename in self.sources:
			_filename = downloader.download(filename)
			assert _filename

			# Downloaded sources end up in <prefix>/files/.
			filename = "%s/files/%s" % (prefix, os.path.basename(_filename))
			dirname = os.path.dirname(filename)

			if not os.path.exists(dirname):
				os.makedirs(dirname)

			shutil.copy2(_filename, filename)
+
+
class MakefilePackage(MakefileBase):
	"""
		One binary package defined inside a makefile.

		Most metadata is inherited from MakefileBase; this class answers
		only from its own package lexer block.
	"""

	def __init__(self, pakfire, name, lexer):
		Package.__init__(self, pakfire)

		self._name = name
		self.lexer = lexer

	@property
	def name(self):
		return self._name

	@property
	def arch(self):
		# Defaults to the architecture of the distribution.
		return self.lexer.get_var("arch", "%{DISTRO_ARCH}")

	@property
	def configfiles(self):
		# Empty-string default so an unset variable yields [] instead of
		# crashing on None.split().
		return self.lexer.get_var("configfiles", "").split()

	@property
	def files(self):
		# Empty-string default so an unset variable yields [] instead of
		# crashing on None.split().
		return self.lexer.get_var("files", "").split()

	@property
	def uuid(self):
		# Binary packages from makefiles have no UUID, yet.
		return None

	def get_deps_from_builder(self, builder):
		# XXX not implemented, yet.
		pass

	@property
	def prerequires(self):
		return []

	@property
	def requires(self):
		return []

	@property
	def provides(self):
		return []

	@property
	def obsoletes(self):
		return []

	@property
	def conflicts(self):
		return []

	def get_scriptlet(self, type):
		# Returns None when no scriptlet of that type exists.
		return self.lexer.scriptlets.get(type, None)
index 28539b15cb8490270d0197a99359e8cb0c95151d..3ab007fd4db8f003879d4780b4cc570514bd5175 100644 (file)
 #                                                                             #
 ###############################################################################
 
+import collections
+import fnmatch
 import glob
+import hashlib
 import logging
 import lzma
 import os
@@ -29,6 +32,7 @@ import shutil
 import sys
 import tarfile
 import tempfile
+import time
 import uuid
 import xattr
 import zlib
@@ -40,137 +44,249 @@ from pakfire.util import rm
 from pakfire.constants import *
 from pakfire.i18n import _
 
+from binary import BinaryPackage
+from source import SourcePackage
 from file import InnerTarFile
 
 class Packager(object):
-       ARCHIVE_FILES = ("info", "filelist", "data.img")
-
-       def __init__(self, pakfire, pkg, env):
+       def __init__(self, pakfire, pkg):
                self.pakfire = pakfire
                self.pkg = pkg
-               self.env = env
-
-               self.tarball = None
-
-               self.cleanup = True
-
-               # Store meta information
-               self.info = {
-                       "package_format" : PACKAGE_FORMAT,
-                       "package_type" : self.type,
-                       "package_uuid" : uuid.uuid4(),
-                       "payload_comp" : "",
-
-                       "prerequires" : "",
-                       "requires" : "",
-                       "provides" : "",
-                       "conflicts" : "",
-                       "obsoletes": "",
-               }
-               self.info.update(self.pkg.info)
-               self.info["groups"] = " ".join(self.info["groups"])
-               self.info.update(self.pakfire.distro.info)
-               self.info.update(self.env.info)
-
-               ### Create temporary files
-               # Create temp directory to where we extract all files again and
-               # gather some information about them like requirements and provides.
-               self.tempdir = self.env.chrootPath("tmp", "%s_data" % self.pkg.friendly_name)
-               if not os.path.exists(self.tempdir):
-                       os.makedirs(self.tempdir)
-
-               # Create files that have the archive data
-               self.archive_files = {}
-               for i in self.ARCHIVE_FILES:
-                       self.archive_files[i] = \
-                               self.env.chrootPath("tmp", "%s_%s" % (self.pkg.friendly_name, i))
-
-       def __call__(self):
-               logging.debug("Packaging %s" % self.pkg.friendly_name)
-
-               # Create the tarball and add all data to it.
-               self.create_tarball()
-
-               if self.type == "binary":
-                       e = self.env.do("/usr/lib/buildsystem-tools/dependency-tracker %s" % \
-                               self.tempdir[len(self.env.chrootPath()):], returnOutput=True,
-                               env=self.pkg.env)
-
-                       for line in e.splitlines():
-                               m = re.match(r"^(\w+)=(.*)$", line)
-                               if m is None:
-                                       continue
 
-                               key, val = m.groups()
+               self.files = []
+               self.tmpfiles = []
 
-                               if not key in ("prerequires", "requires", "provides", "conflicts", "obsoletes",):
-                                       continue
+       def __del__(self):
+               for file in self.tmpfiles:
+                       if not os.path.exists(file):
+                               continue
 
-                               val = val.strip("\"")
-                               val = val.split()
+                       logging.debug("Removing tmpfile: %s" % file)
+                       os.remove(file)
 
-                               self.info[key] = " ".join(sorted(val))
	def mktemp(self):
		"""
			Return a path for a new temporary file and remember it so that
			__del__ can clean it up later.
		"""
		# XXX use real mk(s)temp here
		filename = os.path.join("/", LOCAL_TMP_PATH, util.random_string())

		self.tmpfiles.append(filename)

		return filename
 
-               # Create the outer tarball.
-               resultdir = os.path.join(self.env.chrootPath("result", self.pkg.arch))
-               if not os.path.exists(resultdir):
-                       os.makedirs(resultdir)
+       def save(self, filename):
+               # Create a new tar archive.
+               tar = tarfile.TarFile(filename, mode="w", format=tarfile.PAX_FORMAT)
 
-               filename = os.path.join(resultdir, self.pkg.filename)
+               # Add package formation information.
+               # Must always be the first file in the archive.
+               formatfile = self.create_package_format()
+               tar.add(formatfile, arcname="pakfire-format")
 
-               tar = tarfile.TarFile(filename, mode="w", format=tarfile.PAX_FORMAT)
+               # XXX make sure all files belong to the root user
+
+               # Create checksum file.
+               chksumsfile = self.mktemp()
+               chksums = open(chksumsfile, "w")
 
-               for i in self.ARCHIVE_FILES:
-                       tar.add(self.archive_files[i], arcname=i)
+               # Add all files to tar file.
+               for arcname, filename in self.files:
+                       tar.add(filename, arcname=arcname)
 
+                       # Calculating the hash sum of the added file
+                       # and store it in the chksums file.
+                       f = open(filename)
+                       h = hashlib.sha512()
+                       while True:
+                               buf = f.read(BUFFER_SIZE)
+                               if not buf:
+                                       break
+
+                               h.update(buf)
+                       f.close()
+
+                       chksums.write("%-10s %s\n" % (arcname, h.hexdigest()))
+
+               # Close checksum file and attach it to the end.
+               chksums.close()
+               tar.add(chksumsfile, "chksums")
+
+               # Close the tar file.
                tar.close()
 
-               rm(self.tempdir)
	def add(self, filename, arcname=None):
		"""
			Register a file for inclusion in the archive.

			arcname defaults to the file's basename.
		"""
		if not arcname:
			arcname = os.path.basename(filename)

		logging.debug("Adding %s (as %s) to tarball." % (filename, arcname))
		self.files.append((arcname, filename))
+
+       def create_package_format(self):
+               filename = self.mktemp()
+
+               f = open(filename, "w")
+               f.write("%s\n" % PACKAGE_FORMAT)
+               f.close()
+
+               return filename
+
+       def create_filelist(self, datafile):
+               filelist = self.mktemp()
+
+               f = open(filelist, "w")
+               datafile = InnerTarFile(datafile)
+
+               for m in datafile.getmembers():
+                       # XXX need to check what to do with UID/GID
+                       logging.info("  %s %-8s %-8s %s %6s %s" % \
+                               (tarfile.filemode(m.mode), m.uname, m.gname,
+                               "%d-%02d-%02d %02d:%02d:%02d" % time.localtime(m.mtime)[:6],
+                               util.format_size(m.size), m.name))
+               
+                       info = m.get_info(tarfile.ENCODING, "strict")
+
+                       # Update uname.
+                       if hasattr(self, "builder"):
+                               pwuid = self.builder.get_pwuid(info["uid"])
+                               if not pwuid:
+                                       logging.warning("UID '%d' not found. Using root.")
+                                       info["uname"] = "root"
+                               else:
+                                       info["uname"] = pwuid["name"]
+
+                               # Update gname.
+                               grgid = self.builder.get_grgid(info["gid"])
+                               if not grgid:
+                                       logging.warning("GID '%d' not found. Using root.")
+                                       info["gname"] = "root"
+                               else:
+                                       info["gname"] = grgid["name"]
+                       else:
+                               # Files in the source packages always belong to root.
+                               info["uname"] = info["gname"] = "root"
+
+                       f.write("%(name)-40s %(type)1s %(size)-10d %(uname)-10s %(gname)-10s %(mode)-6d %(mtime)-12d" \
+                               % info)
 
-       def create_tarball(self, compress=None):
-               tar = InnerTarFile(self.archive_files["data.img"], mode="w")
+                       # Calculate SHA512 hash of regular files.
+                       if m.isreg():
+                               mobj = datafile.extractfile(m)
+                               h = hashlib.sha512()
 
-               prefix = self.env.buildroot
-               if self.type == "source":
-                       prefix = "build"
+                               while True:
+                                       buf = mobj.read(BUFFER_SIZE)
+                                       if not buf:
+                                               break
+                                       h.update(buf)
 
-               if not compress and self.type == "binary":
-                       compress = "xz"
+                               mobj.close()
+                               f.write(" %s\n" % h.hexdigest())
+
+                       # For other files, just finish the line.
+                       else:
+                               f.write(" -\n")
+                               
+               logging.info("")
+
+               datafile.close()
+               f.close()
+
+               return filelist
+
	def run(self):
		# Must be implemented by subclasses.
		raise NotImplementedError
 
+
+class BinaryPackager(Packager):
	def __init__(self, pakfire, pkg, buildroot):
		"""
			buildroot is the directory the package's files were installed
			into; all archive paths are made relative to it.
		"""
		Packager.__init__(self, pakfire, pkg)

		self.buildroot = buildroot
+
	def create_metafile(self, datafile):
		"""
			Render the package metadata ("info") file into a temporary
			file and return its path.

			datafile is the path of the (uncompressed) payload tarball;
			its size is recorded as the installed size. Unknown template
			keys default to the empty string via the defaultdict.
		"""
		info = collections.defaultdict(lambda: "")

		# Generic package information including Pakfire information.
		info.update({
			"pakfire_version" : PAKFIRE_VERSION,
			"uuid"            : uuid.uuid4(),
		})

		# Include distribution information.
		info.update(self.pakfire.distro.info)
		info.update(self.pkg.info)

		# Update package information for string formatting.
		info.update({
			"groups"   : " ".join(self.pkg.groups),
			"requires" : " ".join(self.pkg.requires),
		})

		# Format description.
		description = [PACKAGE_INFO_DESCRIPTION_LINE % l \
			for l in util.text_wrap(self.pkg.description, length=80)]
		info["description"] = "\n".join(description)

		# Build information.
		info.update({
			# Package is built right now.
			"build_time" : int(time.time()),
			"build_id"   : uuid.uuid4(),
		})

		# Installed size (equals size of the uncompressed tarball).
		info.update({
			"inst_size" : os.path.getsize(datafile),
		})

		metafile = self.mktemp()

		f = open(metafile, "w")
		f.write(PACKAGE_INFO % info)
		f.close()

		return metafile
+
+       def create_datafile(self):
                includes = []
                excludes = []
 
-               for pattern in self.pkg.file_patterns:
+               # List of all patterns, which grows.
+               patterns = self.pkg.files
+
+               for pattern in patterns:
                        # Check if we are running in include or exclude mode.
                        if pattern.startswith("!"):
                                files = excludes
 
-                               # Strip the ! charater
+                               # Strip the ! character.
                                pattern = pattern[1:]
-
                        else:
                                files = includes
 
+                       # Expand file to point to chroot.
                        if pattern.startswith("/"):
                                pattern = pattern[1:]
-                       pattern = self.env.chrootPath(prefix, pattern)
+                       pattern = os.path.join(self.buildroot, pattern)
 
                        # Recognize the type of the pattern. Patterns could be a glob
                        # pattern that is expanded here or just a directory which will
                        # be included recursively.
                        if "*" in pattern or "?" in pattern:
-                               files += glob.glob(pattern)
+                               patterns += glob.glob(pattern)
 
                        elif os.path.exists(pattern):
                                # Add directories recursively...
                                if os.path.isdir(pattern):
+                                       # Add directory itself.
+                                       files.append(pattern)
+
                                        for dir, subdirs, _files in os.walk(pattern):
+                                               for subdir in subdirs:
+                                                       if subdir in ORPHAN_DIRECTORIES:
+                                                               continue
+
+                                                       subdir = os.path.join(dir, subdir)
+                                                       files.append(subdir)
+
                                                for file in _files:
                                                        file = os.path.join(dir, file)
                                                        files.append(file)
@@ -187,99 +303,341 @@ class Packager(object):
                                continue
 
                        files.append(file)
-
                files.sort()
 
-               filelist = open(self.archive_files["filelist"], mode="w")
+               # Load progressbar.
+               message = "%-10s : %s" % (_("Packaging"), self.pkg.friendly_name)
+               pb = util.make_progress(message, len(files), eta=False)
+
+               datafile = self.mktemp()
+               tar = InnerTarFile(datafile, mode="w")
+
+               # All files in the tarball are relative to this directory.
+               basedir = self.buildroot
 
-               for file_real in files:
-                       file_tar = file_real[len(self.env.chrootPath(prefix)) + 1:]
-                       file_tmp = os.path.join(self.tempdir, file_tar)
+               i = 0
+               for file in files:
+                       if pb:
+                               i += 1
+                               pb.update(i)
 
-                       if file_tar in ORPHAN_DIRECTORIES and not os.listdir(file_real):
-                               logging.debug("Found an orphaned directory: %s" % file_tar)
-                               os.unlink(file_real)
+                       # Never package /.
+                       if os.path.normpath(file) == os.path.normpath(basedir):
                                continue
 
-                       tar.add(file_real, arcname=file_tar)
+                       arcname = "/%s" % os.path.relpath(file, basedir)
 
-                       # Record the packaged file to the filelist.
-                       filelist.write("/%s\n" % file_tar)
+                       # Special handling for directories.
+                       if os.path.isdir(file):
+                               # Empty directories that are in the list of ORPHAN_DIRECTORIES
+                               # can be skipped and removed.
+                               if arcname in ORPHAN_DIRECTORIES and not os.listdir(file):
+                                       logging.debug("Found an orphaned directory: %s" % arcname)
+                                       try:
+                                               os.unlink(file)
+                                       except OSError:
+                                               pass
 
-                       # "Copy" the file to the tmp path for later investigation.
-                       if os.path.isdir(file_real):
-                               file_dir = file_tmp
+                                       continue
+
+                       # Add file to tarball.
+                       tar.add(file, arcname=arcname, recursive=False)
+
+               # Remove all packaged files.
+               for file in reversed(files):
+                       if not os.path.exists(file):
+                               continue
+
+                       # It's okay if we cannot remove directories,
+                       # when they are not empty.
+                       if os.path.isdir(file):
+                               try:
+                                       os.rmdir(file)
+                               except OSError:
+                                       continue
                        else:
-                               file_dir = os.path.dirname(file_tmp)
+                               os.unlink(file)
+
+                       while True:
+                               file = os.path.dirname(file)
+
+                               if not file.startswith(basedir):
+                                       break
+
+                               try:
+                                       os.rmdir(file)
+                               except OSError:
+                                       break
+
+               # Close the tarfile.
+               tar.close()
+
+               # Finish progressbar.
+               if pb:
+                       pb.finish()
+
+               return datafile
 
-                       if not os.path.exists(file_dir):
-                               os.makedirs(file_dir)
+       def create_scriptlets(self):
+               scriptlets = []
 
-                       if os.path.isfile(file_real):
-                               os.link(file_real, file_tmp)
+               for scriptlet_name in SCRIPTS:
+                       scriptlet = self.pkg.get_scriptlet(scriptlet_name)
 
-                       elif os.path.islink(file_real):
-                               # Dead symlinks cannot be copied by shutil.
-                               os.symlink(os.readlink(file_real), file_tmp)
+                       if not scriptlet:
+                               continue
+
+                       # Write script to a file.
+                       scriptlet_file = self.mktemp()
+
+                       if scriptlet["lang"] == "bin":
+                               path = lang["path"]
+                               try:
+                                       f = open(path, "b")
+                               except OSError:
+                                       raise Exception, "Cannot open script file: %s" % lang["path"]
+
+                               s = open(scriptlet_file, "wb")
+
+                               while True:
+                                       buf = f.read(BUFFER_SIZE)
+                                       if not buf:
+                                               break
+
+                                       s.write(buf)
+
+                               f.close()
+                               s.close()
 
-                       elif os.path.isdir(file_real):
-                               if not os.path.exists(file_tmp):
-                                       os.makedirs(file_tmp)
+                       elif scriptlet["lang"] == "shell":
+                               s = open(scriptlet_file, "w")
+
+                               # Write shell script to file.
+                               s.write("#!/bin/sh -e\n\n")
+                               s.write(scriptlet["scriptlet"])
+                               s.write("\n\nexit 0\n")
+
+                               s.close()
 
                        else:
-                               shutil.copy2(file_real, file_tmp)
+                               raise Exception, "Unknown scriptlet language: %s" % scriptlet["lang"]
 
-                       # Unlink the file and remove empty directories.
-                       if self.cleanup:
-                               if not os.path.isdir(file_real):
-                                       os.unlink(file_real)
+                       scriptlets.append((scriptlet_name, scriptlet_file))
 
-                               elif os.path.isdir(file_real) and not os.listdir(file_real):
-                                       os.rmdir(file_real)
+               # XXX scan for script dependencies
 
-               # Dump all files that are in the archive.
-               tar.list()
+               return scriptlets
 
-               # Write all data to disk.
-               tar.close()
-               filelist.close()
+       def create_configs(self, datafile):
+               datafile = InnerTarFile(datafile)
+
+               members = datafile.getmembers()
+
+               configfiles = []
+               configdirs  = []
+
+               # Find all directories in the config file list.
+               for file in self.pkg.configfiles:
+                       if file.startswith("/"):
+                               file = file[1:]
+
+                       for member in members:
+                               if member.name == file and member.isdir():
+                                       configdirs.append(file)
 
-               # compress the tarball here
-               if compress:
-                       # Save algorithm to metadata.
-                       self.info["payload_comp"] = compress
+               for configdir in configdirs:
+                       for member in members:
+                               if not member.isdir() and member.name.startswith(configdir):
+                                       configfiles.append(member.name)
 
-                       logging.debug("Compressing package with %s algorithm." % compress or "no")
+               for pattern in self.pkg.configfiles:
+                       if pattern.startswith("/"):
+                               pattern = pattern[1:]
+
+                       for member in members:
+                               if not fnmatch.fnmatch(member.name, pattern):
+                                       continue
+
+                               if member.name in configfiles:
+                                       continue
+
+                               configfiles.append(member.name)
 
-                       # Compress file (in place).
-                       pakfire.compress.compress(self.archive_files["data.img"],
-                               algo=compress, progress=True)
+               # Sort list alphabetically.
+               configfiles.sort()
 
-               # Calc hashsum of the payload of the package.
-               self.info["payload_hash1"] = util.calc_hash1(self.archive_files["data.img"])
+               configsfile = self.mktemp()
 
-       def create_info(self):
-               f = open(self.archive_files["info"], "w")
-               f.write(BINARY_PACKAGE_META % self.info)
+               f = open(configsfile, "w")
+               for file in configfiles:
+                       f.write("%s\n" % file)
                f.close()
 
-       @property
-       def type(self):
-               raise NotImplementedError
+               return configsfile
 
+       def compress_datafile(self, datafile, algo="xz"):
+               pass
 
-class BinaryPackager(Packager):
-       @property
-       def type(self):
-               return "binary"
+       def run(self, resultdirs=[]):
+               assert resultdirs
+
+               # Add all files to this package.
+               datafile = self.create_datafile()
+
+               # Get filelist from datafile.
+               filelist = self.create_filelist(datafile)
+               configs  = self.create_configs(datafile)
+
+               # Create script files.
+               scriptlets = self.create_scriptlets()
+
+               metafile = self.create_metafile(datafile)
+
+               # XXX make xz in variable
+               self.compress_datafile(datafile, algo="xz")
+
+               # Add files to the tar archive in correct order.
+               self.add(metafile, "info")
+               self.add(filelist, "filelist")
+               self.add(configs,  "configs")
+               self.add(datafile, "data.img")
+
+               for scriptlet_name, scriptlet_file in scriptlets:
+                       self.add(scriptlet_file, "scriptlets/%s" % scriptlet_name)
+
+               # Build the final package.
+               tempfile = self.mktemp()
+               self.save(tempfile)
+
+               for resultdir in resultdirs:
+                       # XXX sometimes, there has been a None in resultdirs
+                       if not resultdir:
+                               continue
+
+                       resultdir = "%s/%s" % (resultdir, self.pkg.arch)
+
+                       if not os.path.exists(resultdir):
+                               os.makedirs(resultdir)
+
+                       resultfile = os.path.join(resultdir, self.pkg.package_filename)
+                       logging.info("Saving package to %s" % resultfile)
+                       try:
+                               os.link(tempfile, resultfile)
+                       except OSError:
+                               shutil.copy2(tempfile, resultfile)
+
+               ## Dump package information.
+               #pkg = BinaryPackage(self.pakfire, self.pakfire.repos.dummy, tempfile)
+               #for line in pkg.dump(long=True).splitlines():
+               #       logging.info(line)
+               #logging.info("")
 
 
 class SourcePackager(Packager):
-       def __init__(self, *args, **kwargs):
-               Packager.__init__(self, *args, **kwargs)
+       def create_metafile(self, datafile):
+               info = collections.defaultdict(lambda: "")
+
+               # Generic package information including Pakfire information.
+               info.update({
+                       "pakfire_version" : PAKFIRE_VERSION,
+               })
+
+               # Include distribution information.
+               info.update(self.pakfire.distro.info)
+               info.update(self.pkg.info)
+
+               # Update package information for string formatting.
+               requires = [PACKAGE_INFO_DEPENDENCY_LINE % r for r in self.pkg.requires]
+               info.update({
+                       "groups"   : " ".join(self.pkg.groups),
+                       "requires" : "\n".join(requires),
+               })
+
+               # Format description.
+               description = [PACKAGE_INFO_DESCRIPTION_LINE % l \
+                       for l in util.text_wrap(self.pkg.description, length=80)]
+               info["description"] = "\n".join(description)
+
+               # Build information.
+               info.update({
+                       # Package is built right now.
+                       "build_time" : int(time.time()),
+                       "build_id"   : uuid.uuid4(),
+               })
+
+               # Set UUID
+               # XXX replace this by the payload hash
+               info.update({
+                       "uuid"       : uuid.uuid4(),
+               })
+
+               metafile = self.mktemp()
+
+               f = open(metafile, "w")
+               f.write(PACKAGE_INFO % info)
+               f.close()
+
+               return metafile
+
+       def create_datafile(self):
+               filename = self.mktemp()
+               datafile = InnerTarFile(filename, mode="w")
+
+               # Add all downloaded files to the package.
+               for file in self.pkg.download():
+                       datafile.add(file, "files/%s" % os.path.basename(file))
+
+               # Add all files in the package directory.
+               for file in self.pkg.files:
+                       arcname = os.path.relpath(file, self.pkg.path)
+                       datafile.add(file, arcname)
+
+               datafile.close()
+
+               return filename
+
+       def run(self, resultdirs=[]):
+               assert resultdirs
+
+               logging.info(_("Building source package %s:") % self.pkg.package_filename)
+
+               # Add datafile to package.
+               datafile = self.create_datafile()
+
+               # Create filelist out of data.
+               filelist = self.create_filelist(datafile)
+
+               # Create metadata.
+               metafile = self.create_metafile(datafile)
+
+               # Add files to the tar archive in correct order.
+               self.add(metafile, "info")
+               self.add(filelist, "filelist")
+               self.add(datafile, "data.img")
+
+               # Build the final tarball.
+               tempfile = self.mktemp()
+               self.save(tempfile)
+
+               for resultdir in resultdirs:
+                       # XXX sometimes, there has been a None in resultdirs
+                       if not resultdir:
+                               continue
+
+                       resultdir = "%s/%s" % (resultdir, self.pkg.arch)
+
+                       if not os.path.exists(resultdir):
+                               os.makedirs(resultdir)
 
-               self.cleanup = False
+                       resultfile = os.path.join(resultdir, self.pkg.package_filename)
+                       logging.info("Saving package to %s" % resultfile)
+                       try:
+                               os.link(tempfile, resultfile)
+                       except OSError:
+                               shutil.copy2(tempfile, resultfile)
 
-       @property
-       def type(self):
-               return "source"
+               # Dump package information.
+               pkg = SourcePackage(self.pakfire, self.pakfire.repos.dummy, tempfile)
+               for line in pkg.dump(long=True).splitlines():
+                       logging.info(line)
+               logging.info("")
index 0ec8973a46e46e478fac741d7660846405bd9fc1..28d06da80a9c77b58ac60e4ee79e654e78ee285e 100644 (file)
@@ -215,3 +215,7 @@ class SolvPackage(base.Package):
                        self.repo.download(self, text=text)
 
                return self.get_from_cache()
+
+       def get_scriptlet(self, type):
+               # XXX TODO
+               return None
index 3defa3ea86bac36ad709e0db194db8ad6781808c..ebda89db73e1aceb9d3f7ed6a42a757b1c3d3e1e 100644 (file)
@@ -22,6 +22,4 @@
 from file import FilePackage
 
 class SourcePackage(FilePackage):
-       @property
-       def arch(self):
-               return "src"
+       pass
diff --git a/pakfire/packages/virtual.py b/pakfire/packages/virtual.py
deleted file mode 100644 (file)
index e8de482..0000000
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/python
-###############################################################################
-#                                                                             #
-# Pakfire - The IPFire package management system                              #
-# Copyright (C) 2011 Pakfire development team                                 #
-#                                                                             #
-# This program is free software: you can redistribute it and/or modify        #
-# it under the terms of the GNU General Public License as published by        #
-# the Free Software Foundation, either version 3 of the License, or           #
-# (at your option) any later version.                                         #
-#                                                                             #
-# This program is distributed in the hope that it will be useful,             #
-# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               #
-# GNU General Public License for more details.                                #
-#                                                                             #
-# You should have received a copy of the GNU General Public License           #
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.       #
-#                                                                             #
-###############################################################################
-
-from base import Package
-
-from pakfire.constants import *
-
-
-class VirtualPackage(Package):
-       def __init__(self, pakfire, data):
-               self.pakfire = pakfire
-               self._data = {}
-
-               for key in data.keys():
-                       self._data[key] = data[key]
-
-       def __repr__(self):
-               return "<%s %s>" % (self.__class__.__name__, self.friendly_name)
-
-       @property
-       def metadata(self):
-               return self._data
-
-       @property
-       def filename(self):
-               return PACKAGE_FILENAME_FMT % {
-                       "arch"    : self.arch,
-                       "ext"     : PACKAGE_EXTENSION,
-                       "name"    : self.name,
-                       "release" : self.release,
-                       "version" : self.version,
-               }
-
-       @property
-       def arch(self):
-               return self.metadata.get("PKG_ARCH")
-
-       @property
-       def file_patterns(self):
-               return self.metadata.get("PKG_FILES").split()
-
-       @property
-       def env(self):
-               return self.metadata
-
index 901fdbee4bf442bbc8713155f3fe42d933fb47f5..283ff7f54f8493f468affd124f224b1baabf1480 100644 (file)
@@ -70,7 +70,7 @@ class Repositories(object):
 
                # Update all indexes of the repositories (not force) so that we will
                # always work with valid data.
-               self.update()
+               self.update(offline=self.pakfire.offline)
 
        def __iter__(self):
                repositories = self.__repos.values()
@@ -140,12 +140,12 @@ class Repositories(object):
                except KeyError:
                        pass
 
-       def update(self, force=False):
+       def update(self, force=False, offline=False):
                logging.debug("Updating all repository indexes (force=%s)" % force)
 
                # update all indexes if necessary or forced
                for repo in self:
-                       repo.update(force=force)
+                       repo.update(force=force, offline=offline)
 
        def whatprovides(self, what):
                what = self.pakfire.create_relation(what)
index e2ca5174f90224604d72751218943c0a678f3e97..241fc3ec03709452ef28ff8f72d237d43a38ddc3 100644 (file)
@@ -106,14 +106,14 @@ class RepositoryFactory(object):
                """
                return False
 
-       def update(self, force=False):
+       def update(self, force=False, offline=False):
                """
                        A function that is called to update the local data of
                        the repository.
                """
                assert self.index
 
-               self.index.update(force)
+               self.index.update(force, offline=offline)
 
        def clean(self):
                """
index bd9b8e25f89ec4a5ad64e0d08a8a51b5fe54761e..f8dc8ca8d0eda0e85b918162cbdb69921baead86 100644 (file)
@@ -149,9 +149,21 @@ class DatabaseLocal(Database):
                                build_time      INTEGER,
                                installed       INT,
                                reason          TEXT,
-                               repository      TEXT,
-                               scriptlet       TEXT,
-                               triggers        TEXT
+                               repository      TEXT
+                       );
+
+                       CREATE TABLE scriptlets(
+                               id                      INTEGER PRIMARY KEY,
+                               pkg                     INTEGER,
+                               action          TEXT,
+                               scriptlet       TEXT
+                       );
+
+                       CREATE TABLE triggers(
+                               id                      INTEGER PRIMARY KEY,
+                               pkg                     INTEGER,
+                               dependency      TEXT,
+                               scriptlet       TEXT
                        );
                """)
                # XXX add some indexes here
@@ -189,10 +201,8 @@ class DatabaseLocal(Database):
                                        build_time,
                                        installed,
                                        repository,
-                                       reason,
-                                       scriptlet,
-                                       triggers
-                               ) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
+                                       reason
+                               ) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                                (
                                        pkg.name,
                                        pkg.epoch,
@@ -218,8 +228,6 @@ class DatabaseLocal(Database):
                                        time.time(),
                                        pkg.repo.name,
                                        reason or "",
-                                       pkg.scriptlet,
-                                       " ".join(pkg.triggers)
                                )
                        )
 
index 79297cd603a990ca930142e05e3a75eede1d1a62..9a398d240cbe77883631458141e00a5effb8013a 100644 (file)
@@ -66,7 +66,7 @@ class Index(object):
                """
                raise NotImplementedError
 
-       def update(self, force=False):
+       def update(self, force=False, offline=False):
                raise NotImplementedError
 
        def read(self, filename):
@@ -190,11 +190,11 @@ class IndexSolv(Index):
        def check(self):
                pass # XXX to be done
 
-       def update(self, force=False):
-               self._update_metadata(force)
-               self._update_database(force)
+       def update(self, force=False, offline=False):
+               self._update_metadata(force, offline)
+               self._update_database(force, offline)
 
-       def _update_metadata(self, force):
+       def _update_metadata(self, force, offline=False):
                filename = os.path.join(METADATA_DOWNLOAD_PATH, METADATA_DOWNLOAD_FILE)
 
                # Marker if we need to do the download.
@@ -216,19 +216,23 @@ class IndexSolv(Index):
                                        self.cache.abspath(filename))
 
                        # If no metadata was downloaded and we are in offline mode.
-                       elif self.pakfire.offline:
-                               raise OfflineModeError, _("There is no metadata for the repository '%s' and"
-                                       " we cannot download any because we are running in offline mode."
-                                       " Connect to a network or disable this repository.") % self.repo.name
+                       elif offline:
+                               # If we cannot download new metadata, we should skip this
+                               # repository.
+                               return
+
+                               #raise OfflineModeError, _("There is no metadata for the repository '%s' and"
+                               #       " we cannot download any because we are running in offline mode."
+                               #       " Connect to a network or disable this repository.") % self.repo.name
 
-               elif force and self.pakfire.offline:
+               elif force and offline:
                        raise OfflineModeError, _("I cannot be forced to re-download the metadata for"
                                " the repository '%s' when running in offline mode.") % self.repo.name
 
                if download:
                        # We are supposed to download new metadata, but we are running in
                        # offline mode. That's okay. Just doing nothing.
-                       if not self.pakfire.offline:
+                       if not offline:
                                logging.debug("Going to (re-)download the repository metadata.")
 
                                # Initialize a grabber for download.
@@ -253,14 +257,21 @@ class IndexSolv(Index):
                self.metadata = metadata.Metadata(self.pakfire, self,
                        self.cache.abspath(filename))
 
-       def _update_database(self, force):
+       def _update_database(self, force, offline=False):
+               if not hasattr(self, "metadata"):
+                       return
+
                # Construct cache and download filename.
                filename = os.path.join(METADATA_DOWNLOAD_PATH, self.metadata.database)
 
                if not self.cache.exists(filename):
-                       if self.pakfire.offline:
-                               raise OfflineModeError, _("Your repository metadata is outdated "
-                                       " and a new version needs to be downloaded.")
+                       if offline:
+                               # If there is no database and we are in offline mode, we cannot
+                               # download anything so we just skip the rest of this function.
+                               return
+
+                               #raise OfflineModeError, _("Your repository metadata is outdated "
+                               #       " and a new version needs to be downloaded.")
 
                        # Initialize a grabber for download.
                        grabber = downloader.DatabaseDownloader(
@@ -319,7 +330,7 @@ class IndexDir(Index):
 
                return path
 
-       def update(self, force=False):
+       def update(self, force=False, offline=False):
                logging.debug("Updating repository index '%s' (force=%s)" % (self.path, force))
 
                # Do nothing if the update is not forced but populate the database
@@ -398,7 +409,7 @@ class IndexLocal(Index):
                # XXX Create the database and lock it or something.
                pass
 
-       def update(self, force=True):
+       def update(self, force=True, offline=False):
                if self.solver_repo.size() == 0:
                        force = True
 
index 43d3f61a0492b5dff295cfc5110b6701803c81b1..66cbc1f71456f0631d3be79e91f1c452fb420b5d 100644 (file)
@@ -24,6 +24,7 @@ from __future__ import division
 import fcntl
 import hashlib
 import logging
+import math
 import os
 import progressbar
 import random
@@ -205,21 +206,44 @@ def calc_hash1(filename=None, data=None):
        return h.hexdigest()
 
 def text_wrap(s, length=65):
-       t = []
-       s = s.split()
-
-       l = []
-       for word in s:
-               l.append(word)
+       if not s:
+               return ""
+
+       lines = []
+
+       words = []
+       for line in s.splitlines():
+               if not line:
+                       words.append("")
+               else:
+                       words += line.split()
+
+       line = []
+       while words:
+               word = words.pop(0)
+
+                       # An empty word means a line break.
+               if not word:
+                       if line:
+                               lines.append(" ".join(line))
+                       lines.append("")
+                       line = []
+
+               else:
+                       if len(" ".join(line)) + len(word) >= length:
+                               lines.append(" ".join(line))
+                               line = []
+                               words.insert(0, word)
+                       else:
+                               line.append(word)
 
-               if len(" ".join(l)) >= length:
-                       t.append(l)
-                       l = []
+       if line:
+               lines.append(" ".join(line))
 
-       if l:
-               t.append(l)
+       assert not words
 
-       return [" ".join(l) for l in t]
+       #return "\n".join(lines)
+       return lines
 
 def orphans_kill(root, killsig=signal.SIGTERM):
        """
@@ -238,3 +262,31 @@ def orphans_kill(root, killsig=signal.SIGTERM):
                                os.waitpid(pid, 0)
                except OSError, e:
                        pass
+
+def scriptlet_interpreter(scriptlet):
+       """
+               This function returns the interpreter of a scriptlet.
+       """
+       # XXX handle ELF?
+       interpreter = None
+
+       for line in scriptlet.splitlines():
+               if line.startswith("#!/"):
+                       interpreter = line[2:]
+                       interpreter = interpreter.split()[0]
+               break
+
+       return interpreter
+
+def calc_parallelism():
+       """
+               Calculate how many processes to run
+               at the same time.
+
+               We take the log10(number of processors) * factor
+       """
+       num = os.sysconf("SC_NPROCESSORS_CONF")
+       if num == 1:
+               return 2
+       else:
+               return int(round(math.log10(num) * 26))
index 5faa6cb49689977e59cb86f8a306b8ceeee00a57..4bc1518a4f5b62f3d54881c5d05ecff192cf9816 100644 (file)
@@ -18,11 +18,11 @@ pakfire/packages/binary.py
 pakfire/packages/file.py
 pakfire/packages/__init__.py
 pakfire/packages/installed.py
+pakfire/packages/lexer.py
 pakfire/packages/make.py
 pakfire/packages/packager.py
 pakfire/packages/solv.py
 pakfire/packages/source.py
-pakfire/packages/virtual.py
 pakfire/repository/base.py
 pakfire/repository/cache.py
 pakfire/repository/database.py
index a2ffe2a028cc8d346ef8ac3d675d50c5d8ef9548..37faea7f0c8e3bdebbacc27abc294ef2a20c4f40 100644 (file)
@@ -11,8 +11,8 @@ msgstr ""
 "Project-Id-Version: IPFire\n"
 "Report-Msgid-Bugs-To: \n"
 "POT-Creation-Date: 2011-08-07 13:23+0200\n"
-"PO-Revision-Date: 2011-08-08 00:23+0000\n"
-"Last-Translator: romank <roman.kornfeld@gmail.com>\n"
+"PO-Revision-Date: 2011-08-11 11:10+0000\n"
+"Last-Translator: cibomato <jochen_rupp@web.de>\n"
 "Language-Team: German (Germany) (http://www.transifex.net/projects/p/ipfire/team/de_DE/)\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -125,7 +125,7 @@ msgstr "Ein Paket zum Aktualisieren oder leer lassen für alle."
 
 #: ../pakfire/cli.py:161
 msgid "Check, if there are any updates available."
-msgstr ""
+msgstr "Prüfe, ob Updates verfügbar sind."
 
 #: ../pakfire/cli.py:169
 msgid "Print some information about the given package(s)."
@@ -157,11 +157,11 @@ msgstr ""
 
 #: ../pakfire/cli.py:195
 msgid "Group name to search for."
-msgstr ""
+msgstr "Gruppenname, nach dem gesucht werden soll."
 
 #: ../pakfire/cli.py:201
 msgid "Install all packages that belong to the given group."
-msgstr ""
+msgstr "Installiere alle Pakete, die zu der angegebenen Gruppe gehören."
 
 #: ../pakfire/cli.py:203
 msgid "Group name."
@@ -177,7 +177,7 @@ msgstr ""
 
 #: ../pakfire/cli.py:221
 msgid "Cleanup all temporary files."
-msgstr ""
+msgstr "Löschen aller temporären Dateien."
 
 #: ../pakfire/cli.py:227
 msgid "Check the system for any errors."
@@ -185,7 +185,7 @@ msgstr "Überprüfe das System auf Fehler"
 
 #: ../pakfire/cli.py:233
 msgid "Check the dependencies for a particular package."
-msgstr ""
+msgstr "Überprüfe die Abhängigkeiten eines bestimmten Paketes."
 
 #: ../pakfire/cli.py:235
 msgid "Give name of at least one package to check."
@@ -217,7 +217,7 @@ msgstr "Pakfire-builder Kommandozeilen-Interface."
 
 #: ../pakfire/cli.py:379
 msgid "Update the package indexes."
-msgstr ""
+msgstr "Aktualisiere die Paket-Indizes."
 
 #: ../pakfire/cli.py:385
 msgid "Build one or more packages."
@@ -229,7 +229,7 @@ msgstr ""
 
 #: ../pakfire/cli.py:391
 msgid "Build the package for the given architecture."
-msgstr ""
+msgstr "Baue das Paket für die angegebene Architektur."
 
 #: ../pakfire/cli.py:393 ../pakfire/cli.py:419
 msgid "Path were the output files should be copied to."
@@ -261,7 +261,7 @@ msgstr ""
 
 #: ../pakfire/cli.py:491
 msgid "Pakfire server command line interface."
-msgstr ""
+msgstr "Pakfire Server Kommandozeile."
 
 #: ../pakfire/cli.py:528
 msgid "Request a build job from the server."
@@ -281,7 +281,7 @@ msgstr "Repository-Management-Befehle."
 
 #: ../pakfire/cli.py:555
 msgid "Create a new repository index."
-msgstr ""
+msgstr "Erstelle einen neuen Repository-Index."
 
 #: ../pakfire/cli.py:556
 msgid "Path to the packages."
@@ -375,11 +375,11 @@ msgstr "Benötigt"
 
 #: ../pakfire/packages/base.py:125
 msgid "File"
-msgstr ""
+msgstr "Datei"
 
 #: ../pakfire/packages/base.py:305
 msgid "Not set"
-msgstr ""
+msgstr "Nicht gesetzt"
 
 #: ../pakfire/repository/index.py:220
 #, python-format
@@ -421,7 +421,7 @@ msgstr "Lade installierte Pakete"
 #: ../pakfire/repository/remote.py:106
 #, python-format
 msgid "Cannot download this file in offline mode: %s"
-msgstr ""
+msgstr "Kann diese Datei im Offline-Modus nicht herunterladen: %s"
 
 #: ../pakfire/satsolver.py:148
 msgid "The solver returned one problem:"
@@ -529,7 +529,7 @@ msgstr ""
 
 #: ../scripts/pakfire:27
 msgid "The error that lead to this:"
-msgstr ""
+msgstr "Der Fehler, welcher dazu führte:"
 
 #: ../scripts/pakfire:64
 msgid "An error has occured when running Pakfire."
@@ -537,7 +537,7 @@ msgstr ""
 
 #: ../scripts/pakfire:67
 msgid "Error message:"
-msgstr ""
+msgstr "Fehlermeldung:"
 
 #: ../scripts/pakfire:71
 msgid "Further description:"
@@ -586,7 +586,7 @@ msgstr ""
 #: ../src/problem.c:209
 #, c-format
 msgid "cannot install both %s and %s"
-msgstr ""
+msgstr "Kann nicht sowohl %s als auch %s installieren"
 
 #: ../src/problem.c:216
 #, c-format
index c4776a5daaea742ff7449ef37a646532f4f54e64..01010e5293394b4dc049b6f1792131ba511a9eef 100644 (file)
--- a/po/es.po
+++ b/po/es.po
@@ -9,8 +9,8 @@ msgstr ""
 "Project-Id-Version: IPFire\n"
 "Report-Msgid-Bugs-To: \n"
 "POT-Creation-Date: 2011-08-07 13:23+0200\n"
-"PO-Revision-Date: 2011-08-08 19:46+0000\n"
-"Last-Translator: andres_reyes <andres.rt@gmail.com>\n"
+"PO-Revision-Date: 2011-08-11 05:24+0000\n"
+"Last-Translator: urkonn <urkonn@gmail.com>\n"
 "Language-Team: Spanish (Castilian) (http://www.transifex.net/projects/p/ipfire/team/es/)\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -483,7 +483,7 @@ msgstr "Reinstalando:"
 
 #: ../pakfire/transaction.py:200
 msgid "Updating:"
-msgstr "Actualizando"
+msgstr "Actualizando:"
 
 #: ../pakfire/transaction.py:201
 msgid "Downgrading:"
index 61bb9b88a13bdf5dd858c8b12777320004254ef9..fbb0b8c078aa6f923636e345dab1c01192d40a8c 100644 (file)
@@ -8,7 +8,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2011-08-10 18:10+0200\n"
+"POT-Creation-Date: 2011-08-23 20:53+0200\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@li.org>\n"
@@ -17,71 +17,95 @@ msgstr ""
 "Content-Type: text/plain; charset=CHARSET\n"
 "Content-Transfer-Encoding: 8bit\n"
 
-#: ../pakfire/actions.py:117
+#: ../pakfire/actions.py:108
 #, python-format
 msgid "Cannot run scriptlet because no interpreter is available: %s"
 msgstr ""
 
-#: ../pakfire/actions.py:121
+#: ../pakfire/actions.py:112
 #, python-format
 msgid "Cannot run scriptlet because the interpreter is not executable: %s"
 msgstr ""
 
-#: ../pakfire/actions.py:169
+#: ../pakfire/actions.py:161
 #, python-format
 msgid ""
 "The scriptlet returned an error:\n"
 "%s"
 msgstr ""
 
-#: ../pakfire/actions.py:172
+#: ../pakfire/actions.py:164
 #, python-format
 msgid "The scriptlet ran more than %s seconds and was killed."
 msgstr ""
 
-#: ../pakfire/actions.py:248 ../pakfire/actions.py:305
+#: ../pakfire/actions.py:222 ../pakfire/actions.py:279
 msgid "Installing"
 msgstr ""
 
-#: ../pakfire/actions.py:258
+#: ../pakfire/actions.py:232
 msgid "Updating"
 msgstr ""
 
-#: ../pakfire/actions.py:272
+#: ../pakfire/actions.py:246
 msgid "Removing"
 msgstr ""
 
 #. Cleaning up leftover files and stuff.
-#: ../pakfire/actions.py:290
+#: ../pakfire/actions.py:264
 msgid "Cleanup"
 msgstr ""
 
-#: ../pakfire/actions.py:315
+#: ../pakfire/actions.py:289
 msgid "Downgrading"
 msgstr ""
 
-#: ../pakfire/base.py:188 ../pakfire/base.py:202 ../pakfire/base.py:244
-#: ../pakfire/base.py:284 ../pakfire/base.py:317
+#: ../pakfire/base.py:199 ../pakfire/base.py:229 ../pakfire/base.py:275
+#: ../pakfire/base.py:315 ../pakfire/base.py:348
 msgid "Nothing to do"
 msgstr ""
 
-#: ../pakfire/base.py:230
+#: ../pakfire/base.py:261
 msgid "There are no packages to install."
 msgstr ""
 
-#: ../pakfire/base.py:513
+#: ../pakfire/base.py:453
+msgid "Build command has failed."
+msgstr ""
+
+#: ../pakfire/base.py:537
 msgid "Everything is fine."
 msgstr ""
 
-#: ../pakfire/builder.py:282
-#, python-format
-msgid "Extracting: %s (source)"
+#: ../pakfire/builder.py:122
+msgid "Package information:"
 msgstr ""
 
-#: ../pakfire/builder.py:706
+#. Copy the makefile and load source tarballs.
+#: ../pakfire/builder.py:306
+msgid "Extracting"
+msgstr ""
+
+#: ../pakfire/builder.py:614
 msgid "Dumping created packages"
 msgstr ""
 
+#: ../pakfire/builder.py:636
+msgid "The build command failed. See logfile for details."
+msgstr ""
+
+#. Package the result.
+#. Make all these little package from the build environment.
+#: ../pakfire/builder.py:778
+msgid "Creating packages:"
+msgstr ""
+
+#. Execute the buildscript of this stage.
+#: ../pakfire/builder.py:789
+#, python-format
+msgid "Running stage %s:"
+msgstr ""
+
 #: ../pakfire/cli.py:42
 msgid "Pakfire command line interface."
 msgstr ""
@@ -230,7 +254,7 @@ msgstr ""
 msgid "Cleaning up everything..."
 msgstr ""
 
-#: ../pakfire/cli.py:324
+#: ../pakfire/cli.py:324 ../pakfire/cli.py:579
 msgid "Pakfire builder command line interface."
 msgstr ""
 
@@ -238,23 +262,23 @@ msgstr ""
 msgid "Update the package indexes."
 msgstr ""
 
-#: ../pakfire/cli.py:385
+#: ../pakfire/cli.py:385 ../pakfire/cli.py:599
 msgid "Build one or more packages."
 msgstr ""
 
-#: ../pakfire/cli.py:387
+#: ../pakfire/cli.py:387 ../pakfire/cli.py:601
 msgid "Give name of at least one package to build."
 msgstr ""
 
-#: ../pakfire/cli.py:391
+#: ../pakfire/cli.py:391 ../pakfire/cli.py:605
 msgid "Build the package for the given architecture."
 msgstr ""
 
-#: ../pakfire/cli.py:393 ../pakfire/cli.py:419
+#: ../pakfire/cli.py:393 ../pakfire/cli.py:419 ../pakfire/cli.py:607
 msgid "Path were the output files should be copied to."
 msgstr ""
 
-#: ../pakfire/cli.py:395 ../pakfire/cli.py:408
+#: ../pakfire/cli.py:395 ../pakfire/cli.py:408 ../pakfire/cli.py:609
 msgid "Mode to run in. Is either 'release' or 'development' (default)."
 msgstr ""
 
@@ -278,38 +302,42 @@ msgstr ""
 msgid "Give name(s) of a package(s)."
 msgstr ""
 
-#: ../pakfire/cli.py:491
+#: ../pakfire/cli.py:492
 msgid "Pakfire server command line interface."
 msgstr ""
 
-#: ../pakfire/cli.py:528
+#: ../pakfire/cli.py:529
 msgid "Request a build job from the server."
 msgstr ""
 
-#: ../pakfire/cli.py:534
+#: ../pakfire/cli.py:535
 msgid "Send a keepalive to the server."
 msgstr ""
 
-#: ../pakfire/cli.py:541
+#: ../pakfire/cli.py:542
 msgid "Update all repositories."
 msgstr ""
 
-#: ../pakfire/cli.py:547
+#: ../pakfire/cli.py:548
 msgid "Repository management commands."
 msgstr ""
 
-#: ../pakfire/cli.py:555
+#: ../pakfire/cli.py:556
 msgid "Create a new repository index."
 msgstr ""
 
-#: ../pakfire/cli.py:556
+#: ../pakfire/cli.py:557
 msgid "Path to the packages."
 msgstr ""
 
-#: ../pakfire/cli.py:557
+#: ../pakfire/cli.py:558
 msgid "Path to input packages."
 msgstr ""
 
+#: ../pakfire/cli.py:611
+msgid "Do not verify build dependencies."
+msgstr ""
+
 #: ../pakfire/errors.py:30
 msgid "An unhandled error occured."
 msgstr ""
@@ -341,100 +369,125 @@ msgstr ""
 msgid "Release"
 msgstr ""
 
-#: ../pakfire/packages/base.py:93 ../pakfire/transaction.py:194
+#: ../pakfire/packages/base.py:96 ../pakfire/transaction.py:194
 msgid "Size"
 msgstr ""
 
-#: ../pakfire/packages/base.py:94
+#: ../pakfire/packages/base.py:100
 msgid "Repo"
 msgstr ""
 
-#: ../pakfire/packages/base.py:95
+#: ../pakfire/packages/base.py:103
 msgid "Summary"
 msgstr ""
 
-#: ../pakfire/packages/base.py:96
+#: ../pakfire/packages/base.py:104
 msgid "Groups"
 msgstr ""
 
-#: ../pakfire/packages/base.py:97
+#: ../pakfire/packages/base.py:105
 msgid "URL"
 msgstr ""
 
-#: ../pakfire/packages/base.py:98
+#: ../pakfire/packages/base.py:106
 msgid "License"
 msgstr ""
 
-#: ../pakfire/packages/base.py:101
+#: ../pakfire/packages/base.py:109
 msgid "Description"
 msgstr ""
 
-#: ../pakfire/packages/base.py:107
+#: ../pakfire/packages/base.py:116
+msgid "Maintainer"
+msgstr ""
+
+#: ../pakfire/packages/base.py:118
 msgid "UUID"
 msgstr ""
 
-#: ../pakfire/packages/base.py:108
+#: ../pakfire/packages/base.py:119
 msgid "Build ID"
 msgstr ""
 
-#: ../pakfire/packages/base.py:109
+#: ../pakfire/packages/base.py:120
 msgid "Build date"
 msgstr ""
 
-#: ../pakfire/packages/base.py:110
+#: ../pakfire/packages/base.py:121
 msgid "Build host"
 msgstr ""
 
-#: ../pakfire/packages/base.py:112
+#: ../pakfire/packages/base.py:123
 msgid "Provides"
 msgstr ""
 
-#: ../pakfire/packages/base.py:117
+#: ../pakfire/packages/base.py:128
+msgid "Pre-requires"
+msgstr ""
+
+#: ../pakfire/packages/base.py:133
 msgid "Requires"
 msgstr ""
 
-#: ../pakfire/packages/base.py:125
+#: ../pakfire/packages/base.py:138
+msgid "Conflicts"
+msgstr ""
+
+#: ../pakfire/packages/base.py:143
+msgid "Obsoletes"
+msgstr ""
+
+#: ../pakfire/packages/base.py:151
 msgid "File"
 msgstr ""
 
-#: ../pakfire/packages/base.py:305
+#: ../pakfire/packages/base.py:333
 msgid "Not set"
 msgstr ""
 
-#: ../pakfire/repository/index.py:220
+#: ../pakfire/packages/file.py:146
 #, python-format
-msgid ""
-"There is no metadata for the repository '%s' and we cannot download any "
-"because we are running in offline mode. Connect to a network or disable this "
-"repository."
+msgid "Filename: %s"
+msgstr ""
+
+#: ../pakfire/packages/make.py:98
+msgid "Package name is undefined."
 msgstr ""
 
-#: ../pakfire/repository/index.py:225
+#: ../pakfire/packages/make.py:101
+msgid "Package version is undefined."
+msgstr ""
+
+#. Load progressbar.
+#: ../pakfire/packages/packager.py:309
+msgid "Packaging"
+msgstr ""
+
+#: ../pakfire/packages/packager.py:602
 #, python-format
-msgid ""
-"I cannot be forced to re-download the metadata for the repository '%s' when "
-"running in offline mode."
+msgid "Building source package %s:"
 msgstr ""
 
-#: ../pakfire/repository/index.py:262
+#: ../pakfire/repository/index.py:229
+#, python-format
 msgid ""
-"Your repository metadata is outdated  and a new version needs to be "
-"downloaded."
+"I cannot be forced to re-download the metadata for the repository '%s' when "
+"running in offline mode."
 msgstr ""
 
-#: ../pakfire/repository/index.py:268
+#: ../pakfire/repository/index.py:279
 #, python-format
 msgid "%s: package database"
 msgstr ""
 
 #. Create progress bar.
-#: ../pakfire/repository/index.py:356
+#: ../pakfire/repository/index.py:367
 #, python-format
 msgid "Loading from %s"
 msgstr ""
 
 #. Add all packages from the database to the index.
-#: ../pakfire/repository/index.py:413
+#: ../pakfire/repository/index.py:424
 msgid "Loading installed packages"
 msgstr ""
 
@@ -532,7 +585,7 @@ msgstr ""
 msgid "Running transaction"
 msgstr ""
 
-#: ../pakfire/util.py:65
+#: ../pakfire/util.py:66
 #, python-format
 msgid "%s [y/N]"
 msgstr ""
@@ -551,15 +604,15 @@ msgstr ""
 msgid "The error that lead to this:"
 msgstr ""
 
-#: ../scripts/pakfire:64
+#: ../scripts/pakfire:65
 msgid "An error has occured when running Pakfire."
 msgstr ""
 
-#: ../scripts/pakfire:67
+#: ../scripts/pakfire:68
 msgid "Error message:"
 msgstr ""
 
-#: ../scripts/pakfire:71
+#: ../scripts/pakfire:72
 msgid "Further description:"
 msgstr ""
 
index dd7fabc41e65d0b7de08210d107be2b0534b5959..1e85f064943ef30f45d798282be59d0c4e363236 100755 (executable)
@@ -34,6 +34,7 @@ except ImportError, e:
 basename2cls = {
        "pakfire" : Cli,
        "pakfire-build" : CliBuilder,
+       "pakfire-build2" : CliBuilder2,
        "pakfire-server" : CliServer,
 }
 
index c168cf4368030b8af289e5f4b3d97973284c61ee..252b7525194144d40b16d6254b354018d5a90c05 100644 (file)
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ from distutils.core import Extension, setup
 
 from DistUtilsExtra.command import *
 
-PAKFIRE_VERSION = "0.9.7"
+PAKFIRE_VERSION = "0.9.8"
 
 _pakfire_module_files = [os.path.join("src", f) for f in os.listdir("src") if f.endswith(".c")]
 
@@ -30,8 +30,12 @@ setup(
        scripts = [
                "scripts/pakfire",
                "scripts/pakfire-build",
+               "scripts/pakfire-build2",
                "scripts/pakfire-server",
        ],
+       data_files = [
+               ("lib/pakfire/macros", [os.path.join("macros", f) for f in os.listdir("macros") if f.endswith(".macro")]),
+       ],
        ext_modules = [
                Extension("pakfire._pakfire", _pakfire_module_files,
                        extra_link_args = ["-lsolv", "-lsolvext"])