kernel_version = os.uname()[2]
def __init__(self, filename=None, distro_name=None, config=None, configs=None, arch=None,
- build_id=None, logfile=None, builder_mode="release", use_cache=None, **pakfire_args):
+ build_id=None, logfile=None, builder_mode="release", **pakfire_args):
# Set mode.
assert builder_mode in ("development", "release",)
self.mode = builder_mode
# Where do we put the result?
self.resultdir = os.path.join(self.path, "result")
- # Check weather to use or not use the cache.
- if use_cache is None:
- # If use_cache is None, the user did not provide anything and
- # so we guess.
- if self.mode == "development":
- use_cache = True
- else:
- use_cache = False
-
- self.use_cache = use_cache
-
# Open package.
# If we have a plain makefile, we first build a source package and go with that.
if filename:
if not requires:
requires = []
- if self.use_cache and os.path.exists(self.cache_file):
- # If we are told to use the cache, we just import the
- # file.
- self.cache_extract()
- else:
- # Add neccessary build dependencies.
- requires += BUILD_PACKAGES
+ # Add necessary build dependencies.
+ requires += BUILD_PACKAGES
# If we have ccache enabled, we need to extract it
# to the build chroot.
self.log.info(" %s" % line)
self.log.info("") # Empty line.
- @property
- def cache_file(self):
- comps = [
- self.pakfire.distro.sname, # name of the distribution
- self.pakfire.distro.release, # release version
- self.pakfire.distro.arch, # architecture
- ]
-
- return os.path.join(CACHE_ENVIRON_DIR, "%s.cache" %"-".join(comps))
-
- def cache_export(self, filename):
- # Sync all disk caches.
- _pakfire.sync()
-
- # A list to store all mountpoints, so we don't package them.
- mountpoints = []
-
- # A list containing all files we want to package.
- filelist = []
-
- # Walk through the whole tree and collect all files
- # that are on the same disk (not crossing mountpoints).
- log.info(_("Creating filelist..."))
- root = self.chrootPath()
- for dir, subdirs, files in os.walk(root):
- # Search for mountpoints and skip them.
- if not dir == root and os.path.ismount(dir):
- mountpoints.append(dir)
- continue
-
- # Skip all directories under mountpoints.
- if any([dir.startswith(m) for m in mountpoints]):
- continue
-
- # Add all other files.
- filelist.append(dir)
- for file in files:
- file = os.path.join(dir, file)
- filelist.append(file)
-
- # Create a nice progressbar.
- p = util.make_progress(_("Compressing files..."), len(filelist))
- i = 0
-
- # Create tar file and add all files to it.
- f = packages.file.InnerTarFile.open(filename, "w:gz")
- for file in filelist:
- i += 1
- if p:
- p.update(i)
-
- f.add(file, os.path.relpath(file, root), recursive=False)
- f.close()
-
- # Finish progressbar.
- if p:
- p.finish()
-
- filesize = os.path.getsize(filename)
-
- log.info(_("Cache file was successfully created at %s.") % filename)
- log.info(_(" Containing %(files)s files, it has a size of %(size)s.") % \
- { "files" : len(filelist), "size" : util.format_size(filesize), })
-
- def cache_extract(self):
- root = self.chrootPath()
- filename = self.cache_file
-
- f = packages.file.InnerTarFile.open(filename, "r:gz")
- members = f.getmembers()
-
- # Make a nice progress bar as always.
- p = util.make_progress(_("Extracting files..."), len(members))
-
- # Extract all files from the cache.
- i = 0
- for member in members:
- if p:
- i += 1
- p.update(i)
-
- f.extract(member, path=root)
- f.close()
-
- # Finish progressbar.
- if p:
- p.finish()
-
- # Re-read local repository.
- self.pakfire.repos.local.update(force=True)
-
- # Update all packages.
- self.log.info(_("Updating packages from cache..."))
- self.pakfire.update(interactive=False, logger=self.log,
- allow_archchange=True, allow_vendorchange=True, allow_downgrade=True)
-
class Builder(object):
def __init__(self, pakfire, filename, resultdir, **kwargs):
self.parse_command_repolist()
self.parse_command_clean()
self.parse_command_resolvdep()
- self.parse_command_cache()
# Finally parse all arguments from the command line and save them.
self.args = self.parser.parse_args()
"repolist" : self.handle_repolist,
"clean_all" : self.handle_clean_all,
"resolvdep" : self.handle_resolvdep,
- "cache_create": self.handle_cache_create,
- "cache_cleanup": self.handle_cache_cleanup,
}
@property
sub_dist.add_argument("--resultdir", nargs="?",
help=_("Path were the output files should be copied to."))
- def parse_command_cache(self):
- # Implement the "cache" command.
- sub_cache = self.sub_commands.add_parser("cache",
- help=_("Create a build environment cache."))
-
- # Implement subcommands.
- sub_cache_commands = sub_cache.add_subparsers()
-
- self.parse_command_cache_create(sub_cache_commands)
- self.parse_command_cache_cleanup(sub_cache_commands)
-
- def parse_command_cache_create(self, sub_commands):
- sub_create = sub_commands.add_parser("create",
- help=_("Create a new build environment cache."))
- sub_create.add_argument("action", action="store_const", const="cache_create")
-
- def parse_command_cache_cleanup(self, sub_commands):
- sub_cleanup = sub_commands.add_parser("cleanup",
- help=_("Remove all cached build environments."))
- sub_cleanup.add_argument("action", action="store_const", const="cache_cleanup")
-
def handle_info(self):
Cli.handle_info(self, long=True)
for pkg in pkgs:
print pkg.dump(long=True)
- def handle_cache_create(self):
- pakfire.cache_create(**self.pakfire_args)
-
- def handle_cache_cleanup(self):
- for env in os.listdir(CACHE_ENVIRON_DIR):
- if not env.endswith(".cache"):
- continue
-
- print _("Removing environment cache file: %s..." % env)
- env = os.path.join(CACHE_ENVIRON_DIR, env)
-
- try:
- os.unlink(env)
- except OSError:
- print _("Could not remove file: %s") % env
-
class CliServer(Cli):
def __init__(self):