This references #53.
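
The core of this change replaces the old Builder/BuildSet pair with a
job queue. Roughly, callers look up source packages, wrap each one in a
build.Build job and feed it to build.Jobs, which processes the queue one
job at a time. A minimal sketch of the new flow (the package names are
placeholders):

    arch = architectures.Architectures().get_default()
    repo = repositories.SourceRepositories(arch=arch)

    jobs = build.Jobs()
    for name in ("bash", "coreutils"):
        package = repo.find_package_by_name(name)
        jobs.add(build.Build(package))

    while jobs.has_jobs:
        jobs.process_next()
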
+[default]
+
+; The default architecture
+default = i686
[i686]
+import random
import sys
import time
-import backend
+import architectures
import build
-import chroot
import environ
+import generators
import logger
-import repo
+import repositories
import terminal
import util
"package" : self.call_package,
"source" : self.call_source,
"shell" : self.call_shell,
- "repository" : self.call_repository,
"generate" : self.call_generate,
"batch" : self.call_batch,
}
return actionmap[args.action.name](args.action)
+ def _get_source_repos(self, arch=None):
+ if not arch:
+ arches = architectures.Architectures()
+ arch = arches.get_default()
+
+ return repositories.SourceRepositories(arch=arch)
+
def call_build(self, args):
- builder = build.Builder()
+ # Source repository
+ repo = self._get_source_repos()
+
+ # Initialize job queue
+ jobs = build.Jobs()
if args.all:
raise Exception, "XXX to be implemented"
else:
for name in args.packages:
- p = repo.find_source_package(name)
+ p = repo.find_package_by_name(name)
if not p:
raise Exception, "Could not find package: %s" % name
- builder.add(p)
+ p = build.Build(p)
+ jobs.add(p)
- return builder.run(ignore_dependency_errors=args.ignore_dependency_errors)
+ #return builder.run(ignore_dependency_errors=args.ignore_dependency_errors)
+ while jobs.has_jobs:
+ jobs.process_next()
def call_package(self, args):
if not args.has_key("action"):
actionmap = {
"info" : self.call_package_info,
"list" : self.call_package_list,
- "tree" : self.call_package_tree,
"groups" : self.call_package_groups,
}
return actionmap[args.action.name](args.action)
def call_package_info(self, args):
- for package in backend.parse_package_info(args.packages):
+ # Source repositories
+ repo = self._get_source_repos()
+
+ for package in repo.packages:
if args.long:
print package.fmtstr("""\
--------------------------------------------------------------------------------
""")
def call_package_list(self, args):
- for package in backend.parse_package_info(backend.get_package_names()):
+ repo = self._get_source_repos()
+
+ for package in repo.packages:
# Skip unbuilt packages if we want built packages
- if args.built and not package.built:
+ if args.built and not package.is_built:
continue
# Skip built packages if we want unbuilt only
- if args.unbuilt and package.built:
+ if args.unbuilt and package.is_built:
continue
if args.long:
else:
print package.name
- def call_package_tree(self, args):
- print backend.deptree(backend.parse_package(backend.get_package_names(), naoki=self))
-
def call_package_groups(self, args):
- groups = backend.get_group_names()
- print "\n".join(groups)
+ # XXX
+ #groups = backend.get_group_names()
+ #print "\n".join(groups)
+ pass
def call_source(self, args):
if not args.has_key("action"):
actionmap = {
"download" : self.call_source_download,
"upload" : self.call_source_upload,
- "clean" : self.call_source_clean,
}
return actionmap[args.action.name](args.action)
def call_source_download(self, args):
- for package in backend.parse_package(args.packages or \
- backend.get_package_names(), naoki=self):
- package.download()
+ repo = self._get_source_repos()
- def call_source_upload(self, args):
- pass # TODO
+ for package in repo.packages:
+ if args.packages:
+				if package.name not in args.packages:
+ continue
- def call_source_clean(self, args):
- self.log.info("Remove all unused files")
- files = os.listdir(TARBALLDIR)
- for package in backend.parse_package_info(backend.get_package_names()):
- for object in package.objects:
- if object in files:
- files.remove(object)
+			package.source_download()
- for file in sorted(files):
- self.log.info("Removing %s..." % file)
- os.remove(os.path.join(TARBALLDIR, file))
+ def call_source_upload(self, args):
+ pass # TODO
def call_shell(self, args):
- p = repo.find_source_package(args.package)
- if not p:
- raise Exception, "Could not find package: %s" % args.package
-
- build_set = build.BuildSet(p)
-
- return build_set.shell()
+ # Load architecture set
+ arches = architectures.Architectures()
- def call_repository(self, args):
- actionmap = {
- "clean" : self.call_repository_clean,
- "build" : self.call_repository_build,
- }
-
- return actionmap[args.action.name](args.action)
+ # Choose default architecture
+ arch = arches.get_default()
- def call_repository_clean(self, repo, args):
- if args.names == ["all"]:
- args.names = [r.name for r in backend.get_repositories()]
+ # Load all source packages
+ repo = repositories.SourceRepositories(arch=arch)
- for name in args.names:
- repo = backend.BinaryRepository(name, naoki=self)
- repo.clean()
+ # Pick the one we need
+ p = repo.find_package_by_name(args.package)
+ if not p:
+ raise Exception, "Could not find package: %s" % args.package
- def call_repository_build(self, args):
- if args.names == ["all"]:
- args.names = [r.name for r in backend.get_repositories()]
+ # Initialize and run the shell
+ shell = build.PackageShell(p)
- for name in args.names:
- repo = backend.BinaryRepository(name, naoki=self)
- repo.build()
+ return shell.shell()
def call_generate(self, args):
if not args.type in ("iso",):
return
- gen = chroot.Generator(self, arches.current, args.type)
+ arch = architectures.Architectures().get_default()
+
+ gen = generators.Generator(args.type, arch)
return gen.run()
def call_batch(self, args):
return actionmap[args.action.name](args.action)
def call_batch_cron(self, args):
- packages = []
- packages_may = []
- for package in backend.parse_package_info(backend.get_package_names()):
- if not package.built and package.buildable:
- packages.append(package)
- continue
-
- # If package was altered since last build
- if package.last_change >= package.last_build:
- packages.append(package)
- continue
-
- if package.buildable:
- packages_may.append(package)
+ pkgs = []
+ candidates = []
- packages_may = sorted(packages_may, key=lambda p: p.last_build)
+ # Choose architecture
+ arches = architectures.Architectures()
+ arch = arches.get_default()
- while len(packages) < 10 and packages_may:
- package = packages_may.pop(0)
- packages.append(package)
-
- # Bad hack because we lack a _build method
- args.packages = [p.name for p in packages]
- args.onlydeps = False
- args.withdeps = False
- args.shell = False
-
- self.call_build(args)
+ repo = repositories.SourceRepositories(arch=arch)
+ for package in repo.packages:
+ if not package.is_built:
+ pkgs.append(package)
+ else:
+ candidates.append(package)
+
+ # Initialize a job queue
+ jobs = build.Jobs()
+
+ # Add all unbuilt packages to the job queue
+ for package in pkgs:
+ package = build.Build(package)
+ jobs.add(package)
+
+ # If we have less than ten packages in the queue we add some random
+ # ones
+ need_counter = 10 - len(jobs)
+		if need_counter > 0:
+			# random.sample raises ValueError if asked for more items
+			# than the population holds, so clamp the counter
+			need_counter = min(need_counter, len(candidates))
+			for candidate in random.sample(candidates, need_counter):
+ candidate = build.Build(candidate)
+ jobs.add(candidate)
+
+ while jobs.has_jobs:
+ jobs.process_next()
+++ /dev/null
-#!/usr/bin/python
-
-from ConfigParser import ConfigParser
-
-from constants import *
-
-_architectures = []
-
-def all():
- return _architectures
-
-def get(name):
- for arch in _architectures:
- if arch.name == name:
- return arch
-
- raise Exception, "Architecture was not found: %s" % name
-
-def get_default():
- for a in _architectures:
- if a.default:
- return a
-
- return "i686" # XXX for now
-
-def set_default(name):
- for a in _architectures:
- if a.name == name:
- a.default = True
- else:
- a.default = False
-
-def read(configuration_file):
- p = ConfigParser()
- p.read(configuration_file)
-
- for arch in p.sections():
- settings = {}
- for key, val in p.items(arch):
- settings[key] = val
- a = Architecture(arch, **settings)
- _architectures.append(a)
-
-class Architecture(object):
- def __init__(self, name, **settings):
- self.name = name
-
- self.settings = {
- "default" : False,
- "package_dir" : os.path.join(PACKAGESDIR, self.name),
- }
- self.settings.update(settings)
-
- def __repr__(self):
- return "<%s %s>" % (self.__class__.__name__, self.name)
-
- def __getattr__(self, key):
- try:
- return self.settings[key]
- except KeyError:
- raise ValueError
-
- def get_default(self):
- return self.settings["default"]
-
- def set_default(self, val):
- self.settings["default"]
-
- default = property(get_default, set_default)
-
-
-read(config["architecture_config"])
-
-if __name__ == "__main__":
- for a in _architectures:
- print a, a.settings
--- /dev/null
+#!/usr/bin/python
+
+import logging
+import os.path
+
+from ConfigParser import ConfigParser, DEFAULTSECT
+
+from constants import *
+from decorators import *
+
+class Architecture(object):
+ def __init__(self, name, **settings):
+
+ self._settings = {
+ "default" : False,
+ "name" : name,
+ }
+ self._settings.update(settings)
+
+ logging.debug("Set up new architecture: %s" % self._settings)
+
+ def __repr__(self):
+ s = "<%s %s" % (self.__class__.__name__, self._settings["name"])
+ if self.default:
+ s += " (default)"
+ s += ">"
+ return s
+
+ @property
+ def buildable(self):
+ """ Check if this architecture is buildable on the local host"""
+
+ return True # TODO
+
+ def get_default(self):
+ return self._settings["default"]
+
+ def set_default(self, value):
+ self._settings["default"] = value
+
+ default = property(get_default, set_default)
+
+ def __getattr__(self, attr):
+ try:
+ return self._settings[attr]
+ except KeyError:
+ raise AttributeError, attr
+
+
+@singleton
+class Architectures(object):
+ def __init__(self):
+ self._architectures = []
+
+ # Try to read the default architectures
+ self.read(ARCHES_DEFAULT)
+
+ def read(self, filename):
+ logging.debug("Reading architectures from %s" % filename)
+
+ if not os.path.exists(filename):
+ return
+
+ p = ConfigParser()
+ p.read(filename)
+
+		default = []
+
+ for arch in p.sections():
+ if arch == "default":
+ default = p.items(arch)
+ continue
+
+ settings = {}
+ for key, val in p.items(arch):
+ settings[key] = val
+
+ a = Architecture(arch, **settings)
+ self.add_architecture(a)
+
+ for key, val in default:
+ if key == "default":
+ self.default = val
+
+ def add_architecture(self, arch):
+ assert isinstance(arch, Architecture)
+
+ self._architectures.append(arch)
+
+ def get_default(self):
+ for a in self._architectures:
+ if a.default:
+ return a
+
+ raise Exception, "Cannot find default architecture"
+
+ def set_default(self, arch):
+ if arch is None:
+ return
+
+		if arch not in [a.name for a in self.all]:
+ raise Exception, "Cannot set default architecture: Unknown architecture %s" % arch
+
+ logging.debug("Setting default architecture: %s" % arch)
+
+ for a in self._architectures:
+ if a.name == arch:
+ a.default = True
+ else:
+ a.default = False
+
+ default = property(get_default, set_default)
+
+ def get(self, name):
+ for arch in self._architectures:
+ if arch.name == name:
+ return arch
+
+ raise Exception, "Could not find arch: %s" % name
+
+ @property
+ def all(self):
+ return self._architectures[:]
+
+
+if __name__ == "__main__":
+ arches = Architectures()
+
+ for arch in arches.all:
+ print "Name: %s" % arch.name
+ for key, val in arch._settings.items():
+ print " %-20s : %s" % (key, val)
+
+ print arches.get("i686")
+ print arches.default
+
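
For reference, Architectures parses the INI-style architectures.conf
shown at the top of this diff; any extra keys in a section (e.g. the
machine, cflags and cxxflags values that environ reads) become settings
on the Architecture object. Because the class is wrapped in @singleton,
the file is parsed only once per process, as this sketch illustrates:

    arches = architectures.Architectures()
    assert arches is architectures.Architectures()

    arch = arches.get_default()   # the [default] section selects i686
    print arch.name, arch.default
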
+++ /dev/null
-#!/usr/bin/python
-
-
-import os
-import shutil
-import smtplib
-import urlgrabber
-import urlgrabber.progress
-import urllib
-
-import chroot
-import util
-
-from exception import *
-from constants import *
-
-__cache = {
- "package_names" : None,
- "group_names" : None,
-}
-
-# Python 2.4 does not have that email module, so
-# we disable the mail function here.
-try:
- import email.mime.multipart
- import email.mime.text
- have_email = 1
-except ImportError:
- have_email = 0
-
-try:
- import hashlib
- have_hashlib = 1
-except ImportError:
- import sha
- have_hashlib = 0
-
-def find_package_info(name, toolchain=False, **kwargs):
- for repo in get_repositories(toolchain):
- if not os.path.exists(os.path.join(repo.path, name, name + ".nm")):
- continue
-
- return PackageInfo(name, repo=repo, **kwargs)
-
-def find_package(name, naoki, toolchain=False):
- package = find_package_info(name, toolchain)
- if package:
- return package.getPackage(naoki)
-
- return None
-
-def parse_package_info(names, toolchain=False, **kwargs):
- packages = []
- for name in names:
- package = find_package_info(name, toolchain, **kwargs)
- if package:
- packages.append(package)
-
- return packages
-
-def parse_package(names, toolchain=False, naoki=None):
- packages = parse_package_info(names, toolchain)
-
- return [Package(package.name, naoki=naoki, toolchain=toolchain) \
- for package in packages]
-
-def get_package_names(toolchain=False):
- if not __cache["package_names"]:
- names = []
- for repo in get_repositories(toolchain):
- names.extend(repo.package_names)
-
- __cache["package_names"] = sorted(names)
-
- return __cache["package_names"]
-
-def get_group_names():
- if not __cache["group_names"]:
- groups = []
- for package in parse_package_info(get_package_names()):
- if not package.group in groups:
- groups.append(package.group)
-
- __cache["group_names"] = sorted(groups)
-
- return __cache["group_names"]
-
-def find_package_name(name, toolchain=False):
- if name in get_package_names(toolchain):
- return name
-
- for package in get_package_names(toolchain):
- if os.path.basename(package) == name:
- return package
-
-def depsolve(packages, recursive=False, build=False, toolchain=False):
- deps = []
- for package in packages:
- if not package in deps:
- deps.append(package)
-
- if not recursive or not deps:
- return deps
-
- while True:
- length = len(deps)
- for dep in deps[:]:
- deps.extend(dep.dependencies)
- if build and not toolchain:
- deps.extend(dep.dependencies_build)
-
- new_deps = []
- for dep in deps:
- if not dep in new_deps:
- new_deps.append(dep)
-
- deps = new_deps
-
- if length == len(deps):
- break
-
- deps.sort()
- return deps
-
-def deptree(packages, toolchain=False):
- ret = [packages]
-
- while True:
- next = []
- stage = ret[-1][:]
- for package in stage[:]:
- for dep in package.dependencies_all:
- if dep in ret[-1]:
- stage.remove(package)
- next.append(package)
- break
-
- ret[-1] = stage
- if next:
- ret.append(next)
- continue
-
- break
-
- return ret
-
-def depsort(packages, toolchain=False):
- ret = []
- for l1 in deptree(packages, toolchain=toolchain):
- ret.extend(l1)
- return ret
-
-def calc_hash(data):
- if have_hashlib:
- obj = hashlib.sha1(data)
- else:
- obj = sha.new(data)
-
- return obj.hexdigest()
-
-def download(files, logger=None):
- for file in files:
- filepath = os.path.join(TARBALLDIR, file)
-
- if not os.path.exists(TARBALLDIR):
- os.makedirs(TARBALLDIR)
-
- if os.path.exists(filepath):
- continue
-
- url = config["sources_download_url"] + "/" + file
-
- if logger:
- logger.debug("Retrieving %s" % url)
-
- g = urlgrabber.grabber.URLGrabber(
- user_agent = "%sSourceGrabber/%s" % (config["distro_name"], config["distro_version"],),
- progress_obj = urlgrabber.progress.TextMeter(),
- quote=0,
- )
-
- try:
- gobj = g.urlopen(url)
- except urlgrabber.grabber.URLGrabError, e:
- if logger:
- logger.error("Could not retrieve %s - %s" % (url, e))
- raise
-
- data = gobj.read()
- gobj.close()
-
- if gobj.hdr.has_key("X-Hash-Sha1"):
- hash_server = gobj.hdr["X-Hash-Sha1"]
- msg = "Comparing hashes - %s" % hash_server
-
- hash_calculated = calc_hash(data)
- if hash_calculated == hash_server:
- if logger:
- logger.debug(msg + " - OK")
- else:
- if logger:
- logger.error(msg + " - ERROR")
- raise DownloadError, "Hash sum of downloaded file does not match"
-
- fobj = open(filepath, "w")
- fobj.write(data)
- fobj.close()
-
-
-class PackageInfo(object):
- __data = {}
-
- def __init__(self, name, repo=None, arch="i686"):
- self._name = name
- self.repo = repo
-
- self.arch = arch
-
- def __cmp__(self, other):
- return cmp(self.name, other.name)
-
- def __repr__(self):
- return "<PackageInfo %s>" % self.name
-
- def get_data(self):
- if not self.__data.has_key(self.name):
- self.__data[self.name] = self.fetch()
-
- return self.__data[self.name]
-
- def set_data(self, data):
- self.__data[self.name] = data
-
- _data = property(get_data, set_data)
-
- def fetch(self):
- env = os.environ.copy()
- env.update(config.environment)
- env.update({
- "PKG_ARCH" : self.arch,
- "PKGROOT" : PKGSDIR,
- })
- output = util.do("make -f %s" % self.filename, shell=True,
- cwd=os.path.join(PKGSDIR, self.repo.name, self.name), returnOutput=1, env=env)
-
- ret = {}
- for line in output.splitlines():
- a = line.split("=", 1)
- if not len(a) == 2: continue
- key, val = a
- ret[key] = val.strip("\"")
-
- ret["FINGERPRINT"] = self.fingerprint
-
- return ret
-
- def fmtstr(self, s):
- return s % self.all
-
- def getPackage(self, naoki):
- return Package(self.name, naoki)
-
- @property
- def all(self):
- return {
- "build_deps" : [dep.name for dep in self.dependencies_build],
- "deps" : [dep.name for dep in self.dependencies],
- "description" : self.description,
- "filename" : self.filename,
- "fingerprint" : self.fingerprint,
- "files" : self.package_files,
- "group" : self.group,
- "id" : self.id,
- "license" : self.license,
- "maintainer" : self.maintainer,
- "name" : self.name,
- "objects" : self.objects,
- "patches" : self.patches,
- "release" : self.release,
- "summary" : self.summary,
- "url" : self.url,
- "version" : self.version,
- }
-
- @property
- def buildable(self):
- return self.dependencies_unbuilt == []
-
- @property
- def built(self):
- for file in self.package_files:
- if not os.path.exists(os.path.join(PACKAGESDIR, file)):
- return False
-
- return True
-
- def _dependencies(self, s, recursive=False, toolchain=False):
- c = s + "_CACHE"
- if not self._data.has_key(c):
- deps = parse_package_info(self._data.get(s).split(" "), toolchain=toolchain)
- self._data.update({c : depsolve(deps, recursive)})
-
- return self._data.get(c)
-
- @property
- def dependencies(self):
- if self.__toolchain:
- return self.dependencies_toolchain
-
- return self._dependencies("PKG_DEPENDENCIES")
-
- @property
- def dependencies_build(self):
- return self._dependencies("PKG_BUILD_DEPENDENCIES")
-
- @property
- def dependencies_built(self):
- ret = []
- for dep in self.dependencies_all:
- if dep.built:
- ret.append(dep)
-
- return ret
-
- @property
- def dependencies_unbuilt(self):
- ret = []
- for dep in self.dependencies_all:
- if not dep.built:
- ret.append(dep)
-
- return ret
-
- @property
- def dependencies_all(self):
- deps = self.dependencies
- if not self.__toolchain:
- deps.extend(self.dependencies_build)
- return depsolve(deps, build=True, recursive=True, toolchain=self.__toolchain)
-
- @property
- def dependencies_toolchain(self):
- return self._dependencies("PKG_TOOLCHAIN_DEPENDENCIES", toolchain=True)
-
- @property
- def dependencies_reversed(self):
- deps = []
- for package in parse_package_info(get_package_names()):
- if self.name in [dep.name for dep in package.dependencies]:
- deps.append(package.name)
-
- return deps
-
- @property
- def description(self):
- return self._data.get("PKG_DESCRIPTION")
-
- @property
- def filename(self):
- return os.path.join(PKGSDIR, self.repo.name, self.name,
- os.path.basename(self.name)) + ".nm"
-
- @property
- def fingerprint(self):
- return "%d" % self.last_change
-
- @property
- def group(self):
- return self._data.get("PKG_GROUP")
-
- @property
- def id(self):
- return "%s-%s-%s" % (self.name, self.version, self.release)
-
- @property
- def last_build(self):
- file = os.path.join(PACKAGESDIR, self.package_files[0])
- if not os.path.exists(file):
- return 0
-
- return os.stat(file).st_mtime
-
- @property
- def last_change(self):
- return os.stat(self.filename).st_mtime
-
- @property
- def license(self):
- return self._data.get("PKG_LICENSE")
-
- @property
- def maintainer(self):
- return self._data.get("PKG_MAINTAINER")
-
- @property
- def name(self):
- return self._name
-
- @property
- def objects(self):
- return self._data.get("PKG_OBJECTS").split(" ")
-
- @property
- def package_files(self):
- return self._data.get("PKG_PACKAGES_FILES").split(" ")
-
- @property
- def patches(self):
- return self._data.get("PKG_PATCHES").split(" ")
-
- @property
- def release(self):
- return self._data.get("PKG_REL")
-
- @property
- def summary(self):
- return self._data.get("PKG_SUMMARY")
-
- @property
- def url(self):
- return self._data.get("PKG_URL")
-
- @property
- def version(self):
- return self._data.get("PKG_VER")
-
- @property
- def __toolchain(self):
- return self.repo.name == "toolchain"
-
-
-class Package(object):
- def __init__(self, name, naoki, toolchain=False):
- self.info = find_package_info(name, toolchain)
-
- assert naoki
- self.naoki = naoki
-
- #self.log.debug("Initialized package object %s" % name)
-
- def __repr__(self):
- return "<Package %s>" % self.info.name
-
- def __cmp__(self, other):
- return cmp(self.name, other.name)
-
- def __getattr__(self, attr):
- return getattr(self.info, attr)
-
- @property
- def name(self):
- return self.info.name
-
- def build(self):
- environment = chroot.PackageEnvironment(self)
- environment.build()
-
- def download(self):
- download(self.info.objects, logger=self.log)
-
- def extract(self, dest):
- files = [os.path.join(PACKAGESDIR, file) for file in self.info.package_files]
- if not files:
- return
-
- self.log.debug("Extracting %s..." % files)
- util.do("%s --root=%s %s" % (os.path.join(TOOLSDIR, "decompressor"),
- dest, " ".join(files)), shell=True, logger=self.log)
-
- def getEnvironment(self, *args, **kwargs):
- return chroot.PackageEnvironment(self, *args, **kwargs)
-
- @property
- def logfile(self):
- return os.path.join(LOGDIR, self.repo.name, self.info.id) + ".log"
-
- @property
- def log(self):
- return self.naoki.logging.getBuildLogger(self)
-
-
-def get_repositories(toolchain=False):
- if toolchain:
- return [Repository("toolchain")]
-
- repos = []
- for repo in os.listdir(PKGSDIR):
- if os.path.isdir(os.path.join(PKGSDIR, repo)):
- repos.append(repo)
-
- repos.remove("toolchain")
-
- return [Repository(repo) for repo in repos]
-
-class Repository(object):
- def __init__(self, name):
- self.name = name
-
- def __repr__(self):
- return "<Repository %s>" % self.name
-
- @property
- def packages(self):
- packages = []
- for package in os.listdir(self.path):
- package = PackageInfo(package, repo=self)
- packages.append(package)
-
- return packages
-
- @property
- def package_names(self):
- return [package.name for package in self.packages]
-
- @property
- def path(self):
- return os.path.join(PKGSDIR, self.name)
-
-
-class BinaryRepository(object):
- DIRS = ("db", "packages")
-
- def __init__(self, name, naoki=None, arch=None):
- self.name = name
- self.arch = arch or arches.current
- self.repo = Repository(self.name)
-
- assert naoki
- self.naoki = naoki
-
- def build(self):
- if not self.buildable:
- raise Exception, "Cannot build repository"
-
- # Create temporary directory layout
- util.rm(self.repopath("tmp"))
- for dir in self.DIRS:
- util.mkdir(self.repopath("tmp", dir))
-
- # Copy packages
- for package in self.packages:
- for file in package.package_files:
- shutil.copy(os.path.join(PACKAGESDIR, file),
- self.repopath("tmp", "packages"))
-
- # TODO check repository's sanity
- # TODO create repoview
- f = open(self.repopath("tmp", "db", "package-list.txt"), "w")
- for package in self.packages:
- s = "%-40s" % package.fmtstr("%(name)s-%(version)s-%(release)s")
- s += " | %s\n" % package.summary
- f.write(s)
- f.close()
-
- for dir in self.DIRS:
- util.rm(self.repopath(dir))
- shutil.move(self.repopath("tmp", dir), self.repopath(dir))
- util.rm(self.repopath("tmp"))
-
- def clean(self):
- if os.path.exists(self.path):
- self.log.debug("Cleaning up repository: %s" % self.path)
- util.rm(self.path)
-
- def repopath(self, *args):
- return os.path.join(self.path, *args)
-
- @property
- def buildable(self):
- for package in self.packages:
- if package.built:
- continue
- return False
-
- return True
-
- @property
- def log(self):
- return self.naoki.log
-
- @property
- def packages(self):
- packages = []
- for package in parse_package_info(get_package_names(), arch=self.arch["name"]):
- if not package.repo.name == self.name:
- continue
- packages.append(package)
- return packages
-
- @property
- def path(self):
- return os.path.join(REPOSDIR, self.name, self.arch["name"])
-
-def report_error_by_mail(package):
- log = package.naoki.log
-
- # Do not send a report if no recipient is configured
- if not config["error_report_recipient"]:
- return
-
- if not have_email:
- log.error("Can't send mail because this python version does not support this")
- return
-
- try:
- connection = smtplib.SMTP(config["smtp_server"])
- #connection.set_debuglevel(1)
-
- if config["smtp_user"] and config["smtp_password"]:
- connection.login(config["smtp_user"], config["smtp_password"])
-
- except smtplib.SMTPConnectError, e:
- log.error("Could not establish a connection to the smtp server: %s" % e)
- return
- except smtplib.SMTPAuthenticationError, e:
- log.error("Could not successfully login to the smtp server: %s" % e)
- return
-
- msg = email.mime.multipart.MIMEMultipart()
- msg["From"] = config["error_report_sender"]
- msg["To"] = config["error_report_recipient"]
- msg["Subject"] = config["error_report_subject"] % package.all
- msg.preamble = 'You will not see this in a MIME-aware mail reader.\n'
-
- body = """\
-The package %(name)s had a difficulty to build itself.
-This email will give you a short report about the error.
-
-Package information:
- Name : %(name)s - %(summary)s
- Version : %(version)s
- Release : %(release)s
-
- This package in maintained by %(maintainer)s.
-
-
-A detailed logfile is attached.
-
-Sincerely,
- Naoki
- """ % package.all
-
- msg.attach(email.mime.text.MIMEText(body))
-
- # Read log and append it to mail
- loglines = []
- if os.path.exists(package.logfile):
- f = open(package.logfile)
- line = f.readline()
- while line:
- line = line.rstrip()
- if line.endswith(LOG_MARKER):
- # Reset log
- loglines = []
-
- loglines.append(line)
- line = f.readline()
-
- f.close()
-
- if not loglines:
- loglines = ["Logfile wasn't found."]
-
- log = email.mime.text.MIMEText("\n".join(loglines), _subtype="plain")
- log.add_header('Content-Disposition', 'attachment',
- filename="%s.log" % package.id)
- msg.attach(log)
-
- try:
- connection.sendmail(config["error_report_sender"],
- config["error_report_recipient"], msg.as_string())
- except Exception, e:
- log.error("Could not send error report: %s: %s" % (e.__class__.__name__, e))
- return
-
- connection.quit()
#!/usr/bin/python
import logging
+import uuid
-import deps
+import dependencies
import environ
from constants import *
from exception import *
-class BuildSet(object):
+
+class Build(object):
def __init__(self, package):
self.package = package
- self.dependency_set = deps.DependencySet(self.package)
+ # Generate a random, but unique id
+ self.id = uuid.uuid4()
- logging.debug("Successfully created BuildSet for %s" % self.package)
+ # Create dependency set
+ self.dependency_set = dependencies.DependencySet(arch=self.arch)
- def __repr__(self):
- return "<%s %s>" % (self.__class__.__name__, self.package.name)
+ # Add all mandatory packages and build dependencies
+ deps = [dependencies.Dependency(p) for p in config["mandatory_packages"]]
+ deps += self.package.get_dependencies("build")
+ for package in deps:
+ self.dependency_set.add_dependency(package)
- def _resolve(self, ignore_errors=False):
- try:
- self.dependency_set.resolve()
- except DependencyResolutionError, e:
- if ignore_errors:
- logging.warning("Ignoring dependency errors: %s" % e)
- else:
- raise
+ self.settings = {
+ "ignore_dependency_errors" : False,
+ }
+
+ def __repr__(self):
+ return "<%s %s-%s:%s>" % \
+ (self.__class__.__name__, self.id, self.package.name, self.arch.name)
@property
def arch(self):
return self.package.arch
- def print_info(self):
- logging.info("Building: %s" % self.package.id)
+ def build(self, **settings):
+ self.settings.update(settings)
+
+ logging.info("Building: %s (%s)" % (self.package.id, self.id))
+ logging.info("")
logging.info(" %s" % self.package.summary)
logging.info("")
- def build(self, ignore_dependency_errors=False):
- logging.debug("Running build process for %s" % self)
- self.print_info()
+ # Resolve the dependencies
+ try:
+ self.dependency_set.resolve()
+ except DependencyResolutionError, e:
+ if self.settings["ignore_dependency_errors"]:
+ logging.warning("Ignoring dependency errors: %s" % e)
+ else:
+ raise
- self._resolve(ignore_errors=ignore_dependency_errors)
+ e = environ.Build(self.package)
- env = environ.Environment(self)
- env.build()
+ # Extract all tools
+ for package in self.dependency_set.packages:
+ e.extract(package)
- run = build
+ # Build :D
+ e.build()
- def shell(self):
- logging.debug("Running shell for %s" % self)
- self.print_info()
- # Add some packages that are kind of nice in a shell
- # like an editor and less...
- for dependency in [deps.Dependency(d) for d in config["shell_packages"]]:
- logging.debug("Adding shell dependency: %s" % dependency)
- self.dependency_set.add_dependency(dependency)
+class Jobs(object):
+ def __init__(self):
+ self.__jobs = []
+ self.__error_jobs = []
- self._resolve()
+ logging.debug("Initialized jobs queue")
- env = environ.Shell(self)
- env.shell()
+ def __len__(self):
+ return len(self.__jobs)
+ def add(self, job):
+ assert isinstance(job, Build)
-class Builder(object):
- def __init__(self):
- self._items = []
-
- logging.debug("Successfully created Builder instance")
+ self.__jobs.append(job)
- def add(self, i):
- self._items.append(BuildSet(i))
- logging.debug("Added %s to %s" % (i, self))
+ @property
+ def all(self):
+ return self.__jobs[:]
- def _reorder(self):
- logging.debug("Reordering BuildSets")
+ @property
+ def has_jobs(self):
+ return self.__jobs != []
- def run(self, *args, **kwargs):
- self._reorder()
+ def process_next(self):
+ if not self.__jobs:
+ return
- logging.info("Running build process")
+ job = self.__jobs[0]
- while self._items:
- i = self._items.pop(0)
+ try:
+ job.build()
+ finally:
+ self.__jobs.remove(job)
- # Run the actual build
- i.run(*args, **kwargs)
+class PackageShell(environ.Shell):
+ pass
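
A note on Jobs.process_next(): the job is dequeued in the finally
block, so one failed build cannot wedge the queue, but the exception
still propagates. A driver that wants to keep building after errors has
to catch it itself; a minimal sketch:

    while jobs.has_jobs:
        try:
            jobs.process_next()
        except Exception, e:
            logging.error("Build failed: %s" % e)
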
+++ /dev/null
-#!/usr/bin/python
-
-import fcntl
-import grp
-import logging
-import os
-import random
-import shutil
-import stat
-import time
-
-import arches
-import backend
-import util
-from constants import *
-from exception import *
-from logger import getLog
-
-class Environment(object):
- kernel_version = os.uname()[2]
-
- def __init__(self, naoki, arch="i686"):
- self.arch = arch
- self.config = config
- self.naoki = naoki
-
- self.initialized = False
- self.__buildroot = None
-
- self.toolchain = Toolchain(self.arch["name"])
-
- # Create initial directory that we can set the lock
- util.mkdir(self.chrootPath())
-
- # Lock environment. Throws exception if function cannot set the lock.
- self.lock()
-
- def init(self, clean=True):
- marker = self.chrootPath(".initialized")
- self.log.debug("Initialize environment %s..." % self.chrootPath())
-
- if clean:
- self.clean()
-
- # If marker exists, we won't reinit again
- if os.path.exists(marker):
- return
-
- # create dirs
- dirs = (
- CACHEDIR,
- CCACHEDIR,
- IMAGESDIR,
- PACKAGESDIR,
- self.chrootPath("bin"),
- self.chrootPath("etc"),
- self.chrootPath("lib"),
- self.chrootPath("proc"),
- self.chrootPath("root"),
- self.chrootPath("sbin"),
- self.chrootPath("sys"),
- self.chrootPath("tmp"),
- self.chrootPath("tools_%s" % self.arch["name"]),
- self.chrootPath("usr/src/cache"),
- self.chrootPath("usr/src/ccache"),
- self.chrootPath("usr/src/images"),
- self.chrootPath("usr/src/packages"),
- self.chrootPath("usr/src/pkgs"),
- self.chrootPath("usr/src/src"),
- self.chrootPath("usr/src/tools"),
- self.chrootPath("var/tmp"),
- )
- for item in dirs:
- util.mkdir(item)
-
- # touch files
- files = (
- "etc/fstab",
- "etc/mtab",
- )
- for item in files:
- util.touch(self.chrootPath(item))
-
- self._setupDev()
- self._setupUsers()
- self._setupDns()
-
- self.toolchain.extract(self.chrootPath())
-
- self.extractAll()
-
- self.toolchain.adjust(self.chrootPath())
-
- # Set marker
- util.touch(marker)
-
- @property
- def buildroot(self):
- if not self.__buildroot:
- self.__buildroot = "buildroot.%s" % util.random_string()
-
- return self.__buildroot
-
- def lock(self):
- self.log.debug("Trying to lock environment")
-
- try:
- self._lock = open(self.chrootPath(".lock"), "a+")
- except IOError, e:
- return 0
-
- try:
- fcntl.lockf(self._lock.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
- except IOError, e:
- raise BuildRootLocked, "Environment is locked by another process"
-
- return 1
-
- def clean(self):
- if os.path.exists(self.chrootPath()):
- util.rm(self.chrootPath())
-
- @property
- def environ(self):
- env = config.environment.copy()
- env.update({
- "HOME" : "/root",
- "BASEDIR" : "/usr/src",
- "PKGROOT" : "/usr/src/pkgs",
- "TOOLS_DIR" : "/tools_%s" % self.arch["name"],
- "TARGET" : "%s-ipfire-linux-gnu" % self.arch["machine"],
- "TARGET_MACHINE" : self.arch["machine"],
- "PATH" : CHROOT_PATH + ":/tools_%(arch)s/sbin:/tools_%(arch)s/bin" \
- % { "arch" : self.arch["name"], },
- "BUILDROOT" : "/%s" % self.buildroot,
- "CHROOT" : "1",
- "CFLAGS" : self.arch["cflags"],
- "CXXFLAGS" : self.arch["cxxflags"],
- "PKG_ARCH" : self.arch["name"],
- })
-
- ccache_path = os.path.join("tools_%s" % self.arch["name"],
- "usr", "ccache", "bin")
- if os.path.exists(self.chrootPath(ccache_path)):
- env.update({
- "PATH" : "/%s:%s" % (ccache_path, env["PATH"]),
- "CCACHE_COMPILERCHECK" : "none",
- "CCACHE_COMPRESS" : "1",
- "CCACHE_DIR" : "/usr/src/ccache",
- })
-
- return env
-
- def doChroot(self, command, shell=True, *args, **kwargs):
- ret = None
- try:
- env = self.environ
-
- if kwargs.has_key("env"):
- env.update(kwargs.pop("env"))
-
- self._mountall()
-
- if not kwargs.has_key("chrootPath"):
- kwargs["chrootPath"] = self.chrootPath()
-
- ret = util.do(command, personality=self.arch["personality"],
- shell=shell, env=env, logger=self.log, *args, **kwargs)
-
- finally:
- self._umountall()
-
- return ret
-
- def chrootPath(self, *args):
- raise NotImplementedError
-
- def _setupDev(self):
- self.log.debug("Setting up /dev and /proc")
-
- # files in /dev
- util.rm(self.chrootPath("dev"))
- util.mkdir(self.chrootPath("dev", "pts"))
- util.mkdir(self.chrootPath("dev", "shm"))
- prevMask = os.umask(0000)
-
- devNodes = [
- (stat.S_IFCHR | 0666, os.makedev(1, 3), "dev/null"),
- (stat.S_IFCHR | 0666, os.makedev(1, 7), "dev/full"),
- (stat.S_IFCHR | 0666, os.makedev(1, 5), "dev/zero"),
- (stat.S_IFCHR | 0666, os.makedev(1, 8), "dev/random"),
- (stat.S_IFCHR | 0444, os.makedev(1, 9), "dev/urandom"),
- (stat.S_IFCHR | 0666, os.makedev(5, 0), "dev/tty"),
- (stat.S_IFCHR | 0600, os.makedev(5, 1), "dev/console")
- ]
-
- # make device node for el4 and el5
- if self.kernel_version < "2.6.19":
- devNodes.append((stat.S_IFCHR | 0666, os.makedev(5, 2), "dev/ptmx"))
-
- for i in devNodes:
- # create node
- os.mknod(self.chrootPath(i[2]), i[0], i[1])
-
- os.symlink("/proc/self/fd/0", self.chrootPath("dev", "stdin"))
- os.symlink("/proc/self/fd/1", self.chrootPath("dev", "stdout"))
- os.symlink("/proc/self/fd/2", self.chrootPath("dev", "stderr"))
- os.symlink("/proc/self/fd", self.chrootPath("dev", "fd"))
-
- if self.kernel_version >= "2.6.19":
- os.symlink("/dev/pts/ptmx", self.chrootPath("dev", "ptmx"))
-
- os.umask(prevMask)
-
- def _setupUsers(self):
- self.log.debug("Creating users")
- f = open(self.chrootPath("etc", "passwd"), "w")
- f.write("root:x:0:0:root:/root:/bin/bash\n")
- f.write("nobody:x:99:99:Nobody:/:/sbin/nologin\n")
- f.close()
-
- f = open(self.chrootPath("etc", "group"), "w")
- f.write("root:x:0:root\n")
- f.write("nobody:x:99:\n")
- f.close()
-
- def _setupDns(self):
- self.log.debug("Setting up DNS")
- nameservers = []
- f = open("/etc/resolv.conf")
- for line in f.readlines():
- if line.startswith("nameserver"):
- nameservers.append(line.split(" ")[-1])
- f.close()
-
- self.log.debug("Using nameservers: %s" % nameservers)
-
- f = open(self.chrootPath("etc", "resolv.conf"), "w")
- for nameserver in nameservers:
- f.write("nameserver %s" % nameserver)
- f.close()
-
- self.log.debug("Creating record for localhost")
- f = open(self.chrootPath("etc", "hosts"), "w")
- f.write("127.0.0.1 localhost\n")
- f.close()
-
- def _mountall(self):
- """mount 'normal' fs like /dev/ /proc/ /sys"""
- self.log.debug("Mounting chroot")
- for cmd in self.mountCmds:
- util.do(cmd, shell=True)
-
- def _umountall(self):
- """umount all mounted chroot fs."""
- self.log.debug("Umounting chroot")
- for cmd in self.umountCmds:
- util.do(cmd, raiseExc=0, shell=True)
-
- @property
- def log(self):
- return getLog()
-
- def shell(self, args=[]):
- command = "chroot %s /usr/src/tools/chroot-shell %s" % \
- (self.chrootPath(), " ".join(args))
-
- for key, val in self.environ.items():
- command = "%s=\"%s\" " % (key, val) + command
-
- try:
- self._mountall()
-
- shell = os.system(command)
- return os.WEXITSTATUS(shell)
-
- finally:
- self._umountall()
-
- @property
- def umountCmds(self):
- ret = (
- "umount -n %s" % self.chrootPath("proc"),
- "umount -n %s" % self.chrootPath("sys"),
- "umount -n %s" % self.chrootPath("usr", "src", "cache"),
- "umount -n %s" % self.chrootPath("usr", "src", "ccache"),
- "umount -n %s" % self.chrootPath("usr", "src", "images"),
- "umount -n %s" % self.chrootPath("usr", "src", "packages"),
- "umount -n %s" % self.chrootPath("usr", "src", "pkgs"),
- "umount -n %s" % self.chrootPath("usr", "src", "src"),
- "umount -n %s" % self.chrootPath("usr", "src", "tools"),
- "umount -n %s" % self.chrootPath("dev", "pts"),
- "umount -n %s" % self.chrootPath("dev", "shm")
- )
-
- return ret
-
- @property
- def mountCmds(self):
- ret = [
- "mount -n -t proc naoki_chroot_proc %s" % self.chrootPath("proc"),
- "mount -n -t sysfs naoki_chroot_sysfs %s" % self.chrootPath("sys"),
- "mount -n --bind %s %s" % (CACHEDIR, self.chrootPath("usr", "src", "cache")),
- "mount -n --bind %s %s" % (CCACHEDIR, self.chrootPath("usr", "src", "ccache")),
- "mount -n --bind %s %s" % (IMAGESDIR, self.chrootPath("usr", "src", "images")),
- "mount -n --bind %s %s" % (PACKAGESDIR, self.chrootPath("usr", "src", "packages")),
- "mount -n --bind %s %s" % (PKGSDIR, self.chrootPath("usr", "src", "pkgs")),
- "mount -n --bind %s %s" % (os.path.join(BASEDIR, "src"), self.chrootPath("usr", "src", "src")),
- "mount -n --bind %s %s" % (TOOLSDIR, self.chrootPath("usr", "src", "tools")),
- ]
-
- mountopt = "gid=%d,mode=0620,ptmxmode=0666" % grp.getgrnam("tty").gr_gid
- if self.kernel_version >= "2.6.29":
- mountopt += ",newinstance"
-
- ret.extend([
- "mount -n -t devpts -o %s naoki_chroot_devpts %s" % (mountopt, self.chrootPath("dev", "pts")),
- "mount -n -t tmpfs naoki_chroot_shmfs %s" % self.chrootPath("dev", "shm")])
-
- return ret
-
- def extractAll(self):
- raise NotImplementedError
-
-
-class PackageEnvironment(Environment):
- def __init__(self, package, *args, **kwargs):
- self.package = package
-
- Environment.__init__(self, naoki=package.naoki, *args, **kwargs)
-
- def build(self):
- self.log.debug(LOG_MARKER)
-
- self.package.download()
-
- # Save start time
- time_start = time.time()
-
- try:
- self.make("package")
- except Error:
- if config["cleanup_on_failure"]:
- self.clean()
- backend.report_error_by_mail(self.package)
- raise
-
- time_end = time.time()
- self.log.debug("Package build took %.2fs" % (time_end - time_start))
-
- if config["cleanup_on_success"]:
- self.clean()
-
- def chrootPath(self, *args):
- return os.path.join(BUILDDIR, "environments", self.package.info.id, *args)
-
- def extractAll(self):
- packages = [p.getPackage(self.naoki) \
- for p in self.package.info.dependencies_all]
-
- for package in packages:
- package.extract(self.chrootPath())
-
- def make(self, target):
- file = "/usr/src%s" % self.package.info.filename[len(BASEDIR):]
-
- return self.doChroot("make -C %s -f %s %s" % \
- (os.path.dirname(file), file, target), shell=True)
-
- @property
- def log(self):
- return self.package.log
-
- def shell(self, *args):
- self.make("prepare")
- Environment.shell(self, *args)
- self.clean()
-
-
-class ShellEnvironment(Environment):
- def chrootPath(self, *args):
- return os.path.join(BUILDDIR, "environments", "shell", *args)
-
- def extractAll(self):
- pass
-
-
-class Toolchain(object):
- def __init__(self, arch):
- util.mkdir(TOOLCHAINSDIR)
-
- self.arch = arches[arch]
-
- # Create a filename object
- self.filename = "toolchain-%s.%s.tar.gz" % \
- (self.arch["name"], config["toolchain_version"],)
-
- # Store the path including the filename
- self.path = os.path.join(TOOLCHAINSDIR, self.filename)
-
- self.build_dir = os.path.join(BUILDDIR, "toolchains",
- "tools_%s.%s" % (self.arch["name"], config["toolchain_version"]))
-
- self.log = getLog()
-
- @property
- def exists(self):
- return os.path.exists(self.path)
-
- def download(self):
- self.log.info("Downloading toolchain...")
- pass
-
- def cmd(self, args=[]):
- cmd = "%s" % os.path.join(TOOLSDIR, "toolchain")
- if args:
- cmd += " "
- cmd += " ".join(args)
- util.do(cmd, cwd=self.build_dir, shell=True, logger=self.log,
- env={ "TOOLS_DIR" : self.build_dir })
-
- # TODO:
- # - logging
- def make(self, pkg, target):
- env = config.environment.copy()
- env.update({
- "BASEDIR" : BASEDIR,
- "PATH" : "/tools_%(arch)s/sbin:/tools_%(arch)s/bin:%(path)s" % \
- { "arch" : self.arch["name"], "path" : os.environ["PATH"], },
- "PKGROOT" : PKGSDIR,
- "ROOT" : self.build_dir,
- "TARGET" : "%s-ipfire-linux-gnu" % self.arch["machine"],
- "TARGET_MACHINE" : self.arch["machine"],
- "TOOLCHAIN" : "1",
- "TOOLS_DIR" : "/tools_%s" % self.arch["name"],
-
- "CFLAGS" : self.arch["cflags"],
- "CXXFLAGS" : self.arch["cxxflags"],
- })
-
- command = "make -C %s -f %s %s" % \
- (os.path.dirname(pkg.filename), pkg.filename, target)
-
- return util.do(command, shell=True, env=env, personality=self.arch["personality"],
- logger=self.log)
-
- def build_package(self, pkg):
- self.log.info("Building %s..." % pkg.name)
-
- source_dir = os.path.join(self.build_dir, "usr/src")
-
- util.rm(source_dir)
- util.mkdir(source_dir)
-
- self.checkLink()
-
- return self.make(pkg, "package")
-
- def compress(self):
- self.cmd(["compress", self.path, self.build_dir])
-
- def extract(self, path):
- self.cmd(["extract", self.path, os.path.join(path, "tools_%s" % self.arch["name"])])
-
- def adjust(self, path):
- self.cmd(["adjust", path])
-
- def build(self, naoki):
- self.log.info("Building toolchain...")
-
- packages = backend.get_package_names(toolchain=True)
- packages = backend.parse_package(packages, toolchain=True, naoki=naoki)
- packages = backend.depsort(packages)
- for pkg in packages:
- if os.path.exists(os.path.join(self.path, pkg.name)):
- continue
- self.build_package(pkg)
- self.compress()
-
- def checkLink(self):
- link = "/tools_%s" % self.arch["name"]
- destination = os.path.abspath(self.build_dir)
-
- if not os.path.islink(link):
- # File is not a link. Remove it...
- util.rm(link)
-
- else:
- # If link points to correct destination we break up
- if os.path.abspath(os.readlink(link)) == destination:
- return
- os.unlink(link)
-
-
- os.symlink(destination, link)
-
-
-class Generator(Environment):
- def __init__(self, naoki, arch, type):
- self.type = type
- Environment.__init__(self, naoki, arch)
-
- def chrootPath(self, *args):
- return os.path.join(BUILDDIR, "generators", self.type, self.arch["name"], *args)
-
- def run(self):
- self.init()
-
- # Extracting installer packages
- util.mkdir(self.chrootPath("installer"))
-
- for package in self.get_packages("installer"):
- package.extract(self.chrootPath("installer"))
-
- all_package_files = []
- for package in self.get_packages("all"):
- all_package_files.extend(package.package_files)
-
- self.doChroot("/usr/src/tools/generator %s" % self.type,
- env={"ALL_PACKAGES" : " ".join(all_package_files)})
-
- def get_packages(self, type):
- _packages = {
- "all" : backend.get_package_names(),
- "build" : [ "arping", "bash", "coreutils", "cpio", "curl",
- "dhcp", "findutils", "grep", "iproute2", "iputils", "kbd",
- "less", "module-init-tools", "procps", "sed", "sysvinit",
- "udev", "util-linux-ng", "which", "dvdrtools", "kernel",
- "squashfs-tools", "syslinux", "zlib",],
- "installer" : ["initscripts", "kernel", "pomona", "upstart"],
- }
- _package_infos = backend.parse_package_info(_packages[type])
-
- packages = []
- for package in backend.depsolve(_package_infos, recursive=True):
- package = package.getPackage(self.naoki)
- if not package in packages:
- packages.append(package)
-
- return packages
-
- def extractAll(self):
- # Extract required tools
- for package in self.get_packages("build"):
- package.extract(self.chrootPath())
CCACHEDIR = os.path.join(BASEDIR, "ccache")
CONFIGDIR = os.path.join(BASEDIR, "config")
DOCDIR = os.path.join(BASEDIR, "doc")
+GENDIR = os.path.join(BUILDDIR, "generators")
IMAGESDIR = os.path.join(BUILDDIR, "images")
LOGDIR = os.path.join(BASEDIR, "logs")
PKGSDIR = os.path.join(BASEDIR, "pkgs")
TOOLCHAINSDIR = os.path.join(CACHEDIR, "toolchains")
PATCHESDIR = os.path.join(CACHEDIR, "patches")
+ARCHES_DEFAULT = os.path.join(CONFIGDIR, "architectures.conf")
+
CONFIGFILE = os.path.join(CONFIGDIR, "naoki.conf")
CHROOT_PATH = "/sbin:/bin:/usr/sbin:/usr/bin"
"patch",
"sed",
"tar",
+ "which",
"xz",
],
"shell_packages" : [
return ret
-#class Architectures(object):
-# def __init__(self, configfile):
-# parser = ConfigParser.ConfigParser()
-# parser.read(configfile)
-#
-# arches = {}
-# for arch in parser.sections():
-# arches[arch] = { "name" : arch }
-# for key, val in parser.items(arch):
-# arches[arch][key] = val
-#
-# self._arches = arches
-# self.__current = None
-#
-# def set(self, arch):
-# self.__current = arch
-#
-# @property
-# def all(self):
-# return self._arches
-#
-# @property
-# def default(self):
-# return self._arches.get("i686")
-#
-# @property
-# def current(self):
-# if not self.__current:
-# return self.default
-# return self._arches[self.__current]
-#
-# def __getitem__(self, key):
-# return self._arches[key]
-#
-
# Create a globally useable instance of the configuration
config = Config()
-#arches = Architectures(config["architecture_config"])
--- /dev/null
+#!/usr/bin/python
+
+# A decorator to use singleton on a class
+# http://en.wikipedia.org/wiki/Singleton_pattern#Python_.28using_decorators.29
+def singleton(cls):
+ instance_container = []
+ def getinstance():
+ if not len(instance_container):
+ instance_container.append(cls())
+ return instance_container[0]
+ return getinstance
+
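
A quick illustration of the decorator's contract (Foo is a stand-in
class, not part of this change):

    @singleton
    class Foo(object):
        pass

    assert Foo() is Foo()

Note that the decorated name is rebound to getinstance, which takes no
arguments, so a decorated class can no longer receive constructor
arguments. That is fine for Architectures, whose __init__ takes none.
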
import os
import re
-import arches
-import paks
-import repo
+import architectures
+import repositories
+import packages
-from constants import *
from exception import *
DEP_INVALID, DEP_FILE, DEP_LIBRARY, DEP_PACKAGE = range(4)
+
class Dependency(object):
def __init__(self, identifier, origin=None):
self.identifier = identifier
s += " by %s" % self.origin.name
s += "(%s)" % os.path.basename(self.origin.filename)
s += ">"
-
return s
@property
class DependencySet(object):
- def __init__(self, package):
- self.package = package
+	def __init__(self, dependencies=(), arch=None):
+ if not arch:
+ arches = architectures.Architectures()
+ arch = arches.get_default()
+ self.arch = arch
+
+ self.repo = repositories.BinaryRepository(self.arch)
- self.repo_bin = repo.BinaryRepository(self.arch.name)
+ # initialize package lists
+ self._dependencies = []
+ self._items = []
+ self._provides = []
+
+ # add all provided dependencies
+ for dependency in dependencies:
+ self.add_dependency(dependency)
logging.debug("Successfully initialized %s" % self)
- self.reset()
def __repr__(self):
- return "<%s %s>" % (self.__class__.__name__, self.package.id)
+ return "<%s>" % (self.__class__.__name__)
def add_dependency(self, item):
assert isinstance(item, Dependency)
logging.debug("Added new dependency %s" % item)
def add_package(self, item):
- assert isinstance(item, paks.BinaryPackage)
+ assert isinstance(item, packages.BinaryPackage)
if item in self._items:
return
self._provides.append(item)
self._provides.sort()
- def reset(self):
- self._dependencies = []
- self._items = []
- self._provides = []
- logging.debug("Reset %s" % self)
-
def resolve(self):
logging.debug("Resolving %s" % self)
- #self.reset()
-
- # Always add the default packages
- _dependencies = [Dependency(i) for i in config["mandatory_packages"]]
-
- # Add build dependencies from package
- _dependencies += self.package.get_dependencies("build")
-
- for dependency in _dependencies:
- self.add_dependency(dependency)
-
# Safe for endless loop
counter = 1000
- while self.dependencies:
+ while self._dependencies:
counter -= 1
if not counter:
logging.debug("Maximum count of dependency loop was reached")
logging.debug("Processing dependency: %s" % dependency.identifier)
if dependency.type == DEP_PACKAGE:
- package = self.repo_bin.find_package_by_name(dependency.identifier)
+ package = self.repo.find_package_by_name(dependency.identifier)
if package:
# Found a package and add this
self.add_package(package)
continue
elif dependency.type == DEP_LIBRARY:
- for package in self.repo_bin.list():
+ for package in self.repo.all:
for provides in package.get_provides():
if provides.match(dependency):
self.add_package(package)
break
elif dependency.type == DEP_FILE:
- package = self.repo_bin.find_package_by_file(dependency.identifier)
+ package = self.repo.find_package_by_file(dependency.identifier)
if package:
self.add_package(package)
continue
#logging.error("Unresolveable dependencies: %s" % self.dependencies)
raise DependencyResolutionError, "%s" % self.dependencies
- @property
- def arch(self):
- return self.package.arch
-
@property
def dependencies(self):
return sorted(self._dependencies)
@property
def provides(self):
return self._provides
+
+
+if __name__ == "__main__":
+ import architectures
+ arches = architectures.Architectures()
+
+ ds = DependencySet(arch=arches.get("i686"))
+ ds.add_dependency(Dependency("/bin/bash"))
+ ds.resolve()
+ print ds.packages
import stat
import logger
+import mail
import util
from constants import *
-
# XXX to be moved to somewhere else
ENVIRONMENT_ARGS = ["PATH", "PWD" ]
return env
-class Environment(object):
+
+class _Environment(object):
kernel_version = os.uname()[2]
- def __init__(self, buildset):
- self.buildset = buildset
+ def __init__(self, arch):
+ self.arch = arch
logging.debug("Successfully initialized %s" % self)
# XXX check if already locked
self.prepare()
- @property
- def arch(self):
- return self.buildset.arch
-
- @property
- def package(self):
- return self.buildset.package
-
- @property
- def logger(self):
- return logging.getLogger() # XXX just for now
-
def chrootPath(self, *args):
- return os.path.join(BUILDDIR, "environments", self.package.id, *args)
+ raise NotImplementedError
def clean(self):
logging.debug("Cleaning environment %s" % self)
if os.path.exists(self.chrootPath()):
util.rm(self.chrootPath())
-
+
def prepare(self):
self.clean()
f.write("127.0.0.1 localhost\n")
f.close()
- @property
- def buildroot(self):
- if not hasattr(self, "__buildroot"):
- self.__buildroot = "buildroot.%s" % util.random_string()
+ def extract(self, package, *args):
+ logging.info("Extracting %s" % package)
- return self.__buildroot
+ package.extract(self.chrootPath(*args))
- @property
- def environ(self):
+ def variables(self):
		env = dict({
"HOME" : "/root",
"PATH" : "/sbin:/bin:/usr/sbin:/usr/bin",
"PKGROOT" : "/usr/src/pkgs",
"TARGET" : "%s-ipfire-linux-gnu" % self.arch.machine,
"TARGET_MACHINE" : self.arch.machine,
- "BUILDROOT" : "/%s" % self.buildroot,
"CHROOT" : "1", # XXX to be removed
"CFLAGS" : self.arch.cflags,
"CXXFLAGS" : self.arch.cxxflags,
def doChroot(self, command, shell=True, *args, **kwargs):
ret = None
try:
- env = self.environ
+ env = self.variables()
if kwargs.has_key("env"):
env.update(kwargs.pop("env"))
return ret
- def extract(self):
- logging.debug("Extracting all packages and tools")
- for i in self.buildset.dependency_set.packages:
- i.extract(self.chrootPath())
-
- def build(self, *args, **kwargs):
- # Extract all packages and tools
- self.extract()
-
- try:
- self.make("package")
- except:
- if config["cleanup_on_failure"]:
- self.clean()
- # XXX send email report
- raise
-
- if config["cleanup_on_success"]:
- self.clean()
-
- def make(self, target):
- file = "/usr/src%s" % self.package.filename[len(BASEDIR):]
-
- return self.doChroot("make -C %s -f %s %s" % \
- (os.path.dirname(file), file, target), shell=True)
-
def _mountall(self):
logging.debug("Mounting environment")
for cmd in self.mountCmds:
return ret
+ @property
+ def logger(self):
+ return logging.getLogger()
-class Shell(Environment):
- def shell(self, args=[]):
- self.extract()
+class Build(_Environment):
+ def __init__(self, package):
+ self.package = package
+
+ _Environment.__init__(self, self.package.arch)
+
+ @property
+ def logger(self):
+ return logging.getLogger() # XXX just for now
+
+ def chrootPath(self, *args):
+ return os.path.join(BUILDDIR, "environments", self.package.id, *args)
+
+ @property
+ def buildroot(self):
+		# hasattr(self, "__buildroot") would never match because of name
+		# mangling inside the class body, so cache the value under a
+		# single-underscore name instead
+		if not hasattr(self, "_buildroot"):
+			self._buildroot = "buildroot.%s" % util.random_string()
+
+		return self._buildroot
+
+ def variables(self):
+ v = _Environment.variables(self)
+ v.update({ "BUILDROOT" : "/%s" % self.buildroot })
+ return v
+
+ def build(self, *args, **kwargs):
+ try:
+ self.make("package")
+ except:
+ if config["cleanup_on_failure"]:
+ self.clean()
+
+ # Send email report about an error
+ mail.report_error(self.package)
+ raise
+
+ if config["cleanup_on_success"]:
+ self.clean()
+
+ def make(self, target):
+ file = "/usr/src%s" % self.package.filename[len(BASEDIR):]
+
+ return self.doChroot("make -C %s -f %s %s" % \
+ (os.path.dirname(file), file, target), shell=True)
+
+
+class Shell(Build):
+ def shell(self, args=[]):
# Preparing source...
self.make("prepare")
command = "chroot %s /usr/src/tools/chroot-shell %s" % \
(self.chrootPath(), " ".join(args))
- for key, val in self.environ.items():
+ for key, val in self.variables().items():
command = "%s=\"%s\" " % (key, val) + command
if self.package.source_dir:
--- /dev/null
+#!/usr/bin/python
+
+import logging
+import os
+
+import dependencies
+import repositories
+import util
+
+from constants import *
+from environ import _Environment
+
+
+class Generator(_Environment):
+ def __init__(self, type, arch=None):
+ _Environment.__init__(self, arch)
+
+ self.type = type
+
+ self.repos = repositories.SourceRepositories(self.arch)
+
+ for r in self.repos.all:
+ if not r.completely_built:
+				raise Exception, "Repository is not completely built: %s" % r
+
+ self.build_deps = dependencies.DependencySet(arch=self.arch)
+ deps = [
+ "basesystem",
+ "dracut",
+ "squashfs-tools",
+ "syslinux",
+ "/usr/bin/mkisofs",
+ ]
+ for dep in deps:
+ dep = dependencies.Dependency(dep)
+ self.build_deps.add_dependency(dep)
+
+ self.installer_deps = dependencies.DependencySet(arch=self.arch)
+ deps = [
+ "basesystem",
+ #"installer",
+ ]
+ for dep in deps:
+ dep = dependencies.Dependency(dep)
+ self.installer_deps.add_dependency(dep)
+
+ def chrootPath(self, *args):
+ return os.path.join(GENDIR, self.arch.name, *args)
+
+ @property
+ def logger(self):
+ return logging.getLogger()
+
+ def run(self):
+ # Unpacking packages we need in this environment
+ logging.info("Resolving dependencies...")
+ self.build_deps.resolve()
+ self.installer_deps.resolve()
+
+ for package in self.build_deps.packages:
+ self.extract(package)
+
+ util.mkdir(self.chrootPath("installer"))
+ for package in self.installer_deps.packages:
+ self.extract(package, "installer")
+
+ util.mkdir(self.chrootPath("packages"))
+ for p in self.repos.packages:
+ for bin in p.binary_files:
+ os.link(os.path.join(PACKAGESDIR, self.arch.name, bin),
+ self.chrootPath("packages", bin))
+
+ self.doChroot("/usr/src/tools/generator %s" % self.type, shell=True)
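
The generator reuses the chroot plumbing from environ._Environment: it
resolves two dependency sets (build tools and installer payload),
extracts both into the chroot, hard-links every built package into
packages/ and finally hands control to the in-chroot generator script.
Invocation goes through call_generate above, roughly:

    arch = architectures.Architectures().get_default()
    gen = generators.Generator("iso", arch)
    gen.run()
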
--- /dev/null
+#!/usr/bin/python
+
+import email.mime.multipart
+import email.mime.text
+import logging
+import os
+import smtplib
+
+from constants import *
+
+def report_error(package):
+ # Do not send a report if no recipient is configured
+ if not config["error_report_recipient"]:
+ return
+
+ try:
+ connection = smtplib.SMTP(config["smtp_server"])
+ #connection.set_debuglevel(1)
+
+ if config["smtp_user"] and config["smtp_password"]:
+ connection.login(config["smtp_user"], config["smtp_password"])
+
+ except smtplib.SMTPConnectError, e:
+ logging.error("Could not establish a connection to the smtp server: %s" % e)
+ return
+ except smtplib.SMTPAuthenticationError, e:
+ logging.error("Could not successfully login to the smtp server: %s" % e)
+ return
+
+ msg = email.mime.multipart.MIMEMultipart()
+ msg["From"] = config["error_report_sender"]
+ msg["To"] = config["error_report_recipient"]
+ msg["Subject"] = config["error_report_subject"] % package.all
+ msg.preamble = 'You will not see this in a MIME-aware mail reader.\n'
+
+ body = """\
+The package %(name)s failed to build.
+This email contains a short report about the error.
+
+Package information:
+ Name : %(name)s - %(summary)s
+ Version : %(version)s
+ Release : %(release)s
+
+ This package is maintained by %(maintainer)s.
+
+
+A detailed logfile is attached.
+
+Sincerely,
+ Naoki
+ """ % {
+ "name" : package.name,
+ "summary" : package.summary,
+ "version" : package.version,
+ "release" : package.release,
+ "maintainer" : package.maintainer,
+ }
+
+ msg.attach(email.mime.text.MIMEText(body))
+
+ # Read log and append it to mail
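+	# Everything before the last LOG_MARKER belongs to an earlier build
+	# attempt and is dropped, so only the latest run is attached.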
+ loglines = []
+ if os.path.exists(package.logfile):
+ f = open(package.logfile)
+		for line in f:
+			line = line.rstrip()
+			if line.endswith(LOG_MARKER):
+				# Reset log
+				loglines = []
+
+			loglines.append(line)
+
+		f.close()
+
+ if not loglines:
+		loglines = ["No log output was found."]
+
+ log = email.mime.text.MIMEText("\n".join(loglines), _subtype="plain")
+ log.add_header('Content-Disposition', 'attachment',
+ filename="%s.log" % package.id)
+ msg.attach(log)
+
+ try:
+ connection.sendmail(config["error_report_sender"],
+ config["error_report_recipient"], msg.as_string())
+ except Exception, e:
+ logging.error("Could not send error report: %s: %s" % (e.__class__.__name__, e))
+ return
+
+ connection.quit()
import logging
import os
import re
-import tarfile
-import arches
-import deps
+import architectures
+import dependencies
import environ
import io
import util
from constants import *
class Package(object):
- def __init__(self, name, arch):
- self.name = name
- self.arch = arches.get(arch)
-
def __repr__(self):
return "<%s %s:%s>" % \
(self.__class__.__name__, self.name, self.arch.name)
+ #@property
+ #def arch(self):
+ # raise NotImplementedError
+
+ @property
+ def name(self):
+ return self._info["PKG_NAME"]
+
@property
def id(self):
return "%s-%s-%s.%s" % \
class SourcePackage(Package):
- def __init__(self, name, repo, arch):
- Package.__init__(self, name, arch)
- self.repo = repo
+ def __init__(self, filename, repo, arch):
+ self.arch = arch
+ self.filename = filename
+ self.repository = repo
self.init()
- logging.debug("Successfully initialized %s" % self)
- @property
- def filename(self):
- return os.path.join(self.repo.path, self.name, self.name + ".nm")
+ logging.debug("Successfully initialized %s" % self)
def init(self):
self._info = {}
"PKGROOT" : PKGSDIR,
})
- output = util.do("make -f %s" % self.filename,
+ output = util.do("make -f %s" % os.path.basename(self.filename),
shell=True,
- cwd=os.path.join(self.repo.path, self.name),
+ cwd=os.path.dirname(self.filename),
returnOutput=1,
env=env)
key, val = m.groups()
self._info[key] = val.strip("\"")
+ # XXX should return a dependencyset
def get_dependencies(self, type=""):
type2var = {
"" : "PKG_DEPENDENCIES",
type = type2var[type]
- return [deps.Dependency(d, origin=self) for d in self._info[type].split()]
+ return [dependencies.Dependency(d, origin=self) for d in self._info[type].split()]
+
+ @property
+ def source_dir(self):
+ return self._info.get("SOURCE_DIR", "")
@property
def summary(self):
return self._info["PKG_SUMMARY"]
@property
- def source_dir(self):
- return self._info.get("SOURCE_DIR", "")
+ def binary_files(self):
+		return self._info.get("PKG_PACKAGES_FILES", "").split()
+
+ @property
+ def is_built(self):
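+		# A source package counts as built when all of its binary files
+		# exist in the packages directory for this architecture.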
+		for filename in self.binary_files:
+			filename = os.path.join(PACKAGESDIR, self.arch.name, filename)
+			if not os.path.exists(filename):
+				return False
+
+ return True
class BinaryPackage(Package):
f.close()
return ret
-
def init(self):
self._info = {}
self._filelist = []
@property
def arch(self):
- return arches.get(self._info.get("PKG_ARCH", arches.get_default()))
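+		# Fall back to the default architecture if the package metadata
+		# does not carry PKG_ARCH.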
+ arches = architectures.Architectures()
+
+ arch = self._info.get("PKG_ARCH", None)
+ if arch:
+ arch = arches.get(arch)
+ else:
+ arch = arches.get_default()
+
+ return arch
@property
def name(self):
# Compatibility to older package format
objects += self._info.get("PKG_REQUIRES", "").split()
- return [deps.Dependency(o, origin=self) for o in objects]
+ return [dependencies.Dependency(o, origin=self) for o in objects]
def get_provides(self):
- return [deps.Provides(p, origin=self) \
+ return [dependencies.Provides(p, origin=self) \
for p in self._info.get("PKG_PROVIDES", "").split()]
@property
+++ /dev/null
-#!/usr/bin/python
-
-# XXX make better version comparison
-
-import logging
-import os
-import re
-
-import arches
-import paks
-
-from constants import *
-
-def find_source_repos():
- # XXX detection for all repositories missing
- return [SourceRepository("core", arches.get_default()),]
-
-def find_source_package(name):
- for r in find_source_repos():
- p = r.get_package(name)
- if p:
- return p
-
-class Repository(object):
- def __init__(self, arch):
- self.arch = arches.get(arch)
-
- logging.debug("Successfully initialized %s" % self)
-
-
-class SourceRepository(Repository):
- _cache = {}
-
- def __init__(self, name, arch):
- self.name = name
-
- Repository.__init__(self, arch)
-
- @property
- def path(self):
- return os.path.join(PKGSDIR, self.name)
-
- def list(self):
- if not self._cache.has_key(self.arch.name):
- l = []
-
- for package in os.listdir(self.path):
- filename = os.path.join(self.path, package, package + ".nm")
- if not os.path.exists(filename):
- continue
-
- package = paks.SourcePackage(package, repo=self, arch=self.arch.name)
-
- l.append(package)
-
- self._cache[self.arch.name] = l
-
- return self._cache[self.arch.name]
-
- def get_package(self, name):
- for package in self.list():
- if package.name == name:
- return package
-
- return None
-
-
-class BinaryRepository(Repository):
- _cache = {}
-
- @property
- def path(self):
- return os.path.join(PACKAGESDIR, self.arch.name)
-
- def list(self):
- if not self._cache.has_key(self.arch.name):
- l = []
-
- for package in os.listdir(self.path):
- # Construct filename
- package = os.path.join(self.path, package)
-
- # Create package instance
- package = paks.BinaryPackage(package)
-
- # Append package to list
- l.append(package)
-
- self._cache[self.arch.name] = l
-
- return self._cache[self.arch.name]
-
- def get_packages(self, name):
- l = []
-
- # Check every package if name is equal
- for package in self.list():
- if package.name == name:
- l.append(package)
-
- return l
-
- def get_latest_package(self, name):
- # XXX to be done
- l = self.get_packages(name)
- if not len(l):
- return
-
- return l[-1]
-
- def find_package_by_name(self, name, all=False):
- if all:
- return self.get_packages(name)
-
- return self.get_latest_package(name)
-
- def find_package_by_file(self, file):
- for package in self.list():
- if file in package.filelist:
- return package
- # XXX first match is not always the best result
-
-
-
-
-
-
-
--- /dev/null
+#!/usr/bin/python
+
+import logging
+import operator
+import os
+
+import architectures
+import packages
+
+from constants import *
+from decorators import *
+
+class Repository(object):
+ def __init__(self, arch):
+ assert isinstance(arch, architectures.Architecture)
+ self.arch = arch
+
+ logging.debug("Successfully initialized %s" % self)
+
+ def __repr__(self):
+ return "<%s %s>" % (self.__class__.__name__, self.path)
+
+ @property
+ def path(self):
+ raise NotImplementedError
+
+
+class BinaryRepository(Repository):
+ _cache = {}
+
+ @property
+ def path(self):
+ return os.path.join(PACKAGESDIR, self.arch.name)
+
+ def find_package_by_name(self, name):
+ pkgs = self.find_packages_by_name(name)
+
+ if pkgs:
+ return pkgs[0]
+
+ def find_packages_by_name(self, name):
+ pkgs = []
+
+ for package in self.all:
+ if package.name == name:
+ pkgs.append(package)
+
+ return sorted(pkgs, key=operator.attrgetter("release"), reverse=True)
+
+ @property
+ def all(self):
+ if not self._cache.has_key(self.arch.name):
+ l = []
+
+ for package in os.listdir(self.path):
+ # Construct filename
+ package = os.path.join(self.path, package)
+
+ # Create package instance
+ package = packages.BinaryPackage(package)
+
+ # Append package to list
+ l.append(package)
+
+ self._cache[self.arch.name] = l
+
+ return self._cache[self.arch.name]
+
+ def find_package_by_file(self, filename):
+ pkgs = self.find_packages_by_file(filename)
+
+ if pkgs:
+ return pkgs[0]
+
+ def find_packages_by_file(self, filename):
+ pkgs = []
+
+ for package in self.all:
+ if filename in package.filelist:
+ pkgs.append(package)
+
+ return sorted(pkgs, key=operator.attrgetter("release"), reverse=True)
+
+
+class SourceRepository(Repository):
+ def __init__(self, name, arch):
+ self.name = name
+
+ Repository.__init__(self, arch)
+
+ def __iter__(self):
+ pkgs = [os.path.join(self.path, p, p + ".nm") for p in self.packages]
+
+ args = {
+ "arch" : self.arch,
+ "repo" : self,
+ }
+
+ return PackageIterator(pkgs, packages.SourcePackage, args)
+
+ def find_package_by_name(self, name, fast=False):
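+		# Fast path: derive the file name <name>/<name>.nm directly
+		# instead of instantiating every package in the repository.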
+ if fast:
+ filename = os.path.join(self.path, name, name + ".nm")
+ if os.path.exists(filename):
+ return packages.SourcePackage(filename, repo=self, arch=self.arch)
+
+ else:
+ for package in self:
+ if package.name == name:
+ return package
+
+ @property
+ def path(self):
+ return os.path.join(PKGSDIR, self.name)
+
+ @property
+ def packages(self):
+ return os.listdir(self.path)
+
+ @property
+ def completely_built(self):
+ for p in self:
+ if not p.is_built:
+ return False
+
+ return True
+
+
+class PackageIterator(object):
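+	# Wraps a list of package files and instantiates the given package
+	# class lazily, one package per iteration step.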
+	def __init__(self, paks, package_cls=None, cls_args=None):
+		self.packages = paks
+		self.package_cls = package_cls
+		self.package_cls_args = cls_args or {}
+
+ self.__i = 0
+
+ def __iter__(self):
+ return self
+
+	def next(self):
+		if self.__i >= len(self.packages):
+			raise StopIteration
+
+		package = self.packages[self.__i]
+		self.__i += 1
+
+		if self.package_cls:
+			package = self.package_cls(package, **self.package_cls_args)
+
+		return package
+
+
+class SourceRepositories(object):
+ def __init__(self, arch):
+ assert isinstance(arch, architectures.Architecture)
+ self.arch = arch
+
+ self._repositories = []
+
+ self.find_all()
+
+ def find_all(self):
+ for repo in os.listdir(PKGSDIR):
+ # Skip all non-directories
+ if not os.path.isdir(os.path.join(PKGSDIR, repo)):
+ continue
+
+ logging.debug("Found source repository: %s" % repo)
+
+ repo = SourceRepository(repo, self.arch)
+ self._repositories.append(repo)
+
+ @property
+ def all(self):
+ return self._repositories[:]
+
+ def find_package_by_name(self, name):
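+		# Try the cheap file name lookup in all repositories first and
+		# fall back to the full scan only if that did not match.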
+ for fast in (True, False):
+ for repo in self._repositories:
+ package = repo.find_package_by_name(name, fast=fast)
+ if package:
+ return package
+
+ @property
+ def packages(self):
+ #pkgs = []
+ #for repository in self._repositories:
+ # pkgs += [p for p in repository]
+ #
+ #return pkgs
+
+ for repository in self._repositories:
+ for p in repository:
+ yield p
+
+
+if __name__ == "__main__":
+ arches = architectures.Architectures()
+
+ r = SourceRepositories(arches.get("i686"))
+
+ for repo in r.all:
+ print "%s" % repo.name
+
+ #for pack in repo:
+ # print " %s" % pack.name
+
+	#for package in ("gcc", "screen", "iptables", "system-release", "glibc", "nonexistent"):
+ # print "Searching for %s" % package,
+ # print r.find_package_by_name(package)
+
+ b = BinaryRepository(arches.get("i686"))
+
+ print b.find_packages_by_name("aiccu")
+ print b.find_package_by_name("aiccu")
import sys
import termios
-import arches
+import architectures
from constants import *
+arches = architectures.Architectures()
+
class ParsingError(Exception):
pass
Option("quiet", ["-q", "--quiet"], help="Set quiet mode"),
Option("debug", ["-d", "--debug"], help="Set debugging mode"),
Choice("arch", ["-a", "--arch"], help="Set architecture",
- choices=[arch.name for arch in arches.all()]),
+ choices=[arch.name for arch in arches.all]),
],
parsers=[
# Build
arguments=[
List("packages"),
]),
- Parser("clean", help="Cleanup unused tarballs"),
]),
# Check
Argument("package", help="Package to process."),
]),
- # Repository
- Parser("repository",
- help="Repository commands",
- parsers=[
- Parser("clean",
- help="Cleanup the repository",
- arguments=[
- List("names", help="List of repositories"),
- ]),
- Parser("build",
- help="Build the repository",
- arguments=[
- List("names", help="List of repositories"),
- ]),
- ]),
-
# Generator
Parser("generate",
help="Generator command",
LABEL install
MENU label ^Install a new @NAME@ system
MENU default
- KERNEL @SNAME@0
- APPEND initrd=initrd0 root=CDLABEL=@NAME@_@VERSION@ rootfstype=iso9660 mode=install quiet ro
+ KERNEL vmlinuz-@KERNEL@
+ APPEND initrd=initrd-@KERNEL@.img root=CDLABEL=@NAME@_@VERSION@ rootfstype=iso9660 mode=install quiet ro
label rescue
MENU label ^Rescue installed @NAME@ system
- KERNEL @SNAME@0
- APPEND initrd=initrd0 root=CDLABEL=@NAME@_@VERSION@ rootfstype=iso9660 mode=rescue quiet ro
+ KERNEL vmlinuz-@KERNEL@
+ APPEND initrd=initrd-@KERNEL@.img root=CDLABEL=@NAME@_@VERSION@ rootfstype=iso9660 mode=rescue quiet ro
label local
MENU label Boot from ^local drive
LOCALBOOT 0xffff
FILES=$(for i in $FILES; do echo $i; done | sort -u)
for file in $FILES; do
- echo "Extracting $(basename ${file})..."
+ #echo "Extracting $(basename ${file})..."
cpio --extract --quiet -H newc --to-stdout data.img < $file | \
tar --extract --use-compress-program=xz -C ${ROOT}
done
-#!/bin/bash -x
+#!/bin/bash
BOOTLOADER_DIR=/usr/src/src/bootloader
IMAGES_DIR=/usr/src/images
-MKLIVERAMFS=$(dirname ${0})/mkliveramfs
-
ISO_FILENAME=${DISTRO_SNAME}-${DISTRO_VERSION}-${TARGET}.iso
-install_config() {
+. $(dirname ${0})/common-functions
+
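+# Assumes the entries in /lib/modules sort by version, so the last one
+# of the glob is taken as the latest kernel release.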
+function find_latest_kernel_release() {
+ local i
+ for i in /lib/modules/*; do
+ i=$(basename ${i})
+ done
+
+ echo "${i}"
+}
+
+KERNEL_RELEASE=$(find_latest_kernel_release)
+
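+# Pack a directory tree into a squashfs image; /boot is excluded because
+# kernel and initrd are installed separately by install_kernel().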
+function installer_image() {
+ local file=${1}
+ local dir=${2}
+
+	( cd ${dir} && mksquashfs * ${file} -no-progress -e boot )
+}
+
+function install_config() {
local src=${1}
local dest=${2}
-e "s/@SNAME@/${DISTRO_SNAME}/g" \
-e "s/@VERSION@/${DISTRO_VERSION}/g" \
-e "s/@SLOGAN@/${DISTRO_SLOGAN}/g" \
+ -e "s/@KERNEL@/${KERNEL_RELEASE}/g" \
< ${src} > ${dest}
}
-install_isolinux() {
+function install_isolinux() {
local dest=${1}
mkdir -p ${dest} 2>/dev/null
install_kernel ${dest}
}
-install_kernel() {
+function install_kernel() {
local dest=${1}
mkdir -p ${dest} 2>/dev/null
- local release
- local releases
- for release in /lib/modules/*; do
- [ ! -d "${release}" ] && continue
- releases="${releases} $(basename ${release})"
+ local file
+ for file in vmlinuz-${KERNEL_RELEASE} initrd-${KERNEL_RELEASE}.img; do
+ cp -f /boot/${file} ${dest}/${file}
done
- releases=$(echo ${releases})
-
- # Grab first release
- release=$(awk '{ print $1 }' <<<${releases})
-
- if [ -z "${release}" ]; then
- echo "No kernel image installed" >&2
- return 1
- fi
-
- cp -f /boot/vmlinuz-${release} ${dest}/${DISTRO_SNAME}0
-
- # Create liveramfs
- ${MKLIVERAMFS} -v --with-net -f ${dest}/initrd0 ${release}
}
-copy_packages() {
- local dest=${1}
-
- mkdir -p ${dest} 2>/dev/null
-
- for i in ${ALL_PACKAGES}; do
- cp -f /usr/src/packages/${i} ${dest}/
- done
-}
-
-make_installer() {
- local dest=${1}
-
- mkdir -p ${dest} 2>/dev/null
-
- pushd /installer
-
- # Copy installer overlay
- cp -prf /usr/src/src/install/* .
-
- rm -rf /installer/usr/include
-
- for dir in dev proc sys; do
- mkdir -p ${dir} 2>/dev/null
- done
- install -dv -m 1777 tmp var/tmp
-
- # XXX add -comp lzma here when kernel supports it
- mksquashfs * ${dest}/installer.sfs -no-progress
-
- popd
-}
-
-## main ##
+case "${1}" in
+ iso)
+ log DEBUG "Creating ISO image..."
-type=${1}
+ ISO_DIR=$(mktemp -d)
+ ISO_FILE=$(mktemp)
-case "${type}" in
- iso)
- mkdir /iso
- cd /iso
+ # Copy installer image to ISO
+ installer_image ${ISO_DIR}/installer.sfs /installer
- copy_packages packages
+ # Install bootloader
+ install_isolinux ${ISO_DIR}/isolinux
- make_installer /iso
- install_isolinux /iso/isolinux
+ # "Copy" packages
+ mv /packages ${ISO_DIR}/Packages
- cd /iso && \
+ cd ${ISO_DIR} && \
mkisofs -J -r -V "${DISTRO_NAME}_${DISTRO_VERSION}" \
-b isolinux/isolinux.bin -c isolinux/boot.cat -no-emul-boot \
- -boot-load-size 4 -boot-info-table . > ${IMAGES_DIR}/${ISO_FILENAME}
+ -boot-load-size 4 -boot-info-table . > ${ISO_FILE}
+
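+		# Copy the finished image from the temporary file into IMAGES_DIR.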
+ cat ${ISO_FILE} > ${IMAGES_DIR}/${ISO_FILENAME}
;;
- "")
+ *)
+ log ERROR "Unknown type of image: ${1}"
exit 1
;;
esac