return pakfire.requires(requires)
-def repo_create(path, input_paths, **pakfire_args):
+def repo_create(path, input_paths, type="binary", **pakfire_args):
pakfire = Pakfire(**pakfire_args)
- return pakfire.repo_create(path, input_paths)
+ return pakfire.repo_create(path, input_paths, type=type)
def repo_list(**pakfire_args):
pakfire = Pakfire(**pakfire_args)
return pkgs
- def repo_create(self, path, input_paths):
+ def repo_create(self, path, input_paths, type="binary"):
+ assert type in ("binary", "source",)
+
repo = repository.RepositoryDir(
self,
name="new",
description="New repository.",
path=path,
+ type=type,
)
for input_path in input_paths:
repo.save()
+ return repo
+
def repo_list(self):
return [r for r in self.repos]
self.parse_command_build()
self.parse_command_keepalive()
+ self.parse_command_repoupdate()
# Finally parse all arguments from the command line and save them.
self.args = self.parser.parse_args()
self.server = server.Server()
self.action2func = {
- "build" : self.handle_build,
- "keepalive" : self.handle_keepalive,
+ "build" : self.handle_build,
+ "keepalive" : self.handle_keepalive,
+ "repoupdate" : self.handle_repoupdate,
}
@property
sub_keepalive.add_argument("action", action="store_const",
const="keepalive")
+ def parse_command_repoupdate(self):
+ # Implement the "repoupdate" command.
+ sub_repoupdate = self.sub_commands.add_parser("repoupdate",
+ help=_("Update all repositories."))
+ sub_repoupdate.add_argument("action", action="store_const",
+ const="repoupdate")
+
def handle_keepalive(self):
self.server.update_info()
def handle_build(self):
self.server.build_job()
+
+ def handle_repoupdate(self):
+ self.server.update_repositories()
#!/usr/bin/python
+import tarfile
+
from binary import BinaryPackage
from file import InnerTarFile
from installed import DatabasePackage, InstalledPackage
not.
"""
# XXX We should make this check much better...
- if filename.endswith(".src.%s" % PACKAGE_EXTENSION):
- return SourcePackage(pakfire, repo, filename)
+
+ # Simply check if the given file is a tarfile.
+ if tarfile.is_tarfile(filename):
+ if filename.endswith(".src.%s" % PACKAGE_EXTENSION):
+ return SourcePackage(pakfire, repo, filename)
+
+ return BinaryPackage(pakfire, repo, filename)
elif filename.endswith(".%s" % MAKEFILE_EXTENSION):
return Makefile(pakfire, filename)
-
- return BinaryPackage(pakfire, repo, filename)
import binary
class SolvPackage(base.Package):
- def __init__(self, pakfire, solvable):
- base.Package.__init__(self, pakfire)
+ def __init__(self, pakfire, solvable, repo=None):
+ base.Package.__init__(self, pakfire, repo)
# Save solvable object
self.solvable = solvable
@property
def repo(self):
- repo_name = self.solvable.get_repo_name()
+ if self._repo:
+ return self._repo
+ repo_name = self.solvable.get_repo_name()
return self.pakfire.repos.get_repo(repo_name)
@property
Return the requirements for the build.
"""
return self.metadata.get("PKG_REQUIRES", "").split()
+
+ @property
+ def conflicts(self):
+ return self.metadata.get("PKG_CONFLICTS", "").split()
import re
import cache
+import pakfire.packages as packages
import pakfire.satsolver as satsolver
class RepositoryFactory(object):
def __len__(self):
return self.solver_repo.size()
+ def __iter__(self):
+ pkgs = []
+
+ for solv in self.solver_repo.get_all():
+ pkg = packages.SolvPackage(self.pakfire, solv, self)
+ pkgs.append(pkg)
+
+ return iter(pkgs)
+
@property
def pool(self):
return self.pakfire.pool
class IndexDir(Index):
+ def init(self):
+ self.pkg_type = None
+
+ if self.repo.type == "binary":
+ self.pkg_type = packages.BinaryPackage
+ elif self.repo.type == "source":
+ self.pkg_type = packages.SourcePackage
+
+ assert self.pkg_type
+
def check(self):
pass # XXX to be done
package = packages.open(self.pakfire, self.repo, file)
- if isinstance(package, packages.BinaryPackage):
- if not package.arch in (self.repo.arch, "noarch"):
+ # Find all packages with the given type and skip those of
+ # the other type.
+ if isinstance(package, self.pkg_type):
+ # Check for binary packages if the architecture matches.
+ if isinstance(package, packages.BinaryPackage) and \
+ not package.arch in (self.repo.arch, "noarch"):
logging.warning("Skipped package with wrong architecture: %s (%s)" \
% (package.filename, package.arch))
- print package.type
continue
# Skip all source packages.
- elif isinstance(package, packages.SourcePackage):
+ else:
continue
self.add_package(package)
from pakfire.constants import *
class RepositoryDir(base.RepositoryFactory):
- def __init__(self, pakfire, name, description, path):
+ def __init__(self, pakfire, name, description, path, type="binary"):
base.RepositoryFactory.__init__(self, pakfire, name, description)
# Path to files.
self.path = path
+ # Save type.
+ assert type in ("binary", "source",)
+ self.type = type
+
# Create index
self.index = index.IndexDir(self.pakfire, self)
+ def remove(self):
+ self.index.clear()
+ util.rm(self.path)
+
@property
def priority(self):
"""
import hashlib
import logging
+import os
import random
import socket
import subprocess
pakfire.api.dist(pkgs, resultdirs=[tmpdir,], **pakfire_args)
# Create a kind of dummy repository to link the packages against it.
- repo = pakfire.repository.LocalSourceRepository(self.pakfire,
- "source-%s" % rnd, "Source packages", tmpdir, idx="directory")
- repo.update(force=True)
+		# dict.has_key is deprecated; pop() drops the key if present.
+		pakfire_args.pop("build_id", None)
+ pakfire_args["mode"] = "server"
- return repo
+ repo = pakfire.api.repo_create("source-%s" % rnd, [tmpdir,], type="source",
+ **pakfire_args)
- # XXX don't forget to remove the repository.
+ return repo
def update_all(self):
_files = []
repo = source.update_revision(build["revision"], build_id=build_id)
- # Upload all files in the repository.
- for pkg in repo.get_all():
- self.upload_file(pkg.filename, build_id)
+ try:
+ # Upload all files in the repository.
+ for pkg in repo:
+ path = os.path.join(pkg.repo.path, pkg.filename)
+ self.upload_file(path, build_id)
+ finally:
+ repo.remove()
+
+ def update_repositories(self, limit=2):
+ repos = self.conn.get_repos(limit)
+
+ for repo in repos:
+ files = self.conn.get_repo_packages(repo["id"])
+
+ for arch in repo["arches"]:
+ path = "/pakfire/repositories/%s/%s/%s" % \
+ (repo["distro"]["sname"], repo["name"], arch)
- repo.remove()
+ pakfire.api.repo_create(path, files)
{"write", (PyCFunction)Repo_write, METH_VARARGS, NULL},
{"read", (PyCFunction)Repo_read, METH_VARARGS, NULL},
{"clear", (PyCFunction)Repo_clear, METH_NOARGS, NULL},
+ {"get_all", (PyCFunction)Repo_get_all, METH_NOARGS, NULL},
{ NULL, NULL, 0, NULL }
};
+#include <Python.h>
#include <stdbool.h>
+#include <satsolver/repo.h>
#include <satsolver/repo_solv.h>
#include <satsolver/repo_write.h>
#include "pool.h"
#include "repo.h"
+#include "solvable.h"
PyTypeObject RepoType = {
PyObject_HEAD_INIT(NULL)
Py_RETURN_NONE;
}
+
+PyObject *Repo_get_all(RepoObject *self) {
+ Solvable *s;
+ Id p;
+ Repo *r = self->_repo;
+
+	PyObject *list = PyList_New(0);
+	if (list == NULL)
+		return NULL;
+
+	FOR_REPO_SOLVABLES(r, p, s) {
+		SolvableObject *solv;
+
+		solv = PyObject_New(SolvableObject, &SolvableType);
+		if (solv == NULL) {
+			/* Drop the partially built list before bailing out. */
+			Py_DECREF(list);
+			return NULL;
+		}
+
+		solv->_pool = self->_repo->pool;
+		solv->_id = p;
+
+		/* PyList_Append adds its own reference, so release ours. */
+		PyList_Append(list, (PyObject *)solv);
+		Py_DECREF((PyObject *)solv);
+	}
+
+	/* PyList_New already returned a new reference; an extra INCREF
+	 * here would make the list unfreeable. */
+	return list;
+}
extern PyObject *Repo_write(RepoObject *self, PyObject *args);
extern PyObject *Repo_read(RepoObject *self, PyObject *args);
extern PyObject *Repo_clear(RepoObject *self);
+extern PyObject *Repo_get_all(RepoObject *self);
extern PyTypeObject RepoType;