# mkosi Changelog
-## v20.3
+## v21
- We now handle unmerged-usr systems correctly
- Builtin configs (`mkosi-initrd`, `mkosi-tools`) can now be included
- Added `MicrocodeHost=` setting to only include the CPU-specific
  microcode for the current host system.
- The kernel-install plugin now only includes the CPU-specific
  microcode.
+- Introduced `PackageCacheDirectory=` to set the directory for package
+ manager caches. This setting defaults to a suitable location in the
+ system or user directory, depending on how mkosi is invoked.
+ `CacheDirectory=` is now only used for incremental cached images.
+- Repository metadata is now synced once at the start of each image
+ build and never during an image build. Each image includes a snapshot
+ of the repository metadata in `/mkosi` so that incremental images and
+ extension images can reuse the same snapshot. When building an image
+ intended to be used with `BaseTrees=`, disable `CleanPackageMetadata=`
+ so that the repository metadata in `/mkosi` is not cleaned up;
+ otherwise, extension images using this image as their base tree will
+ not be able to install additional packages.
## v20.2
"--format", str(format),
"--output", output,
"--workspace-dir=/var/tmp",
- "--cache-dir=/var",
+ "--package-cache-dir=/var",
"--output-dir", context.staging_area,
"--extra-tree", f"/usr/lib/modules/{context.kernel_version}:/usr/lib/modules/{context.kernel_version}",
"--extra-tree=/usr/lib/firmware:/usr/lib/firmware",
)
from mkosi.context import Context
from mkosi.distributions import Distribution
-from mkosi.installer import clean_package_manager_metadata, finalize_package_manager_mounts
+from mkosi.installer import clean_package_manager_metadata
from mkosi.kmod import gen_required_kernel_modules, process_kernel_modules
from mkosi.log import ARG_DEBUG, complete_step, die, log_notice, log_step
from mkosi.manifest import Manifest
from mkosi.types import PathString
from mkosi.user import CLONE_NEWNS, INVOKING_USER, become_root, unshare
from mkosi.util import (
+ flatten,
+ flock,
format_rlimit,
make_executable,
one_zero,
"--ro-bind", script, "/work/prepare",
"--ro-bind", cd, "/work/scripts",
"--bind", context.root, context.root,
- *finalize_package_manager_mounts(context),
+ *context.config.distribution.package_manager(context.config).mounts(context),
"--chdir", "/work/src",
],
scripts=hd,
if context.config.build_dir
else []
),
- *finalize_package_manager_mounts(context),
+ *context.config.distribution.package_manager(context.config).mounts(context),
"--chdir", "/work/src",
],
scripts=hd,
"--ro-bind", cd, "/work/scripts",
"--bind", context.root, context.root,
"--bind", context.staging, "/work/out",
- *finalize_package_manager_mounts(context),
+ *context.config.distribution.package_manager(context.config).mounts(context),
"--chdir", "/work/src",
],
scripts=hd,
"--ro-bind", cd, "/work/scripts",
"--bind", context.root, context.root,
"--bind", context.staging, "/work/out",
- *finalize_package_manager_mounts(context),
+ *context.config.distribution.package_manager(context.config).mounts(context),
"--chdir", "/work/src",
],
scripts=hd,
"--cache-only", str(context.config.cache_only),
"--output-dir", str(context.workspace / "initrd"),
*(["--workspace-dir", str(context.config.workspace_dir)] if context.config.workspace_dir else []),
- "--cache-dir", str(context.cache_dir),
+ *(["--cache-dir", str(context.config.cache_dir)] if context.config.cache_dir else []),
+ *(["--package-cache-dir", str(context.config.package_cache_dir)] if context.config.package_cache_dir else []),
*(["--local-mirror", str(context.config.local_mirror)] if context.config.local_mirror else []),
"--incremental", str(context.config.incremental),
"--acl", str(context.config.acl),
complete_step("Building default initrd"),
setup_workspace(args, config) as workspace,
):
- build_image(Context(args, config, workspace=workspace, resources=context.resources))
+ build_image(
+ Context(
+ args,
+ config,
+ workspace=workspace,
+ resources=context.resources,
+ # Re-use the repository metadata snapshot from the main image for the initrd.
+ package_cache_dir=context.package_cache_dir,
+ )
+ )
return config.output_dir / config.output
raise
+def copy_package_manager_state(context: Context) -> None:
+ if have_cache(context.config) or context.config.base_trees:
+ return
+
+ subdir = context.config.distribution.package_manager(context.config).subdir(context.config)
+
+ for d in ("cache", "lib"):
+ src = context.config.package_cache_dir_or_default() / d / subdir
+ if not src.exists():
+ continue
+
+ caches = context.config.distribution.package_manager(context.config).cache_subdirs(src) if d == "cache" else []
+
+ with tempfile.TemporaryDirectory() as tmp:
+ os.chmod(tmp, 0o755)
+
+ # cp doesn't support excluding directories but we can imitate it by bind mounting an empty directory over
+ # the directories we want to exclude.
+ exclude = flatten(["--ro-bind", tmp, os.fspath(p)] for p in caches)
+
+ dst = context.root / "mkosi" / d / subdir
+ with umask(~0o755):
+ dst.mkdir(parents=True, exist_ok=True)
+
+ with flock(src):
+ copy_tree(
+ src, dst,
+ tools=context.config.tools(),
+ preserve=False,
+ sandbox=context.sandbox(
+ options=["--ro-bind", src, src, "--bind", dst.parent, dst.parent, *exclude]
+ ),
+ )
+
+
def build_image(context: Context) -> None:
manifest = Manifest(context.config) if context.config.manifest_format else None
install_base_trees(context)
cached = reuse_cache(context)
context.config.distribution.setup(context)
+
+ # The repository metadata is copied into the image root directory to ensure it remains static and available
+ # when using the image to build system extensions. This has to be ordered after setup() as the cache keys
+ # might depend on config files created by the distribution's setup() method.
+ copy_package_manager_state(context)
install_package_directories(context)
*(["--output-dir", str(config.output_dir)] if config.output_dir else []),
*(["--workspace-dir", str(config.workspace_dir)] if config.workspace_dir else []),
*(["--cache-dir", str(config.cache_dir)] if config.cache_dir else []),
+ *(["--package-cache-dir", str(config.package_cache_dir)] if config.package_cache_dir else []),
"--incremental", str(config.incremental),
"--acl", str(config.acl),
*([f"--package={package}" for package in config.tools_tree_packages]),
with complete_step(f"Clearing out build directory of {config.name()} image…"):
rmtree(*config.build_dir.iterdir())
- if remove_package_cache and config.cache_dir and config.cache_dir.exists() and any(config.cache_dir.iterdir()):
+ if (
+ remove_package_cache and
+ config.package_cache_dir and
+ config.package_cache_dir.exists() and
+ any(config.package_cache_dir.iterdir())
+ ):
with complete_step(f"Clearing out package cache of {config.name()} image…"):
rmtree(
*(
- config.cache_dir / p / d
- for p in ("cache", "lib")
- for d in ("apt", "dnf", "libdnf5", "pacman", "zypp")
+ config.package_cache_dir / d / config.distribution.package_manager(config).subdir(config)
+ for d in ("cache", "lib")
),
)
+@contextlib.contextmanager
+def rchown_package_manager_dirs(config: Config) -> Iterator[None]:
+ try:
+ yield
+ finally:
+ if INVOKING_USER.is_regular_user():
+ with complete_step("Fixing ownership of package manager cache directory"):
+ subdir = config.distribution.package_manager(config).subdir(config)
+ for d in ("cache", "lib"):
+ INVOKING_USER.rchown(config.package_cache_dir_or_default() / d / subdir)
+
+
+def sync_repository_metadata(args: Args, config: Config, *, resources: Path) -> None:
+ if have_cache(config) or config.cache_only or config.base_trees:
+ return
+
+ with (
+ complete_step(f"Syncing package manager metadata for {config.name()} image"),
+ prepend_to_environ_path(config),
+ rchown_package_manager_dirs(config),
+ setup_workspace(args, config) as workspace,
+ ):
+ context = Context(
+ args,
+ config,
+ workspace=workspace,
+ resources=resources,
+ package_cache_dir=config.package_cache_dir_or_default(),
+ )
+
+ install_package_manager_trees(context)
+ context.config.distribution.setup(context)
+
+ subdir = context.config.distribution.package_manager(config).subdir(config)
+
+ with (
+ flock(context.config.package_cache_dir_or_default() / "cache" / subdir),
+ flock(context.config.package_cache_dir_or_default() / "lib" / subdir),
+ ):
+ context.config.distribution.sync(context)
+
+
def run_build(args: Args, config: Config, *, resources: Path) -> None:
check_inputs(config)
for p in (
config.output_dir,
config.cache_dir,
+ config.package_cache_dir_or_default(),
config.build_dir,
config.workspace_dir,
):
if p and not p.exists():
INVOKING_USER.mkdir(p)
+ subdir = config.distribution.package_manager(config).subdir(config)
+
+ for d in ("cache", "lib"):
+ src = config.package_cache_dir_or_default() / d / subdir
+ INVOKING_USER.mkdir(src)
+
+ sync_repository_metadata(args, config, resources=resources)
+
+ src = config.package_cache_dir_or_default() / "cache" / subdir
+ for p in config.distribution.package_manager(config).cache_subdirs(src):
+ INVOKING_USER.mkdir(p)
+
with (
acl_toggle_build(config, INVOKING_USER.uid),
+ rchown_package_manager_dirs(config),
setup_workspace(args, config) as workspace,
):
build_image(Context(args, config, workspace=workspace, resources=resources))
output_dir: Optional[Path]
workspace_dir: Optional[Path]
cache_dir: Optional[Path]
+ package_cache_dir: Optional[Path]
build_dir: Optional[Path]
image_id: Optional[str]
image_version: Optional[str]
return Path("/var/tmp")
+ def package_cache_dir_or_default(self) -> Path:
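+ # For example, a Fedora 39 x86-64 build invoked as a regular user defaults to
+ # something like ~/.cache/mkosi/fedora~39~x86-64 (illustrative path; the base
+ # directory comes from INVOKING_USER.cache_dir()).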
+ return (
+ self.package_cache_dir or
+ (INVOKING_USER.cache_dir() / f"{self.distribution}~{self.release}~{self.architecture}")
+ )
+
def tools(self) -> Path:
return self.tools_tree or Path("/")
section="Output",
parse=config_make_path_parser(required=False),
paths=("mkosi.cache",),
- help="Package cache path",
+ help="Incremental cache directory",
+ ),
+ ConfigSetting(
+ dest="package_cache_dir",
+ metavar="PATH",
+ name="PackageCacheDirectory",
+ section="Output",
+ parse=config_make_path_parser(required=False),
+ help="Package cache directory",
),
ConfigSetting(
dest="build_dir",
Output Directory: {config.output_dir_or_cwd()}
Workspace Directory: {config.workspace_dir_or_default()}
Cache Directory: {none_to_none(config.cache_dir)}
+ Package Cache Directory: {none_to_default(config.package_cache_dir)}
Build Directory: {none_to_none(config.build_dir)}
Image ID: {config.image_id}
Image Version: {config.image_version}
class Context:
"""State related properties."""
- def __init__(self, args: Args, config: Config, *, workspace: Path, resources: Path) -> None:
+ def __init__(
+ self,
+ args: Args,
+ config: Config,
+ *,
+ workspace: Path,
+ resources: Path,
+ package_cache_dir: Optional[Path] = None,
+ ) -> None:
self.args = args
self.config = config
self.workspace = workspace
self.resources = resources
+ self.package_cache_dir = package_cache_dir or (self.root / "mkosi")
with umask(~0o755):
# Using a btrfs subvolume as the upperdir in an overlayfs results in EXDEV so make sure we create
(self.pkgmngr / "var/log").mkdir(parents=True)
self.packages.mkdir()
self.install_dir.mkdir(exist_ok=True)
- self.cache_dir.mkdir(parents=True, exist_ok=True)
@property
def root(self) -> Path:
def packages(self) -> Path:
return self.workspace / "packages"
- @property
- def cache_dir(self) -> Path:
- return self.config.cache_dir or (self.workspace / "cache")
-
@property
def install_dir(self) -> Path:
return self.workspace / "dest"
def createrepo(cls, context: "Context") -> None:
raise NotImplementedError
+ @classmethod
+ def sync(cls, context: "Context") -> None:
+ raise NotImplementedError
+
class Distribution(StrEnum):
# Please consult docs/distribution-policy.md and contact one
def createrepo(self, context: "Context") -> None:
return self.installer().createrepo(context)
+ def sync(self, context: "Context") -> None:
+ return self.installer().sync(context)
+
def installer(self) -> type[DistributionInstaller]:
modname = str(self).replace('-', '_')
mod = importlib.import_module(f"mkosi.distributions.{modname}")
def setup(cls, context: Context) -> None:
Pacman.setup(context, cls.repositories(context))
+ @classmethod
+ def sync(cls, context: Context) -> None:
+ Pacman.sync(context)
+
@classmethod
def install(cls, context: Context) -> None:
cls.install_packages(context, ["filesystem"], apivfs=False)
Pacman.invoke(
context,
"--sync",
- ["--refresh", "--needed", "--assume-installed", "initramfs"],
+ ["--needed", "--assume-installed", "initramfs"],
packages,
apivfs=apivfs,
)
if context.config.local_mirror:
yield Pacman.Repository("core", context.config.local_mirror)
else:
- if context.want_local_repo():
- yield Pacman.localrepo()
-
if context.config.architecture == Architecture.arm64:
url = f"{context.config.mirror or 'http://mirror.archlinuxarm.org'}/$arch/$repo"
else:
Dnf.setup(context, cls.repositories(context))
(context.pkgmngr / "etc/dnf/vars/stream").write_text(f"{context.config.release}-stream\n")
+ @classmethod
+ def sync(cls, context: Context) -> None:
+ Dnf.sync(context)
+
@classmethod
def install(cls, context: Context) -> None:
# Make sure glibc-minimal-langpack is installed instead of glibc-all-langpacks.
yield from cls.repository_variants(context, "AppStream")
return
- if context.want_local_repo():
- yield Dnf.localrepo()
-
yield from cls.repository_variants(context, "BaseOS")
yield from cls.repository_variants(context, "AppStream")
yield from cls.repository_variants(context, "extras")
def setup(cls, context: Context) -> None:
pass
+ @classmethod
+ def sync(cls, context: Context) -> None:
+ pass
+
@classmethod
def install(cls, context: Context) -> None:
pass
)
return
- if context.want_local_repo():
- yield Apt.localrepo(context)
-
mirror = context.config.mirror or "http://deb.debian.org/debian"
signedby = "/usr/share/keyrings/debian-archive-keyring.gpg"
def createrepo(cls, context: Context) -> None:
Apt.createrepo(context)
+ @classmethod
+ def sync(cls, context: Context) -> None:
+ Apt.sync(context)
+
@classmethod
def install(cls, context: Context) -> None:
# Instead of using debootstrap, we replicate its core functionality here. Because dpkg does not have
(context.root / d).symlink_to(f"usr/{d}")
(context.root / f"usr/{d}").mkdir(parents=True, exist_ok=True)
- Apt.invoke(context, "update", apivfs=False)
-
# Next, we invoke apt-get install to download all the essential packages. With DPkg::Pre-Install-Pkgs,
# we specify a shell command that will receive the list of packages that will be installed on stdin.
# By configuring Debug::pkgDpkgPm=1, apt-get install will not actually execute any dpkg commands, so
# then extracting the tar file into the chroot.
for deb in essential:
- with (
- # The deb paths will be in the form of "/var/cache/apt/<deb>" so we transform them to the corresponding
- # path in mkosi's package cache directory.
- open(context.cache_dir / Path(deb).relative_to("/var"), "rb") as i,
- tempfile.NamedTemporaryFile() as o
- ):
+ # If a deb path is in the form of "/var/cache/apt/<deb>", we transform it to the corresponding path in
+ # mkosi's package cache directory. If it's relative to /work/packages, we transform it to the corresponding
+ # path in mkosi's local package repository. Otherwise, we use the path as is.
+ if Path(deb).is_relative_to("/var/cache"):
+ path = context.config.package_cache_dir_or_default() / Path(deb).relative_to("/var")
+ elif Path(deb).is_relative_to("/work/packages"):
+ path = context.packages / Path(deb).relative_to("/work/packages")
+ else:
+ path = Path(deb)
+
+ with open(path, "rb") as i, tempfile.NamedTemporaryFile() as o:
run(["dpkg-deb", "--fsys-tarfile", "/dev/stdin"], stdin=i, stdout=o, sandbox=context.sandbox())
extract_tar(
Path(o.name), context.root,
with umask(~0o644):
policyrcd.write_text("#!/bin/sh\nexit 101\n")
- Apt.invoke(context, "update", apivfs=False)
Apt.invoke(context, "install", packages, apivfs=apivfs)
install_apt_sources(context, cls.repositories(context, local=False))
def setup(cls, context: Context) -> None:
Dnf.setup(context, cls.repositories(context), filelists=False)
+ @classmethod
+ def sync(cls, context: Context) -> None:
+ Dnf.sync(context)
+
@classmethod
def install(cls, context: Context) -> None:
cls.install_packages(context, ["filesystem"], apivfs=False)
yield RpmRepository("fedora", f"baseurl={context.config.local_mirror}", gpgurls)
return
- if context.want_local_repo():
- yield Dnf.localrepo()
-
if context.config.release == "eln":
mirror = context.config.mirror or "https://odcs.fedoraproject.org/composes/production/latest-Fedora-ELN/compose"
for repo in ("Appstream", "BaseOS", "Extras", "CRB"):
from mkosi.config import Architecture
from mkosi.context import Context
from mkosi.distributions import Distribution, fedora, join_mirror
-from mkosi.installer.dnf import Dnf
from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
from mkosi.log import die
yield RpmRepository("core-release", f"baseurl={context.config.local_mirror}", gpgurls)
return
- if context.want_local_repo():
- yield Dnf.localrepo()
-
if context.config.mirror:
url = f"baseurl={join_mirror(context.config.mirror, 'distrib/$releasever/$basearch/media/core/')}"
yield RpmRepository("core-release", f"{url}/release", gpgurls)
from mkosi.config import Architecture
from mkosi.context import Context
from mkosi.distributions import Distribution, fedora, join_mirror
-from mkosi.installer.dnf import Dnf
from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
from mkosi.log import die
yield RpmRepository("main-release", f"baseurl={context.config.local_mirror}", gpgurls)
return
- if context.want_local_repo():
- yield Dnf.localrepo()
-
url = f"baseurl={join_mirror(mirror, '$releasever/repository/$basearch/main')}"
yield RpmRepository("main-release", f"{url}/release", gpgurls)
yield RpmRepository("main-updates", f"{url}/updates", gpgurls)
else:
Dnf.setup(context, cls.repositories(context))
+ @classmethod
+ def sync(cls, context: Context) -> None:
+ if find_binary("zypper", root=context.config.tools()):
+ Zypper.sync(context)
+ else:
+ Dnf.sync(context)
+
@classmethod
def install(cls, context: Context) -> None:
cls.install_packages(context, ["filesystem", "distribution-release"], apivfs=False)
def repositories(cls, context: Context) -> Iterable[RpmRepository]:
zypper = find_binary("zypper", root=context.config.tools())
- if context.want_local_repo():
- yield Zypper.localrepo() if zypper else Dnf.localrepo()
-
release = context.config.release
if release == "leap":
release = "stable"
)
return
- if context.want_local_repo():
- yield Apt.localrepo(context)
-
if context.config.architecture in (Architecture.x86, Architecture.x86_64):
mirror = context.config.mirror or "http://archive.ubuntu.com/ubuntu"
else:
# SPDX-License-Identifier: LGPL-2.1+
+import os
from pathlib import Path
-from mkosi.config import ConfigFeature
+from mkosi.config import Config, ConfigFeature, OutputFormat
from mkosi.context import Context
from mkosi.run import find_binary
from mkosi.sandbox import finalize_crypto_mounts
-from mkosi.tree import rmtree
+from mkosi.tree import move_tree, rmtree
from mkosi.types import PathString
from mkosi.util import flatten
class PackageManager:
+ @classmethod
+ def subdir(cls, config: Config) -> Path:
+ raise NotImplementedError
+
+ @classmethod
+ def cache_subdirs(cls, cache: Path) -> list[Path]:
+ raise NotImplementedError
+
@classmethod
def scripts(cls, context: Context) -> dict[str, list[PathString]]:
raise NotImplementedError
+ @classmethod
+ def mounts(cls, context: Context) -> list[PathString]:
+ mounts: list[PathString] = [
+ *(["--ro-bind", m, m] if (m := context.config.local_mirror) else []),
+ *finalize_crypto_mounts(tools=context.config.tools()),
+ "--bind", context.packages, "/work/packages",
+ ]
+
+ subdir = context.config.distribution.package_manager(context.config).subdir(context.config)
+
+ for d in ("cache", "lib"):
+ src = context.package_cache_dir / d / subdir
+ mounts += ["--bind", src, Path("/var") / d / subdir]
+
+ # If we're not operating on the configured package cache directory, we're operating on a snapshot of the
+ # repository metadata in the image root directory. To make sure any packages downloaded in that scenario
+ # still end up in the configured package cache directory, we mount its package subdirectories over the
+ # corresponding directories of the snapshot.
+ if d == "cache" and context.package_cache_dir != context.config.package_cache_dir_or_default():
+ caches = context.config.distribution.package_manager(context.config).cache_subdirs(src)
+ mounts += flatten(
+ [
+ "--bind",
+ os.fspath(context.config.package_cache_dir_or_default() / d / subdir / p.relative_to(src)),
+ Path("/var") / d / subdir / p.relative_to(src),
+ ]
+ for p in caches
+ )
+
+ return mounts
+
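For orientation, here is a minimal sketch of the per-package-manager hooks this refactoring introduces; the `Foo` backend and its `packages` cache subdirectory are hypothetical, see `Apt`, `Dnf`, `Pacman` and `Zypper` for the real implementations:

```python
from pathlib import Path

from mkosi.config import Config
from mkosi.context import Context
from mkosi.installer import PackageManager


class Foo(PackageManager):
    @classmethod
    def subdir(cls, config: Config) -> Path:
        # All state lives under <package-cache-dir>/{cache,lib}/foo.
        return Path("foo")

    @classmethod
    def cache_subdirs(cls, cache: Path) -> list[Path]:
        # Subdirectories holding downloaded packages. These are excluded from
        # the repository metadata snapshot copied into the image's /mkosi
        # directory and are remounted from the shared package cache instead.
        return [cache / "packages"]

    @classmethod
    def sync(cls, context: Context) -> None:
        # Refresh the repository metadata. Called once at the start of a build
        # while mkosi holds locks on the cache and lib directories.
        ...
```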
def clean_package_manager_metadata(context: Context) -> None:
"""
Try them all regardless of the distro: metadata is only removed if
the package manager is not present in the image.
"""
+ if (
+ context.package_cache_dir.is_relative_to(context.root) and
+ not context.config.overlay and (
+ context.config.clean_package_metadata != ConfigFeature.disabled or
+ context.config.output_format not in (OutputFormat.directory, OutputFormat.tar)
+ )
+ ):
+ # Instead of removing the package cache directory from the image, we move it to the workspace so it stays
+ # available for later steps and is automatically removed along with the workspace when the build finishes.
+ context.package_cache_dir = move_tree(
+ context.package_cache_dir, context.workspace / "package-cache-dir",
+ tools=context.config.tools(),
+ sandbox=context.sandbox(
+ options=[
+ "--bind", context.package_cache_dir.parent, context.package_cache_dir.parent,
+ "--bind", context.workspace, context.workspace,
+ ],
+ ),
+ )
if context.config.clean_package_metadata == ConfigFeature.disabled:
return
if always or not find_binary(tool, root=context.root):
rmtree(*(context.root / p for p in paths),
sandbox=context.sandbox(options=["--bind", context.root, context.root]))
-
-
-def finalize_package_manager_mounts(context: Context) -> list[PathString]:
- from mkosi.installer.dnf import Dnf
-
- mounts: list[PathString] = [
- *(["--ro-bind", m, m] if (m := context.config.local_mirror) else []),
- *finalize_crypto_mounts(tools=context.config.tools()),
- "--bind", context.packages, "/work/packages",
- ]
-
- mounts += flatten(
- ["--bind", context.cache_dir / d, Path("/var") / d]
- for d in (
- "lib/apt",
- "cache/apt",
- f"cache/{Dnf.subdir(context.config)}",
- f"lib/{Dnf.subdir(context.config)}",
- "cache/pacman/pkg",
- "cache/zypp",
- )
- if (context.cache_dir / d).exists()
- )
-
- return mounts
# SPDX-License-Identifier: LGPL-2.1+
import textwrap
from collections.abc import Iterable, Sequence
+from pathlib import Path
from typing import NamedTuple, Optional
+from mkosi.config import Config
from mkosi.context import Context
-from mkosi.installer import PackageManager, finalize_package_manager_mounts
+from mkosi.installer import PackageManager
from mkosi.mounts import finalize_ephemeral_source_mounts
from mkosi.run import find_binary, run
from mkosi.sandbox import apivfs_cmd
"""
)
+ @classmethod
+ def subdir(cls, config: Config) -> Path:
+ return Path("apt")
+
+ @classmethod
+ def cache_subdirs(cls, cache: Path) -> list[Path]:
+ return [cache / "archives"]
+
@classmethod
def scripts(cls, context: Context) -> dict[str, list[PathString]]:
return {
(context.pkgmngr / "etc/apt/preferences.d").mkdir(exist_ok=True, parents=True)
(context.pkgmngr / "etc/apt/sources.list.d").mkdir(exist_ok=True, parents=True)
- # TODO: Drop once apt 2.5.4 is widely available.
with umask(~0o755):
+ # TODO: Drop once apt 2.5.4 is widely available.
(context.root / "var/lib/dpkg").mkdir(parents=True, exist_ok=True)
(context.root / "var/lib/dpkg/status").touch()
- (context.cache_dir / "lib/apt").mkdir(exist_ok=True, parents=True)
- (context.cache_dir / "cache/apt").mkdir(exist_ok=True, parents=True)
+ (context.package_cache_dir / "lib/apt/lists/partial").mkdir(parents=True, exist_ok=True)
# We have a special apt.conf outside of pkgmngr dir that only configures "Dir::Etc" that we pass to APT_CONFIG
# to tell apt it should read config files from /etc/apt in case this is overridden by distributions. This is
operation: str,
packages: Sequence[str] = (),
*,
+ options: Sequence[str] = (),
apivfs: bool = True,
mounts: Sequence[PathString] = (),
) -> None:
with finalize_ephemeral_source_mounts(context.config) as sources:
run(
- cls.cmd(context, "apt-get") + [operation, *sort_packages(packages)],
+ cls.cmd(context, "apt-get") + [operation, *options, *sort_packages(packages)],
sandbox=(
context.sandbox(
network=True,
options=[
"--bind", context.root, context.root,
- *finalize_package_manager_mounts(context),
+ *cls.mounts(context),
*sources,
*mounts,
"--chdir", "/work/src",
env=context.config.environment,
)
+ @classmethod
+ def sync(cls, context: Context) -> None:
+ cls.invoke(context, "update")
@classmethod
def createrepo(cls, context: Context) -> None:
with (context.packages / "Packages").open("wb") as f:
- run(["dpkg-scanpackages", context.packages],
- stdout=f, sandbox=context.sandbox(options=["--ro-bind", context.packages, context.packages]))
+ run(
+ ["dpkg-scanpackages", "."],
+ stdout=f,
+ sandbox=context.sandbox(
+ options=[
+ "--ro-bind", context.packages, context.packages,
+ "--chdir", context.packages,
+ ],
+ ),
+ )
+ (context.pkgmngr / "etc/apt/sources.list.d").mkdir(parents=True, exist_ok=True)
+ (context.pkgmngr / "etc/apt/sources.list.d/mkosi-local.sources").write_text(
+ textwrap.dedent(
+ """\
+ Enabled: yes
+ Types: deb
+ URIs: file:///work/packages
+ Suites: ./
+ Trusted: yes
+ """
+ )
+ )
- @classmethod
- def localrepo(cls, context: Context) -> Repository:
- return cls.Repository(
- types=("deb",),
- url="file:///work/packages",
- suite=context.config.release,
- components=("main",),
- signedby=None,
+ cls.invoke(
+ context,
+ "update",
+ options=[
+ "-o", "Dir::Etc::sourcelist=sources.list.d/mkosi-local.sources",
+ "-o", "Dir::Etc::sourceparts=-",
+ "-o", "APT::Get::List-Cleanup=0",
+ ],
+ apivfs=False,
)
# SPDX-License-Identifier: LGPL-2.1+
import textwrap
-from collections.abc import Iterable
+from collections.abc import Iterable, Sequence
from pathlib import Path
from mkosi.config import Config
from mkosi.context import Context
-from mkosi.installer import PackageManager, finalize_package_manager_mounts
+from mkosi.installer import PackageManager
from mkosi.installer.rpm import RpmRepository, fixup_rpmdb_location, rpm_cmd, setup_rpm
from mkosi.log import ARG_DEBUG
from mkosi.mounts import finalize_ephemeral_source_mounts
def subdir(cls, config: Config) -> Path:
return Path("libdnf5" if cls.executable(config) == "dnf5" else "dnf")
+ @classmethod
+ def cache_subdirs(cls, cache: Path) -> list[Path]:
+ return [
+ p / "packages"
+ for p in cache.iterdir()
+ if p.is_dir() and "-" in p.name and "mkosi" not in p.name
+ ]
+
@classmethod
def scripts(cls, context: Context) -> dict[str, list[PathString]]:
return {
@classmethod
def setup(cls, context: Context, repositories: Iterable[RpmRepository], filelists: bool = True) -> None:
- (context.pkgmngr / "etc/dnf/vars").mkdir(exist_ok=True, parents=True)
- (context.pkgmngr / "etc/yum.repos.d").mkdir(exist_ok=True, parents=True)
-
- (context.cache_dir / "cache" / cls.subdir(context.config)).mkdir(exist_ok=True, parents=True)
- (context.cache_dir / "lib" / cls.subdir(context.config)).mkdir(exist_ok=True, parents=True)
+ (context.pkgmngr / "etc/dnf/vars").mkdir(parents=True, exist_ok=True)
+ (context.pkgmngr / "etc/yum.repos.d").mkdir(parents=True, exist_ok=True)
config = context.pkgmngr / "etc/dnf/dnf.conf"
[{repo.id}]
name={repo.id}
{repo.url}
- gpgcheck={int(repo.gpgcheck)}
+ gpgcheck=1
enabled={int(repo.enabled)}
"""
)
)
- if repo.metadata_expire is not None:
- f.write(f"metadata_expire={repo.metadata_expire}\n")
- if repo.priority is not None:
- f.write(f"priority={repo.priority}\n")
-
if repo.sslcacert:
f.write(f"sslcacert={repo.sslcacert}\n")
if repo.sslclientcert:
opt = "--enable-repo" if dnf.endswith("dnf5") else "--enablerepo"
cmdline += [f"{opt}={repo}" for repo in context.config.repositories]
- # TODO: this breaks with a local, offline repository created with 'createrepo'
- if context.config.cache_only and not context.config.local_mirror:
+ if context.config.cache_only:
cmdline += ["--cacheonly"]
+ else:
+ cmdline += ["--setopt=metadata_expire=never"]
+ if dnf == "dnf5":
+ cmdline += ["--setopt=cacheonly=metadata"]
if not context.config.architecture.is_native():
cmdline += [f"--forcearch={context.config.distribution.architecture(context.config.architecture)}"]
return cmdline
@classmethod
- def invoke(cls, context: Context, operation: str, packages: Iterable[str], apivfs: bool = True) -> None:
+ def invoke(
+ cls,
+ context: Context,
+ operation: str,
+ packages: Iterable[str] = (),
+ options: Sequence[str] = (),
+ apivfs: bool = True,
+ ) -> None:
with finalize_ephemeral_source_mounts(context.config) as sources:
run(
- cls.cmd(context) + [operation, *sort_packages(packages)],
+ cls.cmd(context) + [operation, *options, *sort_packages(packages)],
sandbox=(
context.sandbox(
network=True,
options=[
"--bind", context.root, context.root,
- *finalize_package_manager_mounts(context),
+ *cls.mounts(context),
*sources,
"--chdir", "/work/src",
],
if any(p.name.startswith(prefix) for prefix in ("dnf", "hawkey", "yum")):
p.unlink()
+ @classmethod
+ def sync(cls, context: Context, options: Sequence[str] = ()) -> None:
+ cls.invoke(
+ context,
+ "makecache",
+ options=[
+ "--refresh",
+ *(["--setopt=cacheonly=none"] if cls.executable(context.config) == "dnf5" else []),
+ *options,
+ ],
+ apivfs=False,
+ )
+
@classmethod
def createrepo(cls, context: Context) -> None:
run(["createrepo_c", context.packages],
sandbox=context.sandbox(options=["--bind", context.packages, context.packages]))
- @classmethod
- def localrepo(cls) -> RpmRepository:
- return RpmRepository(
- id="mkosi-packages",
- url="baseurl=file:///work/packages",
- gpgcheck=False,
- gpgurls=(),
- metadata_expire=0,
- priority=50,
+ (context.pkgmngr / "etc/yum.repos.d/mkosi-local.repo").write_text(
+ textwrap.dedent(
+ """\
+ [mkosi]
+ name=mkosi
+ baseurl=file:///work/packages
+ gpgcheck=0
+ metadata_expire=never
+ priority=50
+ """
+ )
)
+
+ cls.sync(context, options=["--disablerepo=*", "--enablerepo=mkosi"])
# SPDX-License-Identifier: LGPL-2.1+
+import shutil
import textwrap
from collections.abc import Iterable, Sequence
from pathlib import Path
from typing import NamedTuple
+from mkosi.config import Config
from mkosi.context import Context
-from mkosi.installer import PackageManager, finalize_package_manager_mounts
+from mkosi.installer import PackageManager
from mkosi.mounts import finalize_ephemeral_source_mounts
from mkosi.run import run
from mkosi.sandbox import apivfs_cmd
id: str
url: str
+ @classmethod
+ def subdir(cls, config: Config) -> Path:
+ return Path("pacman")
+
+ @classmethod
+ def cache_subdirs(cls, cache: Path) -> list[Path]:
+ return [cache / "pkg"]
+
@classmethod
def scripts(cls, context: Context) -> dict[str, list[PathString]]:
return {"pacman": apivfs_cmd(context.root) + cls.cmd(context)}
+ @classmethod
+ def mounts(cls, context: Context) -> list[PathString]:
+ return [
+ *super().mounts(context),
+ # pacman reuses the same directory for the sync databases and the local database containing the list of
+ # installed packages. The former should go in the cache directory, the latter should go in the image, so we
+ # bind mount the local directory from the image to make sure that happens.
+ "--bind", context.root / "var/lib/pacman/local", "/var/lib/pacman/local",
+ # pacman writes downloaded packages to the first writable cache directory. We don't want it to write to our
+ # local repository directory so we expose it as a read-only directory to pacman.
+ "--ro-bind", context.packages, "/var/cache/pacman/mkosi",
+ ]
+
@classmethod
def setup(cls, context: Context, repositories: Iterable[Repository]) -> None:
if context.config.repository_key_check:
# will be no signatures
sig_level = "Never"
- # Create base layout for pacman and pacman-key
with umask(~0o755):
- (context.root / "var/lib/pacman").mkdir(exist_ok=True, parents=True)
+ (context.root / "var/lib/pacman/local").mkdir(parents=True, exist_ok=True)
- (context.cache_dir / "cache/pacman/pkg").mkdir(parents=True, exist_ok=True)
+ (context.pkgmngr / "etc/mkosi-local.conf").touch()
config = context.pkgmngr / "etc/pacman.conf"
if config.exists():
SigLevel = {sig_level}
LocalFileSigLevel = Optional
ParallelDownloads = 5
+ Architecture = {context.config.distribution.architecture(context.config.architecture)}
+
+ # This has to go first so that our local repository always takes precedence over any other ones.
+ Include = /etc/mkosi-local.conf
"""
)
)
"pacman",
"--root", context.root,
"--logfile=/dev/null",
+ "--dbpath=/var/lib/pacman",
+ # Make sure pacman looks in our local repository first by listing it as the first cache directory. We mount
+ # it read-only so the second directory will still be used for writing new cache entries.
+ "--cachedir=/var/cache/pacman/mkosi",
"--cachedir=/var/cache/pacman/pkg",
"--hookdir", context.root / "etc/pacman.d/hooks",
"--arch", context.config.distribution.architecture(context.config.architecture),
network=True,
options=[
"--bind", context.root, context.root,
- *finalize_package_manager_mounts(context),
+ *cls.mounts(context),
*sources,
"--chdir", "/work/src",
],
)
@classmethod
- def createrepo(cls, context: Context, *, force: bool = False) -> None:
- run(
- [
- "repo-add",
- context.packages / "mkosi-packages.db.tar",
- *sorted(context.packages.glob("*.pkg.tar*"), key=lambda p: GenericVersion(Path(p).name)),
- ]
- )
+ def sync(cls, context: Context) -> None:
+ cls.invoke(context, "--sync", ["--refresh"], apivfs=False)
@classmethod
- def localrepo(cls) -> Repository:
- return cls.Repository(id="mkosi-packages", url="file:///work/packages")
+ def createrepo(cls, context: Context) -> None:
+ run(["repo-add", "--quiet", context.packages / "mkosi.db.tar",
+ *sorted(context.packages.glob("*.pkg.tar*"), key=lambda p: GenericVersion(Path(p).name))])
+
+ (context.pkgmngr / "etc/mkosi-local.conf").write_text(
+ textwrap.dedent(
+ """\
+ [mkosi]
+ Server = file:///i/dont/exist
+ SigLevel = Never
+ Usage = Install Search Upgrade
+ """
+ )
+ )
+
+ # pacman can't sync a single repository, so we go behind its back and do it ourselves.
+ shutil.move(
+ context.packages / "mkosi.db.tar",
+ context.package_cache_dir / "lib/pacman/sync/mkosi.db"
+ )
id: str
url: str
gpgurls: tuple[str, ...]
- gpgcheck: bool = True
enabled: bool = True
sslcacert: Optional[Path] = None
sslclientkey: Optional[Path] = None
sslclientcert: Optional[Path] = None
- metadata_expire: Optional[int] = None
- priority: Optional[int] = None
def find_rpm_gpgkey(context: Context, key: str) -> Optional[str]:
import hashlib
import textwrap
from collections.abc import Iterable, Sequence
+from pathlib import Path
-from mkosi.config import yes_no
+from mkosi.config import Config, yes_no
from mkosi.context import Context
-from mkosi.installer import PackageManager, finalize_package_manager_mounts
+from mkosi.installer import PackageManager
from mkosi.installer.rpm import RpmRepository, fixup_rpmdb_location, rpm_cmd, setup_rpm
from mkosi.mounts import finalize_ephemeral_source_mounts
from mkosi.run import run
class Zypper(PackageManager):
+ @classmethod
+ def subdir(cls, config: Config) -> Path:
+ return Path("zypp")
+
+ @classmethod
+ def cache_subdirs(cls, cache: Path) -> list[Path]:
+ return [cache / "packages"]
+
@classmethod
def scripts(cls, context: Context) -> dict[str, list[PathString]]:
return {
config = context.pkgmngr / "etc/zypp/zypp.conf"
config.parent.mkdir(exist_ok=True, parents=True)
- (context.cache_dir / "cache/zypp").mkdir(exist_ok=True, parents=True)
-
# rpm.install.excludedocs can only be configured in zypp.conf so we append
# to any user provided config file. Let's also bump the refresh delay to
# the same default as dnf which is 48 hours.
[{repo.id}-{key}]
name={repo.id}
{repo.url}
- gpgcheck={int(repo.gpgcheck)}
+ gpgcheck=1
enabled={int(repo.enabled)}
- autorefresh=1
+ autorefresh=0
keeppackages=1
"""
)
)
- if repo.priority is not None:
- f.write(f"priority={repo.priority}\n")
-
for i, url in enumerate(repo.gpgurls):
f.write("gpgkey=" if i == 0 else len("gpgkey=") * " ")
f.write(f"{url}\n")
"--cache-dir=/var/cache/zypp",
"--gpg-auto-import-keys" if context.config.repository_key_check else "--no-gpg-checks",
"--non-interactive",
+ "--no-refresh",
]
@classmethod
network=True,
options=[
"--bind", context.root, context.root,
- *finalize_package_manager_mounts(context),
+ *cls.mounts(context),
*sources,
"--chdir", "/work/src",
],
fixup_rpmdb_location(context)
+ @classmethod
+ def sync(cls, context: Context) -> None:
+ cls.invoke(context, "refresh", apivfs=False)
+
@classmethod
def createrepo(cls, context: Context) -> None:
run(["createrepo_c", context.packages],
sandbox=context.sandbox(options=["--bind", context.packages, context.packages]))
- @classmethod
- def localrepo(cls) -> RpmRepository:
- return RpmRepository(
- id="mkosi-packages",
- url="baseurl=file:///work/packages",
- gpgcheck=False,
- gpgurls=(),
- priority=50,
+ (context.pkgmngr / "etc/zypp/repos.d/mkosi-local.repo").write_text(
+ textwrap.dedent(
+ """\
+ [mkosi]
+ name=mkosi
+ baseurl=file:///work/packages
+ gpgcheck=0
+ autorefresh=0
+ keeppackages=0
+ priority=50
+ """
+ )
)
+
+ cls.invoke(context, "refresh", ["mkosi"], apivfs=False)
`CacheDirectory=`, `--cache-dir=`
-: Takes a path to a directory to use as package cache for the
- distribution package manager used. If this option is not used, but a
- `mkosi.cache/` directory is found in the local directory it is
- automatically used for this purpose.
+: Takes a path to a directory to use as the cache directory for the
+ incremental images produced when the `Incremental=` option is
+ enabled. If this option is not used, but a `mkosi.cache/` directory is
+ found in the local directory, it is automatically used for this
+ purpose.
+
+`PackageCacheDirectory=`, `--package-cache-dir=`
+
+: Takes a path to a directory to use as the package cache directory for
+ the distribution package manager used. If unset, a suitable location
+ in the system or the invoking user's cache directory is used,
+ depending on how mkosi is invoked.
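For example, the two settings can point at separate locations (paths are illustrative), keeping incremental image caches and downloaded packages independent:

```ini
[Output]
# Incremental image cache, only used when Incremental= is enabled.
CacheDirectory=mkosi.cache
# Package manager cache, shared across builds.
PackageCacheDirectory=/var/cache/mkosi
```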
`BuildDirectory=`, `--build-dir=`
distribution instead of installing the distribution from scratch. Only
extra packages are installed on top of the ones already installed in
the base trees. Note that for this to work properly, the base image
- still needs to contain the package manager metadata (see
- `CleanPackageMetadata=`).
+ still needs to contain the package manager metadata by setting
+ `CleanPackageMetadata=no` (see `CleanPackageMetadata=`).
: Instead of a directory, a tar file or a disk image may be provided. In
this case it is unpacked into the OS tree. This mode of operation
`CleanPackageMetadata=`, `--clean-package-metadata=`
-: Enable/disable removal of package manager databases at the end of
- installation. Can be specified as `true`, `false`, or `auto` (the
- default). With `auto`, files will be removed if the respective
- package manager executable is *not* present at the end of the
- installation.
+: Enable/disable removal of package manager databases and repository
+ metadata in `/mkosi` at the end of installation. Can be specified as
+ `true`, `false`, or `auto` (the default). With `auto`, package manager
+ databases will be removed if the respective package manager executable
+ is *not* present at the end of the installation.
+
+: Note that when not building a tar or directory image, the repository
+ metadata in `/mkosi` is always removed regardless of this setting, as
+ it is only useful for building extensions using `BaseTrees=`.
+
+: Note that when set to `auto`, repository metadata in `/mkosi` is
+ removed regardless of whether the respective package manager
+ executable is present or not.
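As an illustration (paths and the package name are hypothetical), a base image intended for use with `BaseTrees=` keeps its repository metadata, and an extension image built on top of it can then install additional packages:

```ini
# mkosi.conf of the base image
[Output]
Format=directory

[Content]
CleanPackageMetadata=no
```

```ini
# mkosi.conf of the extension image
[Content]
BaseTrees=../base/image
Packages=vim
```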
`PrepareScripts=`, `--prepare-script=`
run(["mkdir", "--parents", path], user=user, group=group)
return path
+ @classmethod
+ def rchown(cls, path: Path) -> None:
+ if cls.is_regular_user() and path.is_relative_to(INVOKING_USER.home()) and path.exists():
+ run(["chown", "--recursive", f"{INVOKING_USER.uid}:{INVOKING_USER.gid}", path])
+
def read_subrange(path: Path) -> int:
uid = str(os.getuid())
"Output": "outfile",
"OutputDirectory": "/your/output/here",
"Overlay": true,
+ "PackageCacheDirectory": "/a/b/c",
"PackageDirectories": [],
"PackageManagerTrees": [
{
output_dir = Path("/your/output/here"),
output_format = OutputFormat.uki,
overlay = True,
+ package_cache_dir = Path("/a/b/c"),
package_directories = [],
package_manager_trees = [ConfigTree(Path("/foo/bar"), None)],
packages = [],