)
from mkosi.context import Context
from mkosi.distributions import Distribution
-from mkosi.installer import (
- clean_package_manager_metadata,
- finalize_package_manager_mounts,
- package_manager_scripts,
-)
+from mkosi.installer import clean_package_manager_metadata, finalize_package_manager_mounts
from mkosi.kmod import gen_required_kernel_modules, process_kernel_modules
from mkosi.log import ARG_DEBUG, complete_step, die, log_notice, log_step
from mkosi.manifest import Manifest
for binary in ("useradd", "groupadd"):
if find_binary(binary, root=context.config.tools()):
scripts[binary] = (binary, "--root", context.root)
- return finalize_scripts(scripts | dict(helpers) | package_manager_scripts(context))
+ return finalize_scripts(
+ scripts | dict(helpers) | context.config.distribution.package_manager(context.config).scripts(context)
+ )
def finalize_chroot_scripts(context: Context) -> contextlib.AbstractContextManager[Path]:
from mkosi.util import StrEnum, read_os_release
if TYPE_CHECKING:
- from mkosi.config import Architecture
+ from mkosi.config import Architecture, Config
from mkosi.context import Context
+ from mkosi.installer import PackageManager
class PackageType(StrEnum):
def pretty_name(cls) -> str:
raise NotImplementedError
+ @classmethod
+ def package_manager(cls, config: "Config") -> type["PackageManager"]:
+ raise NotImplementedError
+
@classmethod
def setup(cls, context: "Context") -> None:
raise NotImplementedError
def pretty_name(self) -> str:
return self.installer().pretty_name()
+ def package_manager(self, config: "Config") -> type["PackageManager"]:
+ return self.installer().package_manager(config)
+
def setup(self, context: "Context") -> None:
return self.installer().setup(context)
from collections.abc import Iterable, Sequence
-from mkosi.config import Architecture
+from mkosi.config import Architecture, Config
from mkosi.context import Context
from mkosi.distributions import Distribution, DistributionInstaller, PackageType
-from mkosi.installer.pacman import (
- PacmanRepository,
- createrepo_pacman,
- invoke_pacman,
- localrepo_pacman,
- setup_pacman,
-)
+from mkosi.installer import PackageManager
+from mkosi.installer.pacman import Pacman
from mkosi.log import die
def default_tools_tree_distribution(cls) -> Distribution:
return Distribution.arch
+ @classmethod
+ def package_manager(cls, config: "Config") -> type[PackageManager]:
+ return Pacman
+
@classmethod
def createrepo(cls, context: Context) -> None:
- createrepo_pacman(context)
+ Pacman.createrepo(context)
@classmethod
def setup(cls, context: Context) -> None:
- setup_pacman(context, cls.repositories(context))
+ Pacman.setup(context, cls.repositories(context))
@classmethod
def install(cls, context: Context) -> None:
@classmethod
def install_packages(cls, context: Context, packages: Sequence[str], apivfs: bool = True) -> None:
- invoke_pacman(
+ Pacman.invoke(
context,
"--sync",
["--refresh", "--needed", "--assume-installed", "initramfs"],
@classmethod
def remove_packages(cls, context: Context, packages: Sequence[str]) -> None:
- invoke_pacman(context, "--remove", ["--nosave", "--recursive"], packages)
+ Pacman.invoke(context, "--remove", ["--nosave", "--recursive"], packages)
@classmethod
- def repositories(cls, context: Context) -> Iterable[PacmanRepository]:
+ def repositories(cls, context: Context) -> Iterable[Pacman.Repository]:
if context.config.local_mirror:
- yield PacmanRepository("core", context.config.local_mirror)
+ yield Pacman.Repository("core", context.config.local_mirror)
else:
if context.want_local_repo():
- yield localrepo_pacman()
+ yield Pacman.localrepo()
if context.config.architecture == Architecture.arm64:
url = f"{context.config.mirror or 'http://mirror.archlinuxarm.org'}/$arch/$repo"
] + ["core", "extra"]
for repo in repos:
- yield PacmanRepository(repo, url)
+ yield Pacman.Repository(repo, url)
@classmethod
def architecture(cls, arch: Architecture) -> str:
import shutil
from collections.abc import Iterable, Sequence
-from mkosi.config import Architecture
+from mkosi.config import Architecture, Config
from mkosi.context import Context
from mkosi.distributions import (
Distribution,
PackageType,
join_mirror,
)
-from mkosi.installer.dnf import createrepo_dnf, invoke_dnf, localrepo_dnf, setup_dnf
+from mkosi.installer import PackageManager
+from mkosi.installer.dnf import Dnf
from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
from mkosi.log import complete_step, die
from mkosi.tree import rmtree
def default_tools_tree_distribution(cls) -> Distribution:
return Distribution.fedora
+ @classmethod
+ def package_manager(cls, config: "Config") -> type[PackageManager]:
+ return Dnf
+
@classmethod
def grub_prefix(cls) -> str:
return "grub2"
@classmethod
def createrepo(cls, context: Context) -> None:
- createrepo_dnf(context)
+ Dnf.createrepo(context)
@classmethod
def setup(cls, context: Context) -> None:
if GenericVersion(context.config.release) <= 7:
die(f"{cls.pretty_name()} 7 or earlier variants are not supported")
- setup_dnf(context, cls.repositories(context))
+ Dnf.setup(context, cls.repositories(context))
(context.pkgmngr / "etc/dnf/vars/stream").write_text(f"{context.config.release}-stream\n")
@classmethod
@classmethod
def install_packages(cls, context: Context, packages: Sequence[str], apivfs: bool = True) -> None:
- invoke_dnf(context, "install", packages, apivfs=apivfs)
+ Dnf.invoke(context, "install", packages, apivfs=apivfs)
@classmethod
def remove_packages(cls, context: Context, packages: Sequence[str]) -> None:
- invoke_dnf(context, "remove", packages)
+ Dnf.invoke(context, "remove", packages)
@classmethod
def architecture(cls, arch: Architecture) -> str:
return
if context.want_local_repo():
- yield localrepo_dnf()
+ yield Dnf.localrepo()
yield from cls.repository_variants(context, "BaseOS")
yield from cls.repository_variants(context, "AppStream")
from pathlib import Path
from mkosi.archive import extract_tar
-from mkosi.config import Architecture
+from mkosi.config import Architecture, Config
from mkosi.context import Context
from mkosi.distributions import Distribution, DistributionInstaller, PackageType
-from mkosi.installer.apt import AptRepository, createrepo_apt, invoke_apt, localrepo_apt, setup_apt
+from mkosi.installer import PackageManager
+from mkosi.installer.apt import Apt
from mkosi.log import die
from mkosi.run import run
from mkosi.sandbox import finalize_passwd_mounts
def default_tools_tree_distribution(cls) -> Distribution:
return Distribution.debian
+ @classmethod
+ def package_manager(cls, config: Config) -> type[PackageManager]:
+ return Apt
+
@staticmethod
- def repositories(context: Context, local: bool = True) -> Iterable[AptRepository]:
+ def repositories(context: Context, local: bool = True) -> Iterable[Apt.Repository]:
types = ("deb", "deb-src")
components = ("main", *context.config.repositories)
if context.config.local_mirror and local:
- yield AptRepository(
+ yield Apt.Repository(
types=("deb",),
url=context.config.local_mirror,
suite=context.config.release,
return
if context.want_local_repo():
- yield localrepo_apt(context)
+ yield Apt.localrepo(context)
mirror = context.config.mirror or "http://deb.debian.org/debian"
signedby = "/usr/share/keyrings/debian-archive-keyring.gpg"
- yield AptRepository(
+ yield Apt.Repository(
types=types,
url=mirror,
suite=context.config.release,
# Debug repos are typically not mirrored.
url = "http://deb.debian.org/debian-debug"
- yield AptRepository(
+ yield Apt.Repository(
types=types,
url=url,
suite=f"{context.config.release}-debug",
if context.config.release in ("unstable", "sid"):
return
- yield AptRepository(
+ yield Apt.Repository(
types=types,
url=mirror,
suite=f"{context.config.release}-updates",
signedby=signedby,
)
- yield AptRepository(
+ yield Apt.Repository(
types=types,
# Security updates repos are never mirrored.
url="http://security.debian.org/debian-security",
@classmethod
def setup(cls, context: Context) -> None:
- setup_apt(context, cls.repositories(context))
+ Apt.setup(context, cls.repositories(context))
@classmethod
def createrepo(cls, context: Context) -> None:
- createrepo_apt(context)
+ Apt.createrepo(context)
@classmethod
def install(cls, context: Context) -> None:
(context.root / d).symlink_to(f"usr/{d}")
(context.root / f"usr/{d}").mkdir(parents=True, exist_ok=True)
- invoke_apt(context, "update", apivfs=False)
+ Apt.invoke(context, "update", apivfs=False)
# Next, we invoke apt-get install to download all the essential packages. With DPkg::Pre-Install-Pkgs,
# we specify a shell command that will receive the list of packages that will be installed on stdin.
# all it does is download the essential debs and tell us their full path in the apt cache without actually
# installing them.
with tempfile.NamedTemporaryFile(mode="r") as f:
- invoke_apt(
+ Apt.invoke(
context,
"install",
[
with umask(~0o644):
policyrcd.write_text("#!/bin/sh\nexit 101\n")
- invoke_apt(context, "update", apivfs=False)
- invoke_apt(context, "install", packages, apivfs=apivfs)
+ Apt.invoke(context, "update", apivfs=False)
+ Apt.invoke(context, "install", packages, apivfs=apivfs)
install_apt_sources(context, cls.repositories(context, local=False))
policyrcd.unlink()
@classmethod
def remove_packages(cls, context: Context, packages: Sequence[str]) -> None:
- invoke_apt(context, "purge", packages)
+ Apt.invoke(context, "purge", packages)
@classmethod
def architecture(cls, arch: Architecture) -> str:
return a
-def install_apt_sources(context: Context, repos: Iterable[AptRepository]) -> None:
+def install_apt_sources(context: Context, repos: Iterable[Apt.Repository]) -> None:
if not (context.root / "usr/bin/apt").exists():
return
from collections.abc import Iterable, Sequence
-from mkosi.config import Architecture
+from mkosi.config import Architecture, Config
from mkosi.context import Context
from mkosi.distributions import (
Distribution,
PackageType,
join_mirror,
)
-from mkosi.installer.dnf import createrepo_dnf, invoke_dnf, localrepo_dnf, setup_dnf
+from mkosi.installer import PackageManager
+from mkosi.installer.dnf import Dnf
from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
from mkosi.log import die
def grub_prefix(cls) -> str:
return "grub2"
+ @classmethod
+ def package_manager(cls, config: Config) -> type[PackageManager]:
+ return Dnf
+
@classmethod
def createrepo(cls, context: Context) -> None:
- createrepo_dnf(context)
+ Dnf.createrepo(context)
@classmethod
def setup(cls, context: Context) -> None:
- setup_dnf(context, cls.repositories(context), filelists=False)
+ Dnf.setup(context, cls.repositories(context), filelists=False)
@classmethod
def install(cls, context: Context) -> None:
@classmethod
def install_packages(cls, context: Context, packages: Sequence[str], apivfs: bool = True) -> None:
- invoke_dnf(context, "install", packages, apivfs=apivfs)
+ Dnf.invoke(context, "install", packages, apivfs=apivfs)
@classmethod
def remove_packages(cls, context: Context, packages: Sequence[str]) -> None:
- invoke_dnf(context, "remove", packages)
+ Dnf.invoke(context, "remove", packages)
@classmethod
def repositories(cls, context: Context) -> Iterable[RpmRepository]:
return
if context.want_local_repo():
- yield localrepo_dnf()
+ yield Dnf.localrepo()
if context.config.release == "eln":
mirror = context.config.mirror or "https://odcs.fedoraproject.org/composes/production/latest-Fedora-ELN/compose"
from mkosi.config import Architecture
from mkosi.context import Context
from mkosi.distributions import Distribution, fedora, join_mirror
-from mkosi.installer.dnf import localrepo_dnf
+from mkosi.installer.dnf import Dnf
from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
from mkosi.log import die
return
if context.want_local_repo():
- yield localrepo_dnf()
+ yield Dnf.localrepo()
if context.config.mirror:
url = f"baseurl={join_mirror(context.config.mirror, 'distrib/$releasever/$basearch/media/core/')}"
from mkosi.config import Architecture
from mkosi.context import Context
from mkosi.distributions import Distribution, fedora, join_mirror
-from mkosi.installer.dnf import localrepo_dnf
+from mkosi.installer.dnf import Dnf
from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
from mkosi.log import die
return
if context.want_local_repo():
- yield localrepo_dnf()
+ yield Dnf.localrepo()
url = f"baseurl={join_mirror(mirror, '$releasever/repository/$basearch/main')}"
yield RpmRepository("main-release", f"{url}/release", gpgurls)
from collections.abc import Iterable, Sequence
from pathlib import Path
-from mkosi.config import Architecture
+from mkosi.config import Architecture, Config
from mkosi.context import Context
from mkosi.distributions import Distribution, DistributionInstaller, PackageType
-from mkosi.installer.dnf import createrepo_dnf, invoke_dnf, localrepo_dnf, setup_dnf
+from mkosi.installer import PackageManager
+from mkosi.installer.dnf import Dnf
from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
-from mkosi.installer.zypper import createrepo_zypper, invoke_zypper, localrepo_zypper, setup_zypper
+from mkosi.installer.zypper import Zypper
from mkosi.log import die
from mkosi.run import find_binary, run
from mkosi.sandbox import finalize_crypto_mounts
def grub_prefix(cls) -> str:
return "grub2"
+ @classmethod
+ def package_manager(cls, config: Config) -> type[PackageManager]:
+ if find_binary("zypper", root=config.tools()):
+ return Zypper
+ else:
+ return Dnf
+
@classmethod
def createrepo(cls, context: Context) -> None:
if find_binary("zypper", root=context.config.tools()):
- createrepo_zypper(context)
+ Zypper.createrepo(context)
else:
- createrepo_dnf(context)
+ Dnf.createrepo(context)
@classmethod
def setup(cls, context: Context) -> None:
zypper = find_binary("zypper", root=context.config.tools())
if zypper:
- setup_zypper(context, cls.repositories(context))
+ Zypper.setup(context, cls.repositories(context))
else:
- setup_dnf(context, cls.repositories(context))
+ Dnf.setup(context, cls.repositories(context))
@classmethod
def install(cls, context: Context) -> None:
"--download", "in-advance",
"--recommends" if context.config.with_recommends else "--no-recommends",
]
- invoke_zypper(context, "install", packages, options=options, apivfs=apivfs)
+ Zypper.invoke(context, "install", packages, options=options, apivfs=apivfs)
else:
- invoke_dnf(context, "install", packages, apivfs=apivfs)
+ Dnf.invoke(context, "install", packages, apivfs=apivfs)
@classmethod
def remove_packages(cls, context: Context, packages: Sequence[str]) -> None:
if find_binary("zypper", root=context.config.tools()):
- invoke_zypper(context, "remove", packages, options=["--clean-deps"])
+ Zypper.invoke(context, "remove", packages, options=["--clean-deps"])
else:
- invoke_dnf(context, "remove", packages)
+ Dnf.invoke(context, "remove", packages)
@classmethod
def repositories(cls, context: Context) -> Iterable[RpmRepository]:
zypper = find_binary("zypper", root=context.config.tools())
if context.want_local_repo():
- yield localrepo_zypper() if zypper else localrepo_dnf()
+ yield Zypper.localrepo() if zypper else Dnf.localrepo()
release = context.config.release
if release == "leap":
from mkosi.config import Architecture
from mkosi.context import Context
from mkosi.distributions import debian
-from mkosi.installer.apt import AptRepository, localrepo_apt
+from mkosi.installer.apt import Apt
class Installer(debian.Installer):
return "lunar"
@staticmethod
- def repositories(context: Context, local: bool = True) -> Iterable[AptRepository]:
+ def repositories(context: Context, local: bool = True) -> Iterable[Apt.Repository]:
types = ("deb", "deb-src")
# From kinetic onwards, the usr-is-merged package is available in universe and is required by
components = (*components, *context.config.repositories)
if context.config.local_mirror and local:
- yield AptRepository(
+ yield Apt.Repository(
types=("deb",),
url=context.config.local_mirror,
suite=context.config.release,
return
if context.want_local_repo():
- yield localrepo_apt(context)
+ yield Apt.localrepo(context)
if context.config.architecture in (Architecture.x86, Architecture.x86_64):
mirror = context.config.mirror or "http://archive.ubuntu.com/ubuntu"
signedby = "/usr/share/keyrings/ubuntu-archive-keyring.gpg"
- yield AptRepository(
+ yield Apt.Repository(
types=types,
url=mirror,
suite=context.config.release,
signedby=signedby,
)
- yield AptRepository(
+ yield Apt.Repository(
types=types,
url=mirror,
suite=f"{context.config.release}-updates",
else:
mirror = "http://ports.ubuntu.com/"
- yield AptRepository(
+ yield Apt.Repository(
types=types,
url=mirror,
suite=f"{context.config.release}-security",
from mkosi.config import ConfigFeature
from mkosi.context import Context
from mkosi.run import find_binary
-from mkosi.sandbox import apivfs_cmd, finalize_crypto_mounts
+from mkosi.sandbox import finalize_crypto_mounts
from mkosi.tree import rmtree
from mkosi.types import PathString
from mkosi.util import flatten
+class PackageManager:
+ @classmethod
+ def scripts(cls, context: Context) -> dict[str, list[PathString]]:
+ raise NotImplementedError
+
+
def clean_package_manager_metadata(context: Context) -> None:
"""
Remove package manager metadata
sandbox=context.sandbox(options=["--bind", context.root, context.root]))
-def package_manager_scripts(context: Context) -> dict[str, list[PathString]]:
- from mkosi.installer.apt import apt_cmd
- from mkosi.installer.dnf import dnf_cmd
- from mkosi.installer.pacman import pacman_cmd
- from mkosi.installer.rpm import rpm_cmd
- from mkosi.installer.zypper import zypper_cmd
-
- return {
- "pacman": apivfs_cmd(context.root) + pacman_cmd(context),
- "zypper": apivfs_cmd(context.root) + zypper_cmd(context),
- "dnf" : apivfs_cmd(context.root) + dnf_cmd(context),
- "rpm" : apivfs_cmd(context.root) + rpm_cmd(context),
- } | {
- command: apivfs_cmd(context.root) + apt_cmd(context, command) for command in (
- "apt",
- "apt-cache",
- "apt-cdrom",
- "apt-config",
- "apt-extracttemplates",
- "apt-get",
- "apt-key",
- "apt-mark",
- "apt-sortpkgs",
- )
- }
-
-
def finalize_package_manager_mounts(context: Context) -> list[PathString]:
- from mkosi.installer.dnf import dnf_subdir
+ from mkosi.installer.dnf import Dnf
mounts: list[PathString] = [
*(["--ro-bind", m, m] if (m := context.config.local_mirror) else []),
for d in (
"lib/apt",
"cache/apt",
- f"cache/{dnf_subdir(context)}",
- f"lib/{dnf_subdir(context)}",
+ f"cache/{Dnf.subdir(context.config)}",
+ f"lib/{Dnf.subdir(context.config)}",
"cache/pacman/pkg",
"cache/zypp",
)
from typing import NamedTuple, Optional
from mkosi.context import Context
-from mkosi.installer import finalize_package_manager_mounts
+from mkosi.installer import PackageManager, finalize_package_manager_mounts
from mkosi.mounts import finalize_ephemeral_source_mounts
from mkosi.run import find_binary, run
from mkosi.sandbox import apivfs_cmd
from mkosi.util import sort_packages, umask
-class AptRepository(NamedTuple):
- types: tuple[str, ...]
- url: str
- suite: str
- components: tuple[str, ...]
- signedby: Optional[str]
+class Apt(PackageManager):
+ class Repository(NamedTuple):
+ types: tuple[str, ...]
+ url: str
+ suite: str
+ components: tuple[str, ...]
+ signedby: Optional[str]
- def __str__(self) -> str:
- return textwrap.dedent(
- f"""\
- Types: {" ".join(self.types)}
- URIs: {self.url}
- Suites: {self.suite}
- Components: {" ".join(self.components)}
- {"Signed-By" if self.signedby else "Trusted"}: {self.signedby or "yes"}
+ def __str__(self) -> str:
+ return textwrap.dedent(
+ f"""\
+ Types: {" ".join(self.types)}
+ URIs: {self.url}
+ Suites: {self.suite}
+ Components: {" ".join(self.components)}
+ {"Signed-By" if self.signedby else "Trusted"}: {self.signedby or "yes"}
- """
- )
-
-
-def setup_apt(context: Context, repos: Iterable[AptRepository]) -> None:
- (context.pkgmngr / "etc/apt").mkdir(exist_ok=True, parents=True)
- (context.pkgmngr / "etc/apt/apt.conf.d").mkdir(exist_ok=True, parents=True)
- (context.pkgmngr / "etc/apt/preferences.d").mkdir(exist_ok=True, parents=True)
- (context.pkgmngr / "etc/apt/sources.list.d").mkdir(exist_ok=True, parents=True)
-
- # TODO: Drop once apt 2.5.4 is widely available.
- with umask(~0o755):
- (context.root / "var/lib/dpkg").mkdir(parents=True, exist_ok=True)
- (context.root / "var/lib/dpkg/status").touch()
-
- (context.cache_dir / "lib/apt").mkdir(exist_ok=True, parents=True)
- (context.cache_dir / "cache/apt").mkdir(exist_ok=True, parents=True)
-
- # We have a special apt.conf outside of /etc/apt that only configures "Dir::Etc" that we pass to APT_CONFIG to tell
- # apt it should read config files from /etc/apt in case this is overridden by distributions. This is required
- # because apt parses CLI configuration options after parsing its configuration files and as such we can't use CLI
- # options to tell apt where to look for configuration files.
- config = context.pkgmngr / "etc/apt.conf"
- if not config.exists():
- config.write_text(
- textwrap.dedent(
- """\
- Dir::Etc "etc/apt";
"""
)
- )
- sources = context.pkgmngr / "etc/apt/sources.list.d/mkosi.sources"
- if not sources.exists():
- with sources.open("w") as f:
- for repo in repos:
- f.write(str(repo))
-
-
-def apt_cmd(context: Context, command: str) -> list[PathString]:
- debarch = context.config.distribution.architecture(context.config.architecture)
-
- cmdline: list[PathString] = [
- "env",
- "APT_CONFIG=/etc/apt.conf",
- "DEBIAN_FRONTEND=noninteractive",
- "DEBCONF_INTERACTIVE_SEEN=true",
- "INITRD=No",
- command,
- "-o", f"APT::Architecture={debarch}",
- "-o", f"APT::Architectures={debarch}",
- "-o", f"APT::Install-Recommends={str(context.config.with_recommends).lower()}",
- "-o", "APT::Immediate-Configure=off",
- "-o", "APT::Get::Assume-Yes=true",
- "-o", "APT::Get::AutomaticRemove=true",
- "-o", "APT::Get::Allow-Change-Held-Packages=true",
- "-o", "APT::Get::Allow-Remove-Essential=true",
- "-o", "APT::Sandbox::User=root",
- "-o", "Dir::Cache=/var/cache/apt",
- "-o", "Dir::State=/var/lib/apt",
- "-o", f"Dir::State::Status={context.root / 'var/lib/dpkg/status'}",
- "-o", f"Dir::Log={context.workspace}",
- "-o", f"Dir::Bin::DPkg={find_binary('dpkg', root=context.config.tools())}",
- "-o", "Debug::NoLocking=true",
- "-o", f"DPkg::Options::=--root={context.root}",
- "-o", "DPkg::Options::=--force-unsafe-io",
- "-o", "DPkg::Options::=--force-architecture",
- "-o", "DPkg::Options::=--force-depends",
- "-o", "DPkg::Options::=--no-debsig",
- "-o", "DPkg::Use-Pty=false",
- "-o", "DPkg::Install::Recursive::Minimum=1000",
- "-o", "pkgCacheGen::ForceEssential=,",
- ]
-
- if not context.config.repository_key_check:
- cmdline += [
- "-o", "Acquire::AllowInsecureRepositories=true",
- "-o", "Acquire::AllowDowngradeToInsecureRepositories=true",
- "-o", "APT::Get::AllowUnauthenticated=true",
- ]
+ @classmethod
+ def scripts(cls, context: Context) -> dict[str, list[PathString]]:
+ return {
+ command: apivfs_cmd(context.root) + cls.cmd(context, command) for command in (
+ "apt",
+ "apt-cache",
+ "apt-cdrom",
+ "apt-config",
+ "apt-extracttemplates",
+ "apt-get",
+ "apt-key",
+ "apt-mark",
+ "apt-sortpkgs",
+ )
+ }
+
+ @classmethod
+ def setup(cls, context: Context, repos: Iterable[Repository]) -> None:
+ (context.pkgmngr / "etc/apt").mkdir(exist_ok=True, parents=True)
+ (context.pkgmngr / "etc/apt/apt.conf.d").mkdir(exist_ok=True, parents=True)
+ (context.pkgmngr / "etc/apt/preferences.d").mkdir(exist_ok=True, parents=True)
+ (context.pkgmngr / "etc/apt/sources.list.d").mkdir(exist_ok=True, parents=True)
+
+ # TODO: Drop once apt 2.5.4 is widely available.
+ with umask(~0o755):
+ (context.root / "var/lib/dpkg").mkdir(parents=True, exist_ok=True)
+ (context.root / "var/lib/dpkg/status").touch()
+
+ (context.cache_dir / "lib/apt").mkdir(exist_ok=True, parents=True)
+ (context.cache_dir / "cache/apt").mkdir(exist_ok=True, parents=True)
+
+        # We have a special apt.conf outside of the pkgmngr dir that only configures "Dir::Etc", which we pass to
+        # APT_CONFIG to tell apt that it should read config files from /etc/apt in case this is overridden by
+        # distributions. This is required because apt parses CLI configuration options after parsing its
+        # configuration files, so we can't use CLI options to tell apt where to look for configuration files.
+ config = context.pkgmngr / "etc/apt.conf"
+ if not config.exists():
+ config.write_text(
+ textwrap.dedent(
+ """\
+ Dir::Etc "etc/apt";
+ """
+ )
+ )
- if not context.config.with_docs:
- cmdline += [
- "-o", "DPkg::Options::=--path-exclude=/usr/share/doc/*",
- "-o", "DPkg::Options::=--path-include=/usr/share/doc/*/copyright",
- "-o", "DPkg::Options::=--path-exclude=/usr/share/man/*",
- "-o", "DPkg::Options::=--path-exclude=/usr/share/groff/*",
- "-o", "DPkg::Options::=--path-exclude=/usr/share/info/*",
+ sources = context.pkgmngr / "etc/apt/sources.list.d/mkosi.sources"
+ if not sources.exists():
+ with sources.open("w") as f:
+ for repo in repos:
+ f.write(str(repo))
+
+ @classmethod
+ def cmd(cls, context: Context, command: str) -> list[PathString]:
+ debarch = context.config.distribution.architecture(context.config.architecture)
+
+ cmdline: list[PathString] = [
+ "env",
+ "APT_CONFIG=/etc/apt.conf",
+ "DEBIAN_FRONTEND=noninteractive",
+ "DEBCONF_INTERACTIVE_SEEN=true",
+ "INITRD=No",
+ command,
+ "-o", f"APT::Architecture={debarch}",
+ "-o", f"APT::Architectures={debarch}",
+ "-o", f"APT::Install-Recommends={str(context.config.with_recommends).lower()}",
+ "-o", "APT::Immediate-Configure=off",
+ "-o", "APT::Get::Assume-Yes=true",
+ "-o", "APT::Get::AutomaticRemove=true",
+ "-o", "APT::Get::Allow-Change-Held-Packages=true",
+ "-o", "APT::Get::Allow-Remove-Essential=true",
+ "-o", "APT::Sandbox::User=root",
+ "-o", "Dir::Cache=/var/cache/apt",
+ "-o", "Dir::State=/var/lib/apt",
+ "-o", f"Dir::State::Status={context.root / 'var/lib/dpkg/status'}",
+ "-o", f"Dir::Log={context.workspace}",
+ "-o", f"Dir::Bin::DPkg={find_binary('dpkg', root=context.config.tools())}",
+ "-o", "Debug::NoLocking=true",
+ "-o", f"DPkg::Options::=--root={context.root}",
+ "-o", "DPkg::Options::=--force-unsafe-io",
+ "-o", "DPkg::Options::=--force-architecture",
+ "-o", "DPkg::Options::=--force-depends",
+ "-o", "DPkg::Options::=--no-debsig",
+ "-o", "DPkg::Use-Pty=false",
+ "-o", "DPkg::Install::Recursive::Minimum=1000",
+ "-o", "pkgCacheGen::ForceEssential=,",
]
- return cmdline
-
-
-def invoke_apt(
- context: Context,
- operation: str,
- packages: Sequence[str] = (),
- *,
- apivfs: bool = True,
- mounts: Sequence[PathString] = (),
-) -> None:
- with finalize_ephemeral_source_mounts(context.config) as sources:
- run(
- apt_cmd(context, "apt-get") + [operation, *sort_packages(packages)],
- sandbox=(
- context.sandbox(
- network=True,
- options=[
- "--bind", context.root, context.root,
- *finalize_package_manager_mounts(context),
- *sources,
- *mounts,
- "--chdir", "/work/src",
- ],
- ) + (apivfs_cmd(context.root) if apivfs else [])
- ),
- env=context.config.environment,
- )
+ if not context.config.repository_key_check:
+ cmdline += [
+ "-o", "Acquire::AllowInsecureRepositories=true",
+ "-o", "Acquire::AllowDowngradeToInsecureRepositories=true",
+ "-o", "APT::Get::AllowUnauthenticated=true",
+ ]
+
+ if not context.config.with_docs:
+ cmdline += [
+ "-o", "DPkg::Options::=--path-exclude=/usr/share/doc/*",
+ "-o", "DPkg::Options::=--path-include=/usr/share/doc/*/copyright",
+ "-o", "DPkg::Options::=--path-exclude=/usr/share/man/*",
+ "-o", "DPkg::Options::=--path-exclude=/usr/share/groff/*",
+ "-o", "DPkg::Options::=--path-exclude=/usr/share/info/*",
+ ]
+
+ return cmdline
+
+ @classmethod
+ def invoke(
+ cls,
+ context: Context,
+ operation: str,
+ packages: Sequence[str] = (),
+ *,
+ apivfs: bool = True,
+ mounts: Sequence[PathString] = (),
+ ) -> None:
+ with finalize_ephemeral_source_mounts(context.config) as sources:
+ run(
+ cls.cmd(context, "apt-get") + [operation, *sort_packages(packages)],
+ sandbox=(
+ context.sandbox(
+ network=True,
+ options=[
+ "--bind", context.root, context.root,
+ *finalize_package_manager_mounts(context),
+ *sources,
+ *mounts,
+ "--chdir", "/work/src",
+ ],
+ ) + (apivfs_cmd(context.root) if apivfs else [])
+ ),
+ env=context.config.environment,
+ )
-def createrepo_apt(context: Context) -> None:
- with (context.packages / "Packages").open("wb") as f:
- run(["dpkg-scanpackages", context.packages],
- stdout=f, sandbox=context.sandbox(options=["--ro-bind", context.packages, context.packages]))
+ @classmethod
+ def createrepo(cls, context: Context) -> None:
+ with (context.packages / "Packages").open("wb") as f:
+ run(["dpkg-scanpackages", context.packages],
+ stdout=f, sandbox=context.sandbox(options=["--ro-bind", context.packages, context.packages]))
-def localrepo_apt(context: Context) -> AptRepository:
- return AptRepository(
- types=("deb",),
- url="file:///work/packages",
- suite=context.config.release,
- components=("main",),
- signedby=None,
- )
+ @classmethod
+ def localrepo(cls, context: Context) -> Repository:
+ return cls.Repository(
+ types=("deb",),
+ url="file:///work/packages",
+ suite=context.config.release,
+ components=("main",),
+ signedby=None,
+ )
from collections.abc import Iterable
from pathlib import Path
+from mkosi.config import Config
from mkosi.context import Context
-from mkosi.installer import finalize_package_manager_mounts
-from mkosi.installer.rpm import RpmRepository, fixup_rpmdb_location, setup_rpm
+from mkosi.installer import PackageManager, finalize_package_manager_mounts
+from mkosi.installer.rpm import RpmRepository, fixup_rpmdb_location, rpm_cmd, setup_rpm
from mkosi.log import ARG_DEBUG
from mkosi.mounts import finalize_ephemeral_source_mounts
from mkosi.run import find_binary, run
from mkosi.util import sort_packages
class Dnf(PackageManager):
    """Package manager integration for dnf/dnf5/yum based distributions."""

    @classmethod
    def executable(cls, config: Config) -> str:
        """Return the basename of the dnf binary to use.

        Prefers dnf5, then dnf, falling back to yum. The MKOSI_DNF
        environment variable overrides autodetection.
        """
        # Allow the user to override autodetection with an environment variable
        dnf = config.environment.get("MKOSI_DNF")
        root = config.tools()

        return Path(dnf or find_binary("dnf5", root=root) or find_binary("dnf", root=root) or "yum").name

    @classmethod
    def subdir(cls, config: Config) -> Path:
        """Return the cache/state subdirectory name used by the selected binary."""
        # Use endswith() to stay consistent with the dnf5 detection in setup()
        # and cmd() (and with the behavior of the old dnf_subdir() helper): an
        # equality check would misclassify a custom MKOSI_DNF binary whose name
        # merely ends in "dnf5", yielding cachedir/persistdir paths that don't
        # match the directories created in setup().
        return Path("libdnf5" if cls.executable(config).endswith("dnf5") else "dnf")

    @classmethod
    def scripts(cls, context: Context) -> dict[str, list[PathString]]:
        """Return wrapper commands exposed to user scripts ("dnf", "rpm")."""
        return {
            "dnf": apivfs_cmd(context.root) + cls.cmd(context),
            "rpm": apivfs_cmd(context.root) + rpm_cmd(context),
        }

    @classmethod
    def setup(cls, context: Context, repositories: Iterable[RpmRepository], filelists: bool = True) -> None:
        """Write dnf.conf and the mkosi repository file, then configure rpm.

        User-provided dnf.conf / mkosi.repo files in the package manager tree
        take precedence and are left untouched.
        """
        (context.pkgmngr / "etc/dnf/vars").mkdir(exist_ok=True, parents=True)
        (context.pkgmngr / "etc/yum.repos.d").mkdir(exist_ok=True, parents=True)

        (context.cache_dir / "cache" / cls.subdir(context.config)).mkdir(exist_ok=True, parents=True)
        (context.cache_dir / "lib" / cls.subdir(context.config)).mkdir(exist_ok=True, parents=True)

        config = context.pkgmngr / "etc/dnf/dnf.conf"

        if not config.exists():
            config.parent.mkdir(exist_ok=True, parents=True)
            with config.open("w") as f:
                # Make sure we download filelists so all dependencies can be resolved.
                # See https://bugzilla.redhat.com/show_bug.cgi?id=2180842
                if cls.executable(context.config).endswith("dnf5") and filelists:
                    f.write("[main]\noptional_metadata_types=filelists\n")

        repofile = context.pkgmngr / "etc/yum.repos.d/mkosi.repo"
        if not repofile.exists():
            repofile.parent.mkdir(exist_ok=True, parents=True)
            with repofile.open("w") as f:
                for repo in repositories:
                    f.write(
                        textwrap.dedent(
                            f"""\
                            [{repo.id}]
                            name={repo.id}
                            {repo.url}
                            gpgcheck={int(repo.gpgcheck)}
                            enabled={int(repo.enabled)}
                            """
                        )
                    )

                    if repo.metadata_expire is not None:
                        f.write(f"metadata_expire={repo.metadata_expire}\n")
                    if repo.priority is not None:
                        f.write(f"priority={repo.priority}\n")

                    if repo.sslcacert:
                        f.write(f"sslcacert={repo.sslcacert}\n")
                    if repo.sslclientcert:
                        f.write(f"sslclientcert={repo.sslclientcert}\n")
                    if repo.sslclientkey:
                        f.write(f"sslclientkey={repo.sslclientkey}\n")

                    # Continuation lines of a multi-value gpgkey= entry are
                    # aligned under the first value.
                    for i, url in enumerate(repo.gpgurls):
                        f.write("gpgkey=" if i == 0 else len("gpgkey=") * " ")
                        f.write(f"{url}\n")

                    f.write("\n")

        setup_rpm(context)

    @classmethod
    def cmd(cls, context: Context) -> list[PathString]:
        """Build the dnf command line shared by all invocations.

        Option spellings differ between dnf5 and dnf/yum, hence the repeated
        endswith("dnf5") checks.
        """
        dnf = cls.executable(context.config)

        cmdline: list[PathString] = [
            "env",
            "HOME=/",  # Make sure rpm doesn't pick up ~/.rpmmacros and ~/.rpmrc.
            dnf,
            "--assumeyes",
            "--best",
            f"--releasever={context.config.release}",
            f"--installroot={context.root}",
            "--setopt=keepcache=1",
            "--setopt=logdir=/var/log",
            f"--setopt=cachedir=/var/cache/{cls.subdir(context.config)}",
            f"--setopt=persistdir=/var/lib/{cls.subdir(context.config)}",
            f"--setopt=install_weak_deps={int(context.config.with_recommends)}",
            "--setopt=check_config_file_age=0",
            "--disable-plugin=*" if dnf.endswith("dnf5") else "--disableplugin=*",
            "--enable-plugin=builddep" if dnf.endswith("dnf5") else "--enableplugin=builddep",
        ]

        if ARG_DEBUG.get():
            cmdline += ["--setopt=debuglevel=10"]

        if not context.config.repository_key_check:
            cmdline += ["--nogpgcheck"]

        if context.config.repositories:
            opt = "--enable-repo" if dnf.endswith("dnf5") else "--enablerepo"
            cmdline += [f"{opt}={repo}" for repo in context.config.repositories]

        # TODO: this breaks with a local, offline repository created with 'createrepo'
        if context.config.cache_only and not context.config.local_mirror:
            cmdline += ["--cacheonly"]

        if not context.config.architecture.is_native():
            cmdline += [f"--forcearch={context.config.distribution.architecture(context.config.architecture)}"]

        if not context.config.with_docs:
            cmdline += ["--no-docs" if dnf.endswith("dnf5") else "--nodocs"]

        if dnf.endswith("dnf5"):
            cmdline += ["--use-host-config"]
        else:
            cmdline += [
                "--config=/etc/dnf/dnf.conf",
                "--setopt=reposdir=/etc/yum.repos.d",
                "--setopt=varsdir=/etc/dnf/vars",
            ]

        return cmdline

    @classmethod
    def invoke(cls, context: Context, operation: str, packages: Iterable[str], apivfs: bool = True) -> None:
        """Run a dnf operation against the image root inside the sandbox."""
        with finalize_ephemeral_source_mounts(context.config) as sources:
            run(
                cls.cmd(context) + [operation, *sort_packages(packages)],
                sandbox=(
                    context.sandbox(
                        network=True,
                        options=[
                            "--bind", context.root, context.root,
                            *finalize_package_manager_mounts(context),
                            *sources,
                            "--chdir", "/work/src",
                        ],
                    ) + (apivfs_cmd(context.root) if apivfs else [])
                ),
                env=context.config.environment,
            )

        fixup_rpmdb_location(context)

        # dnf interprets the log directory relative to the install root so there's nothing we can do but to remove the
        # log files from the install root afterwards.
        if (context.root / "var/log").exists():
            for p in (context.root / "var/log").iterdir():
                if any(p.name.startswith(prefix) for prefix in ("dnf", "hawkey", "yum")):
                    p.unlink()

    @classmethod
    def createrepo(cls, context: Context) -> None:
        """Regenerate rpm repository metadata for the local package directory."""
        run(["createrepo_c", context.packages],
            sandbox=context.sandbox(options=["--bind", context.packages, context.packages]))

    @classmethod
    def localrepo(cls) -> RpmRepository:
        """Return the repository definition for the locally built packages."""
        return RpmRepository(
            id="mkosi-packages",
            url="baseurl=file:///work/packages",
            gpgcheck=False,
            gpgurls=(),
            metadata_expire=0,
            priority=50,
        )
from typing import NamedTuple
from mkosi.context import Context
-from mkosi.installer import finalize_package_manager_mounts
+from mkosi.installer import PackageManager, finalize_package_manager_mounts
from mkosi.mounts import finalize_ephemeral_source_mounts
from mkosi.run import run
from mkosi.sandbox import apivfs_cmd
from mkosi.versioncomp import GenericVersion
class Pacman(PackageManager):
    """Package manager integration for pacman based distributions (Arch)."""

    class Repository(NamedTuple):
        # id: section name written to pacman.conf; url: Server= value.
        id: str
        url: str

    @classmethod
    def scripts(cls, context: Context) -> dict[str, list[PathString]]:
        """Return wrapper commands exposed to user scripts ("pacman")."""
        return {"pacman": apivfs_cmd(context.root) + cls.cmd(context)}

    @classmethod
    def setup(cls, context: Context, repositories: Iterable[Repository]) -> None:
        """Create the pacman directory layout and write pacman.conf.

        A user-provided pacman.conf in the package manager tree takes
        precedence and short-circuits the config generation.
        """
        if context.config.repository_key_check:
            sig_level = "Required DatabaseOptional"
        else:
            # If we are using a single local mirror built on the fly there
            # will be no signatures
            sig_level = "Never"

        # Create base layout for pacman and pacman-key
        with umask(~0o755):
            (context.root / "var/lib/pacman").mkdir(exist_ok=True, parents=True)

        (context.cache_dir / "cache/pacman/pkg").mkdir(parents=True, exist_ok=True)

        config = context.pkgmngr / "etc/pacman.conf"
        if config.exists():
            # User supplied their own pacman.conf; don't overwrite it.
            return

        config.parent.mkdir(exist_ok=True, parents=True)

        with config.open("w") as f:
            f.write(
                textwrap.dedent(
                    f"""\
                    [options]
                    SigLevel = {sig_level}
                    LocalFileSigLevel = Optional
                    ParallelDownloads = 5
                    """
                )
            )

            # One [section] per configured repository, in order.
            for repo in repositories:
                f.write(
                    textwrap.dedent(
                        f"""\
                        [{repo.id}]
                        Server = {repo.url}
                        """
                    )
                )

            # Pull in any extra repository snippets shipped by the user.
            if any((context.pkgmngr / "etc/pacman.d/").glob("*.conf")):
                f.write(
                    textwrap.dedent(
                        """\
                        Include = /etc/pacman.d/*.conf
                        """
                    )
                )

    @classmethod
    def cmd(cls, context: Context) -> list[PathString]:
        """Build the pacman command line shared by all invocations."""
        return [
            "pacman",
            "--root", context.root,
            "--logfile=/dev/null",
            "--cachedir=/var/cache/pacman/pkg",
            "--hookdir", context.root / "etc/pacman.d/hooks",
            "--arch", context.config.distribution.architecture(context.config.architecture),
            "--color", "auto",
            "--noconfirm",
        ]

    @classmethod
    def invoke(
        cls,
        context: Context,
        operation: str,
        options: Sequence[str] = (),
        packages: Sequence[str] = (),
        apivfs: bool = True,
    ) -> None:
        """Run a pacman operation against the image root inside the sandbox."""
        with finalize_ephemeral_source_mounts(context.config) as sources:
            run(
                cls.cmd(context) + [operation, *options, *sort_packages(packages)],
                sandbox=(
                    context.sandbox(
                        network=True,
                        options=[
                            "--bind", context.root, context.root,
                            *finalize_package_manager_mounts(context),
                            *sources,
                            "--chdir", "/work/src",
                        ],
                    ) + (apivfs_cmd(context.root) if apivfs else [])
                ),
                env=context.config.environment,
            )

    @classmethod
    def createrepo(cls, context: Context, *, force: bool = False) -> None:
        """Build the pacman database for the local package directory.

        NOTE(review): `force` is accepted but never used in the body — confirm
        whether it was meant to map to a repo-add flag or can be dropped.
        NOTE(review): unlike Dnf/Zypper.createrepo, this run() is not
        sandboxed — presumably intentional, but verify.
        """
        run(
            [
                "repo-add",
                context.packages / "mkosi-packages.db.tar",
                # Sort by version so repo-add records the newest package last.
                *sorted(context.packages.glob("*.pkg.tar*"), key=lambda p: GenericVersion(Path(p).name)),
            ]
        )

    @classmethod
    def localrepo(cls) -> Repository:
        """Return the repository definition for the locally built packages."""
        return cls.Repository(id="mkosi-packages", url="file:///work/packages")
from mkosi.config import yes_no
from mkosi.context import Context
-from mkosi.installer import finalize_package_manager_mounts
-from mkosi.installer.rpm import RpmRepository, fixup_rpmdb_location, setup_rpm
+from mkosi.installer import PackageManager, finalize_package_manager_mounts
+from mkosi.installer.rpm import RpmRepository, fixup_rpmdb_location, rpm_cmd, setup_rpm
from mkosi.mounts import finalize_ephemeral_source_mounts
from mkosi.run import run
from mkosi.sandbox import apivfs_cmd
from mkosi.util import sort_packages
class Zypper(PackageManager):
    """Package manager integration for zypper based distributions (openSUSE)."""

    @classmethod
    def scripts(cls, context: Context) -> dict[str, list[PathString]]:
        """Return wrapper commands exposed to user scripts ("zypper", "rpm")."""
        return {
            "zypper": apivfs_cmd(context.root) + cls.cmd(context),
            "rpm" : apivfs_cmd(context.root) + rpm_cmd(context),
        }

    @classmethod
    def setup(cls, context: Context, repos: Iterable[RpmRepository]) -> None:
        """Append mkosi settings to zypp.conf, write the mkosi repository file,
        and configure rpm. A user-provided repos.d/mkosi.repo takes precedence.
        """
        config = context.pkgmngr / "etc/zypp/zypp.conf"
        config.parent.mkdir(exist_ok=True, parents=True)

        (context.cache_dir / "cache/zypp").mkdir(exist_ok=True, parents=True)

        # rpm.install.excludedocs can only be configured in zypp.conf so we append
        # to any user provided config file. Let's also bump the refresh delay to
        # the same default as dnf which is 48 hours.
        with config.open("a") as f:
            f.write(
                textwrap.dedent(
                    f"""
                    [main]
                    rpm.install.excludedocs = {yes_no(not context.config.with_docs)}
                    repo.refresh.delay = {48 * 60}
                    """
                )
            )

        repofile = context.pkgmngr / "etc/zypp/repos.d/mkosi.repo"
        if not repofile.exists():
            repofile.parent.mkdir(exist_ok=True, parents=True)
            with repofile.open("w") as f:
                for repo in repos:
                    # zypper uses the repo ID as its cache key which is unsafe so add a hash of the url used to it to
                    # make sure a unique cache is used for each repository. We use roughly the same algorithm here that
                    # dnf uses as well.
                    key = hashlib.sha256(repo.url.encode()).hexdigest()[:16]

                    f.write(
                        textwrap.dedent(
                            f"""\
                            [{repo.id}-{key}]
                            name={repo.id}
                            {repo.url}
                            gpgcheck={int(repo.gpgcheck)}
                            enabled={int(repo.enabled)}
                            autorefresh=1
                            keeppackages=1
                            """
                        )
                    )

                    if repo.priority is not None:
                        f.write(f"priority={repo.priority}\n")

                    # Continuation lines of a multi-value gpgkey= entry are
                    # aligned under the first value.
                    for i, url in enumerate(repo.gpgurls):
                        f.write("gpgkey=" if i == 0 else len("gpgkey=") * " ")
                        f.write(f"{url}\n")

                    f.write("\n")

        setup_rpm(context)

    @classmethod
    def cmd(cls, context: Context) -> list[PathString]:
        """Build the zypper command line shared by all invocations."""
        return [
            "env",
            "ZYPP_CONF=/etc/zypp/zypp.conf",
            "HOME=/",  # Make sure rpm doesn't pick up per-user configuration.
            "zypper",
            f"--installroot={context.root}",
            "--cache-dir=/var/cache/zypp",
            "--gpg-auto-import-keys" if context.config.repository_key_check else "--no-gpg-checks",
            "--non-interactive",
        ]

    @classmethod
    def invoke(
        cls,
        context: Context,
        operation: str,
        packages: Sequence[str] = (),
        *,
        options: Sequence[str] = (),
        apivfs: bool = True,
    ) -> None:
        """Run a zypper operation against the image root inside the sandbox."""
        with finalize_ephemeral_source_mounts(context.config) as sources:
            run(
                cls.cmd(context) + [operation, *options, *sort_packages(packages)],
                sandbox=(
                    context.sandbox(
                        network=True,
                        options=[
                            "--bind", context.root, context.root,
                            *finalize_package_manager_mounts(context),
                            *sources,
                            "--chdir", "/work/src",
                        ],
                    ) + (apivfs_cmd(context.root) if apivfs else [])
                ),
                env=context.config.environment,
            )

        fixup_rpmdb_location(context)

    @classmethod
    def createrepo(cls, context: Context) -> None:
        """Regenerate rpm repository metadata for the local package directory."""
        run(["createrepo_c", context.packages],
            sandbox=context.sandbox(options=["--bind", context.packages, context.packages]))

    @classmethod
    def localrepo(cls) -> RpmRepository:
        """Return the repository definition for the locally built packages."""
        return RpmRepository(
            id="mkosi-packages",
            url="baseurl=file:///work/packages",
            gpgcheck=False,
            gpgurls=(),
            priority=50,
        )