git.ipfire.org Git - thirdparty/mkosi.git/commitdiff
Introduce PackageManager interface
author Daan De Meyer <daan.j.demeyer@gmail.com>
Tue, 30 Jan 2024 14:29:18 +0000 (15:29 +0100)
committer Daan De Meyer <daan.j.demeyer@gmail.com>
Wed, 31 Jan 2024 13:42:30 +0000 (14:42 +0100)
Let's start introducing a common interface for package manager
implementations. This will allow us to slowly get rid of the
functions that branch on every package manager because we don't
know which one is in use. For example, we currently register
scripts for each package manager individually, which a common
package manager interface lets us get rid of.

This also allows for more intuitive naming of package manager functions:
instead of invoke_dnf() we can now write Dnf.invoke().
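
To show the resulting shape at a glance, here is a minimal, hedged sketch of the pattern (simplified stand-in types, not the real mkosi classes): a distribution installer names its package manager backend, and generic code dispatches through the returned class instead of calling invoke_dnf()/invoke_apt()-style helpers.

# Minimal sketch only; "Config", "Context" and the command lines below are
# illustrative placeholders, not the actual mkosi implementations.
from typing import Sequence


class Config:
    pass


class Context:
    config = Config()


class PackageManager:
    """Common interface implemented by backends such as Apt, Dnf, Pacman, Zypper."""

    @classmethod
    def scripts(cls, context: Context) -> dict[str, list[str]]:
        raise NotImplementedError

    @classmethod
    def invoke(cls, context: Context, operation: str, packages: Sequence[str] = ()) -> None:
        raise NotImplementedError


class Dnf(PackageManager):
    @classmethod
    def scripts(cls, context: Context) -> dict[str, list[str]]:
        return {"dnf": ["dnf"], "rpm": ["rpm"]}

    @classmethod
    def invoke(cls, context: Context, operation: str, packages: Sequence[str] = ()) -> None:
        # Previously spelled invoke_dnf(context, operation, packages).
        print("dnf", operation, *packages)


class FedoraInstaller:
    """Stand-in for a DistributionInstaller: it only has to name its backend."""

    @classmethod
    def package_manager(cls, config: Config) -> type[PackageManager]:
        return Dnf


# Generic code no longer needs to branch on the package manager in use:
context = Context()
pm = FedoraInstaller.package_manager(context.config)
pm.invoke(context, "install", ["systemd"])
print(pm.scripts(context))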

15 files changed:
mkosi/__init__.py
mkosi/distributions/__init__.py
mkosi/distributions/arch.py
mkosi/distributions/centos.py
mkosi/distributions/debian.py
mkosi/distributions/fedora.py
mkosi/distributions/mageia.py
mkosi/distributions/openmandriva.py
mkosi/distributions/opensuse.py
mkosi/distributions/ubuntu.py
mkosi/installer/__init__.py
mkosi/installer/apt.py
mkosi/installer/dnf.py
mkosi/installer/pacman.py
mkosi/installer/zypper.py

index 0c1acc1b9642c917ecce3ef762e6387555ae11bf..fcf6a86d25925b3b56ce9fadde977687795aca3e 100644 (file)
@@ -48,11 +48,7 @@ from mkosi.config import (
 )
 from mkosi.context import Context
 from mkosi.distributions import Distribution
-from mkosi.installer import (
-    clean_package_manager_metadata,
-    finalize_package_manager_mounts,
-    package_manager_scripts,
-)
+from mkosi.installer import clean_package_manager_metadata, finalize_package_manager_mounts
 from mkosi.kmod import gen_required_kernel_modules, process_kernel_modules
 from mkosi.log import ARG_DEBUG, complete_step, die, log_notice, log_step
 from mkosi.manifest import Manifest
@@ -394,7 +390,9 @@ def finalize_host_scripts(
     for binary in ("useradd", "groupadd"):
         if find_binary(binary, root=context.config.tools()):
             scripts[binary] = (binary, "--root", context.root)
-    return finalize_scripts(scripts | dict(helpers) | package_manager_scripts(context))
+    return finalize_scripts(
+        scripts | dict(helpers) | context.config.distribution.package_manager(context.config).scripts(context)
+    )
 
 
 def finalize_chroot_scripts(context: Context) -> contextlib.AbstractContextManager[Path]:
index a1c109d2f25661a70848102f919c7d75456b90b5..80a408f0ab63f1d0ca308e06b995b7a7157f9df5 100644 (file)
@@ -10,8 +10,9 @@ from typing import TYPE_CHECKING, Optional, cast
 from mkosi.util import StrEnum, read_os_release
 
 if TYPE_CHECKING:
-    from mkosi.config import Architecture
+    from mkosi.config import Architecture, Config
     from mkosi.context import Context
+    from mkosi.installer import PackageManager
 
 
 class PackageType(StrEnum):
@@ -27,6 +28,10 @@ class DistributionInstaller:
     def pretty_name(cls) -> str:
         raise NotImplementedError
 
+    @classmethod
+    def package_manager(cls, config: "Config") -> type["PackageManager"]:
+        raise NotImplementedError
+
     @classmethod
     def setup(cls, context: "Context") -> None:
         raise NotImplementedError
@@ -116,6 +121,9 @@ class Distribution(StrEnum):
     def pretty_name(self) -> str:
         return self.installer().pretty_name()
 
+    def package_manager(self, config: "Config") -> type["PackageManager"]:
+        return self.installer().package_manager(config)
+
     def setup(self, context: "Context") -> None:
         return self.installer().setup(context)
 
index c297f1d83dae0f3a71acfb86867e2855e64b1037..e97b4b89b924c8579e83c57911ce863a92f4b990 100644 (file)
@@ -2,16 +2,11 @@
 
 from collections.abc import Iterable, Sequence
 
-from mkosi.config import Architecture
+from mkosi.config import Architecture, Config
 from mkosi.context import Context
 from mkosi.distributions import Distribution, DistributionInstaller, PackageType
-from mkosi.installer.pacman import (
-    PacmanRepository,
-    createrepo_pacman,
-    invoke_pacman,
-    localrepo_pacman,
-    setup_pacman,
-)
+from mkosi.installer import PackageManager
+from mkosi.installer.pacman import Pacman
 from mkosi.log import die
 
 
@@ -36,13 +31,17 @@ class Installer(DistributionInstaller):
     def default_tools_tree_distribution(cls) -> Distribution:
         return Distribution.arch
 
+    @classmethod
+    def package_manager(cls, config: "Config") -> type[PackageManager]:
+        return Pacman
+
     @classmethod
     def createrepo(cls, context: Context) -> None:
-        createrepo_pacman(context)
+        Pacman.createrepo(context)
 
     @classmethod
     def setup(cls, context: Context) -> None:
-        setup_pacman(context, cls.repositories(context))
+        Pacman.setup(context, cls.repositories(context))
 
     @classmethod
     def install(cls, context: Context) -> None:
@@ -50,7 +49,7 @@ class Installer(DistributionInstaller):
 
     @classmethod
     def install_packages(cls, context: Context, packages: Sequence[str], apivfs: bool = True) -> None:
-        invoke_pacman(
+        Pacman.invoke(
             context,
             "--sync",
             ["--refresh", "--needed", "--assume-installed", "initramfs"],
@@ -60,15 +59,15 @@ class Installer(DistributionInstaller):
 
     @classmethod
     def remove_packages(cls, context: Context, packages: Sequence[str]) -> None:
-        invoke_pacman(context, "--remove", ["--nosave", "--recursive"], packages)
+        Pacman.invoke(context, "--remove", ["--nosave", "--recursive"], packages)
 
     @classmethod
-    def repositories(cls, context: Context) -> Iterable[PacmanRepository]:
+    def repositories(cls, context: Context) -> Iterable[Pacman.Repository]:
         if context.config.local_mirror:
-            yield PacmanRepository("core", context.config.local_mirror)
+            yield Pacman.Repository("core", context.config.local_mirror)
         else:
             if context.want_local_repo():
-                yield localrepo_pacman()
+                yield Pacman.localrepo()
 
             if context.config.architecture == Architecture.arm64:
                 url = f"{context.config.mirror or 'http://mirror.archlinuxarm.org'}/$arch/$repo"
@@ -88,7 +87,7 @@ class Installer(DistributionInstaller):
             ] + ["core", "extra"]
 
             for repo in repos:
-                yield PacmanRepository(repo, url)
+                yield Pacman.Repository(repo, url)
 
     @classmethod
     def architecture(cls, arch: Architecture) -> str:
index 7cbbe21de295cb59ba951763bcbe7d966d8762e8..899fdbadffa5c09b7a4bbb7c6897e79f911ed0b8 100644 (file)
@@ -4,7 +4,7 @@ import os
 import shutil
 from collections.abc import Iterable, Sequence
 
-from mkosi.config import Architecture
+from mkosi.config import Architecture, Config
 from mkosi.context import Context
 from mkosi.distributions import (
     Distribution,
@@ -12,7 +12,8 @@ from mkosi.distributions import (
     PackageType,
     join_mirror,
 )
-from mkosi.installer.dnf import createrepo_dnf, invoke_dnf, localrepo_dnf, setup_dnf
+from mkosi.installer import PackageManager
+from mkosi.installer.dnf import Dnf
 from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
 from mkosi.log import complete_step, die
 from mkosi.tree import rmtree
@@ -54,20 +55,24 @@ class Installer(DistributionInstaller):
     def default_tools_tree_distribution(cls) -> Distribution:
         return Distribution.fedora
 
+    @classmethod
+    def package_manager(cls, config: "Config") -> type[PackageManager]:
+        return Dnf
+
     @classmethod
     def grub_prefix(cls) -> str:
         return "grub2"
 
     @classmethod
     def createrepo(cls, context: Context) -> None:
-        createrepo_dnf(context)
+        Dnf.createrepo(context)
 
     @classmethod
     def setup(cls, context: Context) -> None:
         if GenericVersion(context.config.release) <= 7:
             die(f"{cls.pretty_name()} 7 or earlier variants are not supported")
 
-        setup_dnf(context, cls.repositories(context))
+        Dnf.setup(context, cls.repositories(context))
         (context.pkgmngr / "etc/dnf/vars/stream").write_text(f"{context.config.release}-stream\n")
 
     @classmethod
@@ -81,11 +86,11 @@ class Installer(DistributionInstaller):
 
     @classmethod
     def install_packages(cls, context: Context, packages: Sequence[str], apivfs: bool = True) -> None:
-        invoke_dnf(context, "install", packages, apivfs=apivfs)
+        Dnf.invoke(context, "install", packages, apivfs=apivfs)
 
     @classmethod
     def remove_packages(cls, context: Context, packages: Sequence[str]) -> None:
-        invoke_dnf(context, "remove", packages)
+        Dnf.invoke(context, "remove", packages)
 
     @classmethod
     def architecture(cls, arch: Architecture) -> str:
@@ -224,7 +229,7 @@ class Installer(DistributionInstaller):
             return
 
         if context.want_local_repo():
-            yield localrepo_dnf()
+            yield Dnf.localrepo()
 
         yield from cls.repository_variants(context, "BaseOS")
         yield from cls.repository_variants(context, "AppStream")
index 8c4f76b9513a1610ad631107d2d5d939b6f99e31..b3b39b0c494df2e0867f495813a4999e83bc9d07 100644 (file)
@@ -6,10 +6,11 @@ from collections.abc import Iterable, Sequence
 from pathlib import Path
 
 from mkosi.archive import extract_tar
-from mkosi.config import Architecture
+from mkosi.config import Architecture, Config
 from mkosi.context import Context
 from mkosi.distributions import Distribution, DistributionInstaller, PackageType
-from mkosi.installer.apt import AptRepository, createrepo_apt, invoke_apt, localrepo_apt, setup_apt
+from mkosi.installer import PackageManager
+from mkosi.installer.apt import Apt
 from mkosi.log import die
 from mkosi.run import run
 from mkosi.sandbox import finalize_passwd_mounts
@@ -37,13 +38,17 @@ class Installer(DistributionInstaller):
     def default_tools_tree_distribution(cls) -> Distribution:
         return Distribution.debian
 
+    @classmethod
+    def package_manager(cls, config: Config) -> type[PackageManager]:
+        return Apt
+
     @staticmethod
-    def repositories(context: Context, local: bool = True) -> Iterable[AptRepository]:
+    def repositories(context: Context, local: bool = True) -> Iterable[Apt.Repository]:
         types = ("deb", "deb-src")
         components = ("main", *context.config.repositories)
 
         if context.config.local_mirror and local:
-            yield AptRepository(
+            yield Apt.Repository(
                 types=("deb",),
                 url=context.config.local_mirror,
                 suite=context.config.release,
@@ -53,12 +58,12 @@ class Installer(DistributionInstaller):
             return
 
         if context.want_local_repo():
-            yield localrepo_apt(context)
+            yield Apt.localrepo(context)
 
         mirror = context.config.mirror or "http://deb.debian.org/debian"
         signedby = "/usr/share/keyrings/debian-archive-keyring.gpg"
 
-        yield AptRepository(
+        yield Apt.Repository(
             types=types,
             url=mirror,
             suite=context.config.release,
@@ -69,7 +74,7 @@ class Installer(DistributionInstaller):
         # Debug repos are typically not mirrored.
         url = "http://deb.debian.org/debian-debug"
 
-        yield AptRepository(
+        yield Apt.Repository(
             types=types,
             url=url,
             suite=f"{context.config.release}-debug",
@@ -80,7 +85,7 @@ class Installer(DistributionInstaller):
         if context.config.release in ("unstable", "sid"):
             return
 
-        yield AptRepository(
+        yield Apt.Repository(
             types=types,
             url=mirror,
             suite=f"{context.config.release}-updates",
@@ -88,7 +93,7 @@ class Installer(DistributionInstaller):
             signedby=signedby,
         )
 
-        yield AptRepository(
+        yield Apt.Repository(
             types=types,
             # Security updates repos are never mirrored.
             url="http://security.debian.org/debian-security",
@@ -99,11 +104,11 @@ class Installer(DistributionInstaller):
 
     @classmethod
     def setup(cls, context: Context) -> None:
-        setup_apt(context, cls.repositories(context))
+        Apt.setup(context, cls.repositories(context))
 
     @classmethod
     def createrepo(cls, context: Context) -> None:
-        createrepo_apt(context)
+        Apt.createrepo(context)
 
     @classmethod
     def install(cls, context: Context) -> None:
@@ -137,7 +142,7 @@ class Installer(DistributionInstaller):
                 (context.root / d).symlink_to(f"usr/{d}")
                 (context.root / f"usr/{d}").mkdir(parents=True, exist_ok=True)
 
-        invoke_apt(context, "update", apivfs=False)
+        Apt.invoke(context, "update", apivfs=False)
 
         # Next, we invoke apt-get install to download all the essential packages. With DPkg::Pre-Install-Pkgs,
         # we specify a shell command that will receive the list of packages that will be installed on stdin.
@@ -145,7 +150,7 @@ class Installer(DistributionInstaller):
         # all it does is download the essential debs and tell us their full paths in the apt cache without actually
         # installing them.
         with tempfile.NamedTemporaryFile(mode="r") as f:
-            invoke_apt(
+            Apt.invoke(
                 context,
                 "install",
                 [
@@ -196,8 +201,8 @@ class Installer(DistributionInstaller):
         with umask(~0o644):
             policyrcd.write_text("#!/bin/sh\nexit 101\n")
 
-        invoke_apt(context, "update", apivfs=False)
-        invoke_apt(context, "install", packages, apivfs=apivfs)
+        Apt.invoke(context, "update", apivfs=False)
+        Apt.invoke(context, "install", packages, apivfs=apivfs)
         install_apt_sources(context, cls.repositories(context, local=False))
 
         policyrcd.unlink()
@@ -211,7 +216,7 @@ class Installer(DistributionInstaller):
 
     @classmethod
     def remove_packages(cls, context: Context, packages: Sequence[str]) -> None:
-        invoke_apt(context, "purge", packages)
+        Apt.invoke(context, "purge", packages)
 
     @classmethod
     def architecture(cls, arch: Architecture) -> str:
@@ -239,7 +244,7 @@ class Installer(DistributionInstaller):
         return a
 
 
-def install_apt_sources(context: Context, repos: Iterable[AptRepository]) -> None:
+def install_apt_sources(context: Context, repos: Iterable[Apt.Repository]) -> None:
     if not (context.root / "usr/bin/apt").exists():
         return
 
index ba641ba0107bbc49733bc78ba9e9b7b7ae7ea75c..97cfe4f2af3510f8df3e160e1e800f8a8958d7c9 100644 (file)
@@ -2,7 +2,7 @@
 
 from collections.abc import Iterable, Sequence
 
-from mkosi.config import Architecture
+from mkosi.config import Architecture, Config
 from mkosi.context import Context
 from mkosi.distributions import (
     Distribution,
@@ -10,7 +10,8 @@ from mkosi.distributions import (
     PackageType,
     join_mirror,
 )
-from mkosi.installer.dnf import createrepo_dnf, invoke_dnf, localrepo_dnf, setup_dnf
+from mkosi.installer import PackageManager
+from mkosi.installer.dnf import Dnf
 from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
 from mkosi.log import die
 
@@ -40,13 +41,17 @@ class Installer(DistributionInstaller):
     def grub_prefix(cls) -> str:
         return "grub2"
 
+    @classmethod
+    def package_manager(cls, config: Config) -> type[PackageManager]:
+        return Dnf
+
     @classmethod
     def createrepo(cls, context: Context) -> None:
-        createrepo_dnf(context)
+        Dnf.createrepo(context)
 
     @classmethod
     def setup(cls, context: Context) -> None:
-        setup_dnf(context, cls.repositories(context), filelists=False)
+        Dnf.setup(context, cls.repositories(context), filelists=False)
 
     @classmethod
     def install(cls, context: Context) -> None:
@@ -54,11 +59,11 @@ class Installer(DistributionInstaller):
 
     @classmethod
     def install_packages(cls, context: Context, packages: Sequence[str], apivfs: bool = True) -> None:
-        invoke_dnf(context, "install", packages, apivfs=apivfs)
+        Dnf.invoke(context, "install", packages, apivfs=apivfs)
 
     @classmethod
     def remove_packages(cls, context: Context, packages: Sequence[str]) -> None:
-        invoke_dnf(context, "remove", packages)
+        Dnf.invoke(context, "remove", packages)
 
     @classmethod
     def repositories(cls, context: Context) -> Iterable[RpmRepository]:
@@ -74,7 +79,7 @@ class Installer(DistributionInstaller):
             return
 
         if context.want_local_repo():
-            yield localrepo_dnf()
+            yield Dnf.localrepo()
 
         if context.config.release == "eln":
             mirror = context.config.mirror or "https://odcs.fedoraproject.org/composes/production/latest-Fedora-ELN/compose"
index 26bdd54aba66170b1e2203ea05e9424eab921997..ea6e790f5f192c7d467fe7f4fdb08b530654284f 100644 (file)
@@ -6,7 +6,7 @@ from collections.abc import Iterable, Sequence
 from mkosi.config import Architecture
 from mkosi.context import Context
 from mkosi.distributions import Distribution, fedora, join_mirror
-from mkosi.installer.dnf import localrepo_dnf
+from mkosi.installer.dnf import Dnf
 from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
 from mkosi.log import die
 
@@ -52,7 +52,7 @@ class Installer(fedora.Installer):
             return
 
         if context.want_local_repo():
-            yield localrepo_dnf()
+            yield Dnf.localrepo()
 
         if context.config.mirror:
             url = f"baseurl={join_mirror(context.config.mirror, 'distrib/$releasever/$basearch/media/core/')}"
index 0dbf883cc1ffd1876d41d5d6e9d4055bb7f6829b..aba616448103003c9261a0d32eb1e2b876322930 100644 (file)
@@ -6,7 +6,7 @@ from collections.abc import Iterable, Sequence
 from mkosi.config import Architecture
 from mkosi.context import Context
 from mkosi.distributions import Distribution, fedora, join_mirror
-from mkosi.installer.dnf import localrepo_dnf
+from mkosi.installer.dnf import Dnf
 from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
 from mkosi.log import die
 
@@ -58,7 +58,7 @@ class Installer(fedora.Installer):
             return
 
         if context.want_local_repo():
-            yield localrepo_dnf()
+            yield Dnf.localrepo()
 
         url = f"baseurl={join_mirror(mirror, '$releasever/repository/$basearch/main')}"
         yield RpmRepository("main-release", f"{url}/release", gpgurls)
index a6304952e6351920139bc0c98132aca052d70a14..17b3e3b8b04d6818e548f7c271603d56e567a397 100644 (file)
@@ -5,12 +5,13 @@ import xml.etree.ElementTree as ElementTree
 from collections.abc import Iterable, Sequence
 from pathlib import Path
 
-from mkosi.config import Architecture
+from mkosi.config import Architecture, Config
 from mkosi.context import Context
 from mkosi.distributions import Distribution, DistributionInstaller, PackageType
-from mkosi.installer.dnf import createrepo_dnf, invoke_dnf, localrepo_dnf, setup_dnf
+from mkosi.installer import PackageManager
+from mkosi.installer.dnf import Dnf
 from mkosi.installer.rpm import RpmRepository, find_rpm_gpgkey
-from mkosi.installer.zypper import createrepo_zypper, invoke_zypper, localrepo_zypper, setup_zypper
+from mkosi.installer.zypper import Zypper
 from mkosi.log import die
 from mkosi.run import find_binary, run
 from mkosi.sandbox import finalize_crypto_mounts
@@ -41,20 +42,27 @@ class Installer(DistributionInstaller):
     def grub_prefix(cls) -> str:
         return "grub2"
 
+    @classmethod
+    def package_manager(cls, config: Config) -> type[PackageManager]:
+        if find_binary("zypper", root=config.tools()):
+            return Zypper
+        else:
+            return Dnf
+
     @classmethod
     def createrepo(cls, context: Context) -> None:
         if find_binary("zypper", root=context.config.tools()):
-            createrepo_zypper(context)
+            Zypper.createrepo(context)
         else:
-            createrepo_dnf(context)
+            Dnf.createrepo(context)
 
     @classmethod
     def setup(cls, context: Context) -> None:
         zypper = find_binary("zypper", root=context.config.tools())
         if zypper:
-            setup_zypper(context, cls.repositories(context))
+            Zypper.setup(context, cls.repositories(context))
         else:
-            setup_dnf(context, cls.repositories(context))
+            Dnf.setup(context, cls.repositories(context))
 
     @classmethod
     def install(cls, context: Context) -> None:
@@ -67,23 +75,23 @@ class Installer(DistributionInstaller):
                 "--download", "in-advance",
                 "--recommends" if context.config.with_recommends else "--no-recommends",
             ]
-            invoke_zypper(context, "install", packages, options=options, apivfs=apivfs)
+            Zypper.invoke(context, "install", packages, options=options, apivfs=apivfs)
         else:
-            invoke_dnf(context, "install", packages, apivfs=apivfs)
+            Dnf.invoke(context, "install", packages, apivfs=apivfs)
 
     @classmethod
     def remove_packages(cls, context: Context, packages: Sequence[str]) -> None:
         if find_binary("zypper", root=context.config.tools()):
-            invoke_zypper(context, "remove", packages, options=["--clean-deps"])
+            Zypper.invoke(context, "remove", packages, options=["--clean-deps"])
         else:
-            invoke_dnf(context, "remove", packages)
+            Dnf.invoke(context, "remove", packages)
 
     @classmethod
     def repositories(cls, context: Context) -> Iterable[RpmRepository]:
         zypper = find_binary("zypper", root=context.config.tools())
 
         if context.want_local_repo():
-            yield localrepo_zypper() if zypper else localrepo_dnf()
+            yield Zypper.localrepo() if zypper else Dnf.localrepo()
 
         release = context.config.release
         if release == "leap":
index 8f1bb593f2a4ed10f38461a41d4eec1a0cde04b8..8c7d1e6a321d630c777c2d1004a0cf6ae10e8cc5 100644 (file)
@@ -5,7 +5,7 @@ from collections.abc import Iterable
 from mkosi.config import Architecture
 from mkosi.context import Context
 from mkosi.distributions import debian
-from mkosi.installer.apt import AptRepository, localrepo_apt
+from mkosi.installer.apt import Apt
 
 
 class Installer(debian.Installer):
@@ -18,7 +18,7 @@ class Installer(debian.Installer):
         return "lunar"
 
     @staticmethod
-    def repositories(context: Context, local: bool = True) -> Iterable[AptRepository]:
+    def repositories(context: Context, local: bool = True) -> Iterable[Apt.Repository]:
         types = ("deb", "deb-src")
 
         # From kinetic onwards, the usr-is-merged package is available in universe and is required by
@@ -27,7 +27,7 @@ class Installer(debian.Installer):
         components = (*components, *context.config.repositories)
 
         if context.config.local_mirror and local:
-            yield AptRepository(
+            yield Apt.Repository(
                 types=("deb",),
                 url=context.config.local_mirror,
                 suite=context.config.release,
@@ -37,7 +37,7 @@ class Installer(debian.Installer):
             return
 
         if context.want_local_repo():
-            yield localrepo_apt(context)
+            yield Apt.localrepo(context)
 
         if context.config.architecture in (Architecture.x86, Architecture.x86_64):
             mirror = context.config.mirror or "http://archive.ubuntu.com/ubuntu"
@@ -46,7 +46,7 @@ class Installer(debian.Installer):
 
         signedby = "/usr/share/keyrings/ubuntu-archive-keyring.gpg"
 
-        yield AptRepository(
+        yield Apt.Repository(
             types=types,
             url=mirror,
             suite=context.config.release,
@@ -54,7 +54,7 @@ class Installer(debian.Installer):
             signedby=signedby,
         )
 
-        yield AptRepository(
+        yield Apt.Repository(
             types=types,
             url=mirror,
             suite=f"{context.config.release}-updates",
@@ -68,7 +68,7 @@ class Installer(debian.Installer):
         else:
             mirror = "http://ports.ubuntu.com/"
 
-        yield AptRepository(
+        yield Apt.Repository(
             types=types,
             url=mirror,
             suite=f"{context.config.release}-security",
index a19921c9062179d4c34a49d3fbb0d014b58a66ab..2505376b915cfe9907ea7f7019608d8801b99b62 100644 (file)
@@ -5,12 +5,18 @@ from pathlib import Path
 from mkosi.config import ConfigFeature
 from mkosi.context import Context
 from mkosi.run import find_binary
-from mkosi.sandbox import apivfs_cmd, finalize_crypto_mounts
+from mkosi.sandbox import finalize_crypto_mounts
 from mkosi.tree import rmtree
 from mkosi.types import PathString
 from mkosi.util import flatten
 
 
+class PackageManager:
+    @classmethod
+    def scripts(cls, context: Context) -> dict[str, list[PathString]]:
+        raise NotImplementedError
+
+
 def clean_package_manager_metadata(context: Context) -> None:
     """
     Remove package manager metadata
@@ -33,35 +39,8 @@ def clean_package_manager_metadata(context: Context) -> None:
                    sandbox=context.sandbox(options=["--bind", context.root, context.root]))
 
 
-def package_manager_scripts(context: Context) -> dict[str, list[PathString]]:
-    from mkosi.installer.apt import apt_cmd
-    from mkosi.installer.dnf import dnf_cmd
-    from mkosi.installer.pacman import pacman_cmd
-    from mkosi.installer.rpm import rpm_cmd
-    from mkosi.installer.zypper import zypper_cmd
-
-    return {
-        "pacman": apivfs_cmd(context.root) + pacman_cmd(context),
-        "zypper": apivfs_cmd(context.root) + zypper_cmd(context),
-        "dnf"   : apivfs_cmd(context.root) + dnf_cmd(context),
-        "rpm"   : apivfs_cmd(context.root) + rpm_cmd(context),
-    } | {
-        command: apivfs_cmd(context.root) + apt_cmd(context, command) for command in (
-            "apt",
-            "apt-cache",
-            "apt-cdrom",
-            "apt-config",
-            "apt-extracttemplates",
-            "apt-get",
-            "apt-key",
-            "apt-mark",
-            "apt-sortpkgs",
-        )
-    }
-
-
 def finalize_package_manager_mounts(context: Context) -> list[PathString]:
-    from mkosi.installer.dnf import dnf_subdir
+    from mkosi.installer.dnf import Dnf
 
     mounts: list[PathString] = [
         *(["--ro-bind", m, m] if (m := context.config.local_mirror) else []),
@@ -74,8 +53,8 @@ def finalize_package_manager_mounts(context: Context) -> list[PathString]:
         for d in (
             "lib/apt",
             "cache/apt",
-            f"cache/{dnf_subdir(context)}",
-            f"lib/{dnf_subdir(context)}",
+            f"cache/{Dnf.subdir(context.config)}",
+            f"lib/{Dnf.subdir(context.config)}",
             "cache/pacman/pkg",
             "cache/zypp",
         )
index c5005620eb89e90bd0dab688191ef768cb7028f3..97af70f4a3b9b48b3525c57aedb25e674c5a5113 100644 (file)
@@ -4,7 +4,7 @@ from collections.abc import Iterable, Sequence
 from typing import NamedTuple, Optional
 
 from mkosi.context import Context
-from mkosi.installer import finalize_package_manager_mounts
+from mkosi.installer import PackageManager, finalize_package_manager_mounts
 from mkosi.mounts import finalize_ephemeral_source_mounts
 from mkosi.run import find_binary, run
 from mkosi.sandbox import apivfs_cmd
@@ -12,153 +12,173 @@ from mkosi.types import PathString
 from mkosi.util import sort_packages, umask
 
 
-class AptRepository(NamedTuple):
-    types: tuple[str, ...]
-    url: str
-    suite: str
-    components: tuple[str, ...]
-    signedby: Optional[str]
+class Apt(PackageManager):
+    class Repository(NamedTuple):
+        types: tuple[str, ...]
+        url: str
+        suite: str
+        components: tuple[str, ...]
+        signedby: Optional[str]
 
-    def __str__(self) -> str:
-        return textwrap.dedent(
-            f"""\
-            Types: {" ".join(self.types)}
-            URIs: {self.url}
-            Suites: {self.suite}
-            Components: {" ".join(self.components)}
-            {"Signed-By" if self.signedby else "Trusted"}: {self.signedby or "yes"}
+        def __str__(self) -> str:
+            return textwrap.dedent(
+                f"""\
+                Types: {" ".join(self.types)}
+                URIs: {self.url}
+                Suites: {self.suite}
+                Components: {" ".join(self.components)}
+                {"Signed-By" if self.signedby else "Trusted"}: {self.signedby or "yes"}
 
-            """
-        )
-
-
-def setup_apt(context: Context, repos: Iterable[AptRepository]) -> None:
-    (context.pkgmngr / "etc/apt").mkdir(exist_ok=True, parents=True)
-    (context.pkgmngr / "etc/apt/apt.conf.d").mkdir(exist_ok=True, parents=True)
-    (context.pkgmngr / "etc/apt/preferences.d").mkdir(exist_ok=True, parents=True)
-    (context.pkgmngr / "etc/apt/sources.list.d").mkdir(exist_ok=True, parents=True)
-
-    # TODO: Drop once apt 2.5.4 is widely available.
-    with umask(~0o755):
-        (context.root / "var/lib/dpkg").mkdir(parents=True, exist_ok=True)
-        (context.root / "var/lib/dpkg/status").touch()
-
-    (context.cache_dir / "lib/apt").mkdir(exist_ok=True, parents=True)
-    (context.cache_dir / "cache/apt").mkdir(exist_ok=True, parents=True)
-
-    # We have a special apt.conf outside of /etc/apt that only configures "Dir::Etc" that we pass to APT_CONFIG to tell
-    # apt it should read config files from /etc/apt in case this is overridden by distributions. This is required
-    # because apt parses CLI configuration options after parsing its configuration files and as such we can't use CLI
-    # options to tell apt where to look for configuration files.
-    config = context.pkgmngr / "etc/apt.conf"
-    if not config.exists():
-        config.write_text(
-            textwrap.dedent(
-                """\
-                Dir::Etc "etc/apt";
                 """
             )
-        )
 
-    sources = context.pkgmngr / "etc/apt/sources.list.d/mkosi.sources"
-    if not sources.exists():
-        with sources.open("w") as f:
-            for repo in repos:
-                f.write(str(repo))
-
-
-def apt_cmd(context: Context, command: str) -> list[PathString]:
-    debarch = context.config.distribution.architecture(context.config.architecture)
-
-    cmdline: list[PathString] = [
-        "env",
-        "APT_CONFIG=/etc/apt.conf",
-        "DEBIAN_FRONTEND=noninteractive",
-        "DEBCONF_INTERACTIVE_SEEN=true",
-        "INITRD=No",
-        command,
-        "-o", f"APT::Architecture={debarch}",
-        "-o", f"APT::Architectures={debarch}",
-        "-o", f"APT::Install-Recommends={str(context.config.with_recommends).lower()}",
-        "-o", "APT::Immediate-Configure=off",
-        "-o", "APT::Get::Assume-Yes=true",
-        "-o", "APT::Get::AutomaticRemove=true",
-        "-o", "APT::Get::Allow-Change-Held-Packages=true",
-        "-o", "APT::Get::Allow-Remove-Essential=true",
-        "-o", "APT::Sandbox::User=root",
-        "-o", "Dir::Cache=/var/cache/apt",
-        "-o", "Dir::State=/var/lib/apt",
-        "-o", f"Dir::State::Status={context.root / 'var/lib/dpkg/status'}",
-        "-o", f"Dir::Log={context.workspace}",
-        "-o", f"Dir::Bin::DPkg={find_binary('dpkg', root=context.config.tools())}",
-        "-o", "Debug::NoLocking=true",
-        "-o", f"DPkg::Options::=--root={context.root}",
-        "-o", "DPkg::Options::=--force-unsafe-io",
-        "-o", "DPkg::Options::=--force-architecture",
-        "-o", "DPkg::Options::=--force-depends",
-        "-o", "DPkg::Options::=--no-debsig",
-        "-o", "DPkg::Use-Pty=false",
-        "-o", "DPkg::Install::Recursive::Minimum=1000",
-        "-o", "pkgCacheGen::ForceEssential=,",
-    ]
-
-    if not context.config.repository_key_check:
-        cmdline += [
-            "-o", "Acquire::AllowInsecureRepositories=true",
-            "-o", "Acquire::AllowDowngradeToInsecureRepositories=true",
-            "-o", "APT::Get::AllowUnauthenticated=true",
-        ]
+    @classmethod
+    def scripts(cls, context: Context) -> dict[str, list[PathString]]:
+        return {
+            command: apivfs_cmd(context.root) + cls.cmd(context, command) for command in (
+                "apt",
+                "apt-cache",
+                "apt-cdrom",
+                "apt-config",
+                "apt-extracttemplates",
+                "apt-get",
+                "apt-key",
+                "apt-mark",
+                "apt-sortpkgs",
+            )
+        }
+
+    @classmethod
+    def setup(cls, context: Context, repos: Iterable[Repository]) -> None:
+        (context.pkgmngr / "etc/apt").mkdir(exist_ok=True, parents=True)
+        (context.pkgmngr / "etc/apt/apt.conf.d").mkdir(exist_ok=True, parents=True)
+        (context.pkgmngr / "etc/apt/preferences.d").mkdir(exist_ok=True, parents=True)
+        (context.pkgmngr / "etc/apt/sources.list.d").mkdir(exist_ok=True, parents=True)
+
+        # TODO: Drop once apt 2.5.4 is widely available.
+        with umask(~0o755):
+            (context.root / "var/lib/dpkg").mkdir(parents=True, exist_ok=True)
+            (context.root / "var/lib/dpkg/status").touch()
+
+        (context.cache_dir / "lib/apt").mkdir(exist_ok=True, parents=True)
+        (context.cache_dir / "cache/apt").mkdir(exist_ok=True, parents=True)
+
+        # We have a special apt.conf outside of pkgmngr dir that only configures "Dir::Etc" that we pass to APT_CONFIG
+        # to tell apt it should read config files from /etc/apt in case this is overridden by distributions. This is
+        # required because apt parses CLI configuration options after parsing its configuration files and as such we
+        # can't use CLI options to tell apt where to look for configuration files.
+        config = context.pkgmngr / "etc/apt.conf"
+        if not config.exists():
+            config.write_text(
+                textwrap.dedent(
+                    """\
+                    Dir::Etc "etc/apt";
+                    """
+                )
+            )
 
-    if not context.config.with_docs:
-        cmdline += [
-            "-o", "DPkg::Options::=--path-exclude=/usr/share/doc/*",
-            "-o", "DPkg::Options::=--path-include=/usr/share/doc/*/copyright",
-            "-o", "DPkg::Options::=--path-exclude=/usr/share/man/*",
-            "-o", "DPkg::Options::=--path-exclude=/usr/share/groff/*",
-            "-o", "DPkg::Options::=--path-exclude=/usr/share/info/*",
+        sources = context.pkgmngr / "etc/apt/sources.list.d/mkosi.sources"
+        if not sources.exists():
+            with sources.open("w") as f:
+                for repo in repos:
+                    f.write(str(repo))
+
+    @classmethod
+    def cmd(cls, context: Context, command: str) -> list[PathString]:
+        debarch = context.config.distribution.architecture(context.config.architecture)
+
+        cmdline: list[PathString] = [
+            "env",
+            "APT_CONFIG=/etc/apt.conf",
+            "DEBIAN_FRONTEND=noninteractive",
+            "DEBCONF_INTERACTIVE_SEEN=true",
+            "INITRD=No",
+            command,
+            "-o", f"APT::Architecture={debarch}",
+            "-o", f"APT::Architectures={debarch}",
+            "-o", f"APT::Install-Recommends={str(context.config.with_recommends).lower()}",
+            "-o", "APT::Immediate-Configure=off",
+            "-o", "APT::Get::Assume-Yes=true",
+            "-o", "APT::Get::AutomaticRemove=true",
+            "-o", "APT::Get::Allow-Change-Held-Packages=true",
+            "-o", "APT::Get::Allow-Remove-Essential=true",
+            "-o", "APT::Sandbox::User=root",
+            "-o", "Dir::Cache=/var/cache/apt",
+            "-o", "Dir::State=/var/lib/apt",
+            "-o", f"Dir::State::Status={context.root / 'var/lib/dpkg/status'}",
+            "-o", f"Dir::Log={context.workspace}",
+            "-o", f"Dir::Bin::DPkg={find_binary('dpkg', root=context.config.tools())}",
+            "-o", "Debug::NoLocking=true",
+            "-o", f"DPkg::Options::=--root={context.root}",
+            "-o", "DPkg::Options::=--force-unsafe-io",
+            "-o", "DPkg::Options::=--force-architecture",
+            "-o", "DPkg::Options::=--force-depends",
+            "-o", "DPkg::Options::=--no-debsig",
+            "-o", "DPkg::Use-Pty=false",
+            "-o", "DPkg::Install::Recursive::Minimum=1000",
+            "-o", "pkgCacheGen::ForceEssential=,",
         ]
 
-    return cmdline
-
-
-def invoke_apt(
-    context: Context,
-    operation: str,
-    packages: Sequence[str] = (),
-    *,
-    apivfs: bool = True,
-    mounts: Sequence[PathString] = (),
-) -> None:
-    with finalize_ephemeral_source_mounts(context.config) as sources:
-        run(
-            apt_cmd(context, "apt-get") + [operation, *sort_packages(packages)],
-            sandbox=(
-                context.sandbox(
-                    network=True,
-                    options=[
-                        "--bind", context.root, context.root,
-                        *finalize_package_manager_mounts(context),
-                        *sources,
-                        *mounts,
-                        "--chdir", "/work/src",
-                    ],
-                ) + (apivfs_cmd(context.root) if apivfs else [])
-            ),
-            env=context.config.environment,
-        )
+        if not context.config.repository_key_check:
+            cmdline += [
+                "-o", "Acquire::AllowInsecureRepositories=true",
+                "-o", "Acquire::AllowDowngradeToInsecureRepositories=true",
+                "-o", "APT::Get::AllowUnauthenticated=true",
+            ]
+
+        if not context.config.with_docs:
+            cmdline += [
+                "-o", "DPkg::Options::=--path-exclude=/usr/share/doc/*",
+                "-o", "DPkg::Options::=--path-include=/usr/share/doc/*/copyright",
+                "-o", "DPkg::Options::=--path-exclude=/usr/share/man/*",
+                "-o", "DPkg::Options::=--path-exclude=/usr/share/groff/*",
+                "-o", "DPkg::Options::=--path-exclude=/usr/share/info/*",
+            ]
+
+        return cmdline
+
+    @classmethod
+    def invoke(
+        cls,
+        context: Context,
+        operation: str,
+        packages: Sequence[str] = (),
+        *,
+        apivfs: bool = True,
+        mounts: Sequence[PathString] = (),
+    ) -> None:
+        with finalize_ephemeral_source_mounts(context.config) as sources:
+            run(
+                cls.cmd(context, "apt-get") + [operation, *sort_packages(packages)],
+                sandbox=(
+                    context.sandbox(
+                        network=True,
+                        options=[
+                            "--bind", context.root, context.root,
+                            *finalize_package_manager_mounts(context),
+                            *sources,
+                            *mounts,
+                            "--chdir", "/work/src",
+                        ],
+                    ) + (apivfs_cmd(context.root) if apivfs else [])
+                ),
+                env=context.config.environment,
+            )
 
 
-def createrepo_apt(context: Context) -> None:
-    with (context.packages / "Packages").open("wb") as f:
-        run(["dpkg-scanpackages", context.packages],
-            stdout=f, sandbox=context.sandbox(options=["--ro-bind", context.packages, context.packages]))
+    @classmethod
+    def createrepo(cls, context: Context) -> None:
+        with (context.packages / "Packages").open("wb") as f:
+            run(["dpkg-scanpackages", context.packages],
+                stdout=f, sandbox=context.sandbox(options=["--ro-bind", context.packages, context.packages]))
 
 
-def localrepo_apt(context: Context) -> AptRepository:
-    return AptRepository(
-        types=("deb",),
-        url="file:///work/packages",
-        suite=context.config.release,
-        components=("main",),
-        signedby=None,
-    )
+    @classmethod
+    def localrepo(cls, context: Context) -> Repository:
+        return cls.Repository(
+            types=("deb",),
+            url="file:///work/packages",
+            suite=context.config.release,
+            components=("main",),
+            signedby=None,
+        )
index b22778bba5c7df969e347a7af5ec34ad41d8f6fe..6a7ca56ae15fe98790c2a2d8227a67e9859cee18 100644 (file)
@@ -3,9 +3,10 @@ import textwrap
 from collections.abc import Iterable
 from pathlib import Path
 
+from mkosi.config import Config
 from mkosi.context import Context
-from mkosi.installer import finalize_package_manager_mounts
-from mkosi.installer.rpm import RpmRepository, fixup_rpmdb_location, setup_rpm
+from mkosi.installer import PackageManager, finalize_package_manager_mounts
+from mkosi.installer.rpm import RpmRepository, fixup_rpmdb_location, rpm_cmd, setup_rpm
 from mkosi.log import ARG_DEBUG
 from mkosi.mounts import finalize_ephemeral_source_mounts
 from mkosi.run import find_binary, run
@@ -14,166 +15,174 @@ from mkosi.types import PathString
 from mkosi.util import sort_packages
 
 
-def dnf_executable(context: Context) -> str:
-    # Allow the user to override autodetection with an environment variable
-    dnf = context.config.environment.get("MKOSI_DNF")
-    root = context.config.tools()
-
-    return Path(dnf or find_binary("dnf5", root=root) or find_binary("dnf", root=root) or "yum").name
-
-
-def dnf_subdir(context: Context) -> str:
-    dnf = dnf_executable(context)
-    return "libdnf5" if dnf.endswith("dnf5") else "dnf"
-
-
-def setup_dnf(context: Context, repositories: Iterable[RpmRepository], filelists: bool = True) -> None:
-    (context.pkgmngr / "etc/dnf/vars").mkdir(exist_ok=True, parents=True)
-    (context.pkgmngr / "etc/yum.repos.d").mkdir(exist_ok=True, parents=True)
-
-    (context.cache_dir / "cache" / dnf_subdir(context)).mkdir(exist_ok=True, parents=True)
-    (context.cache_dir / "lib" / dnf_subdir(context)).mkdir(exist_ok=True, parents=True)
-
-    config = context.pkgmngr / "etc/dnf/dnf.conf"
-
-    if not config.exists():
-        config.parent.mkdir(exist_ok=True, parents=True)
-        with config.open("w") as f:
-            # Make sure we download filelists so all dependencies can be resolved.
-            # See https://bugzilla.redhat.com/show_bug.cgi?id=2180842
-            if dnf_executable(context).endswith("dnf5") and filelists:
-                f.write("[main]\noptional_metadata_types=filelists\n")
-
-    repofile = context.pkgmngr / "etc/yum.repos.d/mkosi.repo"
-    if not repofile.exists():
-        repofile.parent.mkdir(exist_ok=True, parents=True)
-        with repofile.open("w") as f:
-            for repo in repositories:
-                f.write(
-                    textwrap.dedent(
-                        f"""\
-                        [{repo.id}]
-                        name={repo.id}
-                        {repo.url}
-                        gpgcheck={int(repo.gpgcheck)}
-                        enabled={int(repo.enabled)}
-                        """
+class Dnf(PackageManager):
+    @classmethod
+    def executable(cls, config: Config) -> str:
+        # Allow the user to override autodetection with an environment variable
+        dnf = config.environment.get("MKOSI_DNF")
+        root = config.tools()
+
+        return Path(dnf or find_binary("dnf5", root=root) or find_binary("dnf", root=root) or "yum").name
+
+    @classmethod
+    def subdir(cls, config: Config) -> Path:
+        return Path("libdnf5" if cls.executable(config) == "dnf5" else "dnf")
+
+    @classmethod
+    def scripts(cls, context: Context) -> dict[str, list[PathString]]:
+        return {
+            "dnf": apivfs_cmd(context.root) + cls.cmd(context),
+            "rpm": apivfs_cmd(context.root) + rpm_cmd(context),
+        }
+
+    @classmethod
+    def setup(cls, context: Context, repositories: Iterable[RpmRepository], filelists: bool = True) -> None:
+        (context.pkgmngr / "etc/dnf/vars").mkdir(exist_ok=True, parents=True)
+        (context.pkgmngr / "etc/yum.repos.d").mkdir(exist_ok=True, parents=True)
+
+        (context.cache_dir / "cache" / cls.subdir(context.config)).mkdir(exist_ok=True, parents=True)
+        (context.cache_dir / "lib" / cls.subdir(context.config)).mkdir(exist_ok=True, parents=True)
+
+        config = context.pkgmngr / "etc/dnf/dnf.conf"
+
+        if not config.exists():
+            config.parent.mkdir(exist_ok=True, parents=True)
+            with config.open("w") as f:
+                # Make sure we download filelists so all dependencies can be resolved.
+                # See https://bugzilla.redhat.com/show_bug.cgi?id=2180842
+                if cls.executable(context.config).endswith("dnf5") and filelists:
+                    f.write("[main]\noptional_metadata_types=filelists\n")
+
+        repofile = context.pkgmngr / "etc/yum.repos.d/mkosi.repo"
+        if not repofile.exists():
+            repofile.parent.mkdir(exist_ok=True, parents=True)
+            with repofile.open("w") as f:
+                for repo in repositories:
+                    f.write(
+                        textwrap.dedent(
+                            f"""\
+                            [{repo.id}]
+                            name={repo.id}
+                            {repo.url}
+                            gpgcheck={int(repo.gpgcheck)}
+                            enabled={int(repo.enabled)}
+                            """
+                        )
                     )
-                )
-
-                if repo.metadata_expire is not None:
-                    f.write(f"metadata_expire={repo.metadata_expire}\n")
-                if repo.priority is not None:
-                    f.write(f"priority={repo.priority}\n")
-
-                if repo.sslcacert:
-                    f.write(f"sslcacert={repo.sslcacert}\n")
-                if repo.sslclientcert:
-                    f.write(f"sslclientcert={repo.sslclientcert}\n")
-                if repo.sslclientkey:
-                    f.write(f"sslclientkey={repo.sslclientkey}\n")
-
-                for i, url in enumerate(repo.gpgurls):
-                    f.write("gpgkey=" if i == 0 else len("gpgkey=") * " ")
-                    f.write(f"{url}\n")
-
-                f.write("\n")
-
-    setup_rpm(context)
-
-
-def dnf_cmd(context: Context) -> list[PathString]:
-    dnf = dnf_executable(context)
-
-    cmdline: list[PathString] = [
-        "env",
-        "HOME=/", # Make sure rpm doesn't pick up ~/.rpmmacros and ~/.rpmrc.
-        dnf,
-        "--assumeyes",
-        "--best",
-        f"--releasever={context.config.release}",
-        f"--installroot={context.root}",
-        "--setopt=keepcache=1",
-        "--setopt=logdir=/var/log",
-        f"--setopt=cachedir=/var/cache/{dnf_subdir(context)}",
-        f"--setopt=persistdir=/var/lib/{dnf_subdir(context)}",
-        f"--setopt=install_weak_deps={int(context.config.with_recommends)}",
-        "--setopt=check_config_file_age=0",
-        "--disable-plugin=*" if dnf.endswith("dnf5") else "--disableplugin=*",
-        "--enable-plugin=builddep" if dnf.endswith("dnf5") else "--enableplugin=builddep",
-    ]
-
-    if ARG_DEBUG.get():
-        cmdline += ["--setopt=debuglevel=10"]
-
-    if not context.config.repository_key_check:
-        cmdline += ["--nogpgcheck"]
-
-    if context.config.repositories:
-        opt = "--enable-repo" if dnf.endswith("dnf5") else "--enablerepo"
-        cmdline += [f"{opt}={repo}" for repo in context.config.repositories]
-
-    # TODO: this breaks with a local, offline repository created with 'createrepo'
-    if context.config.cache_only and not context.config.local_mirror:
-        cmdline += ["--cacheonly"]
-
-    if not context.config.architecture.is_native():
-        cmdline += [f"--forcearch={context.config.distribution.architecture(context.config.architecture)}"]
-
-    if not context.config.with_docs:
-        cmdline += ["--no-docs" if dnf.endswith("dnf5") else "--nodocs"]
-
-    if dnf.endswith("dnf5"):
-        cmdline += ["--use-host-config"]
-    else:
-        cmdline += [
-            "--config=/etc/dnf/dnf.conf",
-            "--setopt=reposdir=/etc/yum.repos.d",
-            "--setopt=varsdir=/etc/dnf/vars",
+
+                    if repo.metadata_expire is not None:
+                        f.write(f"metadata_expire={repo.metadata_expire}\n")
+                    if repo.priority is not None:
+                        f.write(f"priority={repo.priority}\n")
+
+                    if repo.sslcacert:
+                        f.write(f"sslcacert={repo.sslcacert}\n")
+                    if repo.sslclientcert:
+                        f.write(f"sslclientcert={repo.sslclientcert}\n")
+                    if repo.sslclientkey:
+                        f.write(f"sslclientkey={repo.sslclientkey}\n")
+
+                    for i, url in enumerate(repo.gpgurls):
+                        f.write("gpgkey=" if i == 0 else len("gpgkey=") * " ")
+                        f.write(f"{url}\n")
+
+                    f.write("\n")
+
+        setup_rpm(context)
+
+    @classmethod
+    def cmd(cls, context: Context) -> list[PathString]:
+        dnf = cls.executable(context.config)
+
+        cmdline: list[PathString] = [
+            "env",
+            "HOME=/", # Make sure rpm doesn't pick up ~/.rpmmacros and ~/.rpmrc.
+            dnf,
+            "--assumeyes",
+            "--best",
+            f"--releasever={context.config.release}",
+            f"--installroot={context.root}",
+            "--setopt=keepcache=1",
+            "--setopt=logdir=/var/log",
+            f"--setopt=cachedir=/var/cache/{cls.subdir(context.config)}",
+            f"--setopt=persistdir=/var/lib/{cls.subdir(context.config)}",
+            f"--setopt=install_weak_deps={int(context.config.with_recommends)}",
+            "--setopt=check_config_file_age=0",
+            "--disable-plugin=*" if dnf.endswith("dnf5") else "--disableplugin=*",
+            "--enable-plugin=builddep" if dnf.endswith("dnf5") else "--enableplugin=builddep",
         ]
 
-    return cmdline
-
-
-def invoke_dnf(context: Context, operation: str, packages: Iterable[str], apivfs: bool = True) -> None:
-    with finalize_ephemeral_source_mounts(context.config) as sources:
-        run(
-            dnf_cmd(context) + [operation, *sort_packages(packages)],
-            sandbox=(
-                context.sandbox(
-                    network=True,
-                    options=[
-                        "--bind", context.root, context.root,
-                        *finalize_package_manager_mounts(context),
-                        *sources,
-                        "--chdir", "/work/src",
-                    ],
-                ) + (apivfs_cmd(context.root) if apivfs else [])
-            ),
-            env=context.config.environment,
+        if ARG_DEBUG.get():
+            cmdline += ["--setopt=debuglevel=10"]
+
+        if not context.config.repository_key_check:
+            cmdline += ["--nogpgcheck"]
+
+        if context.config.repositories:
+            opt = "--enable-repo" if dnf.endswith("dnf5") else "--enablerepo"
+            cmdline += [f"{opt}={repo}" for repo in context.config.repositories]
+
+        # TODO: this breaks with a local, offline repository created with 'createrepo'
+        if context.config.cache_only and not context.config.local_mirror:
+            cmdline += ["--cacheonly"]
+
+        if not context.config.architecture.is_native():
+            cmdline += [f"--forcearch={context.config.distribution.architecture(context.config.architecture)}"]
+
+        if not context.config.with_docs:
+            cmdline += ["--no-docs" if dnf.endswith("dnf5") else "--nodocs"]
+
+        if dnf.endswith("dnf5"):
+            cmdline += ["--use-host-config"]
+        else:
+            cmdline += [
+                "--config=/etc/dnf/dnf.conf",
+                "--setopt=reposdir=/etc/yum.repos.d",
+                "--setopt=varsdir=/etc/dnf/vars",
+            ]
+
+        return cmdline
+
+    @classmethod
+    def invoke(cls, context: Context, operation: str, packages: Iterable[str], apivfs: bool = True) -> None:
+        with finalize_ephemeral_source_mounts(context.config) as sources:
+            run(
+                cls.cmd(context) + [operation, *sort_packages(packages)],
+                sandbox=(
+                    context.sandbox(
+                        network=True,
+                        options=[
+                            "--bind", context.root, context.root,
+                            *finalize_package_manager_mounts(context),
+                            *sources,
+                            "--chdir", "/work/src",
+                        ],
+                    ) + (apivfs_cmd(context.root) if apivfs else [])
+                ),
+                env=context.config.environment,
+            )
+
+        fixup_rpmdb_location(context)
+
+        # dnf interprets the log directory relative to the install root so there's nothing we can do but to remove the
+        # log files from the install root afterwards.
+        if (context.root / "var/log").exists():
+            for p in (context.root / "var/log").iterdir():
+                if any(p.name.startswith(prefix) for prefix in ("dnf", "hawkey", "yum")):
+                    p.unlink()
+
+    @classmethod
+    def createrepo(cls, context: Context) -> None:
+        run(["createrepo_c", context.packages],
+            sandbox=context.sandbox(options=["--bind", context.packages, context.packages]))
+
+    @classmethod
+    def localrepo(cls) -> RpmRepository:
+        return RpmRepository(
+            id="mkosi-packages",
+            url="baseurl=file:///work/packages",
+            gpgcheck=False,
+            gpgurls=(),
+            metadata_expire=0,
+            priority=50,
         )
-
-    fixup_rpmdb_location(context)
-
-    # dnf interprets the log directory relative to the install root so there's nothing we can do but to remove the log
-    # files from the install root afterwards.
-    if (context.root / "var/log").exists():
-        for p in (context.root / "var/log").iterdir():
-            if any(p.name.startswith(prefix) for prefix in ("dnf", "hawkey", "yum")):
-                p.unlink()
-
-
-def createrepo_dnf(context: Context) -> None:
-    run(["createrepo_c", context.packages],
-        sandbox=context.sandbox(options=["--bind", context.packages, context.packages]))
-
-
-def localrepo_dnf() -> RpmRepository:
-    return RpmRepository(
-        id="mkosi-packages",
-        url="baseurl=file:///work/packages",
-        gpgcheck=False,
-        gpgurls=(),
-        metadata_expire=0,
-        priority=50,
-    )
index 941fb8a00be2d4e43a990881cc67cf79aa6aafb8..0e138f67fb5aff09e3946d7afd1ca894003f15dd 100644 (file)
@@ -5,7 +5,7 @@ from pathlib import Path
 from typing import NamedTuple
 
 from mkosi.context import Context
-from mkosi.installer import finalize_package_manager_mounts
+from mkosi.installer import PackageManager, finalize_package_manager_mounts
 from mkosi.mounts import finalize_ephemeral_source_mounts
 from mkosi.run import run
 from mkosi.sandbox import apivfs_cmd
@@ -14,112 +14,118 @@ from mkosi.util import sort_packages, umask
 from mkosi.versioncomp import GenericVersion
 
 
-class PacmanRepository(NamedTuple):
-    id: str
-    url: str
+class Pacman(PackageManager):
+    class Repository(NamedTuple):
+        id: str
+        url: str
 
+    @classmethod
+    def scripts(cls, context: Context) -> dict[str, list[PathString]]:
+        return {"pacman": apivfs_cmd(context.root) + cls.cmd(context)}
 
-def setup_pacman(context: Context, repositories: Iterable[PacmanRepository]) -> None:
-    if context.config.repository_key_check:
-        sig_level = "Required DatabaseOptional"
-    else:
-        # If we are using a single local mirror built on the fly there
-        # will be no signatures
-        sig_level = "Never"
+    @classmethod
+    def setup(cls, context: Context, repositories: Iterable[Repository]) -> None:
+        if context.config.repository_key_check:
+            sig_level = "Required DatabaseOptional"
+        else:
+            # If we are using a single local mirror built on the fly there
+            # will be no signatures
+            sig_level = "Never"
 
-    # Create base layout for pacman and pacman-key
-    with umask(~0o755):
-        (context.root / "var/lib/pacman").mkdir(exist_ok=True, parents=True)
+        # Create base layout for pacman and pacman-key
+        with umask(~0o755):
+            (context.root / "var/lib/pacman").mkdir(exist_ok=True, parents=True)
 
-    (context.cache_dir / "cache/pacman/pkg").mkdir(parents=True, exist_ok=True)
+        (context.cache_dir / "cache/pacman/pkg").mkdir(parents=True, exist_ok=True)
 
-    config = context.pkgmngr / "etc/pacman.conf"
-    if config.exists():
-        return
+        config = context.pkgmngr / "etc/pacman.conf"
+        if config.exists():
+            return
 
-    config.parent.mkdir(exist_ok=True, parents=True)
+        config.parent.mkdir(exist_ok=True, parents=True)
 
-    with config.open("w") as f:
-        f.write(
-            textwrap.dedent(
-                f"""\
-                [options]
-                SigLevel = {sig_level}
-                LocalFileSigLevel = Optional
-                ParallelDownloads = 5
-                """
-            )
-        )
-
-        for repo in repositories:
+        with config.open("w") as f:
             f.write(
                 textwrap.dedent(
                     f"""\
-
-                    [{repo.id}]
-                    Server = {repo.url}
+                    [options]
+                    SigLevel = {sig_level}
+                    LocalFileSigLevel = Optional
+                    ParallelDownloads = 5
                     """
                 )
             )
 
-        if any((context.pkgmngr / "etc/pacman.d/").glob("*.conf")):
-            f.write(
-                textwrap.dedent(
-                    """\
+            for repo in repositories:
+                f.write(
+                    textwrap.dedent(
+                        f"""\
 
-                    Include = /etc/pacman.d/*.conf
-                    """
+                        [{repo.id}]
+                        Server = {repo.url}
+                        """
+                    )
                 )
-            )
 
+            if any((context.pkgmngr / "etc/pacman.d/").glob("*.conf")):
+                f.write(
+                    textwrap.dedent(
+                        """\
 
-def pacman_cmd(context: Context) -> list[PathString]:
-    return [
-        "pacman",
-        "--root", context.root,
-        "--logfile=/dev/null",
-        "--cachedir=/var/cache/pacman/pkg",
-        "--hookdir", context.root / "etc/pacman.d/hooks",
-        "--arch", context.config.distribution.architecture(context.config.architecture),
-        "--color", "auto",
-        "--noconfirm",
-    ]
-
-
-def invoke_pacman(
-    context: Context,
-    operation: str,
-    options: Sequence[str] = (),
-    packages: Sequence[str] = (),
-    apivfs: bool = True,
-) -> None:
-    with finalize_ephemeral_source_mounts(context.config) as sources:
-        run(
-            pacman_cmd(context) + [operation, *options, *sort_packages(packages)],
-            sandbox=(
-                context.sandbox(
-                    network=True,
-                    options=[
-                        "--bind", context.root, context.root,
-                        *finalize_package_manager_mounts(context),
-                        *sources,
-                        "--chdir", "/work/src",
-                    ],
-                ) + (apivfs_cmd(context.root) if apivfs else [])
-            ),
-            env=context.config.environment,
-        )
-
+                        Include = /etc/pacman.d/*.conf
+                        """
+                    )
+                )
 
-def createrepo_pacman(context: Context, *, force: bool = False) -> None:
-    run(
-        [
-            "repo-add",
-            context.packages / "mkosi-packages.db.tar",
-            *sorted(context.packages.glob("*.pkg.tar*"), key=lambda p: GenericVersion(Path(p).name)),
+    @classmethod
+    def cmd(cls, context: Context) -> list[PathString]:
+        return [
+            "pacman",
+            "--root", context.root,
+            "--logfile=/dev/null",
+            "--cachedir=/var/cache/pacman/pkg",
+            "--hookdir", context.root / "etc/pacman.d/hooks",
+            "--arch", context.config.distribution.architecture(context.config.architecture),
+            "--color", "auto",
+            "--noconfirm",
         ]
-    )
 
+    @classmethod
+    def invoke(
+        cls,
+        context: Context,
+        operation: str,
+        options: Sequence[str] = (),
+        packages: Sequence[str] = (),
+        apivfs: bool = True,
+    ) -> None:
+        with finalize_ephemeral_source_mounts(context.config) as sources:
+            run(
+                cls.cmd(context) + [operation, *options, *sort_packages(packages)],
+                sandbox=(
+                    context.sandbox(
+                        network=True,
+                        options=[
+                            "--bind", context.root, context.root,
+                            *finalize_package_manager_mounts(context),
+                            *sources,
+                            "--chdir", "/work/src",
+                        ],
+                    ) + (apivfs_cmd(context.root) if apivfs else [])
+                ),
+                env=context.config.environment,
+            )
+
+    @classmethod
+    def createrepo(cls, context: Context, *, force: bool = False) -> None:
+        run(
+            [
+                "repo-add",
+                context.packages / "mkosi-packages.db.tar",
+                *sorted(context.packages.glob("*.pkg.tar*"), key=lambda p: GenericVersion(Path(p).name)),
+            ]
+        )
 
-def localrepo_pacman() -> PacmanRepository:
-    return PacmanRepository(id="mkosi-packages", url="file:///work/packages")
+    @classmethod
+    def localrepo(cls) -> Repository:
+        return cls.Repository(id="mkosi-packages", url="file:///work/packages")
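
With the Pacman helpers folded into a class, call sites in the distribution modules switch from the old module-level functions to classmethods. The snippet below is illustrative only: the actual mkosi/distributions/arch.py hunk is elsewhere in this commit, and install_packages as well as the pacman flags shown are placeholders.

# Illustrative caller -- names and flags are examples, not the real arch.py code.
from mkosi.context import Context
from mkosi.installer.pacman import Pacman


def install_packages(context: Context, packages: list[str]) -> None:
    # Before this commit: invoke_pacman(context, "--sync", ["--refresh", "--needed"], packages)
    # After: the same operation goes through the classmethod on the new class.
    Pacman.invoke(context, "--sync", options=["--refresh", "--needed"], packages=packages)
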
index 7ac1781e845bce4fe8cbcea0da11ff92f60fbfaf..e56269a4b5a4623b40a6c5e0c9ebc3c9a137d42e 100644 (file)
@@ -5,8 +5,8 @@ from collections.abc import Iterable, Sequence
 
 from mkosi.config import yes_no
 from mkosi.context import Context
-from mkosi.installer import finalize_package_manager_mounts
-from mkosi.installer.rpm import RpmRepository, fixup_rpmdb_location, setup_rpm
+from mkosi.installer import PackageManager, finalize_package_manager_mounts
+from mkosi.installer.rpm import RpmRepository, fixup_rpmdb_location, rpm_cmd, setup_rpm
 from mkosi.mounts import finalize_ephemeral_source_mounts
 from mkosi.run import run
 from mkosi.sandbox import apivfs_cmd
@@ -14,113 +14,123 @@ from mkosi.types import PathString
 from mkosi.util import sort_packages
 
 
-def setup_zypper(context: Context, repos: Iterable[RpmRepository]) -> None:
-    config = context.pkgmngr / "etc/zypp/zypp.conf"
-    config.parent.mkdir(exist_ok=True, parents=True)
-
-    (context.cache_dir / "cache/zypp").mkdir(exist_ok=True, parents=True)
-
-    # rpm.install.excludedocs can only be configured in zypp.conf so we append
-    # to any user provided config file. Let's also bump the refresh delay to
-    # the same default as dnf which is 48 hours.
-    with config.open("a") as f:
-        f.write(
-            textwrap.dedent(
-                f"""
-                [main]
-                rpm.install.excludedocs = {yes_no(not context.config.with_docs)}
-                repo.refresh.delay = {48 * 60}
-                """
+class Zypper(PackageManager):
+    @classmethod
+    def scripts(cls, context: Context) -> dict[str, list[PathString]]:
+        return {
+            "zypper": apivfs_cmd(context.root) + cls.cmd(context),
+            "rpm"   : apivfs_cmd(context.root) + rpm_cmd(context),
+        }
+
+    @classmethod
+    def setup(cls, context: Context, repos: Iterable[RpmRepository]) -> None:
+        config = context.pkgmngr / "etc/zypp/zypp.conf"
+        config.parent.mkdir(exist_ok=True, parents=True)
+
+        (context.cache_dir / "cache/zypp").mkdir(exist_ok=True, parents=True)
+
+        # rpm.install.excludedocs can only be configured in zypp.conf so we append
+        # to any user provided config file. Let's also bump the refresh delay to
+        # the same default as dnf which is 48 hours.
+        with config.open("a") as f:
+            f.write(
+                textwrap.dedent(
+                    f"""
+                    [main]
+                    rpm.install.excludedocs = {yes_no(not context.config.with_docs)}
+                    repo.refresh.delay = {48 * 60}
+                    """
+                )
             )
-        )
 
-    repofile = context.pkgmngr / "etc/zypp/repos.d/mkosi.repo"
-    if not repofile.exists():
-        repofile.parent.mkdir(exist_ok=True, parents=True)
-        with repofile.open("w") as f:
-            for repo in repos:
-                # zypper uses the repo ID as its cache key which is unsafe so add a hash of the url used to it to
-                # make sure a unique cache is used for each repository. We use roughly the same algorithm here that dnf
-                # uses as well.
-                key = hashlib.sha256(repo.url.encode()).hexdigest()[:16]
-
-                f.write(
-                    textwrap.dedent(
-                        f"""\
-                        [{repo.id}-{key}]
-                        name={repo.id}
-                        {repo.url}
-                        gpgcheck={int(repo.gpgcheck)}
-                        enabled={int(repo.enabled)}
-                        autorefresh=1
-                        keeppackages=1
-                        """
+        repofile = context.pkgmngr / "etc/zypp/repos.d/mkosi.repo"
+        if not repofile.exists():
+            repofile.parent.mkdir(exist_ok=True, parents=True)
+            with repofile.open("w") as f:
+                for repo in repos:
+                    # zypper uses the repo ID as its cache key, which is unsafe, so append a hash of the URL to the ID
+                    # to make sure a unique cache is used for each repository. We use roughly the same algorithm here
+                    # that dnf uses.
+                    key = hashlib.sha256(repo.url.encode()).hexdigest()[:16]
+
+                    f.write(
+                        textwrap.dedent(
+                            f"""\
+                            [{repo.id}-{key}]
+                            name={repo.id}
+                            {repo.url}
+                            gpgcheck={int(repo.gpgcheck)}
+                            enabled={int(repo.enabled)}
+                            autorefresh=1
+                            keeppackages=1
+                            """
+                        )
                     )
-                )
-
-                if repo.priority is not None:
-                    f.write(f"priority={repo.priority}\n")
-
-                for i, url in enumerate(repo.gpgurls):
-                    f.write("gpgkey=" if i == 0 else len("gpgkey=") * " ")
-                    f.write(f"{url}\n")
-
-                f.write("\n")
-
-    setup_rpm(context)
-
-
-def zypper_cmd(context: Context) -> list[PathString]:
-    return [
-        "env",
-        "ZYPP_CONF=/etc/zypp/zypp.conf",
-        "HOME=/",
-        "zypper",
-        f"--installroot={context.root}",
-        "--cache-dir=/var/cache/zypp",
-        "--gpg-auto-import-keys" if context.config.repository_key_check else "--no-gpg-checks",
-        "--non-interactive",
-    ]
-
-
-def invoke_zypper(
-    context: Context,
-    operation: str,
-    packages: Sequence[str] = (),
-    *,
-    options: Sequence[str] = (),
-    apivfs: bool = True,
-) -> None:
-    with finalize_ephemeral_source_mounts(context.config) as sources:
-        run(
-            zypper_cmd(context) + [operation, *options, *sort_packages(packages)],
-            sandbox=(
-                context.sandbox(
-                    network=True,
-                    options=[
-                        "--bind", context.root, context.root,
-                        *finalize_package_manager_mounts(context),
-                        *sources,
-                        "--chdir", "/work/src",
-                    ],
-                ) + (apivfs_cmd(context.root) if apivfs else [])
-            ),
-            env=context.config.environment,
-        )
-
-    fixup_rpmdb_location(context)
-
-
-def createrepo_zypper(context: Context) -> None:
-    run(["createrepo_c", context.packages],
-        sandbox=context.sandbox(options=["--bind", context.packages, context.packages]))
 
+                    if repo.priority is not None:
+                        f.write(f"priority={repo.priority}\n")
+
+                    for i, url in enumerate(repo.gpgurls):
+                        f.write("gpgkey=" if i == 0 else len("gpgkey=") * " ")
+                        f.write(f"{url}\n")
+
+                    f.write("\n")
+
+        setup_rpm(context)
+
+    @classmethod
+    def cmd(cls, context: Context) -> list[PathString]:
+        return [
+            "env",
+            "ZYPP_CONF=/etc/zypp/zypp.conf",
+            "HOME=/",
+            "zypper",
+            f"--installroot={context.root}",
+            "--cache-dir=/var/cache/zypp",
+            "--gpg-auto-import-keys" if context.config.repository_key_check else "--no-gpg-checks",
+            "--non-interactive",
+        ]
+
+    @classmethod
+    def invoke(
+        cls,
+        context: Context,
+        operation: str,
+        packages: Sequence[str] = (),
+        *,
+        options: Sequence[str] = (),
+        apivfs: bool = True,
+    ) -> None:
+        with finalize_ephemeral_source_mounts(context.config) as sources:
+            run(
+                cls.cmd(context) + [operation, *options, *sort_packages(packages)],
+                sandbox=(
+                    context.sandbox(
+                        network=True,
+                        options=[
+                            "--bind", context.root, context.root,
+                            *finalize_package_manager_mounts(context),
+                            *sources,
+                            "--chdir", "/work/src",
+                        ],
+                    ) + (apivfs_cmd(context.root) if apivfs else [])
+                ),
+                env=context.config.environment,
+            )
 
-def localrepo_zypper() -> RpmRepository:
-    return RpmRepository(
-        id="mkosi-packages",
-        url="baseurl=file:///work/packages",
-        gpgcheck=False,
-        gpgurls=(),
-        priority=50,
-    )
+        fixup_rpmdb_location(context)
+
+    @classmethod
+    def createrepo(cls, context: Context) -> None:
+        run(["createrepo_c", context.packages],
+            sandbox=context.sandbox(options=["--bind", context.packages, context.packages]))
+
+    @classmethod
+    def localrepo(cls) -> RpmRepository:
+        return RpmRepository(
+            id="mkosi-packages",
+            url="baseurl=file:///work/packages",
+            gpgcheck=False,
+            gpgurls=(),
+            priority=50,
+        )
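
Zypper follows the same pattern, so a caller that previously used setup_zypper(), invoke_zypper() and localrepo_zypper() now goes through the class. The helpers below are hypothetical placeholders sketching that migration; they are not part of the opensuse.py changes in this commit.

# Hypothetical usage sketch -- helper names are placeholders.
from mkosi.context import Context
from mkosi.installer.rpm import RpmRepository
from mkosi.installer.zypper import Zypper


def setup_local_repo(context: Context) -> None:
    # Register only the local repository that Zypper.createrepo() populates.
    repos: list[RpmRepository] = [Zypper.localrepo()]
    Zypper.setup(context, repos)


def install_packages(context: Context, packages: list[str]) -> None:
    # Before this commit: invoke_zypper(context, "install", packages)
    # After: the equivalent classmethod call; options remain keyword-only.
    Zypper.invoke(context, "install", packages)
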