git.ipfire.org Git - thirdparty/pdns.git/commitdiff
auth-5.0 actions: Remove non-required workflows. Disable non-auth jobs 16533/head
author romeroalx <alexis.romero@open-xchange.com>
Thu, 20 Nov 2025 13:59:57 +0000 (14:59 +0100)
committer romeroalx <alexis.romero@open-xchange.com>
Thu, 20 Nov 2025 13:59:57 +0000 (14:59 +0100)
19 files changed:
.github/workflows/build-and-test-all-releases-dispatch.yml [deleted file]
.github/workflows/build-and-test-all.yml
.github/workflows/build-docker-images-dispatch.yml [deleted file]
.github/workflows/build-docker-images.yml [deleted file]
.github/workflows/build-packages-daily-master.yml [deleted file]
.github/workflows/build-packages.yml [deleted file]
.github/workflows/builder-dispatch.yml [deleted file]
.github/workflows/builder-releases-dispatch.yml [deleted file]
.github/workflows/builder.yml [deleted file]
.github/workflows/codeql-analysis.yml
.github/workflows/coverity-dispatch.yml [deleted file]
.github/workflows/coverity.yml [deleted file]
.github/workflows/docker.yml [deleted file]
.github/workflows/documentation.yml [deleted file]
.github/workflows/misc-dailies.yml [deleted file]
.github/workflows/secpoll.yml [deleted file]
.github/workflows/spelling.yml [deleted file]
.github/workflows/spelling2.yml [deleted file]
.github/workflows/spelling3.yml [deleted file]

diff --git a/.github/workflows/build-and-test-all-releases-dispatch.yml b/.github/workflows/build-and-test-all-releases-dispatch.yml
deleted file mode 100644 (file)
index ea2ef3a..0000000
+++ /dev/null
@@ -1,67 +0,0 @@
----
-name: Trigger workflow build-and-test-all for different releases
-
-on:
-  workflow_dispatch:
-  schedule:
-    - cron: '0 22 * * 4'
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  actions: read
-  contents: read
-
-jobs:
-  call-build-and-test-all-master-debian-11:
-    if: ${{ vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    uses: PowerDNS/pdns/.github/workflows/build-and-test-all.yml@master
-    with:
-      branch-name: master
-      runner-docker-image-name: base-pdns-ci-image/debian-11-pdns-base
-
-  call-build-and-test-all-auth-49:
-    if: ${{ vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    uses: PowerDNS/pdns/.github/workflows/build-and-test-all.yml@rel/auth-4.9.x
-    with:
-      branch-name: rel/auth-4.9.x
-
-  call-build-and-test-all-auth-48:
-    if: ${{ vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    uses: PowerDNS/pdns/.github/workflows/build-and-test-all.yml@rel/auth-4.8.x
-    with:
-      branch-name: rel/auth-4.8.x
-
-  call-build-and-test-all-auth-47:
-    if: ${{ vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    uses: PowerDNS/pdns/.github/workflows/build-and-test-all.yml@rel/auth-4.7.x
-    with:
-      branch-name: rel/auth-4.7.x
-
-  call-build-and-test-all-rec-52:
-    if: ${{ vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    uses: PowerDNS/pdns/.github/workflows/build-and-test-all.yml@rel/rec-5.2.x
-    with:
-      branch-name: rel/rec-5.2.x
-
-  call-build-and-test-all-rec-51:
-    if: ${{ vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    uses: PowerDNS/pdns/.github/workflows/build-and-test-all.yml@rel/rec-5.1.x
-    with:
-      branch-name: rel/rec-5.1.x
-
-  call-build-and-test-all-rec-50:
-    if: ${{ vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    uses: PowerDNS/pdns/.github/workflows/build-and-test-all.yml@rel/rec-5.0.x
-    with:
-      branch-name: rel/rec-5.0.x
-
-  call-build-and-test-all-dnsdist-19:
-    if: ${{ vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    uses: PowerDNS/pdns/.github/workflows/build-and-test-all.yml@rel/dnsdist-1.9.x
-    with:
-      branch-name: rel/dnsdist-1.9.x
-
-  call-build-and-test-all-dnsdist-18:
-    if: ${{ vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    uses: PowerDNS/pdns/.github/workflows/build-and-test-all.yml@rel/dnsdist-1.8.x
-    with:
-      branch-name: rel/dnsdist-1.8.x
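
The dispatcher deleted above fans a weekly cron out over master and the release branches by calling the reusable build-and-test-all workflow with a branch-name input. Below is a minimal sketch of that caller/callee pairing; the callee's workflow_call declaration is assumed (it is not part of this diff), while the caller side mirrors the jobs removed above.

    # callee side (assumed shape of build-and-test-all.yml): accepts the branch to test
    on:
      workflow_call:
        inputs:
          branch-name:
            type: string
            required: false
            default: master

    # caller side (pattern of the deleted dispatcher): gated by a repository variable
    jobs:
      call-build-and-test-all-auth-49:
        if: ${{ vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
        uses: PowerDNS/pdns/.github/workflows/build-and-test-all.yml@rel/auth-4.9.x
        with:
          branch-name: rel/auth-4.9.x
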
diff --git a/.github/workflows/build-and-test-all.yml b/.github/workflows/build-and-test-all.yml
index ea424e68a77d9f6053287fa1380c34e5bdc13aff..7a8be95c29ee2cd738e45b6c98f9faf75a3951c7 100644 (file)
@@ -139,191 +139,6 @@ jobs:
           path: /opt/pdns-auth
           retention-days: 1
 
-  build-recursor:
-    name: build recursor
-    if: ${{ !github.event.schedule || vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    runs-on: ubuntu-24.04
-    needs: get-runner-container-image
-    strategy:
-      matrix:
-        builder: [autotools, meson]
-        sanitizers: [asan+ubsan, tsan]
-        features: [least, full]
-        exclude:
-          - sanitizers: tsan
-            features: least
-      fail-fast: false
-    container:
-      image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
-      env:
-        SANITIZERS: ${{ matrix.sanitizers }}
-        UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp"
-        UNIT_TESTS: yes
-      options: --sysctl net.ipv6.conf.all.disable_ipv6=0
-    defaults:
-      run:
-        working-directory: ./pdns/recursordist/pdns-recursor-${{ env.BUILDER_VERSION }}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          ref: ${{ inputs.branch-name }}
-          persist-credentials: false
-      - name: get timestamp for cache
-        id: get-stamp
-        run: |
-          echo "stamp=$(/bin/date +%s)" >> "$GITHUB_OUTPUT"
-        shell: bash
-        working-directory: .
-      - run: mkdir -p ~/.ccache
-        working-directory: .
-      - name: let GitHub cache our ccache data
-        uses: actions/cache@v4
-        with:
-          path: ~/.ccache
-          key: recursor-ccache-${{ matrix.builder }}-${{ matrix.features }}-${{ matrix.sanitizers }}-${{ steps.get-stamp.outputs.stamp }}
-          restore-keys: recursor-ccache-${{ matrix.builder }}-${{ matrix.features }}-${{ matrix.sanitizers }}
-      - name: install pip build dependencies
-        run: |
-          python3 -m venv ${REPO_HOME}/.venv
-          . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
-        working-directory: .
-      - run: ${{ env.INV_CMD }} install-rec-build-deps
-        working-directory: ./pdns/recursordist/
-      - run: ${{ env.INV_CMD }} ci-install-rust ${REPO_HOME}
-        working-directory: ./pdns/recursordist/
-      - run: ${{ env.INV_CMD }} ci-autoconf ${{ matrix.builder == 'meson' && '--meson' || '' }}
-        working-directory: ./pdns/recursordist/
-      - run: ${{ env.INV_CMD }} ci-rec-configure -f ${{ matrix.features }} -b pdns-recursor-${{ env.BUILDER_VERSION }} ${{ matrix.builder == 'meson' && '--meson' || '' }}
-        working-directory: ./pdns/recursordist/
-      - run: ${{ env.INV_CMD }} ci-rec-build ${{ matrix.builder == 'meson' && '--meson' || '' }}
-      - run: ${{ env.INV_CMD }} ci-rec-run-unit-tests ${{ matrix.builder == 'meson' && '--meson' || '' }}
-      - run: ${{ env.INV_CMD }} generate-coverage-info ./testrunner $GITHUB_WORKSPACE
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' && matrix.builder == 'meson' }}
-      - name: Coveralls Parallel rec unit
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' && matrix.builder == 'meson' }}
-        uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
-        with:
-          flag-name: rec-unit-${{ matrix.features }}-${{ matrix.sanitizers }}
-          path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
-          parallel: true
-          allow-empty: true
-          fail-on-error: false
-      - run: ${{ env.INV_CMD }} ci-rec-install ${{ matrix.builder == 'meson' && '--meson' || '' }}
-      - run: ccache -s
-      - if: ${{ matrix.builder == 'meson' }}
-        run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
-      - if: ${{ matrix.builder == 'meson' }}
-        name: Store the binaries
-        uses: actions/upload-artifact@v4 # this takes 30 seconds, maybe we want to tar
-        with:
-          name: pdns-recursor-${{ matrix.features }}-${{ matrix.sanitizers }}-${{ matrix.builder}}-${{ env.normalized-branch-name }}
-          path: /opt/pdns-recursor
-          retention-days: 1
-
-  build-dnsdist:
-    name: build dnsdist
-    if: ${{ !github.event.schedule || vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    runs-on: ${{ ( vars.REPOSITORY_USE_UBICLOUD == '1' ) && 'ubicloud-standard-4-ubuntu-2404' || 'ubuntu-24.04' }}
-    needs: get-runner-container-image
-    strategy:
-      matrix:
-        builder: [autotools, meson]
-        sanitizers: [asan+ubsan, tsan]
-        features: [least, full]
-        exclude:
-          - sanitizers: tsan
-            features: least
-      fail-fast: false
-    container:
-      image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
-      env:
-        SANITIZERS: ${{ matrix.sanitizers }}
-        UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp"
-        UNIT_TESTS: yes
-        FUZZING_TARGETS: yes
-      options: --sysctl net.ipv6.conf.all.disable_ipv6=0
-    defaults:
-      run:
-        working-directory: ./pdns/dnsdistdist/dnsdist-${{ env.BUILDER_VERSION }}
-    env:
-      CLANG_VERSION: ${{ contains(needs.get-runner-container-image.outputs.id, 'debian-11') && '13' || '19' }}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          ref: ${{ inputs.branch-name }}
-          persist-credentials: false
-      - name: get timestamp for cache
-        id: get-stamp
-        run: |
-          echo "stamp=$(/bin/date +%s)" >> "$GITHUB_OUTPUT"
-        shell: bash
-        working-directory: .
-      - run: mkdir -p ~/.ccache
-        working-directory: .
-      - name: let GitHub cache our ccache data
-        uses: actions/cache@v4
-        with:
-          path: ~/.ccache
-          key: dnsdist-${{ matrix.features }}-${{ matrix.sanitizers }}-${{ matrix.builder}}-ccache-${{ steps.get-stamp.outputs.stamp }}
-          restore-keys: dnsdist-${{ matrix.features }}-${{ matrix.sanitizers }}-${{ matrix.builder}}-ccache-
-      - name: install pip build dependencies
-        run: |
-          python3 -m venv ${REPO_HOME}/.venv
-          . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
-        working-directory: .
-      - run: ${{ env.INV_CMD }} install-clang
-        working-directory: .
-      - run: ${{ env.INV_CMD }} install-lld-linker-if-needed
-        working-directory: ./pdns/dnsdistdist/
-      - run: ${{ env.INV_CMD }} ci-install-rust ${REPO_HOME}
-        if: ${{ matrix.features != 'least' }}
-        working-directory: ./pdns/dnsdistdist/
-      - run: ${{ env.INV_CMD }} ci-build-and-install-quiche ${REPO_HOME}
-        if: ${{ matrix.features != 'least' }}
-        working-directory: ./pdns/dnsdistdist/
-      - run: ${{ env.INV_CMD }} ci-autoconf
-        if: ${{ matrix.builder == 'autotools' }}
-        working-directory: ./pdns/dnsdistdist/
-      - run: ${{ env.INV_CMD }} ci-dnsdist-configure ${{ matrix.features }} ${{ matrix.builder }} dnsdist-${{ env.BUILDER_VERSION }}
-        working-directory: ./pdns/dnsdistdist/
-      - run: ${{ env.INV_CMD }} ci-make-distdir
-        if: ${{ matrix.builder == 'autotools' }}
-        working-directory: ./pdns/dnsdistdist/
-      - run: ${{ env.INV_CMD }} ci-dnsdist-configure ${{ matrix.features }} ${{ matrix.builder }} dnsdist-${{ env.BUILDER_VERSION }}
-        if: ${{ matrix.builder == 'autotools' }}
-      - run: ${{ env.INV_CMD }} ci-dnsdist-make-bear ${{ matrix.builder }}
-      - run: ${{ env.INV_CMD }} ci-dnsdist-run-unit-tests ${{ matrix.builder }}
-      - run: ${{ env.INV_CMD }} generate-coverage-info ./testrunner $GITHUB_WORKSPACE
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' && matrix.builder == 'meson'}}
-      - name: Coveralls Parallel dnsdist unit
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' && matrix.builder == 'meson' }}
-        uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
-        with:
-          flag-name: dnsdist-unit-${{ matrix.features }}-${{ matrix.sanitizers }}
-          path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
-          parallel: true
-          allow-empty: true
-          fail-on-error: false
-      - run: ${{ env.INV_CMD }} ci-dnsdist-install ${{ matrix.builder == 'meson' && '--meson' || '' }}
-      - run: ccache -s
-      - name: Prepare binaries folder
-        if: ${{ matrix.builder == 'meson' }}
-        run: |
-          echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
-          mkdir -p /opt/dnsdist/bin
-          for i in $(find . -maxdepth 1 -type f -executable); do cp ${i} /opt/dnsdist/bin/; done
-      - name: Store the binaries
-        if: ${{ matrix.builder == 'meson' }}
-        uses: actions/upload-artifact@v4 # this takes 30 seconds, maybe we want to tar
-        with:
-          name: dnsdist-${{ matrix.features }}-${{ matrix.sanitizers }}-${{ matrix.builder}}-${{ env.normalized-branch-name }}
-          path: /opt/dnsdist
-          retention-days: 1
-
   test-auth-api:
     needs:
       - build-auth
@@ -571,306 +386,12 @@ jobs:
           allow-empty: true
           fail-on-error: false
 
-  test-recursor-api:
-    needs:
-      - build-recursor
-      - get-runner-container-image
-    runs-on: ubuntu-24.04
-    strategy:
-      matrix:
-        sanitizers: [asan+ubsan, tsan]
-        dist_name: [debian]
-        pdns_repo_version: ['48']
-      fail-fast: false
-    container:
-      image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
-      env:
-        UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp"
-        ASAN_OPTIONS: detect_leaks=0
-        TSAN_OPTIONS: "halt_on_error=1:suppressions=${{ env.REPO_HOME }}/pdns/recursordist/recursor-tsan.supp"
-      options: --sysctl net.ipv6.conf.all.disable_ipv6=0
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          ref: ${{ inputs.branch-name }}
-          persist-credentials: false
-      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
-      - name: Fetch the binaries
-        uses: actions/download-artifact@v4
-        with:
-          name: pdns-recursor-full-${{ matrix.sanitizers }}-meson-${{ env.normalized-branch-name }}
-          path: /opt/pdns-recursor
-      - name: install pip build dependencies
-        run: |
-          python3 -m venv ${REPO_HOME}/.venv
-          . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
-      - run: ${{ env.INV_CMD }} apt-fresh
-      - run: ${{ env.INV_CMD }} add-auth-repo ${{ matrix.dist_name }} $(. /etc/os-release && echo $VERSION_CODENAME) ${{ matrix.pdns_repo_version }}
-      - run: ${{ env.INV_CMD }} install-clang-runtime
-      - run: ${{ env.INV_CMD }} install-rec-test-deps
-      - run: ${{ env.INV_CMD }} test-api recursor
-      - run: ${{ env.INV_CMD }} generate-coverage-info /opt/pdns-recursor/sbin/pdns_recursor $GITHUB_WORKSPACE
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
-      - name: Coveralls Parallel recursor API
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
-        uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
-        with:
-          flag-name: rec-api-full-${{ matrix.sanitizers }}
-          path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
-          parallel: true
-          allow-empty: true
-          fail-on-error: false
-
-  test-recursor-regression:
-    needs:
-      - build-recursor
-      - get-runner-container-image
-    runs-on: ubuntu-24.04
-    strategy:
-      matrix:
-        sanitizers: [asan+ubsan, tsan]
-        dist_name: [debian]
-        pdns_repo_version: ['48']
-      fail-fast: false
-    container:
-      image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
-      env:
-        UBSAN_OPTIONS: 'print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp'
-        ASAN_OPTIONS: ""
-        LSAN_OPTIONS: "suppressions=${{ env.REPO_HOME }}/pdns/recursordist/recursor-lsan.supp"
-        TSAN_OPTIONS: "halt_on_error=1:suppressions=${{ env.REPO_HOME }}/pdns/recursordist/recursor-tsan.supp"
-      options: --sysctl net.ipv6.conf.all.disable_ipv6=0
-    steps:
-      # - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          ref: ${{ inputs.branch-name }}
-          persist-credentials: false
-      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
-      - name: Fetch the binaries
-        uses: actions/download-artifact@v4
-        with:
-          name: pdns-recursor-full-${{ matrix.sanitizers }}-meson-${{ env.normalized-branch-name }}
-          path: /opt/pdns-recursor
-      - name: install pip build dependencies
-        run: |
-          python3 -m venv ${REPO_HOME}/.venv
-          . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
-      - run: ${{ env.INV_CMD }} apt-fresh
-      - run: ${{ env.INV_CMD }} add-auth-repo ${{ matrix.dist_name }} $(. /etc/os-release && echo $VERSION_CODENAME) ${{ matrix.pdns_repo_version }}
-      - run: ${{ env.INV_CMD }} install-clang-runtime
-      - run: ${{ env.INV_CMD }} install-rec-test-deps
-      - run: ${{ env.INV_CMD }} test-regression-recursor
-      - run: ${{ env.INV_CMD }} generate-coverage-info /opt/pdns-recursor/sbin/pdns_recursor $GITHUB_WORKSPACE
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
-      - name: Coveralls Parallel recursor regression
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
-        uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
-        with:
-          flag-name: rec-regression-full-${{ matrix.sanitizers }}
-          path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
-          parallel: true
-          allow-empty: true
-          fail-on-error: false
-
-  test-recursor-bulk:
-    name: 'test rec *mini* bulk'
-    needs:
-      - build-recursor
-      - get-runner-container-image
-    runs-on: ubuntu-24.04
-    strategy:
-      matrix:
-        sanitizers: [asan+ubsan, tsan]
-        threads: [1, 2, 3, 4, 8]
-        mthreads: [2048]
-        shards: [1, 2, 1024]
-        IPv6: [0]
-      fail-fast: false
-    container:
-      image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
-      env:
-        UBSAN_OPTIONS: 'print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp'
-        ASAN_OPTIONS: detect_leaks=0
-        TSAN_OPTIONS: "halt_on_error=1:suppressions=${{ env.REPO_HOME }}/pdns/recursordist/recursor-tsan.supp"
-      options: --sysctl net.ipv6.conf.all.disable_ipv6=0
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          ref: ${{ inputs.branch-name }}
-          persist-credentials: false
-      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
-      - name: Fetch the binaries
-        uses: actions/download-artifact@v4
-        with:
-          name: pdns-recursor-full-${{ matrix.sanitizers }}-meson-${{ env.normalized-branch-name }}
-          path: /opt/pdns-recursor
-      - name: install pip build dependencies
-        run: |
-          python3 -m venv ${REPO_HOME}/.venv
-          . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
-      - run: ${{ env.INV_CMD }} install-clang-runtime
-      - run: ${{ env.INV_CMD }} install-rec-bulk-deps
-      - run: ${{ env.INV_CMD }} test-bulk-recursor 100 ${{ matrix.threads }} ${{ matrix.mthreads }} ${{ matrix.shards }} ${{ matrix.IPv6 }}
-      - run: ${{ env.INV_CMD }} generate-coverage-info /opt/pdns-recursor/sbin/pdns_recursor $GITHUB_WORKSPACE
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
-      - name: Coveralls Parallel recursor bulk
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
-        uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
-        with:
-          flag-name: rec-regression-bulk-full-${{ matrix.sanitizers }}
-          path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
-          parallel: true
-          allow-empty: true
-          fail-on-error: false
-
-  test-recursor-ubicloud-bulk:
-    if: ${{ ! contains(needs.get-runner-container-image.outputs.id, 'debian-11') && ( vars.REC_BULKTEST_USE_UBICLOUD == '1' || github.repository == 'PowerDNS/pdns' ) }}
-    name: 'test rec ubicloud bulk'
-    needs:
-      - build-recursor
-      - get-runner-container-image
-    runs-on: ubicloud-standard-8-ubuntu-2404
-    strategy:
-      matrix:
-        sanitizers: [asan+ubsan] # TSAN disabled for now
-        threads: [8]
-        mthreads: [2048]
-        shards: [1024]
-        IPv6: [0, 1]
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          ref: ${{ inputs.branch-name }}
-          persist-credentials: false
-      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
-      - name: Fetch the binaries
-        uses: actions/download-artifact@v4
-        with:
-          name: pdns-recursor-full-${{ matrix.sanitizers }}-meson-${{ env.normalized-branch-name }}
-          path: /opt/pdns-recursor
-      - run: build-scripts/gh-actions-setup-inv-no-dist-upgrade
-      - name: install pip build dependencies
-        run: |
-          python3 -m venv ${{ github.workspace }}/.venv
-          . ${{ github.workspace }}/.venv/bin/activate && pip install -r ${{ github.workspace }}/meson/requirements.txt
-      - run: . ${{ github.workspace }}/.venv/bin/activate && inv install-rec-bulk-ubicloud-deps
-      - run: . ${{ github.workspace }}/.venv/bin/activate && inv test-bulk-recursor 50000 ${{ matrix.threads }} ${{ matrix.mthreads }} ${{ matrix.shards }} ${{ matrix.IPv6 }}
-        env:
-          UBSAN_OPTIONS: 'print_stacktrace=1:halt_on_error=1:suppressions=${{ github.workspace }}/build-scripts/UBSan.supp'
-          ASAN_OPTIONS: detect_leaks=0
-          TSAN_OPTIONS: "halt_on_error=1:suppressions=${{ github.workspace }}/pdns/recursordist/recursor-tsan.supp"
-      #  Disabled, it gives us: "/bin/bash: line 1: llvm-profdata-13: command not found" due to mismatch between deb and ubuntu versions
-      #- run: . ${{ github.workspace }}/.venv/bin/activate && inv generate-coverage-info /opt/pdns-recursor/sbin/pdns_recursor $GITHUB_WORKSPACE
-      #  if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
-      #- name: Coveralls Parallel recursor bulk
-      #  if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
-      #  uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
-      #  with:
-      #    flag-name: rec-regression-bulk-full-${{ matrix.sanitizers }}
-      #    path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
-      #    parallel: true
-      #    allow-empty: true
-      #    fail-on-error: false
-
-  test-dnsdist-regression:
-    needs:
-      - build-dnsdist
-      - get-runner-container-image
-    runs-on: ubuntu-24.04
-    strategy:
-      matrix:
-        sanitizers: [asan+ubsan, tsan]
-      fail-fast: false
-    container:
-      image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
-      env:
-        UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp"
-        # Disabling (intercept_send=0) the custom send wrappers for ASAN and TSAN because they cause the tools to report a race that doesn't exist on actual implementations of send(), see https://github.com/google/sanitizers/issues/1498
-        ASAN_OPTIONS: intercept_send=0
-        LSAN_OPTIONS: "suppressions=${{ env.REPO_HOME }}/pdns/dnsdistdist/dnsdist-lsan.supp"
-        TSAN_OPTIONS: "halt_on_error=1:intercept_send=0:suppressions=${{ env.REPO_HOME }}/pdns/dnsdistdist/dnsdist-tsan.supp"
-        # IncludeDir tests are disabled because of a weird interaction between TSAN and these tests which ever only happens on GH actions
-        SKIP_INCLUDEDIR_TESTS: yes
-        SANITIZERS: ${{ matrix.sanitizers }}
-        COVERAGE: no
-      options: --sysctl net.ipv6.conf.all.disable_ipv6=0 --privileged
-    env:
-      CLANG_VERSION: ${{ contains(needs.get-runner-container-image.outputs.id, 'debian-11') && '13' || '19' }}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          ref: ${{ inputs.branch-name }}
-          persist-credentials: false
-      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
-      - name: Fetch the binaries
-        uses: actions/download-artifact@v4
-        with:
-          name: dnsdist-full-${{ matrix.sanitizers }}-meson-${{ env.normalized-branch-name }}
-          path: /opt/dnsdist
-      - name: install pip build dependencies
-        run: |
-          python3 -m venv ${REPO_HOME}/.venv
-          . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
-      - run: ${{ env.INV_CMD }} install-clang-runtime
-      - run: ${{ env.INV_CMD }} install-dnsdist-test-deps $([ "$(. /etc/os-release && echo $VERSION_CODENAME)" = "bullseye" ] && echo "--skipXDP=True")
-      - run: ${{ env.INV_CMD }} test-dnsdist $([ "$(. /etc/os-release && echo $VERSION_CODENAME)" = "bullseye" ] && echo "--skipXDP=True")
-      - run: ${{ env.INV_CMD }} generate-coverage-info /opt/dnsdist/bin/dnsdist $GITHUB_WORKSPACE
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
-      - name: Coveralls Parallel dnsdist regression
-        if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
-        uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
-        with:
-          flag-name: dnsdist-regression-full-${{ matrix.sanitizers }}
-          path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
-          parallel: true
-          allow-empty: true
-          fail-on-error: false
-
-  swagger-syntax-check:
-    if: ${{ !github.event.schedule || vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
-    runs-on: ubuntu-24.04
-    container:
-      image: ghcr.io/powerdns/base-pdns-ci-image/debian-12-pdns-base:master
-      options: --sysctl net.ipv6.conf.all.disable_ipv6=0
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          ref: ${{ inputs.branch-name }}
-          persist-credentials: false
-      - name: install pip build dependencies
-        run: |
-          python3 -m venv ${REPO_HOME}/.venv
-          . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
-      - run: ${{ env.INV_CMD }} install-swagger-tools
-      - run: ${{ env.INV_CMD }} swagger-syntax-check
-
   collect:
     needs:
       - build-auth
-      - build-dnsdist
-      - build-recursor
-      - swagger-syntax-check
       - test-auth-api
       - test-auth-backend
-      - test-dnsdist-regression
       - test-ixfrdist
-      - test-recursor-api
-      - test-recursor-regression
-      - test-recursor-bulk
-      - test-recursor-ubicloud-bulk
     if: success() || failure()
     runs-on: ubuntu-24.04
     env:
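
With the recursor, dnsdist and swagger jobs gone, the collect job's needs list is trimmed down to the auth jobs: a needs entry pointing at a job that no longer exists would make the workflow fail validation. Because collect runs with if: success() || failure(), it executes even when a dependency failed, so it can act as the single required status check. A sketch of such an aggregation job follows; the result-checking step is an assumption, since the body of collect is not shown in this diff.

    collect:
      needs:
        - build-auth
        - test-auth-api
        - test-auth-backend
        - test-ixfrdist
      if: success() || failure()   # run even when a needed job failed
      runs-on: ubuntu-24.04
      steps:
        - name: Fail if any needed job did not succeed
          if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') }}
          run: exit 1
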
diff --git a/.github/workflows/build-docker-images-dispatch.yml b/.github/workflows/build-docker-images-dispatch.yml
deleted file mode 100644 (file)
index a1ad31e..0000000
+++ /dev/null
@@ -1,93 +0,0 @@
----
-name: Trigger specific image build
-
-on:
-  workflow_dispatch:
-    inputs:
-      product:
-        required: true
-        description: Product to build
-        type: choice
-        options:
-        - auth
-        - recursor
-        - dnsdist
-      ref:
-        description: git branch or tag to checkout (e.g. rec-5.0.0-rc1)
-        type: string
-        default: master
-        required: false
-      platforms:
-        description: target platform(s)
-        type: string
-        default: linux/arm64/v8,linux/amd64
-        required: false
-      build-args:
-        description: build-time variables (e.g. DOCKER_FAKE_RELEASE=YES when building for tags)
-        type: string
-        default: ''
-        required: false
-      push:
-        description: push image to DockerHub
-        type: boolean
-        required: true
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  contents: read
-  actions: read
-
-jobs:
-  prepare:
-    runs-on: ubuntu-24.04
-    outputs:
-      image-tag: ${{ steps.get-image-tag.outputs.tag }}
-      image-name: ${{ steps.get-image-metadata.outputs.name }}
-      image-description: ${{ steps.get-image-metadata.outputs.description }}
-    env:
-      INPUT_REF: ${{ inputs.ref }}
-      PRODUCT: ${{ inputs.product }}
-    steps:
-      - run: |
-          echo "${INPUT_REF}" | egrep -qq '^auth-.*|^rec-.*|^dnsdist-.*' && tag=$(echo "${INPUT_REF}" | cut -d '-' -f 2-)
-          echo "tag=$tag" >> $GITHUB_OUTPUT
-        id: get-image-tag
-      - run: |
-          if $(echo "${INPUT_REF}" | egrep -qq '^auth-.*|^rec-.*|^dnsdist-.*'); then
-            echo "version=$(echo "${INPUT_REF}" | cut -d '-' -f 2 | awk -F'.' '{print $1$2}')" >> $GITHUB_ENV
-            echo "branch=$(echo "${INPUT_REF}" | cut -d '-' -f 2- | awk -F'.' '{print "v"$1"."$2".x"}')" >> $GITHUB_ENV
-          else
-            echo "version=$(echo "${INPUT_REF}" | tr '/' '-')" >> $GITHUB_ENV
-            echo "branch=${INPUT_REF}" >> $GITHUB_ENV
-          fi
-      - run: |
-          if $(echo "$PRODUCT"| grep -qq auth); then
-            echo "${INPUT_REF}" | egrep -qq '^auth-.*' && description='PowerDNS Authoritative Server '$branch || description='EXPERIMENTAL pdns auth image'
-            echo "name=pdns-auth-$version" >> $GITHUB_OUTPUT
-          elif (echo "$PRODUCT"| grep -qq recursor); then
-            echo "${INPUT_REF}" | egrep -qq '^rec-.*' && description='PowerDNS Recursor '$branch || description='EXPERIMENTAL pdns recursor image'
-            echo "name=pdns-recursor-$version" >> $GITHUB_OUTPUT
-          else
-            echo "${INPUT_REF}" | egrep -qq '^dnsdist-.*' && description='PowerDNS DNSDist '$branch || description='EXPERIMENTAL dnsdist image'
-            echo "name=dnsdist-$version" >> $GITHUB_OUTPUT
-          fi
-          echo "description=$description" >> $GITHUB_OUTPUT
-        id: get-image-metadata
-
-  call-build-docker-image:
-    uses: PowerDNS/pdns/.github/workflows/build-docker-images.yml@master
-    needs: prepare
-    with:
-      product: ${{ inputs.product }}
-      ref: ${{ inputs.ref }}
-      image-name: ${{ needs.prepare.outputs.image-name }}
-      image-tags: |-
-        latest
-        ${{ needs.prepare.outputs.image-tag }}
-      image-description: ${{ needs.prepare.outputs.image-description }}
-      platforms: ${{ inputs.platforms }}
-      build-args: ${{ inputs.build-args }}
-      push: ${{ inputs.push }}
-    secrets:
-      DOCKERHUB_ORGANIZATION_NAME: ${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}
-      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
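
The prepare job deleted above derives the image tag, a short version and a display branch (used in the image description) from the ref with cut and awk. A traced run of the same commands, wrapped in a single step for readability; rec-5.0.0-rc1 is simply the example value from the input description.

    - name: Trace ref parsing (sketch)
      run: |
        INPUT_REF=rec-5.0.0-rc1
        tag=$(echo "${INPUT_REF}" | cut -d '-' -f 2-)                                          # 5.0.0-rc1
        version=$(echo "${INPUT_REF}" | cut -d '-' -f 2 | awk -F'.' '{print $1$2}')            # 50
        branch=$(echo "${INPUT_REF}" | cut -d '-' -f 2- | awk -F'.' '{print "v"$1"."$2".x"}')  # v5.0.x
        echo "image name: pdns-recursor-${version}, tag: ${tag}, description branch: ${branch}"
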
diff --git a/.github/workflows/build-docker-images.yml b/.github/workflows/build-docker-images.yml
deleted file mode 100644 (file)
index 80e1c62..0000000
+++ /dev/null
@@ -1,189 +0,0 @@
----
-name: Build push and test docker images
-
-on:
-  workflow_call:
-    inputs:
-      product:
-        required: true
-        description: Product to build
-        type: string
-      ref:
-        description: git ref to checkout
-        type: string
-        default: master
-        required: false
-      image-name:
-        description: repository name for the requested image
-        type: string
-        required: true
-      image-tags:
-        description: tag for the requested image
-        type: string
-        required: true
-      image-description:
-        description: short description for the image repository
-        type: string
-        required: true
-      platforms:
-        description: target platform(s)
-        type: string
-        default: linux/arm64/v8,linux/amd64
-        required: false
-      build-args:
-        description: build-time variables
-        type: string
-        default: ''
-        required: false
-      push:
-        description: push image to DockerHub
-        type: boolean
-        required: true
-    secrets:
-      DOCKERHUB_ORGANIZATION_NAME:
-        required: true
-      DOCKERHUB_USERNAME:
-        required: true
-      DOCKERHUB_TOKEN:
-        required: true
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  contents: read
-
-jobs:
-  validate-push-image:
-    name: Check only images built from tags and master are pushed
-    runs-on: ubuntu-24.04
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          submodules: recursive
-          ref: ${{ inputs.ref }}
-          persist-credentials: false
-      - name: validate reference only if image will be pushed
-        if: ${{ inputs.push }}
-        env:
-          REF: ${{ inputs.ref }}
-        run: |
-          [[ "${REF}" == "master" ]] || git describe --tags --exact-match
-
-  build:
-    name: build docker image for a product
-    runs-on: ubuntu-24.04
-    needs: validate-push-image
-    outputs:
-      image-digest: ${{ steps.build-image.outputs.digest }}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          submodules: recursive
-          ref: ${{ inputs.ref }}
-          persist-credentials: false
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-        with:
-          platforms: linux/arm64/v8
-      - name: Set up Docker Buildx for multi-platform builds
-        uses: docker/setup-buildx-action@v3
-        with:
-          platforms: ${{ inputs.platforms }}
-      - name: Login to Docker Hub
-        if: ${{ inputs.push }}
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Docker image metadata
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: ${{ secrets.DOCKERHUB_ORGANIZATION_NAME || 'powerdns' }}/${{ inputs.image-name }}
-          tags: ${{ inputs.image-tags }}
-      - name: Build and load powerdns product images
-        id: build-image
-        uses: docker/build-push-action@v6
-        with:
-          context: .
-          file: Dockerfile-${{ inputs.product }}
-          platforms: ${{ inputs.platforms }}
-          push: ${{ inputs.push }}
-          sbom: true
-          tags: ${{ steps.meta.outputs.tags }}
-          build-args: ${{ inputs.build-args }}
-      - name: Update repo description
-        if: ${{ inputs.push }}
-        uses: peter-evans/dockerhub-description@v4
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-          repository: ${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${{ inputs.image-name }}
-          short-description: ${{ inputs.image-description }}
-
-  prepare-test-runner-os-list:
-    name: generate runner list
-    if: ${{ inputs.push }}
-    needs: build
-    runs-on: ubuntu-24.04
-    outputs:
-      runnerlist: ${{ steps.get-runnerlist.outputs.runnerlist }}
-      skiptests: ${{ steps.get-runnerlist.outputs.skiptests }}
-    steps:
-      - run: sudo apt-get update && sudo apt-get -y install jo
-      - id: get-runnerlist
-        run: |
-          runner_os=()
-          [[ -n "${{ contains(inputs.platforms, 'amd64') && 'ubuntu-24.04' || '' }}" ]] && runner_os+=('ubuntu-24.04')
-          [[ -n "${{ vars.ARM64_USE_UBICLOUD == '1' && contains(inputs.platforms, 'arm64') || '' }}" ]] && runner_os+=('ubicloud-standard-2-arm')
-          echo "runnerlist=$(jo -a ${runner_os[@]})" >> "$GITHUB_OUTPUT"
-          # Skip tests if no runner is suitable for running them. Set a default runner to avoid CI failure
-          if [[ -z "${runner_os[@]}" ]]; then
-            echo "runnerlist=$(jo -a ubuntu-24.04)" >> "$GITHUB_OUTPUT"
-            echo "skiptests=1" >> "$GITHUB_OUTPUT"
-          fi
-
-  test-uploaded-images:
-    name: test uploaded images
-    if: ${{ inputs.push && ! needs.prepare-test-runner-os-list.outputs.skiptests }}
-    needs: [build, prepare-test-runner-os-list]
-    runs-on: ${{ matrix.runner-os }}
-    strategy:
-      matrix:
-        runner-os: ${{ fromJson(needs.prepare-test-runner-os-list.outputs.runnerlist )}}
-      fail-fast: false
-    env:
-      INPUT_IMAGE_NAME: ${{ inputs.image-name }}
-      INPUT_IMAGE_TAGS: ${{ inputs.image-tags }}
-      INPUT_PLATFORMS: ${{ inputs.platforms }}
-    steps:
-      - name: Check running image
-        run: |
-          image_name="${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${INPUT_IMAGE_NAME}"
-          for tag in `echo "${INPUT_IMAGE_TAGS}" | tr '\n' ' '`; do
-            echo 'Testing: '${image_name}':'${tag};
-            # pdns-auth image returns a 134 exit code
-            docker run ${image_name}:${tag} --version || [ "$?" == "134" ]
-          done
-      - name: Check image digest matches
-        env:
-          EXPECTED_DIGEST: ${{ needs.build.outputs.image-digest }}
-        run: |
-          image_name="${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${INPUT_IMAGE_NAME}"
-          for tag in `echo "${INPUT_IMAGE_TAGS}" | tr '\n' ' '`; do
-            image_digest=$(docker inspect --format='{{index .RepoDigests 0}}' ${image_name}:${tag} | cut -d '@' -f 2)
-            [[ "${EXPECTED_DIGEST}" == "${image_digest}" ]] || \
-              ( echo "Image digest does not match => output_digest: "${EXPECTED_DIGEST}" - image_digest: "${image_digest} && exit 1 )
-          done
-      - name: Check SBOM and Provenance
-        run: |
-          image_name="${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${INPUT_IMAGE_NAME}"
-          for tag in `echo "${INPUT_IMAGE_TAGS}" | tr '\n' ' '`; do
-            if $(echo "${INPUT_PLATFORMS}" | grep -qq ','); then
-              docker buildx imagetools inspect ${image_name}:${tag} --format "{{json .Provenance}}" | jq -e '."linux/'$(dpkg --print-architecture)'" | has("SLSA")'
-              docker buildx imagetools inspect ${image_name}:${tag} --format "{{json .SBOM}}" | jq -e '."linux/'$(dpkg --print-architecture)'" | has("SPDX")'
-            else
-              docker buildx imagetools inspect ${image_name}:${tag} --format "{{json .Provenance}}" | jq -e 'has("SLSA")'
-              docker buildx imagetools inspect ${image_name}:${tag} --format "{{json .SBOM}}" | jq -e 'has("SPDX")'
-            fi
-          done
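
The deleted test-uploaded-images job validates a pushed image in three ways: it runs the image with --version (accepting exit code 134 for pdns-auth), compares the pulled tag's digest against the digest reported by the build step, and checks that SBOM and provenance attestations are attached. A sketch of the digest comparison for a single tag, using the same commands as the deleted steps; the image name is illustrative only.

    - name: Verify a pulled tag matches the built digest (sketch)
      env:
        EXPECTED_DIGEST: ${{ needs.build.outputs.image-digest }}
      run: |
        image="powerdns/pdns-auth-49:latest"   # hypothetical repository:tag
        docker pull "${image}"
        image_digest=$(docker inspect --format='{{index .RepoDigests 0}}' "${image}" | cut -d '@' -f 2)
        [[ "${EXPECTED_DIGEST}" == "${image_digest}" ]] || { echo "digest mismatch: ${image_digest}"; exit 1; }
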
diff --git a/.github/workflows/build-packages-daily-master.yml b/.github/workflows/build-packages-daily-master.yml
deleted file mode 100644 (file)
index 87cc9b6..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
----
-name: 'daily: build packages for master'
-
-on:
-  schedule:
-    - cron: '0 5 * * *'
-
-permissions:
-  actions: read
-  id-token: write
-  contents: write
-
-jobs:
-  call-build-packages-auth:
-    if: ${{ vars.SCHEDULED_PACKAGES_DAILY }}
-    uses: PowerDNS/pdns/.github/workflows/build-packages.yml@master
-    with:
-      is_release: 'NO'
-      product: 'authoritative'
-      ref: master
-      os: >-
-          el-8
-          el-9
-          el-10
-          debian-bullseye
-          debian-bookworm
-          debian-trixie
-          ubuntu-jammy
-          ubuntu-noble
-    secrets:
-      DOWNLOADS_AUTOBUILT_SECRET: ${{ secrets.DOWNLOADS_AUTOBUILT_SECRET }}
-      DOWNLOADS_AUTOBUILT_RSYNCTARGET: ${{ secrets.DOWNLOADS_AUTOBUILT_RSYNCTARGET }}
-      DOWNLOADS_AUTOBUILT_HOSTKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_HOSTKEY }}
-      PULP_FILE_REPO_NAME: ${{ secrets.PULP_FILE_REPO_NAME }}
-      PULP_URL: ${{ secrets.PULP_URL }}
-      PULP_CONTENT_URL: ${{ secrets.PULP_CONTENT_URL }}
-      PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-      PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-      PULP_PDNS_GPG_PUBKEY_MASTER: ${{ secrets.PULP_PDNS_GPG_PUBKEY_MASTER }}
-      PULP_PDNS_GPG_PUBKEY: ${{ secrets.PULP_PDNS_GPG_PUBKEY }}
-
-  call-build-packages-dnsdist:
-    if: ${{ vars.SCHEDULED_PACKAGES_DAILY }}
-    uses: PowerDNS/pdns/.github/workflows/build-packages.yml@master
-    with:
-      is_release: 'NO'
-      product: 'dnsdist'
-      ref: master
-      os: >-
-          el-8
-          el-9
-          el-10
-          debian-bullseye
-          debian-bookworm
-          debian-trixie
-          ubuntu-jammy
-          ubuntu-noble
-    secrets:
-      DOWNLOADS_AUTOBUILT_SECRET: ${{ secrets.DOWNLOADS_AUTOBUILT_SECRET }}
-      DOWNLOADS_AUTOBUILT_RSYNCTARGET: ${{ secrets.DOWNLOADS_AUTOBUILT_RSYNCTARGET }}
-      DOWNLOADS_AUTOBUILT_HOSTKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_HOSTKEY }}
-      PULP_FILE_REPO_NAME: ${{ secrets.PULP_FILE_REPO_NAME }}
-      PULP_URL: ${{ secrets.PULP_URL }}
-      PULP_CONTENT_URL: ${{ secrets.PULP_CONTENT_URL }}
-      PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-      PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-      PULP_PDNS_GPG_PUBKEY_MASTER: ${{ secrets.PULP_PDNS_GPG_PUBKEY_MASTER }}
-      PULP_PDNS_GPG_PUBKEY: ${{ secrets.PULP_PDNS_GPG_PUBKEY }}
-
-  call-build-packages-rec:
-    if: ${{ vars.SCHEDULED_PACKAGES_DAILY }}
-    uses: PowerDNS/pdns/.github/workflows/build-packages.yml@master
-    with:
-      is_release: 'NO'
-      product: 'recursor'
-      ref: master
-      os: >-
-          el-8
-          el-9
-          el-10
-          debian-bullseye
-          debian-bookworm
-          debian-trixie
-          ubuntu-jammy
-          ubuntu-noble
-    secrets:
-      DOWNLOADS_AUTOBUILT_SECRET: ${{ secrets.DOWNLOADS_AUTOBUILT_SECRET }}
-      DOWNLOADS_AUTOBUILT_RSYNCTARGET: ${{ secrets.DOWNLOADS_AUTOBUILT_RSYNCTARGET }}
-      DOWNLOADS_AUTOBUILT_HOSTKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_HOSTKEY }}
-      PULP_FILE_REPO_NAME: ${{ secrets.PULP_FILE_REPO_NAME }}
-      PULP_URL: ${{ secrets.PULP_URL }}
-      PULP_CONTENT_URL: ${{ secrets.PULP_CONTENT_URL }}
-      PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-      PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-      PULP_PDNS_GPG_PUBKEY_MASTER: ${{ secrets.PULP_PDNS_GPG_PUBKEY_MASTER }}
-      PULP_PDNS_GPG_PUBKEY: ${{ secrets.PULP_PDNS_GPG_PUBKEY }}
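
The os input above uses a YAML folded block scalar (>-), so the listed targets reach the reusable build-packages workflow as one space-separated string, which the prepare job further down splits again with a plain shell loop. A small sketch of that round trip, with an abbreviated target list:

    with:
      os: >-
          el-9
          debian-bookworm
          ubuntu-noble
    # the callee receives OS="el-9 debian-bookworm ubuntu-noble" and iterates over it:
    #   for i in $(echo "${OS}"); do ...; done
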
diff --git a/.github/workflows/build-packages.yml b/.github/workflows/build-packages.yml
deleted file mode 100644 (file)
index 06cd89a..0000000
+++ /dev/null
@@ -1,579 +0,0 @@
----
-name: Build packages
-
-on:
-  workflow_call:
-    inputs:
-      product:
-        required: true
-        description: Product to build
-        type: string
-      os:
-        required: false
-        description: OSes to build for, space separated
-        type: string
-        # please remember to update the pkghashes below when you
-        # update this list, as well as the one in builder-dispatch.yml
-        default: >-
-          el-8
-          el-9
-          el-10
-          debian-bullseye
-          debian-bookworm
-          debian-trixie
-          ubuntu-focal
-          ubuntu-jammy
-          ubuntu-noble
-      ref:
-        description: git ref to checkout
-        type: string
-        default: master
-        required: false
-      is_release:
-        description: is this a release build?
-        type: string
-        required: false
-        default: 'NO'
-    secrets:
-      DOWNLOADS_AUTOBUILT_SECRET:
-        required: true
-      DOWNLOADS_AUTOBUILT_RSYNCTARGET:
-        required: true
-      DOWNLOADS_AUTOBUILT_HOSTKEY:
-        required: true
-      PULP_FILE_REPO_NAME:
-        required: true
-      PULP_URL:
-        required: true
-      PULP_CONTENT_URL:
-        required: true
-      PULP_CI_USERNAME:
-        required: true
-      PULP_CI_PASSWORD:
-        required: true
-      PULP_PDNS_GPG_PUBKEY_MASTER:
-        required: true
-      PULP_PDNS_GPG_PUBKEY:
-        required: true
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  contents: read
-
-env:
-  INV_CMD: ". ${GITHUB_WORKSPACE}/.venv/bin/activate && inv"
-
-jobs:
-  prepare:
-    name: generate OS runner and arch list
-    runs-on: ubuntu-24.04
-    outputs:
-      oslist: ${{ steps.get-oslist.outputs.oslist }}
-      runnerlist: ${{ steps.get-runnerlist.outputs.runnerlist }}
-      archlist: ${{ steps.get-archlist.outputs.archlist }}
-      publish-packages: ${{ steps.set-publish-packages.outputs.publish-packages }}
-    env:
-      OS: ${{ inputs.os }}
-      REF: ${{ inputs.ref }}
-      IS_RELEASE: ${{ inputs.is_release }}
-    steps:
-      # instead of jo, we could use jq here, which avoids running apt, and thus would be faster.
-      # but, as this whole workflow needs at least 30 minutes to run, I prefer spending a few seconds here
-      # so that the command remains readable, because jo is simpler to use.
-      - run: sudo apt-get update && sudo apt-get -y install jo
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 1
-          submodules: recursive
-          ref: ${{ inputs.ref }}
-          persist-credentials: false
-      - id: get-oslist
-        run: |
-          available_targets=$(ls builder-support/dockerfiles/Dockerfile.target.* )
-          for i in $(echo "${OS}"); do
-            if echo $available_targets | grep -qq $i; then
-              targets+=($i)
-            else
-              if [[ "${REF}" == "master" ]]; then
-                echo "::error title=Dockerfile not found for ${i}::OS ${i} not available as target in ${REF}" && exit 1
-              else
-                echo "::warning title=Packages will not be generated for ${i}::OS ${i} not available as target in ${REF}"
-              fi
-            fi
-          done
-          echo "oslist=$(jo -a ${targets[*]})" >> "$GITHUB_OUTPUT"
-      - id: get-runnerlist
-        run: echo "runnerlist=$(jo -a ubuntu-24.04 ${{ vars.ARM64_USE_UBICLOUD == '1' && 'ubicloud-standard-2-arm' || '' }})" >> "$GITHUB_OUTPUT"
-      - id: get-archlist
-        run: echo "archlist=$(jo -a  x86_64 ${{ vars.ARM64_USE_UBICLOUD == '1' && 'aarch64' || '' }})" >> "$GITHUB_OUTPUT"
-        # Only upload RPM and DEB packages for master and tags. Build outputs and provenance of all refs
-      - id: set-publish-packages
-        run: echo "publish-packages=$([[ "${REF}" == "master" || "${IS_RELEASE}" == "YES" ]] && echo 'yes' || echo 'no')" >> "$GITHUB_OUTPUT"
-
-  build:
-    needs: prepare
-    name: for ${{ matrix.os }} ${{ inputs.product }} (${{ inputs.ref }}) on ${{ matrix.runner-os }}
-    runs-on: ${{ matrix.runner-os }}
-    strategy:
-      matrix:
-        os: ${{fromJson(needs.prepare.outputs.oslist)}}
-        runner-os: ${{ fromJson(needs.prepare.outputs.runnerlist )}}
-      fail-fast: false
-    outputs:
-      product-name: ${{ steps.normalize-name.outputs.normalized-package-name }}
-      version: ${{ steps.getversion.outputs.version }}
-      pkghashes-el-8-x86_64: ${{ steps.pkghashes.outputs.pkghashes-el-8-x86_64 }}
-      pkghashes-el-8-aarch64: ${{ steps.pkghashes.outputs.pkghashes-el-8-aarch64 }}
-      pkghashes-el-9-x86_64: ${{ steps.pkghashes.outputs.pkghashes-el-9-x86_64 }}
-      pkghashes-el-9-aarch64: ${{ steps.pkghashes.outputs.pkghashes-el-9-aarch64 }}
-      pkghashes-el-10-x86_64: ${{ steps.pkghashes.outputs.pkghashes-el-10-x86_64 }}
-      pkghashes-el-10-aarch64: ${{ steps.pkghashes.outputs.pkghashes-el-10-aarch64 }}
-      pkghashes-debian-bullseye-x86_64: ${{ steps.pkghashes.outputs.pkghashes-debian-bullseye-x86_64 }}
-      pkghashes-debian-bullseye-aarch64: ${{ steps.pkghashes.outputs.pkghashes-debian-bullseye-aarch64 }}
-      pkghashes-debian-bookworm-x86_64: ${{ steps.pkghashes.outputs.pkghashes-debian-bookworm-x86_64 }}
-      pkghashes-debian-bookworm-aarch64: ${{ steps.pkghashes.outputs.pkghashes-debian-bookworm-aarch64 }}
-      pkghashes-debian-trixie-x86_64: ${{ steps.pkghashes.outputs.pkghashes-debian-trixie-x86_64 }}
-      pkghashes-debian-trixie-aarch64: ${{ steps.pkghashes.outputs.pkghashes-debian-trixie-aarch64 }}
-      pkghashes-ubuntu-focal-x86_64: ${{ steps.pkghashes.outputs.pkghashes-ubuntu-focal-x86_64 }}
-      pkghashes-ubuntu-focal-aarch64: ${{ steps.pkghashes.outputs.pkghashes-ubuntu-focal-aarch64 }}
-      pkghashes-ubuntu-jammy-x86_64: ${{ steps.pkghashes.outputs.pkghashes-ubuntu-jammy-x86_64 }}
-      pkghashes-ubuntu-jammy-aarch64: ${{ steps.pkghashes.outputs.pkghashes-ubuntu-jammy-aarch64 }}
-      pkghashes-ubuntu-noble-x86_64: ${{ steps.pkghashes.outputs.pkghashes-ubuntu-noble-x86_64 }}
-      pkghashes-ubuntu-noble-aarch64: ${{ steps.pkghashes.outputs.pkghashes-ubuntu-noble-aarch64 }}
-      srchashes: ${{ steps.srchashes.outputs.srchashes }}
-    env:
-      IS_RELEASE: ${{ inputs.is_release}}
-      PRODUCT: ${{ inputs.product }}
-      OS: ${{ matrix.os }}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0 # for correct version numbers
-          submodules: recursive
-          ref: ${{ inputs.ref }}
-          persist-credentials: false
-      # this builds packages and runs our unit tests (make check)
-      - run: IS_RELEASE=${IS_RELEASE} builder/build.sh -v -m ${PRODUCT} ${OS}
-      - name: Get version number
-        run: |
-          echo "version=$(readlink builder/tmp/latest)" >> $GITHUB_OUTPUT
-        id: getversion
-      - name: Get target architecture
-        run: |
-          echo "target-arch=$(uname -m)" >> $GITHUB_OUTPUT
-        id: getarch
-      - name: Normalize package name
-        id: normalize-name
-        run: |
-          if [ "x${PRODUCT}" = "xauthoritative" ]; then
-            echo "normalized-package-name=pdns" >> $GITHUB_OUTPUT
-          elif [ "x${PRODUCT}" = "xrecursor" ]; then
-            echo "normalized-package-name=pdns-recursor" >> $GITHUB_OUTPUT
-          else
-            echo "normalized-package-name=${PRODUCT}" >> $GITHUB_OUTPUT
-          fi
-      - name: Include architecture in the packages compressed file name
-        env:
-          TARGET_ARCH: ${{ steps.getarch.outputs.target-arch }}
-        run: for f in $(ls ./built_pkgs/*/*/*-${OS}.tar.bz2 | sed 's/\.tar.bz2$//'); do mv $f.tar.bz2 $f-${TARGET_ARCH}.tar.bz2; done
-      - name: Upload packages as GH artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: ${{ inputs.product }}-${{ matrix.os }}-${{ steps.getversion.outputs.version }}-${{ steps.getarch.outputs.target-arch }}
-          path: built_pkgs/
-          retention-days: 7
-      - name: Extract packages from the tarball
-        # so we get provenance for individual packages (and the JSON package manifests from the builder)
-        id: extract
-        env:
-          TARGET_ARCH: ${{ steps.getarch.outputs.target-arch }}
-          PACKAGE_NAME: ${{ steps.normalize-name.outputs.normalized-package-name }}
-          VERSION: ${{ steps.getversion.outputs.version }}
-        run: |
-          mkdir -m 700 -p ./packages/
-          tar xvf ./built_pkgs/*/*/${PACKAGE_NAME}-${VERSION}-${OS}-${TARGET_ARCH}.tar.bz2 -C ./packages/ --transform='s/.*\///'
-      - name: Generate package hashes for provenance
-        shell: bash
-        id: pkghashes
-        env:
-          TARGET_ARCH: ${{ steps.getarch.outputs.target-arch }}
-        run: |
-          echo "pkghashes-${OS}-${TARGET_ARCH}=$(shopt -s nullglob; sha256sum ./packages/*.rpm ./packages/*.deb ./packages/*.json | base64 -w0)" >> $GITHUB_OUTPUT
-      - name: Generate source hash for provenance
-        shell: bash
-        id: srchashes
-        env:
-          PACKAGE_NAME: ${{ steps.normalize-name.outputs.normalized-package-name }}
-          VERSION: ${{ steps.getversion.outputs.version }}
-        run: |
-          echo "srchashes=$(sha256sum ./built_pkgs/*/*/${PACKAGE_NAME}-${VERSION}.tar.* | base64 -w0)" >> $GITHUB_OUTPUT
-      - name: Upload packages to downloads.powerdns.com
-        env:
-          SSHKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_SECRET }}
-          RSYNCTARGET: ${{ secrets.DOWNLOADS_AUTOBUILT_RSYNCTARGET }}
-          HOSTKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_HOSTKEY }}
-        if:
-          "${{ env.SSHKEY != '' }}"
-        run: |
-          mkdir -m 700 -p ~/.ssh
-          echo "$SSHKEY" > ~/.ssh/id_ed25519
-          chmod 600 ~/.ssh/id_ed25519
-          echo "$HOSTKEY" > ~/.ssh/known_hosts
-          rsync -4rlptD built_pkgs/* "$RSYNCTARGET"
-
-  check-hashes:
-    needs: [prepare, build]
-    name: Check if hashes were created for all requested targets
-    runs-on: ubuntu-24.04
-    env:
-      OUTPUTS: ${{ toJSON(needs.build.outputs) }}
-      OSLIST: ${{ needs.prepare.outputs.oslist }}
-    steps:
-      - name: Get list of outputs from build jobs
-        run: echo "${OUTPUTS}" | jq 'keys[]' | grep -vE 'version|product-name' | tee /tmp/build-outputs.txt
-      - name: Get list of OS inputs
-        run: |
-          for os in $(echo "${OSLIST}" | jq -r '.[]'); do
-            for architecture in x86_64 ${{ vars.ARM64_USE_UBICLOUD == '1' && 'aarch64' || '' }}; do
-              echo "\"pkghashes-$os-$architecture\"" | tee -a /tmp/os-inputs.txt
-            done
-          done
-          sort -o /tmp/os-inputs.txt /tmp/os-inputs.txt
-          echo "\"srchashes\"" | tee -a /tmp/os-inputs.txt
-      - name: Fail if there is a hash missing
-        run: if ! diff -q /tmp/build-outputs.txt /tmp/os-inputs.txt; then exit 1; fi
-
-  provenance-pkgs:
-    needs: [prepare, build]
-    name: Generate provenance for ${{ inputs.product }} (${{ inputs.ref }}) for ${{ matrix.os }}
-    strategy:
-      matrix:
-        os: ${{fromJson(needs.prepare.outputs.oslist)}}
-        architecture: ${{ fromJson(needs.prepare.outputs.archlist )}}
-    permissions:
-      actions: read   # To read the workflow path.
-      id-token: write # To sign the provenance.
-      contents: write # To be able to upload assets as release artifacts
-    uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.0.0
-    with:
-      base64-subjects: "${{ needs.build.outputs[format('pkghashes-{0}-{1}', matrix.os, matrix.architecture)] }}"
-      upload-assets: false
-      provenance-name: "${{ inputs.product }}-${{ needs.build.outputs.version }}-${{ matrix.os }}-${{ matrix.architecture }}.intoto.jsonl"
-      private-repository: true
-
-  provenance-src:
-    needs: build
-    name: Generate provenance for ${{ inputs.product }} (${{ inputs.ref }}) source tarball
-    permissions:
-      actions: read   # To read the workflow path.
-      id-token: write # To sign the provenance.
-      contents: write # To be able to upload assets as release artifacts
-    uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.0.0
-    with:
-      base64-subjects: "${{ needs.build.outputs.srchashes }}"
-      upload-assets: false
-      provenance-name: "${{ inputs.product }}-${{ needs.build.outputs.version }}-src.intoto.jsonl"
-      private-repository: true
-
-  upload-provenance:
-    needs: [prepare, build, provenance-src, provenance-pkgs]
-    name: Upload the provenance artifacts to downloads.powerdns.com
-    runs-on: ubuntu-24.04
-    strategy:
-      matrix:
-        os: ${{fromJson(needs.prepare.outputs.oslist)}}
-        architecture: ${{ fromJson(needs.prepare.outputs.archlist )}}
-    steps:
-      - name: Download source tarball provenance for ${{ inputs.product }} (${{ inputs.ref }})
-        id: download-src-provenance
-        uses: actions/download-artifact@v4 # be careful, this needs to match what https://github.com/slsa-framework/slsa-github-generator is using
-        with:
-          name: "${{ inputs.product }}-${{ needs.build.outputs.version }}-src.intoto.jsonl"
-      - name: Download provenance for ${{ inputs.product }} (${{ inputs.ref }}) for ${{ matrix.os }}
-        id: download-provenance
-        uses: actions/download-artifact@v4 # be careful, this needs to match what https://github.com/slsa-framework/slsa-github-generator is using
-        with:
-          name: "${{ inputs.product }}-${{ needs.build.outputs.version }}-${{ matrix.os }}-${{ matrix.architecture }}.intoto.jsonl"
-      - name: Upload provenance artifacts to downloads.powerdns.com
-        id: upload-provenance
-        env:
-          SSHKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_SECRET }}
-          RSYNCTARGET: ${{ secrets.DOWNLOADS_AUTOBUILT_RSYNCTARGET }}
-          HOSTKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_HOSTKEY }}
-          PRODUCT: ${{ needs.build.outputs.product-name }}
-          VERSION: ${{ needs.build.outputs.version }}
-          DOWNLOAD_PATH: ${{ steps.download-provenance.outputs.download-path }}
-        if:
-          "${{ env.SSHKEY != '' }}"
-        shell: bash
-        run: |
-          mkdir -m 700 -p ~/.ssh
-          echo "$SSHKEY" > ~/.ssh/id_ed25519
-          chmod 600 ~/.ssh/id_ed25519
-          echo "$HOSTKEY" > ~/.ssh/known_hosts
-          mkdir -m 755 -p "slsa/${PRODUCT}/${VERSION}/"
-          mv "${DOWNLOAD_PATH}"/*.jsonl "slsa/${PRODUCT}/${VERSION}"
-          rsync -4rlptD slsa/* "$RSYNCTARGET"
-
-  upload-packages:
-    needs: [prepare, build, provenance-src, provenance-pkgs]
-    continue-on-error: true
-    if: ${{ needs.prepare.outputs.publish-packages == 'yes' }}
-    name: Upload packages
-    runs-on: ubuntu-24.04
-    strategy:
-      matrix:
-        os: ${{fromJson(needs.prepare.outputs.oslist)}}
-        architecture: ${{ fromJson(needs.prepare.outputs.archlist )}}
-      fail-fast: false
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 1
-          submodules: recursive
-      - name: install pyinvoke and pulp
-        run: |
-          python3 -m venv ${GITHUB_WORKSPACE}/.venv
-          . ${GITHUB_WORKSPACE}/.venv/bin/activate && pip install -r ${GITHUB_WORKSPACE}/builder-support/requirements.txt
-      - run: ${{ env.INV_CMD }} validate-pulp-credentials
-        env:
-          PULP_REPO_NAME: ${{ secrets.PULP_FILE_REPO_NAME }}
-          PULP_URL: ${{ secrets.PULP_URL }}
-          PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-          PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-      - name: Download packages
-        uses: actions/download-artifact@v4
-        with:
-          pattern: "${{ inputs.product }}-${{ matrix.os }}-${{ needs.build.outputs.version }}-${{ matrix.architecture }}"
-      - name: Normalize package name
-        id: normalize-name
-        run: |
-          shortversion=$(echo ${{ needs.build.outputs.version }} | grep -qq master && echo master || echo ${{ needs.build.outputs.version }} | awk -F. '{print $1$2}')
-          if [ "x${{ inputs.product }}" = "xauthoritative" ]; then
-            echo "normalized-package-name=pdns" >> $GITHUB_OUTPUT
-            echo "normalized-product-name=auth-${shortversion}" >> $GITHUB_OUTPUT
-          elif [ "x${{ inputs.product }}" = "xrecursor" ]; then
-            echo "normalized-package-name=pdns-recursor" >> $GITHUB_OUTPUT
-            echo "normalized-product-name=rec-${shortversion}" >> $GITHUB_OUTPUT
-          else
-            echo "normalized-package-name=${{ inputs.product }}" >> $GITHUB_OUTPUT
-            echo "normalized-product-name=${{ inputs.product }}-${shortversion}" >> $GITHUB_OUTPUT
-          fi
-      - name: Extract and prepare files and packages
-        run: |
-          # Prepare files folder
-          mkdir -p files/${{ steps.normalize-name.outputs.normalized-package-name }}/${{ needs.build.outputs.version }}
-          # Copy compressed and provenance files
-          cp ${{ inputs.product }}-${{ matrix.os }}-*-${{ matrix.architecture }}/*/*/*-${{ matrix.architecture }}.tar.bz2 files/${{ steps.normalize-name.outputs.normalized-package-name }}/${{ needs.build.outputs.version }}/
-          # Extract packages
-          for i in $(ls ./${{ inputs.product }}-${{ matrix.os }}-*-${{ matrix.architecture }}/*/*/*${{ matrix.architecture }}.tar.bz2); do mkdir -p ./packages/$(echo $i | cut -d '/' -f 2)/; tar -xvf $i -C ./packages/$(echo $i | cut -d '/' -f 2)/ --transform='s/.*\///'; done
-          # Move RPM files
-          for i in $(ls ./packages/*/*.rpm); do f=$(echo $i | awk -F/ '{print $3}' | awk -F'${{ inputs.product }}-' '{print $2}' | awk -F'-${{ needs.build.outputs.version }}-' '{print $1"."$2}'); mkdir -p rpm_packages/${f}; mv $i rpm_packages/${f}/; done
-          # Move deb files
-          for i in $(ls ./packages/*/*.deb); do mkdir -p deb_packages/$(echo $i | awk -F/ '{print $3}' | awk -F- '{print $2"-"$3}'); mv $i deb_packages/$(echo $i | awk -F/ '{print $3}' | awk -F- '{print $2"-"$3}'); done
-      - name: Upload DEB Packages
-        if: ${{ contains(matrix.os, 'ubuntu-') || ( contains(matrix.os, 'debian-')) }}
-        run: ${{ env.INV_CMD }} pulp-upload-deb-packages-by-folder deb_packages ${{ steps.normalize-name.outputs.normalized-product-name }}
-        env:
-          PULP_URL: ${{ secrets.PULP_URL }}
-          PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-          PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-      - name: Upload RPM Packages
-        if: ${{ contains(matrix.os, 'el-') || ( contains(matrix.os, 'centos-')) }}
-        run: ${{ env.INV_CMD }} pulp-upload-rpm-packages-by-folder rpm_packages ${{ steps.normalize-name.outputs.normalized-product-name }}
-        env:
-          PULP_URL: ${{ secrets.PULP_URL }}
-          PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-          PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-
-  upload-src-files:
-    needs: [prepare, build, provenance-src, provenance-pkgs]
-    continue-on-error: true
-    if: ${{ needs.prepare.outputs.publish-packages == 'yes' }}
-    name: Upload source and other files
-    runs-on: ubuntu-24.04
-    strategy:
-      matrix:
-        os: ${{fromJson(needs.prepare.outputs.oslist)}}
-        architecture: ${{ fromJson(needs.prepare.outputs.archlist )}}
-      fail-fast: false
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 1
-          submodules: recursive
-      - name: install pyinvoke and pulp
-        run: |
-          python3 -m venv ${GITHUB_WORKSPACE}/.venv
-          . ${GITHUB_WORKSPACE}/.venv/bin/activate && pip install -r ${GITHUB_WORKSPACE}/builder-support/requirements.txt
-      - run: ${{ env.INV_CMD }} validate-pulp-credentials
-        env:
-          PULP_REPO_NAME: ${{ secrets.PULP_FILE_REPO_NAME }}
-          PULP_URL: ${{ secrets.PULP_URL }}
-          PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-          PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-      - name: Download packages
-        uses: actions/download-artifact@v4
-        with:
-          pattern: "${{ inputs.product }}-${{ matrix.os }}-${{ needs.build.outputs.version }}-${{ matrix.architecture }}"
-      - name: Normalize package name
-        id: normalize-name
-        run: |
-          if [ "x${{ inputs.product }}" = "xauthoritative" ]; then
-            echo "normalized-package-name=pdns" >> $GITHUB_OUTPUT
-          elif [ "x${{ inputs.product }}" = "xrecursor" ]; then
-            echo "normalized-package-name=pdns-recursor" >> $GITHUB_OUTPUT
-          else
-            echo "normalized-package-name=${{ inputs.product }}" >> $GITHUB_OUTPUT
-          fi
-      - name: Extract and prepare files and packages
-        run: |
-          # Prepare files folder
-          mkdir -p files/${{ steps.normalize-name.outputs.normalized-package-name }}/${{ needs.build.outputs.version }}
-          # Copy compressed files
-          cp ${{ inputs.product }}-${{ matrix.os }}-*-${{ matrix.architecture }}/*/*/*.tar.bz2 files/${{ steps.normalize-name.outputs.normalized-package-name }}/${{ needs.build.outputs.version }}/
-          # Extract packages
-          for i in $(ls ./${{ inputs.product }}-${{ matrix.os }}-*-${{ matrix.architecture }}/*/*/*${{ matrix.architecture }}.tar.bz2); do mkdir -p ./packages/$(echo $i | cut -d '/' -f 2)/; tar -xvf $i -C ./packages/$(echo $i | cut -d '/' -f 2)/ --transform='s/.*\///'; done
-          # Add file with list of dependencies
-          for i in $(ls ./packages/*/*.json); do mv $i files/${{ steps.normalize-name.outputs.normalized-package-name }}/${{ needs.build.outputs.version }}/; done
-      - name: Upload build data
-        run: ${{ env.INV_CMD }} pulp-upload-file-packages-by-folder files
-        env:
-          PULP_REPO_NAME: ${{ secrets.PULP_FILE_REPO_NAME }}
-          PULP_URL: ${{ secrets.PULP_URL }}
-          PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-          PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-
-  upload-provenance-files:
-    needs: [build, provenance-src, provenance-pkgs]
-    name: Upload provenance files
-    continue-on-error: true
-    if: ${{ needs.prepare.outputs.publish-packages == 'yes' }}
-    runs-on: ubuntu-24.04
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 1
-          submodules: recursive
-      - name: install pyinvoke and pulp
-        run: |
-          python3 -m venv ${GITHUB_WORKSPACE}/.venv
-          . ${GITHUB_WORKSPACE}/.venv/bin/activate && pip install -r ${GITHUB_WORKSPACE}/builder-support/requirements.txt
-      - run: ${{ env.INV_CMD }} validate-pulp-credentials
-        env:
-          PULP_REPO_NAME: ${{ secrets.PULP_FILE_REPO_NAME }}
-          PULP_URL: ${{ secrets.PULP_URL }}
-          PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-          PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-      - name: Download provenance files
-        uses: actions/download-artifact@v4
-        with:
-          pattern: "${{ inputs.product }}-${{ needs.build.outputs.version }}-*.intoto.jsonl"
-      - name: Normalize package name
-        id: normalize-name
-        run: |
-          if [ "x${{ inputs.product }}" = "xauthoritative" ]; then
-            echo "normalized-package-name=pdns" >> $GITHUB_OUTPUT
-          elif [ "x${{ inputs.product }}" = "xrecursor" ]; then
-            echo "normalized-package-name=pdns-recursor" >> $GITHUB_OUTPUT
-          else
-            echo "normalized-package-name=${{ inputs.product }}" >> $GITHUB_OUTPUT
-          fi
-      - name: Extract and prepare files and packages
-        run: |
-          # Prepare files folder
-          mkdir -p files/${{ steps.normalize-name.outputs.normalized-package-name }}/${{ needs.build.outputs.version }}
-          # Copy provenance files
-          cp *.jsonl/*.jsonl files/${{ steps.normalize-name.outputs.normalized-package-name }}/${{ needs.build.outputs.version }}/
-      - name: Upload provenance data
-        run: ${{ env.INV_CMD }} pulp-upload-file-packages-by-folder files
-        env:
-          PULP_REPO_NAME: ${{ secrets.PULP_FILE_REPO_NAME }}
-          PULP_URL: ${{ secrets.PULP_URL }}
-          PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-          PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-
-  publish-packages:
-    needs: [prepare, build, upload-packages]
-    continue-on-error: true
-    name: Publish RPM and DEB packages
-    runs-on: ubuntu-24.04
-    outputs:
-      check-finished: ${{ steps.check-finished.outputs.upload-completed }}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 1
-          submodules: recursive
-      - name: install pyinvoke and pulp
-        run: |
-          python3 -m venv ${GITHUB_WORKSPACE}/.venv
-          . ${GITHUB_WORKSPACE}/.venv/bin/activate && pip install -r ${GITHUB_WORKSPACE}/builder-support/requirements.txt
-      - name: Normalize product name
-        id: normalize-name
-        run: |
-          shortversion=$(echo ${{ needs.build.outputs.version }} | grep -qq master && echo master || echo ${{ needs.build.outputs.version }} | awk -F. '{print $1$2}')
-          if [ "x${{ inputs.product }}" = "xauthoritative" ]; then
-            echo "normalized-product-name=auth-${shortversion}" >> $GITHUB_OUTPUT
-          elif [ "x${{ inputs.product }}" = "xrecursor" ]; then
-            echo "normalized-product-name=rec-${shortversion}" >> $GITHUB_OUTPUT
-          else
-            echo "normalized-product-name=${{ inputs.product }}-${shortversion}" >> $GITHUB_OUTPUT
-          fi
-      - name: Publish DEB Packages
-        run: ${{ env.INV_CMD }} pulp-create-deb-publication
-        env:
-          PULP_URL: ${{ secrets.PULP_URL }}
-          PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-          PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-      - name: Publish RPM Packages
-        run: ${{ env.INV_CMD }} pulp-create-rpm-publication ${{ steps.normalize-name.outputs.normalized-product-name }} '${{ needs.prepare.outputs.oslist }}' '${{ needs.prepare.outputs.archlist }}'
-        env:
-          PULP_URL: ${{ secrets.PULP_URL }}
-          PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-          PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-      - id: check-finished
-        run: echo "upload-completed=yes" >> $GITHUB_OUTPUT
-
-  test-packages:
-    needs: [prepare, build, publish-packages]
-    continue-on-error: true
-    name: Test packages
-    runs-on: ${{ matrix.runner-os }}
-    if: ${{ needs.publish-packages.outputs.check-finished == 'yes' }}
-    strategy:
-      matrix:
-        os: ${{fromJson(needs.prepare.outputs.oslist)}}
-        runner-os: ${{ fromJson(needs.prepare.outputs.runnerlist )}}
-      fail-fast: false
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 1
-          submodules: recursive
-      - name: install pyinvoke and pulp
-        run: |
-          python3 -m venv ${GITHUB_WORKSPACE}/.venv
-          . ${GITHUB_WORKSPACE}/.venv/bin/activate && pip install -r ${GITHUB_WORKSPACE}/builder-support/requirements.txt
-      - name: Normalize product name
-        id: normalize-name
-        run: |
-          shortversion=$(echo ${{ needs.build.outputs.version }} | grep -qq master && echo master || echo ${{ needs.build.outputs.version }} | awk -F. '{print $1$2}')
-          if [ "x${{ inputs.product }}" = "xauthoritative" ]; then
-            echo "normalized-package-name=pdns" >> $GITHUB_OUTPUT
-            echo "normalized-product-name=auth-${shortversion}" >> $GITHUB_OUTPUT
-            echo "process-name=pdns_server" >> $GITHUB_OUTPUT
-          elif [ "x${{ inputs.product }}" = "xrecursor" ]; then
-            echo "normalized-package-name=pdns-recursor" >> $GITHUB_OUTPUT
-            echo "normalized-product-name=rec-${shortversion}" >> $GITHUB_OUTPUT
-            echo "process-name=pdns_recursor" >> $GITHUB_OUTPUT
-          else
-            echo "normalized-package-name=${{ inputs.product }}" >> $GITHUB_OUTPUT
-            echo "normalized-product-name=${{ inputs.product }}-${shortversion}" >> $GITHUB_OUTPUT
-            echo "process-name=dnsdist" >> $GITHUB_OUTPUT
-          fi
-      - id: get-gpg-pubkey-url
-        run: |
-          echo "gpg-pubkey-url=$(echo ${{ needs.build.outputs.version }} | grep -qq "master" && echo ${{ secrets.PULP_PDNS_GPG_PUBKEY_MASTER }} || echo ${{ secrets.PULP_PDNS_GPG_PUBKEY }} )" >> $GITHUB_OUTPUT
-      - run: ${{ env.INV_CMD }} test-install-package ${{ steps.normalize-name.outputs.normalized-product-name }} ${{ matrix.os }} ${{ secrets.PULP_CONTENT_URL }} ${{ steps.get-gpg-pubkey-url.outputs.gpg-pubkey-url }} ${{ steps.normalize-name.outputs.normalized-package-name }} ${{ needs.build.outputs.version }}
-      - name: check package version
-        run: docker run -t test-build-${{ steps.normalize-name.outputs.normalized-product-name }}-${{ matrix.os }}:latest ${{ steps.normalize-name.outputs.process-name }} --version | grep -qq ${{ needs.build.outputs.version }} && echo "[OK]" || exit 1
diff --git a/.github/workflows/builder-dispatch.yml b/.github/workflows/builder-dispatch.yml
deleted file mode 100644 (file)
index aad6ba9..0000000
+++ /dev/null
@@ -1,61 +0,0 @@
----
-name: Trigger specific package build
-
-on:
-  workflow_dispatch:
-    inputs:
-      product:
-        description: Product to build
-        type: choice
-        options:
-        - authoritative
-        - recursor
-        - dnsdist
-      os:
-        description: OSes to build for, space separated
-        type: string
-        # please remember to update build-packages.yml as well
-        default: >-
-          el-8
-          el-9
-          el-10
-          debian-bullseye
-          debian-bookworm
-          debian-trixie
-          ubuntu-jammy
-          ubuntu-noble
-      ref:
-        description: git ref to checkout
-        type: string
-        default: master
-      is_release:
-        description: is this a release build?
-        type: choice
-        options:
-        - 'NO'
-        - 'YES'
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  actions: read
-  contents: write # To be able to upload assets as release artifacts
-  id-token: write # To sign the provenance in the build packages reusable workflow.
-
-jobs:
-  call-build-packages:
-    uses: PowerDNS/pdns/.github/workflows/build-packages.yml@master
-    with:
-      product: ${{ github.event.inputs.product }}
-      os: ${{ github.event.inputs.os }}
-      ref: ${{ github.event.inputs.ref }}
-      is_release: ${{ github.event.inputs.is_release }}
-    secrets:
-      DOWNLOADS_AUTOBUILT_SECRET: ${{ secrets.DOWNLOADS_AUTOBUILT_SECRET }}
-      DOWNLOADS_AUTOBUILT_RSYNCTARGET: ${{ secrets.DOWNLOADS_AUTOBUILT_RSYNCTARGET }}
-      DOWNLOADS_AUTOBUILT_HOSTKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_HOSTKEY }}
-      PULP_FILE_REPO_NAME: ${{ secrets.PULP_FILE_REPO_NAME }}
-      PULP_URL: ${{ secrets.PULP_URL }}
-      PULP_CONTENT_URL: ${{ secrets.PULP_CONTENT_URL }}
-      PULP_CI_USERNAME: ${{ secrets.PULP_CI_USERNAME }}
-      PULP_CI_PASSWORD: ${{ secrets.PULP_CI_PASSWORD }}
-      PULP_PDNS_GPG_PUBKEY_MASTER: ${{ secrets.PULP_PDNS_GPG_PUBKEY_MASTER }}
-      PULP_PDNS_GPG_PUBKEY: ${{ secrets.PULP_PDNS_GPG_PUBKEY }}
diff --git a/.github/workflows/builder-releases-dispatch.yml b/.github/workflows/builder-releases-dispatch.yml
deleted file mode 100644 (file)
index 54b057e..0000000
+++ /dev/null
@@ -1,60 +0,0 @@
----
-name: Trigger workflow builder for different releases
-
-on:
-  workflow_dispatch:
-  schedule:
-    - cron: '0 2 * * *'
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  actions: read
-  contents: read
-
-jobs:
-  call-builder-auth-49:
-    if: ${{ vars.SCHEDULED_JOBS_BUILDER }}
-    uses: PowerDNS/pdns/.github/workflows/builder.yml@rel/auth-4.9.x
-    with:
-      branch-name: rel/auth-4.9.x
-
-  call-builder-auth-48:
-    if: ${{ vars.SCHEDULED_JOBS_BUILDER }}
-    uses: PowerDNS/pdns/.github/workflows/builder.yml@rel/auth-4.8.x
-    with:
-      branch-name: rel/auth-4.8.x
-
-  call-builder-auth-47:
-    if: ${{ vars.SCHEDULED_JOBS_BUILDER }}
-    uses: PowerDNS/pdns/.github/workflows/builder.yml@rel/auth-4.7.x
-    with:
-      branch-name: rel/auth-4.7.x
-
-  call-builder-rec-52:
-    if: ${{ vars.SCHEDULED_JOBS_BUILDER }}
-    uses: PowerDNS/pdns/.github/workflows/builder.yml@rel/rec-5.2.x
-    with:
-      branch-name: rel/rec-5.2.x
-
-  call-builder-rec-51:
-    if: ${{ vars.SCHEDULED_JOBS_BUILDER }}
-    uses: PowerDNS/pdns/.github/workflows/builder.yml@rel/rec-5.1.x
-    with:
-      branch-name: rel/rec-5.1.x
-
-  call-builder-rec-50:
-    if: ${{ vars.SCHEDULED_JOBS_BUILDER }}
-    uses: PowerDNS/pdns/.github/workflows/builder.yml@rel/rec-5.0.x
-    with:
-      branch-name: rel/rec-5.0.x
-
-  call-builder-dnsdist-19:
-    if: ${{ vars.SCHEDULED_JOBS_BUILDER }}
-    uses: PowerDNS/pdns/.github/workflows/builder.yml@rel/dnsdist-1.9.x
-    with:
-      branch-name: rel/dnsdist-1.9.x
-
-  call-builder-dnsdist-18:
-    if: ${{ vars.SCHEDULED_JOBS_BUILDER }}
-    uses: PowerDNS/pdns/.github/workflows/builder.yml@rel/dnsdist-1.8.x
-    with:
-      branch-name: rel/dnsdist-1.8.x
diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml
deleted file mode 100644 (file)
index 37b0e8e..0000000
+++ /dev/null
@@ -1,66 +0,0 @@
----
-name: 'Test package building for specific distributions'
-
-on:
-  workflow_call:
-    inputs:
-      branch-name:
-        description: 'Checkout to a specific branch'
-        required: true
-        default: ''
-        type: string
-  schedule:
-    - cron: '0 1 * * *'
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  contents: read
-
-jobs:
-  prepare:
-    name: generate runner list
-    runs-on: ubuntu-24.04
-    outputs:
-      runnerlist: ${{ steps.get-runnerlist.outputs.runnerlist }}
-    steps:
-      - run: sudo apt-get update && sudo apt-get -y install jo
-      - id: get-runnerlist
-        run: echo "runnerlist=$(jo -a ubuntu-24.04 ${{ vars.ARM64_USE_UBICLOUD == '1' && 'ubicloud-standard-2-arm' || '' }})" >> "$GITHUB_OUTPUT"
-
-  build:
-    name: build.sh
-    if: ${{ vars.SCHEDULED_JOBS_BUILDER }}
-    needs: prepare
-    runs-on: ${{ matrix.runner-os }}
-    strategy:
-      matrix:
-        product: ['authoritative', 'recursor', 'dnsdist']
-        os:
-          - centos-9-stream
-          - centos-10-stream
-          - ubuntu-oracular
-          - amazon-2023
-        runner-os: ${{ fromJson(needs.prepare.outputs.runnerlist )}}
-      fail-fast: false
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0  # for correct version numbers
-          submodules: recursive
-          ref: ${{ inputs.branch-name }}
-          persist-credentials: false
-      # this builds packages and runs our unit test (make check)
-      - run: builder/build.sh -v -m ${{ matrix.product }} ${{ matrix.os }}
-      - name: Get version number
-        run: |
-          echo "version=$(readlink builder/tmp/latest)" >> $GITHUB_OUTPUT
-        id: getversion
-      - name: Get target architecture
-        run: |
-          echo "target-arch=$(uname -m)" >> $GITHUB_OUTPUT
-        id: getarch
-      - name: Upload packages
-        uses: actions/upload-artifact@v4
-        with:
-          name: ${{ matrix.product }}-${{ matrix.os }}-${{ steps.getversion.outputs.version }}-${{ steps.getarch.outputs.target-arch }}
-          path: built_pkgs/
-          retention-days: 7
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index a49547608f3fc0abc4eebbe3cb486b1e87c7d67e..bdc2134b4dffc353cbb7bfd09bc97e1f5057a5f4 100644 (file)
@@ -39,7 +39,7 @@ jobs:
         # Override automatic language detection by changing the below list
         # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
         language: ['cpp']
-        product: ['auth', 'rec', 'dnsdist']
+        product: ['auth']
         # Learn more...
         # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
 
diff --git a/.github/workflows/coverity-dispatch.yml b/.github/workflows/coverity-dispatch.yml
deleted file mode 100644 (file)
index 2b4f116..0000000
+++ /dev/null
@@ -1,47 +0,0 @@
----
-name: Trigger specific coverity scan
-
-on:
-  workflow_dispatch:
-    inputs:
-      product:
-        description: Product to build
-        type: choice
-        options:
-        - authoritative
-        - recursor
-        - dnsdist
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  contents: read
-
-jobs:
-  coverity-auth:
-    name: coverity scan of the auth
-    if: ${{ github.event.inputs.product == 'authoritative' }}
-    uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
-    with:
-      product: ${{ github.event.inputs.product }}
-    secrets:
-      COVERITY_TOKEN: ${{ secrets.coverity_auth_token }}
-      COVERITY_EMAIL: ${{ secrets.coverity_email }}
-
-  coverity-dnsdist:
-    name: coverity scan of dnsdist
-    if: ${{ github.event.inputs.product == 'dnsdist' }}
-    uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
-    with:
-      product: ${{ github.event.inputs.product }}
-    secrets:
-      COVERITY_TOKEN: ${{ secrets.coverity_dnsdist_token }}
-      COVERITY_EMAIL: ${{ secrets.coverity_email }}
-
-  coverity-rec:
-    name: coverity scan of the rec
-    if: ${{ github.event.inputs.product == 'recursor' }}
-    uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
-    with:
-      product: ${{ github.event.inputs.product }}
-    secrets:
-      COVERITY_TOKEN: ${{ secrets.coverity_rec_token }}
-      COVERITY_EMAIL: ${{ secrets.coverity_email }}
diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml
deleted file mode 100644 (file)
index 2cc9995..0000000
+++ /dev/null
@@ -1,130 +0,0 @@
----
-name: Coverity scan
-
-on:
-  workflow_call:
-    inputs:
-      product:
-        required: true
-        description: Product to build
-        type: string
-    secrets:
-      COVERITY_TOKEN:
-        required: true
-      COVERITY_EMAIL:
-        required: true
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  contents: read
-
-env:
-  CLANG_VERSION: '12'
-
-jobs:
-  coverity-auth:
-    name: coverity scan of the auth
-    if: ${{ inputs.product == 'authoritative' }}
-    runs-on: ubuntu-22.04
-    env:
-      COVERITY_TOKEN: ${{ secrets.COVERITY_TOKEN }}
-      FUZZING_TARGETS: no
-      SANITIZERS:
-      UNIT_TESTS: no
-    steps:
-      - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          persist-credentials: false
-      - uses: actions/setup-python@v5
-        with:
-          python-version: '3.11'
-      - run: build-scripts/gh-actions-setup-inv-no-dist-upgrade
-      - run: inv install-clang
-      - run: inv install-auth-build-deps
-      - run: inv install-coverity-tools PowerDNS
-      - run: inv coverity-clang-configure
-      - run: inv ci-autoconf
-      - run: inv ci-auth-configure
-      - run: inv coverity-make
-      - run: inv coverity-tarball auth.tar.bz2
-      - run: inv coverity-upload ${{ secrets.COVERITY_EMAIL }} PowerDNS auth.tar.bz2
-
-  coverity-dnsdist:
-    name: coverity scan of dnsdist
-    if: ${{ inputs.product == 'dnsdist' }}
-    runs-on: ubuntu-22.04
-    env:
-      COVERITY_TOKEN: ${{ secrets.COVERITY_TOKEN }}
-      SANITIZERS:
-      UNIT_TESTS: no
-      REPO_HOME: ${{ github.workspace }}
-    steps:
-      - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          persist-credentials: false
-      - uses: actions/setup-python@v5
-        with:
-          python-version: '3.11'
-      # installing the python3 package does not work because actions/setup-python installs a different version in /opt/hostedtoolcache/Python
-      - name: Install python yaml
-        run: |
-          pip install pyyaml
-      - run: build-scripts/gh-actions-setup-inv-no-dist-upgrade
-      - run: inv install-clang
-      - run: inv install-lld-linker-if-needed
-      - run: inv install-dnsdist-build-deps --skipXDP
-      - run: inv install-coverity-tools dnsdist
-      - run: inv coverity-clang-configure
-      - run: inv ci-autoconf
-        working-directory: ./pdns/dnsdistdist/
-      - run: inv ci-install-rust $REPO_HOME
-        working-directory: ./pdns/dnsdistdist/
-      - run: inv ci-build-and-install-quiche $REPO_HOME
-        working-directory: ./pdns/dnsdistdist/
-      - run: inv ci-dnsdist-configure full autotools build-dir
-        working-directory: ./pdns/dnsdistdist/
-      - run: inv coverity-make
-        working-directory: ./pdns/dnsdistdist/
-      - run: inv coverity-tarball dnsdist.tar.bz2
-        working-directory: ./pdns/dnsdistdist/
-      - run: inv coverity-upload ${{ secrets.COVERITY_EMAIL }} dnsdist dnsdist.tar.bz2
-        working-directory: ./pdns/dnsdistdist/
-
-  coverity-rec:
-    name: coverity scan of the rec
-    if: ${{ inputs.product == 'recursor' }}
-    runs-on: ubuntu-22.04
-    env:
-      COVERITY_TOKEN: ${{ secrets.COVERITY_TOKEN }}
-      SANITIZERS:
-      UNIT_TESTS: no
-    steps:
-      - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          persist-credentials: false
-      - uses: actions/setup-python@v5
-        with:
-          python-version: '3.11'
-      - run: build-scripts/gh-actions-setup-inv-no-dist-upgrade
-      - run: inv install-clang
-      - run: inv install-rec-build-deps
-      - run: inv install-coverity-tools 'PowerDNS+Recursor'
-      - run: inv coverity-clang-configure
-      - run: inv ci-autoconf
-        working-directory: ./pdns/recursordist/
-      - run: inv ci-rec-configure full
-        working-directory: ./pdns/recursordist/
-      - run: inv coverity-make
-        working-directory: ./pdns/recursordist/
-      - run: inv coverity-tarball recursor.tar.bz2
-        working-directory: ./pdns/recursordist/
-      - run: inv coverity-upload ${{ secrets.COVERITY_EMAIL }} 'PowerDNS+Recursor' recursor.tar.bz2
-        working-directory: ./pdns/recursordist/
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
deleted file mode 100644 (file)
index 133107b..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
----
-name: 'Build Docker images'
-
-on:
-  schedule:
-    - cron: '0 4 * * *'
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  contents: read
-  actions: read
-
-jobs:
-  call-build-image-auth:
-    uses: PowerDNS/pdns/.github/workflows/build-docker-images.yml@master
-    if: ${{ vars.SCHEDULED_DOCKER }}
-    with:
-      product: auth
-      ref: ${{ github.ref_name }}
-      image-name: pdns-auth-master
-      image-tags: |-
-        latest
-      image-description: 'EXPERIMENTAL pdns auth image'
-      platforms: linux/amd64,linux/arm64/v8
-      push: true
-    secrets:
-      DOCKERHUB_ORGANIZATION_NAME: ${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}
-      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
-
-  call-build-image-recursor:
-    uses: PowerDNS/pdns/.github/workflows/build-docker-images.yml@master
-    if: ${{ vars.SCHEDULED_DOCKER }}
-    with:
-      product: recursor
-      ref: ${{ github.ref_name }}
-      image-name: pdns-recursor-master
-      image-tags: |-
-        latest
-      image-description: 'EXPERIMENTAL pdns recursor image'
-      platforms: linux/amd64,linux/arm64/v8
-      push: true
-    secrets:
-      DOCKERHUB_ORGANIZATION_NAME: ${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}
-      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
-
-  call-build-image-dnsdist:
-    uses: PowerDNS/pdns/.github/workflows/build-docker-images.yml@master
-    if: ${{ vars.SCHEDULED_DOCKER }}
-    with:
-      product: dnsdist
-      ref: ${{ github.ref_name }}
-      image-name: dnsdist-master
-      image-tags: |-
-        latest
-      image-description: 'EXPERIMENTAL dnsdist image'
-      platforms: linux/amd64,linux/arm64/v8
-      push: true
-    secrets:
-      DOCKERHUB_ORGANIZATION_NAME: ${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}
-      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
deleted file mode 100644 (file)
index 912f30a..0000000
+++ /dev/null
@@ -1,348 +0,0 @@
-# This workflow builds and publishes documentation for PowerDNS products.
-# It is controlled by the following variables and secrets:
-
-# Variables:
-# - PUBLISH_DOCS_TO_AWS: Set to 'true' to enable publishing to AWS
-# - PUBLISH_DOCS_TO_NETLIFY: Set to 'true' to enable publishing to Netlify
-# - PUBLISH_DOCS_TO_WEB1: Set to 'true' to enable publishing to Web1
-# - DOCS_WORKFLOW_BRANCH_OVERRIDE: Set to 'true' to allow publishing from non-master branches (for testing)
-
-# - AWS_REGION: AWS region for S3 and CloudFront
-# - AWS_S3_BUCKET_DOCS: S3 bucket name for documentation
-# - AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST: CloudFront distribution ID for DNSdist docs
-# - AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS: CloudFront distribution ID for PowerDNS docs
-
-# - NETLIFY_SITE_ID_DOCS: Netlify site ID for PowerDNS docs
-# - NETLIFY_SITE_ID_DNSDIST: Netlify site ID for DNSdist docs
-
-
-# - REC_DOCS_DIR: Directory for Recursor docs on Web1
-# - AUTH_DOCS_DIR: Directory for Authoritative Server docs on Web1
-# - WEB1_HOSTKEY: SSH host key for Web1
-# - DOCS_HOST: Hostname for documentation server
-
-# Secrets:
-# - AWS_ACCESS_KEY_ID: AWS access key ID
-# - AWS_SECRET_ACCESS_KEY: AWS secret access key
-# - NETLIFY_API_TOKEN: API token for Netlify
-# - WEB1_DOCS_SECRET: SSH key for Web1 access
-
----
-name: 'Documentation'
-
-on:
-  push:
-    branches: [master]
-  pull_request:
-    branches: [master]
-  workflow_dispatch: {}
-permissions:
-  contents: read
-
-env:
-  FORCE_JAVASCRIPT_ACTIONS_TO_NODE20: true
-
-jobs:
-  build-docs:
-    name: Build docs
-    runs-on: ubuntu-22.04
-    env:
-      BRANCH_NAME: ${{ github.ref_name }}
-    outputs:
-      pdns_version: ${{ steps.get-version.outputs.pdns_version }}
-    steps:
-      - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
-      - uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-      - uses: actions/setup-python@v5
-        with:
-          python-version: '3.11'
-      - run: pip install pyyaml
-      - run: build-scripts/gh-actions-setup-inv-no-dist-upgrade
-      - run: inv install-doc-deps
-      - run: inv install-doc-deps-pdf
-
-      - id: get-version
-        run: |
-          echo "pdns_version=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
-
-      # Build Auth docs
-      - run: inv ci-docs-build
-      - run: mv html auth-html-docs
-        working-directory: ./docs/_build
-      - run: tar cf auth-html-docs.tar auth-html-docs
-        working-directory: ./docs/_build
-      - uses: actions/upload-artifact@v4
-        with:
-          name: authoritative-html-docs-${{steps.get-version.outputs.pdns_version}}
-          path: ./docs/_build/auth-html-docs.tar
-      - run: inv ci-docs-build-pdf
-      - uses: actions/upload-artifact@v4
-        with:
-          name: PowerDNS-Authoritative-${{steps.get-version.outputs.pdns_version}}.pdf
-          path: ./docs/_build/latex/PowerDNS-Authoritative.pdf
-      - name: Copy PDF to HTML docs directory
-        run: |
-          cp ./docs/_build/latex/PowerDNS-Authoritative.pdf ./docs/_build/auth-html-docs/
-      # Build Rec docs
-      - run: inv ci-metrics-rec-generate
-        working-directory: ./pdns/recursordist
-      - run: inv ci-docs-rec-generate
-        working-directory: ./pdns/recursordist/rec-rust-lib
-      - run: inv ci-docs-build
-        working-directory: ./pdns/recursordist
-      - run: mv html rec-html-docs
-        working-directory: ./pdns/recursordist/docs/_build
-      - run: tar cf rec-html-docs.tar rec-html-docs
-        working-directory: ./pdns/recursordist/docs/_build
-      - uses: actions/upload-artifact@v4
-        with:
-          name: recursor-html-docs-${{steps.get-version.outputs.pdns_version}}
-          path: ./pdns/recursordist/docs/_build/rec-html-docs.tar
-      - run: inv ci-docs-build-pdf
-        working-directory: ./pdns/recursordist
-      - uses: actions/upload-artifact@v4
-        with:
-          name: PowerDNS-Recursor-${{steps.get-version.outputs.pdns_version}}.pdf
-          path: ./pdns/recursordist/docs/_build/latex/PowerDNS-Recursor.pdf
-      - name: Copy PDF to HTML docs directory
-        run: |
-          cp ./pdns/recursordist/docs/_build/latex/PowerDNS-Recursor.pdf ./pdns/recursordist/docs/_build/rec-html-docs/
-
-      # Build DNSdist docs
-      - run: inv ci-docs-dnsdist-generate
-        working-directory: ./pdns/dnsdistdist
-      - run: inv ci-docs-build
-        working-directory: ./pdns/dnsdistdist
-      - run: mv html dnsdist-html-docs
-        working-directory: ./pdns/dnsdistdist/docs/_build
-      - run: tar cf dnsdist-html-docs.tar dnsdist-html-docs
-        working-directory: ./pdns/dnsdistdist/docs/_build
-      - uses: actions/upload-artifact@v4
-        with:
-          name: dnsdist-html-docs-${{steps.get-version.outputs.pdns_version}}
-          path: ./pdns/dnsdistdist/docs/_build/dnsdist-html-docs.tar
-      - run: inv ci-docs-build-pdf
-        working-directory: ./pdns/dnsdistdist
-      - uses: actions/upload-artifact@v4
-        with:
-          name: dnsdist-${{steps.get-version.outputs.pdns_version}}.pdf
-          path: ./pdns/dnsdistdist/docs/_build/latex/dnsdist.pdf
-      - name: Copy PDF to HTML docs directory
-        run: |
-          cp ./pdns/dnsdistdist/docs/_build/latex/dnsdist.pdf ./pdns/dnsdistdist/docs/_build/dnsdist-html-docs/
-
-      # Build website
-      - name: Build website
-        run: |
-          tar cf website.tar website
-      - uses: actions/upload-artifact@v4
-        with:
-          name: website-${{steps.get-version.outputs.pdns_version}}
-          path: website.tar
-
-  publish-to-netlify:
-    name: Publish to Netlify
-    needs: build-docs
-    if: ${{ (github.ref_name == 'master' || vars.DOCS_WORKFLOW_BRANCH_OVERRIDE == 'true') && vars.PUBLISH_DOCS_TO_NETLIFY == 'true' }}
-    runs-on: ubuntu-22.04
-    env:
-      NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_API_TOKEN }}
-      NETLIFY_SITE_ID_DOCS: ${{ vars.NETLIFY_SITE_ID_DOCS }}
-      NETLIFY_SITE_ID_DNSDIST: ${{ vars.NETLIFY_SITE_ID_DNSDIST }}
-      PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
-    steps:
-      - name: Check required secrets
-        run: |
-          missing_secrets=()
-          [ -z "$NETLIFY_AUTH_TOKEN" ] && missing_secrets+=("NETLIFY_API_TOKEN")
-          [ -z "$NETLIFY_SITE_ID_DOCS" ] && missing_secrets+=("NETLIFY_SITE_ID_DOCS")
-          [ -z "$NETLIFY_SITE_ID_DNSDIST" ] && missing_secrets+=("NETLIFY_SITE_ID_DNSDIST")
-          if [ ${#missing_secrets[@]} -ne 0 ]; then
-            echo "Error: The following secrets are missing: ${missing_secrets[*]}"
-            exit 1
-          fi
-      - name: Install Netlify
-        run: npm install netlify-cli@17.36.3 -g
-      - uses: actions/download-artifact@v4
-        with:
-          path: artifacts
-
-      - name: Deploy docs to Netlify
-        run: |
-          mkdir -p docs_site/temp
-          tar -xf artifacts/website-$PDNS_VERSION/website.tar -C docs_site/temp
-          mv docs_site/temp/website/docs.powerdns.com/* docs_site/
-          rm -rf docs_site/temp
-          tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar -C docs_site
-          tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar -C docs_site
-          mv docs_site/auth-html-docs docs_site/authoritative
-          mv docs_site/rec-html-docs docs_site/recursor
-          cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf docs_site/authoritative/
-          cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf docs_site/recursor/
-          netlify deploy \
-            --dir ./docs_site \
-            --site $NETLIFY_SITE_ID_DOCS \
-            --auth $NETLIFY_AUTH_TOKEN \
-            --prod
-
-      - name: Deploy DNSdist docs to Netlify
-        run: |
-          tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar
-          cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf dnsdist-html-docs/
-          netlify deploy \
-            --dir ./dnsdist-html-docs \
-            --site $NETLIFY_SITE_ID_DNSDIST \
-            --auth $NETLIFY_AUTH_TOKEN \
-            --prod
-
-  publish-to-web1:
-    name: Publish to Web1
-    needs: build-docs
-    if: ${{ (github.ref_name == 'master' || vars.DOCS_WORKFLOW_BRANCH_OVERRIDE == 'true') && vars.PUBLISH_DOCS_TO_WEB1 == 'true' }}
-    runs-on: ubuntu-22.04
-    env:
-      PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
-      DOCS_HOST: ${{vars.DOCS_HOST}}
-      REC_DOCS_DIR: ${{vars.REC_DOCS_DIR}}
-      AUTH_DOCS_DIR: ${{vars.AUTH_DOCS_DIR}}
-      SSH_KEY: ${{secrets.WEB1_DOCS_SECRET}}
-      HOST_KEY: ${{vars.WEB1_HOSTKEY}}
-    steps:
-      - name: Check required secrets
-        run: |
-          missing_secrets=()
-          [ -z "$SSH_KEY" ] && missing_secrets+=("WEB1_DOCS_SECRET")
-          [ -z "$HOST_KEY" ] && missing_secrets+=("WEB1_HOSTKEY")
-          [ -z "$DOCS_HOST" ] && missing_secrets+=("DOCS_HOST")
-          [ -z "$AUTH_DOCS_DIR" ] && missing_secrets+=("AUTH_DOCS_DIR")
-          [ -z "$REC_DOCS_DIR" ] && missing_secrets+=("REC_DOCS_DIR")
-          if [ ${#missing_secrets[@]} -ne 0 ]; then
-            echo "Error: The following secrets/variables are missing: ${missing_secrets[*]}"
-            exit 1
-          fi
-      - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
-      - uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-      - run: build-scripts/gh-actions-setup-inv-no-dist-upgrade
-      - uses: actions/download-artifact@v4
-        with:
-          path: artifacts
-      - id: setup-ssh
-        run: |-
-          inv ci-docs-add-ssh --ssh-key="$SSH_KEY" --host-key="$HOST_KEY"
-      - name: Publish Auth docs
-        run: |
-          mkdir -p ./docs/_build
-          tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar -C ./docs/_build/
-          cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf ./docs/_build/auth-html-docs/
-          inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="PowerDNS-Authoritative.pdf" --username="docs_powerdns_com" --product="auth" --directory="/${AUTH_DOCS_DIR}/"
-      - name: Publish Recursor docs
-        run: |
-          tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar -C ./docs/_build/
-          cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf ./docs/_build/rec-html-docs/
-          inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="PowerDNS-Recursor.pdf" --username="docs_powerdns_com" --product="rec" --directory="/${REC_DOCS_DIR}/"
-      - name: Publish DNSdist docs
-        run: |
-          tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar -C ./docs/_build/
-          cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf ./docs/_build/dnsdist-html-docs/
-          inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="dnsdist.pdf" --username="dnsdist_org" --product="dnsdist" --directory="/${DNSDIST_DOCS_DIR}/"
-
-
-  publish-to-aws:
-    name: Publish to AWS
-    needs: build-docs
-    if: ${{ (github.ref_name == 'master' || vars.DOCS_WORKFLOW_BRANCH_OVERRIDE == 'true') && vars.PUBLISH_DOCS_TO_AWS == 'true' }}
-    runs-on: ubuntu-22.04
-    env:
-      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-      AWS_REGION: ${{ vars.AWS_REGION }}
-      AWS_S3_BUCKET_DOCS: ${{ vars.AWS_S3_BUCKET_DOCS }}
-      AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST }}
-      AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS }}
-      PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
-    steps:
-      - name: Check required secrets
-        run: |
-          missing_secrets=()
-          [ -z "$AWS_ACCESS_KEY_ID" ] && missing_secrets+=("AWS_ACCESS_KEY_ID")
-          [ -z "$AWS_SECRET_ACCESS_KEY" ] && missing_secrets+=("AWS_SECRET_ACCESS_KEY")
-          [ -z "$AWS_REGION" ] && missing_secrets+=("AWS_REGION")
-          [ -z "$AWS_S3_BUCKET_DOCS" ] && missing_secrets+=("AWS_S3_BUCKET_DOCS")
-          [ -z "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST")
-          [ -z "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS")
-          if [ ${#missing_secrets[@]} -ne 0 ]; then
-            echo "Error: The following secrets/variables are missing: ${missing_secrets[*]}"
-            exit 1
-          fi
-      - uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-      - uses: aws-actions/configure-aws-credentials@v4
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: ${{ vars.AWS_REGION }}
-      - name: Install and configure rclone
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y rclone
-          mkdir -p ~/.config/rclone/
-          cat > ~/.config/rclone/rclone.conf << EOF
-          [docs-s3]
-          type = s3
-          provider = AWS
-          env_auth = true
-          region = $AWS_REGION
-          EOF
-      - name: Download artifacts
-        uses: actions/download-artifact@v4
-        with:
-          path: artifacts
-      - name: Publish to AWS
-        run: |
-          if [ -n "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST" ]; then
-            tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar
-            cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf dnsdist-html-docs/
-
-            # Copy files to S3
-            echo "Copying DNSdist docs to S3..."
-            rclone copy --checksum dnsdist-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/dnsdist.org/
-            
-            # Always run invalidation
-            echo "Running CloudFront invalidation for DNSdist..."
-            aws cloudfront create-invalidation \
-              --distribution-id $AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST \
-              --paths "/*"
-            echo "Invalidation completed."
-          fi
-
-          if [ -n "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS" ]; then
-            tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar
-            cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf rec-html-docs/
-
-            # Copy all PowerDNS docs to S3
-            echo "Copying Recursor docs to S3..."
-            rclone copy --checksum rec-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/recursor/
-
-            tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar
-            cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf auth-html-docs/
-            echo "Copying Authoritative docs to S3..."
-            rclone copy --checksum auth-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/authoritative/
-
-            tar -xf artifacts/website-$PDNS_VERSION/website.tar
-            echo "Copying website files to S3..."
-            rclone copy --checksum website/docs.powerdns.com/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/
-
-            # Always run invalidation
-            echo "Running CloudFront invalidation for PowerDNS docs..."
-            aws cloudfront create-invalidation \
-              --distribution-id $AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS \
-              --paths "/*"
-            echo "Invalidation completed."
-          fi
-
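The header comments of the deleted documentation.yml above list the repository variables and secrets that gate each publish target (PUBLISH_DOCS_TO_AWS, PUBLISH_DOCS_TO_NETLIFY, PUBLISH_DOCS_TO_WEB1, plus DOCS_WORKFLOW_BRANCH_OVERRIDE for testing from non-master branches). As a hedged sketch only, not part of this commit, such toggles would typically be managed with the GitHub CLI; the values and repository below are illustrative:

    # set a publish toggle as a repository variable
    gh variable set PUBLISH_DOCS_TO_NETLIFY --body "true" --repo PowerDNS/pdns
    # store the matching credential as a repository secret (prompts for the value)
    gh secret set NETLIFY_API_TOKEN --repo PowerDNS/pdns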
diff --git a/.github/workflows/misc-dailies.yml b/.github/workflows/misc-dailies.yml
deleted file mode 100644 (file)
index 1fa9646..0000000
+++ /dev/null
@@ -1,180 +0,0 @@
-name: "Various daily checks"
-
-on:
-  schedule:
-    - cron: '34 4 * * *'
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  contents: read
-
-env:
-  CLANG_VERSION: '12'
-
-jobs:
-  el7-devtoolset:
-    if: ${{ vars.SCHEDULED_MISC_DAILIES }}
-    runs-on: ubuntu-22.04
-
-    steps:
-    - name: Check whether a newer devtoolset exists
-      run: |
-        if docker run --rm centos:7 bash -c 'yum install -y centos-release-scl-rh && yum info devtoolset-12-gcc-c++'
-        then
-          echo "::warning file=builder-support/dockerfiles/Dockerfile.rpmbuild::A newer devtoolset exists. Please edit builder-support/dockerfiles/Dockerfile.rpmbuild and .github/workflows/dailies.yml"
-          exit 1
-        else
-          echo "::notice ::No newer devtoolset exists (good)"
-          exit 0
-        fi
-
-  check-debian-autoremovals:
-    if: ${{ vars.SCHEDULED_MISC_DAILIES }}
-    runs-on: ubuntu-22.04
-    steps:
-    - uses: actions/checkout@v4
-      with:
-        fetch-depth: 5
-        submodules: recursive
-        persist-credentials: false
-
-    - name: Check if Debian is about to toss us off a balcony
-      run: ./build-scripts/check-debian-autoremovals.py
-
-  coverity-auth:
-    name: coverity scan of the auth
-    if: ${{ vars.SCHEDULED_MISC_DAILIES }}
-    uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
-    with:
-      product: 'authoritative'
-    secrets:
-      COVERITY_TOKEN: ${{ secrets.coverity_auth_token }}
-      COVERITY_EMAIL: ${{ secrets.coverity_email }}
-
-  coverity-dnsdist:
-    name: coverity scan of dnsdist
-    if: ${{ vars.SCHEDULED_MISC_DAILIES }}
-    uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
-    with:
-      product: 'dnsdist'
-    secrets:
-      COVERITY_TOKEN: ${{ secrets.coverity_dnsdist_token }}
-      COVERITY_EMAIL: ${{ secrets.coverity_email }}
-
-  coverity-rec:
-    name: coverity scan of the rec
-    if: ${{ vars.SCHEDULED_MISC_DAILIES }}
-    uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
-    with:
-      product: 'recursor'
-    secrets:
-      COVERITY_TOKEN: ${{ secrets.coverity_rec_token }}
-      COVERITY_EMAIL: ${{ secrets.coverity_email }}
-
-  # The jobs below check that only the pinned versions of Python3 packages are installed with pip. Running in a container, a
-  # pip proxy registers all requests for installing packages with pip.  Then, the downloaded packages and their versions are compared
-  # with the list used for the installation (e.g. docs/requirements.txt, pdns/recursordist/docs/requirements.txt, etc.). If a package
-  # is missing or a version does not match the one expected, this job fails, which makes the workflow fail.
-  #
-  # The pinned version plus hashes are generated using pip-compile using an input file that includes the original list of packages
-  # (pip-compile --generate-hashes -U requirements.in). "pip-compile" can be installed via pip-tools with Python 3.11, which is the version
-  # used in the CI. Any other Python version would end up with different versions for packages and could result in workflow failures.
-  #
-  # One recurring error thrown by this validation is when a new version of a pinned package is released for a "setup-requires" dependency
-  # of one of the packages in the list (see https://github.com/PowerDNS/pdns/pull/14596). The package version in "requirements.in" should
-  # be modified to solve this issue. In some cases, it is enough to regenerate the list of packages, making sure to add the -U flag
-  # to force the upgrade: "pip-compile --generate-hashes -U requirements.in" (this could include upgrading other packages).
-
-  list-pip-requirement-files:
-    if: ${{ vars.SCHEDULED_MISC_DAILIES }}
-    runs-on: ubuntu-22.04
-    outputs:
-      req-files: ${{ steps.get-list-requirements.outputs.files }}
-    steps:
-    - uses: actions/checkout@v4
-      with:
-        persist-credentials: false
-    - name: Get all requirements.txt files and export them as outputs
-      id: get-list-requirements
-      run: |
-        echo "files=$(find . -name 'requirements.txt' | jq -R -s -c 'split("\n")[:-1]')" >> "$GITHUB_OUTPUT"
-
-  validate-pip-hashes:
-    if: ${{ vars.SCHEDULED_MISC_DAILIES }}
-    name: ${{ matrix.requirements-file }} - Validate list of packages and hashes
-    runs-on: ubuntu-22.04
-    needs: list-pip-requirement-files
-    services:
-      database:
-        image: epicwink/proxpi@sha256:a219ea0ef4f5b272eaf18bc5a5d00220c5aa07debb434d36161550862768aa93
-        ports:
-          - 5000:5000
-        options: >-
-          --restart always
-    strategy:
-      fail-fast: false
-      matrix:
-        requirements-file: ${{ fromJson(needs.list-pip-requirement-files.outputs.req-files) }}
-    env:
-      SERVICE_IP_ADDR: 127.0.0.1
-      REQUIREMENTS_FILE: ${{ matrix.requirements-file }}
-    steps:
-      - run: echo "${REQUIREMENTS_FILE}"
-      - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
-      - uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-      - uses: actions/setup-python@v5
-        with:
-          python-version: '3.11'
-      # Configure pip index-url set to proxpi
-      - run: pip config set global.index-url http://${{ env.SERVICE_IP_ADDR }}:5000/index/
-      - run: pip config set global.trusted-host ${{ env.SERVICE_IP_ADDR }}
-      - id: proxpi-docker
-        run: echo "id=$(docker ps | grep "epicwink/proxpi" | awk '{print $1}')" >> "$GITHUB_OUTPUT"
-      - run: pip install -r ${REQUIREMENTS_FILE}
-      - name: Get the list of packages requested from the pip proxy
-        env:
-          ID: ${{ steps.proxpi-docker.outputs.id }}
-        run: |
-          docker logs "${ID}" 2>&1 | grep whl | awk '{print $8}' | cut -d "/" -f 4 | awk -F'-' '{print $1"=="$2}' | sort -u --ignore-case | sed 's/_/-/' | egrep -v "pip==|setuptools==" > /tmp/proxpi.log
-          cat /tmp/proxpi.log
-      - name: check only listed packages were installed
-        run: for i in `cat /tmp/proxpi.log`; do grep -qq -i $i ${REQUIREMENTS_FILE} || ( echo "$i not found" && exit 1 ); done
-
-  run-rec-bulk-test-on-ubicloud:
-    if: ${{ vars.UBICLOUD_DAILY_REC_BULKTEST == '1' }}
-    name: Run Recursor (master) Bulk Test On Ubicloud
-    runs-on: ubicloud-standard-8-ubuntu-2404
-    strategy:
-      matrix:
-        testsize: [500000]
-        IPv6: [0, 1]
-      max-parallel: 1
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          ref: ${{ inputs.branch-name }}
-          persist-credentials: false
-      - name: Add PowerDNS Repo
-        run: echo 'deb [signed-by=/etc/apt/keyrings/rec-master-pub.asc] http://repo.powerdns.com/ubuntu noble-rec-master main' | sudo tee /etc/apt/sources.list.d/pdns.list
-      - name: Add key
-        run: sudo install -d /etc/apt/keyrings; curl https://repo.powerdns.com/CBC8B383-pub.asc | sudo tee /etc/apt/keyrings/rec-master-pub.asc
-      - name: Apt Update
-        run: sudo apt-get update
-      - name: Install Recursor Package and PDNS Tools
-        run: sudo apt install pdns-recursor pdns-tools
-      - name: Get Top Million Names
-        run: curl --no-progress-meter -LO https://umbrella-static.s3.dualstack.us-west-1.amazonaws.com/top-1m.csv.zip && unzip top-1m.csv.zip -d .
-        working-directory: regression-tests
-      - name: Run Bulk Test
-        run: ./recursor-test 5300 ${{ matrix.testsize }} 8 2048 1024
-        working-directory: regression-tests
-        env:
-          RECURSOR: /usr/sbin/pdns_recursor
-          RECCONTROL: /usr/bin/rec_control
-          DNSBULKTEST: /usr/bin/dnsbulktest
-          THRESHOLD: 95
-          TRACE: no
-          IPv6: ${{ matrix.IPv6 }}
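The comment block in the deleted misc-dailies.yml above explains how the pinned requirements.txt files with hashes are produced and then validated through a pip proxy. A minimal sketch of the regeneration step it describes, assuming pip-tools is installed under the same Python 3.11 the CI uses and a requirements.in sits next to the requirements.txt being refreshed:

    # install pip-tools, then re-pin every package with upgraded versions and hashes
    python3.11 -m pip install pip-tools
    pip-compile --generate-hashes -U requirements.in   # writes the pinned list to requirements.txt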
diff --git a/.github/workflows/secpoll.yml b/.github/workflows/secpoll.yml
deleted file mode 100644 (file)
index 314daa2..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
----
-name: 'Verify secpoll zone syntax'
-
-on:
-  push:
-  pull_request:
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  contents: read
-
-jobs:
-  build:
-    name: check secpoll zone
-    # on a ubuntu-24.04 VM
-    runs-on: ubuntu-24.04
-    steps:
-      - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 5
-          submodules: recursive
-          persist-credentials: false
-      - run: sh docs/secpoll-check.sh docs/secpoll.zone
-      - run: echo 'deb [arch=amd64] http://repo.powerdns.com/ubuntu noble-auth-master main' | sudo tee /etc/apt/sources.list.d/pdns.list
-      - run: "echo -ne 'Package: pdns-*\nPin: origin repo.powerdns.com\nPin-Priority: 600\n' | sudo tee /etc/apt/preferences.d/pdns"
-      - run: sudo curl https://repo.powerdns.com/CBC8B383-pub.asc -o /etc/apt/trusted.gpg.d/CBC8B383-pub.asc
-      - run: sudo apt-get update
-      - run: sudo systemctl mask pdns
-      - run: sudo apt-get install -y pdns-server pdns-backend-sqlite3
-      - run: "echo -ne 'launch=gsqlite3\ngsqlite3-database=/var/lib/powerdns/pdns.sqlite3\n' | sudo tee /etc/powerdns/pdns.conf"
-      - run: sudo sqlite3 /var/lib/powerdns/pdns.sqlite3 < /usr/share/doc/pdns-backend-sqlite3/schema.sqlite3.sql
-      - run: sudo pdnsutil load-zone secpoll.powerdns.com docs/secpoll.zone
-      - run: sudo pdnsutil check-zone secpoll.powerdns.com
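
The secpoll zone check deleted above can still be run by hand with the same commands the job used. A sketch, assuming an Ubuntu host with pdns-server and pdns-backend-sqlite3 installed from repo.powerdns.com and a pdns checkout as the working directory:

# Syntax-check the zone file shipped in the repository.
sh docs/secpoll-check.sh docs/secpoll.zone

# Point the server at a fresh gsqlite3 backend, load the zone, and validate it.
echo -ne 'launch=gsqlite3\ngsqlite3-database=/var/lib/powerdns/pdns.sqlite3\n' | sudo tee /etc/powerdns/pdns.conf
sudo sqlite3 /var/lib/powerdns/pdns.sqlite3 < /usr/share/doc/pdns-backend-sqlite3/schema.sqlite3.sql
sudo pdnsutil load-zone secpoll.powerdns.com docs/secpoll.zone
sudo pdnsutil check-zone secpoll.powerdns.com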
diff --git a/.github/workflows/spelling.yml b/.github/workflows/spelling.yml
deleted file mode 100644
index c250cd1..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-# spelling.yml is disabled per https://github.com/check-spelling/check-spelling/security/advisories/GHSA-g86g-chm8-7r2p
-name: Workflow should not run!
-on:
-  push:
-    branches: ''
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
-  contents: read
-
-jobs:
-  placeholder:
-    name: Should be disabled
-    runs-on: ubuntu-latest
-    if: false
-    steps:
-    - name: Task
-      run: |
-        echo 'Running this task would be bad'
-        exit 1
diff --git a/.github/workflows/spelling2.yml b/.github/workflows/spelling2.yml
deleted file mode 100644
index 9c5259f..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-# spelling2.yml is disabled per https://github.com/check-spelling/check-spelling/security/advisories/GHSA-p8r9-69g4-jwqq
-name: Workflow should not run!
-on:
-  push:
-    branches: ''
-
-jobs:
-  placeholder:
-    permissions:
-    name: Should be disabled
-    runs-on: ubuntu-latest
-    if: false
-    steps:
-    - name: Task
-      run: |
-        echo 'Running this task would be bad'
-        exit 1
diff --git a/.github/workflows/spelling3.yml b/.github/workflows/spelling3.yml
deleted file mode 100644
index e527948..0000000
+++ /dev/null
@@ -1,76 +0,0 @@
-# spelling.yml is blocked per https://github.com/check-spelling/check-spelling/security/advisories/GHSA-g86g-chm8-7r2p
-# spelling2.yml is blocked per https://github.com/check-spelling/check-spelling/security/advisories/GHSA-p8r9-69g4-jwqq
-name: Spell checking
-
-on:
-  push:
-    branches:
-      - "**"
-    tags-ignore:
-      - "**"
-  pull_request:
-    branches:
-      - "**"
-    types:
-      - "opened"
-      - "reopened"
-      - "synchronize"
-
-jobs:
-  spelling:
-    name: Spell checking
-    permissions:
-      # contents-read is needed to checkout in private repositories
-      contents: read
-      # pull-requests-read is needed for suppress_push_for_open_pull_request in private repositories
-      pull-requests: read
-      # security-events-write is needed according to the documentation:
-      # https://docs.github.com/en/code-security/code-scanning/integrating-with-code-scanning/uploading-a-sarif-file-to-github#uploading-a-code-scanning-analysis-with-github-actions
-      security-events: write
-    outputs:
-      followup: ${{ steps.spelling.outputs.followup }}
-    runs-on: ubuntu-latest
-    if: ${{ contains(github.event_name, 'pull_request') || github.event_name == 'push' }}
-    concurrency:
-      group: spelling-${{ github.event.pull_request.number || github.ref }}
-      # note: If you use only_check_changed_files, you do not want cancel-in-progress
-      cancel-in-progress: true
-    steps:
-      - name: check-spelling
-        id: spelling
-        uses: check-spelling/check-spelling@v0.0.25
-        with:
-          config: .github/actions/spell-check
-          suppress_push_for_open_pull_request: ${{ github.actor != 'dependabot[bot]' && 1 }}
-          checkout: true
-          check_file_names: 1
-          spell_check_this: powerdns/pdns@master
-          post_comment: 0
-          use_magic_file: 1
-          warnings: bad-regex,binary-file,deprecated-feature,ignored-expect-variant,large-file,limited-references,no-newline-at-eof,noisy-file,non-alpha-in-dictionary,token-is-substring,unexpected-line-ending,whitespace-in-dictionary,minified-file,unsupported-configuration,no-files-to-check,unclosed-block-ignore-begin,unclosed-block-ignore-end
-          use_sarif: ${{ (!github.event.pull_request || (github.repository_owner != 'PowerDNS' && github.event.pull_request.head.repo.full_name == github.repository)) && !vars.DO_NOT_USE_SARIF_REPORTING && 1 }}
-          dictionary_source_prefixes: >
-            {
-            "cspell": "https://raw.githubusercontent.com/check-spelling/cspell-dicts/v20241114/dictionaries/"
-            }
-          extra_dictionaries: |
-            cspell:software-terms/softwareTerms.txt
-            cspell:node/node.txt
-            cspell:php/php.txt
-            cspell:python/common/extra.txt
-            cspell:python/python/python-lib.txt
-            cspell:golang/go.txt
-            cspell:cpp/stdlib-cpp.txt
-            cspell:aws/aws.txt
-            cspell:filetypes/filetypes.txt
-            cspell:dart/dart.txt
-            cspell:rust/rust.txt
-            cspell:npm/npm.txt
-            cspell:cpp/stdlib-c.txt
-            cspell:fullstack/fullstack.txt
-            cspell:python/python/python.txt
-            cspell:typescript/typescript.txt
-            cspell:html/html.txt
-            cspell:java/java.txt
-            cspell:lua/lua.txt
-          check_extra_dictionaries: ""
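
If spell checking is ever reinstated on this branch, the deleted spelling3.yml above already carries the full configuration; a minimal starting point could look like the sketch below, which reuses only inputs present in that file (the pinned action version, config path and permissions are carried over unchanged and may need review).

name: Spell checking
on:
  pull_request:

jobs:
  spelling:
    permissions:
      contents: read
      pull-requests: read
      security-events: write
    runs-on: ubuntu-latest
    steps:
      - name: check-spelling
        id: spelling
        uses: check-spelling/check-spelling@v0.0.25
        with:
          config: .github/actions/spell-check
          checkout: true
          post_comment: 0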