echo "image-id=ghcr.io/powerdns/$DOCKER_IMAGE" >> "$GITHUB_OUTPUT"
echo "tag=$DEFAULT_IMAGE_TAG" >> "$GITHUB_OUTPUT"
- build-auth:
- name: build auth (${{ matrix.builder }})
- if: ${{ !github.event.schedule || vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
- runs-on: ubuntu-24.04
- needs: get-runner-container-image
- container:
- image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
- env:
- FUZZING_TARGETS: yes
- UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp"
- UNIT_TESTS: yes
- options: --sysctl net.ipv6.conf.all.disable_ipv6=0
- strategy:
- matrix:
- builder: [autotools, meson]
- fail-fast: false
- defaults:
- run:
- working-directory: ./pdns-${{ env.BUILDER_VERSION }}
- steps:
- - uses: actions/checkout@v4
- with:
- fetch-depth: 5
- submodules: recursive
- ref: ${{ inputs.branch-name }}
- persist-credentials: false
- - name: get timestamp for cache
- id: get-stamp
- run: |
- echo "stamp=$(/bin/date +%s)" >> "$GITHUB_OUTPUT"
- shell: bash
- working-directory: .
- - run: mkdir -p ~/.ccache
- working-directory: .
- - name: let GitHub cache our ccache data
- uses: actions/cache@v4
- with:
- path: ~/.ccache
- key: auth-ccache-${{ matrix.builder }}-${{ steps.get-stamp.outputs.stamp }}
- restore-keys: auth-ccache-${{ matrix.builder }}
- - name: set sanitizers
- run: echo "SANITIZERS=${{ matrix.builder == 'meson' && 'address,undefined' || 'asan+ubsan' }}" >> "$GITHUB_ENV"
- working-directory: .
- - name: install pip build dependencies
- run: |
- python3 -m venv ${REPO_HOME}/.venv
- . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
- working-directory: .
- - run: ${{ env.INV_CMD }} install-auth-build-deps
- working-directory: .
- - run: ${{ env.INV_CMD }} ci-autoconf ${{ matrix.builder == 'meson' && '--meson' || '' }}
- working-directory: .
- - run: ${{ env.INV_CMD }} ci-auth-configure ${{ matrix.builder == 'meson' && '--meson' || '' }} -b pdns-${{ env.BUILDER_VERSION }}
- working-directory: .
- - run: ${{ env.INV_CMD }} ci-auth-build ${{ matrix.builder == 'meson' && '--meson' || '' }} # This runs under pdns-$BUILDER_VERSION/pdns/ for make bear
- - run: ${{ env.INV_CMD }} ci-auth-install-remotebackend-test-deps
- - if: ${{ matrix.builder == 'meson' }}
- run: ${{ env.INV_CMD }} install-auth-test-deps-only -b geoip
- - run: ${{ env.INV_CMD }} ci-auth-run-unit-tests ${{ matrix.builder == 'meson' && '--meson' || '' }}
- env:
- PDNS_BUILD_PATH: ../pdns-${{ env.BUILDER_VERSION }}
- - run: ${{ env.INV_CMD }} generate-coverage-info ./pdns-auth-testrunner $GITHUB_WORKSPACE
- if: ${{ env.COVERAGE == 'yes' && matrix.builder == 'meson' }}
- - name: Coveralls Parallel auth unit
- if: ${{ env.COVERAGE == 'yes' && matrix.builder == 'meson' }}
- uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
- with:
- flag-name: auth-unit-${{ env.SANITIZERS }}
- path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
- parallel: true
- allow-empty: true
- fail-on-error: false
- - run: ${{ env.INV_CMD }} ci-auth-install ${{ matrix.builder == 'meson' && '--meson' || '' }}
- - run: ccache -s
- - name: Prepare binaries folder
- if: ${{ matrix.builder == 'meson' }}
- run: |
- echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- mkdir -p /opt/pdns-auth/bin
- for i in $(find . -maxdepth 1 -type f -executable); do cp ${i} /opt/pdns-auth/bin/; done
- mkdir -p /opt/pdns-auth/sbin; mv /opt/pdns-auth/bin/pdns-auth /opt/pdns-auth/sbin/
- - if: ${{ matrix.builder == 'meson' }}
- name: Store the binaries
- uses: actions/upload-artifact@v4 # this takes 30 seconds, maybe we want to tar
- with:
- name: pdns-auth-${{ matrix.builder}}-${{ env.normalized-branch-name }}
- path: /opt/pdns-auth
- retention-days: 1
-
build-recursor:
name: build recursor
if: ${{ !github.event.schedule || vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
path: /opt/pdns-recursor
retention-days: 1
- build-dnsdist:
- name: build dnsdist
- if: ${{ !github.event.schedule || vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
- runs-on: ${{ ( vars.REPOSITORY_USE_UBICLOUD == '1' ) && 'ubicloud-standard-4-ubuntu-2404' || 'ubuntu-24.04' }}
- needs: get-runner-container-image
- strategy:
- matrix:
- builder: [autotools, meson]
- sanitizers: [asan+ubsan, tsan]
- features: [least, full]
- exclude:
- - sanitizers: tsan
- features: least
- fail-fast: false
- container:
- image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
- env:
- SANITIZERS: ${{ matrix.sanitizers }}
- UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp"
- UNIT_TESTS: yes
- FUZZING_TARGETS: yes
- options: --sysctl net.ipv6.conf.all.disable_ipv6=0
- defaults:
- run:
- working-directory: ./pdns/dnsdistdist/dnsdist-${{ env.BUILDER_VERSION }}
- env:
- CLANG_VERSION: ${{ contains(needs.get-runner-container-image.outputs.id, 'debian-11') && '13' || '19' }}
- steps:
- - uses: actions/checkout@v4
- with:
- fetch-depth: 5
- submodules: recursive
- ref: ${{ inputs.branch-name }}
- persist-credentials: false
- - name: get timestamp for cache
- id: get-stamp
- run: |
- echo "stamp=$(/bin/date +%s)" >> "$GITHUB_OUTPUT"
- shell: bash
- working-directory: .
- - run: mkdir -p ~/.ccache
- working-directory: .
- - name: let GitHub cache our ccache data
- uses: actions/cache@v4
- with:
- path: ~/.ccache
- key: dnsdist-${{ matrix.features }}-${{ matrix.sanitizers }}-${{ matrix.builder}}-ccache-${{ steps.get-stamp.outputs.stamp }}
- restore-keys: dnsdist-${{ matrix.features }}-${{ matrix.sanitizers }}-${{ matrix.builder}}-ccache-
- - name: install pip build dependencies
- run: |
- python3 -m venv ${REPO_HOME}/.venv
- . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
- working-directory: .
- - run: ${{ env.INV_CMD }} install-clang
- working-directory: .
- - run: ${{ env.INV_CMD }} install-lld-linker-if-needed
- working-directory: ./pdns/dnsdistdist/
- - run: ${{ env.INV_CMD }} ci-install-rust ${REPO_HOME}
- working-directory: ./pdns/dnsdistdist/
- - run: ${{ env.INV_CMD }} ci-build-and-install-quiche ${REPO_HOME}
- working-directory: ./pdns/dnsdistdist/
- - run: ${{ env.INV_CMD }} ci-autoconf
- if: ${{ matrix.builder == 'autotools' }}
- working-directory: ./pdns/dnsdistdist/
- - run: ${{ env.INV_CMD }} ci-dnsdist-configure ${{ matrix.features }} ${{ matrix.builder }} dnsdist-${{ env.BUILDER_VERSION }}
- working-directory: ./pdns/dnsdistdist/
- - run: ${{ env.INV_CMD }} ci-make-distdir
- if: ${{ matrix.builder == 'autotools' }}
- working-directory: ./pdns/dnsdistdist/
- - run: ${{ env.INV_CMD }} ci-dnsdist-configure ${{ matrix.features }} ${{ matrix.builder }} dnsdist-${{ env.BUILDER_VERSION }}
- if: ${{ matrix.builder == 'autotools' }}
- - run: ${{ env.INV_CMD }} ci-dnsdist-make-bear ${{ matrix.builder }}
- - run: ${{ env.INV_CMD }} ci-dnsdist-run-unit-tests ${{ matrix.builder }}
- - run: ${{ env.INV_CMD }} generate-coverage-info ./testrunner $GITHUB_WORKSPACE
- if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' && matrix.builder == 'meson'}}
- - name: Coveralls Parallel dnsdist unit
- if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' && matrix.builder == 'meson' }}
- uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
- with:
- flag-name: dnsdist-unit-${{ matrix.features }}-${{ matrix.sanitizers }}
- path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
- parallel: true
- allow-empty: true
- fail-on-error: false
- - run: ${{ env.INV_CMD }} ci-dnsdist-install ${{ matrix.builder == 'meson' && '--meson' || '' }}
- - run: ccache -s
- - name: Prepare binaries folder
- if: ${{ matrix.builder == 'meson' }}
- run: |
- echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- mkdir -p /opt/dnsdist/bin
- for i in $(find . -maxdepth 1 -type f -executable); do cp ${i} /opt/dnsdist/bin/; done
- - name: Store the binaries
- if: ${{ matrix.builder == 'meson' }}
- uses: actions/upload-artifact@v4 # this takes 30 seconds, maybe we want to tar
- with:
- name: dnsdist-${{ matrix.features }}-${{ matrix.sanitizers }}-${{ matrix.builder}}-${{ env.normalized-branch-name }}
- path: /opt/dnsdist
- retention-days: 1
-
- test-auth-api:
- needs:
- - build-auth
- - get-runner-container-image
- runs-on: ubuntu-24.04
- container:
- image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
- env:
- UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp"
- ASAN_OPTIONS: detect_leaks=0
- TSAN_OPTIONS: "halt_on_error=1:suppressions=${{ env.REPO_HOME }}/pdns/dnsdistdist/dnsdist-tsan.supp"
- AUTH_BACKEND_IP_ADDR: "172.17.0.1"
- options: --sysctl net.ipv6.conf.all.disable_ipv6=0
- strategy:
- matrix:
- include:
- - backend: gsqlite3
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- - backend: gmysql
- image: mysql@sha256:4bc6bc963e6d8443453676cae56536f4b8156d78bae03c0145cbe47c2aad73bb # mysql:5.7.44
- - backend: gpgsql
- image: postgres@sha256:bbcaba1d74865ee6d6318b5e297d0df73d1f6b6d995cd892b60a2cf1440b716a # postgres:14.18
- - backend: lmdb
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- fail-fast: false
- services:
- database:
- image: ${{ matrix.image }}
- env:
- POSTGRES_USER: runner
- POSTGRES_HOST_AUTH_METHOD: trust
- MYSQL_ALLOW_EMPTY_PASSWORD: 1
- ports:
- - 3306:3306
- - 5432:5432
- # FIXME: this works around dist-upgrade stopping all docker containers. dist-upgrade is huge on these images anyway. Perhaps we do want to run our tasks in a Docker container too.
- options: >-
- --restart always
- steps:
- - uses: actions/checkout@v4
- with:
- fetch-depth: 5
- submodules: recursive
- ref: ${{ inputs.branch-name }}
- persist-credentials: false
- - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- - name: Fetch the binaries
- uses: actions/download-artifact@v4
- with:
- name: pdns-auth-meson-${{ env.normalized-branch-name }}
- path: /opt/pdns-auth
- - name: install pip build dependencies
- run: |
- python3 -m venv ${REPO_HOME}/.venv
- . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
- - run: ${{ env.INV_CMD }} apt-fresh
- - run: ${{ env.INV_CMD }} install-clang-runtime
- - run: ${{ env.INV_CMD }} install-auth-test-deps -b ${{ matrix.backend }}
- - run: ${{ env.INV_CMD }} test-api auth -b ${{ matrix.backend }}
- - run: ${{ env.INV_CMD }} generate-coverage-info /opt/pdns-auth/sbin/pdns-auth $GITHUB_WORKSPACE
- if: ${{ env.COVERAGE == 'yes' }}
- - name: Coveralls Parallel auth API ${{ matrix.backend }}
- if: ${{ env.COVERAGE == 'yes' }}
- uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
- with:
- flag-name: auth-api-${{ matrix.backend }}
- path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
- parallel: true
- allow-empty: true
- fail-on-error: false
-
- test-auth-backend:
- needs:
- - build-auth
- - get-runner-container-image
- runs-on: ubuntu-24.04
- container:
- image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
- env:
- UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp"
- ASAN_OPTIONS: detect_leaks=0
- LDAPHOST: ldap://ldapserver/
- ODBCINI: /github/home/.odbc.ini
- AUTH_BACKEND_IP_ADDR: "172.17.0.1"
- options: --sysctl net.ipv6.conf.all.disable_ipv6=0
- strategy:
- matrix:
- include:
- - backend: remote
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- env: {}
- ports: []
- - backend: gmysql
- image: mysql@sha256:4bc6bc963e6d8443453676cae56536f4b8156d78bae03c0145cbe47c2aad73bb # mysql:5.7.44
- env:
- MYSQL_ALLOW_EMPTY_PASSWORD: 1
- ports:
- - 3306:3306
- - backend: gmysql
- image: mariadb@sha256:34adebbac117c8ce649040e009f520fb79e577c68cc4e57debdf91befa53907f # mariadb:10.11.13
- env:
- MYSQL_ALLOW_EMPTY_PASSWORD: 1
- ports:
- - 3306:3306
- - backend: gpgsql
- image: postgres@sha256:bbcaba1d74865ee6d6318b5e297d0df73d1f6b6d995cd892b60a2cf1440b716a # postgres:14.18
- env:
- POSTGRES_USER: runner
- POSTGRES_HOST_AUTH_METHOD: trust
- ports:
- - 5432:5432
- - backend: gsqlite3 # this also runs regression-tests.nobackend and pdnsutil test-algorithms
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- env: {}
- ports: []
- - backend: lmdb
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- env: {}
- ports: []
- - backend: bind
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- env: {}
- ports: []
- - backend: geoip
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- env: {}
- ports: []
- - backend: lua2
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- env: {}
- ports: []
- - backend: tinydns
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- env: {}
- ports: []
- - backend: authpy
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- env: {}
- ports: []
- - backend: godbc_sqlite3
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- env: {}
- ports: []
- - backend: godbc_mssql
- image: mcr.microsoft.com/mssql/server@sha256:b94071acd4612bfe60a73e265097c2b6388d14d9d493db8f37cf4479a4337480 # mcr.microsoft.com/mssql/server:2022-CU12-ubuntu-22.04
- env:
- ACCEPT_EULA: Y
- SA_PASSWORD: 'SAsa12%%-not-a-secret-password'
- ports:
- - 1433:1433
- - backend: ldap
- image: powerdns/ldap-regress@sha256:1cc8511d0eb28ac7169042e9b8ae88f9ed5f93b8ac3a550755c56eaaa153680e # powerdns/ldap-regress:1.2.4-1
- env:
- LDAP_LOG_LEVEL: 0
- CONTAINER_LOG_LEVEL: 4
- ports:
- - 389:389
- - backend: geoip_mmdb
- image: coscale/docker-sleep@sha256:7ac94378c23c68b47c623dee4b3ac694ed7201543df3feed668e487ef1102fc5
- env: {}
- ports: []
- fail-fast: false
- services:
- database:
- image: ${{ matrix.image }}
- env: ${{ matrix.env }}
- ports: ${{ matrix.ports }}
- # FIXME: this works around dist-upgrade stopping all docker containers. dist-upgrade is huge on these images anyway. Perhaps we do want to run our tasks in a Docker container too.
- options: >-
- --restart always
- steps:
- - uses: actions/checkout@v4
- with:
- fetch-depth: 5
- submodules: recursive
- ref: ${{ inputs.branch-name }}
- persist-credentials: false
- - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- - name: Fetch the binaries
- uses: actions/download-artifact@v4
- with:
- name: pdns-auth-meson-${{ env.normalized-branch-name }}
- path: /opt/pdns-auth
- # FIXME: install recursor for backends that have ALIAS
- - name: install pip build dependencies
- run: |
- python3 -m venv ${REPO_HOME}/.venv
- . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
- - run: ${{ env.INV_CMD }} install-clang-runtime
- - run: ${{ env.INV_CMD }} install-auth-test-deps -b ${{ matrix.backend }}
- - run: ${{ env.INV_CMD }} test-auth-backend -b ${{ matrix.backend }}
- - run: ${{ env.INV_CMD }} generate-coverage-info /opt/pdns-auth/sbin/pdns-auth $GITHUB_WORKSPACE
- if: ${{ env.COVERAGE == 'yes' }}
- - name: Coveralls Parallel auth backend ${{ matrix.backend }}
- if: ${{ env.COVERAGE == 'yes' }}
- uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
- with:
- flag-name: auth-backend-${{ matrix.backend }}
- path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
- parallel: true
- allow-empty: true
- fail-on-error: false
-
- test-ixfrdist:
- needs:
- - build-auth
- - get-runner-container-image
- runs-on: ubuntu-24.04
- container:
- image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
- env:
- UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp"
- ASAN_OPTIONS: detect_leaks=0
- options: --sysctl net.ipv6.conf.all.disable_ipv6=0
- steps:
- - uses: actions/checkout@v4
- with:
- fetch-depth: 5
- submodules: recursive
- ref: ${{ inputs.branch-name }}
- persist-credentials: false
- - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- - name: Fetch the binaries
- uses: actions/download-artifact@v4
- with:
- name: pdns-auth-meson-${{ env.normalized-branch-name }}
- path: /opt/pdns-auth
- - name: install pip build dependencies
- run: |
- python3 -m venv ${REPO_HOME}/.venv
- . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
- - run: ${{ env.INV_CMD }} install-clang-runtime
- - run: ${{ env.INV_CMD }} install-auth-test-deps
- - run: ${{ env.INV_CMD }} test-ixfrdist
- - run: ${{ env.INV_CMD }} generate-coverage-info /opt/pdns-auth/bin/ixfrdist $GITHUB_WORKSPACE
- if: ${{ env.COVERAGE == 'yes' }}
- - name: Coveralls Parallel ixfrdist
- if: ${{ env.COVERAGE == 'yes' }}
- uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
- with:
- flag-name: ixfrdist
- path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
- parallel: true
- allow-empty: true
- fail-on-error: false
-
test-recursor-api:
needs:
- build-recursor
# allow-empty: true
# fail-on-error: false
- test-dnsdist-regression:
- needs:
- - build-dnsdist
- - get-runner-container-image
- runs-on: ubuntu-24.04
- strategy:
- matrix:
- sanitizers: [asan+ubsan, tsan]
- fail-fast: false
- container:
- image: "${{ needs.get-runner-container-image.outputs.id }}:${{ needs.get-runner-container-image.outputs.tag }}"
- env:
- UBSAN_OPTIONS: "print_stacktrace=1:halt_on_error=1:suppressions=${{ env.REPO_HOME }}/build-scripts/UBSan.supp"
- # Disabling (intercept_send=0) the custom send wrappers for ASAN and TSAN because they cause the tools to report a race that doesn't exist on actual implementations of send(), see https://github.com/google/sanitizers/issues/1498
- ASAN_OPTIONS: intercept_send=0
- LSAN_OPTIONS: "suppressions=${{ env.REPO_HOME }}/pdns/dnsdistdist/dnsdist-lsan.supp"
- TSAN_OPTIONS: "halt_on_error=1:intercept_send=0:suppressions=${{ env.REPO_HOME }}/pdns/dnsdistdist/dnsdist-tsan.supp"
- # IncludeDir tests are disabled because of a weird interaction between TSAN and these tests which ever only happens on GH actions
- SKIP_INCLUDEDIR_TESTS: yes
- SANITIZERS: ${{ matrix.sanitizers }}
- COVERAGE: no
- options: --sysctl net.ipv6.conf.all.disable_ipv6=0 --privileged
- env:
- CLANG_VERSION: ${{ contains(needs.get-runner-container-image.outputs.id, 'debian-11') && '13' || '19' }}
- steps:
- - uses: actions/checkout@v4
- with:
- fetch-depth: 5
- submodules: recursive
- ref: ${{ inputs.branch-name }}
- persist-credentials: false
- - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- - name: Fetch the binaries
- uses: actions/download-artifact@v4
- with:
- name: dnsdist-full-${{ matrix.sanitizers }}-meson-${{ env.normalized-branch-name }}
- path: /opt/dnsdist
- - name: install pip build dependencies
- run: |
- python3 -m venv ${REPO_HOME}/.venv
- . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
- - run: ${{ env.INV_CMD }} install-clang-runtime
- - run: ${{ env.INV_CMD }} install-dnsdist-test-deps $([ "$(. /etc/os-release && echo $VERSION_CODENAME)" = "bullseye" ] && echo "--skipXDP=True")
- - run: ${{ env.INV_CMD }} test-dnsdist $([ "$(. /etc/os-release && echo $VERSION_CODENAME)" = "bullseye" ] && echo "--skipXDP=True")
- - run: ${{ env.INV_CMD }} generate-coverage-info /opt/dnsdist/bin/dnsdist $GITHUB_WORKSPACE
- if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
- - name: Coveralls Parallel dnsdist regression
- if: ${{ env.COVERAGE == 'yes' && matrix.sanitizers != 'tsan' }}
- uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b
- with:
- flag-name: dnsdist-regression-full-${{ matrix.sanitizers }}
- path-to-lcov: $GITHUB_WORKSPACE/coverage.lcov
- parallel: true
- allow-empty: true
- fail-on-error: false
-
- swagger-syntax-check:
- if: ${{ !github.event.schedule || vars.SCHEDULED_JOBS_BUILD_AND_TEST_ALL }}
- runs-on: ubuntu-24.04
- container:
- image: ghcr.io/powerdns/base-pdns-ci-image/debian-12-pdns-base:master
- options: --sysctl net.ipv6.conf.all.disable_ipv6=0
- steps:
- - uses: actions/checkout@v4
- with:
- fetch-depth: 5
- submodules: recursive
- ref: ${{ inputs.branch-name }}
- persist-credentials: false
- - name: install pip build dependencies
- run: |
- python3 -m venv ${REPO_HOME}/.venv
- . ${REPO_HOME}/.venv/bin/activate && pip install -r ${REPO_HOME}/meson/requirements.txt
- - run: ${{ env.INV_CMD }} install-swagger-tools
- - run: ${{ env.INV_CMD }} swagger-syntax-check
-
collect:
needs:
- - build-auth
- - build-dnsdist
- build-recursor
- - swagger-syntax-check
- - test-auth-api
- - test-auth-backend
- - test-dnsdist-regression
- - test-ixfrdist
- test-recursor-api
- test-recursor-regression
- test-recursor-bulk
runs-on: ${{ matrix.runner-os }}
strategy:
matrix:
- product: ['authoritative', 'recursor', 'dnsdist']
+ product: ['recursor']
os:
- centos-9-stream
- centos-10-stream
# Override automatic language detection by changing the below list
# Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
language: ['cpp']
- product: ['auth', 'rec', 'dnsdist']
+ product: ['rec']
# Learn more...
# https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
actions: read
jobs:
- call-build-image-auth:
- uses: PowerDNS/pdns/.github/workflows/build-docker-images.yml@master
- if: ${{ vars.SCHEDULED_DOCKER }}
- with:
- product: auth
- ref: ${{ github.ref_name }}
- image-name: pdns-auth-master
- image-tags: |-
- latest
- image-description: 'EXPERIMENTAL pdns auth image'
- platforms: linux/amd64,linux/arm64/v8
- push: true
- secrets:
- DOCKERHUB_ORGANIZATION_NAME: ${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}
- DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
- DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
-
call-build-image-recursor:
uses: PowerDNS/pdns/.github/workflows/build-docker-images.yml@master
if: ${{ vars.SCHEDULED_DOCKER }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
- call-build-image-dnsdist:
- uses: PowerDNS/pdns/.github/workflows/build-docker-images.yml@master
- if: ${{ vars.SCHEDULED_DOCKER }}
- with:
- product: dnsdist
- ref: ${{ github.ref_name }}
- image-name: dnsdist-master
- image-tags: |-
- latest
- image-description: 'EXPERIMENTAL dnsdist image'
- platforms: linux/amd64,linux/arm64/v8
- push: true
- secrets:
- DOCKERHUB_ORGANIZATION_NAME: ${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}
- DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
- DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
+++ /dev/null
-# This workflow builds and publishes documentation for PowerDNS products.
-# It is controlled by the following variables and secrets:
-
-# Variables:
-# - PUBLISH_DOCS_TO_AWS: Set to 'true' to enable publishing to AWS
-# - PUBLISH_DOCS_TO_NETLIFY: Set to 'true' to enable publishing to Netlify
-# - PUBLISH_DOCS_TO_WEB1: Set to 'true' to enable publishing to Web1
-# - DOCS_WORKFLOW_BRANCH_OVERRIDE: Set to 'true' to allow publishing from non-master branches (for testing)
-
-# - AWS_REGION: AWS region for S3 and CloudFront
-# - AWS_S3_BUCKET_DOCS: S3 bucket name for documentation
-# - AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST: CloudFront distribution ID for DNSdist docs
-# - AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS: CloudFront distribution ID for PowerDNS docs
-
-# - NETLIFY_SITE_ID_DOCS: Netlify site ID for PowerDNS docs
-# - NETLIFY_SITE_ID_DNSDIST: Netlify site ID for DNSdist docs
-
-
-# - REC_DOCS_DIR: Directory for Recursor docs on Web1
-# - AUTH_DOCS_DIR: Directory for Authoritative Server docs on Web1
-# - WEB1_HOSTKEY: SSH host key for Web1
-# - DOCS_HOST: Hostname for documentation server
-
-# Secrets:
-# - AWS_ACCESS_KEY_ID: AWS access key ID
-# - AWS_SECRET_ACCESS_KEY: AWS secret access key
-# - NETLIFY_API_TOKEN: API token for Netlify
-# - WEB1_DOCS_SECRET: SSH key for Web1 access
-
----
-name: 'Documentation'
-
-on:
- push:
- branches: [master]
- pull_request:
- branches: [master]
- workflow_dispatch: {}
-
-permissions:
- contents: read
-
-env:
- FORCE_JAVASCRIPT_ACTIONS_TO_NODE20: true
-
-jobs:
- build-docs:
- name: Build docs
- runs-on: ubuntu-22.04
- env:
- BRANCH_NAME: ${{ github.ref_name }}
- outputs:
- pdns_version: ${{ steps.get-version.outputs.pdns_version }}
- steps:
- - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
- - uses: actions/checkout@v4
- with:
- persist-credentials: false
- - uses: actions/setup-python@v5
- with:
- python-version: '3.11'
- - run: pip install pyyaml
- - run: build-scripts/gh-actions-setup-inv-no-dist-upgrade
- - run: inv install-doc-deps
- - run: inv install-doc-deps-pdf
-
- - id: get-version
- run: |
- echo "pdns_version=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
-
- # Build Auth docs
- - run: inv ci-docs-build
- - run: mv html auth-html-docs
- working-directory: ./docs/_build
- - run: tar cf auth-html-docs.tar auth-html-docs
- working-directory: ./docs/_build
- - uses: actions/upload-artifact@v4
- with:
- name: authoritative-html-docs-${{steps.get-version.outputs.pdns_version}}
- path: ./docs/_build/auth-html-docs.tar
- - run: inv ci-docs-build-pdf
- - uses: actions/upload-artifact@v4
- with:
- name: PowerDNS-Authoritative-${{steps.get-version.outputs.pdns_version}}.pdf
- path: ./docs/_build/latex/PowerDNS-Authoritative.pdf
- - name: Copy PDF to HTML docs directory
- run: |
- cp ./docs/_build/latex/PowerDNS-Authoritative.pdf ./docs/_build/auth-html-docs/
- # Build Rec docs
- - run: inv ci-metrics-rec-generate
- working-directory: ./pdns/recursordist
- - run: inv ci-docs-rec-generate
- working-directory: ./pdns/recursordist/rec-rust-lib
- - run: inv ci-docs-build
- working-directory: ./pdns/recursordist
- - run: mv html rec-html-docs
- working-directory: ./pdns/recursordist/docs/_build
- - run: tar cf rec-html-docs.tar rec-html-docs
- working-directory: ./pdns/recursordist/docs/_build
- - uses: actions/upload-artifact@v4
- with:
- name: recursor-html-docs-${{steps.get-version.outputs.pdns_version}}
- path: ./pdns/recursordist/docs/_build/rec-html-docs.tar
- - run: inv ci-docs-build-pdf
- working-directory: ./pdns/recursordist
- - uses: actions/upload-artifact@v4
- with:
- name: PowerDNS-Recursor-${{steps.get-version.outputs.pdns_version}}.pdf
- path: ./pdns/recursordist/docs/_build/latex/PowerDNS-Recursor.pdf
- - name: Copy PDF to HTML docs directory
- run: |
- cp ./pdns/recursordist/docs/_build/latex/PowerDNS-Recursor.pdf ./pdns/recursordist/docs/_build/rec-html-docs/
-
- # Build DNSdist docs
- - run: inv ci-docs-dnsdist-generate
- working-directory: ./pdns/dnsdistdist
- - run: inv ci-docs-build
- working-directory: ./pdns/dnsdistdist
- - run: mv html dnsdist-html-docs
- working-directory: ./pdns/dnsdistdist/docs/_build
- - run: tar cf dnsdist-html-docs.tar dnsdist-html-docs
- working-directory: ./pdns/dnsdistdist/docs/_build
- - uses: actions/upload-artifact@v4
- with:
- name: dnsdist-html-docs-${{steps.get-version.outputs.pdns_version}}
- path: ./pdns/dnsdistdist/docs/_build/dnsdist-html-docs.tar
- - run: inv ci-docs-build-pdf
- working-directory: ./pdns/dnsdistdist
- - uses: actions/upload-artifact@v4
- with:
- name: dnsdist-${{steps.get-version.outputs.pdns_version}}.pdf
- path: ./pdns/dnsdistdist/docs/_build/latex/dnsdist.pdf
- - name: Copy PDF to HTML docs directory
- run: |
- cp ./pdns/dnsdistdist/docs/_build/latex/dnsdist.pdf ./pdns/dnsdistdist/docs/_build/dnsdist-html-docs/
-
- # Build website
- - name: Build website
- run: |
- tar cf website.tar website
- - uses: actions/upload-artifact@v4
- with:
- name: website-${{steps.get-version.outputs.pdns_version}}
- path: website.tar
-
- publish-to-netlify:
- name: Publish to Netlify
- needs: build-docs
- if: ${{ (github.ref_name == 'master' || vars.DOCS_WORKFLOW_BRANCH_OVERRIDE == 'true') && vars.PUBLISH_DOCS_TO_NETLIFY == 'true' }}
- runs-on: ubuntu-22.04
- env:
- NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_API_TOKEN }}
- NETLIFY_SITE_ID_DOCS: ${{ vars.NETLIFY_SITE_ID_DOCS }}
- NETLIFY_SITE_ID_DNSDIST: ${{ vars.NETLIFY_SITE_ID_DNSDIST }}
- PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
- steps:
- - name: Check required secrets
- run: |
- missing_secrets=()
- [ -z "$NETLIFY_AUTH_TOKEN" ] && missing_secrets+=("NETLIFY_API_TOKEN")
- [ -z "$NETLIFY_SITE_ID_DOCS" ] && missing_secrets+=("NETLIFY_SITE_ID_DOCS")
- [ -z "$NETLIFY_SITE_ID_DNSDIST" ] && missing_secrets+=("NETLIFY_SITE_ID_DNSDIST")
- if [ ${#missing_secrets[@]} -ne 0 ]; then
- echo "Error: The following secrets are missing: ${missing_secrets[*]}"
- exit 1
- fi
- - name: Install Netlify
- run: npm install netlify-cli@17.36.3 -g
- - uses: actions/download-artifact@v4
- with:
- path: artifacts
-
- - name: Deploy docs to Netlify
- run: |
- mkdir -p docs_site/temp
- tar -xf artifacts/website-$PDNS_VERSION/website.tar -C docs_site/temp
- mv docs_site/temp/website/docs.powerdns.com/* docs_site/
- rm -rf docs_site/temp
- tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar -C docs_site
- tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar -C docs_site
- mv docs_site/auth-html-docs docs_site/authoritative
- mv docs_site/rec-html-docs docs_site/recursor
- cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf docs_site/authoritative/
- cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf docs_site/recursor/
- netlify deploy \
- --dir ./docs_site \
- --site $NETLIFY_SITE_ID_DOCS \
- --auth $NETLIFY_AUTH_TOKEN \
- --prod
-
- - name: Deploy DNSdist docs to Netlify
- run: |
- tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar
- cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf dnsdist-html-docs/
- netlify deploy \
- --dir ./dnsdist-html-docs \
- --site $NETLIFY_SITE_ID_DNSDIST \
- --auth $NETLIFY_AUTH_TOKEN \
- --prod
-
- publish-to-web1:
- name: Publish to Web1
- needs: build-docs
- if: ${{ (github.ref_name == 'master' || vars.DOCS_WORKFLOW_BRANCH_OVERRIDE == 'true') && vars.PUBLISH_DOCS_TO_WEB1 == 'true' }}
- runs-on: ubuntu-22.04
- env:
- PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
- DOCS_HOST: ${{vars.DOCS_HOST}}
- REC_DOCS_DIR: ${{vars.REC_DOCS_DIR}}
- AUTH_DOCS_DIR: ${{vars.AUTH_DOCS_DIR}}
- SSH_KEY: ${{secrets.WEB1_DOCS_SECRET}}
- HOST_KEY: ${{vars.WEB1_HOSTKEY}}
- steps:
- - name: Check required secrets
- run: |
- missing_secrets=()
- [ -z "$SSH_KEY" ] && missing_secrets+=("WEB1_DOCS_SECRET")
- [ -z "$HOST_KEY" ] && missing_secrets+=("WEB1_HOSTKEY")
- [ -z "$DOCS_HOST" ] && missing_secrets+=("DOCS_HOST")
- [ -z "$AUTH_DOCS_DIR" ] && missing_secrets+=("AUTH_DOCS_DIR")
- [ -z "$REC_DOCS_DIR" ] && missing_secrets+=("REC_DOCS_DIR")
- if [ ${#missing_secrets[@]} -ne 0 ]; then
- echo "Error: The following secrets/variables are missing: ${missing_secrets[*]}"
- exit 1
- fi
- - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
- - uses: actions/checkout@v4
- with:
- persist-credentials: false
- - run: build-scripts/gh-actions-setup-inv-no-dist-upgrade
- - uses: actions/download-artifact@v4
- with:
- path: artifacts
- - id: setup-ssh
- run: |-
- inv ci-docs-add-ssh --ssh-key="$SSH_KEY" --host-key="$HOST_KEY"
- - name: Publish Auth docs
- run: |
- mkdir -p ./docs/_build
- tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar -C ./docs/_build/
- cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf ./docs/_build/auth-html-docs/
- inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="PowerDNS-Authoritative.pdf" --username="docs_powerdns_com" --product="auth" --directory="/${AUTH_DOCS_DIR}/"
- - name: Publish Recursor docs
- run: |
- tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar -C ./docs/_build/
- cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf ./docs/_build/rec-html-docs/
- inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="PowerDNS-Recursor.pdf" --username="docs_powerdns_com" --product="rec" --directory="/${REC_DOCS_DIR}/"
- - name: Publish DNSdist docs
- run: |
- tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar -C ./docs/_build/
- cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf ./docs/_build/dnsdist-html-docs/
- inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="dnsdist.pdf" --username="dnsdist_org" --product="dnsdist" --directory="/${DNSDIST_DOCS_DIR}/"
-
-
- publish-to-aws:
- name: Publish to AWS
- needs: build-docs
- if: ${{ (github.ref_name == 'master' || vars.DOCS_WORKFLOW_BRANCH_OVERRIDE == 'true') && vars.PUBLISH_DOCS_TO_AWS == 'true' }}
- runs-on: ubuntu-22.04
- env:
- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- AWS_REGION: ${{ vars.AWS_REGION }}
- AWS_S3_BUCKET_DOCS: ${{ vars.AWS_S3_BUCKET_DOCS }}
- AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST }}
- AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS }}
- PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
- steps:
- - name: Check required secrets
- run: |
- missing_secrets=()
- [ -z "$AWS_ACCESS_KEY_ID" ] && missing_secrets+=("AWS_ACCESS_KEY_ID")
- [ -z "$AWS_SECRET_ACCESS_KEY" ] && missing_secrets+=("AWS_SECRET_ACCESS_KEY")
- [ -z "$AWS_REGION" ] && missing_secrets+=("AWS_REGION")
- [ -z "$AWS_S3_BUCKET_DOCS" ] && missing_secrets+=("AWS_S3_BUCKET_DOCS")
- [ -z "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST")
- [ -z "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS")
- if [ ${#missing_secrets[@]} -ne 0 ]; then
- echo "Error: The following secrets/variables are missing: ${missing_secrets[*]}"
- exit 1
- fi
- - uses: actions/checkout@v4
- with:
- persist-credentials: false
- - uses: aws-actions/configure-aws-credentials@v4
- with:
- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- aws-region: ${{ vars.AWS_REGION }}
- - name: Install and configure rclone
- run: |
- sudo apt-get update
- sudo apt-get install -y rclone
- mkdir -p ~/.config/rclone/
- cat > ~/.config/rclone/rclone.conf << EOF
- [docs-s3]
- type = s3
- provider = AWS
- env_auth = true
- region = $AWS_REGION
- EOF
- - name: Download artifacts
- uses: actions/download-artifact@v4
- with:
- path: artifacts
- - name: Publish to AWS
- run: |
- if [ -n "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST" ]; then
- tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar
- cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf dnsdist-html-docs/
-
- # Copy files to S3
- echo "Copying DNSdist docs to S3..."
- rclone copy --checksum dnsdist-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/dnsdist.org/
-
- # Always run invalidation
- echo "Running CloudFront invalidation for DNSdist..."
- aws cloudfront create-invalidation \
- --distribution-id $AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST \
- --paths "/*"
- echo "Invalidation completed."
- fi
-
- if [ -n "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS" ]; then
- tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar
- cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf rec-html-docs/
-
- # Copy all PowerDNS docs to S3
- echo "Copying Recursor docs to S3..."
- rclone copy --checksum rec-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/recursor/
-
- tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar
- cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf auth-html-docs/
- echo "Copying Authoritative docs to S3..."
- rclone copy --checksum auth-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/authoritative/
-
- tar -xf artifacts/website-$PDNS_VERSION/website.tar
- echo "Copying website files to S3..."
- rclone copy --checksum website/docs.powerdns.com/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/
-
- # Always run invalidation
- echo "Running CloudFront invalidation for PowerDNS docs..."
- aws cloudfront create-invalidation \
- --distribution-id $AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS \
- --paths "/*"
- echo "Invalidation completed."
- fi
-
+++ /dev/null
----
-name: 'Verify source code formatting; check Makefile.am sort order'
-
-on:
- push:
- pull_request:
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
- contents: read
-
-jobs:
- build:
- name: Verify source code formatting; check Makefile.am sort order
- # on a ubuntu-24.04 VM
- runs-on: ubuntu-24.04
- steps:
- - uses: actions/checkout@v4
- with:
- fetch-depth: 5
- submodules: recursive
- persist-credentials: false
- - name: Update dependencies
- run: |
- sudo apt-get update
- sudo apt-get -qq --no-install-recommends install git clang-format-19
- - name: Run format-code on files that should be formatted
- run: |
- ./build-scripts/format-code $(find . -type f -name '*.[ch][ch]' | sort | comm -23 - .not-formatted)
- git --no-pager diff
- exit $(git diff | wc -l)
- - name: Report file names that are out of order in Makefile.am files
- run: |
- exitcode=0
- for f in $(find . -type f -name 'Makefile.am'); do
- ./build-scripts/test-sources-sorted.py ${f}
- if [ $? -ne 0 ]; then
- exitcode=1
- fi
- done
- exit ${exitcode}
+++ /dev/null
-name: "Various daily checks"
-
-on:
- schedule:
- - cron: '34 4 * * *'
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
- contents: read
-
-env:
- CLANG_VERSION: '12'
-
-jobs:
- el7-devtoolset:
- if: ${{ vars.SCHEDULED_MISC_DAILIES }}
- runs-on: ubuntu-22.04
-
- steps:
- - name: Check whether a newer devtoolset exists
- run: |
- if docker run --rm centos:7 bash -c 'yum install -y centos-release-scl-rh && yum info devtoolset-12-gcc-c++'
- then
- echo "::warning file=builder-support/dockerfiles/Dockerfile.rpmbuild::A newer devtoolset exists. Please edit builder-support/dockerfiles/Dockerfile.rpmbuild and .github/workflows/dailies.yml"
- exit 1
- else
- echo "::notice ::No newer devtoolset exists (good)"
- exit 0
- fi
-
- check-debian-autoremovals:
- if: ${{ vars.SCHEDULED_MISC_DAILIES }}
- runs-on: ubuntu-22.04
- steps:
- - uses: actions/checkout@v4
- with:
- fetch-depth: 5
- submodules: recursive
- persist-credentials: false
-
- - name: Check if Debian is about to toss us off a balcony
- run: ./build-scripts/check-debian-autoremovals.py
-
- coverity-auth:
- name: coverity scan of the auth
- if: ${{ vars.SCHEDULED_MISC_DAILIES }}
- uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
- with:
- product: 'authoritative'
- secrets:
- COVERITY_TOKEN: ${{ secrets.coverity_auth_token }}
- COVERITY_EMAIL: ${{ secrets.coverity_email }}
-
- coverity-dnsdist:
- name: coverity scan of dnsdist
- if: ${{ vars.SCHEDULED_MISC_DAILIES }}
- uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
- with:
- product: 'dnsdist'
- secrets:
- COVERITY_TOKEN: ${{ secrets.coverity_dnsdist_token }}
- COVERITY_EMAIL: ${{ secrets.coverity_email }}
-
- coverity-rec:
- name: coverity scan of the rec
- if: ${{ vars.SCHEDULED_MISC_DAILIES }}
- uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
- with:
- product: 'recursor'
- secrets:
- COVERITY_TOKEN: ${{ secrets.coverity_rec_token }}
- COVERITY_EMAIL: ${{ secrets.coverity_email }}
-
- # The jobs below check that only the pinned version of Python3 packages are installed with pip. Running in a container, a
- # pip proxy registers all requests for installing packages with pip. Then, the downloaded packages and their versions are compared
- # with the list used for the installation (i.e. docs/requirements.txt, pdns/recursordist/docs/requirements.txt, etc). If a package
- # is missing or a version does not match the one expected, this job fails, which makes the workflow fail.
- #
- # The pinned version plus hashes are generated using pip-compile using an input file that includes the original list of packages
- # (pip-compile --generate-hashes -U requirements.in). "pip-compile" can be installed via pip-tools with Python 3.11, which is the version
- # used in the CI. Any other Python version would end up with different versions for packages and could result in workflow failures.
- #
- # One recurring error thrown by this validation is when a new version of a pinned package is released for a "setup-requires" dependency
- of one of the packages in the list (see https://github.com/PowerDNS/pdns/pull/14596). The package version in "requirements.in" should
- # be modified to solve this issue. In some cases, it is enough to generate again the list of packages, making sure to add the -U flag
- # to force the upgrade: "pip-compile --generate-hashes -U requirements.in" (this could include upgrading other packages).
-
- list-pip-requirement-files:
- if: ${{ vars.SCHEDULED_MISC_DAILIES }}
- runs-on: ubuntu-22.04
- outputs:
- req-files: ${{ steps.get-list-requirements.outputs.files }}
- steps:
- - uses: actions/checkout@v4
- with:
- persist-credentials: false
- - name: Get all requirements.txt files and export them as outputs
- id: get-list-requirements
- run: |
- echo "files=$(find . -name 'requirements.txt' | jq -R -s -c 'split("\n")[:-1]')" >> "$GITHUB_OUTPUT"
-
- validate-pip-hashes:
- if: ${{ vars.SCHEDULED_MISC_DAILIES }}
- name: ${{ matrix.requirements-file }} - Validate list of packages and hashes
- runs-on: ubuntu-22.04
- needs: list-pip-requirement-files
- services:
- database:
- image: epicwink/proxpi@sha256:a219ea0ef4f5b272eaf18bc5a5d00220c5aa07debb434d36161550862768aa93
- ports:
- - 5000:5000
- options: >-
- --restart always
- strategy:
- fail-fast: false
- matrix:
- requirements-file: ${{ fromJson(needs.list-pip-requirement-files.outputs.req-files) }}
- env:
- SERVICE_IP_ADDR: 127.0.0.1
- REQUIREMENTS_FILE: ${{ matrix.requirements-file }}
- steps:
- - run: echo "${REQUIREMENTS_FILE}"
- - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
- - uses: actions/checkout@v4
- with:
- persist-credentials: false
- - uses: actions/setup-python@v5
- with:
- python-version: '3.11'
- # Configure pip index-url set to proxpi
- - run: pip config set global.index-url http://${{ env.SERVICE_IP_ADDR }}:5000/index/
- - run: pip config set global.trusted-host ${{ env.SERVICE_IP_ADDR }}
- - id: proxpi-docker
- run: echo "id=$(docker ps | grep "epicwink/proxpi" | awk '{print $1}')" >> "$GITHUB_OUTPUT"
- - run: pip install -r ${REQUIREMENTS_FILE}
- - name: Get the list of packages requested to the pip proxy
- env:
- ID: ${{ steps.proxpi-docker.outputs.id }}
- run: |
- docker logs "${ID}" 2>&1 | grep whl | awk '{print $8}' | cut -d "/" -f 4 | awk -F'-' '{print $1"=="$2}' | sort -u --ignore-case | sed 's/_/-/' | egrep -v "pip==|setuptools==" > /tmp/proxpi.log
- cat /tmp/proxpi.log
- - name: check only listed packages were installed
- run: for i in `cat /tmp/proxpi.log`; do grep -qq -i $i ${REQUIREMENTS_FILE} || ( echo "$i not found" && exit 1 ); done
-
- run-rec-bulk-test-on-ubicloud:
- if: ${{ vars.UBICLOUD_DAILY_REC_BULKTEST == '1' }}
- name: Run Recursor (master) Bulk Test On Ubicloud
- runs-on: ubicloud-standard-8-ubuntu-2404
- strategy:
- matrix:
- testsize: [500000]
- IPv6: [0, 1]
- max-parallel: 1
- steps:
- - uses: actions/checkout@v4
- with:
- fetch-depth: 5
- submodules: recursive
- ref: ${{ inputs.branch-name }}
- persist-credentials: false
- - name: Add PowerDNS Repo
- run: echo 'deb [signed-by=/etc/apt/keyrings/rec-master-pub.asc] http://repo.powerdns.com/ubuntu noble-rec-master main' | sudo tee /etc/apt/sources.list.d/pdns.list
- - name: Add key
- run: sudo install -d /etc/apt/keyrings; curl https://repo.powerdns.com/CBC8B383-pub.asc | sudo tee /etc/apt/keyrings/rec-master-pub.asc
- - name: Apt Update
- run: sudo apt-get update
- - name: Install Recursor Package and PDNS Tools
- run: sudo apt install pdns-recursor pdns-tools
- - name: Get Top Million Names
- run: curl --no-progress-meter -LO https://umbrella-static.s3.dualstack.us-west-1.amazonaws.com/top-1m.csv.zip && unzip top-1m.csv.zip -d .
- working-directory: regression-tests
- - name: Run Bulk Test
- run: ./recursor-test 5300 ${{ matrix.testsize }} 8 2048 1024
- working-directory: regression-tests
- env:
- RECURSOR: /usr/sbin/pdns_recursor
- RECCONTROL: /usr/bin/rec_control
- DNSBULKTEST: /usr/bin/dnsbulktest
- THRESHOLD: 95
- TRACE: no
- IPv6: ${{ matrix.IPv6 }}
+++ /dev/null
----
-name: 'Verify secpoll zone syntax'
-
-on:
- push:
- pull_request:
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
- contents: read
-
-jobs:
- build:
- name: check secpoll zone
- # on a ubuntu-24.04 VM
- runs-on: ubuntu-24.04
- steps:
- - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
- - uses: actions/checkout@v4
- with:
- fetch-depth: 5
- submodules: recursive
- persist-credentials: false
- - run: sh docs/secpoll-check.sh docs/secpoll.zone
- - run: echo 'deb [arch=amd64] http://repo.powerdns.com/ubuntu noble-auth-master main' | sudo tee /etc/apt/sources.list.d/pdns.list
- - run: "echo -ne 'Package: pdns-*\nPin: origin repo.powerdns.com\nPin-Priority: 600\n' | sudo tee /etc/apt/preferences.d/pdns"
- - run: sudo curl https://repo.powerdns.com/CBC8B383-pub.asc -o /etc/apt/trusted.gpg.d/CBC8B383-pub.asc
- - run: sudo apt-get update
- - run: sudo systemctl mask pdns
- - run: sudo apt-get install -y pdns-server pdns-backend-sqlite3
- - run: "echo -ne 'launch=gsqlite3\ngsqlite3-database=/var/lib/powerdns/pdns.sqlite3\n' | sudo tee /etc/powerdns/pdns.conf"
- - run: sudo sqlite3 /var/lib/powerdns/pdns.sqlite3 < /usr/share/doc/pdns-backend-sqlite3/schema.sqlite3.sql
- - run: sudo pdnsutil load-zone secpoll.powerdns.com docs/secpoll.zone
- - run: sudo pdnsutil check-zone secpoll.powerdns.com
+++ /dev/null
-# spelling.yml is disabled per https://github.com/check-spelling/check-spelling/security/advisories/GHSA-g86g-chm8-7r2p
-name: Workflow should not run!
-on:
- push:
- branches: ''
-
-permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
- contents: read
-
-jobs:
- placeholder:
- name: Should be disabled
- runs-on: ubuntu-latest
- if: false
- steps:
- - name: Task
- run: |
- echo 'Running this task would be bad'
- exit 1
+++ /dev/null
-# spelling2.yml is disabled per https://github.com/check-spelling/check-spelling/security/advisories/GHSA-p8r9-69g4-jwqq
-name: Workflow should not run!
-on:
- push:
- branches: ''
-
-jobs:
- placeholder:
- permissions:
- name: Should be disabled
- runs-on: ubuntu-latest
- if: false
- steps:
- - name: Task
- run: |
- echo 'Running this task would be bad'
- exit 1
+++ /dev/null
-# spelling.yml is blocked per https://github.com/check-spelling/check-spelling/security/advisories/GHSA-g86g-chm8-7r2p
-# spelling2.yml is blocked per https://github.com/check-spelling/check-spelling/security/advisories/GHSA-p8r9-69g4-jwqq
-name: Spell checking
-
-on:
- push:
- branches:
- - "**"
- tags-ignore:
- - "**"
- pull_request:
- branches:
- - "**"
- types:
- - "opened"
- - "reopened"
- - "synchronize"
-
-jobs:
- spelling:
- name: Spell checking
- permissions:
- # contents-read is needed to checkout in private repositories
- contents: read
- # pull-requests-read is needed for suppress_push_for_open_pull_request in private repositories
- pull-requests: read
- # security-events-write is needed according to the documentation:
- # https://docs.github.com/en/code-security/code-scanning/integrating-with-code-scanning/uploading-a-sarif-file-to-github#uploading-a-code-scanning-analysis-with-github-actions
- security-events: write
- outputs:
- followup: ${{ steps.spelling.outputs.followup }}
- runs-on: ubuntu-latest
- if: ${{ contains(github.event_name, 'pull_request') || github.event_name == 'push' }}
- concurrency:
- group: spelling-${{ github.event.pull_request.number || github.ref }}
- # note: If you use only_check_changed_files, you do not want cancel-in-progress
- cancel-in-progress: true
- steps:
- - name: check-spelling
- id: spelling
- uses: check-spelling/check-spelling@v0.0.25
- with:
- config: .github/actions/spell-check
- suppress_push_for_open_pull_request: ${{ github.actor != 'dependabot[bot]' && 1 }}
- checkout: true
- check_file_names: 1
- spell_check_this: powerdns/pdns@master
- post_comment: 0
- use_magic_file: 1
- warnings: bad-regex,binary-file,deprecated-feature,ignored-expect-variant,large-file,limited-references,no-newline-at-eof,noisy-file,non-alpha-in-dictionary,token-is-substring,unexpected-line-ending,whitespace-in-dictionary,minified-file,unsupported-configuration,no-files-to-check,unclosed-block-ignore-begin,unclosed-block-ignore-end
- use_sarif: ${{ (!github.event.pull_request || (github.repository_owner != 'PowerDNS' && github.event.pull_request.head.repo.full_name == github.repository)) && !vars.DO_NOT_USE_SARIF_REPORTING && 1 }}
- dictionary_source_prefixes: >
- {
- "cspell": "https://raw.githubusercontent.com/check-spelling/cspell-dicts/v20241114/dictionaries/"
- }
- extra_dictionaries: |
- cspell:software-terms/softwareTerms.txt
- cspell:node/node.txt
- cspell:php/php.txt
- cspell:python/common/extra.txt
- cspell:python/python/python-lib.txt
- cspell:golang/go.txt
- cspell:cpp/stdlib-cpp.txt
- cspell:aws/aws.txt
- cspell:filetypes/filetypes.txt
- cspell:dart/dart.txt
- cspell:rust/rust.txt
- cspell:npm/npm.txt
- cspell:cpp/stdlib-c.txt
- cspell:fullstack/fullstack.txt
- cspell:python/python/python.txt
- cspell:typescript/typescript.txt
- cspell:html/html.txt
- cspell:java/java.txt
- cspell:lua/lua.txt
- check_extra_dictionaries: ""