See https://docs.github.com/en/actions/security-for-github-actions/security-guides/security-hardening-for-github-actions#good-practices-for-mitigating-script-injection-attacks
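The recurring change below applies that guidance: untrusted expressions such as ${{ inputs.* }}, ${{ github.ref_name }}, and ${{ toJSON(needs) }} are no longer templated directly into run: scripts, where a crafted value (for example "; curl attacker | sh #) would be executed as shell code. Instead each value is bound to an environment variable and the script reads the variable. A minimal sketch of the pattern, with hypothetical input and variable names:

  env:
    UNTRUSTED_INPUT: ${{ inputs.some-input }}  # expanded once by the runner
  steps:
    - run: echo "$UNTRUSTED_INPUT"             # plain env lookup, never parsed as shell code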
LLVM_PROFILE_FILE: "/tmp/code-%p.profraw"
OPTIMIZATIONS: yes
INV_CMD: ". ${REPO_HOME}/.venv/bin/activate && inv"
+ BRANCH_NAME: ${{ inputs.branch-name || github.ref_name }}
jobs:
get-runner-container-image:
id: ${{ steps.get-runner-image.outputs.image-id }}
tag: ${{ steps.get-runner-image.outputs.tag }}
env:
- DEFAULT_RUNNER_DOCKER_IMAGE: base-pdns-ci-image/debian-12-pdns-base
DEFAULT_IMAGE_TAG: master # update when backporting, e.g. auth-4.9.x
+ DOCKER_IMAGE: ${{ inputs.runner-docker-image-name || 'base-pdns-ci-image/debian-12-pdns-base' }}
steps:
- id: get-runner-image
run: |
- echo "image-id=ghcr.io/powerdns/${{ inputs.runner-docker-image-name || env.DEFAULT_RUNNER_DOCKER_IMAGE }}" >> "$GITHUB_OUTPUT"
- echo "tag=${{ env.DEFAULT_IMAGE_TAG }}" >> "$GITHUB_OUTPUT"
+ echo "image-id=ghcr.io/powerdns/$DOCKER_IMAGE" >> "$GITHUB_OUTPUT"
+ echo "tag=$DEFAULT_IMAGE_TAG" >> "$GITHUB_OUTPUT"
build-auth:
name: build auth (${{ matrix.builder }})
- run: ${{ env.INV_CMD }} ci-auth-install ${{ matrix.builder == 'meson' && '--meson' || '' }}
- run: ccache -s
- if: ${{ matrix.builder != 'meson' }}
- run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+ run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- if: ${{ matrix.builder != 'meson' }}
name: Store the binaries
uses: actions/upload-artifact@v4 # this takes 30 seconds, maybe we want to tar
working-directory: .
- run: ${{ env.INV_CMD }} install-rec-build-deps
working-directory: ./pdns/recursordist/
- - run: ${{ env.INV_CMD }} ci-install-rust ${{ env.REPO_HOME }}
+ - run: ${{ env.INV_CMD }} ci-install-rust ${REPO_HOME}
working-directory: ./pdns/recursordist/
- run: ${{ env.INV_CMD }} ci-autoconf ${{ matrix.builder == 'meson' && '--meson' || '' }}
working-directory: ./pdns/recursordist/
- run: ${{ env.INV_CMD }} ci-rec-install ${{ matrix.builder == 'meson' && '--meson' || '' }}
- run: ccache -s
- if: ${{ matrix.builder != 'meson' }}
- run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+ run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- if: ${{ matrix.builder != 'meson' }}
name: Store the binaries
uses: actions/upload-artifact@v4 # this takes 30 seconds, maybe we want to tar
working-directory: .
- run: ${{ env.INV_CMD }} install-lld-linker-if-needed
working-directory: ./pdns/dnsdistdist/
- - run: ${{ env.INV_CMD }} ci-install-rust ${{ env.REPO_HOME }}
+ - run: ${{ env.INV_CMD }} ci-install-rust ${REPO_HOME}
working-directory: ./pdns/dnsdistdist/
- - run: ${{ env.INV_CMD }} ci-build-and-install-quiche ${{ env.REPO_HOME }}
+ - run: ${{ env.INV_CMD }} ci-build-and-install-quiche ${REPO_HOME}
working-directory: ./pdns/dnsdistdist/
- run: ${{ env.INV_CMD }} ci-autoconf
if: ${{ matrix.builder == 'autotools' }}
- run: ${{ env.INV_CMD }} ci-make-install
if: ${{ matrix.builder == 'autotools' }}
- run: ccache -s
- - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+ - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- name: Store the binaries
if: ${{ matrix.builder == 'autotools' }}
uses: actions/upload-artifact@v4 # this takes 30 seconds, maybe we want to tar
submodules: recursive
ref: ${{ inputs.branch-name }}
persist-credentials: false
- - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+ - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- name: Fetch the binaries
uses: actions/download-artifact@v4
with:
submodules: recursive
ref: ${{ inputs.branch-name }}
persist-credentials: false
- - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+ - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- name: Fetch the binaries
uses: actions/download-artifact@v4
with:
submodules: recursive
ref: ${{ inputs.branch-name }}
persist-credentials: false
- - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+ - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- name: Fetch the binaries
uses: actions/download-artifact@v4
with:
submodules: recursive
ref: ${{ inputs.branch-name }}
persist-credentials: false
- - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+ - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- name: Fetch the binaries
uses: actions/download-artifact@v4
with:
submodules: recursive
ref: ${{ inputs.branch-name }}
persist-credentials: false
- - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+ - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- name: Fetch the binaries
uses: actions/download-artifact@v4
with:
submodules: recursive
ref: ${{ inputs.branch-name }}
persist-credentials: false
- - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+ - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- name: Fetch the binaries
uses: actions/download-artifact@v4
with:
submodules: recursive
ref: ${{ inputs.branch-name }}
persist-credentials: false
- - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+ - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- name: Fetch the binaries
uses: actions/download-artifact@v4
with:
submodules: recursive
ref: ${{ inputs.branch-name }}
persist-credentials: false
- - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+ - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
- name: Fetch the binaries
uses: actions/download-artifact@v4
with:
- test-recursor-ubicloud-bulk
if: success() || failure()
runs-on: ubuntu-24.04
+ env:
+ NEEDS: ${{ toJSON(needs) }}
steps:
- name: Coveralls Parallel Finished
if: ${{ env.COVERAGE == 'yes' }}
- name: Install jq and jc
run: "sudo apt-get update && sudo apt-get install jq jc"
- name: Fail job if any of the previous jobs failed
- run: "for i in `echo '${{ toJSON(needs) }}' | jq -r '.[].result'`; do if [[ $i == 'failure' ]]; then echo '${{ toJSON(needs) }}'; exit 1; fi; done;"
+ run: "for i in `echo ${NEEDS} | jq -r '.[].result'`; do if [[ $i == 'failure' ]]; then echo ${NEEDS}; exit 1; fi; done;"
- uses: actions/checkout@v4
with:
fetch-depth: 5
- name: Get list of jobs in the workflow
run: "cat .github/workflows/build-and-test-all.yml | jc --yaml | jq -rS '.[].jobs | keys | .[]' | grep -vE 'collect|get-runner-container-image' | tee /tmp/workflow-jobs-list.yml"
- name: Get list of prerequisite jobs
- run: "echo '${{ toJSON(needs) }}' | jq -rS 'keys | .[]' | tee /tmp/workflow-needs-list.yml"
+ run: "echo ${NEEDS} | jq -rS 'keys | .[]' | tee /tmp/workflow-needs-list.yml"
- name: Fail if there is a job missing from the needs list
run: "if ! diff -q /tmp/workflow-jobs-list.yml /tmp/workflow-needs-list.yml; then exit 1; fi"
image-tag: ${{ steps.get-image-tag.outputs.tag }}
image-name: ${{ steps.get-image-metadata.outputs.name }}
image-description: ${{ steps.get-image-metadata.outputs.description }}
+ env:
+ INPUT_REF: ${{ inputs.ref }}
+ PRODUCT: ${{ inputs.product }}
steps:
- run: |
- echo '${{ inputs.ref }}' | egrep -qq '^auth-.*|^rec-.*|^dnsdist-.*' && tag=$(echo '${{ inputs.ref }}' | cut -d '-' -f 2-)
+ echo "${INPUT_REF}" | egrep -qq '^auth-.*|^rec-.*|^dnsdist-.*' && tag=$(echo "${INPUT_REF}" | cut -d '-' -f 2-)
echo "tag=$tag" >> $GITHUB_OUTPUT
id: get-image-tag
- run: |
- if $(echo '${{ inputs.ref }}' | egrep -qq '^auth-.*|^rec-.*|^dnsdist-.*'); then
- echo "version=$(echo '${{ inputs.ref }}' | cut -d '-' -f 2 | awk -F'.' '{print $1$2}')" >> $GITHUB_ENV
- echo "branch=$(echo '${{ inputs.ref }}' | cut -d '-' -f 2- | awk -F'.' '{print "v"$1"."$2".x"}')" >> $GITHUB_ENV
+ if $(echo "${INPUT_REF}" | egrep -qq '^auth-.*|^rec-.*|^dnsdist-.*'); then
+ echo "version=$(echo "${INPUT_REF}" | cut -d '-' -f 2 | awk -F'.' '{print $1$2}')" >> $GITHUB_ENV
+ echo "branch=$(echo "${INPUT_REF}" | cut -d '-' -f 2- | awk -F'.' '{print "v"$1"."$2".x"}')" >> $GITHUB_ENV
else
- echo "version=$(echo '${{ inputs.ref }}' | tr '/' '-')" >> $GITHUB_ENV
- echo "branch=${{ inputs.ref }}" >> $GITHUB_ENV
+ echo "version=$(echo "${INPUT_REF}" | tr '/' '-')" >> $GITHUB_ENV
+ echo "branch=${INPUT_REF}" >> $GITHUB_ENV
fi
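Worked example: for INPUT_REF=auth-4.9.1 (a hypothetical release ref) the previous step yields tag=4.9.1 and this one version=49 and branch=v4.9.x; a non-release ref such as feature/x falls through to version=feature-x and branch=feature/x.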
- run: |
- if $(echo '${{ inputs.product }}'| grep -qq auth); then
- echo '${{ inputs.ref }}' | egrep -qq '^auth-.*' && description='PowerDNS Authoritative Server '$branch || description='EXPERIMENTAL pdns auth image'
+ if $(echo "$PRODUCT"| grep -qq auth); then
+ echo "${INPUT_REF}" | egrep -qq '^auth-.*' && description='PowerDNS Authoritative Server '$branch || description='EXPERIMENTAL pdns auth image'
echo "name=pdns-auth-$version" >> $GITHUB_OUTPUT
- elif (echo '${{ inputs.product }}'| grep -qq recursor); then
- echo '${{ inputs.ref }}' | egrep -qq '^rec-.*' && description='PowerDNS Recursor '$branch || description='EXPERIMENTAL pdns recursor image'
+ elif (echo "$PRODUCT"| grep -qq recursor); then
+ echo "${INPUT_REF}" | egrep -qq '^rec-.*' && description='PowerDNS Recursor '$branch || description='EXPERIMENTAL pdns recursor image'
echo "name=pdns-recursor-$version" >> $GITHUB_OUTPUT
else
- echo '${{ inputs.ref }}' | egrep -qq '^dnsdist-.*' && description='PowerDNS DNSDist '$branch || description='EXPERIMENTAL dnsdist image'
+ echo "${INPUT_REF}" | egrep -qq '^dnsdist-.*' && description='PowerDNS DNSDist '$branch || description='EXPERIMENTAL dnsdist image'
echo "name=dnsdist-$version" >> $GITHUB_OUTPUT
fi
echo "description=$description" >> $GITHUB_OUTPUT
jobs:
prepare:
+ env:
+ REF_NAME: ${{ github.ref_name }}
runs-on: ubuntu-24.04
outputs:
image-name-suffix: ${{ steps.get-image-version.outputs.version }}
image-tag: ${{ steps.get-image-tag.outputs.tag }}
image-description-suffix: ${{ steps.get-image-description.outputs.description }}
steps:
- - run: echo "version=$(echo '${{ github.ref_name }}' | cut -d '-' -f 2 | awk -F'.' '{print $1$2}')" >> $GITHUB_OUTPUT
+ - run: echo "version=$(echo "${REF_NAME}" | cut -d '-' -f 2 | awk -F'.' '{print $1$2}')" >> $GITHUB_OUTPUT
id: get-image-version
- - run: echo "tag=$(echo '${{ github.ref_name }}' | cut -d '-' -f 2-)" >> $GITHUB_OUTPUT
+ - run: echo "tag=$(echo "${REF_NAME}" | cut -d '-' -f 2-)" >> $GITHUB_OUTPUT
id: get-image-tag
- - run: echo "description=$(echo '${{ github.ref_name }}' | cut -d '-' -f 2- | awk -F'.' '{print "v"$1"."$2".x"}')" >> $GITHUB_OUTPUT
+ - run: echo "description=$(echo "${REF_NAME}" | cut -d '-' -f 2- | awk -F'.' '{print "v"$1"."$2".x"}')" >> $GITHUB_OUTPUT
id: get-image-description
call-build-image-auth:
persist-credentials: false
- name: validate reference only if image will be pushed
if: ${{ inputs.push }}
+ env:
+ REF: ${{ inputs.ref }}
run: |
- [[ "${{ inputs.ref }}" == "master" ]] || git describe --tags --exact-match
+ [[ "${REF}" == "master" ]] || git describe --tags --exact-match
build:
name: build docker image for a product
matrix:
runner-os: ${{ fromJson(needs.prepare-test-runner-os-list.outputs.runnerlist )}}
fail-fast: false
+ env:
+ INPUT_IMAGE_NAME: ${{ inputs.image-name }}
+ INPUT_IMAGE_TAGS: ${{ inputs.image-tags }}
+ INPUT_PLATFORMS: ${{ inputs.platforms }}
steps:
- name: Check running image
run: |
- image_name='${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${{ inputs.image-name }}'
- for tag in `echo '${{ inputs.image-tags }}' | tr '\n' ' '`; do
+ image_name="${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${INPUT_IMAGE_NAME}"
+ for tag in `echo "${INPUT_IMAGE_TAGS}" | tr '\n' ' '`; do
echo 'Testing: '${image_name}':'${tag};
# pdns-auth image returns a 134 exit code
docker run ${image_name}:${tag} --version || [ "$?" == "134" ]
done
- name: Check image digest matches
+ env:
+ EXPECTED_DIGEST: ${{ needs.build.outputs.image-digest }}
run: |
- output_digest='${{ needs.build.outputs.image-digest }}'
- image_name='${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${{ inputs.image-name }}'
- for tag in `echo '${{ inputs.image-tags }}' | tr '\n' ' '`; do
+ image_name="${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${INPUT_IMAGE_NAME}"
+ for tag in `echo "${INPUT_IMAGE_TAGS}" | tr '\n' ' '`; do
image_digest=$(docker inspect --format='{{index .RepoDigests 0}}' ${image_name}:${tag} | cut -d '@' -f 2)
- [[ "${output_digest}" == "${image_digest}" ]] || \
- ( echo "Image digest does not match => output_digest: "${output_digest}" - image_digest: "${image_digest} && exit 1 )
+ [[ "${EXPECTED_DIGEST}" == "${image_digest}" ]] || \
+ ( echo "Image digest does not match => output_digest: "${EXPECTED_DIGEST}" - image_digest: "${image_digest} && exit 1 )
done
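docker inspect --format='{{index .RepoDigests 0}}' prints a reference of the form org/image@sha256:..., so cut -d '@' -f 2 leaves the bare sha256:... digest for comparison with the build job's output.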
- name: Check SBOM and Provenance
run: |
- image_name='${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${{ inputs.image-name }}'
- for tag in `echo '${{ inputs.image-tags }}' | tr '\n' ' '`; do
- if $(echo '${{ inputs.platforms }}' | grep -qq ','); then
+ image_name="${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${INPUT_IMAGE_NAME}"
+ for tag in `echo "${INPUT_IMAGE_TAGS}" | tr '\n' ' '`; do
+ if $(echo "${INPUT_PLATFORMS}" | grep -qq ','); then
docker buildx imagetools inspect ${image_name}:${tag} --format "{{json .Provenance}}" | jq -e '."linux/'$(dpkg --print-architecture)'" | has("SLSA")'
docker buildx imagetools inspect ${image_name}:${tag} --format "{{json .SBOM}}" | jq -e '."linux/'$(dpkg --print-architecture)'" | has("SPDX")'
else
ref: ${{ inputs.ref }}
persist-credentials: false
- id: get-oslist
+ env:
+ OS: ${{ inputs.os }}
+ REF: ${{ inputs.ref }}
run: |
available_targets=$(ls builder-support/dockerfiles/Dockerfile.target.* )
- for i in $(echo "${{ inputs.os }}"); do
+ for i in $(echo "${OS}"); do
if echo $available_targets | grep -qq $i; then
targets+=($i)
else
- if [[ "${{ inputs.ref }}" == "master" ]]; then
- echo "::error title=Dockerfile not found for ${i}::OS ${i} not available as target in ${{ inputs.ref }}" && exit 1
+ if [[ "${REF}" == "master" ]]; then
+ echo "::error title=Dockerfile not found for ${i}::OS ${i} not available as target in ${REF}" && exit 1
else
- echo "::warning title=Packages will not be generated for ${i}::OS ${i} not available as target in ${{ inputs.ref }}"
+ echo "::warning title=Packages will not be generated for ${i}::OS ${i} not available as target in ${REF}"
fi
fi
done
pkghashes-ubuntu-noble-x86_64: ${{ steps.pkghashes.outputs.pkghashes-ubuntu-noble-x86_64 }}
pkghashes-ubuntu-noble-aarch64: ${{ steps.pkghashes.outputs.pkghashes-ubuntu-noble-aarch64 }}
srchashes: ${{ steps.srchashes.outputs.srchashes }}
+ env:
+ IS_RELEASE: ${{ inputs.is_release}}
+ PRODUCT: ${{ inputs.product }}
+ OS: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
persist-credentials: false
# this builds packages and runs our unit tests (make check)
- - run: IS_RELEASE=${{ inputs.is_release}} builder/build.sh -v -m ${{ inputs.product }} ${{ matrix.os }}
+ - run: IS_RELEASE=${IS_RELEASE} builder/build.sh -v -m ${PRODUCT} ${OS}
- name: Get version number
run: |
echo "version=$(readlink builder/tmp/latest)" >> $GITHUB_OUTPUT
- name: Normalize package name
id: normalize-name
run: |
- if [ "x${{ inputs.product }}" = "xauthoritative" ]; then
+ if [ "x${PRODUCT}" = "xauthoritative" ]; then
echo "normalized-package-name=pdns" >> $GITHUB_OUTPUT
- elif [ "x${{ inputs.product }}" = "xrecursor" ]; then
+ elif [ "x${PRODUCT}" = "xrecursor" ]; then
echo "normalized-package-name=pdns-recursor" >> $GITHUB_OUTPUT
else
- echo "normalized-package-name=${{ inputs.product }}" >> $GITHUB_OUTPUT
+ echo "normalized-package-name=${PRODUCT}" >> $GITHUB_OUTPUT
fi
- name: Include architecture in the packages compressed file name
- run: for f in $(ls ./built_pkgs/*/*/*-${{ matrix.os }}.tar.bz2 | sed 's/\.tar.bz2$//'); do mv $f.tar.bz2 $f-${{ steps.getarch.outputs.target-arch }}.tar.bz2; done
+ env:
+ TARGET_ARCH: ${{ steps.getarch.outputs.target-arch }}
+ run: for f in $(ls ./built_pkgs/*/*/*-${OS}.tar.bz2 | sed 's/\.tar.bz2$//'); do mv $f.tar.bz2 $f-${TARGET_ARCH}.tar.bz2; done
- name: Upload packages as GH artifacts
uses: actions/upload-artifact@v4
with:
- name: Extract packages from the tarball
# so we get provenance for individual packages (and the JSON package manifests from the builder)
id: extract
+ env:
+ TARGET_ARCH: ${{ steps.getarch.outputs.target-arch }}
+ PACKAGE_NAME: ${{ steps.normalize-name.outputs.normalized-package-name }}
+ VERSION: ${{ steps.getversion.outputs.version }}
run: |
mkdir -m 700 -p ./packages/
- tar xvf ./built_pkgs/*/*/${{ steps.normalize-name.outputs.normalized-package-name }}-${{ steps.getversion.outputs.version }}-${{ matrix.os }}-${{ steps.getarch.outputs.target-arch }}.tar.bz2 -C ./packages/ --transform='s/.*\///'
+ tar xvf ./built_pkgs/*/*/${PACKAGE_NAME}-${VERSION}-${OS}-${TARGET_ARCH}.tar.bz2 -C ./packages/ --transform='s/.*\///'
- name: Generate package hashes for provenance
shell: bash
id: pkghashes
+ env:
+ TARGET_ARCH: ${{ steps.getarch.outputs.target-arch }}
run: |
- echo "pkghashes-${{ matrix.os }}-${{ steps.getarch.outputs.target-arch }}=$(sha256sum ./packages/*.rpm ./packages/*.deb ./packages/*.json | base64 -w0)" >> $GITHUB_OUTPUT
+ echo "pkghashes-${OS}-${TARGET_ARCH}=$(sha256sum ./packages/*.rpm ./packages/*.deb ./packages/*.json | base64 -w0)" >> $GITHUB_OUTPUT
- name: Generate source hash for provenance
shell: bash
id: srchashes
+ env:
+ PACKAGE_NAME: ${{ steps.normalize-name.outputs.normalized-package-name }}
+ VERSION: ${{ steps.getversion.outputs.version }}
run: |
- echo "srchashes=$(sha256sum ./built_pkgs/*/*/${{ steps.normalize-name.outputs.normalized-package-name }}-${{ steps.getversion.outputs.version }}.tar.* | base64 -w0)" >> $GITHUB_OUTPUT
+ echo "srchashes=$(sha256sum ./built_pkgs/*/*/${PACKAGE_NAME}-${VERSION}.tar.* | base64 -w0)" >> $GITHUB_OUTPUT
- name: Upload packages to downloads.powerdns.com
env:
SSHKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_SECRET }}
needs: [prepare, build]
name: Check if hashes were created for all requested targets
runs-on: ubuntu-24.04
+ env:
+ OUTPUTS: ${{ toJSON(needs.build.outputs) }}
+ OSLIST: ${{ needs.prepare.outputs.oslist }}
steps:
- name: Get list of outputs from build jobs
- run: echo '${{ toJSON(needs.build.outputs) }}' | jq 'keys[]' | grep -vE 'version|product-name' | tee /tmp/build-outputs.txt
+ run: echo "${OUTPUTS}" | jq 'keys[]' | grep -vE 'version|product-name' | tee /tmp/build-outputs.txt
- name: Get list of OS inputs
run: |
- for os in $(echo '${{ needs.prepare.outputs.oslist }}' | jq -r '.[]'); do
+ for os in $(echo "${OSLIST}" | jq -r '.[]'); do
for architecture in x86_64 ${{ vars.ARM64_USE_UBICLOUD == '1' && 'aarch64' || '' }}; do
echo "\"pkghashes-$os-$architecture\"" | tee -a /tmp/os-inputs.txt
done
HOSTKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_HOSTKEY }}
PRODUCT: ${{ needs.build.outputs.product-name }}
VERSION: ${{ needs.build.outputs.version }}
+ DOWNLOAD_PATH: ${{ steps.download-provenance.outputs.download-path }}
if:
"${{ env.SSHKEY != '' }}"
shell: bash
chmod 600 ~/.ssh/id_ed25519
echo "$HOSTKEY" > ~/.ssh/known_hosts
mkdir -m 755 -p "slsa/${PRODUCT}/${VERSION}/"
- mv ${{steps.download-provenance.outputs.download-path}}/*.jsonl "slsa/${PRODUCT}/${VERSION}"
+ mv "${DOWNLOAD_PATH}"/*.jsonl "slsa/${PRODUCT}/${VERSION}"
rsync -4rlptD slsa/* "$RSYNCTARGET"
working-directory: ./pdns/dnsdistdist/
run: |
inv ci-autoconf
- - run: inv ci-install-rust ${{ env.REPO_HOME }}
+ - run: inv ci-install-rust $REPO_HOME
if: matrix.product == 'dnsdist'
working-directory: ./pdns/dnsdistdist/
- - run: inv ci-build-and-install-quiche ${{ env.REPO_HOME }}
+ - run: inv ci-build-and-install-quiche $REPO_HOME
if: matrix.product == 'dnsdist'
working-directory: ./pdns/dnsdistdist/
- name: Configure dnsdist
if: matrix.product == 'rec'
run: |
inv install-rec-build-deps
- - run: inv ci-install-rust ${{ env.REPO_HOME }}
+ - run: inv ci-install-rust $REPO_HOME
if: matrix.product == 'rec'
working-directory: ./pdns/recursordist/
- name: Autoreconf rec
needs: analyze
runs-on: ubuntu-22.04
name: Check whether clang-tidy succeeded
+ env:
+ ANNOTATIONS_AUTH: ${{ needs.analyze.outputs.clang-tidy-annotations-auth }}
+ ANNOTATIONS_DIST: ${{ needs.analyze.outputs.clang-tidy-annotations-dnsdist }}
+ ANNOTATIONS_REC: ${{ needs.analyze.outputs.clang-tidy-annotations-rec }}
steps:
- run: |
- if [ "x${{ needs.analyze.outputs.clang-tidy-annotations-auth }}" != "x" -a "${{ needs.analyze.outputs.clang-tidy-annotations-auth }}" != "0" ]; then
+ if [ "x${ANNOTATIONS_AUTH}" != "x" -a "${ANNOTATIONS_AUTH}" != "0" ]; then
echo "::error::Auth clang-tidy failed"
exit 1
fi
- if [ "x${{ needs.analyze.outputs.clang-tidy-annotations-dnsdist }}" != "x" -a "${{ needs.analyze.outputs.clang-tidy-annotations-dnsdist }}" != "0" ]; then
+ if [ "x${ANNOTATIONS_DIST}" != "x" -a "${ANNOTATIONS_DIST}" != "0" ]; then
echo "::error::DNSdist clang-tidy failed"
exit 1
fi
- if [ "x${{needs.analyze.outputs.clang-tidy-annotations-rec }}" != "x" -a "${{needs.analyze.outputs.clang-tidy-annotations-rec }}" != "0" ]; then
+ if [ "x${ANNOTATIONS_REC}" != "x" -a "${ANNOTATIONS_REC}" != "0" ]; then
echo "::error::Rec clang-tidy failed"
exit 1
fi
- run: inv coverity-clang-configure
- run: inv ci-autoconf
working-directory: ./pdns/dnsdistdist/
- - run: inv ci-install-rust ${{ env.REPO_HOME }}
+ - run: inv ci-install-rust $REPO_HOME
working-directory: ./pdns/dnsdistdist/
- - run: inv ci-build-and-install-quiche ${{ env.REPO_HOME }}
+ - run: inv ci-build-and-install-quiche $REPO_HOME
working-directory: ./pdns/dnsdistdist/
- run: inv ci-dnsdist-configure full autotools build-dir
working-directory: ./pdns/dnsdistdist/
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_API_TOKEN }}
NETLIFY_SITE_ID_DOCS: ${{ vars.NETLIFY_SITE_ID_DOCS }}
NETLIFY_SITE_ID_DNSDIST: ${{ vars.NETLIFY_SITE_ID_DNSDIST }}
+ PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
steps:
- name: Check required secrets
run: |
- name: Deploy docs to Netlify
run: |
mkdir -p docs_site/temp
- tar -xf artifacts/website-${{needs.build-docs.outputs.pdns_version}}/website.tar -C docs_site/temp
+ tar -xf artifacts/website-$PDNS_VERSION/website.tar -C docs_site/temp
mv docs_site/temp/website/docs.powerdns.com/* docs_site/
rm -rf docs_site/temp
- tar -xf artifacts/authoritative-html-docs-${{needs.build-docs.outputs.pdns_version}}/auth-html-docs.tar -C docs_site
- tar -xf artifacts/recursor-html-docs-${{needs.build-docs.outputs.pdns_version}}/rec-html-docs.tar -C docs_site
+ tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar -C docs_site
+ tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar -C docs_site
mv docs_site/auth-html-docs docs_site/authoritative
mv docs_site/rec-html-docs docs_site/recursor
- cp artifacts/PowerDNS-Authoritative-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Authoritative.pdf docs_site/authoritative/
- cp artifacts/PowerDNS-Recursor-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Recursor.pdf docs_site/recursor/
+ cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf docs_site/authoritative/
+ cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf docs_site/recursor/
netlify deploy \
--dir ./docs_site \
--site $NETLIFY_SITE_ID_DOCS \
- name: Deploy DNSdist docs to Netlify
run: |
- tar -xf artifacts/dnsdist-html-docs-${{needs.build-docs.outputs.pdns_version}}/dnsdist-html-docs.tar
- cp artifacts/dnsdist-${{needs.build-docs.outputs.pdns_version}}.pdf/dnsdist.pdf dnsdist-html-docs/
+ tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar
+ cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf dnsdist-html-docs/
netlify deploy \
--dir ./dnsdist-html-docs \
--site $NETLIFY_SITE_ID_DNSDIST \
needs: build-docs
if: ${{ (github.ref_name == 'master' || vars.DOCS_WORKFLOW_BRANCH_OVERRIDE == 'true') && vars.PUBLISH_DOCS_TO_WEB1 == 'true' }}
runs-on: ubuntu-22.04
+ env:
+ PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
+ DOCS_HOST: ${{vars.DOCS_HOST}}
+ REC_DOCS_DIR: ${{vars.REC_DOCS_DIR}}
+ AUTH_DOCS_DIR: ${{vars.AUTH_DOCS_DIR}}
+ SSH_KEY: ${{secrets.WEB1_DOCS_SECRET}}
+ HOST_KEY: ${{vars.WEB1_HOSTKEY}}
steps:
- name: Check required secrets
run: |
missing_secrets=()
- [ -z "${{ secrets.WEB1_DOCS_SECRET }}" ] && missing_secrets+=("WEB1_DOCS_SECRET")
- [ -z "${{ vars.WEB1_HOSTKEY }}" ] && missing_secrets+=("WEB1_HOSTKEY")
- [ -z "${{ vars.DOCS_HOST }}" ] && missing_secrets+=("DOCS_HOST")
- [ -z "${{ vars.AUTH_DOCS_DIR }}" ] && missing_secrets+=("AUTH_DOCS_DIR")
- [ -z "${{ vars.REC_DOCS_DIR }}" ] && missing_secrets+=("REC_DOCS_DIR")
+ [ -z "$SSH_KEY" ] && missing_secrets+=("WEB1_DOCS_SECRET")
+ [ -z "$HOSTKEY" ] && missing_secrets+=("WEB1_HOSTKEY")
+ [ -z "$DOCS_HOST" ] && missing_secrets+=("DOCS_HOST")
+ [ -z "AUTH_DOCS_DIR" ] && missing_secrets+=("AUTH_DOCS_DIR")
+ [ -z "REC_DOCS_DIR" ] && missing_secrets+=("REC_DOCS_DIR")
if [ ${#missing_secrets[@]} -ne 0 ]; then
echo "Error: The following secrets/variables are missing: ${missing_secrets[*]}"
exit 1
- id: setup-ssh
run: |-
inv ci-docs-add-ssh --ssh-key="$SSH_KEY" --host-key="$HOST_KEY"
- env:
- SSH_KEY: ${{secrets.WEB1_DOCS_SECRET}}
- HOST_KEY: ${{vars.WEB1_HOSTKEY}}
- name: Publish Auth docs
run: |
mkdir -p ./docs/_build
- tar -xf artifacts/authoritative-html-docs-${{needs.build-docs.outputs.pdns_version}}/auth-html-docs.tar -C ./docs/_build/
- cp artifacts/PowerDNS-Authoritative-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Authoritative.pdf ./docs/_build/auth-html-docs/
+ tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar -C ./docs/_build/
+ cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf ./docs/_build/auth-html-docs/
inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="PowerDNS-Authoritative.pdf" --username="docs_powerdns_com" --product="auth" --directory="/${AUTH_DOCS_DIR}/"
- env:
- DOCS_HOST: ${{vars.DOCS_HOST}}
- AUTH_DOCS_DIR: ${{vars.AUTH_DOCS_DIR}}
- name: Publish Recursor docs
run: |
- tar -xf artifacts/recursor-html-docs-${{needs.build-docs.outputs.pdns_version}}/rec-html-docs.tar -C ./docs/_build/
- cp artifacts/PowerDNS-Recursor-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Recursor.pdf ./docs/_build/rec-html-docs/
+ tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar -C ./docs/_build/
+ cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf ./docs/_build/rec-html-docs/
inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="PowerDNS-Recursor.pdf" --username="docs_powerdns_com" --product="rec" --directory="/${REC_DOCS_DIR}/"
- env:
- DOCS_HOST: ${{vars.DOCS_HOST}}
- REC_DOCS_DIR: ${{vars.REC_DOCS_DIR}}
- name: Publish DNSdist docs
run: |
- tar -xf artifacts/dnsdist-html-docs-${{needs.build-docs.outputs.pdns_version}}/dnsdist-html-docs.tar -C ./docs/_build/
- cp artifacts/dnsdist-${{needs.build-docs.outputs.pdns_version}}.pdf/dnsdist.pdf ./docs/_build/dnsdist-html-docs/
+ tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar -C ./docs/_build/
+ cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf ./docs/_build/dnsdist-html-docs/
inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="dnsdist.pdf" --username="dnsdist_org" --product="dnsdist" --directory="/${DNSDIST_DOCS_DIR}/"
- env:
- DOCS_HOST: ${{vars.DOCS_HOST}}
publish-to-aws:
needs: build-docs
if: ${{ (github.ref_name == 'master' || vars.DOCS_WORKFLOW_BRANCH_OVERRIDE == 'true') && vars.PUBLISH_DOCS_TO_AWS == 'true' }}
runs-on: ubuntu-22.04
+ env:
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_REGION: ${{ vars.AWS_REGION }}
+ AWS_S3_BUCKET_DOCS: ${{ vars.AWS_S3_BUCKET_DOCS }}
+ AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST }}
+ AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS }}
+ PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
steps:
- name: Check required secrets
run: |
missing_secrets=()
- [ -z "${{ secrets.AWS_ACCESS_KEY_ID }}" ] && missing_secrets+=("AWS_ACCESS_KEY_ID")
- [ -z "${{ secrets.AWS_SECRET_ACCESS_KEY }}" ] && missing_secrets+=("AWS_SECRET_ACCESS_KEY")
- [ -z "${{ vars.AWS_REGION }}" ] && missing_secrets+=("AWS_REGION")
- [ -z "${{ vars.AWS_S3_BUCKET_DOCS }}" ] && missing_secrets+=("AWS_S3_BUCKET_DOCS")
- [ -z "${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST }}" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST")
- [ -z "${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS }}" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS")
+ [ -z "$AWS_ACCESS_KEY_ID" ] && missing_secrets+=("AWS_ACCESS_KEY_ID")
+ [ -z "$AWS_SECRET_ACCESS_KEY" ] && missing_secrets+=("AWS_SECRET_ACCESS_KEY")
+ [ -z "$AWS_REGION" ] && missing_secrets+=("AWS_REGION")
+ [ -z "$AWS_S3_BUCKET_DOCS" ] && missing_secrets+=("AWS_S3_BUCKET_DOCS")
+ [ -z "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST")
+ [ -z "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS")
if [ ${#missing_secrets[@]} -ne 0 ]; then
echo "Error: The following secrets/variables are missing: ${missing_secrets[*]}"
exit 1
type = s3
provider = AWS
env_auth = true
- region = ${{ vars.AWS_REGION }}
+ region = $AWS_REGION
EOF
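With env_auth = true, rclone reads AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY from the environment (now set at job level), and $AWS_REGION is expanded by the shell while the config is written, assuming the heredoc opener is unquoted.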
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: artifacts
- name: Publish to AWS
- env:
- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- AWS_REGION: ${{ vars.AWS_REGION }}
- AWS_S3_BUCKET_DOCS: ${{ vars.AWS_S3_BUCKET_DOCS }}
- AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST }}
- AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS }}
run: |
if [ -n "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST" ]; then
- tar -xf artifacts/dnsdist-html-docs-${{needs.build-docs.outputs.pdns_version}}/dnsdist-html-docs.tar
- cp artifacts/dnsdist-${{needs.build-docs.outputs.pdns_version}}.pdf/dnsdist.pdf dnsdist-html-docs/
-
+ tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar
+ cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf dnsdist-html-docs/
+
# Copy files to S3
echo "Copying DNSdist docs to S3..."
rclone copy --checksum dnsdist-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/dnsdist.org/
--paths "/*"
echo "Invalidation completed."
fi
-
+
if [ -n "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS" ]; then
- tar -xf artifacts/recursor-html-docs-${{needs.build-docs.outputs.pdns_version}}/rec-html-docs.tar
- cp artifacts/PowerDNS-Recursor-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Recursor.pdf rec-html-docs/
-
+ tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar
+ cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf rec-html-docs/
+
# Copy all PowerDNS docs to S3
echo "Copying Recursor docs to S3..."
rclone copy --checksum rec-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/recursor/
- tar -xf artifacts/authoritative-html-docs-${{needs.build-docs.outputs.pdns_version}}/auth-html-docs.tar
- cp artifacts/PowerDNS-Authoritative-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Authoritative.pdf auth-html-docs/
+ tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar
+ cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf auth-html-docs/
echo "Copying Authoritative docs to S3..."
rclone copy --checksum auth-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/authoritative/
- tar -xf artifacts/website-${{needs.build-docs.outputs.pdns_version}}/website.tar
+ tar -xf artifacts/website-$PDNS_VERSION/website.tar
echo "Copying website files to S3..."
rclone copy --checksum website/docs.powerdns.com/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/
name: ${{ matrix.requirements-file }} - Validate list of packages and hashes
runs-on: ubuntu-22.04
needs: list-pip-requirement-files
- env:
- SERVICE_IP_ADDR: 127.0.0.1
services:
database:
image: epicwink/proxpi
fail-fast: false
matrix:
requirements-file: ${{ fromJson(needs.list-pip-requirement-files.outputs.req-files) }}
+ env:
+ SERVICE_IP_ADDR: 127.0.0.1
+ REQUIREMENTS_FILE: ${{ matrix.requirements-file }}
steps:
- - run: echo "${{ matrix.requirements-file }}"
+ - run: echo "${REQUIREMENTS_FILE}"
- uses: PowerDNS/pdns/set-ubuntu-mirror@meta
- uses: actions/checkout@v4
with:
- run: pip config set global.trusted-host ${{ env.SERVICE_IP_ADDR }}
- id: proxpi-docker
run: echo "id=$(docker ps | grep "epicwink/proxpi" | awk '{print $1}')" >> "$GITHUB_OUTPUT"
- - run: pip install -r ${{ matrix.requirements-file }}
+ - run: pip install -r "${REQUIREMENTS_FILE}"
- name: Get the list of packages requested to the pip proxy
+ env:
+ ID: ${{ steps.proxpi-docker.outputs.id }}
run: |
- docker logs ${{ steps.proxpi-docker.outputs.id }} 2>&1 | grep whl | awk '{print $8}' | cut -d "/" -f 4 | awk -F'-' '{print $1"=="$2}' | sort -u --ignore-case | sed 's/_/-/' | egrep -v "pip==|setuptools==" > /tmp/proxpi.log
+ docker logs "${ID}" 2>&1 | grep whl | awk '{print $8}' | cut -d "/" -f 4 | awk -F'-' '{print $1"=="$2}' | sort -u --ignore-case | sed 's/_/-/' | egrep -v "pip==|setuptools==" > /tmp/proxpi.log
cat /tmp/proxpi.log
- name: check only listed packages were installed
- run: for i in `cat /tmp/proxpi.log`; do grep -qq -i $i ${{ matrix.requirements-file }} || ( echo "$i not found" && exit 1 ); done
+ run: for i in `cat /tmp/proxpi.log`; do grep -qq -i "$i" "${REQUIREMENTS_FILE}" || ( echo "$i not found" && exit 1 ); done
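Worked example of the log parsing above: a downloaded wheel path ending in .../Foo_Bar-1.2.3-py3-none-any.whl (hypothetical package) becomes Foo-Bar==1.2.3 (first underscore normalized to a dash), which must then appear in the requirements file for the job to pass.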
run-rec-bulk-test-on-ubicloud:
if: ${{ vars.UBICLOUD_DAILY_REC_BULKTEST == '1' }}