]> git.ipfire.org Git - thirdparty/pdns.git/commitdiff
ci: Use an intermediate environment variable when processing input
author: Remi Gacogne <remi.gacogne@powerdns.com>
Tue, 27 May 2025 13:02:48 +0000 (15:02 +0200)
committer: Remi Gacogne <remi.gacogne@powerdns.com>
Tue, 27 May 2025 13:10:44 +0000 (15:10 +0200)
See https://docs.github.com/en/actions/security-for-github-actions/security-guides/security-hardening-for-github-actions#good-practices-for-mitigating-script-injection-attacks

.github/workflows/build-and-test-all.yml
.github/workflows/build-docker-images-dispatch.yml
.github/workflows/build-docker-images-tags.yml
.github/workflows/build-docker-images.yml
.github/workflows/build-packages.yml
.github/workflows/codeql-analysis.yml
.github/workflows/coverity.yml
.github/workflows/documentation.yml
.github/workflows/misc-dailies.yml

index 1199eb7eb66d10c3cf0a95c4777da726d622cb4f..fbc4165770969caf55b76d5139529bddb19dd7cc 100644 (file)
@@ -32,6 +32,7 @@ env:
   LLVM_PROFILE_FILE: "/tmp/code-%p.profraw"
   OPTIMIZATIONS: yes
   INV_CMD: ". ${REPO_HOME}/.venv/bin/activate && inv"
+  BRANCH_NAME: ${{ inputs.branch-name || github.ref_name }}
 
 jobs:
   get-runner-container-image:
@@ -41,13 +42,13 @@ jobs:
       id: ${{ steps.get-runner-image.outputs.image-id }}
       tag: ${{ steps.get-runner-image.outputs.tag }}
     env:
-      DEFAULT_RUNNER_DOCKER_IMAGE: base-pdns-ci-image/debian-12-pdns-base
       DEFAULT_IMAGE_TAG: master # update when backporting, e.g. auth-4.9.x
+      DOCKER_IMAGE: ${{ inputs.runner-docker-image-name || 'base-pdns-ci-image/debian-12-pdns-base' }}
     steps:
       - id: get-runner-image
         run: |
-          echo "image-id=ghcr.io/powerdns/${{ inputs.runner-docker-image-name || env.DEFAULT_RUNNER_DOCKER_IMAGE }}" >> "$GITHUB_OUTPUT"
-          echo "tag=${{ env.DEFAULT_IMAGE_TAG }}" >> "$GITHUB_OUTPUT"
+          echo "image-id=ghcr.io/powerdns/$DOCKER_IMAGE" >> "$GITHUB_OUTPUT"
+          echo "tag=$DEFAULT_IMAGE_TAG" >> "$GITHUB_OUTPUT"
 
   build-auth:
     name: build auth (${{ matrix.builder }})
@@ -125,7 +126,7 @@ jobs:
       - run: ${{ env.INV_CMD }} ci-auth-install ${{ matrix.builder == 'meson' && '--meson' || '' }}
       - run: ccache -s
       - if: ${{ matrix.builder != 'meson' }}
-        run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+        run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
       - if: ${{ matrix.builder != 'meson' }}
         name: Store the binaries
         uses: actions/upload-artifact@v4 # this takes 30 seconds, maybe we want to tar
@@ -188,7 +189,7 @@ jobs:
         working-directory: .
       - run: ${{ env.INV_CMD }} install-rec-build-deps
         working-directory: ./pdns/recursordist/
-      - run: ${{ env.INV_CMD }} ci-install-rust ${{ env.REPO_HOME }}
+      - run: ${{ env.INV_CMD }} ci-install-rust ${REPO_HOME}
         working-directory: ./pdns/recursordist/
       - run: ${{ env.INV_CMD }} ci-autoconf ${{ matrix.builder == 'meson' && '--meson' || '' }}
         working-directory: ./pdns/recursordist/
@@ -210,7 +211,7 @@ jobs:
       - run: ${{ env.INV_CMD }} ci-rec-install ${{ matrix.builder == 'meson' && '--meson' || '' }}
       - run: ccache -s
       - if: ${{ matrix.builder != 'meson' }}
-        run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+        run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
       - if: ${{ matrix.builder != 'meson' }}
         name: Store the binaries
         uses: actions/upload-artifact@v4 # this takes 30 seconds, maybe we want to tar
@@ -278,9 +279,9 @@ jobs:
         working-directory: .
       - run: ${{ env.INV_CMD }} install-lld-linker-if-needed
         working-directory: ./pdns/dnsdistdist/
-      - run: ${{ env.INV_CMD }} ci-install-rust ${{ env.REPO_HOME }}
+      - run: ${{ env.INV_CMD }} ci-install-rust ${REPO_HOME}
         working-directory: ./pdns/dnsdistdist/
-      - run: ${{ env.INV_CMD }} ci-build-and-install-quiche ${{ env.REPO_HOME }}
+      - run: ${{ env.INV_CMD }} ci-build-and-install-quiche ${REPO_HOME}
         working-directory: ./pdns/dnsdistdist/
       - run: ${{ env.INV_CMD }} ci-autoconf
         if: ${{ matrix.builder == 'autotools' }}
@@ -308,7 +309,7 @@ jobs:
       - run: ${{ env.INV_CMD }} ci-make-install
         if: ${{ matrix.builder == 'autotools' }}
       - run: ccache -s
-      - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
       - name: Store the binaries
         if: ${{ matrix.builder == 'autotools' }}
         uses: actions/upload-artifact@v4 # this takes 30 seconds, maybe we want to tar
@@ -362,7 +363,7 @@ jobs:
           submodules: recursive
           ref: ${{ inputs.branch-name }}
           persist-credentials: false
-      - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
       - name: Fetch the binaries
         uses: actions/download-artifact@v4
         with:
@@ -494,7 +495,7 @@ jobs:
           submodules: recursive
           ref: ${{ inputs.branch-name }}
           persist-credentials: false
-      - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
       - name: Fetch the binaries
         uses: actions/download-artifact@v4
         with:
@@ -538,7 +539,7 @@ jobs:
           submodules: recursive
           ref: ${{ inputs.branch-name }}
           persist-credentials: false
-      - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
       - name: Fetch the binaries
         uses: actions/download-artifact@v4
         with:
@@ -588,7 +589,7 @@ jobs:
           submodules: recursive
           ref: ${{ inputs.branch-name }}
           persist-credentials: false
-      - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
       - name: Fetch the binaries
         uses: actions/download-artifact@v4
         with:
@@ -642,7 +643,7 @@ jobs:
           submodules: recursive
           ref: ${{ inputs.branch-name }}
           persist-credentials: false
-      - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
       - name: Fetch the binaries
         uses: actions/download-artifact@v4
         with:
@@ -697,7 +698,7 @@ jobs:
           submodules: recursive
           ref: ${{ inputs.branch-name }}
           persist-credentials: false
-      - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
       - name: Fetch the binaries
         uses: actions/download-artifact@v4
         with:
@@ -743,7 +744,7 @@ jobs:
           submodules: recursive
           ref: ${{ inputs.branch-name }}
           persist-credentials: false
-      - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
       - name: Fetch the binaries
         uses: actions/download-artifact@v4
         with:
@@ -804,7 +805,7 @@ jobs:
           submodules: recursive
           ref: ${{ inputs.branch-name }}
           persist-credentials: false
-      - run: echo "normalized-branch-name=${{ inputs.branch-name || github.ref_name }}" | tr "/" "-" >> "$GITHUB_ENV"
+      - run: echo "normalized-branch-name=$BRANCH_NAME" | tr "/" "-" >> "$GITHUB_ENV"
       - name: Fetch the binaries
         uses: actions/download-artifact@v4
         with:
@@ -865,6 +866,8 @@ jobs:
       - test-recursor-ubicloud-bulk
     if: success() || failure()
     runs-on: ubuntu-24.04
+    env:
+      NEEDS: ${{ toJSON(needs) }}
     steps:
       - name: Coveralls Parallel Finished
         if: ${{ env.COVERAGE == 'yes' }}
@@ -875,7 +878,7 @@ jobs:
       - name: Install jq and jc
         run: "sudo apt-get update && sudo apt-get install jq jc"
       - name: Fail job if any of the previous jobs failed
-        run: "for i in `echo '${{ toJSON(needs) }}' | jq -r '.[].result'`; do if [[ $i == 'failure' ]]; then echo '${{ toJSON(needs) }}'; exit 1; fi; done;"
+        run: "for i in `echo \"${NEEDS}\" | jq -r '.[].result'`; do if [[ $i == 'failure' ]]; then echo \"${NEEDS}\"; exit 1; fi; done;"
       - uses: actions/checkout@v4
         with:
           fetch-depth: 5
@@ -885,7 +888,7 @@ jobs:
       - name: Get list of jobs in the workflow
         run: "cat .github/workflows/build-and-test-all.yml | jc --yaml | jq -rS '.[].jobs | keys | .[]' | grep -vE 'collect|get-runner-container-image' | tee /tmp/workflow-jobs-list.yml"
       - name: Get list of prerequisite jobs
-        run: "echo '${{ toJSON(needs) }}' | jq -rS 'keys | .[]' | tee /tmp/workflow-needs-list.yml"
+        run: "echo \"${NEEDS}\" | jq -rS 'keys | .[]' | tee /tmp/workflow-needs-list.yml"
       - name: Fail if there is a job missing on the needs list
         run: "if ! diff -q /tmp/workflow-jobs-list.yml /tmp/workflow-needs-list.yml; then exit 1; fi"
 
index 4a5de60b001993da21a3ec04a0cc840fced22dc4..a1ad31e7609b4e20965b1b4dce0d8f4d88d3fe09 100644 (file)
@@ -43,28 +43,31 @@ jobs:
       image-tag: ${{ steps.get-image-tag.outputs.tag }}
       image-name: ${{ steps.get-image-metadata.outputs.name }}
       image-description: ${{ steps.get-image-metadata.outputs.description }}
+    env:
+      INPUT_REF: ${{ inputs.ref }}
+      PRODUCT: ${{ inputs.product }}
     steps:
       - run: |
-          echo '${{ inputs.ref }}' | egrep -qq '^auth-.*|^rec-.*|^dnsdist-.*' && tag=$(echo '${{ inputs.ref }}' | cut -d '-' -f 2-)
+          echo "${INPUT_REF}" | egrep -qq '^auth-.*|^rec-.*|^dnsdist-.*' && tag=$(echo "${INPUT_REF}" | cut -d '-' -f 2-)
           echo "tag=$tag" >> $GITHUB_OUTPUT
         id: get-image-tag
       - run: |
-          if $(echo '${{ inputs.ref }}' | egrep -qq '^auth-.*|^rec-.*|^dnsdist-.*'); then
-            echo "version=$(echo '${{ inputs.ref }}' | cut -d '-' -f 2 | awk -F'.' '{print $1$2}')" >> $GITHUB_ENV
-            echo "branch=$(echo '${{ inputs.ref }}' | cut -d '-' -f 2- | awk -F'.' '{print "v"$1"."$2".x"}')" >> $GITHUB_ENV
+          if $(echo "${INPUT_REF}" | egrep -qq '^auth-.*|^rec-.*|^dnsdist-.*'); then
+            echo "version=$(echo "${INPUT_REF}" | cut -d '-' -f 2 | awk -F'.' '{print $1$2}')" >> $GITHUB_ENV
+            echo "branch=$(echo "${INPUT_REF}" | cut -d '-' -f 2- | awk -F'.' '{print "v"$1"."$2".x"}')" >> $GITHUB_ENV
           else
-            echo "version=$(echo '${{ inputs.ref }}' | tr '/' '-')" >> $GITHUB_ENV
-            echo "branch=${{ inputs.ref }}" >> $GITHUB_ENV
+            echo "version=$(echo "${INPUT_REF}" | tr '/' '-')" >> $GITHUB_ENV
+            echo "branch=${INPUT_REF}" >> $GITHUB_ENV
           fi
       - run: |
-          if $(echo '${{ inputs.product }}'| grep -qq auth); then
-            echo '${{ inputs.ref }}' | egrep -qq '^auth-.*' && description='PowerDNS Authoritative Server '$branch || description='EXPERIMENTAL pdns auth image'
+          if $(echo "$PRODUCT"| grep -qq auth); then
+            echo "${INPUT_REF}" | egrep -qq '^auth-.*' && description='PowerDNS Authoritative Server '$branch || description='EXPERIMENTAL pdns auth image'
             echo "name=pdns-auth-$version" >> $GITHUB_OUTPUT
-          elif (echo '${{ inputs.product }}'| grep -qq recursor); then
-            echo '${{ inputs.ref }}' | egrep -qq '^rec-.*' && description='PowerDNS Recursor '$branch || description='EXPERIMENTAL pdns recursor image'
+          elif (echo "$PRODUCT"| grep -qq recursor); then
+            echo "${INPUT_REF}" | egrep -qq '^rec-.*' && description='PowerDNS Recursor '$branch || description='EXPERIMENTAL pdns recursor image'
             echo "name=pdns-recursor-$version" >> $GITHUB_OUTPUT
           else
-            echo '${{ inputs.ref }}' | egrep -qq '^dnsdist-.*' && description='PowerDNS DNSDist '$branch || description='EXPERIMENTAL dnsdist image'
+            echo "${INPUT_REF}" | egrep -qq '^dnsdist-.*' && description='PowerDNS DNSDist '$branch || description='EXPERIMENTAL dnsdist image'
             echo "name=dnsdist-$version" >> $GITHUB_OUTPUT
           fi
           echo "description=$description" >> $GITHUB_OUTPUT
index b4f8196fa9ee5f194f28e7bdda815efc7e75fc39..992a8123aff5b5d82d53f89dfaeecdacac851a67 100644 (file)
@@ -14,17 +14,19 @@ permissions:
 
 jobs:
   prepare:
+    env:
+      REF_NAME: ${{ github.ref_name }}
     runs-on: ubuntu-24.04
     outputs:
       image-name-suffix: ${{ steps.get-image-version.outputs.version }}
       image-tag: ${{ steps.get-image-tag.outputs.tag }}
       image-description-suffix: ${{ steps.get-image-description.outputs.description }}
     steps:
-      - run: echo "version=$(echo '${{ github.ref_name }}' | cut -d '-' -f 2 | awk -F'.' '{print $1$2}')" >> $GITHUB_OUTPUT
+      - run: echo "version=$(echo "${REF_NAME}" | cut -d '-' -f 2 | awk -F'.' '{print $1$2}')" >> $GITHUB_OUTPUT
         id: get-image-version
-      - run: echo "tag=$(echo '${{ github.ref_name }}' | cut -d '-' -f 2-)" >> $GITHUB_OUTPUT
+      - run: echo "tag=$(echo "${REF_NAME}" | cut -d '-' -f 2-)" >> $GITHUB_OUTPUT
         id: get-image-tag
-      - run: echo "description=$(echo '${{ github.ref_name }}' | cut -d '-' -f 2- | awk -F'.' '{print "v"$1"."$2".x"}')" >> $GITHUB_OUTPUT
+      - run: echo "description=$(echo "${REF_NAME}" | cut -d '-' -f 2- | awk -F'.' '{print "v"$1"."$2".x"}')" >> $GITHUB_OUTPUT
         id: get-image-description
 
   call-build-image-auth:
index 18097d7667bc425d38ced9347a74b1035dc040be..80e1c62f28965974b378bf0426b7ff67596afe18 100644 (file)
@@ -63,8 +63,10 @@ jobs:
           persist-credentials: false
       - name: validate reference only if image will be pushed
         if: ${{ inputs.push }}
+        env:
+          REF: ${{ inputs.ref }}
         run: |
-          [[ "${{ inputs.ref }}" == "master" ]] || git describe --tags --exact-match
+          [[ "${REF}" == "master" ]] || git describe --tags --exact-match
 
   build:
     name: build docker image for a product
@@ -150,29 +152,34 @@ jobs:
       matrix:
         runner-os: ${{ fromJson(needs.prepare-test-runner-os-list.outputs.runnerlist )}}
       fail-fast: false
+    env:
+      INPUT_IMAGE_NAME: ${{ inputs.image-name }}
+      INPUT_IMAGE_TAGS: ${{ inputs.image-tags }}
+      INPUT_PLATFORMS: ${{ inputs.platforms }}
     steps:
       - name: Check running image
         run: |
-          image_name='${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${{ inputs.image-name }}'
-          for tag in `echo '${{ inputs.image-tags }}' | tr '\n' ' '`; do
+          image_name="${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${INPUT_IMAGE_NAME}"
+          for tag in `echo "${INPUT_IMAGE_TAGS}" | tr '\n' ' '`; do
             echo 'Testing: '${image_name}':'${tag};
             # pdns-auth image returns a 134 exit code
             docker run ${image_name}:${tag} --version || [ "$?" == "134" ]
           done
       - name: Check image digest matches
+        env:
+          EXPECTED_DIGEST: ${{ needs.build.outputs.image-digest }}
         run: |
-          output_digest='${{ needs.build.outputs.image-digest }}'
-          image_name='${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${{ inputs.image-name }}'
-          for tag in `echo '${{ inputs.image-tags }}' | tr '\n' ' '`; do
+          image_name="${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${INPUT_IMAGE_NAME}"
+          for tag in `echo "${INPUT_IMAGE_TAGS}" | tr '\n' ' '`; do
             image_digest=$(docker inspect --format='{{index .RepoDigests 0}}' ${image_name}:${tag} | cut -d '@' -f 2)
-            [[ "${output_digest}" == "${image_digest}" ]] || \
-              ( echo "Image digest does not match => output_digest: "${output_digest}" - image_digest: "${image_digest} && exit 1 )
+            [[ "${EXPECTED_DIGEST}" == "${image_digest}" ]] || \
+              ( echo "Image digest does not match => output_digest: "${EXPECTED_DIGEST}" - image_digest: "${image_digest} && exit 1 )
           done
       - name: Check SBOM and Provenance
         run: |
-          image_name='${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${{ inputs.image-name }}'
-          for tag in `echo '${{ inputs.image-tags }}' | tr '\n' ' '`; do
-            if $(echo '${{ inputs.platforms }}' | grep -qq ','); then
+          image_name="${{ secrets.DOCKERHUB_ORGANIZATION_NAME }}/${INPUT_IMAGE_NAME}"
+          for tag in `echo "${INPUT_IMAGE_TAGS}" | tr '\n' ' '`; do
+            if $(echo "${INPUT_PLATFORMS}" | grep -qq ','); then
               docker buildx imagetools inspect ${image_name}:${tag} --format "{{json .Provenance}}" | jq -e '."linux/'$(dpkg --print-architecture)'" | has("SLSA")'
               docker buildx imagetools inspect ${image_name}:${tag} --format "{{json .SBOM}}" | jq -e '."linux/'$(dpkg --print-architecture)'" | has("SPDX")'
             else
index 9610e7c86281e239720f8d362a7b9b79911d30c0..0774dee52891c6dcebf8c1d04684b34e46010963 100644 (file)
@@ -63,16 +63,19 @@ jobs:
           ref: ${{ inputs.ref }}
           persist-credentials: false
       - id: get-oslist
+        env:
+          OS: ${{ inputs.os }}
+          REF: ${{ inputs.ref }}
         run: |
           available_targets=$(ls builder-support/dockerfiles/Dockerfile.target.* )
-          for i in $(echo "${{ inputs.os }}"); do
+          for i in $(echo "${OS}"); do
             if echo $available_targets | grep -qq $i; then
               targets+=($i)
             else
-              if [[ "${{ inputs.ref }}" == "master" ]]; then
-                echo "::error title=Dockerfile not found for ${i}::OS ${i} not available as target in ${{ inputs.ref }}" && exit 1
+              if [[ "${REF}" == "master" ]]; then
+                echo "::error title=Dockerfile not found for ${i}::OS ${i} not available as target in ${REF}" && exit 1
               else
-                echo "::warning title=Packages will not be generated for ${i}::OS ${i} not available as target in ${{ inputs.ref }}"
+                echo "::warning title=Packages will not be generated for ${i}::OS ${i} not available as target in ${REF}"
               fi
             fi
           done
@@ -109,6 +112,10 @@ jobs:
       pkghashes-ubuntu-noble-x86_64: ${{ steps.pkghashes.outputs.pkghashes-ubuntu-noble-x86_64 }}
       pkghashes-ubuntu-noble-aarch64: ${{ steps.pkghashes.outputs.pkghashes-ubuntu-noble-aarch64 }}
       srchashes: ${{ steps.srchashes.outputs.srchashes }}
+    env:
+      IS_RELEASE: ${{ inputs.is_release}}
+      PRODUCT: ${{ inputs.product }}
+      OS: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -117,7 +124,7 @@ jobs:
           ref: ${{ inputs.ref }}
           persist-credentials: false
       # this builds packages and runs our unit tests (make check)
-      - run: IS_RELEASE=${{ inputs.is_release}} builder/build.sh -v -m ${{ inputs.product }} ${{ matrix.os }}
+      - run: IS_RELEASE=${IS_RELEASE} builder/build.sh -v -m ${PRODUCT} ${OS}
       - name: Get version number
         run: |
           echo "version=$(readlink builder/tmp/latest)" >> $GITHUB_OUTPUT
@@ -129,15 +136,17 @@ jobs:
       - name: Normalize package name
         id: normalize-name
         run: |
-          if [ "x${{ inputs.product }}" = "xauthoritative" ]; then
+          if [ "x${PRODUCT}" = "xauthoritative" ]; then
             echo "normalized-package-name=pdns" >> $GITHUB_OUTPUT
-          elif [ "x${{ inputs.product }}" = "xrecursor" ]; then
+          elif [ "x${PRODUCT}" = "xrecursor" ]; then
             echo "normalized-package-name=pdns-recursor" >> $GITHUB_OUTPUT
           else
-            echo "normalized-package-name=${{ inputs.product }}" >> $GITHUB_OUTPUT
+            echo "normalized-package-name=${PRODUCT}" >> $GITHUB_OUTPUT
           fi
       - name: Include architecture in the packages compressed file name
-        run: for f in $(ls ./built_pkgs/*/*/*-${{ matrix.os }}.tar.bz2 | sed 's/\.tar.bz2$//'); do mv $f.tar.bz2 $f-${{ steps.getarch.outputs.target-arch }}.tar.bz2; done
+        env:
+          TARGET_ARCH: ${{ steps.getarch.outputs.target-arch }}
+        run: for f in $(ls ./built_pkgs/*/*/*-${OS}.tar.bz2 | sed 's/\.tar.bz2$//'); do mv $f.tar.bz2 $f-${TARGET_ARCH}.tar.bz2; done
       - name: Upload packages as GH artifacts
         uses: actions/upload-artifact@v4
         with:
@@ -147,19 +156,28 @@ jobs:
       - name: Extract packages from the tarball
         # so we get provenance for individual packages (and the JSON package manifests from the builder)
         id: extract
+        env:
+          TARGET_ARCH: ${{ steps.getarch.outputs.target-arch }}
+          PACKAGE_NAME: ${{ steps.normalize-name.outputs.normalized-package-name }}
+          VERSION: ${{ steps.getversion.outputs.version }}
         run: |
           mkdir -m 700 -p ./packages/
-          tar xvf ./built_pkgs/*/*/${{ steps.normalize-name.outputs.normalized-package-name }}-${{ steps.getversion.outputs.version }}-${{ matrix.os }}-${{ steps.getarch.outputs.target-arch }}.tar.bz2 -C ./packages/ --transform='s/.*\///'
+          tar xvf ./built_pkgs/*/*/${PACKAGE_NAME}-${VERSION}-${OS}-${TARGET_ARCH}.tar.bz2 -C ./packages/ --transform='s/.*\///'
       - name: Generate package hashes for provenance
         shell: bash
         id: pkghashes
+        env:
+          TARGET_ARCH: ${{ steps.getarch.outputs.target-arch }}
         run: |
-          echo "pkghashes-${{ matrix.os }}-${{ steps.getarch.outputs.target-arch }}=$(sha256sum ./packages/*.rpm ./packages/*.deb ./packages/*.json | base64 -w0)" >> $GITHUB_OUTPUT
+          echo "pkghashes-${OS}-${TARGET_ARCH}=$(sha256sum ./packages/*.rpm ./packages/*.deb ./packages/*.json | base64 -w0)" >> $GITHUB_OUTPUT
       - name: Generate source hash for provenance
         shell: bash
         id: srchashes
+        env:
+          PACKAGE_NAME: ${{ steps.normalize-name.outputs.normalized-package-name }}
+          VERSION: ${{ steps.getversion.outputs.version }}
         run: |
-          echo "srchashes=$(sha256sum ./built_pkgs/*/*/${{ steps.normalize-name.outputs.normalized-package-name }}-${{ steps.getversion.outputs.version }}.tar.* | base64 -w0)" >> $GITHUB_OUTPUT
+          echo "srchashes=$(sha256sum ./built_pkgs/*/*/${PACKAGE_NAME}-${VERSION}.tar.* | base64 -w0)" >> $GITHUB_OUTPUT
       - name: Upload packages to downloads.powerdns.com
         env:
           SSHKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_SECRET }}
@@ -178,12 +196,15 @@ jobs:
     needs: [prepare, build]
     name: Check if hashes were created for all requested targets
     runs-on: ubuntu-24.04
+    env:
+      OUTPUTS: ${{ toJSON(needs.build.outputs) }}
+      OSLIST: ${{ needs.prepare.outputs.oslist }}
     steps:
       - name: Get list of outputs from build jobs
-        run: echo '${{ toJSON(needs.build.outputs) }}' | jq 'keys[]' | grep -vE 'version|product-name' | tee /tmp/build-outputs.txt
+        run: echo "${OUTPUTS}" | jq 'keys[]' | grep -vE 'version|product-name' | tee /tmp/build-outputs.txt
       - name: Get list of OS inputs
         run: |
-          for os in $(echo '${{ needs.prepare.outputs.oslist }}' | jq -r '.[]'); do
+          for os in $(echo "${OSLIST}" | jq -r '.[]'); do
             for architecture in x86_64 ${{ vars.ARM64_USE_UBICLOUD == '1' && 'aarch64' || '' }}; do
               echo "\"pkghashes-$os-$architecture\"" | tee -a /tmp/os-inputs.txt
             done
@@ -252,6 +273,7 @@ jobs:
           HOSTKEY: ${{ secrets.DOWNLOADS_AUTOBUILT_HOSTKEY }}
           PRODUCT: ${{ needs.build.outputs.product-name }}
           VERSION: ${{ needs.build.outputs.version }}
+          DOWNLOAD_PATH: ${{ steps.download-provenance.outputs.download-path }}
         if:
           "${{ env.SSHKEY != '' }}"
         shell: bash
@@ -261,5 +283,5 @@ jobs:
           chmod 600 ~/.ssh/id_ed25519
           echo "$HOSTKEY" > ~/.ssh/known_hosts
           mkdir -m 755 -p "slsa/${PRODUCT}/${VERSION}/"
-          mv ${{steps.download-provenance.outputs.download-path}}/*.jsonl "slsa/${PRODUCT}/${VERSION}"
+          mv "${DOWNLOAD_PATH}"/*.jsonl "slsa/${PRODUCT}/${VERSION}"
           rsync -4rlptD slsa/* "$RSYNCTARGET"
index 5847eedb0833ed1c1b47d107076343bc783a883e..a49547608f3fc0abc4eebbe3cb486b1e87c7d67e 100644 (file)
@@ -160,10 +160,10 @@ jobs:
       working-directory: ./pdns/dnsdistdist/
       run: |
         inv ci-autoconf
-    - run: inv ci-install-rust ${{ env.REPO_HOME }}
+    - run: inv ci-install-rust $REPO_HOME
       if: matrix.product == 'dnsdist'
       working-directory: ./pdns/dnsdistdist/
-    - run: inv ci-build-and-install-quiche ${{ env.REPO_HOME }}
+    - run: inv ci-build-and-install-quiche $REPO_HOME
       if: matrix.product == 'dnsdist'
       working-directory: ./pdns/dnsdistdist/
     - name: Configure dnsdist
@@ -207,7 +207,7 @@ jobs:
       if: matrix.product == 'rec'
       run: |
         inv install-rec-build-deps
-    - run: inv ci-install-rust ${{ env.REPO_HOME }}
+    - run: inv ci-install-rust $REPO_HOME
       if: matrix.product == 'rec'
       working-directory: ./pdns/recursordist/
     - name: Autoreconf rec
@@ -259,17 +259,21 @@ jobs:
     needs: analyze
     runs-on: ubuntu-22.04
     name: Check whether clang-tidy succeeded
+    env:
+      ANNOTATIONS_AUTH: ${{ needs.analyze.outputs.clang-tidy-annotations-auth }}
+      ANNOTATIONS_DIST: ${{ needs.analyze.outputs.clang-tidy-annotations-dnsdist }}
+      ANNOTATIONS_REC: ${{ needs.analyze.outputs.clang-tidy-annotations-rec }}
     steps:
       - run: |
-          if [ "x${{ needs.analyze.outputs.clang-tidy-annotations-auth }}" != "x" -a "${{ needs.analyze.outputs.clang-tidy-annotations-auth }}" != "0" ]; then
+          if [ "x${ANNOTATIONS_AUTH}" != "x" -a "${ANNOTATIONS_AUTH}" != "0" ]; then
             echo "::error::Auth clang-tidy failed"
             exit 1
           fi
-          if [ "x${{ needs.analyze.outputs.clang-tidy-annotations-dnsdist }}" != "x" -a "${{ needs.analyze.outputs.clang-tidy-annotations-dnsdist }}" != "0" ]; then
+          if [ "x${ANNOTATIONS_DIST}" != "x" -a "${ANNOTATIONS_DIST}" != "0" ]; then
             echo "::error::DNSdist clang-tidy failed"
             exit 1
           fi
-          if [ "x${{needs.analyze.outputs.clang-tidy-annotations-rec }}" != "x" -a "${{needs.analyze.outputs.clang-tidy-annotations-rec }}" != "0" ]; then
+          if [ "x${ANNOTATIONS_REC}" != "x" -a "${ANNOTATIONS_REC}" != "0" ]; then
             echo "::error::Rec clang-tidy failed"
             exit 1
           fi
index bd0e432208b2165f59d8f332f43161d7410de47d..2cc99954c369cb70f6fa1ac73fcc778d6bac8f8c 100644 (file)
@@ -82,9 +82,9 @@ jobs:
       - run: inv coverity-clang-configure
       - run: inv ci-autoconf
         working-directory: ./pdns/dnsdistdist/
-      - run: inv ci-install-rust ${{ env.REPO_HOME }}
+      - run: inv ci-install-rust $REPO_HOME
         working-directory: ./pdns/dnsdistdist/
-      - run: inv ci-build-and-install-quiche ${{ env.REPO_HOME }}
+      - run: inv ci-build-and-install-quiche $REPO_HOME
         working-directory: ./pdns/dnsdistdist/
       - run: inv ci-dnsdist-configure full autotools build-dir
         working-directory: ./pdns/dnsdistdist/
index 9fe5cd6c67dcdf3e42ff4fea11eec617c2e770f3..40b41bc912c3b586596de4dc137a60f8b41e1e91 100644 (file)
@@ -149,6 +149,7 @@ jobs:
       NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_API_TOKEN }}
       NETLIFY_SITE_ID_DOCS: ${{ vars.NETLIFY_SITE_ID_DOCS }}
       NETLIFY_SITE_ID_DNSDIST: ${{ vars.NETLIFY_SITE_ID_DNSDIST }}
+      PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
     steps:
       - name: Check required secrets
         run: |
@@ -169,15 +170,15 @@ jobs:
       - name: Deploy docs to Netlify
         run: |
           mkdir -p docs_site/temp
-          tar -xf artifacts/website-${{needs.build-docs.outputs.pdns_version}}/website.tar -C docs_site/temp
+          tar -xf artifacts/website-$PDNS_VERSION/website.tar -C docs_site/temp
           mv docs_site/temp/website/docs.powerdns.com/* docs_site/
           rm -rf docs_site/temp
-          tar -xf artifacts/authoritative-html-docs-${{needs.build-docs.outputs.pdns_version}}/auth-html-docs.tar -C docs_site
-          tar -xf artifacts/recursor-html-docs-${{needs.build-docs.outputs.pdns_version}}/rec-html-docs.tar -C docs_site
+          tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar -C docs_site
+          tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar -C docs_site
           mv docs_site/auth-html-docs docs_site/authoritative
           mv docs_site/rec-html-docs docs_site/recursor
-          cp artifacts/PowerDNS-Authoritative-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Authoritative.pdf docs_site/authoritative/
-          cp artifacts/PowerDNS-Recursor-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Recursor.pdf docs_site/recursor/
+          cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf docs_site/authoritative/
+          cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf docs_site/recursor/
           netlify deploy \
             --dir ./docs_site \
             --site $NETLIFY_SITE_ID_DOCS \
@@ -186,8 +187,8 @@ jobs:
 
       - name: Deploy DNSdist docs to Netlify
         run: |
-          tar -xf artifacts/dnsdist-html-docs-${{needs.build-docs.outputs.pdns_version}}/dnsdist-html-docs.tar
-          cp artifacts/dnsdist-${{needs.build-docs.outputs.pdns_version}}.pdf/dnsdist.pdf dnsdist-html-docs/
+          tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar
+          cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf dnsdist-html-docs/
           netlify deploy \
             --dir ./dnsdist-html-docs \
             --site $NETLIFY_SITE_ID_DNSDIST \
@@ -199,15 +200,22 @@ jobs:
     needs: build-docs
     if: ${{ (github.ref_name == 'master' || vars.DOCS_WORKFLOW_BRANCH_OVERRIDE == 'true') && vars.PUBLISH_DOCS_TO_WEB1 == 'true' }}
     runs-on: ubuntu-22.04
+    env:
+      PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
+      DOCS_HOST: ${{vars.DOCS_HOST}}
+      REC_DOCS_DIR: ${{vars.REC_DOCS_DIR}}
+      AUTH_DOCS_DIR: ${{vars.AUTH_DOCS_DIR}}
+      SSH_KEY: ${{secrets.WEB1_DOCS_SECRET}}
+      HOST_KEY: ${{vars.WEB1_HOSTKEY}}
     steps:
       - name: Check required secrets
         run: |
           missing_secrets=()
-          [ -z "${{ secrets.WEB1_DOCS_SECRET }}" ] && missing_secrets+=("WEB1_DOCS_SECRET")
-          [ -z "${{ vars.WEB1_HOSTKEY }}" ] && missing_secrets+=("WEB1_HOSTKEY")
-          [ -z "${{ vars.DOCS_HOST }}" ] && missing_secrets+=("DOCS_HOST")
-          [ -z "${{ vars.AUTH_DOCS_DIR }}" ] && missing_secrets+=("AUTH_DOCS_DIR")
-          [ -z "${{ vars.REC_DOCS_DIR }}" ] && missing_secrets+=("REC_DOCS_DIR")
+          [ -z "$SSH_KEY" ] && missing_secrets+=("WEB1_DOCS_SECRET")
+          [ -z "$HOST_KEY" ] && missing_secrets+=("WEB1_HOSTKEY")
+          [ -z "$DOCS_HOST" ] && missing_secrets+=("DOCS_HOST")
+          [ -z "$AUTH_DOCS_DIR" ] && missing_secrets+=("AUTH_DOCS_DIR")
+          [ -z "$REC_DOCS_DIR" ] && missing_secrets+=("REC_DOCS_DIR")
           if [ ${#missing_secrets[@]} -ne 0 ]; then
             echo "Error: The following secrets/variables are missing: ${missing_secrets[*]}"
             exit 1
@@ -223,33 +231,22 @@ jobs:
       - id: setup-ssh
         run: |-
           inv ci-docs-add-ssh --ssh-key="$SSH_KEY" --host-key="$HOST_KEY"
-        env:
-          SSH_KEY: ${{secrets.WEB1_DOCS_SECRET}}
-          HOST_KEY: ${{vars.WEB1_HOSTKEY}}
       - name: Publish Auth docs
         run: |
           mkdir -p ./docs/_build
-          tar -xf artifacts/authoritative-html-docs-${{needs.build-docs.outputs.pdns_version}}/auth-html-docs.tar -C ./docs/_build/
-          cp artifacts/PowerDNS-Authoritative-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Authoritative.pdf ./docs/_build/auth-html-docs/
+          tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar -C ./docs/_build/
+          cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf ./docs/_build/auth-html-docs/
           inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="PowerDNS-Authoritative.pdf" --username="docs_powerdns_com" --product="auth" --directory="/${AUTH_DOCS_DIR}/"
-        env:
-          DOCS_HOST: ${{vars.DOCS_HOST}}
-          AUTH_DOCS_DIR: ${{vars.AUTH_DOCS_DIR}}
       - name: Publish Recursor docs
         run: |
-          tar -xf artifacts/recursor-html-docs-${{needs.build-docs.outputs.pdns_version}}/rec-html-docs.tar -C ./docs/_build/
-          cp artifacts/PowerDNS-Recursor-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Recursor.pdf ./docs/_build/rec-html-docs/
+          tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar -C ./docs/_build/
+          cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf ./docs/_build/rec-html-docs/
           inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="PowerDNS-Recursor.pdf" --username="docs_powerdns_com" --product="rec" --directory="/${REC_DOCS_DIR}/"
-        env:
-          DOCS_HOST: ${{vars.DOCS_HOST}}
-          REC_DOCS_DIR: ${{vars.REC_DOCS_DIR}}
       - name: Publish DNSdist docs
         run: |
-          tar -xf artifacts/dnsdist-html-docs-${{needs.build-docs.outputs.pdns_version}}/dnsdist-html-docs.tar -C ./docs/_build/
-          cp artifacts/dnsdist-${{needs.build-docs.outputs.pdns_version}}.pdf/dnsdist.pdf ./docs/_build/dnsdist-html-docs/
+          tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar -C ./docs/_build/
+          cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf ./docs/_build/dnsdist-html-docs/
           inv ci-docs-upload-master --docs-host="${DOCS_HOST}" --pdf="dnsdist.pdf" --username="dnsdist_org" --product="dnsdist" --directory="/${DNSDIST_DOCS_DIR}/"
-        env:
-          DOCS_HOST: ${{vars.DOCS_HOST}}
 
 
   publish-to-aws:
@@ -257,16 +254,24 @@ jobs:
     needs: build-docs
     if: ${{ (github.ref_name == 'master' || vars.DOCS_WORKFLOW_BRANCH_OVERRIDE == 'true') && vars.PUBLISH_DOCS_TO_AWS == 'true' }}
     runs-on: ubuntu-22.04
+    env:
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+      AWS_REGION: ${{ vars.AWS_REGION }}
+      AWS_S3_BUCKET_DOCS: ${{ vars.AWS_S3_BUCKET_DOCS }}
+      AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST }}
+      AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS }}
+      PDNS_VERSION: ${{needs.build-docs.outputs.pdns_version}}
     steps:
       - name: Check required secrets
         run: |
           missing_secrets=()
-          [ -z "${{ secrets.AWS_ACCESS_KEY_ID }}" ] && missing_secrets+=("AWS_ACCESS_KEY_ID")
-          [ -z "${{ secrets.AWS_SECRET_ACCESS_KEY }}" ] && missing_secrets+=("AWS_SECRET_ACCESS_KEY")
-          [ -z "${{ vars.AWS_REGION }}" ] && missing_secrets+=("AWS_REGION")
-          [ -z "${{ vars.AWS_S3_BUCKET_DOCS }}" ] && missing_secrets+=("AWS_S3_BUCKET_DOCS")
-          [ -z "${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST }}" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST")
-          [ -z "${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS }}" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS")
+          [ -z "$AWS_ACCESS_KEY_ID" ] && missing_secrets+=("AWS_ACCESS_KEY_ID")
+          [ -z "$AWS_SECRET_ACCESS_KEY" ] && missing_secrets+=("AWS_SECRET_ACCESS_KEY")
+          [ -z "$AWS_REGION" ] && missing_secrets+=("AWS_REGION")
+          [ -z "$AWS_S3_BUCKET_DOCS" ] && missing_secrets+=("AWS_S3_BUCKET_DOCS")
+          [ -z "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST")
+          [ -z "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS" ] && missing_secrets+=("AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS")
           if [ ${#missing_secrets[@]} -ne 0 ]; then
             echo "Error: The following secrets/variables are missing: ${missing_secrets[*]}"
             exit 1
@@ -289,25 +294,18 @@ jobs:
           type = s3
           provider = AWS
           env_auth = true
-          region = ${{ vars.AWS_REGION }}
+          region = $AWS_REGION
           EOF
       - name: Download artifacts
         uses: actions/download-artifact@v4
         with:
           path: artifacts
       - name: Publish to AWS
-        env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          AWS_REGION: ${{ vars.AWS_REGION }}
-          AWS_S3_BUCKET_DOCS: ${{ vars.AWS_S3_BUCKET_DOCS }}
-          AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST }}
-          AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS }}
         run: |
           if [ -n "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DNSDIST" ]; then
-            tar -xf artifacts/dnsdist-html-docs-${{needs.build-docs.outputs.pdns_version}}/dnsdist-html-docs.tar
-            cp artifacts/dnsdist-${{needs.build-docs.outputs.pdns_version}}.pdf/dnsdist.pdf dnsdist-html-docs/
-            
+            tar -xf artifacts/dnsdist-html-docs-$PDNS_VERSION/dnsdist-html-docs.tar
+            cp artifacts/dnsdist-$PDNS_VERSION.pdf/dnsdist.pdf dnsdist-html-docs/
+
             # Copy files to S3
             echo "Copying DNSdist docs to S3..."
             rclone copy --checksum dnsdist-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/dnsdist.org/
@@ -319,21 +317,21 @@ jobs:
               --paths "/*"
             echo "Invalidation completed."
           fi
-          
+
           if [ -n "$AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS" ]; then
-            tar -xf artifacts/recursor-html-docs-${{needs.build-docs.outputs.pdns_version}}/rec-html-docs.tar
-            cp artifacts/PowerDNS-Recursor-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Recursor.pdf rec-html-docs/
-            
+            tar -xf artifacts/recursor-html-docs-$PDNS_VERSION/rec-html-docs.tar
+            cp artifacts/PowerDNS-Recursor-$PDNS_VERSION.pdf/PowerDNS-Recursor.pdf rec-html-docs/
+
             # Copy all PowerDNS docs to S3
             echo "Copying Recursor docs to S3..."
             rclone copy --checksum rec-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/recursor/
 
-            tar -xf artifacts/authoritative-html-docs-${{needs.build-docs.outputs.pdns_version}}/auth-html-docs.tar
-            cp artifacts/PowerDNS-Authoritative-${{needs.build-docs.outputs.pdns_version}}.pdf/PowerDNS-Authoritative.pdf auth-html-docs/
+            tar -xf artifacts/authoritative-html-docs-$PDNS_VERSION/auth-html-docs.tar
+            cp artifacts/PowerDNS-Authoritative-$PDNS_VERSION.pdf/PowerDNS-Authoritative.pdf auth-html-docs/
             echo "Copying Authoritative docs to S3..."
             rclone copy --checksum auth-html-docs/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/authoritative/
 
-            tar -xf artifacts/website-${{needs.build-docs.outputs.pdns_version}}/website.tar
+            tar -xf artifacts/website-$PDNS_VERSION/website.tar
             echo "Copying website files to S3..."
             rclone copy --checksum website/docs.powerdns.com/ docs-s3:$AWS_S3_BUCKET_DOCS/docs.powerdns.com/
 
index 2bf15749ee43636bdd555e958d6dc151ad34b61f..a83a7d417ca9ce000e62e5ac4410d321d872220e 100644 (file)
@@ -103,8 +103,6 @@ jobs:
     name: ${{ matrix.requirements-file }} - Validate list of packages and hashes
     runs-on: ubuntu-22.04
     needs: list-pip-requirement-files
-    env:
-      SERVICE_IP_ADDR: 127.0.0.1
     services:
       database:
         image: epicwink/proxpi
@@ -116,8 +114,11 @@ jobs:
       fail-fast: false
       matrix:
         requirements-file: ${{ fromJson(needs.list-pip-requirement-files.outputs.req-files) }}
+    env:
+      SERVICE_IP_ADDR: 127.0.0.1
+      REQUIREMENTS_FILE: ${{ matrix.requirements-file }}
     steps:
-      - run: echo "${{ matrix.requirements-file }}"
+      - run: echo "${REQUIREMENTS_FILE}"
       - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
       - uses: actions/checkout@v4
         with:
@@ -130,13 +131,15 @@ jobs:
       - run: pip config set global.trusted-host ${{ env.SERVICE_IP_ADDR }}
       - id: proxpi-docker
         run: echo "id=$(docker ps | grep "epicwink/proxpi" | awk '{print $1}')" >> "$GITHUB_OUTPUT"
-      - run: pip install -r ${{ matrix.requirements-file }}
+      - run: pip install -r ${REQUIREMENTS_FILE}
       - name: Get the list of packages requested to the pip proxy
+        env:
+          ID: ${{ steps.proxpi-docker.outputs.id }}
         run: |
-          docker logs ${{ steps.proxpi-docker.outputs.id }} 2>&1 | grep whl | awk '{print $8}' | cut -d "/" -f 4 | awk -F'-' '{print $1"=="$2}' | sort -u --ignore-case | sed 's/_/-/' | egrep -v "pip==|setuptools==" > /tmp/proxpi.log
+          docker logs "${ID}" 2>&1 | grep whl | awk '{print $8}' | cut -d "/" -f 4 | awk -F'-' '{print $1"=="$2}' | sort -u --ignore-case | sed 's/_/-/' | egrep -v "pip==|setuptools==" > /tmp/proxpi.log
           cat /tmp/proxpi.log
       - name: check only listed packages were installed
-        run: for i in `cat /tmp/proxpi.log`; do grep -qq -i $i ${{ matrix.requirements-file }} || ( echo "$i not found" && exit 1 ); done
+        run: for i in `cat /tmp/proxpi.log`; do grep -qq -i $i ${REQUIREMENTS_FILE} || ( echo "$i not found" && exit 1 ); done
 
   run-rec-bulk-test-on-ubicloud:
     if: ${{ vars.UBICLOUD_DAILY_REC_BULKTEST == '1' }}