COVERITY_TOKEN: ${{ secrets.coverity_rec_token }}
COVERITY_EMAIL: ${{ secrets.coverity_email }}
- # The jobs below check that only the pinned version of Python3 packages are installed with pip. Running in a container, a
- # pip proxy registers all requests for installing packages with pip. Then, the downloaded packages and their versions are compared
- # with the list used for the installation (i.e. docs/requirements.txt, pdns/recursordist/docs/requirements.txt, etc). If a package
- # is missing or a version does not match the one expected, this job fails, which makes the workflow fail.
- #
- # The pinned version plus hashes are generated using pip-compile using an input file that includes the original list of packages
- # (pip-compile --generate-hashes -U requirements.in). "pip-compile" can be installed via pip-tools with Python 3.11, which is the version
- # used in the CI. Any other Python version would end up with different versions for packages and could result in workflow failures.
- #
- # One recurring error thrown by this validation is when a new version of a pinned package is released for a "setup-requires" dependency
- # of one of the packages in the list (see https://github.com/PowerDNS/pdns/pull/14596). The package version in “requirements.in” should
- # be modified to solve this issue. In some cases, it is enough to generate again the list of packages, making sure to add the -U flag
- # to force the upgrade: "pip-compile --generate-hashes -U requirements.in" (this could include upgrading other packages).
-
- list-pip-requirement-files:
- if: ${{ vars.SCHEDULED_MISC_DAILIES }}
- runs-on: ubuntu-22.04
- outputs:
- req-files: ${{ steps.get-list-requirements.outputs.files }}
- steps:
- - uses: actions/checkout@v5
- with:
- persist-credentials: false
- - name: Get all requirements.txt files and export them as outputs
- id: get-list-requirements
- run: |
- echo "files=$(find . -name 'requirements.txt' | jq -R -s -c 'split("\n")[:-1]')" >> "$GITHUB_OUTPUT"
-
- validate-pip-hashes:
- if: ${{ vars.SCHEDULED_MISC_DAILIES }}
- name: ${{ matrix.requirements-file }} - Validate list of packages and hashes
- runs-on: ubuntu-22.04
- needs: list-pip-requirement-files
- services:
- database:
- image: epicwink/proxpi@sha256:a219ea0ef4f5b272eaf18bc5a5d00220c5aa07debb434d36161550862768aa93
- ports:
- - 5000:5000
- options: >-
- --restart always
- strategy:
- fail-fast: false
- matrix:
- requirements-file: ${{ fromJson(needs.list-pip-requirement-files.outputs.req-files) }}
- env:
- SERVICE_IP_ADDR: 127.0.0.1
- REQUIREMENTS_FILE: ${{ matrix.requirements-file }}
- steps:
- - run: echo "${REQUIREMENTS_FILE}"
- - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
- - uses: actions/checkout@v5
- with:
- persist-credentials: false
- - uses: actions/setup-python@v6
- with:
- python-version: '3.11'
- # Configure pip index-url set to proxpi
- - run: pip config set global.index-url http://${{ env.SERVICE_IP_ADDR }}:5000/index/
- - run: pip config set global.trusted-host ${{ env.SERVICE_IP_ADDR }}
- - id: proxpi-docker
- run: echo "id=$(docker ps | grep "epicwink/proxpi" | awk '{print $1}')" >> "$GITHUB_OUTPUT"
- - run: pip install -r ${REQUIREMENTS_FILE}
- - name: Get the list of packages requested to the pip proxy
- env:
- ID: ${{ steps.proxpi-docker.outputs.id }}
- run: |
- docker logs "${ID}" 2>&1 | grep whl | awk '{print $8}' | cut -d "/" -f 4 | awk -F'-' '{print $1"=="$2}' | sort -u --ignore-case | sed 's/_/-/g' | egrep -v "pip==|setuptools==" > /tmp/proxpi.log
- cat /tmp/proxpi.log
- - name: check only listed packages were installed
- run: for i in `cat /tmp/proxpi.log`; do grep -qq -i $i ${REQUIREMENTS_FILE} || ( echo "$i not found" && exit 1 ); done
-
run-rec-bulk-test-on-ubicloud:
if: ${{ vars.UBICLOUD_DAILY_REC_BULKTEST == '1' }}
name: Run Recursor (master) Bulk Test On Ubicloud
--- /dev/null
+name: "Verify all versions of python packages are pinned"
+
+on:
+ pull_request:
+ branches:
+ - master
+ paths:
+ - '**/requirements.txt'
+ - '**/requirements.in'
+ schedule:
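+    # nightly run, 01:30 UTC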
+ - cron: '30 1 * * *'
+
+permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
+ contents: read
+
+jobs:
+  # The jobs below check that only the pinned versions of Python 3 packages are installed with pip. A pip proxy running in a
+  # container records every package request made by pip. The downloaded packages and their versions are then compared
+  # with the list used for the installation (e.g. docs/requirements.txt, pdns/recursordist/docs/requirements.txt, etc.). If a package
+  # is missing or a version does not match the expected one, the job fails, which makes the workflow fail.
+ #
+  # The pinned versions plus hashes are generated with pip-compile from an input file that lists the original packages
+  # (pip-compile --generate-hashes -U requirements.in). "pip-compile" is provided by the pip-tools package and must be run with
+  # Python 3.11, the version used in the CI. Any other Python version may resolve different package versions and cause workflow failures.
+ #
+  # One recurring error thrown by this validation occurs when a new version of a pinned package is released for a "setup-requires"
+  # dependency of one of the packages in the list (see https://github.com/PowerDNS/pdns/pull/14596). Adjusting the package version in
+  # "requirements.in" solves this. In some cases it is enough to regenerate the list of packages, making sure to pass the -U flag
+  # to force the upgrade: "pip-compile --generate-hashes -U requirements.in" (this may also upgrade other packages).
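+  #
+  # A typical regeneration session (the file path is an example) could look like:
+  #   pip install pip-tools                                  # provides pip-compile
+  #   pip-compile --generate-hashes -U docs/requirements.in  # writes docs/requirements.txt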
+
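+  # Both jobs run on every pull_request that touches a requirements file; on the nightly
+  # schedule they run only when the repository variable SCHEDULED_VERIFY_PINNING is set.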
+ list-pip-requirement-files:
+ if: ${{ !github.event.schedule || vars.SCHEDULED_VERIFY_PINNING }}
+ runs-on: ubuntu-22.04
+ outputs:
+ req-files: ${{ steps.get-list-requirements.outputs.files }}
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ persist-credentials: false
+ - name: Get all requirements.txt files and export them as outputs
+ id: get-list-requirements
+ run: |
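+          # find prints one path per line; jq -R -s -c packs them into a compact JSON
+          # array (split("\n")[:-1] drops the trailing empty element), e.g.
+          # ["./docs/requirements.txt","./pdns/recursordist/docs/requirements.txt"]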
+ echo "files=$(find . -name 'requirements.txt' | jq -R -s -c 'split("\n")[:-1]')" >> "$GITHUB_OUTPUT"
+
+ validate-pip-hashes:
+ if: ${{ !github.event.schedule || vars.SCHEDULED_VERIFY_PINNING }}
+ name: ${{ matrix.requirements-file }} - Validate list of packages and hashes
+ runs-on: ubuntu-22.04
+ needs: list-pip-requirement-files
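+    # proxpi is a caching PyPI proxy: pip is pointed at it below, so every package
+    # pip downloads shows up in the service container's logs.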
+ services:
+      proxpi:
+ image: epicwink/proxpi@sha256:a219ea0ef4f5b272eaf18bc5a5d00220c5aa07debb434d36161550862768aa93
+ ports:
+ - 5000:5000
+ options: >-
+ --restart always
+ strategy:
+ fail-fast: false
+ matrix:
+ requirements-file: ${{ fromJson(needs.list-pip-requirement-files.outputs.req-files) }}
+ env:
+ SERVICE_IP_ADDR: 127.0.0.1
+ REQUIREMENTS_FILE: ${{ matrix.requirements-file }}
+ steps:
+ - run: echo "${REQUIREMENTS_FILE}"
+ - uses: PowerDNS/pdns/set-ubuntu-mirror@meta
+ - uses: actions/checkout@v5
+ with:
+ persist-credentials: false
+ - uses: actions/setup-python@v6
+ with:
+ python-version: '3.11'
+      # Point pip's index-url at the proxpi service
+ - run: pip config set global.index-url http://${{ env.SERVICE_IP_ADDR }}:5000/index/
+ - run: pip config set global.trusted-host ${{ env.SERVICE_IP_ADDR }}
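+      # The two pip-config commands above roughly amount to this pip.conf:
+      #   [global]
+      #   index-url = http://127.0.0.1:5000/index/
+      #   trusted-host = 127.0.0.1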
+ - id: proxpi-docker
+ run: echo "id=$(docker ps | grep "epicwink/proxpi" | awk '{print $1}')" >> "$GITHUB_OUTPUT"
+      - run: pip install -r "${REQUIREMENTS_FILE}"
+ - name: Get the list of packages requested to the pip proxy
+ env:
+ ID: ${{ steps.proxpi-docker.outputs.id }}
+ run: |
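+          # Reduce proxpi's access log to unique "name==version" pairs: keep wheel
+          # requests, split the "name-version-..." wheel filename, normalize
+          # underscores to hyphens, and filter out pip/setuptools, which are
+          # excluded from the check.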
+          docker logs "${ID}" 2>&1 | grep whl | awk '{print $8}' | cut -d "/" -f 4 | awk -F'-' '{print $1"=="$2}' | sort -u --ignore-case | sed 's/_/-/g' | grep -Ev "pip==|setuptools==" > /tmp/proxpi.log
+ cat /tmp/proxpi.log
+      - name: Check that only listed packages were installed
+        run: for i in $(cat /tmp/proxpi.log); do grep -q -i "$i" "${REQUIREMENTS_FILE}" || { echo "$i not found"; exit 1; }; done