- pip install --upgrade pip
- pip install apkg
- scripts/make-obs.sh
- - echo y | scripts/build-in-obs.sh $OBS_REPO
+ - echo y | scripts/ci/build-in-obs.sh $OBS_REPO
obs:release:
<<: *obs_trigger
.enable_repo_build: &enable_repo_build
before_script:
- - ./scripts/enable-repo-cznic-labs.sh knot-dns
+ - ./scripts/ci/enable-repo-cznic-labs.sh knot-dns
.pkg_test: &pkg_test
stage: pkg
run_target(
'bench',
- command: '../scripts/bench.sh',
+ command: '../scripts/meson/bench.sh',
)
cd "$(dirname ${0})"
OUTNAME="$1"
-CDEFS="../../scripts/gen-cdefs.sh"
+CDEFS="../../scripts/meson/gen-cdefs.sh"
LIBKRES="${MESON_BUILD_ROOT}/lib/libkres.so"
KRESD="${MESON_BUILD_ROOT}/daemon/kresd"
if [ ! -e "$LIBKRES" ]; then
# needed for get-archive
archive_url = "https://secure.nic.cz/files/knot-resolver/knot-resolver-{{ version }}.tar.xz"
signature_url = "https://secure.nic.cz/files/knot-resolver/knot-resolver-{{ version }}.tar.xz.asc"
-version_script = "scripts/upstream-version.sh"
+version_script = "scripts/lib/upstream-version.sh"
[apkg]
compat = 4
# man page
man_config = configuration_data()
man_config.set('version', meson.project_version())
-man_config.set('date', run_command('../scripts/get-date.sh', check: true).stdout())
+man_config.set('date', run_command('../scripts/lib/get-date.sh', check: true).stdout())
man_config.set('man_seealso_systemd', '')
if systemd_legacy_units == 'enabled'
endif
-make_doc = find_program('../scripts/make-doc.sh')
+make_doc = find_program('../scripts/meson/make-doc.sh')
run_target(
'doc',
command: make_doc
clangtidy = find_program('clang-tidy', required: false)
luacheck = find_program('luacheck', required: false)
flake8 = find_program('flake8', required: false)
-pylint_run = find_program('scripts/run-pylint.sh')
+pylint_run = find_program('scripts/meson/run-pylint.sh')
message('-------------------------')
if clangtidy.found()
--- /dev/null
+# Knot Resolver scripts
+
+These are auxiliary scripts used for Knot Resolver development.
+
+The scripts in the root of this directory are meant to be executed directly by
+developers. Some may also be run by automated tools.
+
+There are also the following subdirectories. The scripts in these are *only
+ever* meant to be run by automated tools:
+
+- `ci`: specific to the CI/CD pipeline
+- `lib`: (potentially) generally useful scripts to be called by other scripts
+- `meson`: specific to the build system
+- `poe-tasks`: run by the `poe` script in the repository root
+ - `utils`: scripts additionally called by the `poe` tasks
+
+For more information about each script, see the explanatory comments in its
+source.
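+
+For example, the scripts under `poe-tasks/` are not meant to be invoked by
+hand; they are dispatched through the `poe` wrapper in the repository root.
+A minimal sketch (the task names below are illustrative and assumed to match
+the script filenames under `poe-tasks/`):
+
+```sh
+# from the repository root
+./poe check   # runs scripts/poe-tasks/check
+./poe test    # runs scripts/poe-tasks/test
+```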
# Push packaging files to OBS
#
# Example usage:
-# 1. ./scripts/make-obs.sh
-# 2. ./scripts/build-in-obs.sh knot-resolver-latest
+# 1. ./scripts/ci/make-obs.sh
+# 2. ./scripts/ci/build-in-obs.sh knot-resolver-latest
set -o errexit -o nounset -o xtrace
pkgdir='pkg/obs'
# * dpkg-buildpackage
#
# usage:
-# ./scripts/make-obs.sh [path.to.archive.xz] [1]
+# ./scripts/ci/make-obs.sh [path.to.archive.xz] [1]
#
# supply archives as optional arguments to build from,
# otherwise archive will be built from sources by apkg
# output at pkg/obs/ (removed on each run)
set -o errexit -o nounset
-pushd "$(dirname ${0})/.."
+pushd "$(dirname ${0})/../.."
OUTDIR="pkg/obs"
APKG_OPTS="-O $OUTDIR"
+++ /dev/null
-#!/usr/bin/env bash
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-# $1 = top source directory
-# $2 = coverage data directory path
-# $3 = output directory for *.info files
-
-set -o errexit -o nounset
-shopt -s nullglob
-IFS=$'\n'
-
-TOPSRCDIR="$1"
-DATAROOT="$2"
-OUTDIR="$3"
-
-cd "${TOPSRCDIR}"
-for COVNAME in $(find "${DATAROOT}" -name .topdir_kresd_coverage)
-do
- find "${DATAROOT}" -name '*.gcda' -not -path "${DATAROOT}/*" -delete
- COVDIR="$(dirname "${COVNAME}")"
- COVDATA_FILENAMES=("${COVDIR}"/*) # filenames in BASH array
- (( ${#COVDATA_FILENAMES[*]} )) || continue # skip empty dirs
-
- cp -r -t ${TOPSRCDIR} "${COVDIR}"/*
- ${LCOV} -q --no-external --capture -d lib -d daemon -d modules -o "$(mktemp -p "${OUTDIR}" -t XXXXXXXX.c.info)" > /dev/null
-done
+++ /dev/null
-#!/usr/bin/env bash
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-# generate variables for coverage testing
-# $1 = top source directory
-# $2 = coverage data directory path
-# $3 = name of test/new subdirectory name
-# $4 = [optional] --export to generate export commands
-
-set -o errexit -o nounset
-shopt -s nullglob
-
-test -z "${COVERAGE:-}" && exit 0 # not enabled, do nothing
-test ! -z "${V:-}" && set -o xtrace # verbose mode
-
-EXPORT=""
-test "${4:-}" == "--export" && EXPORT="export "
-TOPSRCDIR="$1"
-DATAROOT="$2"
-OUTPATH="$2/$3"
-
-# check that output directory is empty
-# beware: Makefile will always call coverage_env.sh for all targets
-# so directories get created but not populated
-# i.e. test -d is not sufficient check
-OUTPATH_FILENAMES=("${OUTPATH}"/*) # filenames in BASH array
-(( ${#OUTPATH_FILENAMES[*]} )) && echo "false" && >&2 echo "fatal: output directory ${OUTPATH} must be empty (or non-existent)" && exit 1
-
-mkdir -p "${OUTPATH}"
-# convert paths to absolute
-pushd "${OUTPATH}" &> /dev/null
-touch .topdir_kresd_coverage
-OUTPATH="$(pwd -P)"
-popd &> /dev/null
-
-# determine GCOV_PREFIX_STRIP value for current source directory
-TOPSRCDIR_SLASHES="${TOPSRCDIR//[^\/]/}" # remove everything except /
-GCOV_PREFIX_STRIP="${#TOPSRCDIR_SLASHES}" # number of / == number of components
-
-KRESD_COVERAGE_STATS="${OUTPATH}/luacov.stats.out"
-GCOV_PREFIX="${OUTPATH}"
-echo "${EXPORT}KRESD_COVERAGE_STATS=\"${KRESD_COVERAGE_STATS}\" ${EXPORT}GCOV_PREFIX=\"${GCOV_PREFIX}\" ${EXPORT}GCOV_PREFIX_STRIP=\"${GCOV_PREFIX_STRIP}\""
#!/usr/bin/env bash
# SPDX-License-Identifier: GPL-3.0-or-later
set -o nounset
-cd "$(dirname $0)/.."
+cd "$(dirname $0)/../.."
# Get date from NEWS if possible (regular release)
DATE=$(head -n1 < NEWS | sed 's/.*(\(.*\)).*/\1/' | grep -E '^[0-9]{4}-[0-9]{2}-[0-9]{2}$$')
+++ /dev/null
-#!/usr/bin/env bash
-# SPDX-License-Identifier: GPL-3.0-or-later
-# Generate stats file in luacov format indicating that files named on stdin
-# were not processed.
-#
-# Normally luacov does not know about files which were not loaded so
-# without this manual addition the files are missing in coverage report.
-
-# Usage:
-# $ luacov_gen_empty.sh < list_of_lua_files > luacov.empty_stats.out
-
-set -o errexit -o nounset
-IFS=$'\n'
-
-while read FILENAME
-do
- echo -e "0:${FILENAME}\n "
-done
+++ /dev/null
-#!/usr/bin/env luajit
--- SPDX-License-Identifier: GPL-3.0-or-later
-
-local luacov = require('luacov')
-local ReporterBase = require('luacov.reporter').ReporterBase
-local LcovReporter = setmetatable({}, ReporterBase)
-LcovReporter.__index = LcovReporter
-
-function LcovReporter:on_new_file(filename)
- self.finfo = self.current_files[filename] or {name=filename, coverage={}}
-end
-
-function LcovReporter:on_mis_line(_, lineno, _)
- self.finfo.coverage[lineno] = self.finfo.coverage[lineno] or 0
-end
-
-function LcovReporter:on_hit_line(_, lineno, _, hits)
- self.finfo.coverage[lineno] = (self.finfo.coverage[lineno] or 0) + hits
-end
-
-function LcovReporter:on_end_file()
- self.current_files[self.finfo.name] = self.finfo
- self.finfo = nil
-end
-
--- Write out results in lcov format
-local function write_lcov_info(files)
- for fname, finfo in pairs(files) do
- local instrumented, nonzero = 0, 0
- print('TN:')
- print(string.format('SF:%s', fname))
- for i, hits in pairs(finfo.coverage) do
- print(string.format('DA:%d,%d', i, hits))
- instrumented = instrumented + 1
- if hits > 0 then
- nonzero = nonzero + 1
- end
- end
- print(string.format('LH:%d', nonzero))
- print(string.format('LF:%d', instrumented))
- print('end_of_record')
- end
-end
-
--- Accumulate total coverage
-local all_files = {}
-for _, fname in ipairs(arg) do
- local conf = luacov.load_config()
- conf.statsfile = fname
- local reporter = assert(LcovReporter:new(conf))
- reporter.current_files = all_files
- reporter:run()
- reporter:close()
-end
-
--- Write results
-write_lcov_info(all_files)
+++ /dev/null
-#!/usr/bin/env bash
-
-set -o errexit
-set -o nounset
-
-function install_pipx {
- python3 -m pip install --user pipx
- python3 -m pipx ensurepath
- export PATH="$PATH:/root/.local/bin" # hack to make binaries installed with pipx work
-}
-
-function pipx {
- python3 -m pipx ${@}
-}
-
-function init_debian {
- export DEBIAN_FRONTEND=noninteractive
-
- # upgrade system to latest
- apt-get update -qqq
- apt-get upgrade -y -qqq
-
- # configure repository with Knot Resolver dependencies
- apt-get -y -qqq install apt-transport-https lsb-release ca-certificates wget curl gnupg2
- sh -c 'echo "deb http://download.opensuse.org/repositories/home:/CZ-NIC:/knot-resolver-build/Debian_10/ /" > /etc/apt/sources.list.d/home:CZ-NIC:knot-resolver-build.list'
- sh -c 'curl -fsSL https://download.opensuse.org/repositories/home:CZ-NIC:knot-resolver-build/Debian_10/Release.key | gpg --dearmor > /etc/apt/trusted.gpg.d/home_CZ-NIC_knot-resolver-build.gpg'
- apt-get update -qqq
-
- # apkg
- apt-get install -y python3-pip meson git python3-venv
-}
-
-function init_fedora {
- # upgrade system to latest and install pip
- dnf upgrade -y
- dnf install -y python3-pip
-}
-
-
-# system setup
-if command -v dnf; then
- init_fedora
-elif command -v apt-get; then
- init_debian
-else
- echo "System not supported."
- exit 1
-fi
-
-# install apkg
-install_pipx
-pipx install apkg
-
-# prepare the repo
-#git clone https://gitlab.nic.cz/knot/knot-resolver
-cd /repo
-git config --global user.email "automated-script"
-git config --global user.name "Automated Script"
-git checkout manager-integration-without-submodule
-git submodule update --init --recursive
-
-# build the package
-apkg system-setup
-apkg build -b
-apkg srcpkg
-
-
-
-
-
-
+++ /dev/null
-#!/usr/bin/env luajit
--- SPDX-License-Identifier: GPL-3.0-or-later
-
--- parse install commands from stdin
--- input: PREFIX=... make install --dry-run --always-make
--- output: <install path> <source path>
--- (or sed commands if --sed was specified)
-
-output = 'list'
-if #arg > 1 or arg[1] == '-h' or arg[1] == '--help' then
- print(string.format([[
-Read install commands and map install paths to paths in source directory.
-
-Usage:
-$ PREFIX=... make install --dry-run --always-make | %s
-
-Example output:
-/kresd/git/.local/lib/kdns_modules/policy.lua modules/policy/policy.lua
-
-Option --sed will produce output suitable as input suitable for sed.]],
- arg[0]))
- os.exit(1)
-elseif #arg == 0 then
- output = 'list'
-elseif arg[1] == '--sed' then
- output = 'sed'
-else
- print('Invalid arguments. See --help.')
- os.exit(2)
-end
-
--- remove double // from paths and remove trailing /
-function normalize_path(path)
- assert(path)
- repeat
- path, changes = path:gsub('//', '/')
- until changes == 0
- return path:gsub('/$', '')
-end
-
-function is_opt(word)
- return word:match('^-')
-end
-
--- opts requiring additional argument to be skipped
-local ignored_opts_with_arg = {
- ['--backup'] = true,
- ['-g'] = true,
- ['--group'] = true,
- ['-m'] = true,
- ['--mode'] = true,
- ['-o'] = true,
- ['--owner'] = true,
- ['--strip-program'] = true,
- ['--suffix'] = true,
-}
-
--- state machine junctions caused by --opts
--- returns: new state (expect, mode) and target name if any
-function parse_opts(word, expect, mode)
- if word == '--' then
- return 'names', mode, nil -- no options anymore
- elseif word == '-d' or word == '--directory' then
- return 'opt_or_name', 'newdir', nil
- elseif word == '-t' or word == '--target-directory' then
- return 'targetdir', mode, nil
- elseif word:match('^--target-directory=') then
- return 'opt_or_name', mode, string.sub(word, 20)
- elseif ignored_opts_with_arg[word] then
- return 'ignore', mode, nil -- ignore next word
- else
- return expect, mode, nil -- unhandled opt
- end
-end
-
-
--- cmd: complete install command line: install -m 0644 -t dest src1 src2
--- dirs: names known to be directories: name => true
--- returns: updated dirs
-function process_cmd(cmd, dirs)
- -- print('# ' .. cmd)
- sanity_check(cmd)
- local expect = 'install'
- local mode = 'copy' -- copy or newdir
- local target -- last argument or argument for install -t
- local names = {} -- non-option arguments
-
- for word in cmd:gmatch('%S+') do
- if expect == 'install' then -- parsing 'install'
- assert(word == 'install')
- expect = 'opt_or_name'
- elseif expect == 'opt_or_name' then
- if is_opt(word) then
- expect, mode, newtarget = parse_opts(word, expect, mode)
- target = newtarget or target
- else
- if mode == 'copy' then
- table.insert(names, word)
- elseif mode == 'newdir' then
- local path = normalize_path(word)
- dirs[path] = true
- else
- assert(false, 'bad mode')
- end
- end
- elseif expect == 'targetdir' then
- local path = normalize_path(word)
- dirs[path] = true
- target = word
- expect = 'opt_or_name'
- elseif expect == 'names' then
- table.insert(names, word)
- elseif expect == 'ignore' then
- expect = 'opt_or_name'
- else
- assert(false, 'bad expect')
- end
- end
- if mode == 'newdir' then
- -- no mapping to print, this cmd just created directory
- return dirs
- end
-
- if not target then -- last argument is the target
- target = table.remove(names)
- end
- assert(target, 'fatal: no target in install cmd')
- target = normalize_path(target)
-
- for _, name in pairs(names) do
- basename = string.gsub(name, "(.*/)(.*)", "%2")
- if not dirs[target] then
- print('fatal: target directory "' .. target .. '" was not created yet!')
- os.exit(2)
- end
- -- mapping installed name -> source name
- if output == 'list' then
- print(target .. '/' .. basename, name)
- elseif output == 'sed' then
- print(string.format([[s`%s`%s`g]],
- target .. '/' .. basename, name))
- else
- assert(false, 'unsupported output')
- end
- end
- return dirs
-end
-
-function sanity_check(cmd)
- -- shell quotation is not supported
- assert(not cmd:match('"'), 'quotes " are not supported')
- assert(not cmd:match("'"), "quotes ' are not supported")
- assert(not cmd:match('\\'), "escapes like \\ are not supported")
- assert(cmd:match('^install%s'), 'not an install command')
-end
-
--- remember directories created by install -d so we can expand relative paths
-local dirs = {}
-while true do
- local cmd = io.read("*line")
- if not cmd then
- break
- end
- local isinstall = cmd:match('^install%s')
- if isinstall then
- dirs = process_cmd(cmd, dirs)
- end
-end
#!/usr/bin/env bash
# SPDX-License-Identifier: GPL-3.0-or-later
set -o errexit -o nounset
-cd "$(dirname "${0}")/.."
+cd "$(dirname "${0}")/../.."
# generate JSON schema for the manager's declarative config
pushd manager
# SPDX-License-Identifier: GPL-3.0-or-later
set -o errexit -o nounset
-cd "$(dirname ${0})/.."
+cd "$(dirname ${0})/../.."
# Find Python modules and standalone Python scripts
FILES=$(find ./tests/pytests \
# ensure consistent behaviour
src_dir="$(dirname "$(realpath "$0")")"
-source $src_dir/_env.sh
+source $src_dir/utils/_env.sh
aggregate_rv=0
function check_rv {
# check formatting using black
echo -e "${yellow}Checking formatting using black...${reset}"
-black manager/knot_resolver_manager tests/manager scripts/create_setup.py --check --diff
+black manager/knot_resolver_manager tests/manager scripts/poe-tasks/utils/create_setup.py --check --diff
check_rv $?
echo
# check that setup.py is not behind pyproject.toml
echo -e "${yellow}Checking setup.py${reset}"
-python scripts/create_setup.py | diff - setup.py
+python scripts/poe-tasks/utils/create_setup.py | diff - setup.py
check_rv $?
python setup.py --help > /dev/null
check_rv $?
# ensure consistent behaviour
src_dir="$(dirname "$(realpath "$0")")"
-source $src_dir/_env.sh
+source $src_dir/utils/_env.sh
reconfigure=''
if [ -f .build_kresd/ninja.build ]; then
# ensure consistent behaviour
src_dir="$(dirname "$(realpath "$0")")"
-source $src_dir/_env.sh
+source $src_dir/utils/_env.sh
echo Building Knot Resolver documentation
echo ------------------------------------
# ensure consistent behaviour
src_dir="$(dirname "$(realpath "$0")")"
-source $src_dir/_env.sh
+source $src_dir/utils/_env.sh
# validate all configuration examples
for example in $PWD/etc/config/config.example.*.yaml;
# ensure consistent behaviour
src_dir="$(dirname "$(realpath "$0")")"
-source $src_dir/_env.sh
+source $src_dir/utils/_env.sh
-dirs="manager/knot_resolver_manager/ tests/manager scripts/create_setup.py build_c_extensions.py"
+dirs="manager/knot_resolver_manager/ tests/manager scripts/poe-tasks/utils/create_setup.py build_c_extensions.py"
# run black code formater
black $dirs
# ensure consistent behaviour
src_dir="$(dirname "$(realpath "$0")")"
-source $src_dir/_env.sh
+source $src_dir/utils/_env.sh
# create setup.py
-python scripts/create_setup.py > setup.py
+python scripts/poe-tasks/utils/create_setup.py > setup.py
# ensure consistent behaviour
src_dir="$(dirname "$(realpath "$0")")"
-source $src_dir/_env.sh
+source $src_dir/utils/_env.sh
# run knot-resolver client
python3 -m knot_resolver_manager.cli $@
# ensure consistent behaviour
src_dir="$(dirname "$(realpath "$0")")"
-source $src_dir/_env.sh
+source $src_dir/utils/_env.sh
build_kresd
# ensure consistent behaviour
src_dir="$(dirname "$(realpath "$0")")"
-source $src_dir/_env.sh
+source $src_dir/utils/_env.sh
build_kresd
# ensure consistent behaviour
src_dir="$(dirname "$(realpath "$0")")"
-source $src_dir/_env.sh
+source $src_dir/utils/_env.sh
echo The debug server will be listening on port localhost:5678
echo Use VSCode remote attach feature to connect to the debug server
# ensure consistent behaviour
src_dir="$(dirname "$(realpath "$0")")"
-source $src_dir/_env.sh
+source $src_dir/utils/_env.sh
# run pytest
env PYTHONPATH=. pytest --junitxml=unit.junit.xml --cov=manager/knot_resolver_manager --show-capture=all tests/manager
+++ /dev/null
-#!/usr/bin/env bash
-# SPDX-License-Identifier: GPL-3.0-or-later
-set -o errexit -o nounset
-
-# following checkers are disabled on purpose:
-# Clang does not support attribute cleanup and this is causing false positives in following checkers:
-# unix.Malloc
-# alpha.unix.SimpleStream
-# alpha.unix.Stream
-# https://bugs.llvm.org/show_bug.cgi?id=3888
-
-# These are disabled for other reasons:
-# alpha.clone.CloneChecker # way too many false positives
-# alpha.core.CastToStruct # we use this pattern too much, hard to avoid in many cases
-# alpha.deadcode.UnreachableCode # false positives/flags sanity checks depending on implementation details
-# alpha.security.MallocOverflow # not smart enough to infer max values from data types
-
-exec scan-build --status-bugs -no-failure-reports \
--analyzer-config aggressive-binary-operation-simplification=true \
--disable-checker unix.Malloc \
--enable-checker alpha.core.BoolAssignment \
--enable-checker alpha.core.CastSize \
--enable-checker alpha.core.Conversion \
--enable-checker alpha.core.DynamicTypeChecker \
--enable-checker alpha.core.FixedAddr \
--enable-checker alpha.core.IdenticalExpr \
--enable-checker alpha.core.PointerArithm \
--enable-checker alpha.core.PointerSub \
--enable-checker alpha.core.SizeofPtr \
--enable-checker alpha.core.TestAfterDivZero \
--enable-checker alpha.cplusplus.IteratorRange \
--enable-checker alpha.security.ArrayBound \
--enable-checker alpha.security.ArrayBoundV2 \
--enable-checker alpha.security.ReturnPtrRange \
--enable-checker alpha.security.taint.TaintPropagation \
--enable-checker alpha.unix.BlockInCriticalSection \
--enable-checker alpha.unix.Chroot \
--enable-checker alpha.unix.PthreadLock \
--enable-checker alpha.unix.cstring.BufferOverlap \
--enable-checker alpha.unix.cstring.NotNullTerminated \
--enable-checker alpha.unix.cstring.OutOfBounds \
--enable-checker nullability.NullableDereferenced \
--enable-checker nullability.NullablePassedToNonnull \
--enable-checker nullability.NullableReturnedFromNonnull \
--enable-checker optin.performance.Padding \
--enable-checker optin.portability.UnixAPI \
--enable-checker security.FloatLoopCounter \
--enable-checker valist.CopyToSelf \
--enable-checker valist.Uninitialized \
--enable-checker valist.Unterminated \
-"$@"
]
-run_configtest = find_program('../../scripts/test-config.sh')
+run_configtest = find_program('../../scripts/meson/test-config.sh')
foreach config_test : config_tests
['yaml', 'PyYAML (for deckard)'],
]
-prepare_deckard = find_program('../../scripts/test-integration-prepare.sh')
+prepare_deckard = find_program('../../scripts/meson/test-integration-prepare.sh')
deckard_env = environment()
deckard_env.prepend('PATH', sbin_dir)