From: Andrei Pavel Date: Thu, 6 Jun 2024 12:41:10 +0000 (+0300) Subject: [#3287] fix pycodestyle warnings X-Git-Tag: Kea-2.7.0~14 X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=8e37580e591920369b534bf5825b940009a22e25;p=thirdparty%2Fkea.git [#3287] fix pycodestyle warnings - E111 indentation is not a multiple of 4 - E117 over-indented - E125 continuation line with same indent as next logical line - E127 continuation line over-indented for visual indent - E128 continuation line under-indented for visual indent - E129 visually indented line with same indent as next logical line - E131 continuation line unaligned for hanging indent - E201 whitespace after '[' - E201 whitespace after '{' - E202 whitespace before ')' - E202 whitespace before ']' - E202 whitespace before '}' - E203 whitespace before ' - E203 whitespace before ',' - E222 multiple spaces after operator - E225 missing whitespace around operator - E228 missing whitespace around modulo operator - E231 missing whitespace after ',' - E251 unexpected spaces around keyword / parameter equals - E261 at least two spaces before inline comment - E262 inline comment should start with '# ' - E265 block comment should start with '# ' - E301 expected 1 blank line, found 0 - E302 expected 2 blank lines, found 1 - E303 too many blank lines (2) - E305 expected 2 blank lines after class or function definition, found 1 - E306 expected 1 blank line before a nested definition, found 0 - E401 multiple imports on one line - E402 module level import not at top of file - E501 line too long - E502 the backslash is redundant between brackets - E703 statement ends with a semicolon - E713 test for membership should be 'not in' - E722 do not use bare 'except' - E741 ambiguous variable name 'l' - W605 invalid escape sequence '\/' --- diff --git a/doc/sphinx/api2doc.py b/doc/sphinx/api2doc.py index 1f0a0ed21e..b1c1a59288 100755 --- a/doc/sphinx/api2doc.py +++ b/doc/sphinx/api2doc.py @@ -38,8 +38,8 @@ def read_input_files(files): # use OrderedDict to preserve order of fields in cmd-syntax try: descr = json.load(fp, object_pairs_hook=collections.OrderedDict) - except: - print('\nError while processing %s\n\n' % f) + except Exception as e: + print(f'\nError while processing {f}: {e}\n\n') raise if name != descr['name']: exit("Expected name == descr['name'], but name is {name} and descr['name'] is {descr['name']}") @@ -78,14 +78,14 @@ API Reference for dm, funcs in sorted(daemons.items()): rst += '.. _commands-%s:\n\n' % dm rst += 'Commands supported by `%s` daemon: ' % dm - funcs = sorted([ ':ref:`%s `' % (f['name'], f['name']) for f in funcs]) + funcs = sorted([':ref:`%s `' % (f['name'], f['name']) for f in funcs]) rst += ', '.join(funcs) rst += '.\n\n' for h, funcs in sorted(hooks.items()): rst += '.. 
_commands-%s:\n\n' % h rst += 'Commands supported by `%s` hook library: ' % h - funcs = sorted([ ':ref:`%s `' % (f['name'], f['name']) for f in funcs]) + funcs = sorted([':ref:`%s `' % (f['name'], f['name']) for f in funcs]) rst += ', '.join(funcs) rst += '.\n\n' @@ -112,16 +112,16 @@ API Reference # availability rst += 'Availability: %s ' % func['avail'] - rst += '(:ref:`%s ` hook library)' % (func['hook'], func['hook']) if 'hook' in func else '(built-in)' + rst += f'(:ref:`{func["hook"]} ` hook library)' if 'hook' in func else '(built-in)' rst += '\n\n' # access try: access = func['access'] - except: - print('\naccess missing in %s\n\n' % name) + except Exception as e: + print(f'\naccess missing in {name}: {e}\n\n') raise - if not access in ['read', 'write']: + if access not in ['read', 'write']: print('\nUnknown access %s in %s\n\n' % (access, name)) raise ValueError('access must be read or write') rst += 'Access: %s *(parameter ignored in this Kea version)* \n\n' % access @@ -151,8 +151,8 @@ API Reference rst += '\n\n' if 'cmd-comment' in func: - for l in func['cmd-comment']: - rst += "%s\n" % l + for line in func['cmd-comment']: + rst += "%s\n" % line rst += '\n' # response syntax @@ -184,7 +184,8 @@ API Reference rst += '- 1 - error\n' rst += '- 2 - unsupported\n' rst += '- 3 - empty (command was completed successfully, but no data was affected or returned)\n' - rst += '- 4 - conflict (command could not apply requested configuration changes because they were in conflict with the server state)\n\n' + rst += '- 4 - conflict (command could not apply requested configuration changes because they were ' + rst += 'in conflict with the server state)\n\n' return rst diff --git a/doc/sphinx/conf.py b/doc/sphinx/conf.py index f0ca589908..fc8fc548b1 100644 --- a/doc/sphinx/conf.py +++ b/doc/sphinx/conf.py @@ -16,7 +16,7 @@ import os # import sys # sys.path.insert(0, os.path.abspath('.')) -# to avoid "sphinx.errors.SphinxParallelError: RecursionError: maximum recursion depth exceeded while pickling an object" +# to avoid sphinx.errors.SphinxParallelError: RecursionError: maximum recursion depth exceeded while pickling an object import sys sys.setrecursionlimit(5000) @@ -45,7 +45,7 @@ with open(config_ac_path) as f: release = candidate_release break version = release -dashed_version_series='-'.join(version.split('.')[0:2]) +dashed_version_series = '-'.join(version.split('.')[0:2]) # now let's replace versions with odd minor number with dev if int(dashed_version_series[-1]) % 2 != 0: @@ -146,7 +146,7 @@ pygments_style = None # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -#html_theme = 'alabaster' +# html_theme = 'alabaster' html_theme = 'sphinx_rtd_theme' html_logo = 'static/kea-imageonly-100bw.png' @@ -154,9 +154,9 @@ html_logo = 'static/kea-imageonly-100bw.png' # further. For a list of options available for each theme, see the # documentation. # -#html_theme_options = { -# "logo": "kea-logo-100x70.png", -#} +# html_theme_options = { +# "logo": "kea-logo-100x70.png", +# } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, @@ -177,7 +177,7 @@ html_static_path = ['static'] # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. 
-#htmlhelp_basename = 'KeaAdministratorReferenceManualdoc' +# htmlhelp_basename = 'KeaAdministratorReferenceManualdoc' # -- Options for LaTeX output ------------------------------------------------ @@ -241,10 +241,11 @@ todo_include_todos = True # -- Substitutions ----------------------------------------------------------- -rst_prolog=""" +rst_prolog = """ .. |cloudsmith_repo| replace:: kea-{dashed_version_series} """.format(dashed_version_series=dashed_version_series) + # -- Functions --------------------------------------------------------------- # Do generation of api.rst and kea-messages.rst here in conf.py instead of Makefile.am @@ -278,17 +279,17 @@ def run_generate_docs(_): # The first entry on this list is the actual file to copy, the second is a unique name # that will be used when copied over to arm/ directory. FILES_TO_COPY = [ - [ '../../platforms.rst', 'platforms.rst' ], - [ '../examples/template-power-user-home/info.md', 'template-power-user-home.md' ], - [ '../examples/template-power-user-home/kea-ca-1.conf', 'template-power-user-home-ca-1.conf' ], - [ '../examples/template-power-user-home/kea-ca-2.conf', 'template-power-user-home-ca-2.conf' ], - [ '../examples/template-power-user-home/kea-dhcp4-1.conf', 'template-power-user-home-dhcp4-1.conf' ], - [ '../examples/template-power-user-home/kea-dhcp4-2.conf', 'template-power-user-home-dhcp4-2.conf' ], - [ '../examples/template-ha-mt-tls/info.md', 'template-ha-mt-tls.md' ], - [ '../examples/template-ha-mt-tls/kea-ca-1.conf', 'template-ha-mt-tls-ca-1.conf' ], - [ '../examples/template-ha-mt-tls/kea-ca-2.conf', 'template-ha-mt-tls-ca-2.conf' ], - [ '../examples/template-ha-mt-tls/kea-dhcp4-1.conf', 'template-ha-mt-tls-dhcp4-1.conf' ], - [ '../examples/template-ha-mt-tls/kea-dhcp4-2.conf', 'template-ha-mt-tls-dhcp4-2.conf' ] + ['../../platforms.rst', 'platforms.rst'], + ['../examples/template-power-user-home/info.md', 'template-power-user-home.md'], + ['../examples/template-power-user-home/kea-ca-1.conf', 'template-power-user-home-ca-1.conf'], + ['../examples/template-power-user-home/kea-ca-2.conf', 'template-power-user-home-ca-2.conf'], + ['../examples/template-power-user-home/kea-dhcp4-1.conf', 'template-power-user-home-dhcp4-1.conf'], + ['../examples/template-power-user-home/kea-dhcp4-2.conf', 'template-power-user-home-dhcp4-2.conf'], + ['../examples/template-ha-mt-tls/info.md', 'template-ha-mt-tls.md'], + ['../examples/template-ha-mt-tls/kea-ca-1.conf', 'template-ha-mt-tls-ca-1.conf'], + ['../examples/template-ha-mt-tls/kea-ca-2.conf', 'template-ha-mt-tls-ca-2.conf'], + ['../examples/template-ha-mt-tls/kea-dhcp4-1.conf', 'template-ha-mt-tls-dhcp4-1.conf'], + ['../examples/template-ha-mt-tls/kea-dhcp4-2.conf', 'template-ha-mt-tls-dhcp4-2.conf'] ] from shutil import copyfile @@ -298,6 +299,7 @@ def run_generate_docs(_): print("Copying %s to %s" % (src, dst)) copyfile(src, dst) + # custom setup hook def setup(app): app.add_crossref_type('isccmd', 'isccmd') diff --git a/doc/sphinx/mes2doc.py b/doc/sphinx/mes2doc.py index 91d13cb740..a9ee4e856f 100755 --- a/doc/sphinx/mes2doc.py +++ b/doc/sphinx/mes2doc.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright (C) 2019-2021 Internet Systems Consortium, Inc. ("ISC") +# Copyright (C) 2019-2024 Internet Systems Consortium, Inc. ("ISC") # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. 
If a copy of the MPL was not distributed with this @@ -58,7 +58,7 @@ def read_input_files(files): messages[msg_id] = (section, msg_id, msg_text, msg_descr) # start next message - m = re.search('^%\s?([A-Z0-9_]+)\s+(.*)', line); + m = re.search(r'^%\s?([A-Z0-9_]+)\s+(.*)', line) msg_id, msg_text = m.groups() msg_descr = [] @@ -107,12 +107,12 @@ used to indicate a placeholder for data that is provided by the Kea code during rst += msg_text + '\n\n' - rst += ''.join([' ' + l + '\n' for l in msg_descr]) + rst += ''.join([' ' + line + '\n' for line in msg_descr]) rst += '\n' - return rst + def generate(in_files, out_file): messages = read_input_files(in_files) diff --git a/hammer.py b/hammer.py index 7d930f572b..13d092c153 100755 --- a/hammer.py +++ b/hammer.py @@ -90,8 +90,8 @@ SYSTEMS = { '24.04': True, }, 'debian': { - '8': False, - '9': False, + '8': False, + '9': False, '10': True, '11': True, '12': True, @@ -245,7 +245,7 @@ Vagrant.configure("2") do |config| end """ -RECOMMENDED_VAGRANT_VERSION='2.2.16' +RECOMMENDED_VAGRANT_VERSION = '2.2.16' log = logging.getLogger() @@ -256,12 +256,14 @@ def red(txt): return '\033[1;31m%s\033[0;0m' % txt return txt + def green(txt): """Return colorized (if the terminal supports it) or plain text.""" if sys.stdout.isatty(): return '\033[0;32m%s\033[0;0m' % txt return txt + def blue(txt): """Return colorized (if the terminal supports it) or plain text.""" if sys.stdout.isatty(): @@ -287,24 +289,24 @@ def get_system_revision(): revision = revision[0] if not system or not revision: raise Exception('fallback to /etc/os-release') - except: + except Exception: if os.path.exists('/etc/os-release'): vals = {} with open('/etc/os-release') as f: - for l in f.readlines(): - if '=' in l: - key, val = l.split('=', 1) + for line in f.readlines(): + if '=' in line: + key, val = line.split('=', 1) vals[key.strip()] = val.strip().replace('"', '') for i in ['ID', 'ID_LIKE']: if i in vals: - system_candidates=vals[i].strip('"').split() + system_candidates = vals[i].strip('"').split() for system_candidate in system_candidates: if system_candidate in SYSTEMS: system = system_candidate break else: - continue + continue break if system is None: raise Exception('cannot determine system') @@ -381,13 +383,16 @@ def execute(cmd, timeout=60, cwd=None, env=None, raise_error=True, dry_run=False for attempt in range(attempts): if interactive: - # Issue: [B602:subprocess_popen_with_shell_equals_true] subprocess call with shell=True identified, security issue. + # Issue: [B602:subprocess_popen_with_shell_equals_true] subprocess call with shell=True identified, + # security issue. p = subprocess.Popen(cmd, cwd=cwd, env=env, shell=True) # nosec B602 exitcode = p.wait() else: - # Issue: [B602:subprocess_popen_with_shell_equals_true] subprocess call with shell=True identified, security issue. - p = subprocess.Popen(cmd, cwd=cwd, env=env, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) # nosec B602 + # Issue: [B602:subprocess_popen_with_shell_equals_true] subprocess call with shell=True identified, + # security issue. 
+ p = subprocess.Popen(cmd, cwd=cwd, env=env, shell=True, # nosec B602 + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) if capture: output = '' @@ -458,7 +463,7 @@ def _prepare_installed_packages_cache_for_debs(): for line in out.splitlines(): line = line.strip() - m = re.search('^([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+(.+)', line) + m = re.search(r'^([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+(.+)', line) if not m: continue status, name, version, arch, descr = m.groups() @@ -510,7 +515,8 @@ def install_pkgs(pkgs, timeout=60, env=None, check_times=False, pkg_cache=None): pkg_cache = {} # prepare cache if needed - if not pkg_cache and system in ['centos', 'rhel', 'fedora', 'debian', 'ubuntu', 'rocky']:#, 'alpine']: # TODO: complete caching support for alpine + if not pkg_cache and system in ['centos', 'rhel', 'fedora', 'debian', 'ubuntu', + 'rocky']: # , 'alpine']: # TODO: complete caching support for alpine if system in ['centos', 'rhel', 'fedora', 'rocky']: pkg_cache.update(_prepare_installed_packages_cache_for_rpms()) elif system in ['debian', 'ubuntu']: @@ -676,7 +682,7 @@ class VagrantEnv(object): capture=True, raise_error=False) if exitcode != 0: if 'There is container on your system' in out and 'lxc-destroy' in out: - m = re.search('`lxc-destroy.*?`', out) + m = re.search(r'`lxc-destroy.*?`', out) if m: # destroy some old container cmd = m.group(0)[1:-1] @@ -699,8 +705,8 @@ class VagrantEnv(object): # Reason for nosec: it is clearly a https link. with urllib.request.urlopen(url) as response: # nosec B310 data = response.read() - except: - log.exception('ignored exception') + except Exception as e: + log.exception(f'ignored exception: {e}') return {} data = json.loads(data) return data @@ -728,7 +734,7 @@ class VagrantEnv(object): if provider_found: try: v = int(ver['number']) - except: + except ValueError: return ver['number'] if v > latest_version: latest_version = v @@ -743,7 +749,7 @@ class VagrantEnv(object): return "not created" _, out = execute("vagrant status", cwd=self.vagrant_dir, timeout=15, capture=True, quiet=True) - m = re.search('default\s+(.+)\(', out) + m = re.search(r'default\s+(.+)\(', out) if not m: raise Exception('cannot get status in:\n%s' % out) return m.group(1).strip() @@ -759,7 +765,6 @@ class VagrantEnv(object): execute("vagrant reload --no-provision --force", cwd=self.vagrant_dir, timeout=15 * 60, dry_run=self.dry_run) - def package(self): """Package Vagrant system into Vagrant box.""" execute('vagrant halt', cwd=self.vagrant_dir, dry_run=self.dry_run, raise_error=False, attempts=3) @@ -791,7 +796,7 @@ class VagrantEnv(object): # reset machine-id execute('sudo rm -f %s/rootfs/var/lib/dbus/machine-id' % lxc_container_path) - #execute('sudo truncate -s 0 %s/rootfs/etc/machine-id' % lxc_container_path) + # execute('sudo truncate -s 0 %s/rootfs/etc/machine-id' % lxc_container_path) execute('sudo rm -f %s/rootfs/etc/machine-id' % lxc_container_path) # pack rootfs @@ -908,7 +913,7 @@ class VagrantEnv(object): if self.system in ['ubuntu', 'debian']: upload_cmd += ' -X POST -H "Content-Type: multipart/form-data" --data-binary "@%s" ' - file_ext = 'deb' # include both '.deb' and '.ddeb' files + file_ext = 'deb' # include both '.deb' and '.ddeb' files elif self.system in ['fedora', 'centos', 'rhel', 'rocky']: upload_cmd += ' --upload-file %s ' @@ -960,8 +965,8 @@ class VagrantEnv(object): cmd = 'scp -F %s -r default:/home/vagrant/aggregated_tests.xml .' 
% ssh_cfg_path execute(cmd, cwd=self.vagrant_dir) - except: # pylint: disable=bare-except - log.exception('ignored issue with parsing unit test results') + except Exception as e: + log.exception(f'ignored issue with parsing unit test results: {e}') return total, passed @@ -1109,8 +1114,8 @@ def _install_gtest_sources(): gtest_version = '1.14.0' gtest_path = f'/usr/src/googletest-release-{gtest_version}/googletest' if os.path.exists(gtest_path): - log.info(f'gtest is already installed in {gtest_path}.') - return + log.info(f'gtest is already installed in {gtest_path}.') + return execute('mkdir -p ~/.hammer-tmp') cmd = 'wget --no-verbose -O ~/.hammer-tmp/gtest.tar.gz ' @@ -1122,17 +1127,17 @@ def _install_gtest_sources(): execute('rm -rf ~/.hammer-tmp') -def _install_libyang_from_sources(ignore_errors = False): +def _install_libyang_from_sources(ignore_errors=False): """Install libyang from sources.""" for prefix in ['/usr', '/usr/local']: libyang_so_candidates = [f'{prefix}/lib/libyang.so', f'{prefix}/lib64/libyang.so'] libyang_header = f'{prefix}/include/libyang/version.h' if (any(os.path.exists(i) for i in libyang_so_candidates) and os.path.exists(libyang_header) and - execute(f"grep -F '#define LY_VERSION_MAJOR 2' '{libyang_header}'", raise_error=False) == 0): + execute(f"grep -F '#define LY_VERSION_MAJOR 2' '{libyang_header}'", raise_error=False) == 0): log.info(f'libyang is already installed at {libyang_header}.') return - version='v2.1.4' + version = 'v2.1.4' execute('rm -rf ~/.hammer-tmp') execute('mkdir -p ~/.hammer-tmp') @@ -1140,7 +1145,8 @@ def _install_libyang_from_sources(ignore_errors = False): execute('git clone https://github.com/CESNET/libyang.git ~/.hammer-tmp/libyang') execute(f'git checkout {version}', cwd='~/.hammer-tmp/libyang') execute('mkdir ~/.hammer-tmp/libyang/build') - execute('cmake -DBUILD_TESTING=OFF -DCMAKE_C_FLAGS="-Wno-incompatible-pointer-types" ..', cwd='~/.hammer-tmp/libyang/build') + execute('cmake -DBUILD_TESTING=OFF -DCMAKE_C_FLAGS="-Wno-incompatible-pointer-types" ..', + cwd='~/.hammer-tmp/libyang/build') execute('make -j $(nproc || gnproc || echo 1)', cwd='~/.hammer-tmp/libyang/build') execute('sudo make install', cwd='~/.hammer-tmp/libyang/build') system, revision = get_system_revision() @@ -1154,17 +1160,17 @@ def _install_libyang_from_sources(ignore_errors = False): execute('rm -rf ~/.hammer-tmp') -def _install_sysrepo_from_sources(ignore_errors = False): +def _install_sysrepo_from_sources(ignore_errors=False): """Install sysrepo from sources.""" for prefix in ['/usr', '/usr/local']: sysrepo_so_candidates = [f'{prefix}/lib/libsysrepo.so', f'{prefix}/lib64/libsysrepo.so'] sysrepo_header = f'{prefix}/include/sysrepo/version.h' if (any(os.path.exists(i) for i in sysrepo_so_candidates) and os.path.exists(sysrepo_header) and - execute(f"grep -F '#define SR_VERSION_MAJOR 7' '{sysrepo_header}'", raise_error=False) == 0): + execute(f"grep -F '#define SR_VERSION_MAJOR 7' '{sysrepo_header}'", raise_error=False) == 0): log.info(f'sysrepo is already installed at {sysrepo_header}.') return - version='v2.2.12' + version = 'v2.2.12' # Create repository for YANG modules and change ownership to current user. 
execute('sudo mkdir -p /etc/sysrepo') @@ -1190,17 +1196,17 @@ def _install_sysrepo_from_sources(ignore_errors = False): execute('rm -rf ~/.hammer-tmp') -def _install_libyang_cpp_from_sources(ignore_errors = False): +def _install_libyang_cpp_from_sources(ignore_errors=False): """Install libyang-cpp from sources.""" for prefix_lib in ['/usr/lib', '/usr/lib64', '/usr/local/lib', '/usr/local/lib64']: libyang_cpp_so = f'{prefix_lib}/libyang-cpp.so' libyang_cpp_pc = f'{prefix_lib}/pkgconfig/libyang-cpp.pc' if (os.path.exists(libyang_cpp_so) and os.path.exists(libyang_cpp_pc) and - execute(f"grep -F 'Version: 1.1.0' '{libyang_cpp_pc}'", raise_error=False) == 0): + execute(f"grep -F 'Version: 1.1.0' '{libyang_cpp_pc}'", raise_error=False) == 0): log.info(f'libyang-cpp is already installed at {libyang_cpp_so}.') return - version='ae7d649ea75da081725c119dd553b2ef3121a6f8' + version = 'ae7d649ea75da081725c119dd553b2ef3121a6f8' execute('rm -rf ~/.hammer-tmp') execute('mkdir -p ~/.hammer-tmp') @@ -1208,9 +1214,11 @@ def _install_libyang_cpp_from_sources(ignore_errors = False): execute('git clone https://github.com/CESNET/libyang-cpp.git ~/.hammer-tmp/libyang-cpp') execute(f'git checkout {version}', cwd='~/.hammer-tmp/libyang-cpp') # New cpp compiler is more picky about missing headers. (ex. Fedora 40) - return_code = execute('sudo grep "#include " ~/.hammer-tmp/libyang-cpp/src/Context.cpp', raise_error=False) + return_code = execute('sudo grep "#include " ~/.hammer-tmp/libyang-cpp/src/Context.cpp', + raise_error=False) if return_code == 1: - execute('sed -i "/#include /a #include " ~/.hammer-tmp/libyang-cpp/src/Context.cpp') + execute(r'sed -i "/#include /a #include " ' + '~/.hammer-tmp/libyang-cpp/src/Context.cpp') execute('mkdir ~/.hammer-tmp/libyang-cpp/build') execute('cmake -DBUILD_TESTING=OFF .. ', cwd='~/.hammer-tmp/libyang-cpp/build') execute('make -j $(nproc || gnproc || echo 1)', cwd='~/.hammer-tmp/libyang-cpp/build') @@ -1226,17 +1234,17 @@ def _install_libyang_cpp_from_sources(ignore_errors = False): execute('rm -rf ~/.hammer-tmp') -def _install_sysrepo_cpp_from_sources(ignore_errors = False): +def _install_sysrepo_cpp_from_sources(ignore_errors=False): """Install sysrepo-cpp from sources.""" for prefix_lib in ['/usr/lib', '/usr/lib64', '/usr/local/lib', '/usr/local/lib64']: sysrepo_cpp_so = f'{prefix_lib}/libsysrepo-cpp.so' sysrepo_cpp_pc = f'{prefix_lib}/pkgconfig/sysrepo-cpp.pc' if (os.path.exists(sysrepo_cpp_so) and os.path.exists(sysrepo_cpp_pc) and - execute(f"grep -F 'Version: 1.1.0' '{sysrepo_cpp_pc}'", raise_error=False) == 0): + execute(f"grep -F 'Version: 1.1.0' '{sysrepo_cpp_pc}'", raise_error=False) == 0): log.info(f'sysrepo-cpp is already installed at {sysrepo_cpp_so}.') return - version='02634174ffc60568301c3d9b9b7cf710cff6a586' + version = '02634174ffc60568301c3d9b9b7cf710cff6a586' execute('rm -rf ~/.hammer-tmp') execute('mkdir -p ~/.hammer-tmp') @@ -1258,7 +1266,7 @@ def _install_sysrepo_cpp_from_sources(ignore_errors = False): execute('rm -rf ~/.hammer-tmp') -def _install_netconf_libraries_from_sources(ignore_errors = False): +def _install_netconf_libraries_from_sources(ignore_errors=False): _install_libyang_from_sources(ignore_errors) _install_sysrepo_from_sources(ignore_errors) _install_libyang_cpp_from_sources(ignore_errors) @@ -1321,8 +1329,8 @@ def _configure_mysql(system, revision, features): # Some systems, usually old ones, might require a cerain PKCS format # of the key. Try to regenerate it here, but don't stop if it fails. 
# If the key is wrong, it will fail later anyway. - exit_code = execute('openssl rsa -in src/lib/asiolink/testutils/ca/kea-server.key ' \ - '-out src/lib/asiolink/testutils/ca/kea-server.key', raise_error=False) + exit_code = execute('openssl rsa -in src/lib/asiolink/testutils/ca/kea-server.key ' + '-out src/lib/asiolink/testutils/ca/kea-server.key', raise_error=False) if exit_code != 0: log.warning(f'openssl command failed with exit code {exit_code}, but continuing...') for file in [ @@ -1375,7 +1383,8 @@ ssl_key = {cert_dir}/kea-client.key elif system == 'freebsd': cmd = "echo 'SET PASSWORD = \"\";' " - cmd += "| sudo mysql -u root --password=\"$(sudo cat /root/.mysql_secret | grep -v '^#')\" --connect-expired-password" + cmd += ("| sudo mysql -u root --password=\"$(sudo cat /root/.mysql_secret | grep -v '^#')\"" + " --connect-expired-password") execute(cmd, raise_error=False) elif system == 'alpine': @@ -1405,7 +1414,8 @@ ssl_key = {cert_dir}/kea-client.key if 'tls' in features: # ALTER USER is the best place to put the REQUIRE but, if it is not # supported, then downgrade to GRANT. - exit_code = execute('''sudo mysql -u root -e "ALTER USER 'keatest_secure'@'localhost' REQUIRE X509;"''', raise_error=False) + exit_code = execute('''sudo mysql -u root -e "ALTER USER 'keatest_secure'@'localhost' REQUIRE X509;"''', + raise_error=False) if exit_code == 0: # If ALTER succeeds, then we still have to GRANT without REQUIRE. execute('''sudo mysql -u root -e "GRANT ALL ON keatest.* TO 'keatest_secure'@'localhost';"''') @@ -1473,9 +1483,10 @@ def _restart_postgresql(system, revision): log.error('Command "sudo systemctl restart postgresql.service" failed. Here is the journal:') _, output = execute('sudo journalctl -xu postgresql.service', raise_error=False) log.error('And here are the logs:') - _, output = execute("sudo -u postgres psql -A -t -c 'SELECT pg_current_logfile()'", capture=True, quiet=True) + _, output = execute("sudo -u postgres psql -A -t -c 'SELECT pg_current_logfile()'", + capture=True, quiet=True) logfile = os.path.basename(output.strip()) - _, output = execute(f'sudo find /var -type f -name "{logfile}" -exec cat {{}} \;', raise_error=False) + _, output = execute(fr'sudo find /var -type f -name "{logfile}" -exec cat {{}} \;', raise_error=False) sys.exit(exit_code) @@ -1484,7 +1495,7 @@ def _restart_postgresql(system, revision): # and user both set to 'all'. This is to not affect authentication of # `postgres` user which should have a separate entry. def _change_postgresql_auth_method(connection_type, auth_method, hba_file): - execute("sudo sed -i.bak 's/^{}\(.*\)all\(.*\)all\(.*\) [a-z0-9]*$/{}\\1all\\2all\\3 {}/g' '{}'".format( + execute(r"sudo sed -i.bak 's/^{}\(.*\)all\(.*\)all\(.*\) [a-z0-9]*$/{}\1all\2all\3 {}/g' '{}'".format( connection_type, connection_type, auth_method, hba_file), cwd='/tmp') @@ -1520,12 +1531,13 @@ def _configure_pgsql(system, revision, features): var_db_postgres_data = output.rstrip() # Create postgres internals. - execute('sudo test ! -d {} && sudo /usr/local/etc/rc.d/postgresql oneinitdb || true'.format(var_db_postgres_data)) + execute(f'sudo test ! 
-d {var_db_postgres_data} && sudo /usr/local/etc/rc.d/postgresql oneinitdb || true') - # if the file '/var/db/postgres/data*/postmaster.opts' does not exist the 'restart' of postgresql will fail with error: + # if the file '/var/db/postgres/data*/postmaster.opts' does not exist the 'restart' of postgresql will fail + # with error: # pg_ctl: could not read file "/var/db/postgres/data*/postmaster.opts" # the initial start of the postgresql will create the 'postmaster.opts' file - execute('sudo test ! -f {}/postmaster.opts && sudo service postgresql onestart || true'.format(var_db_postgres_data)) + execute(f'sudo test ! -f {var_db_postgres_data}/postmaster.opts && sudo service postgresql onestart || true') _enable_postgresql(system, revision) _restart_postgresql(system, revision) @@ -1541,8 +1553,8 @@ def _configure_pgsql(system, revision, features): # before any other local auth method for higher priority. Let's simulate # that by putting it just after the auth header. if 0 != execute("sudo cat {} | grep -E '^local.*all.*postgres'".format(hba_file), raise_error=False): - auth_header='# TYPE DATABASE USER ADDRESS METHOD' - postgres_auth_line='local all postgres ident' + auth_header = '# TYPE DATABASE USER ADDRESS METHOD' + postgres_auth_line = 'local all postgres ident' # The "\\" followed by newline is for BSD support. execute("""sudo sed -i.bak '/{}/a\\ {} @@ -1610,7 +1622,7 @@ def _get_package_version(package: str): elif system in ['centos', 'fedora', 'rhel', 'rocky']: cmd = "dnf list {} -y | tr -s ' ' | cut -d ' ' -f 2 | tail -n 1" elif system == 'freebsd': - cmd = "pkg search {0} | grep -Eo '^{0}-[0-9_,\.]+' | sed 's/{0}-//g'" + cmd = r"pkg search {0} | grep -Eo '^{0}-[0-9_,\.]+' | sed 's/{0}-//g'" elif system == 'arch': cmd = "pacman -Qi {} | tr -s ' ' | grep -F 'Version :' | cut -d ' ' -f 3" else: @@ -1732,9 +1744,11 @@ def install_packages_local(system, revision, features, check_times, ignore_error packages.extend(['postgresql', 'postgresql-server']) if revision == '9': packages.append('postgresql13-devel') + def link_pg_config(): if not os.path.exists('/usr/bin/pg_config'): execute('sudo ln -s /usr/pgsql-13/bin/pg_config /usr/bin/pg_config') + deferred_functions.append(link_pg_config) else: packages.append('postgresql-devel') @@ -1873,7 +1887,6 @@ def install_packages_local(system, revision, features, check_times, ignore_error else: packages.extend(['mariadb-client', 'mariadb-server', 'libmariadb-dev-compat']) - if 'pgsql' in features: if revision == '16.04': packages.extend(['postgresql-client', 'libpq-dev', 'postgresql', 'postgresql-server-dev-all']) @@ -2268,8 +2281,10 @@ def _build_rpm(system, revision, features, tarball_path, env, check_times, dry_r for f in os.listdir(rpm_dir): if f == 'kea.spec': continue - execute('cp %s %s/SOURCES' % (os.path.join(rpm_dir, f), rpm_root_path), check_times=check_times, dry_run=dry_run) - execute('cp %s %s/SPECS' % (os.path.join(rpm_dir, 'kea.spec'), rpm_root_path), check_times=check_times, dry_run=dry_run) + execute('cp %s %s/SOURCES' % (os.path.join(rpm_dir, f), rpm_root_path), check_times=check_times, + dry_run=dry_run) + execute('cp %s %s/SPECS' % (os.path.join(rpm_dir, 'kea.spec'), rpm_root_path), check_times=check_times, + dry_run=dry_run) execute('cp %s %s/SOURCES' % (tarball_path, rpm_root_path), check_times=check_times, dry_run=dry_run) services_list = ['kea-dhcp4.service', 'kea-dhcp6.service', 'kea-dhcp-ddns.service', 'kea-ctrl-agent.service'] @@ -2277,11 +2292,11 @@ def _build_rpm(system, revision, features, tarball_path, env, 
check_times, dry_r # centos/rhel 7 does not support some fields in systemd unit files so they need to be commented out if system == 'centos' and revision == '7': for f in services_list: - for k in ['RuntimeDirectory', 'RuntimeDirectoryPreserve', 'LogsDirectory', 'LogsDirectoryMode', 'StateDirectory', 'ConfigurationDirectory']: - cmd = "sed -i -E 's/^(%s=.*)/#\\1/' %s" % (k, f) + for k in ['RuntimeDirectory', 'RuntimeDirectoryPreserve', 'LogsDirectory', 'LogsDirectoryMode', + 'StateDirectory', 'ConfigurationDirectory']: + cmd = r"sed -i -E 's/^(%s=.*)/#\1/' %s" % (k, f) execute(cmd, cwd=rpm_dir, check_times=check_times, dry_run=dry_run) - # do rpm build cmd = "rpmbuild --define 'kea_version %s' --define 'isc_version %s' -ba %s/SPECS/kea.spec" cmd += " -D'_topdir %s'" @@ -2291,7 +2306,8 @@ def _build_rpm(system, revision, features, tarball_path, env, check_times, dry_r if 'install' in features: # install packages - execute('rpm -qa | grep isc-kea | xargs sudo rpm -e', check_times=check_times, dry_run=dry_run, raise_error=False) + execute('rpm -qa | grep isc-kea | xargs sudo rpm -e', check_times=check_times, dry_run=dry_run, + raise_error=False) execute(f'sudo rpm -i {rpm_root_path}/RPMS/{arch.strip()}/*rpm', check_times=check_times, dry_run=dry_run) # check if kea services can be started @@ -2310,7 +2326,8 @@ def _build_deb(system, revision, features, tarball_path, env, check_times, dry_r install_pkgs('apt-transport-https', env=env, check_times=check_times) # See if a .deb package had been previously uploaded. - _, output = execute("curl -o /dev/null -s -w '%{{http_code}}' {}/dists/kea/Release 2>/dev/null".format(repo_url), capture=True) + _, output = execute("curl -o /dev/null -s -w '%{{http_code}}' {}/dists/kea/Release 2>/dev/null".format(repo_url), + capture=True) http_code = output.rstrip() release_file_exists = (http_code == '200') if release_file_exists: @@ -2318,9 +2335,9 @@ def _build_deb(system, revision, features, tarball_path, env, check_times, dry_r else: repo_name = 'kea-%s-%s-%s' % (pkg_version.rsplit('.', 1)[0], system, revision) log.error(f'{repo_url}/dists/kea/Release does not exist. ' - f'This is usually caused by no package existing in {repo_name}. ' - 'You can solve this by uploading any package.' - 'Continuing, but the build will likely fail.') + f'This is usually caused by no package existing in {repo_name}. ' + 'You can solve this by uploading any package.' 
+ 'Continuing, but the build will likely fail.') # try apt update for up to 10 times if there is an error for _ in range(10): @@ -2336,30 +2353,37 @@ def _build_deb(system, revision, features, tarball_path, env, check_times, dry_r src_path = glob.glob('kea-src/*')[0] # update version, etc - execute('sed -i -e s/{VERSION}/%s/ changelog' % pkg_version, cwd='kea-src/kea-%s/debian' % pkg_version, check_times=check_times, dry_run=dry_run) - execute('sed -i -e s/{ISC_VERSION}/%s/ changelog' % pkg_isc_version, cwd='kea-src/kea-%s/debian' % pkg_version, check_times=check_times, dry_run=dry_run) - execute('sed -i -e s/{ISC_VERSION}/%s/ rules' % pkg_isc_version, cwd='kea-src/kea-%s/debian' % pkg_version, check_times=check_times, dry_run=dry_run) + execute('sed -i -e s/{VERSION}/%s/ changelog' % pkg_version, cwd='kea-src/kea-%s/debian' % pkg_version, + check_times=check_times, dry_run=dry_run) + execute('sed -i -e s/{ISC_VERSION}/%s/ changelog' % pkg_isc_version, cwd='kea-src/kea-%s/debian' % pkg_version, + check_times=check_times, dry_run=dry_run) + execute('sed -i -e s/{ISC_VERSION}/%s/ rules' % pkg_isc_version, cwd='kea-src/kea-%s/debian' % pkg_version, + check_times=check_times, dry_run=dry_run) - services_list = ['isc-kea-dhcp4.isc-kea-dhcp4-server.service', 'isc-kea-dhcp6.isc-kea-dhcp6-server.service', 'isc-kea-dhcp-ddns.isc-kea-dhcp-ddns-server.service', 'isc-kea-ctrl-agent.service'] + services_list = ['isc-kea-dhcp4.isc-kea-dhcp4-server.service', 'isc-kea-dhcp6.isc-kea-dhcp6-server.service', + 'isc-kea-dhcp-ddns.isc-kea-dhcp-ddns-server.service', 'isc-kea-ctrl-agent.service'] # debian 9 does not support some fields in systemd unit files so they need to be commented out if system == 'debian' and revision == '9': for f in services_list: - for k in ['RuntimeDirectory', 'RuntimeDirectoryPreserve', 'LogsDirectory', 'LogsDirectoryMode', 'StateDirectory', 'ConfigurationDirectory']: + for k in ['RuntimeDirectory', 'RuntimeDirectoryPreserve', 'LogsDirectory', 'LogsDirectoryMode', + 'StateDirectory', 'ConfigurationDirectory']: cmd = "sed -i -E 's/^(%s=.*)/#\\1/' %s" % (k, f) execute(cmd, cwd='kea-src/kea-%s/debian' % pkg_version, check_times=check_times, dry_run=dry_run) # do deb build env['LIBRARY_PATH'] = f'/usr/lib/{arch.strip()}-linux-gnu' env['LD_LIBRARY_PATH'] = f'/usr/lib/{arch.strip()}-linux-gnu' - cmd = 'debuild --preserve-envvar=LD_LIBRARY_PATH --preserve-envvar=LIBRARY_PATH --preserve-envvar=CCACHE_DIR --prepend-path=/usr/lib/ccache -i -us -uc -b' + cmd = ('debuild --preserve-envvar=LD_LIBRARY_PATH --preserve-envvar=LIBRARY_PATH --preserve-envvar=CCACHE_DIR ' + '--prepend-path=/usr/lib/ccache -i -us -uc -b') execute(cmd, env=env, cwd=src_path, timeout=60 * 40, check_times=check_times, dry_run=dry_run) if 'install' in features: # install packages execute('sudo dpkg -i kea-src/*deb', check_times=check_times, dry_run=dry_run) # check if kea services can be started - services_list = ['isc-kea-dhcp4-server.service', 'isc-kea-dhcp6-server.service', 'isc-kea-dhcp-ddns-server.service', 'isc-kea-ctrl-agent.service'] + services_list = ['isc-kea-dhcp4-server.service', 'isc-kea-dhcp6-server.service', + 'isc-kea-dhcp-ddns-server.service', 'isc-kea-ctrl-agent.service'] _check_installed_rpm_or_debs(services_list) @@ -2379,7 +2403,8 @@ def _build_alpine_apk(system, revision, features, tarball_path, env, check_times tardir = os.path.dirname(tarball_path) if not tardir: tardir = '.' 
- cmd = 'cd %s; export kea_chks=`sha512sum kea-%s.tar.gz`; cd -; sed -i -e "s/KEA_CHECKSUM/${kea_chks}/" kea-src/APKBUILD' % (tardir, pkg_version) + cmd = ('cd %s; export kea_chks=`sha512sum kea-%s.tar.gz`; cd -; ' + 'sed -i -e "s/KEA_CHECKSUM/${kea_chks}/" kea-src/APKBUILD' % (tardir, pkg_version)) execute(cmd, check_times=check_times, dry_run=dry_run) cmd = 'sed -i -e s/KEA_VERSION/%s/ kea-src/APKBUILD' % pkg_version execute(cmd, check_times=check_times, dry_run=dry_run) @@ -2595,12 +2620,12 @@ def ensure_hammer_deps(): if exitcode != 0: _install_vagrant() else: - m = re.search('Installed Version: ([\d\.]+)', out, re.I) + m = re.search(r'Installed Version: ([\d\.]+)', out, re.I) ver = m.group(1) vagrant = [int(v) for v in ver.split('.')] recommended_vagrant = [int(v) for v in RECOMMENDED_VAGRANT_VERSION.split('.')] if vagrant < recommended_vagrant: - m = re.search('Latest Version: ([\d\.]+)', out, re.I) + m = re.search(r'Latest Version: ([\d\.]+)', out, re.I) if m is None: # Vagrant was unable to check for the latest version of Vagrant. # Attempt to upgrade to the recommended version to fix it. @@ -2609,7 +2634,6 @@ def ensure_hammer_deps(): ver = m.group(1) _install_vagrant(ver, upgrade=True) - exitcode = execute('vagrant plugin list | grep vagrant-lxc', raise_error=False) if exitcode != 0: execute('vagrant plugin install vagrant-lxc') @@ -2727,7 +2751,6 @@ def parse_args(): help='Do not allow executing commands infinitely.') parent_parser2.add_argument('-n', '--dry-run', action='store_true', help='Print only what would be done.') - parser = subparsers.add_parser('ensure-hammer-deps', help="Install Hammer dependencies on current, host system.") parser = subparsers.add_parser('supported-systems', @@ -2776,17 +2799,17 @@ def parse_args(): "To get the list of created systems run: ./hammer.py created-systems.") parser.add_argument('-d', '--directory', help='Path to directory with Vagrantfile.') parser = subparsers.add_parser('package-box', - help="Prepare system from scratch and package it into Vagrant Box. Prepared box can be " - "later deployed to Vagrant Cloud.", + help="Prepare system from scratch and package it into Vagrant Box. 
" + "Prepared box can be later deployed to Vagrant Cloud.", parents=[parent_parser1, parent_parser2]) parser.add_argument('--repository-url', default=None, help='Repository for 3rd party dependencies and for uploading built packages.') parser.add_argument('-u', '--reuse', action='store_true', - help='Reuse existing system image, otherwise (default case) if there is any existing then destroy it first.') + help='Reuse existing system image, otherwise (default case) if there is any existing then ' + 'destroy it first.') parser.add_argument('-k', '--skip-upload', action='store_true', help='Skip uploading prepared box to cloud, otherwise (default case) upload it.') - args = main_parser.parse_args() return args, main_parser @@ -2796,7 +2819,7 @@ def list_supported_systems(): """List systems hammer can support (with supported providers).""" for system in SYSTEMS: print('%s:' % system) - for release,supported in SYSTEMS[system].items(): + for release, supported in SYSTEMS[system].items(): if not supported: continue providers = [] @@ -2807,6 +2830,7 @@ def list_supported_systems(): providers = ', '.join(providers) print(' - %s: %s' % (release, providers)) + def list_created_systems(): """List VMs that are created on this host by Hammer.""" _, output = execute('vagrant global-status --prune', quiet=True, capture=True) @@ -2943,8 +2967,8 @@ def _check_system_revision(system, revision): log.error(msg) sys.exit(1) if not SYSTEMS[system][revision]: - log.warning(f'{system} ${revision} is no longer officially supported. ' \ - 'The script will continue in a best-effort manner.') + log.warning(f'{system} ${revision} is no longer officially supported. ' + 'The script will continue in a best-effort manner.') def _prepare_ccache_dir(ccache_dir, system, revision): @@ -2995,7 +3019,7 @@ def upload_to_repo(args, pkgs_dir): file_ext = '' if system in ['ubuntu', 'debian']: upload_cmd += ' -X POST -H "Content-Type: multipart/form-data" --data-binary "@%s" ' - file_ext = 'deb' # include both '.deb' and '.ddeb' files + file_ext = 'deb' # include both '.deb' and '.ddeb' files elif system in ['fedora', 'centos', 'rhel', 'rocky']: upload_cmd += ' --upload-file %s ' @@ -3020,7 +3044,7 @@ def upload_to_repo(args, pkgs_dir): log.info("file path: %s", fp) cmd = upload_cmd % fp - attempts=4 + attempts = 4 while attempts > 0: exitcode, output = execute(cmd, capture=True, raise_error=False) if exitcode != 0 and '504 Gateway Time-out' in output: @@ -3051,7 +3075,7 @@ def build_cmd(args): args.ccache_dir, args.pkg_version, args.pkg_isc_version, args.repository_url, pkgs_dir) # NOTE: upload the locally build packages and leave; the rest of the code is vagrant specific if args.upload: - upload_to_repo(args,pkgs_dir) + upload_to_repo(args, pkgs_dir) return @@ -3147,7 +3171,8 @@ def main(): features = set(['docs', 'perfdhcp', 'shell', 'mysql', 'pgsql', 'gssapi', 'native-pkg']) log.info('Enabled features: %s', ' '.join(features)) - package_box(args.provider, args.system, args.revision, features, args.dry_run, args.check_times, args.reuse, args.skip_upload) + package_box(args.provider, args.system, args.revision, features, args.dry_run, args.check_times, args.reuse, + args.skip_upload) elif args.command == "prepare-system": prepare_system_cmd(args) diff --git a/src/bin/shell/kea_conn.py b/src/bin/shell/kea_conn.py index 887e4d004a..c2f27661ed 100644 --- a/src/bin/shell/kea_conn.py +++ b/src/bin/shell/kea_conn.py @@ -1,4 +1,4 @@ -# Copyright (C) 2017-2021 Internet Systems Consortium, Inc. 
("ISC") +# Copyright (C) 2017-2024 Internet Systems Consortium, Inc. ("ISC") # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this @@ -8,6 +8,7 @@ This file contains classes used for communication with Control Agent. """ + class CARequest: """ This class defines the HTTP request to be sent. @@ -54,7 +55,7 @@ class CARequest: if self.service is not None: self.service = [x for x in self.service if x] if len(self.service) > 0: - self.content += ', "service": ["' + '","'.join(self.service) + '"]' + self.content += ', "service": ["' + '","'.join(self.service) + '"]' if len(self.args) > 1: self.content += ', "arguments": { ' + self.args + ' }' self.content += ' }' @@ -65,12 +66,12 @@ class CARequest: In particular, this method generates Content-Length and its value. """ - self.headers['User-Agent'] = "Kea-shell/%s"%(self.version) + self.headers['User-Agent'] = "Kea-shell/%s" % (self.version) self.headers['Accept'] = '*/*' if self.auth is not None: - self.headers['Authorization'] = "Basic %s"%(self.auth) + self.headers['Authorization'] = "Basic %s" % (self.auth) self.headers['Content-Type'] = 'application/json' - self.headers['Content-Length'] = "%d"%(len(self.content)) + self.headers['Content-Length'] = "%d" % (len(self.content)) class CAResponse: diff --git a/src/bin/shell/kea_connector3.py b/src/bin/shell/kea_connector3.py index a14fb2618f..bcc493a1a0 100644 --- a/src/bin/shell/kea_connector3.py +++ b/src/bin/shell/kea_connector3.py @@ -12,7 +12,8 @@ import urllib.request import ssl import os -from kea_conn import CAResponse # CARequest +from kea_conn import CAResponse # CARequest + def send_to_control_agent(params): """ Sends a request to Control Agent, receives a response and returns it.""" diff --git a/src/bin/shell/tests/shell_unittest.py.in b/src/bin/shell/tests/shell_unittest.py.in index 08a05f53dc..1e097ae46b 100644 --- a/src/bin/shell/tests/shell_unittest.py.in +++ b/src/bin/shell/tests/shell_unittest.py.in @@ -1,6 +1,6 @@ #!@PYTHON@ -# Copyright (C) 2017-2020 Internet Systems Consortium, Inc. ("ISC") +# Copyright (C) 2017-2024 Internet Systems Consortium, Inc. ("ISC") # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this @@ -16,6 +16,7 @@ from base64 import b64encode from kea_conn import CARequest + class CARequestUnitTest(unittest.TestCase): """ This class is dedicated to testing CARequest class. That class @@ -195,5 +196,6 @@ class CARequestUnitTest(unittest.TestCase): This method is called after each test. Currently it does nothing. """ + if __name__ == '__main__': unittest.main() diff --git a/src/lib/util/python/const2hdr.py b/src/lib/util/python/const2hdr.py index e89a735459..4882d93563 100644 --- a/src/lib/util/python/const2hdr.py +++ b/src/lib/util/python/const2hdr.py @@ -1,4 +1,4 @@ -# Copyright (C) 2013-2017 Internet Systems Consortium, Inc. ("ISC") +# Copyright (C) 2013-2024 Internet Systems Consortium, Inc. ("ISC") # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this @@ -39,13 +39,14 @@ with open(filename_in) as file_in, open(filename_out, "w") as file_out: file_out.write("#ifndef BIND10_COMMON_DEFS_H\n" + "#define BIND10_COMMON_DEFS_H\n" + "\n" + - "// \\file " + filename_out + "\n" + -'''// \\brief Common shared constants\n -// This file contains common definitions of constants used across the sources. 
-// It includes, but is not limited to the definitions of messages sent from -// one process to another. Since the names should be self-explanatory and -// the variables here are used mostly to synchronize the same values across -// multiple programs, separate documentation for each variable is not provided. + "// \\file " + filename_out + ''' +/// @brief Common shared constants +/// +/// This file contains common definitions of constants used across the sources. +/// It includes, but is not limited to the definitions of messages sent from +/// one process to another. Since the names should be self-explanatory and +/// the variables here are used mostly to synchronize the same values across +/// multiple programs, separate documentation for each variable is not provided. ''') continue # Extract the constant. Remove the values and add "extern" diff --git a/src/lib/util/python/gen_wiredata.py.in b/src/lib/util/python/gen_wiredata.py.in index fe2d0c4252..923e2aadeb 100644 --- a/src/lib/util/python/gen_wiredata.py.in +++ b/src/lib/util/python/gen_wiredata.py.in @@ -316,7 +316,12 @@ What you are expected to do is as follows: examples. """ -import configparser, re, time, socket, sys, base64 +import configparser +import re +import time +import socket +import sys +import base64 from datetime import datetime from optparse import OptionParser @@ -326,44 +331,42 @@ re_string = re.compile(r"\'(.*)\'$") dnssec_timefmt = '%Y%m%d%H%M%S' -dict_qr = { 'query' : 0, 'response' : 1 } -dict_opcode = { 'query' : 0, 'iquery' : 1, 'status' : 2, 'notify' : 4, - 'update' : 5 } +dict_qr = {'query': 0, 'response': 1} +dict_opcode = {'query': 0, 'iquery': 1, 'status': 2, 'notify': 4, + 'update': 5} rdict_opcode = dict([(dict_opcode[k], k.upper()) for k in dict_opcode.keys()]) -dict_rcode = { 'noerror' : 0, 'formerr' : 1, 'servfail' : 2, 'nxdomain' : 3, - 'notimp' : 4, 'refused' : 5, 'yxdomain' : 6, 'yxrrset' : 7, - 'nxrrset' : 8, 'notauth' : 9, 'notzone' : 10 } +dict_rcode = {'noerror': 0, 'formerr': 1, 'servfail': 2, 'nxdomain': 3, + 'notimp': 4, 'refused': 5, 'yxdomain': 6, 'yxrrset': 7, + 'nxrrset': 8, 'notauth': 9, 'notzone': 10} rdict_rcode = dict([(dict_rcode[k], k.upper()) for k in dict_rcode.keys()]) -dict_rrtype = { 'none' : 0, 'a' : 1, 'ns' : 2, 'md' : 3, 'mf' : 4, 'cname' : 5, - 'soa' : 6, 'mb' : 7, 'mg' : 8, 'mr' : 9, 'null' : 10, - 'wks' : 11, 'ptr' : 12, 'hinfo' : 13, 'minfo' : 14, 'mx' : 15, - 'txt' : 16, 'rp' : 17, 'afsdb' : 18, 'x25' : 19, 'isdn' : 20, - 'rt' : 21, 'nsap' : 22, 'nsap_tr' : 23, 'sig' : 24, 'key' : 25, - 'px' : 26, 'gpos' : 27, 'aaaa' : 28, 'loc' : 29, 'nxt' : 30, - 'srv' : 33, 'naptr' : 35, 'kx' : 36, 'cert' : 37, 'a6' : 38, - 'dname' : 39, 'opt' : 41, 'apl' : 42, 'ds' : 43, 'sshfp' : 44, - 'ipseckey' : 45, 'rrsig' : 46, 'nsec' : 47, 'dnskey' : 48, - 'dhcid' : 49, 'nsec3' : 50, 'nsec3param' : 51, 'tlsa' : 52, 'hip' : 55, - 'spf' : 99, 'unspec' : 103, 'tkey' : 249, 'tsig' : 250, - 'dlv' : 32769, 'ixfr' : 251, 'axfr' : 252, 'mailb' : 253, - 'maila' : 254, 'any' : 255, 'caa' : 257 } +dict_rrtype = {'none': 0, 'a': 1, 'ns': 2, 'md': 3, 'mf': 4, 'cname': 5, + 'soa': 6, 'mb': 7, 'mg': 8, 'mr': 9, 'null': 10, + 'wks': 11, 'ptr': 12, 'hinfo': 13, 'minfo': 14, 'mx': 15, + 'txt': 16, 'rp': 17, 'afsdb': 18, 'x25': 19, 'isdn': 20, + 'rt': 21, 'nsap': 22, 'nsap_tr': 23, 'sig': 24, 'key': 25, + 'px': 26, 'gpos': 27, 'aaaa': 28, 'loc': 29, 'nxt': 30, + 'srv': 33, 'naptr': 35, 'kx': 36, 'cert': 37, 'a6': 38, + 'dname': 39, 'opt': 41, 'apl': 42, 'ds': 43, 'sshfp': 44, + 'ipseckey': 45, 'rrsig': 46, 
'nsec': 47, 'dnskey': 48, + 'dhcid': 49, 'nsec3': 50, 'nsec3param': 51, 'tlsa': 52, 'hip': 55, + 'spf': 99, 'unspec': 103, 'tkey': 249, 'tsig': 250, + 'dlv': 32769, 'ixfr': 251, 'axfr': 252, 'mailb': 253, + 'maila': 254, 'any': 255, 'caa': 257} rdict_rrtype = dict([(dict_rrtype[k], k.upper()) for k in dict_rrtype.keys()]) -dict_rrclass = { 'in' : 1, 'ch' : 3, 'hs' : 4, 'any' : 255 } -rdict_rrclass = dict([(dict_rrclass[k], k.upper()) for k in \ - dict_rrclass.keys()]) -dict_algorithm = { 'rsamd5' : 1, 'dh' : 2, 'dsa' : 3, 'ecc' : 4, - 'rsasha1' : 5 } -dict_nsec3_algorithm = { 'reserved' : 0, 'sha1' : 1 } -rdict_algorithm = dict([(dict_algorithm[k], k.upper()) for k in \ - dict_algorithm.keys()]) -rdict_nsec3_algorithm = dict([(dict_nsec3_algorithm[k], k.upper()) for k in \ - dict_nsec3_algorithm.keys()]) - -header_xtables = { 'qr' : dict_qr, 'opcode' : dict_opcode, - 'rcode' : dict_rcode } -question_xtables = { 'rrtype' : dict_rrtype, 'rrclass' : dict_rrclass } - -def parse_value(value, xtable = {}): +dict_rrclass = {'in': 1, 'ch': 3, 'hs': 4, 'any': 255} +rdict_rrclass = dict([(dict_rrclass[k], k.upper()) for k in dict_rrclass.keys()]) +dict_algorithm = {'rsamd5': 1, 'dh': 2, 'dsa': 3, 'ecc': 4, + 'rsasha1': 5} +dict_nsec3_algorithm = {'reserved': 0, 'sha1': 1} +rdict_algorithm = dict([(dict_algorithm[k], k.upper()) for k in dict_algorithm.keys()]) +rdict_nsec3_algorithm = dict([(dict_nsec3_algorithm[k], k.upper()) for k in dict_nsec3_algorithm.keys()]) + +header_xtables = {'qr': dict_qr, 'opcode': dict_opcode, + 'rcode': dict_rcode} +question_xtables = {'rrtype': dict_rrtype, 'rrclass': dict_rrclass} + + +def parse_value(value, xtable={}): if re.search(re_hex, value): return int(value, 16) if re.search(re_decimal, value): @@ -376,48 +379,54 @@ def parse_value(value, xtable = {}): return xtable[lovalue] return value + def code_totext(code, dict): if code in dict.keys(): return dict[code] + '(' + str(code) + ')' return str(code) + def encode_name(name, absolute=True): # make sure the name is dot-terminated. duplicate dots will be ignored # below. name += '.' 
labels = name.split('.') wire = '' - for l in labels: - if len(l) > 4 and l[0:4] == 'ptr=': + for label in labels: + if len(label) > 4 and label[0:4] == 'ptr=': # special meta-syntax for compression pointer wire += '%04x' % (0xc000 | int(l[4:])) break - if absolute or len(l) > 0: - wire += '%02x' % len(l) - wire += ''.join(['%02x' % ord(ch) for ch in l]) - if len(l) == 0: + if absolute or len(label) > 0: + wire += '%02x' % len(label) + wire += ''.join(['%02x' % ord(ch) for ch in label]) + if len(label) == 0: break return wire + def encode_string(name, len=None): if type(name) is int and len is not None: return '%0.*x' % (len * 2, name) return ''.join(['%02x' % ord(ch) for ch in name]) + def encode_bytes(name, len=None): if type(name) is int and len is not None: return '%0.*x' % (len * 2, name) return ''.join(['%02x' % ch for ch in name]) + def count_namelabels(name): if name == '.': # special case return 0 - m = re.match('^(.*)\.$', name) + m = re.match(r'^(.*)\.$', name) if m: name = m.group(1) return len(name.split('.')) -def get_config(config, section, configobj, xtables = {}): + +def get_config(config, section, configobj, xtables={}): try: for field in config.options(section): value = config.get(section, field) @@ -430,12 +439,14 @@ def get_config(config, section, configobj, xtables = {}): return False return True + def print_header(f, input_file): f.write('''### ### This data file was auto-generated from ''' + input_file + ''' ### ''') + class Name: '''Implements rendering a single domain name in the test data format. @@ -449,7 +460,8 @@ class Name: ''' name = 'example.com' - pointer = None # no compression by default + pointer = None # no compression by default + def dump(self, f): name = self.name if self.pointer is not None: @@ -464,6 +476,7 @@ class Name: f.write('%s' % name_wire) f.write('\n') + class DNSHeader: '''Implements rendering a DNS Header section in the test data format. @@ -516,6 +529,7 @@ class DNSHeader: f.write('%04x %04x %04x %04x\n' % (self.qdcount, self.ancount, self.nscount, self.arcount)) + class DNSQuestion: '''Implements rendering a DNS question in the test data format. @@ -545,6 +559,7 @@ class DNSQuestion: f.write(encode_name(self.name)) f.write(' %04x %04x\n' % (self.rrtype, self.rrclass)) + class EDNS: '''Implements rendering EDNS OPT RR in the test data format. @@ -569,13 +584,14 @@ class EDNS: do = 0 mbz = 0 rdlen = 0 + def dump(self, f): f.write('\n# EDNS OPT RR\n') f.write('# NAME=%s TYPE=%s UDPSize=%d ExtRcode=%s Version=%s DO=%d\n' % (self.name, code_totext(dict_rrtype['opt'], rdict_rrtype), self.udpsize, self.extrcode, self.version, 1 if self.do else 0)) - + code_vers = (self.extrcode << 8) | (self.version & 0x00ff) extflags = (self.do << 15) | (self.mbz & ~0x8000) f.write('%s %04x %04x %04x %04x\n' % @@ -584,6 +600,7 @@ class EDNS: f.write('# RDLEN=%d\n' % self.rdlen) f.write('%04x\n' % self.rdlen) + class RR: '''This is a base class for various types of RR test data. For each RR type (A, AAAA, NS, etc), we define a derived class of RR @@ -647,6 +664,7 @@ class RR: f.write('\n# %s RDATA%s\n' % (type_txt, rdlen_spec)) f.write('%s\n' % rdlen_data) + class A(RR): '''Implements rendering A RDATA (of class IN) in the test data format. @@ -667,6 +685,7 @@ class A(RR): f.write('%02x%02x%02x%02x\n' % (bin_address[0], bin_address[1], bin_address[2], bin_address[3])) + class AAAA(RR): '''Implements rendering AAAA RDATA (of class IN) in the test data format. 
@@ -688,6 +707,7 @@ class AAAA(RR): [f.write('%02x' % x) for x in bin_address] f.write('\n') + class NS(RR): '''Implements rendering NS RDATA in the test data format. @@ -707,6 +727,7 @@ class NS(RR): f.write('# NS name=%s\n' % (self.nsname)) f.write('%s\n' % nsname_wire) + class SOA(RR): '''Implements rendering SOA RDATA in the test data format. @@ -728,6 +749,7 @@ class SOA(RR): retry = 300 expire = 3600000 minimum = 1200 + def dump(self, f): mname_wire = encode_name(self.mname) rname_wire = encode_name(self.rname) @@ -743,6 +765,7 @@ class SOA(RR): self.retry, self.expire, self.minimum)) + class TXT(RR): '''Implements rendering TXT RDATA in the test data format. @@ -796,6 +819,7 @@ class TXT(RR): ' ' if len(wirestring_list[i]) > 0 else '', wirestring_list[i])) + class RP(RR): '''Implements rendering RP RDATA in the test data format. @@ -807,6 +831,7 @@ class RP(RR): ''' mailbox = 'root.example.com' text = 'rp-text.example.com' + def dump(self, f): mailbox_wire = encode_name(self.mailbox) text_wire = encode_name(self.text) @@ -818,6 +843,7 @@ class RP(RR): f.write('# MAILBOX=%s TEXT=%s\n' % (self.mailbox, self.text)) f.write('%s %s\n' % (mailbox_wire, text_wire)) + class SSHFP(RR): '''Implements rendering SSHFP RDATA in the test data format. @@ -830,6 +856,7 @@ class SSHFP(RR): algorithm = 2 fingerprint_type = 1 fingerprint = '123456789abcdef67890123456789abcdef67890' + def dump(self, f): if self.rdlen is None: self.rdlen = 2 + (len(self.fingerprint) / 2) @@ -841,6 +868,7 @@ class SSHFP(RR): self.fingerprint)) f.write('%02x %02x %s\n' % (self.algorithm, self.fingerprint_type, self.fingerprint)) + class CAA(RR): '''Implements rendering CAA RDATA in the test data format. @@ -853,20 +881,20 @@ class CAA(RR): flags = 0 tag = 'issue' value = 'ca.example.net' + def dump(self, f): if self.rdlen is None: self.rdlen = 1 + 1 + len(self.tag) + len(self.value) else: self.rdlen = int(self.rdlen) self.dump_header(f, self.rdlen) - f.write('# FLAGS=%d TAG=%s VALUE=%s\n' % \ - (self.flags, self.tag, self.value)) - f.write('%02x %02x ' % \ - (self.flags, len(self.tag))) + f.write('# FLAGS=%d TAG=%s VALUE=%s\n' % (self.flags, self.tag, self.value)) + f.write('%02x %02x ' % (self.flags, len(self.tag))) f.write(encode_string(self.tag)) f.write(encode_string(self.value)) f.write('\n') + class DNSKEY(RR): '''Implements rendering DNSKEY RDATA in the test data format. @@ -903,6 +931,7 @@ class DNSKEY(RR): f.write('# DIGEST=%s\n' % (self.digest)) f.write('%s\n' % (encode_bytes(decoded_digest))) + class NSECBASE(RR): '''Implements rendering NSEC/NSEC3 type bitmaps commonly used for these RRs. 
The NSEC and NSEC3 classes will be inherited from this @@ -928,8 +957,9 @@ class NSECBASE(RR): ''' nbitmap = 1 # number of bitmaps block = 0 - maplen = None # default bitmap length, auto-calculate + maplen = None # default bitmap length, auto-calculate bitmap = '040000000003' # an arbitrarily chosen bitmap sample + def dump(self, f): # first, construct the bitmap data block_list = [] @@ -946,17 +976,16 @@ class NSECBASE(RR): maplen_list.append(self.__dict__[key_maplen]) else: maplen_list.append(self.maplen) - if maplen_list[-1] is None: # calculate it if not specified + if maplen_list[-1] is None: # calculate it if not specified maplen_list[-1] = int(len(bitmap_list[-1]) / 2) key_block = 'block' + str(i) if key_block in self.__dict__: - block_list.append(self.__dict__[key_block]) + block_list.append(self.__dict__[key_block]) else: block_list.append(self.block) # dump RR-type specific part (NSEC or NSEC3) - self.dump_fixedpart(f, 2 * self.nbitmap + \ - int(len(''.join(bitmap_list)) / 2)) + self.dump_fixedpart(f, 2 * self.nbitmap + int(len(''.join(bitmap_list)) / 2)) # dump the bitmap for i in range(0, self.nbitmap): @@ -965,6 +994,7 @@ class NSECBASE(RR): f.write('%02x %02x %s\n' % (block_list[i], maplen_list[i], bitmap_list[i])) + class NSEC(NSECBASE): '''Implements rendering NSEC RDATA in the test data format. @@ -976,6 +1006,7 @@ class NSEC(NSECBASE): ''' nextname = 'next.example.com' + def dump_fixedpart(self, f, bitmap_totallen): name_wire = encode_name(self.nextname) if self.rdlen is None: @@ -987,6 +1018,7 @@ class NSEC(NSECBASE): int(len(name_wire) / 2))) f.write('%s\n' % name_wire) + class NSEC3PARAM(RR): '''Implements rendering NSEC3PARAM RDATA in the test data format. @@ -1035,6 +1067,7 @@ class NSEC3PARAM(RR): ' ' if len(self.salt) > 0 else '', encode_string(self.salt))) + class NSEC3(NSECBASE, NSEC3PARAM): '''Implements rendering NSEC3 RDATA in the test data format. @@ -1049,12 +1082,12 @@ class NSEC3(NSECBASE, NSEC3PARAM): hashlen = 20 hash = 'h' * hashlen + def dump_fixedpart(self, f, bitmap_totallen): if self.rdlen is None: # if rdlen needs to be calculated, it must be based on the bitmap # length, because the configured maplen can be fake. - self.rdlen = 4 + 1 + len(self.salt) + 1 + len(self.hash) \ - + bitmap_totallen + self.rdlen = 4 + 1 + len(self.salt) + 1 + len(self.hash) + bitmap_totallen self.dump_header(f, self.rdlen) self._dump_params(f) f.write("# Hash Len=%d, Hash='%s'\n" % (self.hashlen, self.hash)) @@ -1062,6 +1095,7 @@ class NSEC3(NSECBASE, NSEC3PARAM): ' ' if len(self.hash) > 0 else '', encode_string(self.hash))) + class RRSIG(RR): '''Implements rendering RRSIG RDATA in the test data format. @@ -1130,6 +1164,7 @@ class RRSIG(RR): f.write('# Tag=%d Signer=%s and Signature\n' % (self.tag, self.signer)) f.write('%04x %s %s\n' % (self.tag, name_wire, sig_wire)) + class TKEY(RR): '''Implements rendering TKEY RDATA in the test data format. @@ -1201,8 +1236,7 @@ class TKEY(RR): else: other_data = encode_string(self.other_data, other_len) if self.rdlen is None: - self.rdlen = int(len(name_wire) / 2 + 16 + len(key) / 2 + \ - len(other_data) / 2) + self.rdlen = int(len(name_wire) / 2 + 16 + len(key) / 2 + len(other_data) / 2) self.dump_header(f, self.rdlen) f.write('# Algorithm=%s\n' % self.algorithm) f.write('%s\n' % name_wire) @@ -1216,6 +1250,7 @@ class TKEY(RR): f.write('%04x%s\n' % (other_len, ' ' + other_data if len(other_data) > 0 else '')) + class TLSA(RR): '''Implements rendering TLSA RDATA in the test data format. 
@@ -1230,18 +1265,20 @@ class TLSA(RR):
     selector = 0
     matching_type = 1
     certificate_association_data = 'd2abde240d7cd3ee6b4b28c54df034b97983a1d16e8a410e4561cb106618e971'
+
     def dump(self, f):
         if self.rdlen is None:
             self.rdlen = 2 + (len(self.certificate_association_data) / 2)
         else:
             self.rdlen = int(self.rdlen)
         self.dump_header(f, self.rdlen)
-        f.write('# CERTIFICATE_USAGE=%d SELECTOR=%d MATCHING_TYPE=%d CERTIFICATE_ASSOCIATION_DATA=%s\n' %\
-                (self.certificate_usage, self.selector, self.matching_type,\
+        f.write('# CERTIFICATE_USAGE=%d SELECTOR=%d MATCHING_TYPE=%d CERTIFICATE_ASSOCIATION_DATA=%s\n' %
+                (self.certificate_usage, self.selector, self.matching_type,
                  self.certificate_association_data))
-        f.write('%02x %02x %02x %s\n' % (self.certificate_usage, self.selector, self.matching_type,\
+        f.write('%02x %02x %02x %s\n' % (self.certificate_usage, self.selector, self.matching_type,
                                          self.certificate_association_data))
 
+
 class TSIG(RR):
     '''Implements rendering TSIG RDATA in the test data format.
@@ -1293,7 +1330,7 @@ class TSIG(RR):
     error = 0
     other_len = None  # 6 if error is BADTIME; otherwise 0
     other_data = None  # use time_signed + fudge + 1 for BADTIME
-    dict_macsize = { 'hmac-md5' : 16, 'hmac-sha1' : 20, 'hmac-sha256' : 32 }
+    dict_macsize = {'hmac-md5': 16, 'hmac-sha1': 20, 'hmac-sha256': 32}
 
     # TSIG has some special defaults
     def __init__(self):
@@ -1312,21 +1349,18 @@ class TSIG(RR):
             mac_size = self.dict_macsize[self.algorithm]
         else:
             raise RuntimeError('TSIG Mac Size cannot be determined')
-        mac = encode_string('x' * mac_size) if self.mac is None else \
-            encode_string(self.mac, mac_size)
+        mac = encode_string('x' * mac_size) if self.mac is None else encode_string(self.mac, mac_size)
         other_len = self.other_len
         if other_len is None:
             # 18 = BADTIME
             other_len = 6 if self.error == 18 else 0
         other_data = self.other_data
         if other_data is None:
-            other_data = '%012x' % (self.time_signed + self.fudge + 1) \
-                if self.error == 18 else ''
+            other_data = '%012x' % (self.time_signed + self.fudge + 1) if self.error == 18 else ''
         else:
             other_data = encode_string(self.other_data, other_len)
         if self.rdlen is None:
-            self.rdlen = int(len(name_wire) / 2 + 16 + len(mac) / 2 + \
-                             len(other_data) / 2)
+            self.rdlen = int(len(name_wire) / 2 + 16 + len(mac) / 2 + len(other_data) / 2)
         self.dump_header(f, self.rdlen)
         f.write('# Algorithm=%s Time-Signed=%d Fudge=%d\n' %
                 (self.algorithm, self.time_signed, self.fudge))
@@ -1334,11 +1368,12 @@ class TSIG(RR):
         f.write('# MAC Size=%d MAC=(see hex)\n' % mac_size)
         f.write('%04x%s\n' % (mac_size, ' ' + mac if len(mac) > 0 else ''))
         f.write('# Original-ID=%d Error=%d\n' % (self.original_id, self.error))
-        f.write('%04x %04x\n' %  (self.original_id, self.error))
+        f.write('%04x %04x\n' % (self.original_id, self.error))
         f.write('# Other-Len=%d Other-Data=(see hex)\n' % other_len)
         f.write('%04x%s\n' % (other_len,
                               ' ' + other_data if len(other_data) > 0 else ''))
 
+
 class MINFO(RR):
     '''Implements rendering MINFO RDATA in the test data format.
@@ -1350,6 +1385,7 @@ class MINFO(RR):
     '''
     rmailbox = 'rmailbox.example.com'
     emailbox = 'emailbox.example.com'
+
     def dump(self, f):
         rmailbox_wire = encode_name(self.rmailbox)
         emailbox_wire = encode_name(self.emailbox)
@@ -1361,6 +1397,7 @@ class MINFO(RR):
         f.write('# RMAILBOX=%s EMAILBOX=%s\n' % (self.rmailbox, self.emailbox))
         f.write('%s %s\n' % (rmailbox_wire, emailbox_wire))
 
+
 class AFSDB(RR):
     '''Implements rendering AFSDB RDATA in the test data format.
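Several hunks above, including the TKEY and TSIG ones, only remove line-continuation backslashes flagged as E502: inside parentheses, brackets, or braces, Python joins lines implicitly, so the backslash is redundant and breaks silently if trailing whitespace follows it. A small self-contained illustration (the variables are made up for the example):

    name_wire, mac, other_data = 'x' * 20, 'y' * 32, ''

    # Flagged by pycodestyle (E502): the backslash adds nothing because
    # the open parenthesis already continues the expression.
    rdlen = int(len(name_wire) / 2 + 16 + len(mac) / 2 + \
                len(other_data) / 2)

    # Preferred: rely on implicit line joining inside the brackets.
    rdlen = int(len(name_wire) / 2 + 16 + len(mac) / 2 +
                len(other_data) / 2)
    print(rdlen)  # 42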
@@ -1372,6 +1409,7 @@ class AFSDB(RR):
     '''
     subtype = 1
     server = 'afsdb.example.com'
+
     def dump(self, f):
         server_wire = encode_name(self.server)
         if self.rdlen is None:
@@ -1382,11 +1420,12 @@ class AFSDB(RR):
         f.write('# SUBTYPE=%d SERVER=%s\n' % (self.subtype, self.server))
         f.write('%04x %s\n' % (self.subtype, server_wire))
 
+
 # Build section-class mapping
-config_param = { 'name' : (Name, {}),
-                 'header' : (DNSHeader, header_xtables),
-                 'question' : (DNSQuestion, question_xtables),
-                 'edns' : (EDNS, {}) }
+config_param = {'name': (Name, {}),
+                'header': (DNSHeader, header_xtables),
+                'question': (DNSQuestion, question_xtables),
+                'edns': (EDNS, {})}
 for rrtype in dict_rrtype.keys():
     # For any supported RR types add the tuple of (RR_CLASS, {}).
     # We expect KeyError as not all the types are supported, and simply
@@ -1397,14 +1436,17 @@ for rrtype in dict_rrtype.keys():
     except KeyError:
         pass
 
+
 def get_config_param(section):
     s = section
-    m = re.match('^([^:]+)/\d+$', section)
+    m = re.match(r'^([^:]+)/\d+$', section)
     if m:
         s = m.group(1)
     return config_param[s]
 
-usage = '''usage: %prog [options] input_file'''
+
+usage = 'usage: %prog [options] input_file'
+
 
 if __name__ == "__main__":
     parser = OptionParser(usage=usage)
@@ -1419,7 +1461,7 @@ if __name__ == "__main__":
 
     outputfile = options.output
     if not outputfile:
-        m = re.match('(.*)\.[^.]+$', configfile)
+        m = re.match(r'(.*)\.[^.]+$', configfile)
         if m:
             outputfile = m.group(1)
         else:
diff --git a/src/share/database/scripts/utils/are-scripts-in-sync.py b/src/share/database/scripts/utils/are-scripts-in-sync.py
index 4ac33e0cfa..7a8fd9a58d 100755
--- a/src/share/database/scripts/utils/are-scripts-in-sync.py
+++ b/src/share/database/scripts/utils/are-scripts-in-sync.py
@@ -60,12 +60,14 @@ def filter_the_noise(file, text, is_upgrade_script):
         if m is not None:
             version = max(version, int(m[0]) if len(m) else 0)
     if version == 0:
-        print(f"ERROR: expected schema version upgrade statement of format \"SET version = '\\d+', minor = '\\d+';\" in file \"{file}\", but not found.", file=sys.stderr)
+        print("ERROR: expected schema version upgrade statement of format "
+              f"\"SET version = '\\d+', minor = '\\d+';\" in file \"{file}\", but not found.", file=sys.stderr)
         sys.exit(2)
 
     append = False
     result = []
-    first_delimiter = r'< 0
 
     def has_column(self, table_name, column_name):
-        sql = db.text(
-            "SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = :table_name AND COLUMN_NAME = :column_name"
-        )
+        sql = db.text("SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS "
+                      "WHERE TABLE_NAME = :table_name AND COLUMN_NAME = :column_name")
         result = self.connection.execute(sql, {"table_name": table_name, "column_name": column_name}).fetchone()
         return result[0] > 0
 
+
 def traverse(state, parents, json_object):
     if state.config.is_ignored_parameter(state.get_current_name()):
         return
@@ -138,7 +143,9 @@ def traverse(state, parents, json_object):
 
     elif state.get_path_len() == 2 and not isinstance(json_object, list) and not isinstance(json_object, dict):
         # Global primitive value, e.g. boolean. Kea has a dedicated table for them.
-        comment = cprint(text='Set primitive value {0} in {1} table'.format(state.sql_current_name(), state.sql_global_table_name()), color='green')
+        comment = cprint(text='Set primitive value {0} in {1} table'.format(state.sql_current_name(),
+                                                                            state.sql_global_table_name()),
+                         color='green')
 
     else:
         # Handle primitives at lower levels.
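The two re.match() changes above fix W605: in an ordinary string literal, '\d' and '\.' are invalid escape sequences that current Python 3 merely tolerates with a DeprecationWarning, so the patterns worked by accident. Raw strings pass the backslashes through to the regex engine explicitly. A quick self-contained check using the same pattern as get_config_param() above:

    import re

    # Without the r prefix, '\d' relies on Python passing an unknown
    # escape through unchanged, which is deprecated; the raw string is
    # equivalent and future-proof.
    m = re.match(r'^([^:]+)/\d+$', 'question/3')
    print(m.group(1) if m else None)  # question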
@@ -148,20 +155,26 @@ def traverse(state, parents, json_object):
             # If the primitive belongs to a hierarchy of two maps, the structure of
             # the lower level map must be flattened and the respective parameters
             # must be moved to the upper level map.
-            comment = cprint(text='Create column {0} in the parent table'.format(state.sql_current_name()), color='red')
+            comment = cprint(text='Create column {0} in the parent table'.format(state.sql_current_name()),
+                             color='red')
         else:
             # An exception is the parameter belonging to the top level maps, e.g.
             # Dhcp4/map/primitive. This primitive goes to the dhcp4_global_parameter
             # table.
-            comment = cprint(text='Use global parameter {0}'.format(state.sql_current_global_name()), color='yellow')
+            comment = cprint(text='Use global parameter {0}'.format(state.sql_current_global_name()),
+                             color='yellow')
 
     elif isinstance(parents[-1], dict) and isinstance(parents[-2], list):
         # A list of maps deserves its own table. For example: subnet4 or
         # shared_networks, option_def etc.
         if state.database.has_column(state.sql_parent_table_name(), state.sql_current_name()):
-            comment = cprint(text='Column {0} in {1} table exists'.format(state.sql_current_name(), state.sql_parent_table_name()), color='green')
+            comment = cprint(text='Column {0} in {1} table exists'.format(state.sql_current_name(),
+                                                                          state.sql_parent_table_name()),
+                             color='green')
         else:
-            comment = cprint(text='Create column {0} in {1} table'.format(state.sql_current_name(), state.sql_parent_table_name()), color='red')
+            comment = cprint(text='Create column {0} in {1} table'.format(state.sql_current_name(),
+                                                                          state.sql_parent_table_name()),
+                             color='red')
 
     elif isinstance(json_object, list):
         if json_object and isinstance(json_object[0], dict):
@@ -170,12 +183,14 @@ def traverse(state, parents, json_object):
             else:
                 comment = cprint(text='Create table {0}'.format(state.sql_table_name()), color='red')
         else:
-            comment = cprint(text='Unable to determine children types because all-keys file contains no children for this object', color='red')
+            comment = cprint(text=('Unable to determine children types because all-keys file contains no children '
+                                   'for this object'), color='red')
 
     elif isinstance(json_object, dict):
         if len(parents) > 1 and isinstance(parents[-2], dict):
             if state.get_path_len() == 2:
-                comment = cprint(text='Parameters belonging to this map should be in {0}'.format(state.sql_global_table_name()), color='yellow')
+                comment = cprint(text='Parameters belonging to this map should be in {0}'.format(
+                    state.sql_global_table_name()), color='yellow')
 
     # Format printing the current object depending on its type.
     if isinstance(json_object, dict):
@@ -200,6 +215,7 @@ def traverse(state, parents, json_object):
     # along with a comment.
     print('{0}/{1}: {2}'.format(state.get_path(), type(json_object).__name__, comment))
 
+
 def main():
     parser = argparse.ArgumentParser(description='Kea Developer Tools')
     parser.add_argument('--all-keys-file', metavar='all_keys_file', required=True,
@@ -237,5 +253,6 @@ def main():
 
     traverse(State(config, database), [], parsed)
 
+
 if __name__ == '__main__':
     main()
diff --git a/tools/reorder_message_file.py b/tools/reorder_message_file.py
index 57cd5ee2e3..9483a58d96 100644
--- a/tools/reorder_message_file.py
+++ b/tools/reorder_message_file.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2011-2015 Internet Systems Consortium, Inc. ("ISC")
+# Copyright (C) 2011-2024 Internet Systems Consortium, Inc. ("ISC")
 #
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
@@ -18,6 +18,7 @@
 
 import sys
 
+
 def remove_empty_leading_trailing(lines):
     """
     Removes leading and trailing empty lines.
@@ -112,7 +113,7 @@ def make_dict(lines):
     message_key = canonicalise_message_line(lines[0])
     message_lines = [message_key]
 
-    index = 1;
+    index = 1
     while index < len(lines):
         if lines[index].startswith("%"):
             # Start of new message
@@ -145,8 +146,8 @@ def print_dict(dictionary):
         count = count + 1
 
         # ... and the entry itself.
-        for l in dictionary[msgid]:
-            print(l.strip())
+        for line in dictionary[msgid]:
+            print(line.strip())
 
 
 def process_file(filename):
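The last two hunks rename the loop variable l to line, fixing E741: in many fonts l is nearly indistinguishable from 1 and I. For context, reorder_message_file.py groups each message definition under its '%' line and reprints the groups sorted by message ID; a much-simplified sketch of that idea (not the actual script, which also canonicalises spacing and preserves header comments) could look like this:

    def reorder(lines):
        # Group each '% MSGID ...' line with the description lines that
        # follow it, then emit the groups sorted by message ID. Assumes
        # every '%' line is well-formed, i.e. the ID follows the '%'.
        messages, key = {}, None
        for line in lines:
            if line.startswith('%'):
                key = line.split()[1]
                messages[key] = [line.rstrip()]
            elif key is not None:
                messages[key].append(line.rstrip())
        out = []
        for msgid in sorted(messages):
            out.extend(messages[msgid] + [''])  # blank line between entries
        return out

    sample = ['% ZEBRA_STOP shutting down', 'Logged on shutdown.',
              '% ALPHA_START starting up', 'Logged on startup.']
    print('\n'.join(reorder(sample)))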