From: Vasek Sraier
Date: Sun, 12 Sep 2021 20:51:37 +0000 (+0200)
Subject: improved error messages + custom values propagating to strict data model
X-Git-Tag: v6.0.0a1~125^2~10
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=9942e2f35d003aefdac11d296cabce911253af65;p=thirdparty%2Fknot-resolver.git

improved error messages + custom values propagating to strict data model

Error messages now contain information about their origin. If it is known, the user is informed about the specific place in the tree where the error occurred.

+ Consolidated all exceptions in one place.

closes #21

Propagating TimeUnits to the strict data model, because that way it is not ambiguous whether they represent seconds or milliseconds.
---
diff --git a/manager/knot_resolver_manager/datamodel/dnssec_config.py b/manager/knot_resolver_manager/datamodel/dnssec_config.py index 75eca9f92..7622808e5 100644 --- a/manager/knot_resolver_manager/datamodel/dnssec_config.py +++ b/manager/knot_resolver_manager/datamodel/dnssec_config.py @@ -35,8 +35,8 @@ class DnssecStrict(DataValidator): trust_anchor_signal_query: bool time_skew_detection: bool keep_removed: int - refresh_time: Optional[int] - hold_down_time: int + refresh_time: Optional[TimeUnit] + hold_down_time: TimeUnit trust_anchors: Optional[List[str]] negative_trust_anchors: Optional[List[str]] diff --git a/manager/knot_resolver_manager/datamodel/lua_config.py b/manager/knot_resolver_manager/datamodel/lua_config.py index dce1b204f..6a924fa20 100644 --- a/manager/knot_resolver_manager/datamodel/lua_config.py +++ b/manager/knot_resolver_manager/datamodel/lua_config.py @@ -1,7 +1,7 @@ from typing import Optional +from knot_resolver_manager.exceptions import ValidationException from knot_resolver_manager.utils import DataParser, DataValidator -from knot_resolver_manager.utils.exceptions import DataValidationException class Lua(DataParser): @@ -17,4 +17,4 @@ class LuaStrict(DataValidator): def _validate(self) -> None: if self.script and self.script_file: - raise DataValidationException("'lua.script' and 'lua.script-file' are both defined, only one can be used") + raise ValidationException("'lua.script' and 'lua.script-file' are both defined, only one can be used") diff --git a/manager/knot_resolver_manager/datamodel/lua_template.j2 b/manager/knot_resolver_manager/datamodel/lua_template.j2 index 61a0195e6..89639fce0 100644 --- a/manager/knot_resolver_manager/datamodel/lua_template.j2 +++ b/manager/knot_resolver_manager/datamodel/lua_template.j2 @@ -47,7 +47,7 @@ trust_anchors.remove('.') {{ "modules.unload('detect_time_skew')" if not cfg.dnssec.time_skew_detection }} trust_anchors.keep_removed = {{ cfg.dnssec.keep_removed }} -{{ "trust_anchors.refresh_time = "+cfg.dnssec.refresh_time|string if cfg.dnssec.refresh_time }} +{{ "trust_anchors.refresh_time = "+cfg.dnssec.refresh_time.seconds()|string if cfg.dnssec.refresh_time }} -- dnssec.trust-anchors {% if cfg.dnssec.trust_anchors %} diff --git a/manager/knot_resolver_manager/datamodel/options_config.py b/manager/knot_resolver_manager/datamodel/options_config.py index 8c29cc7d6..b5ce487ee 100644 --- a/manager/knot_resolver_manager/datamodel/options_config.py +++ b/manager/knot_resolver_manager/datamodel/options_config.py @@ -30,7 +30,7 @@ class Options(DataParser): class PredictionStrict(DataValidator): - window: int + window: TimeUnit period: int def _validate(self) -> None: diff --git a/manager/knot_resolver_manager/datamodel/server_config.py b/manager/knot_resolver_manager/datamodel/server_config.py index 
98ba66d9f..a6327e593 100644 --- a/manager/knot_resolver_manager/datamodel/server_config.py +++ b/manager/knot_resolver_manager/datamodel/server_config.py @@ -5,7 +5,8 @@ from typing import Optional, Union from typing_extensions import Literal -from knot_resolver_manager.utils import DataParser, DataValidationException, DataValidator +from knot_resolver_manager.exceptions import ValidationException +from knot_resolver_manager.utils import DataParser, DataValidator from knot_resolver_manager.utils.types import LiteralEnum logger = logging.getLogger(__name__) @@ -21,7 +22,7 @@ def _cpu_count() -> int: ) cpus = os.cpu_count() if cpus is None: - raise DataValidationException( + raise ValidationException( "The number of available CPUs to automatically set the number of running" "'kresd' workers could not be determined." "The number can be specified manually in 'server:instances' configuration option." @@ -69,15 +70,15 @@ class ServerStrict(DataValidator): return obj.hostname elif obj.hostname is None: return socket.gethostname() - raise DataValidationException(f"Unexpected value for 'server.hostname': {obj.workers}") + raise ValueError(f"Unexpected value for 'server.hostname': {obj.workers}") def _workers(self, obj: Server) -> int: if isinstance(obj.workers, int): return obj.workers elif obj.workers == "auto": return _cpu_count() - raise DataValidationException(f"Unexpected value for 'server.workers': {obj.workers}") + raise ValueError(f"Unexpected value for 'server.workers': {obj.workers}") def _validate(self) -> None: if self.workers < 0: - raise DataValidationException("Number of workers must be non-negative") + raise ValueError("Number of workers must be non-negative") diff --git a/manager/knot_resolver_manager/datamodel/types.py b/manager/knot_resolver_manager/datamodel/types.py index f3dc24973..17aed46da 100644 --- a/manager/knot_resolver_manager/datamodel/types.py +++ b/manager/knot_resolver_manager/datamodel/types.py @@ -5,7 +5,8 @@ from enum import Enum, auto from pathlib import Path from typing import Any, Dict, Optional, Pattern, Union -from knot_resolver_manager.utils import CustomValueType, DataValidationException +from knot_resolver_manager.exceptions import DataValidationException +from knot_resolver_manager.utils import CustomValueType from knot_resolver_manager.utils.data_parser_validator import DataParser, DataValidator logger = logging.getLogger(__name__) @@ -15,7 +16,7 @@ class Unit(CustomValueType): _re: Pattern[str] _units: Dict[Optional[str], int] - def __init__(self, source_value: Any) -> None: + def __init__(self, source_value: Any, object_path: str = "/") -> None: super().__init__(source_value) self._value: int self._value_orig: Union[str, int] @@ -25,24 +26,32 @@ class Unit(CustomValueType): if grouped: val, unit = grouped.groups() if unit is None: - raise DataValidationException(f"Missing units. Accepted units are {list(type(self)._units.keys())}") + raise DataValidationException( + f"Missing units. Accepted units are {list(type(self)._units.keys())}", object_path + ) elif unit not in type(self)._units: raise DataValidationException( f"Used unexpected unit '{unit}' for {type(self).__name__}." 
- f" Accepted units are {list(type(self)._units.keys())}" + f" Accepted units are {list(type(self)._units.keys())}", + object_path, ) self._value = int(val) * type(self)._units[unit] else: - raise DataValidationException(f"{type(self._value)} Failed to convert: {self}") + raise DataValidationException(f"{type(self._value)} Failed to convert: {self}", object_path) elif isinstance(source_value, int): raise DataValidationException( "We do not accept number without units." - f" Please convert the value to string an add a unit - {list(type(self)._units.keys())}" + f" Please convert the value to string an add a unit - {list(type(self)._units.keys())}", + object_path, ) + elif isinstance(source_value, type(self)): + self._value_orig = source_value._value_orig + self._value = source_value._value else: raise DataValidationException( f"Unexpected input type for Unit type - {type(source_value)}." - " Cause might be invalid format or invalid type." + " Cause might be invalid format or invalid type.", + object_path, ) def __int__(self) -> int: @@ -54,6 +63,9 @@ class Unit(CustomValueType): """ return str(self._value_orig) + def __repr__(self) -> str: + return f"Unit[{type(self).__name__},{self._value_orig}]" + def __eq__(self, o: object) -> bool: """ Two instances are equal when they represent the same size @@ -74,15 +86,15 @@ class TimeUnit(Unit): _re = re.compile(r"^(\d+)\s{0,1}([smhd]s?){0,1}$") _units = {"ms": 1, "s": 1000, "m": 60 * 1000, "h": 3600 * 1000, "d": 24 * 3600 * 1000} - def seconds(self): + def seconds(self) -> int: return self._value // 1000 - def millis(self): + def millis(self) -> int: return self._value class AnyPath(CustomValueType): - def __init__(self, source_value: Any) -> None: + def __init__(self, source_value: Any, object_path: str = "/") -> None: super().__init__(source_value) if isinstance(source_value, AnyPath): self._value = source_value._value @@ -90,13 +102,15 @@ class AnyPath(CustomValueType): self._value: Path = Path(source_value) else: raise DataValidationException( - f"Expected file path in a string, got '{source_value}' with type '{type(source_value)}'" + f"Expected file path in a string, got '{source_value}' with type '{type(source_value)}'", object_path ) try: self._value = self._value.resolve(strict=False) except RuntimeError as e: - raise DataValidationException("Failed to resolve given file path. Is there a symlink loop?") from e + raise DataValidationException( + "Failed to resolve given file path. Is there a symlink loop?", object_path + ) from e def __str__(self) -> str: return str(self._value) @@ -149,7 +163,7 @@ class ListenStrict(DataValidator): elif present == {"interface", ...}: return ListenType.INTERFACE else: - raise DataValidationException( + raise ValueError( "Listen configuration contains multiple incompatible options at once. " "You can use (IP and PORT) or (UNIX_SOCKET) or (INTERFACE)." 
) @@ -158,16 +172,14 @@ class ListenStrict(DataValidator): if origin.port is None: return None if not 0 <= origin.port <= 65_535: - raise DataValidationException(f"Port value {origin.port} out of range of usual 2-byte port value") + raise ValueError(f"Port value {origin.port} out of range of usual 2-byte port value") return origin.port def _ip(self, origin: Listen): if origin.ip is None: return None - try: - return ipaddress.ip_address(origin.ip) - except ValueError as e: - raise DataValidationException(f"Failed to parse IP address from '{origin.ip}'") from e + # throws value error, so that get's caught outside of this function + return ipaddress.ip_address(origin.ip) def _validate(self) -> None: # we already check that it's there is only one option in the `_typ` method @@ -175,17 +187,18 @@ class ListenStrict(DataValidator): class IPNetwork(CustomValueType): - def __init__(self, source_value: Any) -> None: + def __init__(self, source_value: Any, object_path: str = "/") -> None: super().__init__(source_value) if isinstance(source_value, str): try: self._value: Union[ipaddress.IPv4Network, ipaddress.IPv6Network] = ipaddress.ip_network(source_value) except ValueError as e: - raise DataValidationException("Failed to parse IP network.") from e + raise DataValidationException("Failed to parse IP network.", object_path) from e else: raise DataValidationException( f"Unexpected value for a network subnet. Expected string, got '{source_value}'" - " with type '{type(source_value)}'" + " with type '{type(source_value)}'", + object_path, ) def to_std(self) -> Union[ipaddress.IPv4Network, ipaddress.IPv6Network]: @@ -202,23 +215,25 @@ class IPNetwork(CustomValueType): class IPv6Network96(CustomValueType): - def __init__(self, source_value: Any) -> None: - super().__init__(source_value) + def __init__(self, source_value: Any, object_path: str = "/") -> None: + super().__init__(source_value, object_path=object_path) if isinstance(source_value, str): try: self._value: ipaddress.IPv6Network = ipaddress.IPv6Network(source_value) except ValueError as e: - raise DataValidationException("Failed to parse IPv6 /96 network.") from e + raise DataValidationException("Failed to parse IPv6 /96 network.", object_path) from e if self._value.prefixlen != 96: raise DataValidationException( "Expected IPv6 network address with /96 prefix lenght." - f" Got prefix lenght of {self._value.prefixlen}" + f" Got prefix lenght of {self._value.prefixlen}", + object_path, ) else: raise DataValidationException( "Unexpected value for a network subnet." 
- f" Expected string, got '{source_value}' with type '{type(source_value)}'" + f" Expected string, got '{source_value}' with type '{type(source_value)}'", + object_path, ) def __str__(self) -> str: diff --git a/manager/knot_resolver_manager/exceptions.py b/manager/knot_resolver_manager/exceptions.py index 192cd7739..3079ac098 100644 --- a/manager/knot_resolver_manager/exceptions.py +++ b/manager/knot_resolver_manager/exceptions.py @@ -1,2 +1,33 @@ -class SubprocessControllerException(Exception): +class KresdManagerException(Exception): + """ + Base class for all custom exceptions we use in our code + """ + + +class SubprocessControllerException(KresdManagerException): + pass + + +class TreeException(KresdManagerException): + def __init__(self, msg: str, tree_path: str) -> None: + super().__init__(msg) + self._tree_path = tree_path + + def where(self) -> str: + return self._tree_path + + +class DataParsingException(TreeException): + pass + + +class DataValidationException(TreeException): + pass + + +class ParsingException(KresdManagerException): + pass + + +class ValidationException(KresdManagerException): pass diff --git a/manager/knot_resolver_manager/kres_manager.py b/manager/knot_resolver_manager/kres_manager.py index 3253ba510..51ee5683e 100644 --- a/manager/knot_resolver_manager/kres_manager.py +++ b/manager/knot_resolver_manager/kres_manager.py @@ -9,13 +9,13 @@ import knot_resolver_manager.kresd_controller from knot_resolver_manager import kres_id from knot_resolver_manager.compat.asyncio import create_task from knot_resolver_manager.constants import KRESD_CONFIG_FILE, WATCHDOG_INTERVAL +from knot_resolver_manager.exceptions import KresdManagerException from knot_resolver_manager.kresd_controller.interface import ( Subprocess, SubprocessController, SubprocessStatus, SubprocessType, ) -from knot_resolver_manager.utils import DataValidationException from knot_resolver_manager.utils.async_utils import writefile from .datamodel import KresConfig, KresConfigStrict @@ -161,7 +161,7 @@ class KresManager: last = self.get_last_used_config_strict() if last is not None: await self._write_config(last) - raise DataValidationException("Canary kresd instance failed. Config is invalid.") + raise KresdManagerException("Canary kresd instance failed. 
Config is invalid.") logger.debug("Canary process test passed, Applying new config to all workers") self._last_used_config = config diff --git a/manager/knot_resolver_manager/server.py b/manager/knot_resolver_manager/server.py index 8603fe67e..8d7d934e3 100644 --- a/manager/knot_resolver_manager/server.py +++ b/manager/knot_resolver_manager/server.py @@ -11,9 +11,10 @@ from aiohttp.web import middleware from aiohttp.web_response import json_response from knot_resolver_manager.constants import MANAGER_CONFIG_FILE +from knot_resolver_manager.exceptions import KresdManagerException, ParsingException, TreeException, ValidationException from knot_resolver_manager.kresd_controller import get_controller_by_name from knot_resolver_manager.kresd_controller.interface import SubprocessController -from knot_resolver_manager.utils import DataValidationException, Format +from knot_resolver_manager.utils import Format from knot_resolver_manager.utils.async_utils import readfile from .datamodel import KresConfig @@ -82,9 +83,16 @@ async def error_handler(request: web.Request, handler: Any): try: return await handler(request) - except DataValidationException as e: - logger.error("Failed to parse given data in API request", exc_info=True) - return web.Response(text=f"Data validation failed: {e}", status=HTTPStatus.BAD_REQUEST) + except KresdManagerException as e: + if isinstance(e, TreeException): + return web.Response( + text=f"Configuration validation failed @ '{e.where()}': {e}", status=HTTPStatus.BAD_REQUEST + ) + elif isinstance(e, (ParsingException, ValidationException)): + return web.Response(text=f"Configuration validation failed: {e}", status=HTTPStatus.BAD_REQUEST) + else: + logger.error("Request processing failed", exc_info=True) + return web.Response(text=f"Request processing failed: {e}", status=HTTPStatus.INTERNAL_SERVER_ERROR) def setup_routes(app: web.Application): diff --git a/manager/knot_resolver_manager/utils/__init__.py b/manager/knot_resolver_manager/utils/__init__.py index 1149f96bc..19ba814ad 100644 --- a/manager/knot_resolver_manager/utils/__init__.py +++ b/manager/knot_resolver_manager/utils/__init__.py @@ -2,8 +2,6 @@ from typing import Any, Callable, Iterable, Optional, Type, TypeVar from .custom_types import CustomValueType from .data_parser_validator import DataParser, DataValidator, Format -from .exceptions import DataParsingException, DataValidationException -from .overload import Overloaded T = TypeVar("T") @@ -55,13 +53,8 @@ def contains_element_matching(cond: Callable[[T], bool], arr: Iterable[T]) -> bo __all__ = [ - "ignore_exceptions_optional", - "ignore_exceptions", "Format", "CustomValueType", "DataParser", "DataValidator", - "DataParsingException", - "DataValidationException", - "Overloaded", ] diff --git a/manager/knot_resolver_manager/utils/custom_types.py b/manager/knot_resolver_manager/utils/custom_types.py index 18a088b6f..219ca1fc8 100644 --- a/manager/knot_resolver_manager/utils/custom_types.py +++ b/manager/knot_resolver_manager/utils/custom_types.py @@ -21,7 +21,7 @@ class CustomValueType: raise a `DataValidationException` in case of errors. 
""" - def __init__(self, source_value: Any) -> None: + def __init__(self, source_value: Any, object_path: str = "/") -> None: pass def __int__(self) -> int: diff --git a/manager/knot_resolver_manager/utils/data_parser_validator.py b/manager/knot_resolver_manager/utils/data_parser_validator.py index 3b9c95f1b..fa3fcbf73 100644 --- a/manager/knot_resolver_manager/utils/data_parser_validator.py +++ b/manager/knot_resolver_manager/utils/data_parser_validator.py @@ -9,8 +9,13 @@ import yaml from yaml.constructor import ConstructorError from yaml.nodes import MappingNode +from knot_resolver_manager.exceptions import ( + DataParsingException, + DataValidationException, + ParsingException, + ValidationException, +) from knot_resolver_manager.utils.custom_types import CustomValueType -from knot_resolver_manager.utils.exceptions import DataParsingException from knot_resolver_manager.utils.types import ( get_attr_type, get_generic_type_argument, @@ -56,7 +61,9 @@ def _to_primitive(obj: Any) -> Any: return obj -def _validated_object_type(cls: Type[Any], obj: Any, default: Any = ..., use_default: bool = False) -> Any: +def _validated_object_type( + cls: Type[Any], obj: Any, default: Any = ..., use_default: bool = False, object_path: str = "/" +) -> Any: """ Given an expected type `cls` and a value object `obj`, validate the type of `obj` and return it """ @@ -74,21 +81,21 @@ def _validated_object_type(cls: Type[Any], obj: Any, default: Any = ..., use_def if obj is None: return None else: - raise DataParsingException(f"Expected None, found '{obj}'.") + raise DataParsingException(f"Expected None, found '{obj}'.", object_path) # Union[*variants] (handles Optional[T] due to the way the typing system works) elif is_union(cls): variants = get_generic_type_arguments(cls) for v in variants: try: - return _validated_object_type(v, obj) + return _validated_object_type(v, obj, object_path=object_path) except DataParsingException: pass - raise DataParsingException(f"Union {cls} could not be parsed - parsing of all variants failed.") + raise DataParsingException(f"Union {cls} could not be parsed - parsing of all variants failed.", object_path) # after this, there is no place for a None object elif obj is None: - raise DataParsingException(f"Unexpected None value for type {cls}") + raise DataParsingException(f"Unexpected None value for type {cls}", object_path) # int elif cls == int: @@ -96,7 +103,7 @@ def _validated_object_type(cls: Type[Any], obj: Any, default: Any = ..., use_def # except for CustomValueType class instances if is_obj_type(obj, int) or isinstance(obj, CustomValueType): return int(obj) - raise DataParsingException(f"Expected int, found {type(obj)}") + raise DataParsingException(f"Expected int, found {type(obj)}", object_path) # str elif cls == str: @@ -107,11 +114,12 @@ def _validated_object_type(cls: Type[Any], obj: Any, default: Any = ..., use_def raise DataParsingException( "Expected str, found bool. Be careful, that YAML parsers consider even" ' "no" and "yes" as a bool. Search for the Norway Problem for more' - " details. And please use quotes explicitly." + " details. 
And please use quotes explicitly.", + object_path, ) else: raise DataParsingException( - f"Expected str (or number that would be cast to string), but found type {type(obj)}" + f"Expected str (or number that would be cast to string), but found type {type(obj)}", object_path ) # bool @@ -119,7 +127,7 @@ def _validated_object_type(cls: Type[Any], obj: Any, default: Any = ..., use_def if is_obj_type(obj, bool): return obj else: - raise DataParsingException(f"Expected bool, found {type(obj)}") + raise DataParsingException(f"Expected bool, found {type(obj)}", object_path) # float elif cls == float: @@ -134,57 +142,60 @@ def _validated_object_type(cls: Type[Any], obj: Any, default: Any = ..., use_def if obj == expected: return obj else: - raise DataParsingException(f"Literal {cls} is not matched with the value {obj}") + raise DataParsingException(f"Literal {cls} is not matched with the value {obj}", object_path) # Dict[K,V] elif is_dict(cls): key_type, val_type = get_generic_type_arguments(cls) try: return { - _validated_object_type(key_type, key): _validated_object_type(val_type, val) for key, val in obj.items() + _validated_object_type(key_type, key, object_path=f"{object_path} @ key {key}"): _validated_object_type( + val_type, val, object_path=f"{object_path} @ value for key {key}" + ) + for key, val in obj.items() } except AttributeError as e: raise DataParsingException( - f"Expected dict-like object, but failed to access its .items() method. Value was {obj}", e - ) + f"Expected dict-like object, but failed to access its .items() method. Value was {obj}", object_path + ) from e # any Enums (probably used only internally in DataValidator) elif is_enum(cls): if isinstance(obj, cls): return obj else: - raise DataParsingException("Unexpected value '{obj}' for enum '{cls}'") + raise DataParsingException(f"Unexpected value '{obj}' for enum '{cls}'", object_path) # List[T] elif is_list(cls): inner_type = get_generic_type_argument(cls) - return [_validated_object_type(inner_type, val) for val in obj] + return [_validated_object_type(inner_type, val, object_path=f"{object_path}[]") for val in obj] # Tuple[A,B,C,D,...] 
elif is_tuple(cls): types = get_generic_type_arguments(cls) - return tuple(_validated_object_type(typ, val) for typ, val in zip(types, obj)) + return tuple(_validated_object_type(typ, val, object_path=object_path) for typ, val in zip(types, obj)) # CustomValueType subclasses elif inspect.isclass(cls) and issubclass(cls, CustomValueType): # no validation performed, the implementation does it in the constuctor - return cls(obj) + return cls(obj, object_path=object_path) # nested DataParser subclasses elif inspect.isclass(cls) and issubclass(cls, DataParser): # we should return DataParser, we expect to be given a dict, # because we can construct a DataParser from it if isinstance(obj, dict): - return cls(obj) # type: ignore - raise DataParsingException(f"Expected '{dict}' object, found '{type(obj)}'") + return cls(obj, object_path=object_path) # type: ignore + raise DataParsingException(f"Expected '{dict}' object, found '{type(obj)}'", object_path) # nested DataValidator subclasses elif inspect.isclass(cls) and issubclass(cls, DataValidator): # we should return DataValidator, we expect to be given a DataParser, # because we can construct a DataValidator from it if isinstance(obj, DataParser): - return cls(obj) - raise DataParsingException(f"Expected instance of '{DataParser}' class, found '{type(obj)}'") + return cls(obj, object_path=object_path) + raise DataParsingException(f"Expected instance of '{DataParser}' class, found '{type(obj)}'", object_path) # if the object matches, just pass it through elif inspect.isclass(cls) and isinstance(obj, cls): @@ -194,7 +205,8 @@ def _validated_object_type(cls: Type[Any], obj: Any, default: Any = ..., use_def else: raise DataParsingException( f"Type {cls} cannot be parsed. This is a implementation error. " - "Please fix your types in the class or improve the parser/validator." + "Please fix your types in the class or improve the parser/validator.", + object_path, ) @@ -204,7 +216,7 @@ def json_raise_duplicates(pairs: List[Tuple[Any, Any]]) -> Optional[Any]: dict_out: Dict[Any, Any] = {} for key, val in pairs: if key in dict_out: - raise DataParsingException(f"Duplicate attribute key detected: {key}") + raise ParsingException(f"Duplicate attribute key detected: {key}") dict_out[key] = val return dict_out @@ -231,7 +243,7 @@ class RaiseDuplicatesLoader(yaml.SafeLoader): # check for duplicate keys if key in mapping: - raise DataParsingException(f"duplicate key detected: {key_node.start_mark}") + raise ParsingException(f"duplicate key detected: {key_node.start_mark}") value = self.construct_object(value_node, deep=deep) # type: ignore mapping[key] = value return mapping @@ -267,7 +279,7 @@ class Format(Enum): "text/vnd.yaml": Format.YAML, } if mime_type not in formats: - raise DataParsingException("Unsupported MIME type") + raise ParsingException("Unsupported MIME type") return formats[mime_type] @@ -278,7 +290,7 @@ _SUBTREE_MUTATION_PATH_PATTERN = re.compile(r"^(/[^/]+)*/?$") class DataParser: - def __init__(self, obj: Optional[Dict[Any, Any]] = None): + def __init__(self, obj: Optional[Dict[Any, Any]] = None, object_path: str = "/"): cls = self.__class__ annot = cls.__dict__.get("__annotations__", {}) @@ -295,7 +307,7 @@ class DataParser: use_default = hasattr(cls, name) default = getattr(cls, name, ...) 
- value = _validated_object_type(python_type, val, default, use_default) + value = _validated_object_type(python_type, val, default, use_default, object_path=f"{object_path}/{name}") setattr(self, name, value) # check for unused keys @@ -307,7 +319,9 @@ class DataParser: additional_info = ( " The problem might be that you are using '_', but you should be using '-' instead." ) - raise DataParsingException(f"Attribute '{key}' was not provided with any value." + additional_info) + raise DataParsingException( + f"Attribute '{key}' was not provided with any value." + additional_info, object_path + ) @classmethod def parse_from(cls: Type[_T], fmt: Format, text: str): @@ -352,7 +366,7 @@ class DataParser: # prepare and validate the path object path = path[:-1] if path.endswith("/") else path if re.match(_SUBTREE_MUTATION_PATH_PATTERN, path) is None: - raise DataParsingException("Provided object path for mutation is invalid.") + raise ParsingException("Provided object path for mutation is invalid.") path = path[1:] if path.startswith("/") else path # now, the path variable should contain '/' separated field names @@ -370,14 +384,16 @@ class DataParser: segment = dash_segment.replace("-", "_") if segment == "": - raise DataParsingException(f"Unexpectedly empty segment in path '{path}'") + raise ParsingException(f"Unexpectedly empty segment in path '{path}'") elif is_internal_field(segment): - raise DataParsingException("No, changing internal fields (starting with _) is not allowed. Nice try.") + raise ParsingException( + "No, changing internal fields (starting with _) is not allowed. Nice try though." + ) elif hasattr(obj, segment): parent = obj obj = getattr(parent, segment) else: - raise DataParsingException( + raise ParsingException( f"Path segment '{dash_segment}' does not match any field on the provided parent object" ) assert parent is not None @@ -393,7 +409,7 @@ class DataParser: class DataValidator: - def __init__(self, obj: DataParser): + def __init__(self, obj: DataParser, object_path: str = ""): cls = self.__class__ anot = cls.__dict__.get("__annotations__", {}) @@ -403,11 +419,20 @@ class DataValidator: # use transformation function if available if hasattr(self, f"_{attr_name}"): - value = getattr(self, f"_{attr_name}")(obj) + try: + value = getattr(self, f"_{attr_name}")(obj) + except (ValueError, ValidationException) as e: + if len(e.args) > 0 and isinstance(e.args[0], str): + msg = e.args[0] + else: + msg = "Failed to validate value type" + raise DataValidationException(msg, object_path) from e elif hasattr(obj, attr_name): value = getattr(obj, attr_name) else: - raise DataParsingException(f"DataParser object {obj} is missing '{attr_name}' attribute.") + raise DataValidationException( + f"DataParser object {obj} is missing '{attr_name}' attribute.", object_path + ) setattr(self, attr_name, _validated_object_type(attr_type, value)) diff --git a/manager/knot_resolver_manager/utils/exceptions.py b/manager/knot_resolver_manager/utils/exceptions.py deleted file mode 100644 index f6e81509e..000000000 --- a/manager/knot_resolver_manager/utils/exceptions.py +++ /dev/null @@ -1,6 +0,0 @@ -class DataParsingException(Exception): - pass - - -class DataValidationException(Exception): - pass diff --git a/manager/pyproject.toml b/manager/pyproject.toml index 610d0cb23..b04e2c588 100644 --- a/manager/pyproject.toml +++ b/manager/pyproject.toml @@ -112,6 +112,7 @@ disable= [ "no-else-raise", # not helpful for readability, when we want explicit branches "raising-bad-type", # handled by type checker 
"too-many-arguments", # sure, but how can we change the signatures to take less arguments? artificially create objects with arguments? That's stupid... + "no-member", # checked by pyright ] [tool.pylint.SIMILARITIES] diff --git a/manager/tests/datamodel/test_config.py b/manager/tests/datamodel/test_config.py index 83d21c3a0..2adeb70d9 100644 --- a/manager/tests/datamodel/test_config.py +++ b/manager/tests/datamodel/test_config.py @@ -1,4 +1,5 @@ from knot_resolver_manager.datamodel import KresConfig, KresConfigStrict +from knot_resolver_manager.datamodel.types import TimeUnit def test_dns64_true_default(): @@ -18,7 +19,7 @@ def test_dnssec_true_default(): assert strict.dnssec.time_skew_detection == True assert strict.dnssec.keep_removed == 0 assert strict.dnssec.refresh_time == None - assert strict.dnssec.hold_down_time == 30 * 24 * 60 ** 2 + assert strict.dnssec.hold_down_time == TimeUnit("30d") assert strict.dnssec.trust_anchors == None assert strict.dnssec.negative_trust_anchors == None diff --git a/manager/tests/datamodel/test_datamodel_types.py b/manager/tests/datamodel/test_datamodel_types.py index bc1431d6f..a3708e812 100644 --- a/manager/tests/datamodel/test_datamodel_types.py +++ b/manager/tests/datamodel/test_datamodel_types.py @@ -12,28 +12,29 @@ from knot_resolver_manager.datamodel.types import ( SizeUnit, TimeUnit, ) -from knot_resolver_manager.utils import DataParser, DataValidationException, DataValidator +from knot_resolver_manager.exceptions import KresdManagerException +from knot_resolver_manager.utils import DataParser, DataValidator def test_size_unit(): assert SizeUnit("5368709120B") == SizeUnit("5242880K") == SizeUnit("5120M") == SizeUnit("5G") - with raises(DataValidationException): + with raises(KresdManagerException): SizeUnit("-5368709120B") - with raises(DataValidationException): + with raises(KresdManagerException): SizeUnit(-5368709120) - with raises(DataValidationException): + with raises(KresdManagerException): SizeUnit("5120MM") def test_time_unit(): assert TimeUnit("1d") == TimeUnit("24h") == TimeUnit("1440m") == TimeUnit("86400s") - with raises(DataValidationException): + with raises(KresdManagerException): TimeUnit("-1") - with raises(DataValidationException): + with raises(KresdManagerException): TimeUnit(-24) - with raises(DataValidationException): + with raises(KresdManagerException): TimeUnit("1440mm") assert TimeUnit("10ms").millis() == 10 @@ -119,7 +120,7 @@ def test_listen(): ip: 127.0.0.1 """ ) - with raises(DataValidationException): + with raises(KresdManagerException): ListenStrict(o) @@ -128,7 +129,7 @@ def test_network(): assert o.to_std().prefixlen == 24 assert o.to_std() == ipaddress.IPv4Network("10.11.12.0/24") - with raises(DataValidationException): + with raises(KresdManagerException): # because only the prefix can have non-zero bits IPNetwork("10.11.12.13/8") @@ -136,8 +137,8 @@ def test_network(): def test_ipv6_96_network(): _ = IPv6Network96("fe80::/96") - with raises(DataValidationException): + with raises(KresdManagerException): IPv6Network96("fe80::/95") - with raises(DataValidationException): + with raises(KresdManagerException): IPv6Network96("10.11.12.3/96") diff --git a/manager/tests/datamodel/test_dnssec_config.py b/manager/tests/datamodel/test_dnssec_config.py index 9b7c9dad7..6031b457d 100644 --- a/manager/tests/datamodel/test_dnssec_config.py +++ b/manager/tests/datamodel/test_dnssec_config.py @@ -43,8 +43,8 @@ def test_validating(): assert strict.trust_anchor_signal_query == False assert strict.time_skew_detection == False 
assert strict.keep_removed == 3 - assert strict.refresh_time == 10 - assert strict.hold_down_time == 45 * 24 * 60 ** 2 + assert strict.refresh_time == TimeUnit("10s") + assert strict.hold_down_time == TimeUnit("45d") assert strict.trust_anchors == [ ". 3600 IN DS 19036 8 2 49AAC11D7B6F6446702E54A1607371607A1A41855200FD2CE1CDDE32F24E8FB5" diff --git a/manager/tests/datamodel/test_lua_config.py b/manager/tests/datamodel/test_lua_config.py index 75129bba0..43acf56ee 100644 --- a/manager/tests/datamodel/test_lua_config.py +++ b/manager/tests/datamodel/test_lua_config.py @@ -1,7 +1,7 @@ from pytest import raises from knot_resolver_manager.datamodel.lua_config import Lua, LuaStrict -from knot_resolver_manager.utils.exceptions import DataValidationException +from knot_resolver_manager.exceptions import KresdManagerException yaml = """ script-only: true @@ -28,5 +28,5 @@ script: -- lua script script-file: path/to/file """ - with raises(DataValidationException): + with raises(KresdManagerException): LuaStrict(Lua.from_yaml(yaml2)) diff --git a/manager/tests/datamodel/test_options_config.py b/manager/tests/datamodel/test_options_config.py index ec57beeba..53aa25e73 100644 --- a/manager/tests/datamodel/test_options_config.py +++ b/manager/tests/datamodel/test_options_config.py @@ -53,7 +53,7 @@ def test_validating(): assert strict.violators_workarounds == True assert strict.serve_stale == True - assert strict.prediction.window == 10 * 60 + assert strict.prediction.window == TimeUnit("10m") assert strict.prediction.period == 20 @@ -62,5 +62,5 @@ def test_prediction_true_defaults(): y = OptionsStrict(x) assert x.prediction == True - assert y.prediction.window == 900 + assert y.prediction.window == TimeUnit("15m") assert y.prediction.period == 24 diff --git a/manager/tests/utils/test_data_parser_validator.py b/manager/tests/utils/test_data_parser_validator.py index 5431f3db7..090177abb 100644 --- a/manager/tests/utils/test_data_parser_validator.py +++ b/manager/tests/utils/test_data_parser_validator.py @@ -3,8 +3,8 @@ from typing import Dict, List, Optional, Tuple, Union from pytest import raises from typing_extensions import Literal -from knot_resolver_manager.utils import DataParser, DataValidationException, DataValidator, Format -from knot_resolver_manager.utils.exceptions import DataParsingException +from knot_resolver_manager.exceptions import DataParsingException +from knot_resolver_manager.utils import DataParser, DataValidator, Format def test_primitive(): @@ -251,7 +251,7 @@ def test_partial_mutations(): def _validate(self) -> None: if self.workers < 0: - raise DataValidationException("Number of workers must be non-negative") + raise ValueError("Number of workers must be non-negative") yaml = """ workers: auto diff --git a/manager/tests/utils/test_overloaded.py b/manager/tests/utils/test_overloaded.py index 5f2d811d0..bcbdbc57c 100644 --- a/manager/tests/utils/test_overloaded.py +++ b/manager/tests/utils/test_overloaded.py @@ -1,6 +1,6 @@ from typing import Optional -from knot_resolver_manager.utils import Overloaded +from knot_resolver_manager.utils.overload import Overloaded def test_simple(): diff --git a/manager/typings/pytest/__init__.pyi b/manager/typings/pytest/__init__.pyi new file mode 100644 index 000000000..1a485dd4f --- /dev/null +++ b/manager/typings/pytest/__init__.pyi @@ -0,0 +1,36 @@ +""" +This type stub file was generated by pyright. 
+""" + +from _pytest import __version__ +from _pytest.assertion import register_assert_rewrite +from _pytest.compat import _setup_collect_fakemodule +from _pytest.config import ExitCode, UsageError, cmdline, hookimpl, hookspec, main +from _pytest.debugging import pytestPDB as __pytestPDB +from _pytest.fixtures import fillfixtures as _fillfuncargs +from _pytest.fixtures import fixture, yield_fixture +from _pytest.freeze_support import freeze_includes +from _pytest.main import Session +from _pytest.mark import MARK_GEN as mark +from _pytest.mark import param +from _pytest.nodes import Collector, File, Item +from _pytest.outcomes import exit, fail, importorskip, skip, xfail +from _pytest.python import Class, Function, Instance, Module, Package +from _pytest.python_api import approx, raises +from _pytest.recwarn import deprecated_call, warns +from _pytest.warning_types import ( + PytestAssertRewriteWarning, + PytestCacheWarning, + PytestCollectionWarning, + PytestConfigWarning, + PytestDeprecationWarning, + PytestExperimentalApiWarning, + PytestUnhandledCoroutineWarning, + PytestUnknownMarkWarning, + PytestWarning, +) + +""" +pytest: unit and functional testing with Python. +""" +set_trace = ... diff --git a/manager/typings/pytest/__main__.pyi b/manager/typings/pytest/__main__.pyi new file mode 100644 index 000000000..de3c14cad --- /dev/null +++ b/manager/typings/pytest/__main__.pyi @@ -0,0 +1,9 @@ +""" +This type stub file was generated by pyright. +""" + +""" +pytest entry point +""" +if __name__ == "__main__": + ...