From cb2ae4235a7a8dcf2d980c2d5d295747916b1280 Mon Sep 17 00:00:00 2001 From: Vasek Sraier Date: Fri, 10 Mar 2023 19:36:15 +0100 Subject: [PATCH] manager: modeling: refactoring --- .../datamodel/cache_schema.py | 6 +- .../datamodel/config_schema.py | 6 +- .../datamodel/dns64_schema.py | 4 +- .../datamodel/dnssec_schema.py | 6 +- .../datamodel/forward_zone_schema.py | 4 +- .../datamodel/logging_schema.py | 10 +- .../datamodel/lua_schema.py | 4 +- .../datamodel/management_schema.py | 4 +- .../datamodel/monitoring_schema.py | 6 +- .../datamodel/network_schema.py | 16 +- .../datamodel/options_schema.py | 8 +- .../datamodel/policy_schema.py | 12 +- .../datamodel/rpz_schema.py | 4 +- .../datamodel/slice_schema.py | 4 +- .../datamodel/static_hints_schema.py | 4 +- .../datamodel/stub_zone_schema.py | 6 +- .../datamodel/types/types.py | 3 +- .../datamodel/view_schema.py | 4 +- .../datamodel/webmgmt_schema.py | 4 +- .../utils/modeling/README.md | 16 +- .../utils/modeling/__init__.py | 3 +- .../utils/modeling/base_schema.py | 278 ++++++++++-------- .../utils/modeling/query.py | 6 +- .../unit/utils/modeling/test_base_schema.py | 30 +- 24 files changed, 237 insertions(+), 211 deletions(-) diff --git a/manager/knot_resolver_manager/datamodel/cache_schema.py b/manager/knot_resolver_manager/datamodel/cache_schema.py index 6813c6855..783b2c15d 100644 --- a/manager/knot_resolver_manager/datamodel/cache_schema.py +++ b/manager/knot_resolver_manager/datamodel/cache_schema.py @@ -1,10 +1,10 @@ from typing import List, Optional from knot_resolver_manager.datamodel.types import Dir, DomainName, File, SizeUnit, TimeUnit -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class PrefillSchema(BaseSchema): +class PrefillSchema(ConfigSchema): """ Prefill the cache periodically by importing zone data obtained over HTTP. 
@@ -25,7 +25,7 @@ class PrefillSchema(BaseSchema): raise ValueError("cache prefilling is not yet supported for non-root zones") -class CacheSchema(BaseSchema): +class CacheSchema(ConfigSchema): """ DNS resolver cache configuration. diff --git a/manager/knot_resolver_manager/datamodel/config_schema.py b/manager/knot_resolver_manager/datamodel/config_schema.py index 3d8bd5fa7..f61806c20 100644 --- a/manager/knot_resolver_manager/datamodel/config_schema.py +++ b/manager/knot_resolver_manager/datamodel/config_schema.py @@ -26,7 +26,7 @@ from knot_resolver_manager.datamodel.stub_zone_schema import StubZoneSchema from knot_resolver_manager.datamodel.types import AbsoluteDir, IntPositive from knot_resolver_manager.datamodel.view_schema import ViewSchema from knot_resolver_manager.datamodel.webmgmt_schema import WebmgmtSchema -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema logger = logging.getLogger(__name__) @@ -78,8 +78,8 @@ def _default_max_worker_count() -> Optional[int]: return MAX_WORKERS -class KresConfig(BaseSchema): - class Raw(BaseSchema): +class KresConfig(ConfigSchema): + class Raw(ConfigSchema): """ Knot Resolver declarative configuration. diff --git a/manager/knot_resolver_manager/datamodel/dns64_schema.py b/manager/knot_resolver_manager/datamodel/dns64_schema.py index 0561f56e3..55d3200a2 100644 --- a/manager/knot_resolver_manager/datamodel/dns64_schema.py +++ b/manager/knot_resolver_manager/datamodel/dns64_schema.py @@ -1,8 +1,8 @@ from knot_resolver_manager.datamodel.types import IPv6Network96 -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class Dns64Schema(BaseSchema): +class Dns64Schema(ConfigSchema): """ DNS64 (RFC 6147) configuration. 
diff --git a/manager/knot_resolver_manager/datamodel/dnssec_schema.py b/manager/knot_resolver_manager/datamodel/dnssec_schema.py index f81374d56..3eb5ec3d6 100644 --- a/manager/knot_resolver_manager/datamodel/dnssec_schema.py +++ b/manager/knot_resolver_manager/datamodel/dnssec_schema.py @@ -1,10 +1,10 @@ from typing import List, Optional from knot_resolver_manager.datamodel.types import IntNonNegative, TimeUnit -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class TrustAnchorFileSchema(BaseSchema): +class TrustAnchorFileSchema(ConfigSchema): """ Trust-anchor zonefile configuration. @@ -18,7 +18,7 @@ class TrustAnchorFileSchema(BaseSchema): read_only: bool = False -class DnssecSchema(BaseSchema): +class DnssecSchema(ConfigSchema): """ DNSSEC configuration. diff --git a/manager/knot_resolver_manager/datamodel/forward_zone_schema.py b/manager/knot_resolver_manager/datamodel/forward_zone_schema.py index b52c3ee96..8b7973b6c 100644 --- a/manager/knot_resolver_manager/datamodel/forward_zone_schema.py +++ b/manager/knot_resolver_manager/datamodel/forward_zone_schema.py @@ -2,10 +2,10 @@ from typing import List, Optional, Union from knot_resolver_manager.datamodel.policy_schema import ForwardServerSchema from knot_resolver_manager.datamodel.types import DomainName, IPAddressOptionalPort, PolicyFlagEnum -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class ForwardZoneSchema(BaseSchema): +class ForwardZoneSchema(ConfigSchema): """ Configuration of Forward Zone. 
diff --git a/manager/knot_resolver_manager/datamodel/logging_schema.py b/manager/knot_resolver_manager/datamodel/logging_schema.py index 1ba395685..1217db233 100644 --- a/manager/knot_resolver_manager/datamodel/logging_schema.py +++ b/manager/knot_resolver_manager/datamodel/logging_schema.py @@ -4,7 +4,7 @@ from typing import Any, List, Optional, Set, Type, Union, cast from typing_extensions import Literal from knot_resolver_manager.datamodel.types import FilePath, TimeUnit -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema from knot_resolver_manager.utils.modeling.base_schema import is_obj_type_valid try: @@ -69,7 +69,7 @@ LogGroupsEnum: TypeAlias = Literal[ ] -class DnstapSchema(BaseSchema): +class DnstapSchema(ConfigSchema): """ Logging DNS queries and responses to a unix socket. @@ -86,7 +86,7 @@ class DnstapSchema(BaseSchema): log_tcp_rtt: bool = True -class DebuggingSchema(BaseSchema): +class DebuggingSchema(ConfigSchema): """ Advanced debugging parameters for kresd (Knot Resolver daemon). @@ -99,8 +99,8 @@ class DebuggingSchema(BaseSchema): assertion_fork: TimeUnit = TimeUnit("5m") -class LoggingSchema(BaseSchema): - class Raw(BaseSchema): +class LoggingSchema(ConfigSchema): + class Raw(ConfigSchema): """ Logging and debugging configuration. diff --git a/manager/knot_resolver_manager/datamodel/lua_schema.py b/manager/knot_resolver_manager/datamodel/lua_schema.py index 174c8540a..bff8e289f 100644 --- a/manager/knot_resolver_manager/datamodel/lua_schema.py +++ b/manager/knot_resolver_manager/datamodel/lua_schema.py @@ -1,9 +1,9 @@ from typing import Optional -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class LuaSchema(BaseSchema): +class LuaSchema(ConfigSchema): """ Custom Lua configuration. 
diff --git a/manager/knot_resolver_manager/datamodel/management_schema.py b/manager/knot_resolver_manager/datamodel/management_schema.py index 80c0efafa..09daa3ff3 100644 --- a/manager/knot_resolver_manager/datamodel/management_schema.py +++ b/manager/knot_resolver_manager/datamodel/management_schema.py @@ -1,10 +1,10 @@ from typing import Optional from knot_resolver_manager.datamodel.types import FilePath, IPAddressPort -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class ManagementSchema(BaseSchema): +class ManagementSchema(ConfigSchema): """ Configuration of management HTTP API. diff --git a/manager/knot_resolver_manager/datamodel/monitoring_schema.py b/manager/knot_resolver_manager/datamodel/monitoring_schema.py index d2f259ef8..e4cdabe85 100644 --- a/manager/knot_resolver_manager/datamodel/monitoring_schema.py +++ b/manager/knot_resolver_manager/datamodel/monitoring_schema.py @@ -3,10 +3,10 @@ from typing import Union from typing_extensions import Literal from knot_resolver_manager.datamodel.types import DomainName, IPAddress, PortNumber, TimeUnit -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class GraphiteSchema(BaseSchema): +class GraphiteSchema(ConfigSchema): host: Union[IPAddress, DomainName] port: PortNumber = PortNumber(2003) prefix: str = "" @@ -14,7 +14,7 @@ class GraphiteSchema(BaseSchema): tcp: bool = False -class MonitoringSchema(BaseSchema): +class MonitoringSchema(ConfigSchema): """ --- enabled: configures, whether statistics module will be loaded into resolver diff --git a/manager/knot_resolver_manager/datamodel/network_schema.py b/manager/knot_resolver_manager/datamodel/network_schema.py index 7667bbd96..0a177e395 100644 --- a/manager/knot_resolver_manager/datamodel/network_schema.py +++ b/manager/knot_resolver_manager/datamodel/network_schema.py @@ -15,12 +15,12 @@ from 
knot_resolver_manager.datamodel.types import ( PortNumber, SizeUnit, ) -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema KindEnum = Literal["dns", "xdp", "dot", "doh-legacy", "doh2"] -class EdnsBufferSizeSchema(BaseSchema): +class EdnsBufferSizeSchema(ConfigSchema): """ EDNS payload size advertised in DNS packets. @@ -33,7 +33,7 @@ class EdnsBufferSizeSchema(BaseSchema): downstream: SizeUnit = SizeUnit("1232B") -class AddressRenumberingSchema(BaseSchema): +class AddressRenumberingSchema(ConfigSchema): """ Renumbers addresses in answers to different address space. @@ -46,7 +46,7 @@ class AddressRenumberingSchema(BaseSchema): destination: IPAddress -class TLSSchema(BaseSchema): +class TLSSchema(ConfigSchema): """ TLS configuration, also affects DNS over TLS and DNS over HTTPS. @@ -71,8 +71,8 @@ class TLSSchema(BaseSchema): raise ValueError("'sticket_secret' and 'sticket_secret_file' are both defined, only one can be used") -class ListenSchema(BaseSchema): - class Raw(BaseSchema): +class ListenSchema(ConfigSchema): + class Raw(ConfigSchema): """ Configuration of listening interface. @@ -135,7 +135,7 @@ class ListenSchema(BaseSchema): ) -class ProxyProtocolSchema(BaseSchema): +class ProxyProtocolSchema(ConfigSchema): """ PROXYv2 protocol configuration. @@ -146,7 +146,7 @@ class ProxyProtocolSchema(BaseSchema): allow: List[Union[IPAddress, IPNetwork]] -class NetworkSchema(BaseSchema): +class NetworkSchema(ConfigSchema): """ Network connections and protocols configuration. 
diff --git a/manager/knot_resolver_manager/datamodel/options_schema.py b/manager/knot_resolver_manager/datamodel/options_schema.py index 1ec5072d3..cee709a2f 100644 --- a/manager/knot_resolver_manager/datamodel/options_schema.py +++ b/manager/knot_resolver_manager/datamodel/options_schema.py @@ -3,12 +3,12 @@ from typing import Any, Union from typing_extensions import Literal from knot_resolver_manager.datamodel.types import IntNonNegative, TimeUnit -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema GlueCheckingEnum = Literal["normal", "strict", "permissive"] -class PredictionSchema(BaseSchema): +class PredictionSchema(ConfigSchema): """ Helps keep the cache hot by prefetching expiring records and learning usage patterns and repetitive queries. @@ -21,8 +21,8 @@ class PredictionSchema(BaseSchema): period: IntNonNegative = IntNonNegative(24) -class OptionsSchema(BaseSchema): - class Raw(BaseSchema): +class OptionsSchema(ConfigSchema): + class Raw(ConfigSchema): """ Fine-tuning global parameters of DNS resolver operation. diff --git a/manager/knot_resolver_manager/datamodel/policy_schema.py b/manager/knot_resolver_manager/datamodel/policy_schema.py index 072406a0e..3f5962ff7 100644 --- a/manager/knot_resolver_manager/datamodel/policy_schema.py +++ b/manager/knot_resolver_manager/datamodel/policy_schema.py @@ -10,10 +10,10 @@ from knot_resolver_manager.datamodel.types import ( PolicyFlagEnum, TimeUnit, ) -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class FilterSchema(BaseSchema): +class FilterSchema(ConfigSchema): """ Query filtering configuration. @@ -28,7 +28,7 @@ class FilterSchema(BaseSchema): qtype: Optional[DNSRecordTypeEnum] = None -class AnswerSchema(BaseSchema): +class AnswerSchema(ConfigSchema): """ Configuration of custom resource record for DNS answer. 
@@ -45,7 +45,7 @@ class AnswerSchema(BaseSchema): nodata: bool = False -class ForwardServerSchema(BaseSchema): +class ForwardServerSchema(ConfigSchema): """ Configuration of Forward server. @@ -92,7 +92,7 @@ def _validate_policy_action(policy_action: Union["ActionSchema", "PolicySchema"] ) -class ActionSchema(BaseSchema): +class ActionSchema(ConfigSchema): """ Configuration of policy action. @@ -114,7 +114,7 @@ class ActionSchema(BaseSchema): _validate_policy_action(self) -class PolicySchema(BaseSchema): +class PolicySchema(ConfigSchema): """ Configuration of policy rule. diff --git a/manager/knot_resolver_manager/datamodel/rpz_schema.py b/manager/knot_resolver_manager/datamodel/rpz_schema.py index 050eeed1a..633e34a5b 100644 --- a/manager/knot_resolver_manager/datamodel/rpz_schema.py +++ b/manager/knot_resolver_manager/datamodel/rpz_schema.py @@ -1,10 +1,10 @@ from typing import List, Optional from knot_resolver_manager.datamodel.types import File, PolicyActionEnum, PolicyFlagEnum -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class RPZSchema(BaseSchema): +class RPZSchema(ConfigSchema): """ Configuration or Response Policy Zone (RPZ). diff --git a/manager/knot_resolver_manager/datamodel/slice_schema.py b/manager/knot_resolver_manager/datamodel/slice_schema.py index 119d9a16b..0c7cdea19 100644 --- a/manager/knot_resolver_manager/datamodel/slice_schema.py +++ b/manager/knot_resolver_manager/datamodel/slice_schema.py @@ -3,10 +3,10 @@ from typing import List, Optional from typing_extensions import Literal from knot_resolver_manager.datamodel.policy_schema import ActionSchema -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class SliceSchema(BaseSchema): +class SliceSchema(ConfigSchema): """ Split the entire DNS namespace into distinct slices. 
diff --git a/manager/knot_resolver_manager/datamodel/static_hints_schema.py b/manager/knot_resolver_manager/datamodel/static_hints_schema.py index 409526301..7d39fcf40 100644 --- a/manager/knot_resolver_manager/datamodel/static_hints_schema.py +++ b/manager/knot_resolver_manager/datamodel/static_hints_schema.py @@ -1,10 +1,10 @@ from typing import Dict, List, Optional from knot_resolver_manager.datamodel.types import DomainName, File, IPAddress, TimeUnit -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class StaticHintsSchema(BaseSchema): +class StaticHintsSchema(ConfigSchema): """ Static hints for forward records (A/AAAA) and reverse records (PTR) diff --git a/manager/knot_resolver_manager/datamodel/stub_zone_schema.py b/manager/knot_resolver_manager/datamodel/stub_zone_schema.py index 1ff980562..76e4d82c5 100644 --- a/manager/knot_resolver_manager/datamodel/stub_zone_schema.py +++ b/manager/knot_resolver_manager/datamodel/stub_zone_schema.py @@ -1,10 +1,10 @@ from typing import List, Optional, Union from knot_resolver_manager.datamodel.types import DomainName, IPAddressOptionalPort, PolicyFlagEnum -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class StubServerSchema(BaseSchema): +class StubServerSchema(ConfigSchema): """ Configuration of Stub server. @@ -15,7 +15,7 @@ class StubServerSchema(BaseSchema): address: IPAddressOptionalPort -class StubZoneSchema(BaseSchema): +class StubZoneSchema(ConfigSchema): """ Configuration of Stub Zone. 
diff --git a/manager/knot_resolver_manager/datamodel/types/types.py b/manager/knot_resolver_manager/datamodel/types/types.py index fceb51e89..02614e226 100644 --- a/manager/knot_resolver_manager/datamodel/types/types.py +++ b/manager/knot_resolver_manager/datamodel/types/types.py @@ -1,7 +1,6 @@ import ipaddress import re -from pathlib import Path -from typing import Any, Dict, Optional, Tuple, Type, TypeVar, Union +from typing import Any, Dict, Optional, Type, Union from knot_resolver_manager.datamodel.types.base_types import IntRangeBase, PatternBase, StrBase, UnitBase from knot_resolver_manager.utils.modeling import BaseValueType diff --git a/manager/knot_resolver_manager/datamodel/view_schema.py b/manager/knot_resolver_manager/datamodel/view_schema.py index 1e231c211..f84ab428b 100644 --- a/manager/knot_resolver_manager/datamodel/view_schema.py +++ b/manager/knot_resolver_manager/datamodel/view_schema.py @@ -1,10 +1,10 @@ from typing import List, Optional from knot_resolver_manager.datamodel.types import IPNetwork, PolicyFlagEnum -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class ViewSchema(BaseSchema): +class ViewSchema(ConfigSchema): """ Configuration parameters that allow you to create personalized policy rules and other. diff --git a/manager/knot_resolver_manager/datamodel/webmgmt_schema.py b/manager/knot_resolver_manager/datamodel/webmgmt_schema.py index f8174f22b..41cc33877 100644 --- a/manager/knot_resolver_manager/datamodel/webmgmt_schema.py +++ b/manager/knot_resolver_manager/datamodel/webmgmt_schema.py @@ -1,10 +1,10 @@ from typing import Optional from knot_resolver_manager.datamodel.types import File, FilePath, InterfacePort -from knot_resolver_manager.utils.modeling import BaseSchema +from knot_resolver_manager.utils.modeling import ConfigSchema -class WebmgmtSchema(BaseSchema): +class WebmgmtSchema(ConfigSchema): """ Configuration of legacy web management endpoint. 
diff --git a/manager/knot_resolver_manager/utils/modeling/README.md b/manager/knot_resolver_manager/utils/modeling/README.md index eec99e33c..97c68b54e 100644 --- a/manager/knot_resolver_manager/utils/modeling/README.md +++ b/manager/knot_resolver_manager/utils/modeling/README.md @@ -5,12 +5,12 @@ The utilities also take care of parsing, validating and creating JSON schemas an ## Creating schema -Schema is created using `BaseSchema` class. Schema structure is specified using annotations. +Schema is created using `ConfigSchema` class. Schema structure is specified using annotations. ```python -from .modeling import BaseSchema +from .modeling import ConfigSchema -class SimpleSchema(BaseSchema): +class SimpleSchema(ConfigSchema): integer: int = 5 # a default value can be specified string: str boolean: bool @@ -21,7 +21,7 @@ Words in multi-word names are separated by underscore `_` (e.g. `simple_schema`) ```python from typing import Dict, List, Optional, Union -class ComplexSchema(BaseSchema): +class ComplexSchema(ConfigSchema): optional: Optional[str] # this field is optional union: Union[int, str] # integer and string are both valid list: List[int] # list of integers @@ -36,7 +36,7 @@ If a some additional validation needs to be done, there is `_validate()` method `ValueError` exception should be raised in case of validation error. ```python -class FieldsSchema(BaseSchema): +class FieldsSchema(ConfigSchema): field1: int field2: int @@ -53,8 +53,8 @@ Transformation method must be named based on field (`value` in this example) wit In this example, the `Layer2Schema` is structure for input data and `Layer1Schema` is for result data. ```python -class Layer1Schema(BaseSchema): - class Layer2Schema(BaseSchema): +class Layer1Schema(ConfigSchema): + class Layer2Schema(ConfigSchema): value: Union[str, int] _LAYER = Layer2Schema @@ -72,7 +72,7 @@ class Layer1Schema(BaseSchema): Created schema can be documented using simple docstring. 
Json schema is created by calling `json_schema()` method on schema class. JSON schema includes description from docstring, defaults, etc. ```python -SimpleSchema(BaseSchema): +SimpleSchema(ConfigSchema): """ This is description for SimpleSchema itself. diff --git a/manager/knot_resolver_manager/utils/modeling/__init__.py b/manager/knot_resolver_manager/utils/modeling/__init__.py index 9404775fa..ec1ab6d72 100644 --- a/manager/knot_resolver_manager/utils/modeling/__init__.py +++ b/manager/knot_resolver_manager/utils/modeling/__init__.py @@ -1,10 +1,11 @@ -from .base_schema import BaseSchema +from .base_schema import BaseSchema, ConfigSchema from .base_value_type import BaseValueType from .parsing import parse, parse_json, parse_yaml, try_to_parse __all__ = [ "BaseValueType", "BaseSchema", + "ConfigSchema", "parse", "parse_yaml", "parse_json", diff --git a/manager/knot_resolver_manager/utils/modeling/base_schema.py b/manager/knot_resolver_manager/utils/modeling/base_schema.py index b35367909..ddf096114 100644 --- a/manager/knot_resolver_manager/utils/modeling/base_schema.py +++ b/manager/knot_resolver_manager/utils/modeling/base_schema.py @@ -11,7 +11,6 @@ from .base_value_type import BaseValueType from .exceptions import AggregateDataValidationError, DataDescriptionError, DataValidationError from .renaming import Renamed, renamed from .types import ( - NoneType, get_generic_type_argument, get_generic_type_arguments, get_optional_inner_type, @@ -26,7 +25,6 @@ from .types import ( is_union, ) - T = TypeVar("T") @@ -198,7 +196,7 @@ def _describe_type(typ: Type[Any]) -> Dict[Any, Any]: raise NotImplementedError(f"Trying to get JSON schema for type '{typ}', which is not implemented") -TSource = Union[None, "NoRenameBaseSchema", Dict[str, Any]] +TSource = Union[None, "BaseSchema", Dict[str, Any]] def _create_untouchable(name: str) -> object: @@ -212,14 +210,14 @@ def _create_untouchable(name: str) -> object: return _Untouchable() -class Mapper: - def 
_validated_tuple(self, tp: Type[Any], obj: Tuple[Any, ...], object_path: str) -> Tuple[Any, ...]: +class ObjectMapper: + def _create_tuple(self, tp: Type[Any], obj: Tuple[Any, ...], object_path: str) -> Tuple[Any, ...]: types = get_generic_type_arguments(tp) errs: List[DataValidationError] = [] res: List[Any] = [] for i, (t, val) in enumerate(zip(types, obj)): try: - res.append(self.validated_object_type(t, val, object_path=f"{object_path}[{i}]")) + res.append(self.map_object(t, val, object_path=f"{object_path}[{i}]")) except DataValidationError as e: errs.append(e) if len(errs) == 1: @@ -228,15 +226,15 @@ class Mapper: raise AggregateDataValidationError(object_path, child_exceptions=errs) return tuple(res) - def _validated_dict(self, tp: Type[Any], obj: Dict[Any, Any], object_path: str) -> Dict[Any, Any]: + def _create_dict(self, tp: Type[Any], obj: Dict[Any, Any], object_path: str) -> Dict[Any, Any]: key_type, val_type = get_generic_type_arguments(tp) try: errs: List[DataValidationError] = [] res: Dict[Any, Any] = {} for key, val in obj.items(): try: - nkey = self.validated_object_type(key_type, key, object_path=f"{object_path}[{key}]") - nval = self.validated_object_type(val_type, val, object_path=f"{object_path}[{key}]") + nkey = self.map_object(key_type, key, object_path=f"{object_path}[{key}]") + nval = self.map_object(val_type, val, object_path=f"{object_path}[{key}]") res[nkey] = nval except DataValidationError as e: errs.append(e) @@ -250,13 +248,16 @@ class Mapper: f"Expected dict-like object, but failed to access its .items() method. 
Value was {obj}", object_path ) from e - def _validated_list(self, tp: Type[Any], obj: List[Any], object_path: str) -> List[Any]: + def _create_list(self, tp: Type[Any], obj: List[Any], object_path: str) -> List[Any]: + if isinstance(obj, str): + raise DataValidationError("expected list, got string", object_path) + inner_type = get_generic_type_argument(tp) errs: List[DataValidationError] = [] res: List[Any] = [] for i, val in enumerate(obj): try: - res.append(self.validated_object_type(inner_type, val, object_path=f"{object_path}[{i}]")) + res.append(self.map_object(inner_type, val, object_path=f"{object_path}[{i}]")) except DataValidationError as e: errs.append(e) if len(errs) == 1: @@ -265,7 +266,81 @@ class Mapper: raise AggregateDataValidationError(object_path, child_exceptions=errs) return res - def validated_object_type( + def _create_str(self, obj: Any, object_path: str) -> str: + # we are willing to cast any primitive value to string, but no compound values are allowed + if is_obj_type(obj, (str, float, int)) or isinstance(obj, BaseValueType): + return str(obj) + elif is_obj_type(obj, bool): + raise DataValidationError( + "Expected str, found bool. Be careful, that YAML parsers consider even" + ' "no" and "yes" as a bool. Search for the Norway Problem for more' + " details. 
And please use quotes explicitly.", + object_path, + ) + else: + raise DataValidationError( + f"expected str (or number that would be cast to string), but found type {type(obj)}", object_path + ) + + def _create_int(self, obj: Any, object_path: str) -> int: + # we don't want to make an int out of anything else than other int + # except for BaseValueType class instances + if is_obj_type(obj, int) or isinstance(obj, BaseValueType): + return int(obj) + raise DataValidationError(f"expected int, found {type(obj)}", object_path) + + def _create_union(self, tp: Type[T], obj: Any, object_path: str) -> T: + variants = get_generic_type_arguments(tp) + errs: List[DataValidationError] = [] + for v in variants: + try: + return self.map_object(v, obj, object_path=object_path) + except DataValidationError as e: + errs.append(e) + + raise DataValidationError("could not parse any of the possible variants", object_path, child_exceptions=errs) + + def _create_optional(self, tp: Type[Optional[T]], obj: Any, object_path: str) -> Optional[T]: + inner: Type[Any] = get_optional_inner_type(tp) + if obj is None: + return None + else: + return self.map_object(inner, obj, object_path=object_path) + + def _create_bool(self, obj: Any, object_path: str) -> bool: + if is_obj_type(obj, bool): + return obj + else: + raise DataValidationError(f"expected bool, found {type(obj)}", object_path) + + def _create_literal(self, tp: Type[Any], obj: Any, object_path: str) -> Any: + expected = get_generic_type_arguments(tp) + if obj in expected: + return obj + else: + raise DataValidationError(f"'{obj}' does not match any of the expected values {expected}", object_path) + + def _create_base_schema_object(self, tp: Type[Any], obj: Any, object_path: str) -> "BaseSchema": + if isinstance(obj, (dict, BaseSchema)): + return tp(obj, object_path=object_path) + raise DataValidationError(f"expected 'dict' or 'NoRenameBaseSchema' object, found '{type(obj)}'", object_path) + + def create_value_type_object(self, tp: 
Type[Any], obj: Any, object_path: str) -> "BaseValueType": + if isinstance(obj, tp): + # if we already have a custom value type, just pass it through + return obj + else: + # no validation performed, the implementation does it in the constuctor + try: + return tp(obj, object_path=object_path) + except ValueError as e: + if len(e.args) > 0 and isinstance(e.args[0], str): + msg = e.args[0] + else: + msg = f"Failed to validate value against {tp} type" + raise DataValidationError(msg, object_path) from e + + def map_object( self, tp: Type[Any], obj: Any, @@ -274,7 +349,8 @@ class Mapper: object_path: str = "/", ) -> Any: """ - Given an expected type `cls` and a value object `obj`, validate the type of `obj` and return it + Given an expected type `cls` and a value object `obj`, return a new object of the given type and map fields of `obj` into it. During the mapping procedure, + runtime type checking is performed. """ # Disabling these checks, because I think it's much more readable as a single function @@ -294,25 +370,11 @@ class Mapper: # Optional[T] (could be technically handled by Union[*variants], but this way we have better error reporting) elif is_optional(tp): - inner: Type[Any] = get_optional_inner_type(tp) - if obj is None: - return None - else: - return self.validated_object_type(inner, obj, object_path=object_path) + return self._create_optional(tp, obj, object_path) # Union[*variants] elif is_union(tp): - variants = get_generic_type_arguments(tp) - errs: List[DataValidationError] = [] - for v in variants: - try: - return self.validated_object_type(v, obj, object_path=object_path) - except DataValidationError as e: - errs.append(e) - - raise DataValidationError( - "could not parse any of the possible variants", object_path, child_exceptions=errs - ) + return self._create_union(tp, obj, object_path) # after this, there is no place for a None object elif obj is None: @@ -320,54 +382,30 @@ class Mapper: # int elif tp == int: - # we don't want to make an int out 
of anything else than other int - # except for BaseValueType class instances - if is_obj_type(obj, int) or isinstance(obj, BaseValueType): - return int(obj) - raise DataValidationError(f"expected int, found {type(obj)}", object_path) + return self._create_int(obj, object_path) # str elif tp == str: - # we are willing to cast any primitive value to string, but no compound values are allowed - if is_obj_type(obj, (str, float, int)) or isinstance(obj, BaseValueType): - return str(obj) - elif is_obj_type(obj, bool): - raise DataValidationError( - "Expected str, found bool. Be careful, that YAML parsers consider even" - ' "no" and "yes" as a bool. Search for the Norway Problem for more' - " details. And please use quotes explicitly.", - object_path, - ) - else: - raise DataValidationError( - f"expected str (or number that would be cast to string), but found type {type(obj)}", object_path - ) + return self._create_str(obj, object_path) # bool elif tp == bool: - if is_obj_type(obj, bool): - return obj - else: - raise DataValidationError(f"expected bool, found {type(obj)}", object_path) + return self._create_bool(obj, object_path) # float elif tp == float: raise NotImplementedError( - "Floating point values are not supported in the parser." + "Floating point values are not supported in the object mapper." 
" Please implement them and be careful with type coercions" ) # Literal[T] elif is_literal(tp): - expected = get_generic_type_arguments(tp) - if obj in expected: - return obj - else: - raise DataValidationError(f"'{obj}' does not match any of the expected values {expected}", object_path) + return self._create_literal(tp, obj, object_path) # Dict[K,V] elif is_dict(tp): - return self._validated_dict(tp, obj, object_path) + return self._create_dict(tp, obj, object_path) # any Enums (probably used only internally in DataValidator) elif is_enum(tp): @@ -378,13 +416,11 @@ class Mapper: # List[T] elif is_list(tp): - if isinstance(obj, str): - raise DataValidationError("expected list, got string", object_path) - return self._validated_list(tp, obj, object_path) + return self._create_list(tp, obj, object_path) # Tuple[A,B,C,D,...] elif is_tuple(tp): - return self._validated_tuple(tp, obj, object_path) + return self._create_tuple(tp, obj, object_path) # type of obj and cls type match elif is_obj_type(obj, tp): @@ -397,11 +433,11 @@ class Mapper: # BaseValueType subclasses elif inspect.isclass(tp) and issubclass(tp, BaseValueType): - return self.construct_value_type(tp, obj, object_path) + return self.create_value_type_object(tp, obj, object_path) # nested BaseSchema subclasses - elif inspect.isclass(tp) and issubclass(tp, NoRenameBaseSchema): - return self.construct_base_schema(tp, obj, object_path) + elif inspect.isclass(tp) and issubclass(tp, BaseSchema): + return self._create_base_schema_object(tp, obj, object_path) # if the object matches, just pass it through elif inspect.isclass(tp) and isinstance(obj, tp): @@ -415,39 +451,13 @@ class Mapper: object_path, ) - def construct_base_schema(self, tp: Type[Any], obj: Any, object_path: str) -> "NoRenameBaseSchema": - if isinstance(obj, (dict, NoRenameBaseSchema)): - return tp(obj, object_path=object_path) # type: ignore - raise DataValidationError( - f"expected 'dict' or 'NoRenameBaseSchema' object, found '{type(obj)}'", 
object_path - ) - - def construct_value_type(self, tp: Type[Any], obj: Any, object_path: str) -> "BaseValueType": - if isinstance(obj, tp): - # if we already have a custom value type, just pass it through - return obj - else: - # no validation performed, the implementation does it in the constuctor - try: - return tp(obj, object_path=object_path) - except ValueError as e: - if len(e.args) > 0 and isinstance(e.args[0], str): - msg = e.args[0] - else: - msg = f"Failed to validate value against {tp} type" - raise DataValidationError(msg, object_path) from e - - def load(self, clazz: Type[T], obj: Any, default: Any = ..., use_default: bool = False) -> T: - return self.validated_object_type(clazz, obj, default, use_default) - - @classmethod - def is_obj_type_valid(cls, obj: Any, tp: Type[Any]) -> bool: + def is_obj_type_valid(self, obj: Any, tp: Type[Any]) -> bool: """ Runtime type checking. Validate, that a given object is of a given type. """ try: - cls().validated_object_type(tp, obj) + self.map_object(tp, obj) return True except (DataValidationError, ValueError): return False @@ -455,14 +465,14 @@ class Mapper: def _assign_default(self, obj: Any, name: str, python_type: Any, object_path: str) -> None: cls = obj.__class__ default = getattr(cls, name, None) - value = self.validated_object_type(python_type, default, object_path=f"{object_path}/{name}") + value = self.map_object(python_type, default, object_path=f"{object_path}/{name}") setattr(obj, name, value) def _assign_field(self, obj: Any, name: str, python_type: Any, value: Any, object_path: str) -> None: - value = self.validated_object_type(python_type, value, object_path=f"{object_path}/{name}") + value = self.map_object(python_type, value, object_path=f"{object_path}/{name}") setattr(obj, name, value) - def _assign_fields(self, obj: Any, source: Union[Dict[str, Any], "NoRenameBaseSchema", None], object_path: str) -> Set[str]: + def _assign_fields(self, obj: Any, source: Union[Dict[str, Any], "BaseSchema", 
None], object_path: str) -> Set[str]:
        """
        Order of assignment:
        1. all direct assignments
@@ -539,16 +549,19 @@ class Mapper:
                 msg = "Failed to validate value type"
             raise DataValidationError(msg, object_path) from e
 
-    def object_constructor(self, obj: Any, source: TSource, object_path: str) -> None:
-        # make sure that all raw data checks passed on the source object
-        if source is None:
-            source = {}
+    def object_constructor(self, obj: Any, source: Union["BaseSchema", Dict[Any, Any]], object_path: str) -> None:
+        """
+        Delegated constructor for the BaseSchema class.
 
-        if not isinstance(source, (NoRenameBaseSchema, dict)):
-            raise DataValidationError(f"expected dict-like object, found '{type(source)}'", object_path)
+        This constructor is delegated to the mapper because of renaming: this way, we do not need
+        a separate BaseSchema class in order to support dynamically renamed fields.
+        """
+        # As this is a delegated constructor, we must ignore protected access warnings
+        # pylint: disable=protected-access
 
-        # save source (2 underscores should invoke Python's build-in mangling and we wont hopefully have collistions with data fields)
-        obj.__source: Union[Dict[str, Any], NoRenameBaseSchema] = source  # type: ignore
+        # sanity check
+        if not isinstance(source, (BaseSchema, dict)):  # type: ignore
+            raise DataValidationError(f"expected dict-like object, found '{type(source)}'", object_path)
 
         # construct lower level schema first if configured to do so
         if obj._LAYER is not None:
@@ -558,7 +571,7 @@ class Mapper:
         used_keys = self._assign_fields(obj, source, object_path)
 
         # check for unused keys in the source object
-        if source and not isinstance(source, NoRenameBaseSchema):
+        if source and not isinstance(source, BaseSchema):
             unused = source.keys() - used_keys
             if len(unused) > 0:
                 keys = ", ".join((f"'{u}'" for u in unused))
@@ -574,7 +587,7 @@ class Mapper:
             raise DataValidationError(e.args[0] if len(e.args) > 0 else "Validation error", object_path)
from e -class NoRenameBaseSchema(Serializable): +class BaseSchema(Serializable): """ Base class for modeling configuration schema. It somewhat resembles standard dataclasses with additional functionality: @@ -637,15 +650,22 @@ class NoRenameBaseSchema(Serializable): See tests/utils/test_modelling.py for example usage. """ - _LAYER: Optional[Type["NoRenameBaseSchema"]] = None - _MAPPER: Mapper = Mapper() + _LAYER: Optional[Type["BaseSchema"]] = None + _MAPPER: ObjectMapper = ObjectMapper() + + def __init_subclass__(cls) -> None: + return super().__init_subclass__() def __init__(self, source: TSource = None, object_path: str = ""): + # save source data (and drop information about nullness) + source = source or {} + self.__source: Union[Dict[str, Any], BaseSchema] = source + + # delegate the rest of the constructor self._MAPPER.object_constructor(self, source, object_path) - self.__source: Union[Dict[str, Any], NoRenameBaseSchema] def get_unparsed_data(self) -> Dict[str, Any]: - if isinstance(self.__source, NoRenameBaseSchema): + if isinstance(self.__source, BaseSchema): return self.__source.get_unparsed_data() elif isinstance(self.__source, Renamed): return self.__source.original() @@ -678,7 +698,7 @@ class NoRenameBaseSchema(Serializable): return True @classmethod - def json_schema(cls: Type["NoRenameBaseSchema"], include_schema_definition: bool = True) -> Dict[Any, Any]: + def json_schema(cls: Type["BaseSchema"], include_schema_definition: bool = True) -> Dict[Any, Any]: if cls._LAYER is not None: return cls._LAYER.json_schema(include_schema_definition=include_schema_definition) @@ -702,33 +722,39 @@ class NoRenameBaseSchema(Serializable): return res -class RenamedMapper(Mapper): - def _validated_dict(self, tp: Type[Any], obj: Dict[Any, Any], object_path: str) -> Dict[Any, Any]: +class RenamingObjectMapper(ObjectMapper): + """ + Same as object mapper, but it uses collection wrappers from the module `renamed` to perform dynamic field renaming. 
+
+    More specifically:
+    - it renames all properties in (nested) objects
+    - it does not rename keys in dictionaries
+    """
+
+    def _create_dict(self, tp: Type[Any], obj: Dict[Any, Any], object_path: str) -> Dict[Any, Any]:
         if isinstance(obj, Renamed):
             obj = obj.original()
-        return super()._validated_dict(tp, obj, object_path)
+        return super()._create_dict(tp, obj, object_path)
 
-    def construct_base_schema(self, tp: Type[Any], obj: Any, object_path: str) -> "NoRenameBaseSchema":
+    def _create_base_schema_object(self, tp: Type[Any], obj: Any, object_path: str) -> "BaseSchema":
         if isinstance(obj, dict):
             obj = renamed(obj)
-        return super().construct_base_schema(tp, obj, object_path)
+        return super()._create_base_schema_object(tp, obj, object_path)
 
-    def object_constructor(self, obj: Any, source: TSource, object_path: str) -> None:
+    def object_constructor(self, obj: Any, source: Union["BaseSchema", Dict[Any, Any]], object_path: str) -> None:
         if isinstance(source, dict):
             source = renamed(source)
         return super().object_constructor(obj, source, object_path)
 
 
-# export as a standalone functions for backwards compatibility
-load = RenamedMapper().load
-is_obj_type_valid = RenamedMapper.is_obj_type_valid
+# exported as standalone functions for simplicity
+is_obj_type_valid = ObjectMapper().is_obj_type_valid
+map_object = ObjectMapper().map_object
 
 
-class BaseSchema(NoRenameBaseSchema):
+class ConfigSchema(BaseSchema):
     """
-    In Knot Resolver Manager, we need renamed keys most of the time, as we are using the modelling
-    tools mostly for configuration schema. That's why the normal looking name BaseSchema does renaming
-    and NoRenameBaseSchema is the opposite.
+ Same as BaseSchema, but maps with RenamingObjectMapper """ - _MAPPER: Mapper = RenamedMapper() + _MAPPER: ObjectMapper = RenamingObjectMapper() diff --git a/manager/knot_resolver_manager/utils/modeling/query.py b/manager/knot_resolver_manager/utils/modeling/query.py index bff51cabb..786cf645f 100644 --- a/manager/knot_resolver_manager/utils/modeling/query.py +++ b/manager/knot_resolver_manager/utils/modeling/query.py @@ -4,7 +4,7 @@ from typing import Any, List, Optional, Tuple, Union from typing_extensions import Literal -from knot_resolver_manager.utils.modeling.base_schema import NoRenameBaseSchema, load +from knot_resolver_manager.utils.modeling.base_schema import BaseSchema, map_object from knot_resolver_manager.utils.modeling.json_pointer import json_ptr_resolve @@ -12,7 +12,7 @@ class PatchError(Exception): pass -class Op(NoRenameBaseSchema, ABC): +class Op(BaseSchema, ABC): @abstractmethod def eval(self, fakeroot: Any) -> Any: """ @@ -169,7 +169,7 @@ def query( elif method == "patch": tp = List[Union[AddOp, RemoveOp, MoveOp, CopyOp, TestOp, ReplaceOp]] - transaction: tp = load(tp, payload) + transaction: tp = map_object(tp, payload) for i, op in enumerate(transaction): try: diff --git a/manager/tests/unit/utils/modeling/test_base_schema.py b/manager/tests/unit/utils/modeling/test_base_schema.py index c68c8c6f1..ca41572d1 100644 --- a/manager/tests/unit/utils/modeling/test_base_schema.py +++ b/manager/tests/unit/utils/modeling/test_base_schema.py @@ -4,19 +4,19 @@ import pytest from pytest import raises from typing_extensions import Literal -from knot_resolver_manager.utils.modeling import BaseSchema, parse_json, parse_yaml +from knot_resolver_manager.utils.modeling import ConfigSchema, parse_json, parse_yaml from knot_resolver_manager.utils.modeling.exceptions import DataDescriptionError, DataValidationError -class _TestBool(BaseSchema): +class _TestBool(ConfigSchema): v: bool -class _TestInt(BaseSchema): +class _TestInt(ConfigSchema): v: int -class 
_TestStr(BaseSchema): +class _TestStr(ConfigSchema): v: str @@ -54,8 +54,8 @@ def test_parsing_str_invalid(): @pytest.mark.parametrize("typ,val", [(_TestInt, 5), (_TestBool, False), (_TestStr, "test")]) -def test_parsing_nested(typ: Type[BaseSchema], val: Any): - class UpperSchema(BaseSchema): +def test_parsing_nested(typ: Type[ConfigSchema], val: Any): + class UpperSchema(ConfigSchema): l: typ yaml = f""" @@ -68,7 +68,7 @@ l: def test_parsing_simple_compound_types(): - class TestSchema(BaseSchema): + class TestSchema(ConfigSchema): l: List[int] d: Dict[str, str] t: Tuple[str, int] @@ -97,7 +97,7 @@ t: def test_parsing_nested_compound_types(): - class TestSchema(BaseSchema): + class TestSchema(ConfigSchema): i: int o: Optional[Dict[str, str]] @@ -119,7 +119,7 @@ o: def test_dash_conversion(): - class TestSchema(BaseSchema): + class TestSchema(ConfigSchema): awesome_field: Dict[str, str] yaml = """ @@ -132,7 +132,7 @@ awesome-field: def test_eq(): - class B(BaseSchema): + class B(ConfigSchema): a: _TestInt field: str @@ -147,7 +147,7 @@ def test_eq(): def test_docstring_parsing_valid(): - class NormalDescription(BaseSchema): + class NormalDescription(ConfigSchema): """ Does nothing special Really @@ -156,7 +156,7 @@ def test_docstring_parsing_valid(): desc = NormalDescription.json_schema() assert desc["description"] == "Does nothing special\nReally" - class FieldsDescription(BaseSchema): + class FieldsDescription(ConfigSchema): """ This is an awesome test class --- @@ -172,14 +172,14 @@ def test_docstring_parsing_valid(): assert schema["properties"]["field"]["description"] == "This field does nothing interesting" assert schema["properties"]["value"]["description"] == "Neither does this" - class NoDescription(BaseSchema): + class NoDescription(ConfigSchema): nothing: str _ = NoDescription.json_schema() def test_docstring_parsing_invalid(): - class AdditionalItem(BaseSchema): + class AdditionalItem(ConfigSchema): """ This class is wrong --- @@ -192,7 +192,7 @@ def 
test_docstring_parsing_invalid(): with raises(DataDescriptionError): _ = AdditionalItem.json_schema() - class WrongDescription(BaseSchema): + class WrongDescription(ConfigSchema): """ This class is wrong --- -- 2.47.3