git.ipfire.org Git - thirdparty/knot-resolver.git/commitdiff
manager: modeling: refactoring
author Vasek Sraier <git@vakabus.cz>
Fri, 10 Mar 2023 18:36:15 +0000 (19:36 +0100)
committer Vasek Sraier <git@vakabus.cz>
Tue, 28 Mar 2023 13:24:22 +0000 (15:24 +0200)
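
Rename the modeling classes to better describe what they do:

- NoRenameBaseSchema is now BaseSchema, and the renaming variant
  previously called BaseSchema is now ConfigSchema, which the whole
  datamodel switches to.
- Mapper becomes ObjectMapper, RenamedMapper becomes
  RenamingObjectMapper, and validated_object_type() becomes
  map_object(), with the per-type branches split out into
  _create_*() helper methods.
- The standalone load() helper is replaced by map_object().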
24 files changed:
manager/knot_resolver_manager/datamodel/cache_schema.py
manager/knot_resolver_manager/datamodel/config_schema.py
manager/knot_resolver_manager/datamodel/dns64_schema.py
manager/knot_resolver_manager/datamodel/dnssec_schema.py
manager/knot_resolver_manager/datamodel/forward_zone_schema.py
manager/knot_resolver_manager/datamodel/logging_schema.py
manager/knot_resolver_manager/datamodel/lua_schema.py
manager/knot_resolver_manager/datamodel/management_schema.py
manager/knot_resolver_manager/datamodel/monitoring_schema.py
manager/knot_resolver_manager/datamodel/network_schema.py
manager/knot_resolver_manager/datamodel/options_schema.py
manager/knot_resolver_manager/datamodel/policy_schema.py
manager/knot_resolver_manager/datamodel/rpz_schema.py
manager/knot_resolver_manager/datamodel/slice_schema.py
manager/knot_resolver_manager/datamodel/static_hints_schema.py
manager/knot_resolver_manager/datamodel/stub_zone_schema.py
manager/knot_resolver_manager/datamodel/types/types.py
manager/knot_resolver_manager/datamodel/view_schema.py
manager/knot_resolver_manager/datamodel/webmgmt_schema.py
manager/knot_resolver_manager/utils/modeling/README.md
manager/knot_resolver_manager/utils/modeling/__init__.py
manager/knot_resolver_manager/utils/modeling/base_schema.py
manager/knot_resolver_manager/utils/modeling/query.py
manager/tests/unit/utils/modeling/test_base_schema.py

index 6813c68555912dc4cbb0d19f455eabf3617d2ee3..783b2c15d5494a681da3e3362a079fb3d7deb093 100644 (file)
@@ -1,10 +1,10 @@
 from typing import List, Optional
 
 from knot_resolver_manager.datamodel.types import Dir, DomainName, File, SizeUnit, TimeUnit
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class PrefillSchema(BaseSchema):
+class PrefillSchema(ConfigSchema):
     """
     Prefill the cache periodically by importing zone data obtained over HTTP.
 
@@ -25,7 +25,7 @@ class PrefillSchema(BaseSchema):
             raise ValueError("cache prefilling is not yet supported for non-root zones")
 
 
-class CacheSchema(BaseSchema):
+class CacheSchema(ConfigSchema):
     """
     DNS resolver cache configuration.
 
index 3d8bd5fa73e1c4e2ec7c43e6ff344324f5553d06..f61806c201ee811db49d9b854d17faa849566ad1 100644 (file)
@@ -26,7 +26,7 @@ from knot_resolver_manager.datamodel.stub_zone_schema import StubZoneSchema
 from knot_resolver_manager.datamodel.types import AbsoluteDir, IntPositive
 from knot_resolver_manager.datamodel.view_schema import ViewSchema
 from knot_resolver_manager.datamodel.webmgmt_schema import WebmgmtSchema
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 logger = logging.getLogger(__name__)
 
@@ -78,8 +78,8 @@ def _default_max_worker_count() -> Optional[int]:
     return MAX_WORKERS
 
 
-class KresConfig(BaseSchema):
-    class Raw(BaseSchema):
+class KresConfig(ConfigSchema):
+    class Raw(ConfigSchema):
         """
         Knot Resolver declarative configuration.
 
index 0561f56e342456a2babc1c4fc01a17184b0426b1..55d3200a21acc6197854641f14f315d6df4c1212 100644 (file)
@@ -1,8 +1,8 @@
 from knot_resolver_manager.datamodel.types import IPv6Network96
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class Dns64Schema(BaseSchema):
+class Dns64Schema(ConfigSchema):
     """
     DNS64 (RFC 6147) configuration.
 
index f81374d563118fa0b6c2ddcd6f33ae96b7d99d28..3eb5ec3d66dafca8b96da387e9e1eb03b3916326 100644 (file)
@@ -1,10 +1,10 @@
 from typing import List, Optional
 
 from knot_resolver_manager.datamodel.types import IntNonNegative, TimeUnit
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class TrustAnchorFileSchema(BaseSchema):
+class TrustAnchorFileSchema(ConfigSchema):
     """
     Trust-anchor zonefile configuration.
 
@@ -18,7 +18,7 @@ class TrustAnchorFileSchema(BaseSchema):
     read_only: bool = False
 
 
-class DnssecSchema(BaseSchema):
+class DnssecSchema(ConfigSchema):
     """
     DNSSEC configuration.
 
index b52c3ee96cac3502346b2d23cba6b3f95beef69c..8b7973b6ce48429df380614047ea95bc590a1dd1 100644 (file)
@@ -2,10 +2,10 @@ from typing import List, Optional, Union
 
 from knot_resolver_manager.datamodel.policy_schema import ForwardServerSchema
 from knot_resolver_manager.datamodel.types import DomainName, IPAddressOptionalPort, PolicyFlagEnum
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class ForwardZoneSchema(BaseSchema):
+class ForwardZoneSchema(ConfigSchema):
     """
     Configuration of Forward Zone.
 
index 1ba395685f2a478a1e421f7d8cde6e6e48af95c9..1217db233d00f3c9aea8d819c54b9a94591cf563 100644 (file)
@@ -4,7 +4,7 @@ from typing import Any, List, Optional, Set, Type, Union, cast
 from typing_extensions import Literal
 
 from knot_resolver_manager.datamodel.types import FilePath, TimeUnit
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 from knot_resolver_manager.utils.modeling.base_schema import is_obj_type_valid
 
 try:
@@ -69,7 +69,7 @@ LogGroupsEnum: TypeAlias = Literal[
 ]
 
 
-class DnstapSchema(BaseSchema):
+class DnstapSchema(ConfigSchema):
     """
     Logging DNS queries and responses to a unix socket.
 
@@ -86,7 +86,7 @@ class DnstapSchema(BaseSchema):
     log_tcp_rtt: bool = True
 
 
-class DebuggingSchema(BaseSchema):
+class DebuggingSchema(ConfigSchema):
     """
     Advanced debugging parameters for kresd (Knot Resolver daemon).
 
@@ -99,8 +99,8 @@ class DebuggingSchema(BaseSchema):
     assertion_fork: TimeUnit = TimeUnit("5m")
 
 
-class LoggingSchema(BaseSchema):
-    class Raw(BaseSchema):
+class LoggingSchema(ConfigSchema):
+    class Raw(ConfigSchema):
         """
         Logging and debugging configuration.
 
index 174c8540ac0f9a01f28bfd5f70ed0e15175bfc31..bff8e289fa1eb78faf810b7e85860067fa3844d0 100644 (file)
@@ -1,9 +1,9 @@
 from typing import Optional
 
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class LuaSchema(BaseSchema):
+class LuaSchema(ConfigSchema):
     """
     Custom Lua configuration.
 
index 80c0efafa2d1bd3b609172655721528a3f5a403d..09daa3ff321b065484d1484946f3fdbd2d24c07a 100644 (file)
@@ -1,10 +1,10 @@
 from typing import Optional
 
 from knot_resolver_manager.datamodel.types import FilePath, IPAddressPort
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class ManagementSchema(BaseSchema):
+class ManagementSchema(ConfigSchema):
     """
     Configuration of management HTTP API.
 
index d2f259ef8281c075597c11d18fc94a2210b6a0b4..e4cdabe854146bdaba6188f5a288940ac7cc084f 100644 (file)
@@ -3,10 +3,10 @@ from typing import Union
 from typing_extensions import Literal
 
 from knot_resolver_manager.datamodel.types import DomainName, IPAddress, PortNumber, TimeUnit
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class GraphiteSchema(BaseSchema):
+class GraphiteSchema(ConfigSchema):
     host: Union[IPAddress, DomainName]
     port: PortNumber = PortNumber(2003)
     prefix: str = ""
@@ -14,7 +14,7 @@ class GraphiteSchema(BaseSchema):
     tcp: bool = False
 
 
-class MonitoringSchema(BaseSchema):
+class MonitoringSchema(ConfigSchema):
     """
     ---
    enabled: configures whether the statistics module will be loaded into the resolver
index 7667bbd966cceca430992b69ab2505507267872a..0a177e395f51c888ee57a9de26f1d199bfc0575e 100644 (file)
@@ -15,12 +15,12 @@ from knot_resolver_manager.datamodel.types import (
     PortNumber,
     SizeUnit,
 )
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 KindEnum = Literal["dns", "xdp", "dot", "doh-legacy", "doh2"]
 
 
-class EdnsBufferSizeSchema(BaseSchema):
+class EdnsBufferSizeSchema(ConfigSchema):
     """
     EDNS payload size advertised in DNS packets.
 
@@ -33,7 +33,7 @@ class EdnsBufferSizeSchema(BaseSchema):
     downstream: SizeUnit = SizeUnit("1232B")
 
 
-class AddressRenumberingSchema(BaseSchema):
+class AddressRenumberingSchema(ConfigSchema):
     """
     Renumbers addresses in answers to different address space.
 
@@ -46,7 +46,7 @@ class AddressRenumberingSchema(BaseSchema):
     destination: IPAddress
 
 
-class TLSSchema(BaseSchema):
+class TLSSchema(ConfigSchema):
     """
     TLS configuration, also affects DNS over TLS and DNS over HTTPS.
 
@@ -71,8 +71,8 @@ class TLSSchema(BaseSchema):
             raise ValueError("'sticket_secret' and 'sticket_secret_file' are both defined, only one can be used")
 
 
-class ListenSchema(BaseSchema):
-    class Raw(BaseSchema):
+class ListenSchema(ConfigSchema):
+    class Raw(ConfigSchema):
         """
         Configuration of listening interface.
 
@@ -135,7 +135,7 @@ class ListenSchema(BaseSchema):
             )
 
 
-class ProxyProtocolSchema(BaseSchema):
+class ProxyProtocolSchema(ConfigSchema):
     """
     PROXYv2 protocol configuration.
 
@@ -146,7 +146,7 @@ class ProxyProtocolSchema(BaseSchema):
     allow: List[Union[IPAddress, IPNetwork]]
 
 
-class NetworkSchema(BaseSchema):
+class NetworkSchema(ConfigSchema):
     """
     Network connections and protocols configuration.
 
index 1ec5072d3dfcefe09dadf5d3df5fa6563abbeaa2..cee709a2fdb03b9ca260ec97af4fbc196122a985 100644 (file)
@@ -3,12 +3,12 @@ from typing import Any, Union
 from typing_extensions import Literal
 
 from knot_resolver_manager.datamodel.types import IntNonNegative, TimeUnit
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 GlueCheckingEnum = Literal["normal", "strict", "permissive"]
 
 
-class PredictionSchema(BaseSchema):
+class PredictionSchema(ConfigSchema):
     """
     Helps keep the cache hot by prefetching expiring records and learning usage patterns and repetitive queries.
 
@@ -21,8 +21,8 @@ class PredictionSchema(BaseSchema):
     period: IntNonNegative = IntNonNegative(24)
 
 
-class OptionsSchema(BaseSchema):
-    class Raw(BaseSchema):
+class OptionsSchema(ConfigSchema):
+    class Raw(ConfigSchema):
         """
         Fine-tuning global parameters of DNS resolver operation.
 
index 072406a0e7d99913510b470d8e6779d50c2bf54c..3f5962ff7a57b3bd98b6f042a575904d0b6b8b0b 100644 (file)
@@ -10,10 +10,10 @@ from knot_resolver_manager.datamodel.types import (
     PolicyFlagEnum,
     TimeUnit,
 )
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class FilterSchema(BaseSchema):
+class FilterSchema(ConfigSchema):
     """
     Query filtering configuration.
 
@@ -28,7 +28,7 @@ class FilterSchema(BaseSchema):
     qtype: Optional[DNSRecordTypeEnum] = None
 
 
-class AnswerSchema(BaseSchema):
+class AnswerSchema(ConfigSchema):
     """
     Configuration of custom resource record for DNS answer.
 
@@ -45,7 +45,7 @@ class AnswerSchema(BaseSchema):
     nodata: bool = False
 
 
-class ForwardServerSchema(BaseSchema):
+class ForwardServerSchema(ConfigSchema):
     """
     Configuration of Forward server.
 
@@ -92,7 +92,7 @@ def _validate_policy_action(policy_action: Union["ActionSchema", "PolicySchema"]
                 )
 
 
-class ActionSchema(BaseSchema):
+class ActionSchema(ConfigSchema):
     """
     Configuration of policy action.
 
@@ -114,7 +114,7 @@ class ActionSchema(BaseSchema):
         _validate_policy_action(self)
 
 
-class PolicySchema(BaseSchema):
+class PolicySchema(ConfigSchema):
     """
     Configuration of policy rule.
 
index 050eeed1a12b47ef4adae994ddae1ab223b22565..633e34a5be58f24d210f33821ae764b14b5f4584 100644 (file)
@@ -1,10 +1,10 @@
 from typing import List, Optional
 
 from knot_resolver_manager.datamodel.types import File, PolicyActionEnum, PolicyFlagEnum
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class RPZSchema(BaseSchema):
+class RPZSchema(ConfigSchema):
     """
     Configuration of Response Policy Zone (RPZ).
 
index 119d9a16b9d37722d39aadb0ca96c60b7dff90fd..0c7cdea19277672f180990288a68b656939735d9 100644 (file)
@@ -3,10 +3,10 @@ from typing import List, Optional
 from typing_extensions import Literal
 
 from knot_resolver_manager.datamodel.policy_schema import ActionSchema
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class SliceSchema(BaseSchema):
+class SliceSchema(ConfigSchema):
     """
     Split the entire DNS namespace into distinct slices.
 
index 40952630156a65a95e2aba7d9a64e248cd1dea65..7d39fcf40b249075fe6078fd42102367bacc48cf 100644 (file)
@@ -1,10 +1,10 @@
 from typing import Dict, List, Optional
 
 from knot_resolver_manager.datamodel.types import DomainName, File, IPAddress, TimeUnit
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class StaticHintsSchema(BaseSchema):
+class StaticHintsSchema(ConfigSchema):
     """
     Static hints for forward records (A/AAAA) and reverse records (PTR)
 
index 1ff980562c5832566196fe88c30f4a1c8e5e3a66..76e4d82c5eaa331097ac93a708d7084e3b1847e0 100644 (file)
@@ -1,10 +1,10 @@
 from typing import List, Optional, Union
 
 from knot_resolver_manager.datamodel.types import DomainName, IPAddressOptionalPort, PolicyFlagEnum
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class StubServerSchema(BaseSchema):
+class StubServerSchema(ConfigSchema):
     """
     Configuration of Stub server.
 
@@ -15,7 +15,7 @@ class StubServerSchema(BaseSchema):
     address: IPAddressOptionalPort
 
 
-class StubZoneSchema(BaseSchema):
+class StubZoneSchema(ConfigSchema):
     """
     Configuration of Stub Zone.
 
index fceb51e8968f6cd9c797452680e7c7d59203de84..02614e22644f821df458e74ef4698f4b41d7cd59 100644 (file)
@@ -1,7 +1,6 @@
 import ipaddress
 import re
-from pathlib import Path
-from typing import Any, Dict, Optional, Tuple, Type, TypeVar, Union
+from typing import Any, Dict, Optional, Type, Union
 
 from knot_resolver_manager.datamodel.types.base_types import IntRangeBase, PatternBase, StrBase, UnitBase
 from knot_resolver_manager.utils.modeling import BaseValueType
index 1e231c2112851d065400cee01c48acf1b116124f..f84ab428b229e8aa107724a0f0bad5a07c20e1b4 100644 (file)
@@ -1,10 +1,10 @@
 from typing import List, Optional
 
 from knot_resolver_manager.datamodel.types import IPNetwork, PolicyFlagEnum
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class ViewSchema(BaseSchema):
+class ViewSchema(ConfigSchema):
     """
     Configuration parameters that allow you to create personalized policy rules and more.
 
index f8174f22b20b2f2879047f19e37e77af4a98fa51..41cc33877d000791afc65e2161bc2dc852a09926 100644 (file)
@@ -1,10 +1,10 @@
 from typing import Optional
 
 from knot_resolver_manager.datamodel.types import File, FilePath, InterfacePort
-from knot_resolver_manager.utils.modeling import BaseSchema
+from knot_resolver_manager.utils.modeling import ConfigSchema
 
 
-class WebmgmtSchema(BaseSchema):
+class WebmgmtSchema(ConfigSchema):
     """
     Configuration of legacy web management endpoint.
 
index eec99e33c47251c9545d16ea9f4eb9313837d064..97c68b54e5f3ad29b6cc957ab05d8679416bfb13 100644 (file)
@@ -5,12 +5,12 @@ The utilities also take care of parsing, validating and creating JSON schemas an
 
 ## Creating schema
 
-Schema is created using `BaseSchema` class. Schema structure is specified using annotations.
+Schema is created using `ConfigSchema` class. Schema structure is specified using annotations.
 
 ```python
-from .modeling import BaseSchema
+from .modeling import ConfigSchema
 
-class SimpleSchema(BaseSchema):
+class SimpleSchema(ConfigSchema):
     integer: int = 5    # a default value can be specified
     string: str
     boolean: bool
@@ -21,7 +21,7 @@ Words in multi-word names are separated by underscore `_` (e.g. `simple_schema`)
 ```python
 from typing import Dict, List, Optional, Union
 
-class ComplexSchema(BaseSchema):
+class ComplexSchema(ConfigSchema):
     optional: Optional[str]     # this field is optional
     union: Union[int, str]      # integer and string are both valid
     list: List[int]             # list of integers
@@ -36,7 +36,7 @@ If some additional validation needs to be done, there is the `_validate()` method
 `ValueError` exception should be raised in case of validation error.
 
 ```python
-class FieldsSchema(BaseSchema):
+class FieldsSchema(ConfigSchema):
     field1: int
     field2: int
 
@@ -53,8 +53,8 @@ Transformation method must be named based on field (`value` in this example) wit
 In this example, `Layer2Schema` is the structure for the input data and `Layer1Schema` is for the result data.
 
 ```python
-class Layer1Schema(BaseSchema):
-    class Layer2Schema(BaseSchema):
+class Layer1Schema(ConfigSchema):
+    class Layer2Schema(ConfigSchema):
         value: Union[str, int]
 
     _LAYER = Layer2Schema
@@ -72,7 +72,7 @@ class Layer1Schema(BaseSchema):
 A created schema can be documented using a simple docstring. A JSON schema is created by calling the `json_schema()` method on the schema class; it includes the description from the docstring, defaults, etc.
 
 ```python
-SimpleSchema(BaseSchema):
+SimpleSchema(ConfigSchema):
     """
     This is description for SimpleSchema itself.
 
index 9404775fa5c55efe5144cadac8d974e1407002ef..ec1ab6d720a70d6a67bf3c5ba0d50f4960a51d51 100644 (file)
@@ -1,10 +1,11 @@
-from .base_schema import BaseSchema
+from .base_schema import BaseSchema, ConfigSchema
 from .base_value_type import BaseValueType
 from .parsing import parse, parse_json, parse_yaml, try_to_parse
 
 __all__ = [
     "BaseValueType",
     "BaseSchema",
+    "ConfigSchema",
     "parse",
     "parse_yaml",
     "parse_json",
index b35367909a43e9dd67580272d9ba3bb4a89f6c79..ddf096114f0010869b203e5a6cc977770399cbb7 100644 (file)
@@ -11,7 +11,6 @@ from .base_value_type import BaseValueType
 from .exceptions import AggregateDataValidationError, DataDescriptionError, DataValidationError
 from .renaming import Renamed, renamed
 from .types import (
-    NoneType,
     get_generic_type_argument,
     get_generic_type_arguments,
     get_optional_inner_type,
@@ -26,7 +25,6 @@ from .types import (
     is_union,
 )
 
-
 T = TypeVar("T")
 
 
@@ -198,7 +196,7 @@ def _describe_type(typ: Type[Any]) -> Dict[Any, Any]:
     raise NotImplementedError(f"Trying to get JSON schema for type '{typ}', which is not implemented")
 
 
-TSource = Union[None, "NoRenameBaseSchema", Dict[str, Any]]
+TSource = Union[None, "BaseSchema", Dict[str, Any]]
 
 
 def _create_untouchable(name: str) -> object:
@@ -212,14 +210,14 @@ def _create_untouchable(name: str) -> object:
     return _Untouchable()
 
 
-class Mapper:
-    def _validated_tuple(self, tp: Type[Any], obj: Tuple[Any, ...], object_path: str) -> Tuple[Any, ...]:
+class ObjectMapper:
+    def _create_tuple(self, tp: Type[Any], obj: Tuple[Any, ...], object_path: str) -> Tuple[Any, ...]:
         types = get_generic_type_arguments(tp)
         errs: List[DataValidationError] = []
         res: List[Any] = []
         for i, (t, val) in enumerate(zip(types, obj)):
             try:
-                res.append(self.validated_object_type(t, val, object_path=f"{object_path}[{i}]"))
+                res.append(self.map_object(t, val, object_path=f"{object_path}[{i}]"))
             except DataValidationError as e:
                 errs.append(e)
         if len(errs) == 1:
@@ -228,15 +226,15 @@ class Mapper:
             raise AggregateDataValidationError(object_path, child_exceptions=errs)
         return tuple(res)
 
-    def _validated_dict(self, tp: Type[Any], obj: Dict[Any, Any], object_path: str) -> Dict[Any, Any]:
+    def _create_dict(self, tp: Type[Any], obj: Dict[Any, Any], object_path: str) -> Dict[Any, Any]:
         key_type, val_type = get_generic_type_arguments(tp)
         try:
             errs: List[DataValidationError] = []
             res: Dict[Any, Any] = {}
             for key, val in obj.items():
                 try:
-                    nkey = self.validated_object_type(key_type, key, object_path=f"{object_path}[{key}]")
-                    nval = self.validated_object_type(val_type, val, object_path=f"{object_path}[{key}]")
+                    nkey = self.map_object(key_type, key, object_path=f"{object_path}[{key}]")
+                    nval = self.map_object(val_type, val, object_path=f"{object_path}[{key}]")
                     res[nkey] = nval
                 except DataValidationError as e:
                     errs.append(e)
@@ -250,13 +248,16 @@ class Mapper:
                 f"Expected dict-like object, but failed to access its .items() method. Value was {obj}", object_path
             ) from e
 
-    def _validated_list(self, tp: Type[Any], obj: List[Any], object_path: str) -> List[Any]:
+    def _create_list(self, tp: Type[Any], obj: List[Any], object_path: str) -> List[Any]:
+        if isinstance(obj, str):
+            raise DataValidationError("expected list, got string", object_path)
+
         inner_type = get_generic_type_argument(tp)
         errs: List[DataValidationError] = []
         res: List[Any] = []
         for i, val in enumerate(obj):
             try:
-                res.append(self.validated_object_type(inner_type, val, object_path=f"{object_path}[{i}]"))
+                res.append(self.map_object(inner_type, val, object_path=f"{object_path}[{i}]"))
             except DataValidationError as e:
                 errs.append(e)
         if len(errs) == 1:
@@ -265,7 +266,81 @@ class Mapper:
             raise AggregateDataValidationError(object_path, child_exceptions=errs)
         return res
 
-    def validated_object_type(
+    def _create_str(self, obj: Any, object_path: str) -> str:
+        # we are willing to cast any primitive value to string, but no compound values are allowed
+        if is_obj_type(obj, (str, float, int)) or isinstance(obj, BaseValueType):
+            return str(obj)
+        elif is_obj_type(obj, bool):
+            raise DataValidationError(
+                "Expected str, found bool. Be careful, that YAML parsers consider even"
+                ' "no" and "yes" as a bool. Search for the Norway Problem for more'
+                " details. And please use quotes explicitly.",
+                object_path,
+            )
+        else:
+            raise DataValidationError(
+                f"expected str (or number that would be cast to string), but found type {type(obj)}", object_path
+            )
+
+    def _create_int(self, obj: Any, object_path: str) -> int:
+        # we don't want to make an int out of anything else than other int
+        # except for BaseValueType class instances
+        if is_obj_type(obj, int) or isinstance(obj, BaseValueType):
+            return int(obj)
+        raise DataValidationError(f"expected int, found {type(obj)}", object_path)
+
+    def _create_union(self, tp: Type[T], obj: Any, object_path: str) -> T:
+        variants = get_generic_type_arguments(tp)
+        errs: List[DataValidationError] = []
+        for v in variants:
+            try:
+                return self.map_object(v, obj, object_path=object_path)
+            except DataValidationError as e:
+                errs.append(e)
+
+        raise DataValidationError("could not parse any of the possible variants", object_path, child_exceptions=errs)
+
+    def _create_optional(self, tp: Type[Optional[T]], obj: Any, object_path: str) -> Optional[T]:
+        inner: Type[Any] = get_optional_inner_type(tp)
+        if obj is None:
+            return None
+        else:
+            return self.map_object(inner, obj, object_path=object_path)
+
+    def _create_bool(self, obj: Any, object_path: str) -> bool:
+        if is_obj_type(obj, bool):
+            return obj
+        else:
+            raise DataValidationError(f"expected bool, found {type(obj)}", object_path)
+
+    def _create_literal(self, tp: Type[Any], obj: Any, object_path: str) -> Any:
+        expected = get_generic_type_arguments(tp)
+        if obj in expected:
+            return obj
+        else:
+            raise DataValidationError(f"'{obj}' does not match any of the expected values {expected}", object_path)
+
+    def _create_base_schema_object(self, tp: Type[Any], obj: Any, object_path: str) -> "BaseSchema":
+        if isinstance(obj, (dict, BaseSchema)):
+            return tp(obj, object_path=object_path)
+        raise DataValidationError(f"expected 'dict' or 'NoRenameBaseSchema' object, found '{type(obj)}'", object_path)
+
+    def create_value_type_object(self, tp: Type[Any], obj: Any, object_path: str) -> "BaseValueType":
+        if isinstance(obj, tp):
+            # if we already have a custom value type, just pass it through
+            return obj
+        else:
+            # no validation performed, the implementation does it in the constructor
+            try:
+                return tp(obj, object_path=object_path)
+            except ValueError as e:
+                if len(e.args) > 0 and isinstance(e.args[0], str):
+                    msg = e.args[0]
+                else:
+                    msg = f"Failed to validate value against {tp} type"
+                raise DataValidationError(msg, object_path) from e
+
+    def map_object(
         self,
         tp: Type[Any],
         obj: Any,
@@ -274,7 +349,8 @@ class Mapper:
         object_path: str = "/",
     ) -> Any:
         """
-        Given an expected type `cls` and a value object `obj`, validate the type of `obj` and return it
+        Given an expected type `tp` and a value object `obj`, return a new object of the given type and map fields of `obj` into it. During the mapping procedure,
+        runtime type checking is performed.
         """
 
         # Disabling these checks, because I think it's much more readable as a single function
@@ -294,25 +370,11 @@ class Mapper:
 
         # Optional[T]  (could be technically handled by Union[*variants], but this way we have better error reporting)
         elif is_optional(tp):
-            inner: Type[Any] = get_optional_inner_type(tp)
-            if obj is None:
-                return None
-            else:
-                return self.validated_object_type(inner, obj, object_path=object_path)
+            return self._create_optional(tp, obj, object_path)
 
         # Union[*variants]
         elif is_union(tp):
-            variants = get_generic_type_arguments(tp)
-            errs: List[DataValidationError] = []
-            for v in variants:
-                try:
-                    return self.validated_object_type(v, obj, object_path=object_path)
-                except DataValidationError as e:
-                    errs.append(e)
-
-            raise DataValidationError(
-                "could not parse any of the possible variants", object_path, child_exceptions=errs
-            )
+            return self._create_union(tp, obj, object_path)
 
         # after this, there is no place for a None object
         elif obj is None:
@@ -320,54 +382,30 @@ class Mapper:
 
         # int
         elif tp == int:
-            # we don't want to make an int out of anything else than other int
-            # except for BaseValueType class instances
-            if is_obj_type(obj, int) or isinstance(obj, BaseValueType):
-                return int(obj)
-            raise DataValidationError(f"expected int, found {type(obj)}", object_path)
+            return self._create_int(obj, object_path)
 
         # str
         elif tp == str:
-            # we are willing to cast any primitive value to string, but no compound values are allowed
-            if is_obj_type(obj, (str, float, int)) or isinstance(obj, BaseValueType):
-                return str(obj)
-            elif is_obj_type(obj, bool):
-                raise DataValidationError(
-                    "Expected str, found bool. Be careful, that YAML parsers consider even"
-                    ' "no" and "yes" as a bool. Search for the Norway Problem for more'
-                    " details. And please use quotes explicitly.",
-                    object_path,
-                )
-            else:
-                raise DataValidationError(
-                    f"expected str (or number that would be cast to string), but found type {type(obj)}", object_path
-                )
+            return self._create_str(obj, object_path)
 
         # bool
         elif tp == bool:
-            if is_obj_type(obj, bool):
-                return obj
-            else:
-                raise DataValidationError(f"expected bool, found {type(obj)}", object_path)
+            return self._create_bool(obj, object_path)
 
         # float
         elif tp == float:
             raise NotImplementedError(
-                "Floating point values are not supported in the parser."
+                "Floating point values are not supported in the object mapper."
                 " Please implement them and be careful with type coercions"
             )
 
         # Literal[T]
         elif is_literal(tp):
-            expected = get_generic_type_arguments(tp)
-            if obj in expected:
-                return obj
-            else:
-                raise DataValidationError(f"'{obj}' does not match any of the expected values {expected}", object_path)
+            return self._create_literal(tp, obj, object_path)
 
         # Dict[K,V]
         elif is_dict(tp):
-            return self._validated_dict(tp, obj, object_path)
+            return self._create_dict(tp, obj, object_path)
 
         # any Enums (probably used only internally in DataValidator)
         elif is_enum(tp):
@@ -378,13 +416,11 @@ class Mapper:
 
         # List[T]
         elif is_list(tp):
-            if isinstance(obj, str):
-                raise DataValidationError("expected list, got string", object_path)
-            return self._validated_list(tp, obj, object_path)
+            return self._create_list(tp, obj, object_path)
 
         # Tuple[A,B,C,D,...]
         elif is_tuple(tp):
-            return self._validated_tuple(tp, obj, object_path)
+            return self._create_tuple(tp, obj, object_path)
 
         # type of obj and cls type match
         elif is_obj_type(obj, tp):
@@ -397,11 +433,11 @@ class Mapper:
 
         # BaseValueType subclasses
         elif inspect.isclass(tp) and issubclass(tp, BaseValueType):
-            return self.construct_value_type(tp, obj, object_path)
+            return self.create_value_type_object(tp, obj, object_path)
 
         # nested BaseSchema subclasses
-        elif inspect.isclass(tp) and issubclass(tp, NoRenameBaseSchema):
-            return self.construct_base_schema(tp, obj, object_path)
+        elif inspect.isclass(tp) and issubclass(tp, BaseSchema):
+            return self._create_base_schema_object(tp, obj, object_path)
 
         # if the object matches, just pass it through
         elif inspect.isclass(tp) and isinstance(obj, tp):
@@ -415,39 +451,13 @@ class Mapper:
                 object_path,
             )
 
-    def construct_base_schema(self, tp: Type[Any], obj: Any, object_path: str) -> "NoRenameBaseSchema":
-        if isinstance(obj, (dict, NoRenameBaseSchema)):
-            return tp(obj, object_path=object_path)  # type: ignore
-        raise DataValidationError(
-            f"expected 'dict' or 'NoRenameBaseSchema' object, found '{type(obj)}'", object_path
-        )
-
-    def construct_value_type(self, tp: Type[Any], obj: Any, object_path: str) -> "BaseValueType":
-        if isinstance(obj, tp):
-            # if we already have a custom value type, just pass it through
-            return obj
-        else:
-            # no validation performed, the implementation does it in the constuctor
-            try:
-                return tp(obj, object_path=object_path)
-            except ValueError as e:
-                if len(e.args) > 0 and isinstance(e.args[0], str):
-                    msg = e.args[0]
-                else:
-                    msg = f"Failed to validate value against {tp} type"
-                raise DataValidationError(msg, object_path) from e
-
-    def load(self, clazz: Type[T], obj: Any, default: Any = ..., use_default: bool = False) -> T:
-        return self.validated_object_type(clazz, obj, default, use_default)
-
-    @classmethod
-    def is_obj_type_valid(cls, obj: Any, tp: Type[Any]) -> bool:
+    def is_obj_type_valid(self, obj: Any, tp: Type[Any]) -> bool:
         """
         Runtime type checking. Validate, that a given object is of a given type.
         """
 
         try:
-            cls().validated_object_type(tp, obj)
+            self.map_object(tp, obj)
             return True
         except (DataValidationError, ValueError):
             return False
@@ -455,14 +465,14 @@ class Mapper:
     def _assign_default(self, obj: Any, name: str, python_type: Any, object_path: str) -> None:
         cls = obj.__class__
         default = getattr(cls, name, None)
-        value = self.validated_object_type(python_type, default, object_path=f"{object_path}/{name}")
+        value = self.map_object(python_type, default, object_path=f"{object_path}/{name}")
         setattr(obj, name, value)
 
     def _assign_field(self, obj: Any, name: str, python_type: Any, value: Any, object_path: str) -> None:
-        value = self.validated_object_type(python_type, value, object_path=f"{object_path}/{name}")
+        value = self.map_object(python_type, value, object_path=f"{object_path}/{name}")
         setattr(obj, name, value)
 
-    def _assign_fields(self, obj: Any, source: Union[Dict[str, Any], "NoRenameBaseSchema", None], object_path: str) -> Set[str]:
+    def _assign_fields(self, obj: Any, source: Union[Dict[str, Any], "BaseSchema", None], object_path: str) -> Set[str]:
         """
         Order of assignment:
           1. all direct assignments
@@ -539,16 +549,19 @@ class Mapper:
                 msg = "Failed to validate value type"
             raise DataValidationError(msg, object_path) from e
 
-    def object_constructor(self, obj: Any, source: TSource, object_path: str) -> None:
-        # make sure that all raw data checks passed on the source object
-        if source is None:
-            source = {}
+    def object_constructor(self, obj: Any, source: Union["BaseSchema", Dict[Any, Any]], object_path: str) -> None:
+        """
+        Delegated constructor for the BaseSchema class.
 
-        if not isinstance(source, (NoRenameBaseSchema, dict)):
-            raise DataValidationError(f"expected dict-like object, found '{type(source)}'", object_path)
+        This method is delegated to the mapper because of renaming: this way, we don't have to
+        maintain a different BaseSchema class when we want dynamically renamed fields.
+        """
+        # As this is a delegated constructor, we must ignore protected access warnings
+        # pylint: disable=protected-access
 
-        # save source (2 underscores should invoke Python's build-in mangling and we wont hopefully have collistions with data fields)
-        obj.__source: Union[Dict[str, Any], NoRenameBaseSchema] = source  # type: ignore
+        # sanity check
+        if not isinstance(source, (BaseSchema, dict)):  # type: ignore
+            raise DataValidationError(f"expected dict-like object, found '{type(source)}'", object_path)
 
         # construct lower level schema first if configured to do so
         if obj._LAYER is not None:
@@ -558,7 +571,7 @@ class Mapper:
         used_keys = self._assign_fields(obj, source, object_path)
 
         # check for unused keys in the source object
-        if source and not isinstance(source, NoRenameBaseSchema):
+        if source and not isinstance(source, BaseSchema):
             unused = source.keys() - used_keys
             if len(unused) > 0:
                 keys = ", ".join((f"'{u}'" for u in unused))
@@ -574,7 +587,7 @@ class Mapper:
             raise DataValidationError(e.args[0] if len(e.args) > 0 else "Validation error", object_path) from e
 
 
-class NoRenameBaseSchema(Serializable):
+class BaseSchema(Serializable):
     """
     Base class for modeling configuration schema. It somewhat resembles standard dataclasses with additional
     functionality:
@@ -637,15 +650,22 @@ class NoRenameBaseSchema(Serializable):
     See tests/utils/test_modelling.py for example usage.
     """
 
-    _LAYER: Optional[Type["NoRenameBaseSchema"]] = None
-    _MAPPER: Mapper = Mapper()
+    _LAYER: Optional[Type["BaseSchema"]] = None
+    _MAPPER: ObjectMapper = ObjectMapper()
+
+    def __init_subclass__(cls) -> None:
+        return super().__init_subclass__()
 
     def __init__(self, source: TSource = None, object_path: str = ""):
+        # save source data (and drop information about nullness)
+        source = source or {}
+        self.__source: Union[Dict[str, Any], BaseSchema] = source
+
+        # delegate the rest of the constructor
         self._MAPPER.object_constructor(self, source, object_path)
-        self.__source: Union[Dict[str, Any], NoRenameBaseSchema]
 
     def get_unparsed_data(self) -> Dict[str, Any]:
-        if isinstance(self.__source, NoRenameBaseSchema):
+        if isinstance(self.__source, BaseSchema):
             return self.__source.get_unparsed_data()
         elif isinstance(self.__source, Renamed):
             return self.__source.original()
@@ -678,7 +698,7 @@ class NoRenameBaseSchema(Serializable):
         return True
 
     @classmethod
-    def json_schema(cls: Type["NoRenameBaseSchema"], include_schema_definition: bool = True) -> Dict[Any, Any]:
+    def json_schema(cls: Type["BaseSchema"], include_schema_definition: bool = True) -> Dict[Any, Any]:
         if cls._LAYER is not None:
             return cls._LAYER.json_schema(include_schema_definition=include_schema_definition)
 
@@ -702,33 +722,39 @@ class NoRenameBaseSchema(Serializable):
         return res
 
 
-class RenamedMapper(Mapper):
-    def _validated_dict(self, tp: Type[Any], obj: Dict[Any, Any], object_path: str) -> Dict[Any, Any]:
+class RenamingObjectMapper(ObjectMapper):
+    """
+    Same as ObjectMapper, but it uses the collection wrappers from the `renaming` module to perform dynamic field renaming.
+
+    More specifically:
+    - it renames all properties in (nested) objects
+    - it does not rename keys in dictionaries
+    """
+
+    def _create_dict(self, tp: Type[Any], obj: Dict[Any, Any], object_path: str) -> Dict[Any, Any]:
         if isinstance(obj, Renamed):
             obj = obj.original()
-        return super()._validated_dict(tp, obj, object_path)
+        return super()._create_dict(tp, obj, object_path)
 
-    def construct_base_schema(self, tp: Type[Any], obj: Any, object_path: str) -> "NoRenameBaseSchema":
+    def _create_base_schema_object(self, tp: Type[Any], obj: Any, object_path: str) -> "BaseSchema":
         if isinstance(obj, dict):
             obj = renamed(obj)
-        return super().construct_base_schema(tp, obj, object_path)
+        return super()._create_base_schema_object(tp, obj, object_path)
 
-    def object_constructor(self, obj: Any, source: TSource, object_path: str) -> None:
+    def object_constructor(self, obj: Any, source: Union["BaseSchema", Dict[Any, Any]], object_path: str) -> None:
         if isinstance(source, dict):
             source = renamed(source)
         return super().object_constructor(obj, source, object_path)
 
 
-# export as a standalone functions for backwards compatibility
-load = RenamedMapper().load
-is_obj_type_valid = RenamedMapper.is_obj_type_valid
+# export as standalone functions for simplicity
+is_obj_type_valid = ObjectMapper().is_obj_type_valid
+map_object = ObjectMapper().map_object
 
 
-class BaseSchema(NoRenameBaseSchema):
+class ConfigSchema(BaseSchema):
     """
-    In Knot Resolver Manager, we need renamed keys most of the time, as we are using the modelling
-    tools mostly for configuration schema. That's why the normal looking name BaseSchema does renaming
-    and NoRenameBaseSchema is the opposite.
+    Same as BaseSchema, but maps with RenamingObjectMapper
     """
 
-    _MAPPER: Mapper = RenamedMapper()
+    _MAPPER: ObjectMapper = RenamingObjectMapper()
index bff51cabb6c0ac3084dfadbccdccd5c7dfdc989a..786cf645fafa4138a78dd7a662d6d9c853f12124 100644 (file)
@@ -4,7 +4,7 @@ from typing import Any, List, Optional, Tuple, Union
 
 from typing_extensions import Literal
 
-from knot_resolver_manager.utils.modeling.base_schema import NoRenameBaseSchema, load
+from knot_resolver_manager.utils.modeling.base_schema import BaseSchema, map_object
 from knot_resolver_manager.utils.modeling.json_pointer import json_ptr_resolve
 
 
@@ -12,7 +12,7 @@ class PatchError(Exception):
     pass
 
 
-class Op(NoRenameBaseSchema, ABC):
+class Op(BaseSchema, ABC):
     @abstractmethod
     def eval(self, fakeroot: Any) -> Any:
         """
@@ -169,7 +169,7 @@ def query(
 
     elif method == "patch":
         tp = List[Union[AddOp, RemoveOp, MoveOp, CopyOp, TestOp, ReplaceOp]]
-        transaction: tp = load(tp, payload)
+        transaction: tp = map_object(tp, payload)
 
         for i, op in enumerate(transaction):
             try:
index c68c8c6f1359e81bfca07692feff7a6914dad1c1..ca41572d1de4c5b92a5860c70fe1bc7c5c363292 100644 (file)
@@ -4,19 +4,19 @@ import pytest
 from pytest import raises
 from typing_extensions import Literal
 
-from knot_resolver_manager.utils.modeling import BaseSchema, parse_json, parse_yaml
+from knot_resolver_manager.utils.modeling import ConfigSchema, parse_json, parse_yaml
 from knot_resolver_manager.utils.modeling.exceptions import DataDescriptionError, DataValidationError
 
 
-class _TestBool(BaseSchema):
+class _TestBool(ConfigSchema):
     v: bool
 
 
-class _TestInt(BaseSchema):
+class _TestInt(ConfigSchema):
     v: int
 
 
-class _TestStr(BaseSchema):
+class _TestStr(ConfigSchema):
     v: str
 
 
@@ -54,8 +54,8 @@ def test_parsing_str_invalid():
 
 
 @pytest.mark.parametrize("typ,val", [(_TestInt, 5), (_TestBool, False), (_TestStr, "test")])
-def test_parsing_nested(typ: Type[BaseSchema], val: Any):
-    class UpperSchema(BaseSchema):
+def test_parsing_nested(typ: Type[ConfigSchema], val: Any):
+    class UpperSchema(ConfigSchema):
         l: typ
 
     yaml = f"""
@@ -68,7 +68,7 @@ l:
 
 
 def test_parsing_simple_compound_types():
-    class TestSchema(BaseSchema):
+    class TestSchema(ConfigSchema):
         l: List[int]
         d: Dict[str, str]
         t: Tuple[str, int]
@@ -97,7 +97,7 @@ t:
 
 
 def test_parsing_nested_compound_types():
-    class TestSchema(BaseSchema):
+    class TestSchema(ConfigSchema):
         i: int
         o: Optional[Dict[str, str]]
 
@@ -119,7 +119,7 @@ o:
 
 
 def test_dash_conversion():
-    class TestSchema(BaseSchema):
+    class TestSchema(ConfigSchema):
         awesome_field: Dict[str, str]
 
     yaml = """
@@ -132,7 +132,7 @@ awesome-field:
 
 
 def test_eq():
-    class B(BaseSchema):
+    class B(ConfigSchema):
         a: _TestInt
         field: str
 
@@ -147,7 +147,7 @@ def test_eq():
 
 
 def test_docstring_parsing_valid():
-    class NormalDescription(BaseSchema):
+    class NormalDescription(ConfigSchema):
         """
         Does nothing special
         Really
@@ -156,7 +156,7 @@ def test_docstring_parsing_valid():
     desc = NormalDescription.json_schema()
     assert desc["description"] == "Does nothing special\nReally"
 
-    class FieldsDescription(BaseSchema):
+    class FieldsDescription(ConfigSchema):
         """
         This is an awesome test class
         ---
@@ -172,14 +172,14 @@ def test_docstring_parsing_valid():
     assert schema["properties"]["field"]["description"] == "This field does nothing interesting"
     assert schema["properties"]["value"]["description"] == "Neither does this"
 
-    class NoDescription(BaseSchema):
+    class NoDescription(ConfigSchema):
         nothing: str
 
     _ = NoDescription.json_schema()
 
 
 def test_docstring_parsing_invalid():
-    class AdditionalItem(BaseSchema):
+    class AdditionalItem(ConfigSchema):
         """
         This class is wrong
         ---
@@ -192,7 +192,7 @@ def test_docstring_parsing_invalid():
     with raises(DataDescriptionError):
         _ = AdditionalItem.json_schema()
 
-    class WrongDescription(BaseSchema):
+    class WrongDescription(ConfigSchema):
         """
         This class is wrong
         ---
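
For orientation, here is a minimal sketch (not part of the commit) of how the renamed API fits together after this change. `ExampleSchema` and its field are hypothetical; the behaviour follows the README and tests shown above.

```python
# Hypothetical example; follows the post-refactor API shown in this diff.
from typing import List

from knot_resolver_manager.utils.modeling import ConfigSchema
from knot_resolver_manager.utils.modeling.base_schema import map_object


class ExampleSchema(ConfigSchema):
    awesome_field: int = 5


# ConfigSchema maps through RenamingObjectMapper, so dashed keys in the
# input are renamed to underscored field names:
instance = ExampleSchema({"awesome-field": 10})
assert instance.awesome_field == 10

# map_object() replaces the removed standalone load() helper:
numbers: List[int] = map_object(List[int], [1, 2, 3])
assert numbers == [1, 2, 3]
```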