python: specify or remove type-check ignores
author    Aleš Mrázek <ales.mrazek@nic.cz>  Sat, 10 Jan 2026 23:58:29 +0000 (00:58 +0100)
committer Aleš Mrázek <ales.mrazek@nic.cz>  Mon, 12 Jan 2026 13:47:51 +0000 (14:47 +0100)
Ruff linter rule PGH003: use specific rule codes when ignoring type issues

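For context, Ruff's PGH003 flags blanket type-check suppressions. A bare "# type: ignore" silences every error a checker reports on that line, while the bracketed form, e.g. "# type: ignore[arg-type]", silences only the named mypy error code, so unrelated errors introduced later on the same line still surface. A minimal illustration (function and variable names are hypothetical, not from this commit):

    from typing import Optional

    def parse_port(raw: str) -> int:
        return int(raw)

    value: Optional[str] = None

    # Blanket suppression: hides the arg-type error below, but would also
    # hide any future, unrelated error on this line.
    port = parse_port(value)  # type: ignore

    # Narrowed suppression: only mypy's arg-type error code is ignored.
    port = parse_port(value)  # type: ignore[arg-type]

The relevant error codes can be obtained by running mypy with --show-error-codes (the default in recent mypy releases).
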
python/knot_resolver/controller/interface.py
python/knot_resolver/manager/metrics/prometheus.py
python/knot_resolver/utils/async_utils.py
python/knot_resolver/utils/modeling/base_schema.py
python/knot_resolver/utils/modeling/parsing.py
python/knot_resolver/utils/modeling/renaming.py
python/knot_resolver/utils/modeling/types.py

python/knot_resolver/controller/interface.py
index e82a38dc8189460798729034341d8d0dc641ecbb..28add74935f1f7cb067b76a9806e838c06c6e42e 100644 (file)
@@ -54,7 +54,7 @@ class KresID:
             # Ignoring typing here, because I can't find a way how to make the _used dict
             # typed based on subclass. I am not even sure that it's different between subclasses,
             # it's probably still the same dict. But we don't really care about it
-            return cls._used[typ][n]  # type: ignore
+            return cls._used[typ][n]  # type: ignore[return-value]
         val = cls(typ, n, _i_know_what_i_am_doing=True)
         cls._used[typ][n] = val
         return val
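
The hunk above narrows the blanket ignore to mypy's return-value code. A reduced sketch of the underlying pattern (names simplified, not the real KresID definition): a class-level registry can only be typed against the base class, so a classmethod that promises the subclass type trips return-value on the lookup path.

    from typing import Dict, Type, TypeVar

    T = TypeVar("T", bound="Base")

    class Base:
        # Typed against Base: mypy cannot know an entry is really type(cls).
        _used: Dict[int, "Base"] = {}

        def __init__(self, n: int) -> None:
            self.n = n

        @classmethod
        def from_id(cls: Type[T], n: int) -> T:
            if n in cls._used:
                # error: Incompatible return value type (got "Base", expected "T")
                return cls._used[n]  # type: ignore[return-value]
            val = cls(n)
            cls._used[n] = val
            return val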
@@ -232,7 +232,7 @@ class Subprocess(ABC):
 
                 # proper closing of the socket is only implemented in later versions of python
                 if sys.version_info >= (3, 7):
-                    await writer.wait_closed()  # type: ignore
+                    await writer.wait_closed()
 
 
 class SubprocessController(ABC):
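
The wait_closed() ignore can simply be dropped: asyncio.StreamWriter.wait_closed() exists since CPython 3.7 and is covered by typeshed, and mypy understands sys.version_info guards, so the guarded call type-checks without suppression. A minimal sketch of the close pattern, assuming an already-connected writer:

    import asyncio
    import sys

    async def close_writer(writer: asyncio.StreamWriter) -> None:
        writer.close()
        if sys.version_info >= (3, 7):
            # Blocks until the underlying transport has actually closed.
            await writer.wait_closed()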
python/knot_resolver/manager/metrics/prometheus.py
index 0b589793e396413050e8399e2c6d94c1c7a8b4e8..4e2431665bff37a5084f9df5fed8402278d63682 100644 (file)
@@ -15,12 +15,12 @@ from .collect import collect_kresd_workers_metrics
 logger = logging.getLogger(__name__)
 
 if PROMETHEUS_LIB:
-    from prometheus_client import exposition  # type: ignore
-    from prometheus_client.bridge.graphite import GraphiteBridge  # type: ignore
+    from prometheus_client import exposition
+    from prometheus_client.bridge.graphite import GraphiteBridge
     from prometheus_client.core import (
         REGISTRY,
         CounterMetricFamily,
-        GaugeMetricFamily,  # type: ignore
+        GaugeMetricFamily,
         HistogramMetricFamily,
         Metric,
     )
@@ -31,19 +31,19 @@ if PROMETHEUS_LIB:
 
     def _counter(name: str, description: str, label: Tuple[str, str], value: float) -> CounterMetricFamily:
         c = CounterMetricFamily(name, description, labels=(label[0],))
-        c.add_metric((label[1],), value)  # type: ignore
+        c.add_metric((label[1],), value)
         return c
 
     def _gauge(name: str, description: str, label: Tuple[str, str], value: float) -> GaugeMetricFamily:
         c = GaugeMetricFamily(name, description, labels=(label[0],))
-        c.add_metric((label[1],), value)  # type: ignore
+        c.add_metric((label[1],), value)
         return c
 
     def _histogram(
         name: str, description: str, label: Tuple[str, str], buckets: List[Tuple[str, int]], sum_value: float
     ) -> HistogramMetricFamily:
         c = HistogramMetricFamily(name, description, labels=(label[0],))
-        c.add_metric((label[1],), buckets, sum_value=sum_value)  # type: ignore
+        c.add_metric((label[1],), buckets, sum_value=sum_value)
         return c
 
     def _parse_resolver_metrics(instance_id: "KresID", metrics: Any) -> Generator[Metric, None, None]:
@@ -413,7 +413,7 @@ if PROMETHEUS_LIB:
             _graphite_bridge = GraphiteBridge(
                 (str(config.monitoring.graphite.host), int(config.monitoring.graphite.port))
             )
-            _graphite_bridge.start(  # type: ignore
+            _graphite_bridge.start(
                 interval=config.monitoring.graphite.interval.seconds(), prefix=str(config.monitoring.graphite.prefix)
             )
 
@@ -441,7 +441,7 @@ async def init_prometheus(config_store: ConfigStore) -> None:
         # init and register metrics collector
         global _metrics_collector
         _metrics_collector = KresPrometheusMetricsCollector(config_store)
-        REGISTRY.register(_metrics_collector)  # type: ignore
+        REGISTRY.register(_metrics_collector)  # type: ignore[arg-type]
 
         # register graphite bridge
         await config_store.register_verifier(_deny_turning_off_graphite_bridge)
@@ -455,5 +455,5 @@ async def report_prometheus() -> Optional[bytes]:
             await _metrics_collector.collect_kresd_stats()
         else:
             raise RuntimeError("Function invoked before initializing the module!")
-        return exposition.generate_latest()  # type: ignore
+        return exposition.generate_latest()
     return None
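
Most ignores in this file could be dropped outright (presumably because prometheus_client now ships type information), while REGISTRY.register() kept a narrowed arg-type ignore. A plausible reduced example of that registration pattern (WorkerCollector is a hypothetical stand-in for KresPrometheusMetricsCollector):

    from prometheus_client.core import REGISTRY, GaugeMetricFamily

    class WorkerCollector:
        """Duck-typed collector: anything with a collect() generator works
        at runtime, but may not match the Collector type that the library's
        annotations expect."""

        def collect(self):
            g = GaugeMetricFamily(
                "kresd_workers", "Number of kresd worker processes",
                labels=["instance"],
            )
            g.add_metric(["kresd1"], 4.0)
            yield g

    # Narrowed ignore: only mypy's arg-type complaint is suppressed.
    REGISTRY.register(WorkerCollector())  # type: ignore[arg-type]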
python/knot_resolver/utils/async_utils.py
index 49838ea5d434da06f4022ec2532aa728dde7d52d..a9715b720c8db3db4369811514d09e5fc003c6e8 100644 (file)
@@ -14,7 +14,7 @@ from knot_resolver.utils.compat.asyncio import to_thread
 
 def unblock_signals() -> None:
     if sys.version_info >= (3, 8):
-        signal.pthread_sigmask(signal.SIG_UNBLOCK, signal.valid_signals())  # type: ignore
+        signal.pthread_sigmask(signal.SIG_UNBLOCK, signal.valid_signals())
     else:
         # the list of signals is not exhaustive, but it should cover all signals we might ever want to block
         signal.pthread_sigmask(
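
Here the guard already restricts the call to Python 3.8+, where signal.valid_signals() was added and is fully typed in typeshed, so the suppression is redundant. The same pattern in isolation (the fallback signal set below is illustrative, not the project's actual list):

    import signal
    import sys

    def unblock_signals() -> None:
        if sys.version_info >= (3, 8):
            # valid_signals() (3.8+) returns the full set of valid signals.
            signal.pthread_sigmask(signal.SIG_UNBLOCK, signal.valid_signals())
        else:
            # Older interpreters: an explicit, non-exhaustive set.
            signal.pthread_sigmask(signal.SIG_UNBLOCK, {signal.SIGINT, signal.SIGTERM})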
python/knot_resolver/utils/modeling/base_schema.py
index a7a9e9a059c52d9661f4dbe41fc3076f4c46585d..40c854d6533c39484895a78bb87a79f78deeceb8 100644 (file)
@@ -380,7 +380,7 @@ class ObjectMapper:
 
     def _create_default(self, obj: Any) -> Any:
         if isinstance(obj, _LazyDefault):
-            return obj.instantiate()  # type: ignore
+            return obj.instantiate()
         return obj
 
     def map_object(  # noqa: PLR0911, PLR0912
@@ -482,7 +482,7 @@ class ObjectMapper:
         if is_generic_type_wrapper(tp):
             inner_type = get_generic_type_wrapper_argument(tp)
             obj_valid = self.map_object(inner_type, obj, object_path)
-            return tp(obj_valid, object_path=object_path)  # type: ignore
+            return tp(obj_valid, object_path=object_path)
 
         # nested BaseSchema subclasses
         if inspect.isclass(tp) and issubclass(tp, BaseSchema):
@@ -606,7 +606,7 @@ class ObjectMapper:
         # As this is a delegated constructor, we must ignore protected access warnings
 
         # sanity check
-        if not isinstance(source, (BaseSchema, dict)):  # type: ignore
+        if not isinstance(source, (BaseSchema, dict)):
             raise DataValidationError(f"expected dict-like object, found '{type(source)}'", object_path)
 
         # construct lower level schema first if configured to do so
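
The first dropped ignore in this file sits on a lazy-default helper. A self-contained sketch of that pattern (LazyDefault below is a simplified stand-in for _LazyDefault, not the actual implementation): defaults are wrapped in a factory and only built on demand, so mutable default values are never shared between schema instances.

    from typing import Any, Callable, Generic, TypeVar

    T = TypeVar("T")

    class LazyDefault(Generic[T]):
        def __init__(self, factory: Callable[..., T], *args: Any, **kwargs: Any) -> None:
            self._factory = factory
            self._args = args
            self._kwargs = kwargs

        def instantiate(self) -> T:
            # Build a fresh default value every time one is requested.
            return self._factory(*self._args, **self._kwargs)

    def create_default(obj: Any) -> Any:
        # With instantiate() properly annotated, no ignore is needed here.
        if isinstance(obj, LazyDefault):
            return obj.instantiate()
        return obj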
python/knot_resolver/utils/modeling/parsing.py
index a5a862dda892c99fbdd6e55d74ae1f1bdbfb1cde..b0d92dca81d5774cdd6d85f8f10907e761a0409f 100644 (file)
@@ -29,10 +29,10 @@ class _RaiseDuplicatesLoader(yaml.SafeLoader):
             raise ConstructorError(None, None, f"expected a mapping node, but found {node.id}", node.start_mark)
         mapping: Dict[Any, Any] = {}
         for key_node, value_node in node.value:
-            key = self.construct_object(key_node, deep=deep)  # type: ignore
+            key = self.construct_object(key_node, deep=deep)
             # we need to check, that the key object can be used in a hash table
             try:
-                _ = hash(key)  # type: ignore
+                _ = hash(key)
             except TypeError as exc:
                 raise ConstructorError(
                     "while constructing a mapping",
@@ -44,7 +44,7 @@ class _RaiseDuplicatesLoader(yaml.SafeLoader):
             # check for duplicate keys
             if key in mapping:
                 raise DataParsingError(f"duplicate key detected: {key_node.start_mark}")
-            value = self.construct_object(value_node, deep=deep)  # type: ignore
+            value = self.construct_object(value_node, deep=deep)
             mapping[key] = value
         return mapping
 
@@ -57,7 +57,7 @@ class DataFormat(Enum):
         if self is DataFormat.YAML:
             # RaiseDuplicatesLoader extends yaml.SafeLoader, so this should be safe
             # https://python.land/data-processing/python-yaml#PyYAML_safe_load_vs_load
-            return renamed(yaml.load(text, Loader=_RaiseDuplicatesLoader))  # type: ignore
+            return renamed(yaml.load(text, Loader=_RaiseDuplicatesLoader))
         if self is DataFormat.JSON:
             return renamed(json.loads(text, object_pairs_hook=_json_raise_duplicates))
         raise NotImplementedError(f"Parsing of format '{self}' is not implemented")
@@ -67,7 +67,7 @@ class DataFormat(Enum):
             data = data.original()
 
         if self is DataFormat.YAML:
-            return yaml.safe_dump(data, indent=indent)  # type: ignore
+            return yaml.safe_dump(data, indent=indent)
         if self is DataFormat.JSON:
             return json.dumps(data, indent=indent)
         raise NotImplementedError(f"Exporting to '{self}' format is not implemented")
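
The ignores dropped here were on PyYAML calls (construct_object, safe_dump) that type-check cleanly once PyYAML stubs are available (e.g. the types-PyYAML package; that detail is an assumption, not stated in the commit). For illustration, the duplicate-key behaviour the custom loader exists to prevent:

    import yaml

    doc = "key: 1\nkey: 2"

    # Plain safe_load silently keeps the last occurrence:
    print(yaml.safe_load(doc))  # -> {'key': 2}

    # The _RaiseDuplicatesLoader above raises DataParsingError instead:
    # yaml.load(doc, Loader=_RaiseDuplicatesLoader)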
python/knot_resolver/utils/modeling/renaming.py
index bd30bc4da6b084508becbb0609f8d0361954ba19..09677f51a08b65b02b0ca180094a553ded07b51d 100644 (file)
@@ -68,7 +68,7 @@ class RenamedDict(Dict[K, V], Renamed):
         return dict(super().items())
 
 
-class RenamedList(List[V], Renamed):  # type: ignore
+class RenamedList(List[V], Renamed):
     def __getitem__(self, key: Any) -> Any:
         res = super().__getitem__(key)
         return renamed(res)
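
A plausible reading of the dropped class-level ignore: mixing a generic List[V] base with the Renamed mixin used to trip mypy, and newer mypy/typeshed versions accept the hierarchy. A reduced sketch of the wrapping pattern (Renamed and renamed() are simplified stand-ins for the real definitions):

    from typing import Any, List, TypeVar

    V = TypeVar("V")

    class Renamed:
        """Marker mixin: containers whose members are renamed on access."""

    def renamed(obj: Any) -> Any:
        return obj  # simplified; the real helper wraps nested dicts/lists

    class RenamedList(List[V], Renamed):
        def __getitem__(self, key: Any) -> Any:
            # Every element access is routed through the renaming wrapper.
            return renamed(super().__getitem__(key))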
python/knot_resolver/utils/modeling/types.py
index 1c0252c674fcb4303d169470140f1cc4f39c030b..6e762d0d271d54fe75c24db5cbc67f8473cfc0de 100644 (file)
@@ -22,7 +22,7 @@ def is_optional(tp: Any) -> bool:
     origin = getattr(tp, "__origin__", None)
     args = get_generic_type_arguments(tp)
 
-    return origin == Union and len(args) == 2 and args[1] == NoneType  # type: ignore
+    return origin == Union and len(args) == 2 and args[1] == NoneType
 
 
 def is_dict(tp: Any) -> bool:
@@ -43,7 +43,7 @@ def is_tuple(tp: Any) -> bool:
 
 def is_union(tp: Any) -> bool:
     """Return true even for optional types, because they are just a Union[T, NoneType]."""
-    return getattr(tp, "__origin__", None) == Union  # type: ignore
+    return getattr(tp, "__origin__", None) == Union
 
 
 def is_literal(tp: Any) -> bool:
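
Both dropped ignores in this file sit on == comparisons against typing constructs, which older checker versions flagged. The checks mirror what typing.get_origin()/get_args() report; a runnable sketch of the same introspection (assuming Python 3.8+, where get_origin and get_args are available):

    from typing import Any, Optional, Union, get_args, get_origin

    NoneType = type(None)

    def is_union(tp: Any) -> bool:
        # True for Optional[T] as well, since Optional[T] is Union[T, None].
        return get_origin(tp) is Union

    def is_optional(tp: Any) -> bool:
        args = get_args(tp)
        return get_origin(tp) is Union and len(args) == 2 and args[1] is NoneType

    assert is_union(Union[int, str])
    assert is_optional(Optional[int])
    assert not is_optional(Union[int, str])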