- id: mypy
name: mypy
language: system
- entry: mypy --pretty
+ entry: mypy --pretty --follow-imports=silent
files: \.py[i]?$
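# --follow-imports=silent keeps mypy following and analysing imported modules
# for type information, but suppresses the errors reported inside them, so the
# hook only reports problems in the files it is invoked on.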
+# mypy: disable-error-code="import-not-found, attr-defined"
"""
Simplify access to the _psycopg module
"""
from __future__ import annotations
+from types import ModuleType
from . import pq
__version__: str | None = None
+_psycopg: ModuleType
# Note: "c" must the first attempt so that mypy associates the variable the
# right module interface. It will not result Optional, but hey.
if pq.__impl__ == "c":
- from psycopg_c import _psycopg as _psycopg
- from psycopg_c import __version__ as __version__ # noqa: F401
+ import psycopg_c._psycopg
+
+ _psycopg = psycopg_c._psycopg
+ __version__ = psycopg_c.__version__
+
elif pq.__impl__ == "binary":
- from psycopg_binary import _psycopg as _psycopg # type: ignore
- from psycopg_binary import __version__ as __version__ # type: ignore # noqa: F401
+ import psycopg_binary._psycopg
+
+ _psycopg = psycopg_binary._psycopg
+ __version__ = psycopg_binary.__version__
+
elif pq.__impl__ == "python":
- _psycopg = None # type: ignore
+
+ _psycopg = None # type: ignore[assignment]
+
else:
raise ImportError(f"can't find _psycopg optimised module in {pq.__impl__!r}")
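# Minimal usage sketch for the shim above (hypothetical helper, not part of the
# diff; assumes the shim is importable as psycopg._cmodule): since the pure
# Python implementation leaves _psycopg set to None, consumers guard on that
# before touching the optimised objects.
from psycopg import _cmodule

def have_c_speedups() -> bool:
    # True when either psycopg_c or psycopg_binary supplied the C module.
    return _cmodule._psycopg is not None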
else:
self.pgconn.send_query_params(command, None, result_format=result_format)
- result = (yield from generators.execute(self.pgconn))[-1]
+ result: PGresult = (yield from generators.execute(self.pgconn))[-1]
if result.status != COMMAND_OK and result.status != TUPLES_OK:
if result.status == FATAL_ERROR:
raise e.error_from_result(result, encoding=self.pgconn._encoding)
self._encoding = encoding
def parse_row(self, data: Buffer) -> tuple[Any, ...] | None:
+ rv: tuple[Any, ...] | None = None
if data:
- return parse_row_text(data, self.transformer)
- else:
- return None
+ rv = parse_row_text(data, self.transformer)
+
+ return rv
def write(self, buffer: Buffer | str) -> Buffer:
data = self._ensure_bytes(buffer)
self._signature_sent = False
def parse_row(self, data: Buffer) -> tuple[Any, ...] | None:
+ rv: tuple[Any, ...] | None = None
+
if not self._signature_sent:
if data[: len(_binary_signature)] != _binary_signature:
raise e.DataError(
self._signature_sent = True
data = data[len(_binary_signature) :]
- elif data == _binary_trailer:
- return None
- return parse_row_binary(data, self.transformer)
+ if data != _binary_trailer:
+ rv = parse_row_binary(data, self.transformer)
+ return rv
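# Both parse_row variants above now funnel their result through a single rv
# assignment and a single return. A standalone sketch of the pattern
# (hypothetical parse_maybe, not psycopg API): initialise rv with the
# fall-through value, overwrite it only in the branch that produces a row,
# and return once, so the declared "| None" has one obvious source.
from typing import Any

def parse_maybe(data: bytes) -> tuple[Any, ...] | None:
    rv: tuple[Any, ...] | None = None
    if data and data != b"\xff\xff":  # skip empty input and a trailer marker
        rv = tuple(data)
    return rv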
def write(self, buffer: Buffer | str) -> Buffer:
data = self._ensure_bytes(buffer)
yield from send(self._pgconn)
def _stream_fetchone_gen(self, first: bool) -> PQGen[PGresult | None]:
- res = yield from fetch(self._pgconn)
+ res: PGresult | None = yield from fetch(self._pgconn)
if res is None:
return None
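# The annotation pins down the value of the "yield from" expression, which is
# the delegated generator's return value. A self-contained sketch of the same
# idea (hypothetical inner/outer generators, not psycopg code):
from collections.abc import Generator

def inner() -> Generator[int, None, str]:
    yield 1
    return "done"

def outer() -> Generator[int, None, None]:
    res: str = yield from inner()  # checked against inner()'s return type
    assert res == "done"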
[[tool.mypy.overrides]]
module = [
"numpy.*",
+ "polib",
"shapely.*",
]
ignore_missing_imports = true
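# ignore_missing_imports suppresses mypy's errors about unresolvable or
# stub-less imports for the modules matched above; those imports are then
# treated as Any.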
):
while results_queue:
fetched = waiting.wait(
- pipeline_communicate(
- pgconn, # type: ignore[arg-type]
- commands,
- ),
+ pipeline_communicate(pgconn, commands),
pgconn.socket,
)
assert not commands, commands
):
while results_queue:
fetched = await waiting.wait_async(
- pipeline_communicate(
- pgconn, # type: ignore[arg-type]
- commands,
- ),
+ pipeline_communicate(pgconn, commands),
pgconn.socket,
)
assert not commands, commands
obj = getattr(_psycopg, n)
if not isinstance(obj, type):
continue
- if not issubclass(
- obj,
- (_psycopg.CDumper, _psycopg.CLoader), # type: ignore[attr-defined]
- ):
+ if not issubclass(obj, (_psycopg.CDumper, _psycopg.CLoader)):
continue
c_adapters[n] = obj
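# Generic sketch of the collection loop above (hypothetical helper, not
# psycopg API): gather every class exported by a module that subclasses any
# of the given base classes.
from types import ModuleType

def collect_subclasses(mod: ModuleType, bases: tuple[type, ...]) -> dict[str, type]:
    found: dict[str, type] = {}
    for name in dir(mod):
        obj = getattr(mod, name)
        if isinstance(obj, type) and issubclass(obj, bases):
            found[name] = obj
    return found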
import pytest
import psycopg
+import psycopg.types.numeric
from psycopg import pq
from psycopg import sql
from psycopg.adapt import PyFormat, Transformer, Dumper