from ._config import Proxy, Timeout # noqa: F401
from ._models import URL, Cookies, Headers, QueryParams, Request # noqa: F401
-StrOrBytes = Union[str, bytes]
PrimitiveData = Optional[Union[str, int, float, bool]]
]
HeaderTypes = Union[
- "Headers", Dict[StrOrBytes, StrOrBytes], Sequence[Tuple[StrOrBytes, StrOrBytes]],
+ "Headers",
+ Dict[str, str],
+ Dict[bytes, bytes],
+ Sequence[Tuple[str, str]],
+ Sequence[Tuple[bytes, bytes]],
]
CookieTypes = Union["Cookies", CookieJar, Dict[str, str]]
from urllib.request import getproxies
from ._exceptions import NetworkError
-from ._types import PrimitiveData, StrOrBytes
+from ._types import PrimitiveData
if typing.TYPE_CHECKING: # pragma: no cover
from ._models import URL
)
def normalize_header_key(
    value: typing.Union[str, bytes], encoding: typing.Optional[str] = None
) -> bytes:
    """
    Coerce str/bytes into a strictly byte-wise HTTP header key.

    Header keys are case-insensitive on the wire, so the result is always
    lowercased bytes.

    * value    -- the header key, as either `str` or `bytes`.
    * encoding -- codec used to encode a `str` key; defaults to "ascii",
                  which is what RFC 7230 permits for header field names.

    Returns the lowercased key as `bytes`.
    """
    # Bytes input must not be re-encoded -- `bytes` has no `.encode()`,
    # so without this branch a bytes key raises AttributeError.
    if isinstance(value, bytes):
        return value.lower()
    return value.encode(encoding or "ascii").lower()
-def normalize_header_value(value: StrOrBytes, encoding: str = None) -> bytes:
+def normalize_header_value(
+ value: typing.Union[str, bytes], encoding: str = None
+) -> bytes:
"""
Coerce str/bytes into a strictly byte-wise HTTP header value.
"""
def obfuscate_sensitive_headers(
- items: typing.Iterable[typing.Tuple[StrOrBytes, StrOrBytes]]
-) -> typing.Iterator[typing.Tuple[StrOrBytes, StrOrBytes]]:
+ items: typing.Iterable[typing.Tuple[typing.AnyStr, typing.AnyStr]]
+) -> typing.Iterator[typing.Tuple[typing.AnyStr, typing.AnyStr]]:
for k, v in items:
if to_str(k.lower()) in SENSITIVE_HEADERS:
v = to_bytes_or_str("[secure]", match_type_of=v)
return value if isinstance(value, str) else value.decode(encoding)
def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr:
    """
    Return *value* coerced to the same text type as *match_type_of*.

    If *match_type_of* is a `str`, *value* is returned unchanged; otherwise
    *value* is UTF-8 encoded so the result is `bytes` to match.
    """
    if isinstance(match_type_of, str):
        return value
    return value.encode()