HTTPError,
HTTPStatusError,
InvalidURL,
+ LocalProtocolError,
NetworkError,
NotRedirectResponse,
PoolTimeout,
ProxyError,
ReadError,
ReadTimeout,
+ RemoteProtocolError,
RequestBodyUnavailable,
RequestError,
RequestNotRead,
TimeoutException,
TooManyRedirects,
TransportError,
+ UnsupportedProtocol,
WriteError,
WriteTimeout,
)
"HTTPError",
"HTTPStatusError",
"InvalidURL",
+ "UnsupportedProtocol",
+ "LocalProtocolError",
+ "RemoteProtocolError",
"NetworkError",
"NotRedirectResponse",
"PoolTimeout",
from ._content_streams import ContentStream
from ._exceptions import (
HTTPCORE_EXC_MAP,
- InvalidURL,
RequestBodyUnavailable,
TooManyRedirects,
map_exceptions,
from ._utils import (
NetRCInfo,
URLPattern,
- enforce_http_url,
get_environment_proxies,
get_logger,
same_origin,
url = URL(location)
- # Check that we can handle the scheme
- if url.scheme and url.scheme not in ("http", "https"):
- message = f'Scheme "{url.scheme}" not supported.'
- raise InvalidURL(message, request=request)
-
# Handle malformed 'Location' headers that are in "absolute" form, but have no host.
# See: https://github.com/encode/httpx/issues/771
if url.scheme and not url.host:
return httpcore.SyncConnectionPool(
ssl_context=ssl_context,
- max_keepalive=limits.max_keepalive,
max_connections=limits.max_connections,
+ max_keepalive_connections=limits.max_keepalive_connections,
keepalive_expiry=KEEPALIVE_EXPIRY,
http2=http2,
)
proxy_headers=proxy.headers.raw,
proxy_mode=proxy.mode,
ssl_context=ssl_context,
- max_keepalive=limits.max_keepalive,
max_connections=limits.max_connections,
+ max_keepalive_connections=limits.max_keepalive_connections,
keepalive_expiry=KEEPALIVE_EXPIRY,
http2=http2,
)
- def _transport_for_url(self, request: Request) -> httpcore.SyncHTTPTransport:
+ def _transport_for_url(self, url: URL) -> httpcore.SyncHTTPTransport:
"""
Returns the transport instance that should be used for a given URL.
This will either be the standard connection pool, or a proxy.
"""
- url = request.url
- enforce_http_url(request)
-
for pattern, transport in self._proxies.items():
if pattern.matches(url):
return self._transport if transport is None else transport
allow_redirects: bool = True,
timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
) -> Response:
- if request.url.scheme not in ("http", "https"):
- message = 'URL scheme must be "http" or "https".'
- raise InvalidURL(message, request=request)
-
timeout = self.timeout if isinstance(timeout, UnsetType) else Timeout(timeout)
auth = self._build_auth(request, auth)
"""
Sends a single request, without handling any redirections.
"""
- transport = self._transport_for_url(request)
+ transport = self._transport_for_url(request.url)
with map_exceptions(HTTPCORE_EXC_MAP, request=request):
(
return httpcore.AsyncConnectionPool(
ssl_context=ssl_context,
- max_keepalive=limits.max_keepalive,
max_connections=limits.max_connections,
+ max_keepalive_connections=limits.max_keepalive_connections,
keepalive_expiry=KEEPALIVE_EXPIRY,
http2=http2,
)
proxy_headers=proxy.headers.raw,
proxy_mode=proxy.mode,
ssl_context=ssl_context,
- max_keepalive=limits.max_keepalive,
max_connections=limits.max_connections,
+ max_keepalive_connections=limits.max_keepalive_connections,
keepalive_expiry=KEEPALIVE_EXPIRY,
http2=http2,
)
- def _transport_for_url(self, request: Request) -> httpcore.AsyncHTTPTransport:
+ def _transport_for_url(self, url: URL) -> httpcore.AsyncHTTPTransport:
"""
Returns the transport instance that should be used for a given URL.
This will either be the standard connection pool, or a proxy.
"""
- url = request.url
- enforce_http_url(request)
-
for pattern, transport in self._proxies.items():
if pattern.matches(url):
return self._transport if transport is None else transport
"""
Sends a single request, without handling any redirections.
"""
- transport = self._transport_for_url(request)
+ transport = self._transport_for_url(request.url)
with map_exceptions(HTTPCORE_EXC_MAP, request=request):
(
**Parameters:**
- * **max_keepalive** - Allow the connection pool to maintain keep-alive connections
- below this point.
* **max_connections** - The maximum number of concurrent connections that may be
- established.
+ established.
+ * **max_keepalive_connections** - Allow the connection pool to maintain
+ keep-alive connections below this point. Should be less than or equal
+ to `max_connections`.
"""
def __init__(
- self, *, max_keepalive: int = None, max_connections: int = None,
+ self,
+ *,
+ max_connections: int = None,
+ max_keepalive_connections: int = None,
+ # Deprecated parameter naming, in favour of the more explicit version:
+ max_keepalive: int = None,
):
- self.max_keepalive = max_keepalive
+ if max_keepalive is not None:
+ warnings.warn(
+ "'max_keepalive' is deprecated. Use 'max_keepalive_connections'.",
+ DeprecationWarning,
+ )
+ max_keepalive_connections = max_keepalive
+
self.max_connections = max_connections
+ self.max_keepalive_connections = max_keepalive_connections
def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, self.__class__)
- and self.max_keepalive == other.max_keepalive
and self.max_connections == other.max_connections
+ and self.max_keepalive_connections == other.max_keepalive_connections
)
def __repr__(self) -> str:
class_name = self.__class__.__name__
return (
- f"{class_name}(max_keepalive={self.max_keepalive}, "
- f"max_connections={self.max_connections})"
+ f"{class_name}(max_connections={self.max_connections}, "
+ f"max_keepalive_connections={self.max_keepalive_connections})"
)
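
As a quick illustration of the renamed keyword arguments (a sketch, not part of this changeset; the values are arbitrary), the new `Limits` configuration would be constructed like this, with the deprecated `max_keepalive` spelling still accepted but emitting a `DeprecationWarning`:

import httpx

# Preferred, explicit naming:
limits = httpx.Limits(max_connections=100, max_keepalive_connections=20)

# Deprecated spelling: warns and is mapped onto the new attribute internally.
legacy = httpx.Limits(max_keepalive=20)
assert legacy.max_keepalive_connections == 20
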
class PoolLimits(Limits):
- def __init__(
- self, *, max_keepalive: int = None, max_connections: int = None,
- ) -> None:
+ def __init__(self, **kwargs: typing.Any) -> None:
warn_deprecated(
"httpx.PoolLimits(...) is deprecated and will raise errors in the future. "
"Use httpx.Limits(...) instead."
)
- super().__init__(max_keepalive=max_keepalive, max_connections=max_connections)
+ super().__init__(**kwargs)
class Proxy:
· WriteError
· CloseError
- ProtocolError
+ · LocalProtocolError
+ · RemoteProtocolError
- ProxyError
+ - UnsupportedProtocol
+ DecodingError
+ TooManyRedirects
+ RequestBodyUnavailable
- + InvalidURL
x HTTPStatusError
* NotRedirectResponse
* CookieConflict
"""
+class UnsupportedProtocol(TransportError):
+ """
+ Attempted to make a request to an unsupported protocol.
+
+ For example, issuing a request to `ftp://www.example.com`.
+ """
+
+
class ProtocolError(TransportError):
"""
- A protocol was violated by the server.
+ The protocol was violated.
+ """
+
+
+class LocalProtocolError(ProtocolError):
+ """
+ The protocol was violated by the client.
+
+ For example, if the user instantiated a `Request` instance explicitly,
+ failed to include the mandatory `Host:` header, and then issued it directly
+ using `client.send()`.
+ """
+
+
+class RemoteProtocolError(ProtocolError):
+ """
+ The protocol was violated by the server.
+
+ For example, returning malformed HTTP.
"""
"""
-class InvalidURL(RequestError):
- """
- URL was missing a hostname, or was not one of HTTP/HTTPS.
- """
-
-
# Client errors
super().__init__(message)
+# The `InvalidURL` class is no longer required. It was being used to enforce that
+# only 'http'/'https' URLs were requested, but this is now handled at the
+# transport layer using `UnsupportedProtocol()`.
+
+# We are currently still exposing this class, but it will be removed in 1.0.
+InvalidURL = UnsupportedProtocol
+
+
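
For callers migrating away from `InvalidURL`, a minimal sketch of how the new errors surface (the URL is purely illustrative; as the tests below show, the exact exception may be `UnsupportedProtocol` or `LocalProtocolError` depending on how the URL is malformed):

import httpx

try:
    httpx.get("ftp://www.example.com")
except (httpx.UnsupportedProtocol, httpx.LocalProtocolError) as exc:
    print(f"Request rejected at the transport layer: {exc}")

# Existing `except httpx.InvalidURL` blocks keep working for now, since the
# name is aliased to `UnsupportedProtocol` until it is removed in 1.0.
assert httpx.InvalidURL is httpx.UnsupportedProtocol
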
@contextlib.contextmanager
def map_exceptions(
mapping: typing.Mapping[typing.Type[Exception], typing.Type[Exception]],
httpcore.WriteError: WriteError,
httpcore.CloseError: CloseError,
httpcore.ProxyError: ProxyError,
+ httpcore.UnsupportedProtocol: UnsupportedProtocol,
httpcore.ProtocolError: ProtocolError,
+ httpcore.LocalProtocolError: LocalProtocolError,
+ httpcore.RemoteProtocolError: RemoteProtocolError,
}
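
The mapping above is applied around every transport call via the `map_exceptions` context manager (see the `with map_exceptions(HTTPCORE_EXC_MAP, request=request)` blocks in the client code earlier in this diff). A self-contained sketch of the translation, using internal helpers purely for illustration:

import httpcore
import httpx
from httpx._exceptions import HTTPCORE_EXC_MAP, map_exceptions

request = httpx.Request("GET", "http://example.org")
try:
    with map_exceptions(HTTPCORE_EXC_MAP, request=request):
        # Anything a transport raises from the httpcore hierarchy...
        raise httpcore.UnsupportedProtocol("Scheme b'ftp' not supported.")
except httpx.UnsupportedProtocol as exc:
    # ...is re-raised as the corresponding httpx exception.
    print(exc)
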
-from typing import (
- TYPE_CHECKING,
- AsyncIterator,
- Callable,
- Dict,
- List,
- Optional,
- Tuple,
- Union,
-)
+from typing import TYPE_CHECKING, Callable, List, Mapping, Optional, Tuple, Union
import httpcore
import sniffio
return asyncio.Event()
-async def async_byte_iterator(bytestring: bytes) -> AsyncIterator[bytes]:
- yield bytestring
-
-
class ASGITransport(httpcore.AsyncHTTPTransport):
"""
A custom AsyncTransport that handles sending requests directly to an ASGI app.
url: Tuple[bytes, bytes, Optional[int], bytes],
headers: List[Tuple[bytes, bytes]] = None,
stream: httpcore.AsyncByteStream = None,
- timeout: Dict[str, Optional[float]] = None,
+ timeout: Mapping[str, Optional[float]] = None,
) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], httpcore.AsyncByteStream]:
headers = [] if headers is None else headers
- stream = (
- httpcore.AsyncByteStream(async_byte_iterator(b""))
- if stream is None
- else stream
- )
+ stream = httpcore.PlainByteStream(content=b"") if stream is None else stream
# ASGI scope.
scheme, host, port, full_path = url
assert status_code is not None
assert response_headers is not None
- response_body = b"".join(body_parts)
-
- stream = httpcore.AsyncByteStream(async_byte_iterator(response_body))
+ stream = httpcore.PlainByteStream(content=b"".join(body_parts))
return (b"HTTP/1.1", status_code, b"", response_headers, stream)
import socket
-from typing import Dict, Iterator, List, Optional, Tuple
+from typing import Iterator, List, Mapping, Optional, Tuple
import httpcore
url: Tuple[bytes, bytes, Optional[int], bytes],
headers: List[Tuple[bytes, bytes]] = None,
stream: httpcore.SyncByteStream = None,
- timeout: Dict[str, Optional[float]] = None,
+ timeout: Mapping[str, Optional[float]] = None,
) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], httpcore.SyncByteStream]:
headers = [] if headers is None else headers
stream = ByteStream(b"") if stream is None else stream
url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
headers: typing.List[typing.Tuple[bytes, bytes]] = None,
stream: httpcore.SyncByteStream = None,
- timeout: typing.Dict[str, typing.Optional[float]] = None,
+ timeout: typing.Mapping[str, typing.Optional[float]] = None,
) -> typing.Tuple[
bytes,
int,
httpcore.SyncByteStream,
]:
headers = [] if headers is None else headers
- stream = (
- httpcore.SyncByteStream(chunk for chunk in [b""])
- if stream is None
- else stream
- )
+ stream = httpcore.PlainByteStream(content=b"") if stream is None else stream
scheme, host, port, full_path = url
path, _, query = full_path.partition(b"?")
(key.encode("ascii"), value.encode("ascii"))
for key, value in seen_response_headers
]
- stream = httpcore.SyncByteStream(chunk for chunk in result)
+ stream = httpcore.IteratorByteStream(iterator=result)
return (b"HTTP/1.1", status_code, b"", headers, stream)
from types import TracebackType
from urllib.request import getproxies
-from ._exceptions import InvalidURL
from ._types import PrimitiveData
if typing.TYPE_CHECKING: # pragma: no cover
- from ._models import URL, Request
+ from ._models import URL
_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"}
return typing.cast(Logger, logger)
-def enforce_http_url(request: "Request") -> None:
- """
- Raise an appropriate InvalidURL for any non-HTTP URLs.
- """
- url = request.url
-
- if not url.scheme:
- message = "No scheme included in URL."
- raise InvalidURL(message, request=request)
- if not url.host:
- message = "No host included in URL."
- raise InvalidURL(message, request=request)
- if url.scheme not in ("http", "https"):
- message = 'URL scheme must be "http" or "https".'
- raise InvalidURL(message, request=request)
-
-
def port_or_default(url: "URL") -> typing.Optional[int]:
if url.port is not None:
return url.port
"chardet==3.*",
"idna==2.*",
"rfc3986>=1.3,<2",
- "httpcore==0.9.*",
+ "httpcore[http2]==0.10.*",
],
classifiers=[
"Development Status :: 4 - Beta",
@pytest.mark.usefixtures("async_environment")
async def test_get_invalid_url(server, url):
async with httpx.AsyncClient() as client:
- with pytest.raises(httpx.InvalidURL):
+ with pytest.raises((httpx.UnsupportedProtocol, httpx.LocalProtocolError)):
await client.get(url)
url: typing.Tuple[bytes, bytes, int, bytes],
headers: typing.List[typing.Tuple[bytes, bytes]],
stream: ContentStream,
- timeout: typing.Dict[str, typing.Optional[float]] = None,
+ timeout: typing.Mapping[str, typing.Optional[float]] = None,
) -> typing.Tuple[
bytes, int, bytes, typing.List[typing.Tuple[bytes, bytes]], ContentStream
]:
url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
headers: typing.List[typing.Tuple[bytes, bytes]] = None,
stream: httpcore.AsyncByteStream = None,
- timeout: typing.Dict[str, typing.Optional[float]] = None,
+ timeout: typing.Mapping[str, typing.Optional[float]] = None,
) -> typing.Tuple[
bytes, int, bytes, typing.List[typing.Tuple[bytes, bytes]], ContentStream
]:
)
def test_get_invalid_url(server, url):
with httpx.Client() as client:
- with pytest.raises(httpx.InvalidURL):
+ with pytest.raises((httpx.UnsupportedProtocol, httpx.LocalProtocolError)):
client.get(url)
url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
headers: typing.List[typing.Tuple[bytes, bytes]] = None,
stream: httpcore.AsyncByteStream = None,
- timeout: typing.Dict[str, typing.Optional[float]] = None,
+ timeout: typing.Mapping[str, typing.Optional[float]] = None,
) -> typing.Tuple[
bytes, int, bytes, typing.List[typing.Tuple[bytes, bytes]], ContentStream
]:
url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
headers: typing.List[typing.Tuple[bytes, bytes]] = None,
stream: httpcore.AsyncByteStream = None,
- timeout: typing.Dict[str, typing.Optional[float]] = None,
+ timeout: typing.Mapping[str, typing.Optional[float]] = None,
) -> typing.Tuple[
bytes, int, bytes, typing.List[typing.Tuple[bytes, bytes]], ContentStream
]:
)
def test_transport_for_request(url, proxies, expected):
client = httpx.AsyncClient(proxies=proxies)
- request = httpx.Request(method="GET", url=url)
- transport = client._transport_for_url(request)
+ transport = client._transport_for_url(httpx.URL(url))
if expected is None:
assert transport is client._transport
monkeypatch.setenv(name, value)
client = client_class()
- request = httpx.Request(method="GET", url=url)
- transport = client._transport_for_url(request)
+ transport = client._transport_for_url(httpx.URL(url))
if expected is None:
assert transport == client._transport
url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
headers: typing.List[typing.Tuple[bytes, bytes]] = None,
stream: httpcore.AsyncByteStream = None,
- timeout: typing.Dict[str, typing.Optional[float]] = None,
+ timeout: typing.Mapping[str, typing.Optional[float]] = None,
) -> typing.Tuple[
bytes, int, bytes, typing.List[typing.Tuple[bytes, bytes]], ContentStream
]:
URL,
AsyncClient,
Client,
- InvalidURL,
NotRedirectResponse,
RequestBodyUnavailable,
TooManyRedirects,
+ UnsupportedProtocol,
codes,
)
from httpx._content_streams import AsyncIteratorStream, ByteStream, ContentStream
url: typing.Tuple[bytes, bytes, int, bytes],
headers: typing.List[typing.Tuple[bytes, bytes]],
stream: ContentStream,
- timeout: typing.Dict[str, typing.Optional[float]] = None,
+ timeout: typing.Mapping[str, typing.Optional[float]] = None,
) -> typing.Tuple[
bytes, int, bytes, typing.List[typing.Tuple[bytes, bytes]], ContentStream
]:
scheme, host, port, path = url
+ if scheme not in (b"http", b"https"):
+ raise httpcore.UnsupportedProtocol(f"Scheme {scheme!r} not supported.")
+
path, _, query = path.partition(b"?")
if path == b"/no_redirect":
return b"HTTP/1.1", codes.OK, b"OK", [], ByteStream(b"")
url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
headers: typing.List[typing.Tuple[bytes, bytes]] = None,
stream: httpcore.AsyncByteStream = None,
- timeout: typing.Dict[str, typing.Optional[float]] = None,
+ timeout: typing.Mapping[str, typing.Optional[float]] = None,
) -> typing.Tuple[
bytes, int, bytes, typing.List[typing.Tuple[bytes, bytes]], ContentStream
]:
@pytest.mark.usefixtures("async_environment")
async def test_redirect_custom_scheme():
client = AsyncClient(transport=AsyncMockTransport())
- with pytest.raises(InvalidURL) as e:
+ with pytest.raises(UnsupportedProtocol) as e:
await client.post("https://example.org/redirect_custom_scheme")
- assert str(e.value) == 'Scheme "market" not supported.'
+ assert str(e.value) == "Scheme b'market' not supported."
def test_limits_repr():
limits = httpx.Limits(max_connections=100)
- assert repr(limits) == "Limits(max_keepalive=None, max_connections=100)"
+ assert repr(limits) == "Limits(max_connections=100, max_keepalive_connections=None)"
def test_limits_eq():
url: typing.Tuple[bytes, bytes, typing.Optional[int], bytes],
headers: typing.List[typing.Tuple[bytes, bytes]] = None,
stream: httpcore.AsyncByteStream = None,
- timeout: typing.Dict[str, typing.Optional[float]] = None,
+ timeout: typing.Mapping[str, typing.Optional[float]] = None,
) -> typing.Tuple[
bytes,
int,