git.ipfire.org Git - thirdparty/httpx.git/commitdiff
Reintroduce sync API. (#735)
author Tom Christie <tom@tomchristie.com>
Wed, 8 Jan 2020 12:31:50 +0000 (12:31 +0000)
committer GitHub <noreply@github.com>
Wed, 8 Jan 2020 12:31:50 +0000 (12:31 +0000)
* BaseClient and AsyncClient

* Introduce 'httpx.Client'

* Top level API -> sync

* Top level API -> sync

* Add WSGI support, drop deprecated imports

* Wire up timeouts to urllib3

* Wire up pool_limits

* Add urllib3 proxy support

* Pull #734 into sync Client

* Update AsyncClient docstring

* Simpler WSGI implementation

* Set body=None when no content

* Wrap urllib3 connection/read exceptions

18 files changed:
httpx/__init__.py
httpx/api.py
httpx/client.py
httpx/config.py
httpx/dispatch/__init__.py
httpx/dispatch/asgi.py
httpx/dispatch/base.py
httpx/dispatch/urllib3.py [new file with mode: 0644]
httpx/dispatch/wsgi.py [new file with mode: 0644]
httpx/models.py
setup.cfg
setup.py
tests/client/test_client.py
tests/client/test_proxies.py
tests/dispatch/test_proxy_http.py
tests/models/test_requests.py
tests/test_api.py
tests/test_wsgi.py [new file with mode: 0644]

index 80c29da7133afeacf6c818f6b21e7820733781a7..4a133e8efdcb369e2805a7147ce7096ab5faf268 100644 (file)
@@ -2,9 +2,9 @@ from .__version__ import __description__, __title__, __version__
 from .api import delete, get, head, options, patch, post, put, request, stream
 from .auth import Auth, BasicAuth, DigestAuth
 from .client import AsyncClient, Client
-from .config import TimeoutConfig  # For 0.8 backwards compat.
 from .config import PoolLimits, Proxy, Timeout
-from .dispatch.proxy_http import HTTPProxy, HTTPProxyMode
+from .dispatch.asgi import ASGIDispatch
+from .dispatch.wsgi import WSGIDispatch
 from .exceptions import (
     ConnectionClosed,
     ConnectTimeout,
@@ -45,6 +45,7 @@ __all__ = [
     "request",
     "stream",
     "codes",
+    "ASGIDispatch",
     "AsyncClient",
     "Auth",
     "BasicAuth",
@@ -53,9 +54,6 @@ __all__ = [
     "PoolLimits",
     "Proxy",
     "Timeout",
-    "TimeoutConfig",  # For 0.8 backwards compat.
-    "HTTPProxy",
-    "HTTPProxyMode",  # For 0.8 backwards compat.
     "ConnectTimeout",
     "CookieConflict",
     "ConnectionClosed",
@@ -84,4 +82,5 @@ __all__ = [
     "TimeoutException",
     "Response",
     "DigestAuth",
+    "WSGIDispatch",
 ]
index bf9d60320fe33904443fdb8213cbddfaabc20a9e..b030c5b5249df30b14d88da33df9b8fc88744d79 100644 (file)
@@ -1,7 +1,7 @@
 import typing
 
 from .auth import AuthTypes
-from .client import AsyncClient, StreamContextManager
+from .client import Client, StreamContextManager
 from .config import DEFAULT_TIMEOUT_CONFIG, CertTypes, TimeoutTypes, VerifyTypes
 from .models import (
     CookieTypes,
@@ -15,7 +15,7 @@ from .models import (
 )
 
 
-async def request(
+def request(
     method: str,
     url: URLTypes,
     *,
@@ -80,10 +80,10 @@ async def request(
     <Response [200 OK]>
     ```
     """
-    async with AsyncClient(
+    with Client(
         cert=cert, verify=verify, timeout=timeout, trust_env=trust_env,
     ) as client:
-        return await client.request(
+        return client.request(
             method=method,
             url=url,
             data=data,
@@ -114,7 +114,7 @@ def stream(
     cert: CertTypes = None,
     trust_env: bool = True,
 ) -> StreamContextManager:
-    client = AsyncClient(cert=cert, verify=verify, trust_env=trust_env)
+    client = Client(cert=cert, verify=verify, trust_env=trust_env)
     request = Request(
         method=method,
         url=url,
@@ -135,7 +135,7 @@ def stream(
     )
 
 
-async def get(
+def get(
     url: URLTypes,
     *,
     params: QueryParamTypes = None,
@@ -156,7 +156,7 @@ async def get(
     Note that the `data`, `files`, and `json` parameters are not available on
     this function, as `GET` requests should not include a request body.
     """
-    return await request(
+    return request(
         "GET",
         url,
         params=params,
@@ -171,7 +171,7 @@ async def get(
     )
 
 
-async def options(
+def options(
     url: URLTypes,
     *,
     params: QueryParamTypes = None,
@@ -192,7 +192,7 @@ async def options(
     Note that the `data`, `files`, and `json` parameters are not available on
     this function, as `OPTIONS` requests should not include a request body.
     """
-    return await request(
+    return request(
         "OPTIONS",
         url,
         params=params,
@@ -207,7 +207,7 @@ async def options(
     )
 
 
-async def head(
+def head(
     url: URLTypes,
     *,
     params: QueryParamTypes = None,
@@ -230,7 +230,7 @@ async def head(
     `HEAD` method also differs from the other cases in that `allow_redirects`
     defaults to `False`.
     """
-    return await request(
+    return request(
         "HEAD",
         url,
         params=params,
@@ -245,7 +245,7 @@ async def head(
     )
 
 
-async def post(
+def post(
     url: URLTypes,
     *,
     data: RequestData = None,
@@ -266,7 +266,7 @@ async def post(
 
     **Parameters**: See `httpx.request`.
     """
-    return await request(
+    return request(
         "POST",
         url,
         data=data,
@@ -284,7 +284,7 @@ async def post(
     )
 
 
-async def put(
+def put(
     url: URLTypes,
     *,
     data: RequestData = None,
@@ -305,7 +305,7 @@ async def put(
 
     **Parameters**: See `httpx.request`.
     """
-    return await request(
+    return request(
         "PUT",
         url,
         data=data,
@@ -323,7 +323,7 @@ async def put(
     )
 
 
-async def patch(
+def patch(
     url: URLTypes,
     *,
     data: RequestData = None,
@@ -344,7 +344,7 @@ async def patch(
 
     **Parameters**: See `httpx.request`.
     """
-    return await request(
+    return request(
         "PATCH",
         url,
         data=data,
@@ -362,7 +362,7 @@ async def patch(
     )
 
 
-async def delete(
+def delete(
     url: URLTypes,
     *,
     params: QueryParamTypes = None,
@@ -383,7 +383,7 @@ async def delete(
     Note that the `data`, `files`, and `json` parameters are not available on
     this function, as `DELETE` requests should not include a request body.
     """
-    return await request(
+    return request(
         "DELETE",
         url,
         params=params,
index 5725e404f6241ec23e17f69a4695b56255872ffe..9eec6641bb39887a7238cefc6ac9842d2c675026 100644 (file)
@@ -22,9 +22,11 @@ from .config import (
 )
 from .content_streams import ContentStream
 from .dispatch.asgi import ASGIDispatch
-from .dispatch.base import AsyncDispatcher
+from .dispatch.base import AsyncDispatcher, SyncDispatcher
 from .dispatch.connection_pool import ConnectionPool
 from .dispatch.proxy_http import HTTPProxy
+from .dispatch.urllib3 import URLLib3Dispatcher
+from .dispatch.wsgi import WSGIDispatch
 from .exceptions import (
     HTTPError,
     InvalidURL,
@@ -53,59 +55,7 @@ from .utils import NetRCInfo, get_environment_proxies, get_logger
 logger = get_logger(__name__)
 
 
-class AsyncClient:
-    """
-    An asynchronous HTTP client, with connection pooling, HTTP/2, redirects,
-    cookie persistence, etc.
-
-    Usage:
-
-    ```python
-    >>> client = httpx.AsyncClient()
-    >>> response = client.get('https://example.org')
-    ```
-
-    **Parameters:**
-
-    * **auth** - *(optional)* An authentication class to use when sending
-    requests.
-    * **params** - *(optional)* Query parameters to include in request URLs, as
-    a string, dictionary, or list of two-tuples.
-    * **headers** - *(optional)* Dictionary of HTTP headers to include when
-    sending requests.
-    * **cookies** - *(optional)* Dictionary of Cookie items to include when
-    sending requests.
-    * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to
-    verify the identity of requested hosts. Either `True` (default CA bundle),
-    a path to an SSL certificate file, or `False` (disable verification).
-    * **cert** - *(optional)* An SSL certificate used by the requested host
-    to authenticate the client. Either a path to an SSL certificate file, or
-    two-tuple of (certificate file, key file), or a three-tuple of (certificate
-    file, key file, password).
-    * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be
-    enabled. Defaults to `False`.
-    * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy
-    URLs.
-    * **timeout** - *(optional)* The timeout configuration to use when sending
-    requests.
-    * **pool_limits** - *(optional)* The connection pool configuration to use
-    when determining the maximum number of concurrently open HTTP connections.
-    * **max_redirects** - *(optional)* The maximum number of redirect responses
-    that should be followed.
-    * **base_url** - *(optional)* A URL to use as the base when building
-    request URLs.
-    * **dispatch** - *(optional)* A dispatch class to use for sending requests
-    over the network.
-    * **app** - *(optional)* An ASGI application to send requests to,
-    rather than sending actual network requests.
-    * **backend** - *(optional)* A concurrency backend to use when issuing
-    async requests. Either 'auto', 'asyncio', 'trio', or a `ConcurrencyBackend`
-    instance. Defaults to 'auto', for autodetection.
-    * **trust_env** - *(optional)* Enables or disables usage of environment
-    variables for configuration.
-    * **uds** - *(optional)* A path to a Unix domain socket to connect through.
-    """
-
+class BaseClient:
     def __init__(
         self,
         *,
@@ -113,19 +63,10 @@ class AsyncClient:
         params: QueryParamTypes = None,
         headers: HeaderTypes = None,
         cookies: CookieTypes = None,
-        verify: VerifyTypes = True,
-        cert: CertTypes = None,
-        http2: bool = False,
-        proxies: ProxiesTypes = None,
         timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
-        pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
         max_redirects: int = DEFAULT_MAX_REDIRECTS,
         base_url: URLTypes = None,
-        dispatch: AsyncDispatcher = None,
-        app: typing.Callable = None,
-        backend: typing.Union[str, ConcurrencyBackend] = "auto",
         trust_env: bool = True,
-        uds: str = None,
     ):
         if base_url is None:
             self.base_url = URL("", allow_relative=True)
@@ -135,9 +76,6 @@ class AsyncClient:
         if params is None:
             params = {}
 
-        if proxies is None and trust_env:
-            proxies = typing.cast(ProxiesTypes, get_environment_proxies())
-
         self.auth = auth
         self._params = QueryParams(params)
         self._headers = Headers(headers)
@@ -147,82 +85,6 @@ class AsyncClient:
         self.trust_env = trust_env
         self.netrc = NetRCInfo()
 
-        proxy_map = self.get_proxy_map(proxies, trust_env)
-
-        self.dispatch = self.init_dispatch(
-            verify=verify,
-            cert=cert,
-            http2=http2,
-            pool_limits=pool_limits,
-            dispatch=dispatch,
-            app=app,
-            backend=backend,
-            trust_env=trust_env,
-            uds=uds,
-        )
-        self.proxies: typing.Dict[str, AsyncDispatcher] = {
-            key: self.init_proxy_dispatch(
-                proxy,
-                verify=verify,
-                cert=cert,
-                http2=http2,
-                pool_limits=pool_limits,
-                backend=backend,
-                trust_env=trust_env,
-            )
-            for key, proxy in proxy_map.items()
-        }
-
-    def init_dispatch(
-        self,
-        verify: VerifyTypes = True,
-        cert: CertTypes = None,
-        http2: bool = False,
-        pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
-        dispatch: AsyncDispatcher = None,
-        app: typing.Callable = None,
-        backend: typing.Union[str, ConcurrencyBackend] = "auto",
-        trust_env: bool = True,
-        uds: str = None,
-    ) -> AsyncDispatcher:
-        if dispatch is not None:
-            return dispatch
-
-        if app is not None:
-            return ASGIDispatch(app=app)
-
-        return ConnectionPool(
-            verify=verify,
-            cert=cert,
-            http2=http2,
-            pool_limits=pool_limits,
-            backend=backend,
-            trust_env=trust_env,
-            uds=uds,
-        )
-
-    def init_proxy_dispatch(
-        self,
-        proxy: Proxy,
-        verify: VerifyTypes = True,
-        cert: CertTypes = None,
-        http2: bool = False,
-        pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
-        backend: typing.Union[str, ConcurrencyBackend] = "auto",
-        trust_env: bool = True,
-    ) -> AsyncDispatcher:
-        return HTTPProxy(
-            proxy_url=proxy.url,
-            proxy_headers=proxy.headers,
-            proxy_mode=proxy.mode,
-            verify=verify,
-            cert=cert,
-            http2=http2,
-            pool_limits=pool_limits,
-            backend=backend,
-            trust_env=trust_env,
-        )
-
     def get_proxy_map(
         self, proxies: typing.Optional[ProxiesTypes], trust_env: bool,
     ) -> typing.Dict[str, Proxy]:
@@ -437,88 +299,743 @@ class AsyncClient:
             method=method, url=url, headers=headers, cookies=cookies, stream=stream
         )
 
-    def redirect_method(self, request: Request, response: Response) -> str:
-        """
-        When being redirected we may want to change the method of the request
-        based on certain specs or browser behavior.
-        """
-        method = request.method
-
-        # https://tools.ietf.org/html/rfc7231#section-6.4.4
-        if response.status_code == codes.SEE_OTHER and method != "HEAD":
-            method = "GET"
+    def redirect_method(self, request: Request, response: Response) -> str:
+        """
+        When being redirected we may want to change the method of the request
+        based on certain specs or browser behavior.
+        """
+        method = request.method
+
+        # https://tools.ietf.org/html/rfc7231#section-6.4.4
+        if response.status_code == codes.SEE_OTHER and method != "HEAD":
+            method = "GET"
+
+        # Do what the browsers do, despite standards...
+        # Turn 302s into GETs.
+        if response.status_code == codes.FOUND and method != "HEAD":
+            method = "GET"
+
+        # If a POST is responded to with a 301, turn it into a GET.
+        # This bizarre behaviour is explained in 'requests' issue 1704.
+        if response.status_code == codes.MOVED_PERMANENTLY and method == "POST":
+            method = "GET"
+
+        return method
+
+    def redirect_url(self, request: Request, response: Response) -> URL:
+        """
+        Return the URL for the redirect to follow.
+        """
+        location = response.headers["Location"]
+
+        url = URL(location, allow_relative=True)
+
+        # Facilitate relative 'Location' headers, as allowed by RFC 7231.
+        # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
+        if url.is_relative_url:
+            url = request.url.join(url)
+
+        # Attach previous fragment if needed (RFC 7231 7.1.2)
+        if request.url.fragment and not url.fragment:
+            url = url.copy_with(fragment=request.url.fragment)
+
+        return url
+
+    def redirect_headers(self, request: Request, url: URL, method: str) -> Headers:
+        """
+        Return the headers that should be used for the redirect request.
+        """
+        headers = Headers(request.headers)
+
+        if Origin(url) != Origin(request.url):
+            # Strip Authorization headers when responses are redirected away from
+            # the origin.
+            headers.pop("Authorization", None)
+            headers["Host"] = url.authority
+
+        if method != request.method and method == "GET":
+            # If we've switched to a 'GET' request, then strip any headers which
+            # are only relevant to the request body.
+            headers.pop("Content-Length", None)
+            headers.pop("Transfer-Encoding", None)
+
+        # We should use the client cookie store to determine any cookie header,
+        # rather than whatever was on the original outgoing request.
+        headers.pop("Cookie", None)
+
+        return headers
+
+    def redirect_stream(
+        self, request: Request, method: str
+    ) -> typing.Optional[ContentStream]:
+        """
+        Return the body that should be used for the redirect request.
+        """
+        if method != request.method and method == "GET":
+            return None
+
+        if not request.stream.can_replay():
+            raise RequestBodyUnavailable(
+                "Got a redirect response, but the request body was streaming "
+                "and is no longer available."
+            )
+
+        return request.stream
+
+
+class Client(BaseClient):
+    """
+    An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc.
+
+    Usage:
+
+    ```python
+    >>> client = httpx.Client()
+    >>> response = client.get('https://example.org')
+    ```
+
+    **Parameters:**
+
+    * **auth** - *(optional)* An authentication class to use when sending
+    requests.
+    * **params** - *(optional)* Query parameters to include in request URLs, as
+    a string, dictionary, or list of two-tuples.
+    * **headers** - *(optional)* Dictionary of HTTP headers to include when
+    sending requests.
+    * **cookies** - *(optional)* Dictionary of Cookie items to include when
+    sending requests.
+    * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to
+    verify the identity of requested hosts. Either `True` (default CA bundle),
+    a path to an SSL certificate file, or `False` (disable verification).
+    * **cert** - *(optional)* An SSL certificate used by the requested host
+    to authenticate the client. Either a path to an SSL certificate file, or
+    two-tuple of (certificate file, key file), or a three-tuple of (certificate
+    file, key file, password).
+    * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy
+    URLs.
+    * **timeout** - *(optional)* The timeout configuration to use when sending
+    requests.
+    * **pool_limits** - *(optional)* The connection pool configuration to use
+    when determining the maximum number of concurrently open HTTP connections.
+    * **max_redirects** - *(optional)* The maximum number of redirect responses
+    that should be followed.
+    * **base_url** - *(optional)* A URL to use as the base when building
+    request URLs.
+    * **dispatch** - *(optional)* A dispatch class to use for sending requests
+    over the network.
+    * **app** - *(optional)* An ASGI application to send requests to,
+    rather than sending actual network requests.
+    * **trust_env** - *(optional)* Enables or disables usage of environment
+    variables for configuration.
+    """
+
+    def __init__(
+        self,
+        *,
+        auth: AuthTypes = None,
+        params: QueryParamTypes = None,
+        headers: HeaderTypes = None,
+        cookies: CookieTypes = None,
+        verify: VerifyTypes = True,
+        cert: CertTypes = None,
+        proxies: ProxiesTypes = None,
+        timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
+        pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
+        max_redirects: int = DEFAULT_MAX_REDIRECTS,
+        base_url: URLTypes = None,
+        dispatch: SyncDispatcher = None,
+        app: typing.Callable = None,
+        trust_env: bool = True,
+    ):
+        super().__init__(
+            auth=auth,
+            params=params,
+            headers=headers,
+            cookies=cookies,
+            timeout=timeout,
+            max_redirects=max_redirects,
+            base_url=base_url,
+            trust_env=trust_env,
+        )
+
+        proxy_map = self.get_proxy_map(proxies, trust_env)
+
+        self.dispatch = self.init_dispatch(
+            verify=verify,
+            cert=cert,
+            pool_limits=pool_limits,
+            dispatch=dispatch,
+            app=app,
+            trust_env=trust_env,
+        )
+        self.proxies: typing.Dict[str, SyncDispatcher] = {
+            key: self.init_proxy_dispatch(
+                proxy,
+                verify=verify,
+                cert=cert,
+                pool_limits=pool_limits,
+                trust_env=trust_env,
+            )
+            for key, proxy in proxy_map.items()
+        }
+
+    def init_dispatch(
+        self,
+        verify: VerifyTypes = True,
+        cert: CertTypes = None,
+        pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
+        dispatch: SyncDispatcher = None,
+        app: typing.Callable = None,
+        trust_env: bool = True,
+    ) -> SyncDispatcher:
+        if dispatch is not None:
+            return dispatch
+
+        if app is not None:
+            return WSGIDispatch(app=app)
+
+        return URLLib3Dispatcher(
+            verify=verify, cert=cert, pool_limits=pool_limits, trust_env=trust_env,
+        )
+
+    def init_proxy_dispatch(
+        self,
+        proxy: Proxy,
+        verify: VerifyTypes = True,
+        cert: CertTypes = None,
+        pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
+        trust_env: bool = True,
+    ) -> SyncDispatcher:
+        return URLLib3Dispatcher(
+            proxy=proxy,
+            verify=verify,
+            cert=cert,
+            pool_limits=pool_limits,
+            trust_env=trust_env,
+        )
+
+    def dispatcher_for_url(self, url: URL) -> SyncDispatcher:
+        """
+        Returns the SyncDispatcher instance that should be used for a given URL.
+        This will either be the standard connection pool, or a proxy.
+        """
+        if self.proxies:
+            is_default_port = (url.scheme == "http" and url.port == 80) or (
+                url.scheme == "https" and url.port == 443
+            )
+            hostname = f"{url.host}:{url.port}"
+            proxy_keys = (
+                f"{url.scheme}://{hostname}",
+                f"{url.scheme}://{url.host}" if is_default_port else None,
+                f"all://{hostname}",
+                f"all://{url.host}" if is_default_port else None,
+                url.scheme,
+                "all",
+            )
+            for proxy_key in proxy_keys:
+                if proxy_key and proxy_key in self.proxies:
+                    dispatcher = self.proxies[proxy_key]
+                    return dispatcher
+
+        return self.dispatch
+
+    def request(
+        self,
+        method: str,
+        url: URLTypes,
+        *,
+        data: RequestData = None,
+        files: RequestFiles = None,
+        json: typing.Any = None,
+        params: QueryParamTypes = None,
+        headers: HeaderTypes = None,
+        cookies: CookieTypes = None,
+        auth: AuthTypes = None,
+        allow_redirects: bool = True,
+        timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
+    ) -> Response:
+        request = self.build_request(
+            method=method,
+            url=url,
+            data=data,
+            files=files,
+            json=json,
+            params=params,
+            headers=headers,
+            cookies=cookies,
+        )
+        return self.send(
+            request, auth=auth, allow_redirects=allow_redirects, timeout=timeout,
+        )
+
+    def send(
+        self,
+        request: Request,
+        *,
+        stream: bool = False,
+        auth: AuthTypes = None,
+        allow_redirects: bool = True,
+        timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
+    ) -> Response:
+        if request.url.scheme not in ("http", "https"):
+            raise InvalidURL('URL scheme must be "http" or "https".')
+
+        timeout = self.timeout if isinstance(timeout, UnsetType) else Timeout(timeout)
+
+        auth = self.build_auth(request, auth)
+
+        response = self.send_handling_redirects(
+            request, auth=auth, timeout=timeout, allow_redirects=allow_redirects,
+        )
+
+        if not stream:
+            try:
+                response.read()
+            finally:
+                response.close()
+
+        return response
+
+    def send_handling_redirects(
+        self,
+        request: Request,
+        auth: Auth,
+        timeout: Timeout,
+        allow_redirects: bool = True,
+        history: typing.List[Response] = None,
+    ) -> Response:
+        if history is None:
+            history = []
+
+        while True:
+            if len(history) > self.max_redirects:
+                raise TooManyRedirects()
+            urls = ((resp.request.method, resp.url) for resp in history)
+            if (request.method, request.url) in urls:
+                raise RedirectLoop()
+
+            response = self.send_handling_auth(
+                request, auth=auth, timeout=timeout, history=history
+            )
+            response.history = list(history)
+
+            if not response.is_redirect:
+                return response
+
+            response.read()
+            request = self.build_redirect_request(request, response)
+            history = history + [response]
+
+            if not allow_redirects:
+                response.call_next = functools.partial(
+                    self.send_handling_redirects,
+                    request=request,
+                    auth=auth,
+                    timeout=timeout,
+                    allow_redirects=False,
+                    history=history,
+                )
+                return response
+
+    def send_handling_auth(
+        self,
+        request: Request,
+        history: typing.List[Response],
+        auth: Auth,
+        timeout: Timeout,
+    ) -> Response:
+        if auth.requires_request_body:
+            request.read()
+
+        auth_flow = auth.auth_flow(request)
+        request = next(auth_flow)
+        while True:
+            response = self.send_single_request(request, timeout)
+            try:
+                next_request = auth_flow.send(response)
+            except StopIteration:
+                return response
+            except BaseException as exc:
+                response.close()
+                raise exc from None
+            else:
+                response.history = list(history)
+                response.read()
+                request = next_request
+                history.append(response)
+
+    def send_single_request(self, request: Request, timeout: Timeout,) -> Response:
+        """
+        Sends a single request, without handling any redirections.
+        """
+
+        dispatcher = self.dispatcher_for_url(request.url)
+
+        try:
+            response = dispatcher.send(request, timeout=timeout)
+        except HTTPError as exc:
+            # Add the original request to any HTTPError unless
+            # there'a already a request attached in the case of
+            # a ProxyError.
+            if exc.request is None:
+                exc.request = request
+            raise
+
+        self.cookies.extract_cookies(response)
+
+        status = f"{response.status_code} {response.reason_phrase}"
+        response_line = f"{response.http_version} {status}"
+        logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"')
+
+        return response
+
+    def get(
+        self,
+        url: URLTypes,
+        *,
+        params: QueryParamTypes = None,
+        headers: HeaderTypes = None,
+        cookies: CookieTypes = None,
+        auth: AuthTypes = None,
+        allow_redirects: bool = True,
+        timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
+    ) -> Response:
+        return self.request(
+            "GET",
+            url,
+            params=params,
+            headers=headers,
+            cookies=cookies,
+            auth=auth,
+            allow_redirects=allow_redirects,
+            timeout=timeout,
+        )
+
+    def options(
+        self,
+        url: URLTypes,
+        *,
+        params: QueryParamTypes = None,
+        headers: HeaderTypes = None,
+        cookies: CookieTypes = None,
+        auth: AuthTypes = None,
+        allow_redirects: bool = True,
+        timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
+    ) -> Response:
+        return self.request(
+            "OPTIONS",
+            url,
+            params=params,
+            headers=headers,
+            cookies=cookies,
+            auth=auth,
+            allow_redirects=allow_redirects,
+            timeout=timeout,
+        )
+
+    def head(
+        self,
+        url: URLTypes,
+        *,
+        params: QueryParamTypes = None,
+        headers: HeaderTypes = None,
+        cookies: CookieTypes = None,
+        auth: AuthTypes = None,
+        allow_redirects: bool = False,  # NOTE: Differs to usual default.
+        timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
+    ) -> Response:
+        return self.request(
+            "HEAD",
+            url,
+            params=params,
+            headers=headers,
+            cookies=cookies,
+            auth=auth,
+            allow_redirects=allow_redirects,
+            timeout=timeout,
+        )
+
+    def post(
+        self,
+        url: URLTypes,
+        *,
+        data: RequestData = None,
+        files: RequestFiles = None,
+        json: typing.Any = None,
+        params: QueryParamTypes = None,
+        headers: HeaderTypes = None,
+        cookies: CookieTypes = None,
+        auth: AuthTypes = None,
+        allow_redirects: bool = True,
+        timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
+    ) -> Response:
+        return self.request(
+            "POST",
+            url,
+            data=data,
+            files=files,
+            json=json,
+            params=params,
+            headers=headers,
+            cookies=cookies,
+            auth=auth,
+            allow_redirects=allow_redirects,
+            timeout=timeout,
+        )
+
+    def put(
+        self,
+        url: URLTypes,
+        *,
+        data: RequestData = None,
+        files: RequestFiles = None,
+        json: typing.Any = None,
+        params: QueryParamTypes = None,
+        headers: HeaderTypes = None,
+        cookies: CookieTypes = None,
+        auth: AuthTypes = None,
+        allow_redirects: bool = True,
+        timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
+    ) -> Response:
+        return self.request(
+            "PUT",
+            url,
+            data=data,
+            files=files,
+            json=json,
+            params=params,
+            headers=headers,
+            cookies=cookies,
+            auth=auth,
+            allow_redirects=allow_redirects,
+            timeout=timeout,
+        )
+
+    def patch(
+        self,
+        url: URLTypes,
+        *,
+        data: RequestData = None,
+        files: RequestFiles = None,
+        json: typing.Any = None,
+        params: QueryParamTypes = None,
+        headers: HeaderTypes = None,
+        cookies: CookieTypes = None,
+        auth: AuthTypes = None,
+        allow_redirects: bool = True,
+        timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
+    ) -> Response:
+        return self.request(
+            "PATCH",
+            url,
+            data=data,
+            files=files,
+            json=json,
+            params=params,
+            headers=headers,
+            cookies=cookies,
+            auth=auth,
+            allow_redirects=allow_redirects,
+            timeout=timeout,
+        )
+
+    def delete(
+        self,
+        url: URLTypes,
+        *,
+        params: QueryParamTypes = None,
+        headers: HeaderTypes = None,
+        cookies: CookieTypes = None,
+        auth: AuthTypes = None,
+        allow_redirects: bool = True,
+        timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET,
+    ) -> Response:
+        return self.request(
+            "DELETE",
+            url,
+            params=params,
+            headers=headers,
+            cookies=cookies,
+            auth=auth,
+            allow_redirects=allow_redirects,
+            timeout=timeout,
+        )
 
-        # Do what the browsers do, despite standards...
-        # Turn 302s into GETs.
-        if response.status_code == codes.FOUND and method != "HEAD":
-            method = "GET"
+    def close(self) -> None:
+        self.dispatch.close()
 
-        # If a POST is responded to with a 301, turn it into a GET.
-        # This bizarre behaviour is explained in 'requests' issue 1704.
-        if response.status_code == codes.MOVED_PERMANENTLY and method == "POST":
-            method = "GET"
+    def __enter__(self) -> "Client":
+        return self
 
-        return method
+    def __exit__(
+        self,
+        exc_type: typing.Type[BaseException] = None,
+        exc_value: BaseException = None,
+        traceback: TracebackType = None,
+    ) -> None:
+        self.close()
 
-    def redirect_url(self, request: Request, response: Response) -> URL:
-        """
-        Return the URL for the redirect to follow.
-        """
-        location = response.headers["Location"]
 
-        url = URL(location, allow_relative=True)
+class AsyncClient(BaseClient):
+    """
+    An asynchronous HTTP client, with connection pooling, HTTP/2, redirects,
+    cookie persistence, etc.
 
-        # Facilitate relative 'Location' headers, as allowed by RFC 7231.
-        # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
-        if url.is_relative_url:
-            url = request.url.join(url)
+    Usage:
 
-        # Attach previous fragment if needed (RFC 7231 7.1.2)
-        if request.url.fragment and not url.fragment:
-            url = url.copy_with(fragment=request.url.fragment)
+    ```python
+    >>> async with httpx.AsyncClient() as client:
+    >>>     response = await client.get('https://example.org')
+    ```
 
-        return url
+    **Parameters:**
 
-    def redirect_headers(self, request: Request, url: URL, method: str) -> Headers:
-        """
-        Return the headers that should be used for the redirect request.
-        """
-        headers = Headers(request.headers)
+    * **auth** - *(optional)* An authentication class to use when sending
+    requests.
+    * **params** - *(optional)* Query parameters to include in request URLs, as
+    a string, dictionary, or list of two-tuples.
+    * **headers** - *(optional)* Dictionary of HTTP headers to include when
+    sending requests.
+    * **cookies** - *(optional)* Dictionary of Cookie items to include when
+    sending requests.
+    * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to
+    verify the identity of requested hosts. Either `True` (default CA bundle),
+    a path to an SSL certificate file, or `False` (disable verification).
+    * **cert** - *(optional)* An SSL certificate used by the requested host
+    to authenticate the client. Either a path to an SSL certificate file, or
+    two-tuple of (certificate file, key file), or a three-tuple of (certificate
+    file, key file, password).
+    * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be
+    enabled. Defaults to `False`.
+    * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy
+    URLs.
+    * **timeout** - *(optional)* The timeout configuration to use when sending
+    requests.
+    * **pool_limits** - *(optional)* The connection pool configuration to use
+    when determining the maximum number of concurrently open HTTP connections.
+    * **max_redirects** - *(optional)* The maximum number of redirect responses
+    that should be followed.
+    * **base_url** - *(optional)* A URL to use as the base when building
+    request URLs.
+    * **dispatch** - *(optional)* A dispatch class to use for sending requests
+    over the network.
+    * **app** - *(optional)* An ASGI application to send requests to,
+    rather than sending actual network requests.
+    * **backend** - *(optional)* A concurrency backend to use when issuing
+    async requests. Either 'auto', 'asyncio', 'trio', or a `ConcurrencyBackend`
+    instance. Defaults to 'auto', for autodetection.
+    * **trust_env** - *(optional)* Enables or disables usage of environment
+    variables for configuration.
+    * **uds** - *(optional)* A path to a Unix domain socket to connect through.
+    """
 
-        if Origin(url) != Origin(request.url):
-            # Strip Authorization headers when responses are redirected away from
-            # the origin.
-            headers.pop("Authorization", None)
-            headers["Host"] = url.authority
+    def __init__(
+        self,
+        *,
+        auth: AuthTypes = None,
+        params: QueryParamTypes = None,
+        headers: HeaderTypes = None,
+        cookies: CookieTypes = None,
+        verify: VerifyTypes = True,
+        cert: CertTypes = None,
+        http2: bool = False,
+        proxies: ProxiesTypes = None,
+        timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
+        pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
+        max_redirects: int = DEFAULT_MAX_REDIRECTS,
+        base_url: URLTypes = None,
+        dispatch: AsyncDispatcher = None,
+        app: typing.Callable = None,
+        backend: typing.Union[str, ConcurrencyBackend] = "auto",
+        trust_env: bool = True,
+        uds: str = None,
+    ):
+        super().__init__(
+            auth=auth,
+            params=params,
+            headers=headers,
+            cookies=cookies,
+            timeout=timeout,
+            max_redirects=max_redirects,
+            base_url=base_url,
+            trust_env=trust_env,
+        )
 
-        if method != request.method and method == "GET":
-            # If we've switch to a 'GET' request, then strip any headers which
-            # are only relevant to the request body.
-            headers.pop("Content-Length", None)
-            headers.pop("Transfer-Encoding", None)
+        proxy_map = self.get_proxy_map(proxies, trust_env)
 
-        # We should use the client cookie store to determine any cookie header,
-        # rather than whatever was on the original outgoing request.
-        headers.pop("Cookie", None)
+        self.dispatch = self.init_dispatch(
+            verify=verify,
+            cert=cert,
+            http2=http2,
+            pool_limits=pool_limits,
+            dispatch=dispatch,
+            app=app,
+            backend=backend,
+            trust_env=trust_env,
+            uds=uds,
+        )
+        self.proxies: typing.Dict[str, AsyncDispatcher] = {
+            key: self.init_proxy_dispatch(
+                proxy,
+                verify=verify,
+                cert=cert,
+                http2=http2,
+                pool_limits=pool_limits,
+                backend=backend,
+                trust_env=trust_env,
+            )
+            for key, proxy in proxy_map.items()
+        }
 
-        return headers
+    def init_dispatch(
+        self,
+        verify: VerifyTypes = True,
+        cert: CertTypes = None,
+        http2: bool = False,
+        pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
+        dispatch: AsyncDispatcher = None,
+        app: typing.Callable = None,
+        backend: typing.Union[str, ConcurrencyBackend] = "auto",
+        trust_env: bool = True,
+        uds: str = None,
+    ) -> AsyncDispatcher:
+        if dispatch is not None:
+            return dispatch
 
-    def redirect_stream(
-        self, request: Request, method: str
-    ) -> typing.Optional[ContentStream]:
-        """
-        Return the body that should be used for the redirect request.
-        """
-        if method != request.method and method == "GET":
-            return None
+        if app is not None:
+            return ASGIDispatch(app=app)
 
-        if not request.stream.can_replay():
-            raise RequestBodyUnavailable(
-                "Got a redirect response, but the request body was streaming "
-                "and is no longer available."
-            )
+        return ConnectionPool(
+            verify=verify,
+            cert=cert,
+            http2=http2,
+            pool_limits=pool_limits,
+            backend=backend,
+            trust_env=trust_env,
+            uds=uds,
+        )
 
-        return request.stream
+    def init_proxy_dispatch(
+        self,
+        proxy: Proxy,
+        verify: VerifyTypes = True,
+        cert: CertTypes = None,
+        http2: bool = False,
+        pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
+        backend: typing.Union[str, ConcurrencyBackend] = "auto",
+        trust_env: bool = True,
+    ) -> AsyncDispatcher:
+        return HTTPProxy(
+            proxy_url=proxy.url,
+            proxy_headers=proxy.headers,
+            proxy_mode=proxy.mode,
+            verify=verify,
+            cert=cert,
+            http2=http2,
+            pool_limits=pool_limits,
+            backend=backend,
+            trust_env=trust_env,
+        )
 
     def dispatcher_for_url(self, url: URL) -> AsyncDispatcher:
         """
@@ -888,7 +1405,7 @@ class AsyncClient:
 class StreamContextManager:
     def __init__(
         self,
-        client: AsyncClient,
+        client: BaseClient,
         request: Request,
         *,
         auth: AuthTypes = None,
@@ -903,7 +1420,30 @@ class StreamContextManager:
         self.timeout = timeout
         self.close_client = close_client
 
+    def __enter__(self) -> "Response":
+        assert isinstance(self.client, Client)
+        self.response = self.client.send(
+            request=self.request,
+            auth=self.auth,
+            allow_redirects=self.allow_redirects,
+            timeout=self.timeout,
+            stream=True,
+        )
+        return self.response
+
+    def __exit__(
+        self,
+        exc_type: typing.Type[BaseException] = None,
+        exc_value: BaseException = None,
+        traceback: TracebackType = None,
+    ) -> None:
+        assert isinstance(self.client, Client)
+        self.response.close()
+        if self.close_client:
+            self.client.close()
+
     async def __aenter__(self) -> "Response":
+        assert isinstance(self.client, AsyncClient)
         self.response = await self.client.send(
             request=self.request,
             auth=self.auth,
@@ -919,10 +1459,7 @@ class StreamContextManager:
         exc_value: BaseException = None,
         traceback: TracebackType = None,
     ) -> None:
+        assert isinstance(self.client, AsyncClient)
         await self.response.aclose()
         if self.close_client:
             await self.client.aclose()
-
-
-# For compatibility with 0.9.x.
-Client = AsyncClient
index ee82a3822d5c57d96c17e6dc46eeae84bb934055..ef413bde0ee7f8c72305e9d01e533c2ee8e9f049 100644 (file)
@@ -330,9 +330,6 @@ class Proxy:
         )
 
 
-TimeoutConfig = Timeout  # Synonym for backwards compat
-
-
 DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0)
 DEFAULT_POOL_LIMITS = PoolLimits(soft_limit=10, hard_limit=100)
 DEFAULT_MAX_REDIRECTS = 20
index 2be8dd6d62563c362c8334518f419a4621be1577..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644 (file)
@@ -1,7 +0,0 @@
-"""
-Dispatch classes handle the raw network connections and the implementation
-details of making the HTTP request and receiving the response.
-"""
-from .asgi import ASGIDispatch
-
-__all__ = ["ASGIDispatch"]
index 335540b6efc6ac5a062f0252331edc7e5f121eb2..a0b55e2b16ba957ecb3defcce0282b9889ba8a70 100644 (file)
@@ -9,10 +9,7 @@ from .base import AsyncDispatcher
 class ASGIDispatch(AsyncDispatcher):
     """
     A custom AsyncDispatcher that handles sending requests directly to an ASGI app.
-
     The simplest way to use this functionality is to use the `app` argument.
-    This will automatically infer if 'app' is a WSGI or an ASGI application,
-    and will setup an appropriate dispatch class:
 
     ```
     client = httpx.AsyncClient(app=app)
index 4879ea8296508dd96f9c3fa8916975e17d17772b..ae25e4db4c774af65efcc7fdbc30c98d23c76ec5 100644 (file)
@@ -12,6 +12,18 @@ from ..models import (
 )
 
 
+class SyncDispatcher:
+    """
+    Base class for Dispatcher classes, that handle sending the request.
+    """
+
+    def send(self, request: Request, timeout: Timeout = None) -> Response:
+        raise NotImplementedError()  # pragma: nocover
+
+    def close(self) -> None:
+        pass  # pragma: nocover
+
+
 class AsyncDispatcher:
     """
     Base class for AsyncDispatcher classes, that handle sending the request.
diff --git a/httpx/dispatch/urllib3.py b/httpx/dispatch/urllib3.py
new file mode 100644 (file)
index 0000000..1782834
--- /dev/null
@@ -0,0 +1,128 @@
+import math
+import socket
+import ssl
+import typing
+
+import urllib3
+from urllib3.exceptions import MaxRetryError, SSLError
+
+from ..config import (
+    DEFAULT_POOL_LIMITS,
+    CertTypes,
+    PoolLimits,
+    Proxy,
+    SSLConfig,
+    Timeout,
+    VerifyTypes,
+)
+from ..content_streams import IteratorStream
+from ..models import Request, Response
+from ..utils import as_network_error
+from .base import SyncDispatcher
+
+
+class URLLib3Dispatcher(SyncDispatcher):
+    def __init__(
+        self,
+        *,
+        proxy: Proxy = None,
+        verify: VerifyTypes = True,
+        cert: CertTypes = None,
+        trust_env: bool = None,
+        pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
+    ):
+        ssl_config = SSLConfig(
+            verify=verify, cert=cert, trust_env=trust_env, http2=False
+        )
+        hard_limit = pool_limits.hard_limit
+        soft_limit = pool_limits.soft_limit
+
+        # Our connection pool configuration doesn't quite match up with urllib3's
+        # controls, but we can put sensible mappings in place:
+        if hard_limit is None:
+            block = False
+            if soft_limit is None:
+                num_pools = 1000
+                maxsize = 1000
+            else:
+                num_pools = int(math.sqrt(soft_limit))
+                maxsize = int(math.sqrt(soft_limit))
+        else:
+            block = True
+            num_pools = int(math.sqrt(hard_limit))
+            maxsize = int(math.sqrt(hard_limit))
+
+        self.pool = self.init_pool_manager(
+            proxy=proxy,
+            ssl_context=ssl_config.ssl_context,
+            num_pools=num_pools,
+            maxsize=maxsize,
+            block=block,
+        )
+
+    def init_pool_manager(
+        self,
+        proxy: typing.Optional[Proxy],
+        ssl_context: ssl.SSLContext,
+        num_pools: int,
+        maxsize: int,
+        block: bool,
+    ) -> typing.Union[urllib3.PoolManager, urllib3.ProxyManager]:
+        if proxy is None:
+            return urllib3.PoolManager(
+                ssl_context=ssl_context,
+                num_pools=num_pools,
+                maxsize=maxsize,
+                block=block,
+            )
+        else:
+            return urllib3.ProxyManager(
+                proxy_url=proxy.url,
+                proxy_headers=dict(proxy.headers),
+                ssl_context=ssl_context,
+                num_pools=num_pools,
+                maxsize=maxsize,
+                block=block,
+            )
+
+    def send(self, request: Request, timeout: Timeout = None) -> Response:
+        timeout = Timeout() if timeout is None else timeout
+        urllib3_timeout = urllib3.util.Timeout(
+            connect=timeout.connect_timeout, read=timeout.read_timeout
+        )
+        chunked = request.headers.get("Transfer-Encoding") == "chunked"
+        content_length = int(request.headers.get("Content-Length", "0"))
+        body = request.stream if chunked or content_length else None
+
+        with as_network_error(MaxRetryError, SSLError, socket.error):
+            conn = self.pool.urlopen(
+                method=request.method,
+                url=str(request.url),
+                headers=dict(request.headers),
+                body=body,
+                redirect=False,
+                assert_same_host=False,
+                retries=0,
+                preload_content=False,
+                chunked=chunked,
+                timeout=urllib3_timeout,
+                pool_timeout=timeout.pool_timeout,
+            )
+
+        def response_bytes() -> typing.Iterator[bytes]:
+            with as_network_error(socket.error):
+                for chunk in conn.stream(4096, decode_content=False):
+                    yield chunk
+
+        return Response(
+            status_code=conn.status,
+            http_version="HTTP/1.1",
+            headers=list(conn.headers.items()),
+            stream=IteratorStream(
+                iterator=response_bytes(), close_func=conn.release_conn
+            ),
+            request=request,
+        )
+
+    def close(self) -> None:
+        self.pool.clear()
diff --git a/httpx/dispatch/wsgi.py b/httpx/dispatch/wsgi.py
new file mode 100644 (file)
index 0000000..2a90187
--- /dev/null
@@ -0,0 +1,103 @@
+import io
+import typing
+
+from ..config import TimeoutTypes
+from ..content_streams import IteratorStream
+from ..models import Request, Response
+from .base import SyncDispatcher
+
+
+class WSGIDispatch(SyncDispatcher):
+    """
+    A custom SyncDispatcher that handles sending requests directly to a WSGI app.
+    The simplest way to use this functionality is to use the `app` argument.
+
+    ```
+    client = httpx.Client(app=app)
+    ```
+
+    Alternatively, you can setup the dispatch instance explicitly.
+    This allows you to include any additional configuration arguments specific
+    to the WSGIDispatch class:
+
+    ```
+    dispatch = httpx.WSGIDispatch(
+        app=app,
+        script_name="/submount",
+        remote_addr="1.2.3.4"
+    )
+    client = httpx.Client(dispatch=dispatch)
+    ```
+
+    Arguments:
+
+    * `app` - The WSGI application.
+    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
+       should be raised. Defaults to `True`. Can be set to `False` for use cases
+       such as testing the content of a client 500 response.
+    * `script_name` - The root path on which the WSGI application should be mounted.
+    * `remote_addr` - A string indicating the client IP of incoming requests.
+
+    """
+
+    def __init__(
+        self,
+        app: typing.Callable,
+        raise_app_exceptions: bool = True,
+        script_name: str = "",
+        remote_addr: str = "127.0.0.1",
+    ) -> None:
+        self.app = app
+        self.raise_app_exceptions = raise_app_exceptions
+        self.script_name = script_name
+        self.remote_addr = remote_addr
+
+    def send(self, request: Request, timeout: TimeoutTypes = None) -> Response:
+        environ = {
+            "wsgi.version": (1, 0),
+            "wsgi.url_scheme": request.url.scheme,
+            "wsgi.input": io.BytesIO(request.read()),
+            "wsgi.errors": io.BytesIO(),
+            "wsgi.multithread": True,
+            "wsgi.multiprocess": False,
+            "wsgi.run_once": False,
+            "REQUEST_METHOD": request.method,
+            "SCRIPT_NAME": self.script_name,
+            "PATH_INFO": request.url.path,
+            "QUERY_STRING": request.url.query,
+            "SERVER_NAME": request.url.host,
+            "SERVER_PORT": str(request.url.port),
+            "REMOTE_ADDR": self.remote_addr,
+        }
+        for key, value in request.headers.items():
+            key = key.upper().replace("-", "_")
+            if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"):
+                key = "HTTP_" + key
+            environ[key] = value
+
+        seen_status = None
+        seen_response_headers = None
+        seen_exc_info = None
+
+        def start_response(
+            status: str, response_headers: list, exc_info: typing.Any = None
+        ) -> None:
+            nonlocal seen_status, seen_response_headers, seen_exc_info
+            seen_status = status
+            seen_response_headers = response_headers
+            seen_exc_info = exc_info
+
+        result = self.app(environ, start_response)
+
+        assert seen_status is not None
+        assert seen_response_headers is not None
+        if seen_exc_info and self.raise_app_exceptions:
+            raise seen_exc_info[1]
+
+        return Response(
+            status_code=int(seen_status.split()[0]),
+            http_version="HTTP/1.1",
+            headers=seen_response_headers,
+            stream=IteratorStream(chunk for chunk in result),
+            request=request,
+        )
index 0cc13ed1ad0ef79903063d26c7a4ded885060b97..acaaf66d4a60dd34533a067510453bc945a48f32 100644 (file)
@@ -648,6 +648,18 @@ class Request:
             raise RequestNotRead()
         return self._content
 
+    def read(self) -> bytes:
+        """
+        Read and return the request content.
+        """
+        if not hasattr(self, "_content"):
+            self._content = b"".join([part for part in self.stream])
+            # If a streaming request has been read entirely into memory, then
+            # we can replace the stream with a raw bytes implementation,
+            # to ensure that any non-replayable streams can still be used.
+            self.stream = ByteStream(self._content)
+        return self._content
+
     async def aread(self) -> bytes:
         """
         Read and return the request content.
index b69228a5a353a10d61186bf8faa9508d3d31d9fe..5b2e962ff7b2b34c1e231013880c4f964bbb3ebf 100644 (file)
--- a/setup.cfg
+++ b/setup.cfg
@@ -14,7 +14,7 @@ combine_as_imports = True
 force_grid_wrap = 0
 include_trailing_comma = True
 known_first_party = httpx,tests
-known_third_party = brotli,certifi,chardet,cryptography,h11,h2,hstspreload,pytest,rfc3986,setuptools,sniffio,trio,trustme,uvicorn
+known_third_party = brotli,certifi,chardet,cryptography,h11,h2,hstspreload,pytest,rfc3986,setuptools,sniffio,trio,trustme,urllib3,uvicorn
 line_length = 88
 multi_line_output = 3
 
index d1f15a12da5a48fd0a188f1d0234bc307dbd3e52..2f313bdd9f6e1d28ead0bd9cccf5dfeea7f13689 100644 (file)
--- a/setup.py
+++ b/setup.py
@@ -59,6 +59,7 @@ setup(
         "idna==2.*",
         "rfc3986>=1.3,<2",
         "sniffio==1.*",
+        "urllib3==1.*",
     ],
     classifiers=[
         "Development Status :: 3 - Alpha",
index 7c877224049664386c311cf79156f46b0601be40..71687e64b61277c80bb63937d00ca4ccf07beaff 100644 (file)
@@ -5,11 +5,10 @@ import pytest
 import httpx
 
 
-@pytest.mark.asyncio
-async def test_get(server):
+def test_get(server):
     url = server.url
-    async with httpx.AsyncClient() as http:
-        response = await http.get(url)
+    with httpx.Client() as http:
+        response = http.get(url)
     assert response.status_code == 200
     assert response.url == url
     assert response.content == b"Hello, world!"
@@ -23,15 +22,14 @@ async def test_get(server):
     assert response.elapsed > timedelta(0)
 
 
-@pytest.mark.asyncio
-async def test_build_request(server):
+def test_build_request(server):
     url = server.url.copy_with(path="/echo_headers")
     headers = {"Custom-header": "value"}
 
-    async with httpx.AsyncClient() as client:
+    with httpx.Client() as client:
         request = client.build_request("GET", url)
         request.headers.update(headers)
-        response = await client.send(request)
+        response = client.send(request)
 
     assert response.status_code == 200
     assert response.url == url
@@ -39,62 +37,56 @@ async def test_build_request(server):
     assert response.json()["Custom-header"] == "value"
 
 
-@pytest.mark.asyncio
-async def test_post(server):
-    async with httpx.AsyncClient() as client:
-        response = await client.post(server.url, data=b"Hello, world!")
+def test_post(server):
+    with httpx.Client() as client:
+        response = client.post(server.url, data=b"Hello, world!")
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_post_json(server):
-    async with httpx.AsyncClient() as client:
-        response = await client.post(server.url, json={"text": "Hello, world!"})
+def test_post_json(server):
+    with httpx.Client() as client:
+        response = client.post(server.url, json={"text": "Hello, world!"})
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_stream_response(server):
-    async with httpx.AsyncClient() as client:
-        async with client.stream("GET", server.url) as response:
-            content = await response.aread()
+def test_stream_response(server):
+    with httpx.Client() as client:
+        with client.stream("GET", server.url) as response:
+            content = response.read()
     assert response.status_code == 200
     assert content == b"Hello, world!"
 
 
-@pytest.mark.asyncio
-async def test_stream_iterator(server):
+def test_stream_iterator(server):
     body = b""
 
-    async with httpx.AsyncClient() as client:
-        async with client.stream("GET", server.url) as response:
-            async for chunk in response.aiter_bytes():
+    with httpx.Client() as client:
+        with client.stream("GET", server.url) as response:
+            for chunk in response.iter_bytes():
                 body += chunk
 
     assert response.status_code == 200
     assert body == b"Hello, world!"
 
 
-@pytest.mark.asyncio
-async def test_raw_iterator(server):
+def test_raw_iterator(server):
     body = b""
 
-    async with httpx.AsyncClient() as client:
-        async with client.stream("GET", server.url) as response:
-            async for chunk in response.aiter_raw():
+    with httpx.Client() as client:
+        with client.stream("GET", server.url) as response:
+            for chunk in response.iter_raw():
                 body += chunk
 
     assert response.status_code == 200
     assert body == b"Hello, world!"
 
 
-@pytest.mark.asyncio
-async def test_raise_for_status(server):
-    async with httpx.AsyncClient() as client:
+def test_raise_for_status(server):
+    with httpx.Client() as client:
         for status_code in (200, 400, 404, 500, 505):
-            response = await client.request(
+            response = client.request(
                 "GET", server.url.copy_with(path="/status/{}".format(status_code))
             )
             if 400 <= status_code < 600:
@@ -105,78 +97,59 @@ async def test_raise_for_status(server):
                 assert response.raise_for_status() is None
 
 
-@pytest.mark.asyncio
-async def test_options(server):
-    async with httpx.AsyncClient() as client:
-        response = await client.options(server.url)
+def test_options(server):
+    with httpx.Client() as client:
+        response = client.options(server.url)
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_head(server):
-    async with httpx.AsyncClient() as client:
-        response = await client.head(server.url)
+def test_head(server):
+    with httpx.Client() as client:
+        response = client.head(server.url)
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_put(server):
-    async with httpx.AsyncClient() as client:
-        response = await client.put(server.url, data=b"Hello, world!")
+def test_put(server):
+    with httpx.Client() as client:
+        response = client.put(server.url, data=b"Hello, world!")
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_patch(server):
-    async with httpx.AsyncClient() as client:
-        response = await client.patch(server.url, data=b"Hello, world!")
+def test_patch(server):
+    with httpx.Client() as client:
+        response = client.patch(server.url, data=b"Hello, world!")
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_delete(server):
-    async with httpx.AsyncClient() as client:
-        response = await client.delete(server.url)
+def test_delete(server):
+    with httpx.Client() as client:
+        response = client.delete(server.url)
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_base_url(server):
+def test_base_url(server):
     base_url = server.url
-    async with httpx.AsyncClient(base_url=base_url) as client:
-        response = await client.get("/")
+    with httpx.Client(base_url=base_url) as client:
+        response = client.get("/")
     assert response.status_code == 200
     assert response.url == base_url
 
 
-@pytest.mark.asyncio
-async def test_uds(uds_server):
-    url = uds_server.url
-    uds = uds_server.config.uds
-    assert uds is not None
-    async with httpx.AsyncClient(uds=uds) as client:
-        response = await client.get(url)
-    assert response.status_code == 200
-    assert response.text == "Hello, world!"
-    assert response.encoding == "iso-8859-1"
-
-
 def test_merge_url():
-    client = httpx.AsyncClient(base_url="https://www.paypal.com/")
+    client = httpx.Client(base_url="https://www.paypal.com/")
     url = client.merge_url("http://www.paypal.com")
 
     assert url.scheme == "https"
     assert url.is_ssl
 
 
-@pytest.mark.asyncio
-async def test_elapsed_delay(server):
+def test_elapsed_delay(server):
     url = server.url.copy_with(path="/slow_response/100")
-    async with httpx.AsyncClient() as client:
-        response = await client.get(url)
+    with httpx.Client() as client:
+        response = client.get(url)
     assert response.elapsed.total_seconds() > 0.0
index b2e704747abfdf948966e9ab8dc1f0fe13ed1c91..a1008739c970d530647ce1192f94bbe2043a6464 100644 (file)
@@ -81,7 +81,6 @@ def test_dispatcher_for_request(url, proxies, expected):
     if expected is None:
         assert dispatcher is client.dispatch
     else:
-        assert isinstance(dispatcher, httpx.HTTPProxy)
         assert dispatcher.proxy_url == expected
 
 
index 5acb6579d069df6d125ca1e8472a209e75ec6ebc..bdb4d6bf8bf29326df2b67aa7306c625954d8b8e 100644 (file)
@@ -1,6 +1,7 @@
 import pytest
 
 import httpx
+from httpx.dispatch.proxy_http import HTTPProxy
 
 from .utils import MockRawSocketBackend
 
@@ -21,7 +22,7 @@ async def test_proxy_tunnel_success():
             ]
         ),
     )
-    async with httpx.HTTPProxy(
+    async with HTTPProxy(
         proxy_url="http://127.0.0.1:8000", backend=raw_io, proxy_mode="TUNNEL_ONLY",
     ) as proxy:
         response = await proxy.request("GET", "http://example.com")
@@ -57,7 +58,7 @@ async def test_proxy_tunnel_non_2xx_response(status_code):
     )
 
     with pytest.raises(httpx.ProxyError) as e:
-        async with httpx.HTTPProxy(
+        async with HTTPProxy(
             proxy_url="http://127.0.0.1:8000", backend=raw_io, proxy_mode="TUNNEL_ONLY",
         ) as proxy:
             await proxy.request("GET", "http://example.com")
@@ -107,7 +108,7 @@ async def test_proxy_tunnel_start_tls():
             ]
         ),
     )
-    async with httpx.HTTPProxy(
+    async with HTTPProxy(
         proxy_url="http://127.0.0.1:8000", backend=raw_io, proxy_mode="TUNNEL_ONLY",
     ) as proxy:
         resp = await proxy.request("GET", "https://example.com")
@@ -157,7 +158,7 @@ async def test_proxy_forwarding(proxy_mode):
             ]
         ),
     )
-    async with httpx.HTTPProxy(
+    async with HTTPProxy(
         proxy_url="http://127.0.0.1:8000",
         backend=raw_io,
         proxy_mode=proxy_mode,
@@ -186,14 +187,14 @@ async def test_proxy_forwarding(proxy_mode):
 
 
 def test_proxy_url_with_username_and_password():
-    proxy = httpx.HTTPProxy("http://user:password@example.com:1080")
+    proxy = HTTPProxy("http://user:password@example.com:1080")
 
     assert proxy.proxy_url == "http://example.com:1080"
     assert proxy.proxy_headers["Proxy-Authorization"] == "Basic dXNlcjpwYXNzd29yZA=="
 
 
 def test_proxy_repr():
-    proxy = httpx.HTTPProxy(
+    proxy = HTTPProxy(
         "http://127.0.0.1:1080",
         proxy_headers={"Custom": "Header"},
         proxy_mode="DEFAULT",
index 0c7269e21957bb293eae90edfeb3d81ca8a601c0..c72e0af96423c4247b25d45f71a2574c11ed9ceb 100644 (file)
@@ -18,26 +18,33 @@ def test_content_length_header():
     assert request.headers["Content-Length"] == "8"
 
 
-@pytest.mark.asyncio
-async def test_url_encoded_data():
+def test_url_encoded_data():
     request = httpx.Request("POST", "http://example.org", data={"test": "123"})
-    await request.aread()
+    request.read()
 
     assert request.headers["Content-Type"] == "application/x-www-form-urlencoded"
     assert request.content == b"test=123"
 
 
-@pytest.mark.asyncio
-async def test_json_encoded_data():
+def test_json_encoded_data():
     request = httpx.Request("POST", "http://example.org", json={"test": 123})
-    await request.aread()
+    request.read()
 
     assert request.headers["Content-Type"] == "application/json"
     assert request.content == b'{"test": 123}'
 
 
+def test_read_and_stream_data():
+    # Ensure a request may still be streamed if it has been read.
+    # Needed for cases such as authentication classes that read the request body.
+    request = httpx.Request("POST", "http://example.org", json={"test": 123})
+    request.read()
+    content = b"".join([part for part in request.stream])
+    assert content == request.content
+
+
 @pytest.mark.asyncio
-async def test_read_and_stream_data():
+async def test_aread_and_stream_data():
     # Ensure a request may still be streamed if it has been read.
     # Needed for cases such as authentication classes that read the request body.
     request = httpx.Request("POST", "http://example.org", json={"test": 123})
index 1c658e91a024e879e2f3448bbef774856dcad351..4c1d61162032c4933216e2d1a7e9f1bb242e4c50 100644 (file)
@@ -3,73 +3,64 @@ import pytest
 import httpx
 
 
-@pytest.mark.asyncio
-async def test_get(server):
-    response = await httpx.get(server.url)
+def test_get(server):
+    response = httpx.get(server.url)
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
     assert response.text == "Hello, world!"
     assert response.http_version == "HTTP/1.1"
 
 
-@pytest.mark.asyncio
-async def test_post(server):
-    response = await httpx.post(server.url, data=b"Hello, world!")
+def test_post(server):
+    response = httpx.post(server.url, data=b"Hello, world!")
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_post_byte_iterator(server):
-    async def data():
+def test_post_byte_iterator(server):
+    def data():
         yield b"Hello"
         yield b", "
         yield b"world!"
 
-    response = await httpx.post(server.url, data=data())
+    response = httpx.post(server.url, data=data())
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_options(server):
-    response = await httpx.options(server.url)
+def test_options(server):
+    response = httpx.options(server.url)
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_head(server):
-    response = await httpx.head(server.url)
+def test_head(server):
+    response = httpx.head(server.url)
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_put(server):
-    response = await httpx.put(server.url, data=b"Hello, world!")
+def test_put(server):
+    response = httpx.put(server.url, data=b"Hello, world!")
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_patch(server):
-    response = await httpx.patch(server.url, data=b"Hello, world!")
+def test_patch(server):
+    response = httpx.patch(server.url, data=b"Hello, world!")
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_delete(server):
-    response = await httpx.delete(server.url)
+def test_delete(server):
+    response = httpx.delete(server.url)
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
 
 
-@pytest.mark.asyncio
-async def test_stream(server):
-    async with httpx.stream("GET", server.url) as response:
-        await response.aread()
+def test_stream(server):
+    with httpx.stream("GET", server.url) as response:
+        response.read()
 
     assert response.status_code == 200
     assert response.reason_phrase == "OK"
diff --git a/tests/test_wsgi.py b/tests/test_wsgi.py
new file mode 100644 (file)
index 0000000..0dcf995
--- /dev/null
@@ -0,0 +1,95 @@
+import sys
+
+import pytest
+
+import httpx
+
+
+def hello_world(environ, start_response):
+    status = "200 OK"
+    output = b"Hello, World!"
+
+    response_headers = [
+        ("Content-type", "text/plain"),
+        ("Content-Length", str(len(output))),
+    ]
+
+    start_response(status, response_headers)
+
+    return [output]
+
+
+def echo_body(environ, start_response):
+    status = "200 OK"
+    output = environ["wsgi.input"].read()
+
+    response_headers = [
+        ("Content-type", "text/plain"),
+        ("Content-Length", str(len(output))),
+    ]
+
+    start_response(status, response_headers)
+
+    return [output]
+
+
+def echo_body_with_response_stream(environ, start_response):
+    status = "200 OK"
+
+    response_headers = [("Content-Type", "text/plain")]
+
+    start_response(status, response_headers)
+
+    def output_generator(f):
+        while True:
+            output = f.read(2)
+            if not output:
+                break
+            yield output
+
+    return output_generator(f=environ["wsgi.input"])
+
+
+def raise_exc(environ, start_response):
+    status = "500 Server Error"
+    output = b"Nope!"
+
+    response_headers = [
+        ("Content-type", "text/plain"),
+        ("Content-Length", str(len(output))),
+    ]
+
+    try:
+        raise ValueError()
+    except ValueError:
+        exc_info = sys.exc_info()
+        start_response(status, response_headers, exc_info=exc_info)
+
+    return [output]
+
+
+def test_wsgi():
+    client = httpx.Client(app=hello_world)
+    response = client.get("http://www.example.org/")
+    assert response.status_code == 200
+    assert response.text == "Hello, World!"
+
+
+def test_wsgi_upload():
+    client = httpx.Client(app=echo_body)
+    response = client.post("http://www.example.org/", data=b"example")
+    assert response.status_code == 200
+    assert response.text == "example"
+
+
+def test_wsgi_upload_with_response_stream():
+    client = httpx.Client(app=echo_body_with_response_stream)
+    response = client.post("http://www.example.org/", data=b"example")
+    assert response.status_code == 200
+    assert response.text == "example"
+
+
+def test_wsgi_exc():
+    client = httpx.Client(app=raise_exc)
+    with pytest.raises(ValueError):
+        client.get("http://www.example.org/")