From: Tom Christie
Date: Thu, 11 Sep 2025 11:02:38 +0000 (+0100)
Subject: Deployed 652f051 with MkDocs version: 1.6.1
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=refs%2Fheads%2Fgh-pages;p=thirdparty%2Fhttpx.git

Deployed 652f051 with MkDocs version: 1.6.1
---

diff --git a/advanced/authentication/index.html b/advanced/authentication/index.html
index 3bc3c3a9..f8410a1b 100644
--- a/advanced/authentication/index.html
+++ b/advanced/authentication/index.html
@@ -1149,22 +1149,22 @@ or fallback to the default.

  • An instance of a subclass of httpx.Auth.
  • The most involved of these is the last, which allows you to create authentication flows involving one or more requests. A subclass of httpx.Auth should implement def auth_flow(request), and yield any requests that need to be made...

class MyCustomAuth(httpx.Auth):
    def __init__(self, token):
        self.token = token

    def auth_flow(self, request):
        # Send the request, with a custom `X-Authentication` header.
        request.headers['X-Authentication'] = self.token
        yield request
     

    If the auth flow requires more than one request, you can issue multiple yields, and obtain the response in each case...

class MyCustomAuth(httpx.Auth):
    def __init__(self, token):
        self.token = token

    def auth_flow(self, request):
        response = yield request

        if response.status_code == 401:
            # If the server issues a 401 response then resend the request,
            # with a custom `X-Authentication` header.
            request.headers['X-Authentication'] = self.token
            yield request

@@ -1175,13 +1175,13 @@ or fallback to the default.

    Custom authentication classes are designed to not perform any I/O, so that they may be used with both sync and async client instances. If you are implementing an authentication scheme that requires the request body, then you need to indicate this on the class using a requires_request_body property.

    You will then be able to access request.content inside the .auth_flow() method.

class MyCustomAuth(httpx.Auth):
    requires_request_body = True

    def __init__(self, token):
        self.token = token

    def auth_flow(self, request):
        response = yield request

        if response.status_code == 401:
            # If the server issues a 401 response then resend the request,
            # with a custom `X-Authentication` header.
            request.headers['X-Authentication'] = self.sign_request(...)
            yield request

    def sign_request(self, request):
        # Create a request signature, based on `request.method`, `request.url`,
        # `request.headers`, and `request.content`.
        ...

    Similarly, if you are implementing a scheme that requires access to the response body, then use the requires_response_body property. You will then be able to access response body properties and methods such as response.content, response.text, response.json(), etc.

class MyCustomAuth(httpx.Auth):
    requires_response_body = True

    def __init__(self, access_token, refresh_token, refresh_url):
        self.access_token = access_token
        self.refresh_token = refresh_token
        self.refresh_url = refresh_url

    def auth_flow(self, request):
        request.headers["X-Authentication"] = self.access_token
        response = yield request

        if response.status_code == 401:
            # If the server issues a 401 response, then issue a request to
            # refresh tokens, and resend the request.
            refresh_response = yield self.build_refresh_request()
            self.update_tokens(refresh_response)

            request.headers["X-Authentication"] = self.access_token
            yield request

    def build_refresh_request(self):
        # Return an `httpx.Request` for refreshing tokens.
        ...

    def update_tokens(self, response):
        # Update the `.access_token` and `.refresh_token` tokens
        # based on a refresh response.
        data = response.json()
        ...

@@ -1229,47 +1229,47 @@ or fallback to the default.

    If you do need to perform I/O other than HTTP requests, such as accessing a disk-based cache, or you need to use concurrency primitives, such as locks, then you should override .sync_auth_flow() and .async_auth_flow() (instead of .auth_flow()). The former will be used by httpx.Client, while the latter will be used by httpx.AsyncClient.

import asyncio
import threading
import httpx


class MyCustomAuth(httpx.Auth):
    def __init__(self):
        self._sync_lock = threading.RLock()
        self._async_lock = asyncio.Lock()

    def sync_get_token(self):
        with self._sync_lock:
            ...

    def sync_auth_flow(self, request):
        token = self.sync_get_token()
        request.headers["Authorization"] = f"Token {token}"
        yield request

    async def async_get_token(self):
        async with self._async_lock:
            ...

    async def async_auth_flow(self, request):
        token = await self.async_get_token()
        request.headers["Authorization"] = f"Token {token}"
        yield request
     

If you only want to support one of the two methods, then you should still override the unused one, but have it raise an explicit RuntimeError.

import httpx
import sync_only_library


class MyCustomAuth(httpx.Auth):
    def sync_auth_flow(self, request):
        token = sync_only_library.get_token(...)
        request.headers["Authorization"] = f"Token {token}"
        yield request

    async def async_auth_flow(self, request):
        raise RuntimeError("Cannot use a sync authentication class with httpx.AsyncClient")
     
diff --git a/advanced/clients/index.html b/advanced/clients/index.html
index 23707b32..ff6bae4f 100644
--- a/advanced/clients/index.html
+++ b/advanced/clients/index.html
@@ -1284,7 +1284,7 @@
...     r = client.get('https://example.com', auth=('alice', 'ecila123'))
...
>>> _, _, auth = r.request.headers['Authorization'].partition(' ')
>>> import base64
>>> base64.b64decode(auth)
b'alice:ecila123'
    @@ -1332,10 +1332,10 @@

    If you need to monitor download progress of large responses, you can use response streaming and inspect the response.num_bytes_downloaded property.

    This interface is required for properly determining download progress, because the total number of bytes returned by response.content or response.iter_content() will not always correspond with the raw content length of the response if HTTP response compression is being used.

    For example, showing a progress bar using the tqdm library while a response is being downloaded could be done like this…

import tempfile

import httpx
from tqdm import tqdm

with tempfile.NamedTemporaryFile() as download_file:
    url = "https://speed.hetzner.de/100MB.bin"
    @@ -1352,9 +1352,9 @@
     
     

    tqdm progress bar

    Or an alternate example, this time using the rich library…

import tempfile
import httpx
import rich.progress

with tempfile.NamedTemporaryFile() as download_file:
    url = "https://speed.hetzner.de/100MB.bin"
    @@ -1377,14 +1377,14 @@
     

    Monitoring upload progress

If you need to monitor upload progress of a large request body, you can use request content generator streaming.

    For example, showing a progress bar using the tqdm library.

import io
import random

import httpx
from tqdm import tqdm


def gen():
    """
    This is a complete example using generated random bytes.
    You can replace `io.BytesIO` with a real file object.
    @@ -1405,8 +1405,9 @@
     

As mentioned in the quickstart, multipart file encoding is available by passing a dictionary with the names of the payloads as keys and either a tuple of elements, a file-like object, or a string as values.

->>> files = {'upload-file': ('report.xls', open('report.xls', 'rb'), 'application/vnd.ms-excel')}
->>> r = httpx.post("https://httpbin.org/post", files=files)
+>>> with open('report.xls', 'rb') as report_file:
+...     files = {'upload-file': ('report.xls', report_file, 'application/vnd.ms-excel')}
+...     r = httpx.post("https://httpbin.org/post", files=files)
     >>> print(r.text)
     {
       ...
    @@ -1450,9 +1451,12 @@ MIME header field.
     

You can also send multiple files in one go with a multiple file field form. To do that, pass a list of (field, <file>) items instead of a dictionary, allowing you to pass multiple items with the same field. For instance, this request sends two files, foo.png and bar.png, in one request on the images form field:

->>> files = [('images', ('foo.png', open('foo.png', 'rb'), 'image/png')),
-             ('images', ('bar.png', open('bar.png', 'rb'), 'image/png'))]
->>> r = httpx.post("https://httpbin.org/post", files=files)
+>>> with open('foo.png', 'rb') as foo_file, open('bar.png', 'rb') as bar_file:
+...     files = [
+...         ('images', ('foo.png', foo_file, 'image/png')),
+...         ('images', ('bar.png', bar_file, 'image/png')),
+...     ]
+...     r = httpx.post("https://httpbin.org/post", files=files)
     
diff --git a/advanced/event-hooks/index.html b/advanced/event-hooks/index.html
index 9a202a00..83713220 100644
--- a/advanced/event-hooks/index.html
+++ b/advanced/event-hooks/index.html
@@ -972,10 +972,10 @@ every time a particular type of event takes place.

  • response - Called after the response has been fetched from the network, but before it is returned to the caller. Passed the response instance.
  • These allow you to install client-wide functionality such as logging, monitoring or tracing.

def log_request(request):
    print(f"Request event hook: {request.method} {request.url} - Waiting for response")

def log_response(response):
    request = response.request
    print(f"Response event hook: {request.method} {request.url} - Status {response.status_code}")
     
    @@ -985,7 +985,7 @@ every time a particular type of event takes place.

    You can also use these hooks to install response processing code, such as this example, which creates a client instance that always raises httpx.HTTPStatusError on 4xx and 5xx responses.

def raise_on_4xx_5xx(response):
    response.raise_for_status()

client = httpx.Client(event_hooks={'response': [raise_on_4xx_5xx]})
@@ -999,7 +999,7 @@ should be read or not.

If you need access to the response body inside an event hook, you will need to call response.read(), or for AsyncClients, response.aread(). An example of such a hook is sketched below.
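A minimal sketch of a response hook that reads the body before logging its size; the hook name log_body is illustrative, not part of httpx:

def log_body(response):
    # The body has not been read when the hook runs, so read it explicitly.
    response.read()
    print(f"Received {len(response.content)} bytes from {response.request.url}")

client = httpx.Client(event_hooks={'response': [log_body]})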

    The hooks are also allowed to modify request and response objects.

def add_timestamp(request):
    # Assumes `from datetime import datetime, timezone` at module level.
    request.headers['x-request-timestamp'] = datetime.now(tz=timezone.utc).isoformat()

client = httpx.Client(event_hooks={'request': [add_timestamp]})
    diff --git a/advanced/extensions/index.html b/advanced/extensions/index.html
    index c35b3079..7a1dcee1 100644
    --- a/advanced/extensions/index.html
    +++ b/advanced/extensions/index.html
    @@ -1242,9 +1242,9 @@
     

    The trace extension allows a callback handler to be installed to monitor the internal flow of events within the underlying httpcore transport.

    The simplest way to explain this is with an example:

import httpx

def log(event_name, info):
    print(event_name, info)

client = httpx.Client()
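As a sketch of how the log callback above is then attached, the handler is passed per-request through the extensions mapping (assuming the client defined above):

response = client.get("https://www.example.com/", extensions={"trace": log})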
    diff --git a/advanced/ssl/index.html b/advanced/ssl/index.html
    index ea4a01c3..090a8d98 100644
    --- a/advanced/ssl/index.html
    +++ b/advanced/ssl/index.html
    @@ -1111,9 +1111,9 @@
     

    If you're using a Client() instance you should pass any verify=<...> configuration when instantiating the client.

    By default the certifi CA bundle is used for SSL verification.

    For more complex configurations you can pass an SSL Context instance...

import certifi
import httpx
import ssl

# This SSL context is equivalent to the default `verify=True`.
ctx = ssl.create_default_context(cafile=certifi.where())
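A one-line sketch of then passing the context to a client, as described above:

client = httpx.Client(verify=ctx)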
    @@ -1121,9 +1121,9 @@
     

    Using the truststore package to support system certificate stores...

import ssl
import truststore
import httpx

# Use system certificate stores.
ctx = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    @@ -1131,8 +1131,8 @@
     

Loading an alternative certificate verification store using the standard SSL context API...

import httpx
import ssl

# Use an explicitly configured certificate store.
ctx = ssl.create_default_context(cafile="path/to/certs.pem")  # Either cafile or capath.
    @@ -1148,17 +1148,7 @@
     

    Working with SSL_CERT_FILE and SSL_CERT_DIR

-Unlike requests, the httpx package does not automatically pull in the environment variables SSL_CERT_FILE or SSL_CERT_DIR. If you want to use these they need to be enabled explicitly.
-
-For example...
-
-# Use `SSL_CERT_FILE` or `SSL_CERT_DIR` if configured.
-# Otherwise default to certifi.
-ctx = ssl.create_default_context(
-    cafile=os.environ.get("SSL_CERT_FILE", certifi.where()),
-    capath=os.environ.get("SSL_CERT_DIR"),
-)
-client = httpx.Client(verify=ctx)
+
+httpx does respect the SSL_CERT_FILE and SSL_CERT_DIR environment variables by default. For details, refer to the section on the environment variables page.

    Making HTTPS requests to a local server

    When making requests to local servers, such as a development server running on localhost, you will typically be using unencrypted HTTP connections.

    If you do need to make HTTPS connections to a local server, for example to test an HTTPS-only service, you will need to create and use your own certificates. Here's one way to do it...

diff --git a/advanced/text-encodings/index.html b/advanced/text-encodings/index.html
index 42c28acc..b61957a2 100644
--- a/advanced/text-encodings/index.html
+++ b/advanced/text-encodings/index.html
@@ -1064,7 +1064,7 @@

    In cases where no charset information is included on the response, the default behaviour is to assume "utf-8" encoding, which is by far the most widely used text encoding on the internet.

    Using the default encoding

    To understand this better let's start by looking at the default behaviour for text decoding...

import httpx
# Instantiate a client with the default configuration.
client = httpx.Client()
# Using the client...
    @@ -1078,7 +1078,7 @@
     

    This is normally absolutely fine. Most servers will respond with a properly formatted Content-Type header, including a charset encoding. And in most cases where no charset encoding is included, UTF-8 is very likely to be used, since it is so widely adopted.

    Using an explicit encoding

    In some cases we might be making requests to a site where no character set information is being set explicitly by the server, but we know what the encoding is. In this case it's best to set the default encoding explicitly on the client.

import httpx
# Instantiate a client with a Japanese character set as the default encoding.
client = httpx.Client(default_encoding="shift-jis")
# Using the client...
    @@ -1103,10 +1103,10 @@ $ pip install 

    Once chardet is installed, we can configure a client to use character-set autodetection.

import httpx
import chardet

def autodetect(content):
    return chardet.detect(content).get("encoding")

# Using a client with character-set autodetection enabled.
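A sketch of the wiring the comment above refers to, relying on the default_encoding parameter accepting a callable:

client = httpx.Client(default_encoding=autodetect)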
    diff --git a/advanced/transports/index.html b/advanced/transports/index.html
    index 2eb325c6..00a5b211 100644
    --- a/advanced/transports/index.html
    +++ b/advanced/transports/index.html
    @@ -1390,20 +1390,20 @@ sending of the requests.

    For some advanced configuration you might need to instantiate a transport class directly, and pass it to the client instance. One example is the local_address configuration which is only available via this low-level API.

>>> import httpx
>>> transport = httpx.HTTPTransport(local_address="0.0.0.0")
>>> client = httpx.Client(transport=transport)
     

    Connection retries are also available via this interface. Requests will be retried the given number of times in case an httpx.ConnectError or an httpx.ConnectTimeout occurs, allowing smoother operation under flaky networks. If you need other forms of retry behaviors, such as handling read/write errors or reacting to 503 Service Unavailable, consider general-purpose tools such as tenacity.

>>> import httpx
>>> transport = httpx.HTTPTransport(retries=1)
>>> client = httpx.Client(transport=transport)
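For the broader retry behaviours mentioned above, a minimal sketch using tenacity might look like this; the fetch helper and the exact retry policy are illustrative, not part of httpx:

import httpx
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential

# Retry on read/write errors, up to three attempts with exponential backoff.
# Add httpx.HTTPStatusError to the tuple (and call raise_for_status) to also
# react to responses such as 503 Service Unavailable.
@retry(
    retry=retry_if_exception_type((httpx.ReadError, httpx.WriteError)),
    stop=stop_after_attempt(3),
    wait=wait_exponential(multiplier=0.5),
)
def fetch(url):
    with httpx.Client() as client:
        return client.get(url)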
     

    Similarly, instantiating a transport directly provides a uds option for connecting via a Unix Domain Socket that is only available via this low-level API:

>>> import httpx
>>> # Connect to the Docker API via a Unix Socket.
>>> transport = httpx.HTTPTransport(uds="/var/run/docker.sock")
>>> client = httpx.Client(transport=transport)
    @@ -1421,14 +1421,14 @@ connecting via a Unix Domain Socket that is only available via this low-level AP
     
     

    Example

    Here's an example of integrating against a Flask application:

from flask import Flask
import httpx


app = Flask(__name__)

@app.route("/")
def hello():
    return "Hello World!"

transport = httpx.WSGITransport(app=app)
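A sketch of then mounting the transport on a client; the base_url value is illustrative:

with httpx.Client(transport=transport, base_url="http://testserver") as client:
    response = client.get("/")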
    @@ -1461,12 +1461,12 @@ connecting via a Unix Domain Socket that is only available via this low-level AP
     
     

    Example

    Let's take this Starlette application as an example:

from starlette.applications import Starlette
from starlette.responses import HTMLResponse
from starlette.routing import Route


async def hello(request):
    return HTMLResponse("Hello World!")
     
     
    @@ -1512,24 +1512,24 @@ use with AsyncClient.

    See the handle_request and handle_async_request docstrings for more details on the specifics of the Transport API.

    A complete example of a custom transport implementation would be:

import json
import httpx

class HelloWorldTransport(httpx.BaseTransport):
    """
    A mock transport that always returns a JSON "Hello, world!" response.
    """

    def handle_request(self, request):
        return httpx.Response(200, json={"text": "Hello, world!"})
     

    Or this example, which uses a custom transport and httpx.Mounts to always redirect http:// requests.

class HTTPSRedirect(httpx.BaseTransport):
    """
    A transport that always redirects to HTTPS.
    """
    def handle_request(self, request):
        url = request.url.copy_with(scheme="https")
        return httpx.Response(303, headers={"Location": str(url)})
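A sketch of wiring this up using the client-level mounts argument (rather than the httpx.Mounts helper mentioned above), so that plain http:// requests hit the redirecting transport while https:// requests use the default transport:

client = httpx.Client(mounts={
    "http://": HTTPSRedirect(),
    "https://": httpx.HTTPTransport(),
})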
     
    @@ -1542,17 +1542,17 @@ on the specifics of the Transport API.

    A useful pattern here is custom transport classes that wrap the default HTTP implementation. For example...

class DebuggingTransport(httpx.BaseTransport):
    def __init__(self, **kwargs):
        self._wrapper = httpx.HTTPTransport(**kwargs)

    def handle_request(self, request):
        print(f">>> {request}")
        response = self._wrapper.handle_request(request)
        print(f"<<< {response}")
        return response

    def close(self):
        self._wrapper.close()

transport = DebuggingTransport()
    @@ -1560,20 +1560,20 @@ on the specifics of the Transport API.

    Here's another case, where we're using a round-robin across a number of different proxies...

class ProxyRoundRobin(httpx.BaseTransport):
    def __init__(self, proxies, **kwargs):
        self._transports = [
            httpx.HTTPTransport(proxy=proxy, **kwargs)
            for proxy in proxies
        ]
        self._idx = 0

    def handle_request(self, request):
        transport = self._transports[self._idx]
        self._idx = (self._idx + 1) % len(self._transports)
        return transport.handle_request(request)

    def close(self):
        for transport in self._transports:
            transport.close()
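A sketch of instantiating it; the proxy URLs are illustrative placeholders:

proxies = ["http://127.0.0.1:3001", "http://127.0.0.1:3002", "http://127.0.0.1:3003"]
transport = ProxyRoundRobin(proxies=proxies)
client = httpx.Client(transport=transport)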
     
    @@ -1591,7 +1591,7 @@ on the specifics of the Transport API.

    and return pre-determined responses, rather than making actual network requests.

    The httpx.MockTransport class accepts a handler function, which can be used to map requests onto pre-determined responses:

def handler(request):
    return httpx.Response(200, json={"text": "Hello, world!"})
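A sketch of wrapping the handler in a mock transport and passing it to a client:

transport = httpx.MockTransport(handler)
client = httpx.Client(transport=transport)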
     
     
@@ -1610,14 +1610,14 @@ mocking library, RESPX, or the same style
     used for specifying proxy routing.

import httpx

class HTTPSRedirectTransport(httpx.BaseTransport):
    """
    A transport that always redirects to HTTPS.
    """

    def handle_request(self, method, url, headers, stream, extensions):
        scheme, host, port, path = url
        if port is None:
            location = b"https://%s%s" % (host, path)
    @@ -1646,7 +1646,7 @@ used for specifying proxy routing.

    Mocking requests to a given domain:

# All requests to "example.org" should be mocked out.
# Other requests occur as usual.
def handler(request):
    return httpx.Response(200, json={"text": "Hello, World!"})

mounts = {"all://example.org": httpx.MockTransport(handler)}
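A sketch of passing the mounts mapping to a client:

client = httpx.Client(mounts=mounts)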
    diff --git a/api/index.html b/api/index.html
    index 98d30b3a..a59457ba 100644
    --- a/api/index.html
    +++ b/api/index.html
    @@ -868,6 +868,15 @@
         
       
       
+ (navigation sidebar: new "Proxy" entry)
@@ -1136,6 +1145,15 @@
+ (navigation sidebar: new "Proxy" entry)
@@ -1198,10 +1216,10 @@ variables for configuration.

    Returns: Response

    Usage:

>>> import httpx
>>> response = httpx.request('GET', 'https://httpbin.org/get')
>>> response
<Response [200 OK]>
     
    @@ -1305,7 +1323,7 @@ header. Set to a callable for automatic character set detection. Default: "utf-8
    auth

    Authentication class used when none is passed at the request-level.

    See also Authentication.

request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Build and send a request.

    Equivalent to:

    request = client.build_request(...)
    @@ -1315,33 +1333,33 @@ header. Set to a callable for automatic character set detection. Default: "utf-8
     

    See Client.build_request(), Client.send() and Merging of configuration for how the various parameters are merged with client-level configuration.

get(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a GET request.

Parameters: See httpx.request.

head(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a HEAD request.

Parameters: See httpx.request.

options(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send an OPTIONS request.

Parameters: See httpx.request.

post(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a POST request.

Parameters: See httpx.request.

put(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a PUT request.

Parameters: See httpx.request.

patch(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a PATCH request.

Parameters: See httpx.request.

delete(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a DELETE request.

Parameters: See httpx.request.

stream(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Alternative to httpx.request() that streams the response body instead of loading it into memory at once.

Parameters: See httpx.request.

See also: Streaming Responses

build_request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, timeout=, extensions=None)

Build and return a request instance.

• The params, headers and cookies arguments are merged with any values set on the client.
• The url argument is merged with any base_url set on the client.

See also: Request instances

send(self, request, *, stream=False, auth=, follow_redirects=)

    Send a request.

    The request is sent as-is, unmodified.

Typically you'll want to build one with Client.build_request()
@@ -1412,7 +1430,7 @@ header. Set to a callable for automatic character set detection. Default: "utf-8

    auth

    Authentication class used when none is passed at the request-level.

    See also Authentication.

async request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Build and send a request.

    Equivalent to:

    request = client.build_request(...)
    @@ -1422,33 +1440,33 @@ header. Set to a callable for automatic character set detection. Default: "utf-8
     

    See AsyncClient.build_request(), AsyncClient.send() and Merging of configuration for how the various parameters are merged with client-level configuration.

async get(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a GET request.

Parameters: See httpx.request.

async head(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a HEAD request.

Parameters: See httpx.request.

async options(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send an OPTIONS request.

Parameters: See httpx.request.

async post(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a POST request.

Parameters: See httpx.request.

async put(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a PUT request.

Parameters: See httpx.request.

async patch(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a PATCH request.

Parameters: See httpx.request.

async delete(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Send a DELETE request.

Parameters: See httpx.request.

stream(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

Alternative to httpx.request() that streams the response body instead of loading it into memory at once.

Parameters: See httpx.request.

See also: Streaming Responses

build_request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, timeout=, extensions=None)

Build and return a request instance.

• The params, headers and cookies arguments are merged with any values set on the client.
• The url argument is merged with any base_url set on the client.

See also: Request instances

async send(self, request, *, stream=False, auth=, follow_redirects=)

    Send a request.

    The request is sent as-is, unmodified.

Typically you'll want to build one with AsyncClient.build_request()
@@ -1570,6 +1588,19 @@ what gets sent over the wire.

  • def clear([domain], [path])
  • Standard mutable mapping interface
+Proxy
+
+A configuration of the proxy server.
+
+>>> proxy = Proxy("http://proxy.example.com:8030")
+>>> client = Client(proxy=proxy)
+
+• def __init__(url, [ssl_context], [auth], [headers])
+• .url - URL
+• .auth - tuple[str, str]
+• .headers - Headers
+• .ssl_context - SSLContext
diff --git a/async/index.html b/async/index.html
index 4abca665..dcaf9309 100644
--- a/async/index.html
+++ b/async/index.html
@@ -1268,7 +1268,7 @@ async client for sending outgoing HTTP requests.

    Tip

-Use IPython or Python 3.8+ with python -m asyncio to try this code interactively, as they support executing async/await expressions in the console.
+Use IPython or Python 3.9+ with python -m asyncio to try this code interactively, as they support executing async/await expressions in the console.

    API Differences

If you're using an async client then there are a few bits of API that
@@ -1321,13 +1321,13 @@ use async methods.

    For situations when context block usage is not practical, it is possible to enter "manual mode" by sending a Request instance using client.send(..., stream=True).

    Example in the context of forwarding the response to a streaming web endpoint with Starlette:

import httpx
from starlette.background import BackgroundTask
from starlette.responses import StreamingResponse

client = httpx.AsyncClient()

async def home(request):
    req = client.build_request("GET", "https://www.example.com/")
    r = await client.send(req, stream=True)
    return StreamingResponse(r.aiter_text(), background=BackgroundTask(r.aclose))
    @@ -1339,7 +1339,7 @@ use async methods.

    Streaming requests

    When sending a streaming request body with an AsyncClient instance, you should use an async bytes generator instead of a bytes generator:

async def upload_bytes():
    ...  # yield byte content

await client.post(url, content=upload_bytes())
    @@ -1348,7 +1348,7 @@ use async methods.

    Explicit transport instances

    When instantiating a transport instance directly, you need to use httpx.AsyncHTTPTransport.

    For instance:

>>> import httpx
>>> transport = httpx.AsyncHTTPTransport(retries=1)
>>> async with httpx.AsyncClient(transport=transport) as client:
>>>     ...
    @@ -1361,10 +1361,10 @@ for socket operations and concurrency primitives.

    AsyncIO

    AsyncIO is Python's built-in library for writing concurrent code with the async/await syntax.

import asyncio
import httpx

async def main():
    async with httpx.AsyncClient() as client:
        response = await client.get('https://www.example.com/')
        print(response)
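The coroutine is then run with the standard asyncio entry point:

asyncio.run(main())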
    @@ -1375,10 +1375,10 @@ for writing concurrent code with the async/await syntax.

    Trio

Trio is an alternative async library, designed around the principles of structured concurrency.

import httpx
import trio

async def main():
    async with httpx.AsyncClient() as client:
        response = await client.get('https://www.example.com/')
        print(response)
    @@ -1392,10 +1392,10 @@ designed around the AnyIO
     

    AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio or trio. It blends in with native libraries of your chosen backend (defaults to asyncio).

import httpx
import anyio

async def main():
    async with httpx.AsyncClient() as client:
        response = await client.get('https://www.example.com/')
        print(response)
    diff --git a/compatibility/index.html b/compatibility/index.html
    index 19a77c22..f709cbf1 100644
    --- a/compatibility/index.html
    +++ b/compatibility/index.html
    @@ -901,6 +901,15 @@
         
       
       
+ (navigation sidebar: new "Exceptions and Errors" entry)
@@ -1370,6 +1379,15 @@
+ (navigation sidebar: new "Exceptions and Errors" entry)
  • @@ -1532,6 +1550,8 @@ For more detailed documentation, see examples given in the documentation for requests.

    In HTTPX, event hooks may access properties of requests and responses, but event hook callbacks cannot mutate the original request/response.

    If you are looking for more control, consider checking out Custom Transports.

+Exceptions and Errors
+
+The requests exception hierarchy is slightly different from the httpx exception hierarchy. requests exposes a top-level RequestException, whereas httpx exposes a top-level HTTPError. See the exceptions exposed by requests here, and the httpx error hierarchy here.
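As a minimal sketch of what that difference means in practice (the URL is illustrative):

import httpx

try:
    response = httpx.get("https://www.example.com/")
    response.raise_for_status()
except httpx.HTTPError as exc:
    # httpx.HTTPError is the top-level class, covering both transport errors
    # and non-2xx responses raised via raise_for_status(), roughly where
    # requests code would catch requests.RequestException.
    print(f"HTTP error: {exc!r}")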

diff --git a/environment_variables/index.html b/environment_variables/index.html
index 0c8ad224..dfc08f96 100644
--- a/environment_variables/index.html
+++ b/environment_variables/index.html
@@ -871,6 +871,24 @@
+ (navigation sidebar: new "SSL_CERT_FILE" and "SSL_CERT_DIR" entries)
@@ -1058,6 +1076,24 @@
+ (navigation sidebar: new "SSL_CERT_FILE" and "SSL_CERT_DIR" entries)
@@ -1116,6 +1152,22 @@
python -c "import httpx; httpx.get('https://www.python-httpx.org')"
+SSL_CERT_FILE
+
+Valid values: a filename
+
+If this environment variable is set then HTTPX will load CA certificates from the specified file instead of the default location.
+
+Example:
+
+SSL_CERT_FILE=/path/to/ca-certs/ca-bundle.crt python -c "import httpx; httpx.get('https://example.com')"
+
+SSL_CERT_DIR
+
+Valid values: a directory following an OpenSSL specific layout.
+
+If this environment variable is set and the directory follows an OpenSSL specific layout (i.e. you ran c_rehash) then HTTPX will load CA certificates from this directory instead of the default location.
+
+Example:
+
+SSL_CERT_DIR=/path/to/ca-certs/ python -c "import httpx; httpx.get('https://example.com')"
diff --git a/img/speakeasy.png b/img/speakeasy.png
index f8a22cca..4acb347c 100644
Binary files a/img/speakeasy.png and b/img/speakeasy.png differ
diff --git a/index.html b/index.html
index 53a68ef3..6637d6ac 100644
--- a/index.html
+++ b/index.html
@@ -1100,7 +1100,7 @@ HTTPX

    Now, let's get started:

>>> import httpx
     >>> r = httpx.get('https://www.example.org/')
     >>> r
     <Response [200 OK]>
    @@ -1190,7 +1190,7 @@ inspiration around the lower-level networking details.

    $ pip install httpx[brotli,zstd]
     
-HTTPX requires Python 3.8+
+HTTPX requires Python 3.9+

diff --git a/logging/index.html b/logging/index.html
index 5a1dc039..44b717d7 100644
--- a/logging/index.html
+++ b/logging/index.html
@@ -974,8 +974,8 @@

    Logging

    If you need to inspect the internal behaviour of httpx, you can use Python's standard logging to output information about the underlying network behaviour.

    For example, the following configuration...

import logging
import httpx
     
     logging.basicConfig(
         format="%(levelname)s [%(asctime)s] %(name)s - %(message)s",
    @@ -987,31 +987,29 @@
     

Will send debug level output to the console, or wherever stdout is directed to...

    -
    DEBUG [2024-09-28 17:27:40] httpx - load_ssl_context verify=True cert=None
    -DEBUG [2024-09-28 17:27:40] httpx - load_verify_locations cafile='/Users/karenpetrosyan/oss/karhttpx/.venv/lib/python3.9/site-packages/certifi/cacert.pem'
    -DEBUG [2024-09-28 17:27:40] httpcore.connection - connect_tcp.started host='www.example.com' port=443 local_address=None timeout=5.0 socket_options=None
    -DEBUG [2024-09-28 17:27:41] httpcore.connection - connect_tcp.complete return_value=<httpcore._backends.sync.SyncStream object at 0x101f1e8e0>
    -DEBUG [2024-09-28 17:27:41] httpcore.connection - start_tls.started ssl_context=SSLContext(verify=True) server_hostname='www.example.com' timeout=5.0
    -DEBUG [2024-09-28 17:27:41] httpcore.connection - start_tls.complete return_value=<httpcore._backends.sync.SyncStream object at 0x1020f49a0>
    -DEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_headers.started request=<Request [b'GET']>
    -DEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_headers.complete
    -DEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_body.started request=<Request [b'GET']>
    -DEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_body.complete
    -DEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_headers.started request=<Request [b'GET']>
    -DEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK', [(b'Content-Encoding', b'gzip'), (b'Accept-Ranges', b'bytes'), (b'Age', b'407727'), (b'Cache-Control', b'max-age=604800'), (b'Content-Type', b'text/html; charset=UTF-8'), (b'Date', b'Sat, 28 Sep 2024 13:27:42 GMT'), (b'Etag', b'"3147526947+gzip"'), (b'Expires', b'Sat, 05 Oct 2024 13:27:42 GMT'), (b'Last-Modified', b'Thu, 17 Oct 2019 07:18:26 GMT'), (b'Server', b'ECAcc (dcd/7D43)'), (b'Vary', b'Accept-Encoding'), (b'X-Cache', b'HIT'), (b'Content-Length', b'648')])
    -INFO [2024-09-28 17:27:41] httpx - HTTP Request: GET https://www.example.com "HTTP/1.1 200 OK"
    -DEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_body.started request=<Request [b'GET']>
    -DEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_body.complete
    -DEBUG [2024-09-28 17:27:41] httpcore.http11 - response_closed.started
    -DEBUG [2024-09-28 17:27:41] httpcore.http11 - response_closed.complete
    -DEBUG [2024-09-28 17:27:41] httpcore.connection - close.started
    -DEBUG [2024-09-28 17:27:41] httpcore.connection - close.complete
    +
    DEBUG [2024-09-28 17:27:40] httpcore.connection - connect_tcp.started host='www.example.com' port=443 local_address=None timeout=5.0 socket_options=None
    +DEBUG [2024-09-28 17:27:41] httpcore.connection - connect_tcp.complete return_value=<httpcore._backends.sync.SyncStream object at 0x101f1e8e0>
    +DEBUG [2024-09-28 17:27:41] httpcore.connection - start_tls.started ssl_context=SSLContext(verify=True) server_hostname='www.example.com' timeout=5.0
    +DEBUG [2024-09-28 17:27:41] httpcore.connection - start_tls.complete return_value=<httpcore._backends.sync.SyncStream object at 0x1020f49a0>
    +DEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_headers.started request=<Request [b'GET']>
    +DEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_headers.complete
    +DEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_body.started request=<Request [b'GET']>
    +DEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_body.complete
    +DEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_headers.started request=<Request [b'GET']>
    +DEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK', [(b'Content-Encoding', b'gzip'), (b'Accept-Ranges', b'bytes'), (b'Age', b'407727'), (b'Cache-Control', b'max-age=604800'), (b'Content-Type', b'text/html; charset=UTF-8'), (b'Date', b'Sat, 28 Sep 2024 13:27:42 GMT'), (b'Etag', b'"3147526947+gzip"'), (b'Expires', b'Sat, 05 Oct 2024 13:27:42 GMT'), (b'Last-Modified', b'Thu, 17 Oct 2019 07:18:26 GMT'), (b'Server', b'ECAcc (dcd/7D43)'), (b'Vary', b'Accept-Encoding'), (b'X-Cache', b'HIT'), (b'Content-Length', b'648')])
    +INFO [2024-09-28 17:27:41] httpx - HTTP Request: GET https://www.example.com "HTTP/1.1 200 OK"
    +DEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_body.started request=<Request [b'GET']>
    +DEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_body.complete
    +DEBUG [2024-09-28 17:27:41] httpcore.http11 - response_closed.started
    +DEBUG [2024-09-28 17:27:41] httpcore.http11 - response_closed.complete
    +DEBUG [2024-09-28 17:27:41] httpcore.connection - close.started
    +DEBUG [2024-09-28 17:27:41] httpcore.connection - close.complete
     

    Logging output includes information from both the high-level httpx logger, and the network-level httpcore logger, which can be configured separately.

    For handling more complex logging configurations you might want to use the dictionary configuration style...

import logging.config
import httpx
     
     LOGGING_CONFIG = {
         "version": 1,
    diff --git a/quickstart/index.html b/quickstart/index.html
    index 06153740..1a70e107 100644
    --- a/quickstart/index.html
    +++ b/quickstart/index.html
    @@ -1315,7 +1315,7 @@
     
     

    QuickStart

    First, start by importing HTTPX:

>>> import httpx
     

    Now, let’s try to get a webpage.

    @@ -1390,8 +1390,8 @@ be decoded for you. If brotlipy is installed, then the brotli encoding will be supported. If zstandard is installed, then zstd response encodings will also be supported.

    For example, to create an image from binary data returned by a request, you can use the following code:

>>> from PIL import Image
>>> from io import BytesIO
     >>> i = Image.open(BytesIO(r.content))
     
    @@ -1444,8 +1444,9 @@ which is used for HTML forms.

    Sending Multipart File Uploads

    You can also upload files, using HTTP multipart encoding:

->>> files = {'upload-file': open('report.xls', 'rb')}
->>> r = httpx.post("https://httpbin.org/post", files=files)
+>>> with open('report.xls', 'rb') as report_file:
+...     files = {'upload-file': report_file}
+...     r = httpx.post("https://httpbin.org/post", files=files)
     >>> print(r.text)
     {
       ...
    @@ -1458,8 +1459,9 @@ which is used for HTML forms.

    You can also explicitly set the filename and content type, by using a tuple of items for the file value:

->>> files = {'upload-file': ('report.xls', open('report.xls', 'rb'), 'application/vnd.ms-excel')}
->>> r = httpx.post("https://httpbin.org/post", files=files)
+>>> with open('report.xls', 'rb') as report_file:
+...     files = {'upload-file': ('report.xls', report_file, 'application/vnd.ms-excel')}
+...     r = httpx.post("https://httpbin.org/post", files=files)
     >>> print(r.text)
     {
       ...
    @@ -1472,8 +1474,9 @@ of items for the file value:

    If you need to include non-file data fields in the multipart form, use the data=... parameter:

    >>> data = {'message': 'Hello, world!'}
    ->>> files = {'file': open('report.xls', 'rb')}
    ->>> r = httpx.post("https://httpbin.org/post", data=data, files=files)
    +>>> with open('report.xls', 'rb') as report_file:
    +...     files = {'file': report_file}
    +...     r = httpx.post("https://httpbin.org/post", data=data, files=files)
     >>> print(r.text)
     {
       ...
    diff --git a/search/search_index.json b/search/search_index.json
    index bc3e000b..ab049341 100644
    --- a/search/search_index.json
    +++ b/search/search_index.json
    @@ -1 +1 @@
    -{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Introduction","text":"HTTPX  A next-generation HTTP client for Python. 

    HTTPX is a fully featured HTTP client for Python 3, which provides sync and async APIs, and support for both HTTP/1.1 and HTTP/2.

    Install HTTPX using pip:

    $ pip install httpx\n

    Now, let's get started:

    >>> import httpx\n>>> r = httpx.get('https://www.example.org/')\n>>> r\n<Response [200 OK]>\n>>> r.status_code\n200\n>>> r.headers['content-type']\n'text/html; charset=UTF-8'\n>>> r.text\n'<!doctype html>\\n<html>\\n<head>\\n<title>Example Domain</title>...'\n

    Or, using the command-line client.

    # The command line client is an optional dependency.\n$ pip install 'httpx[cli]'\n

    Which now allows us to use HTTPX directly from the command-line...

    Sending a request...

    "},{"location":"#features","title":"Features","text":"

    HTTPX builds on the well-established usability of requests, and gives you:

    • A broadly requests-compatible API.
    • Standard synchronous interface, but with async support if you need it.
    • HTTP/1.1 and HTTP/2 support.
    • Ability to make requests directly to WSGI applications or ASGI applications.
    • Strict timeouts everywhere.
    • Fully type annotated.
    • 100% test coverage.

    Plus all the standard features of requests...

    • International Domains and URLs
    • Keep-Alive & Connection Pooling
    • Sessions with Cookie Persistence
    • Browser-style SSL Verification
    • Basic/Digest Authentication
    • Elegant Key/Value Cookies
    • Automatic Decompression
    • Automatic Content Decoding
    • Unicode Response Bodies
    • Multipart File Uploads
    • HTTP(S) Proxy Support
    • Connection Timeouts
    • Streaming Downloads
    • .netrc Support
    • Chunked Requests
    "},{"location":"#documentation","title":"Documentation","text":"

    For a run-through of all the basics, head over to the QuickStart.

    For more advanced topics, see the Advanced section, the async support section, or the HTTP/2 section.

    The Developer Interface provides a comprehensive API reference.

    To find out about tools that integrate with HTTPX, see Third Party Packages.

    "},{"location":"#dependencies","title":"Dependencies","text":"

    The HTTPX project relies on these excellent libraries:

    • httpcore - The underlying transport implementation for httpx.
    • h11 - HTTP/1.1 support.
    • certifi - SSL certificates.
    • idna - Internationalized domain name support.
    • sniffio - Async library autodetection.

    As well as these optional installs:

    • h2 - HTTP/2 support. (Optional, with httpx[http2])
    • socksio - SOCKS proxy support. (Optional, with httpx[socks])
    • rich - Rich terminal support. (Optional, with httpx[cli])
    • click - Command line client support. (Optional, with httpx[cli])
    • brotli or brotlicffi - Decoding for \"brotli\" compressed responses. (Optional, with httpx[brotli])
    • zstandard - Decoding for \"zstd\" compressed responses. (Optional, with httpx[zstd])

    A huge amount of credit is due to requests for the API layout that much of this work follows, as well as to urllib3 for plenty of design inspiration around the lower-level networking details.

    "},{"location":"#installation","title":"Installation","text":"

    Install with pip:

    $ pip install httpx\n

    Or, to include the optional HTTP/2 support, use:

    $ pip install httpx[http2]\n

    To include the optional brotli and zstandard decoders support, use:

    $ pip install httpx[brotli,zstd]\n

    HTTPX requires Python 3.8+

    "},{"location":"api/","title":"Developer Interface","text":""},{"location":"api/#helper-functions","title":"Helper Functions","text":"

    Note

    Only use these functions if you're testing HTTPX in a console or making a small number of requests. Using a Client will enable HTTP/2 and connection pooling for more efficient and long-lived connections.

    httpx.request(method, url, *, params=None, content=None, data=None, files=None, json=None, headers=None, cookies=None, auth=None, proxy=None, timeout=Timeout(timeout=5.0), follow_redirects=False, verify=True, trust_env=True)

    Sends an HTTP request.

    Parameters:

    • method - HTTP method for the new Request object: GET, OPTIONS, HEAD, POST, PUT, PATCH, or DELETE.
    • url - URL for the new Request object.
    • params - (optional) Query parameters to include in the URL, as a string, dictionary, or sequence of two-tuples.
    • content - (optional) Binary content to include in the body of the request, as bytes or a byte iterator.
    • data - (optional) Form data to include in the body of the request, as a dictionary.
    • files - (optional) A dictionary of upload files to include in the body of the request.
    • json - (optional) A JSON serializable object to include in the body of the request.
    • headers - (optional) Dictionary of HTTP headers to include in the request.
    • cookies - (optional) Dictionary of Cookie items to include in the request.
    • auth - (optional) An authentication class to use when sending the request.
    • proxy - (optional) A proxy URL where all the traffic should be routed.
    • timeout - (optional) The timeout configuration to use when sending the request.
    • follow_redirects - (optional) Enables or disables HTTP redirects.
    • verify - (optional) Either True to use an SSL context with the default CA bundle, False to disable verification, or an instance of ssl.SSLContext to use a custom context.
    • trust_env - (optional) Enables or disables usage of environment variables for configuration.

    Returns: Response

    Usage:

    >>> import httpx\n>>> response = httpx.request('GET', 'https://httpbin.org/get')\n>>> response\n<Response [200 OK]>\n
    httpx.get(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends a GET request.

    Parameters: See httpx.request.

    Note that the data, files, json and content parameters are not available on this function, as GET requests should not include a request body.

    httpx.options(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends an OPTIONS request.

    Parameters: See httpx.request.

    Note that the data, files, json and content parameters are not available on this function, as OPTIONS requests should not include a request body.

    httpx.head(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends a HEAD request.

    Parameters: See httpx.request.

    Note that the data, files, json and content parameters are not available on this function, as HEAD requests should not include a request body.

    httpx.post(url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends a POST request.

    Parameters: See httpx.request.

    httpx.put(url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends a PUT request.

    Parameters: See httpx.request.

    httpx.patch(url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends a PATCH request.

    Parameters: See httpx.request.

    httpx.delete(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, timeout=Timeout(timeout=5.0), verify=True, trust_env=True)

    Sends a DELETE request.

    Parameters: See httpx.request.

    Note that the data, files, json and content parameters are not available on this function, as DELETE requests should not include a request body.

    httpx.stream(method, url, *, params=None, content=None, data=None, files=None, json=None, headers=None, cookies=None, auth=None, proxy=None, timeout=Timeout(timeout=5.0), follow_redirects=False, verify=True, trust_env=True)

    Alternative to httpx.request() that streams the response body instead of loading it into memory at once.

    Parameters: See httpx.request.

    See also: Streaming Responses
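    As a rough sketch of the streaming helper (the URL is illustrative), iterating the response body in chunks rather than loading it into memory at once:

    import httpx

    with httpx.stream("GET", "https://www.example.com") as response:
        for chunk in response.iter_bytes():
            print(len(chunk))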

    "},{"location":"api/#client","title":"Client","text":"class httpx.Client(*, auth=None, params=None, headers=None, cookies=None, verify=True, cert=None, trust_env=True, http1=True, http2=False, proxy=None, mounts=None, timeout=Timeout(timeout=5.0), follow_redirects=False, limits=Limits(max_connections=100, max_keepalive_connections=20, keepalive_expiry=5.0), max_redirects=20, event_hooks=None, base_url='', transport=None, default_encoding='utf-8')

    An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc.

    It can be shared between threads.

    Usage:

    >>> client = httpx.Client()\n>>> response = client.get('https://example.org')\n

    Parameters:

    • auth - (optional) An authentication class to use when sending requests.
    • params - (optional) Query parameters to include in request URLs, as a string, dictionary, or sequence of two-tuples.
    • headers - (optional) Dictionary of HTTP headers to include when sending requests.
    • cookies - (optional) Dictionary of Cookie items to include when sending requests.
    • verify - (optional) Either True to use an SSL context with the default CA bundle, False to disable verification, or an instance of ssl.SSLContext to use a custom context.
    • http2 - (optional) A boolean indicating if HTTP/2 support should be enabled. Defaults to False.
    • proxy - (optional) A proxy URL where all the traffic should be routed.
    • timeout - (optional) The timeout configuration to use when sending requests.
    • limits - (optional) The limits configuration to use.
    • max_redirects - (optional) The maximum number of redirect responses that should be followed.
    • base_url - (optional) A URL to use as the base when building request URLs.
    • transport - (optional) A transport class to use for sending requests over the network.
    • trust_env - (optional) Enables or disables usage of environment variables for configuration.
    • default_encoding - (optional) The default encoding to use for decoding response text, if no charset information is included in a response Content-Type header. Set to a callable for automatic character set detection. Default: \"utf-8\".
    headers

    HTTP headers to include when sending requests.

    cookies

    Cookie values to include when sending requests.

    params

    Query parameters to include in the URL when sending requests.

    auth

    Authentication class used when none is passed at the request-level.

    See also Authentication.

    request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Build and send a request.

    Equivalent to:

    request = client.build_request(...)\nresponse = client.send(request, ...)\n

    See Client.build_request(), Client.send() and Merging of configuration for how the various parameters are merged with client-level configuration.

    get(self, url, *, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a GET request.

    Parameters: See httpx.request.

    head(self, url, *, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a HEAD request.

    Parameters: See httpx.request.

    options(self, url, *, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send an OPTIONS request.

    Parameters: See httpx.request.

    post(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a POST request.

    Parameters: See httpx.request.

    put(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a PUT request.

    Parameters: See httpx.request.

    patch(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a PATCH request.

    Parameters: See httpx.request.

    delete(self, url, *, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a DELETE request.

    Parameters: See httpx.request.

    stream(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Alternative to httpx.request() that streams the response body instead of loading it into memory at once.

    Parameters: See httpx.request.

    See also: Streaming Responses

    build_request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Build and return a request instance.

    • The params, headers and cookies arguments are merged with any values set on the client.
    • The url argument is merged with any base_url set on the client.

    See also: Request instances
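    For instance, a small sketch of building a request and then sending it, so that client-level configuration such as base_url and headers is merged in (the URL and header values here are illustrative):

    import httpx

    client = httpx.Client(base_url="https://api.example.com", headers={"User-Agent": "my-app"})

    # The relative URL and extra headers are merged with the client-level configuration.
    request = client.build_request("GET", "/items", headers={"Accept": "application/json"})
    response = client.send(request)
    client.close()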

    send(self, request, *, stream=False, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT)

    Send a request.

    The request is sent as-is, unmodified.

    Typically you'll want to build one with Client.build_request() so that any client-level configuration is merged into the request, but passing an explicit httpx.Request() is supported as well.

    See also: Request instances

    close(self)

    Close transport and proxies.

    "},{"location":"api/#asyncclient","title":"AsyncClient","text":"class httpx.AsyncClient(*, auth=None, params=None, headers=None, cookies=None, verify=True, cert=None, http1=True, http2=False, proxy=None, mounts=None, timeout=Timeout(timeout=5.0), follow_redirects=False, limits=Limits(max_connections=100, max_keepalive_connections=20, keepalive_expiry=5.0), max_redirects=20, event_hooks=None, base_url='', transport=None, trust_env=True, default_encoding='utf-8')

    An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc.

    It can be shared between tasks.

    Usage:

    >>> async with httpx.AsyncClient() as client:\n>>>     response = await client.get('https://example.org')\n

    Parameters:

    • auth - (optional) An authentication class to use when sending requests.
    • params - (optional) Query parameters to include in request URLs, as a string, dictionary, or sequence of two-tuples.
    • headers - (optional) Dictionary of HTTP headers to include when sending requests.
    • cookies - (optional) Dictionary of Cookie items to include when sending requests.
    • verify - (optional) Either True to use an SSL context with the default CA bundle, False to disable verification, or an instance of ssl.SSLContext to use a custom context.
    • http2 - (optional) A boolean indicating if HTTP/2 support should be enabled. Defaults to False.
    • proxy - (optional) A proxy URL where all the traffic should be routed.
    • timeout - (optional) The timeout configuration to use when sending requests.
    • limits - (optional) The limits configuration to use.
    • max_redirects - (optional) The maximum number of redirect responses that should be followed.
    • base_url - (optional) A URL to use as the base when building request URLs.
    • transport - (optional) A transport class to use for sending requests over the network.
    • trust_env - (optional) Enables or disables usage of environment variables for configuration.
    • default_encoding - (optional) The default encoding to use for decoding response text, if no charset information is included in a response Content-Type header. Set to a callable for automatic character set detection. Default: \"utf-8\".
    headers

    HTTP headers to include when sending requests.

    cookies

    Cookie values to include when sending requests.

    params

    Query parameters to include in the URL when sending requests.

    auth

    Authentication class used when none is passed at the request-level.

    See also Authentication.

    async request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Build and send a request.

    Equivalent to:

    request = client.build_request(...)\nresponse = await client.send(request, ...)\n

    See AsyncClient.build_request(), AsyncClient.send() and Merging of configuration for how the various parameters are merged with client-level configuration.

    async get(self, url, *, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a GET request.

    Parameters: See httpx.request.

    async head(self, url, *, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a HEAD request.

    Parameters: See httpx.request.

    async options(self, url, *, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send an OPTIONS request.

    Parameters: See httpx.request.

    async post(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a POST request.

    Parameters: See httpx.request.

    async put(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a PUT request.

    Parameters: See httpx.request.

    async patch(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a PATCH request.

    Parameters: See httpx.request.

    async delete(self, url, *, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Send a DELETE request.

    Parameters: See httpx.request.

    stream(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Alternative to httpx.request() that streams the response body instead of loading it into memory at once.

    Parameters: See httpx.request.

    See also: Streaming Responses

    build_request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, timeout=USE_CLIENT_DEFAULT, extensions=None)

    Build and return a request instance.

    • The params, headers and cookies arguments are merged with any values set on the client.
    • The url argument is merged with any base_url set on the client.

    See also: Request instances

    async send(self, request, *, stream=False, auth=USE_CLIENT_DEFAULT, follow_redirects=USE_CLIENT_DEFAULT)

    Send a request.

    The request is sent as-is, unmodified.

    Typically you'll want to build one with AsyncClient.build_request() so that any client-level configuration is merged into the request, but passing an explicit httpx.Request() is supported as well.

    See also: Request instances

    async aclose(self)

    Close transport and proxies.

    "},{"location":"api/#response","title":"Response","text":"

    An HTTP response.

    • def __init__(...)
    • .status_code - int
    • .reason_phrase - str
    • .http_version - \"HTTP/2\" or \"HTTP/1.1\"
    • .url - URL
    • .headers - Headers
    • .content - bytes
    • .text - str
    • .encoding - str
    • .is_redirect - bool
    • .request - Request
    • .next_request - Optional[Request]
    • .cookies - Cookies
    • .history - List[Response]
    • .elapsed - timedelta
    • The amount of time elapsed between sending the request and calling close() on the corresponding response received for that request. Use total_seconds() to correctly get the total elapsed seconds.
    • def .raise_for_status() - Response
    • def .json() - Any
    • def .read() - bytes
    • def .iter_raw([chunk_size]) - bytes iterator
    • def .iter_bytes([chunk_size]) - bytes iterator
    • def .iter_text([chunk_size]) - text iterator
    • def .iter_lines() - text iterator
    • def .close() - None
    • def .next() - Response
    • def .aread() - bytes
    • def .aiter_raw([chunk_size]) - async bytes iterator
    • def .aiter_bytes([chunk_size]) - async bytes iterator
    • def .aiter_text([chunk_size]) - async text iterator
    • def .aiter_lines() - async text iterator
    • def .aclose() - None
    • def .anext() - Response
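    As an illustrative sketch of working with some of these properties and methods (the URL is an example placeholder, not part of the API):

    import httpx

    response = httpx.get("https://httpbin.org/get")
    response.raise_for_status()              # Raises httpx.HTTPStatusError on 4xx/5xx responses.
    print(response.status_code)              # e.g. 200
    print(response.http_version)             # "HTTP/1.1" or "HTTP/2"
    print(response.headers["content-type"])  # Case-insensitive header lookup.
    data = response.json()                   # Parse the response body as JSON.
    print(response.elapsed.total_seconds())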
    "},{"location":"api/#request","title":"Request","text":"

    An HTTP request. Can be constructed explicitly for more control over exactly what gets sent over the wire.

    >>> request = httpx.Request(\"GET\", \"https://example.org\", headers={'host': 'example.org'})\n>>> response = client.send(request)\n
    • def __init__(method, url, [params], [headers], [cookies], [content], [data], [files], [json], [stream])
    • .method - str
    • .url - URL
    • .content - bytes, bytes iterator, or bytes async iterator
    • .headers - Headers
    • .cookies - Cookies
    "},{"location":"api/#url","title":"URL","text":"

    A normalized, IDNA supporting URL.

    >>> url = URL(\"https://example.org/\")\n>>> url.host\n'example.org'\n
    • def __init__(url, **kwargs)
    • .scheme - str
    • .authority - str
    • .host - str
    • .port - int
    • .path - str
    • .query - str
    • .raw_path - str
    • .fragment - str
    • .is_ssl - bool
    • .is_absolute_url - bool
    • .is_relative_url - bool
    • def .copy_with([scheme], [authority], [path], [query], [fragment]) - URL
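    A brief sketch of these properties and copy_with() in use (the values shown are illustrative):

    import httpx

    url = httpx.URL("https://example.org/path?search=query")
    print(url.scheme)   # 'https'
    print(url.host)     # 'example.org'
    print(url.path)     # '/path'
    # copy_with() returns a new URL with the given components replaced.
    print(url.copy_with(scheme="http", path="/other"))  # e.g. http://example.org/other?search=query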
    "},{"location":"api/#headers","title":"Headers","text":"

    A case-insensitive multi-dict.

    >>> headers = Headers({'Content-Type': 'application/json'})\n>>> headers['content-type']\n'application/json'\n
    • def __init__(self, headers, encoding=None)
    • def copy() - Headers
    "},{"location":"api/#cookies","title":"Cookies","text":"

    A dict-like cookie store.

    >>> cookies = Cookies()\n>>> cookies.set(\"name\", \"value\", domain=\"example.org\")\n
    • def __init__(cookies: [dict, Cookies, CookieJar])
    • .jar - CookieJar
    • def extract_cookies(response)
    • def set_cookie_header(request)
    • def set(name, value, [domain], [path])
    • def get(name, [domain], [path])
    • def delete(name, [domain], [path])
    • def clear([domain], [path])
    • Standard mutable mapping interface
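    A short sketch of using a Cookies instance together with a client (the domain and values are illustrative):

    import httpx

    cookies = httpx.Cookies()
    cookies.set("session_id", "abc123", domain="httpbin.org")

    # Cookies are set on the client, rather than per-request.
    with httpx.Client(cookies=cookies) as client:
        response = client.get("https://httpbin.org/cookies")
        print(response.json())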
    "},{"location":"async/","title":"Async Support","text":"

    HTTPX offers a standard synchronous API by default, but also gives you the option of an async client if you need it.

    Async is a concurrency model that is far more efficient than multi-threading, and can provide significant performance benefits and enable the use of long-lived network connections such as WebSockets.

    If you're working with an async web framework then you'll also want to use an async client for sending outgoing HTTP requests.

    "},{"location":"async/#making-async-requests","title":"Making Async requests","text":"

    To make asynchronous requests, you'll need an AsyncClient.

    >>> async with httpx.AsyncClient() as client:\n...     r = await client.get('https://www.example.com/')\n...\n>>> r\n<Response [200 OK]>\n

    Tip

    Use IPython or Python 3.8+ with python -m asyncio to try this code interactively, as they support executing async/await expressions in the console.

    "},{"location":"async/#api-differences","title":"API Differences","text":"

    If you're using an async client then there are a few bits of API that use async methods.

    "},{"location":"async/#making-requests","title":"Making requests","text":"

    The request methods are all async, so you should use response = await client.get(...) style for all of the following:

    • AsyncClient.get(url, ...)
    • AsyncClient.options(url, ...)
    • AsyncClient.head(url, ...)
    • AsyncClient.post(url, ...)
    • AsyncClient.put(url, ...)
    • AsyncClient.patch(url, ...)
    • AsyncClient.delete(url, ...)
    • AsyncClient.request(method, url, ...)
    • AsyncClient.send(request, ...)
    "},{"location":"async/#opening-and-closing-clients","title":"Opening and closing clients","text":"

    Use async with httpx.AsyncClient() if you want a context-managed client...

    async with httpx.AsyncClient() as client:\n    ...\n

    Warning

    In order to get the most benefit from connection pooling, make sure you're not instantiating multiple client instances - for example by using async with inside a \"hot loop\". This can be achieved either by having a single scoped client that's passed throughout wherever it's needed, or by having a single global client instance.

    Alternatively, use await client.aclose() if you want to close a client explicitly:

    client = httpx.AsyncClient()\n...\nawait client.aclose()\n
    "},{"location":"async/#streaming-responses","title":"Streaming responses","text":"

    The AsyncClient.stream(method, url, ...) method is an async context block.

    >>> client = httpx.AsyncClient()\n>>> async with client.stream('GET', 'https://www.example.com/') as response:\n...     async for chunk in response.aiter_bytes():\n...         ...\n

    The async response streaming methods are:

    • Response.aread() - For conditionally reading a response inside a stream block.
    • Response.aiter_bytes() - For streaming the response content as bytes.
    • Response.aiter_text() - For streaming the response content as text.
    • Response.aiter_lines() - For streaming the response content as lines of text.
    • Response.aiter_raw() - For streaming the raw response bytes, without applying content decoding.
    • Response.aclose() - For closing the response. You don't usually need this, since the .stream block closes the response automatically on exit.

    For situations when context block usage is not practical, it is possible to enter \"manual mode\" by sending a Request instance using client.send(..., stream=True).

    Example in the context of forwarding the response to a streaming web endpoint with Starlette:

    import httpx\nfrom starlette.background import BackgroundTask\nfrom starlette.responses import StreamingResponse\n\nclient = httpx.AsyncClient()\n\nasync def home(request):\n    req = client.build_request(\"GET\", \"https://www.example.com/\")\n    r = await client.send(req, stream=True)\n    return StreamingResponse(r.aiter_text(), background=BackgroundTask(r.aclose))\n

    Warning

    When using this \"manual streaming mode\", it is your duty as a developer to make sure that Response.aclose() is called eventually. Failing to do so would leave connections open, most likely resulting in resource leaks down the line.

    "},{"location":"async/#streaming-requests","title":"Streaming requests","text":"

    When sending a streaming request body with an AsyncClient instance, you should use an async bytes generator instead of a bytes generator:

    async def upload_bytes():\n    ...  # yield byte content\n\nawait client.post(url, content=upload_bytes())\n
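    For example, a minimal sketch of an async byte generator used as a streaming request body (the URL and chunks are illustrative):

    import asyncio
    import httpx

    async def upload_bytes():
        # Yield the request body in chunks.
        for chunk in [b"Hello, ", b"world!"]:
            yield chunk

    async def main():
        async with httpx.AsyncClient() as client:
            response = await client.post("https://httpbin.org/post", content=upload_bytes())
            print(response.status_code)

    asyncio.run(main())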
    "},{"location":"async/#explicit-transport-instances","title":"Explicit transport instances","text":"

    When instantiating a transport instance directly, you need to use httpx.AsyncHTTPTransport.

    For instance:

    >>> import httpx\n>>> transport = httpx.AsyncHTTPTransport(retries=1)\n>>> async with httpx.AsyncClient(transport=transport) as client:\n>>>     ...\n
    "},{"location":"async/#supported-async-environments","title":"Supported async environments","text":"

    HTTPX supports either asyncio or trio as an async environment.

    It will auto-detect which of those two to use as the backend for socket operations and concurrency primitives.

    "},{"location":"async/#asyncio","title":"AsyncIO","text":"

    AsyncIO is Python's built-in library for writing concurrent code with the async/await syntax.

    import asyncio\nimport httpx\n\nasync def main():\n    async with httpx.AsyncClient() as client:\n        response = await client.get('https://www.example.com/')\n        print(response)\n\nasyncio.run(main())\n
    "},{"location":"async/#trio","title":"Trio","text":"

    Trio is an alternative async library, designed around the principles of structured concurrency.

    import httpx\nimport trio\n\nasync def main():\n    async with httpx.AsyncClient() as client:\n        response = await client.get('https://www.example.com/')\n        print(response)\n\ntrio.run(main)\n

    Important

    The trio package must be installed to use the Trio backend.

    "},{"location":"async/#anyio","title":"AnyIO","text":"

    AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio or trio. It blends in with native libraries of your chosen backend (defaults to asyncio).

    import httpx\nimport anyio\n\nasync def main():\n    async with httpx.AsyncClient() as client:\n        response = await client.get('https://www.example.com/')\n        print(response)\n\nanyio.run(main, backend='trio')\n
    "},{"location":"async/#calling-into-python-web-apps","title":"Calling into Python Web Apps","text":"

    For details on calling directly into ASGI applications, see the ASGITransport docs.

    "},{"location":"code_of_conduct/","title":"Code of Conduct","text":"

    We expect contributors to our projects and online spaces to follow the Python Software Foundation\u2019s Code of Conduct.

    The Python community is made up of members from around the globe with a diverse set of skills, personalities, and experiences. It is through these differences that our community experiences great successes and continued growth. When you're working with members of the community, this Code of Conduct will help steer your interactions and keep Python a positive, successful, and growing community.

    "},{"location":"code_of_conduct/#our-community","title":"Our Community","text":"

    Members of the Python community are open, considerate, and respectful. Behaviours that reinforce these values contribute to a positive environment, and include:

    • Being open. Members of the community are open to collaboration, whether it's on PEPs, patches, problems, or otherwise.
    • Focusing on what is best for the community. We're respectful of the processes set forth in the community, and we work within them.
    • Acknowledging time and effort. We're respectful of the volunteer efforts that permeate the Python community. We're thoughtful when addressing the efforts of others, keeping in mind that often times the labor was completed simply for the good of the community.
    • Being respectful of differing viewpoints and experiences. We're receptive to constructive comments and criticism, as the experiences and skill sets of other members contribute to the whole of our efforts.
    • Showing empathy towards other community members. We're attentive in our communications, whether in person or online, and we're tactful when approaching differing views.
    • Being considerate. Members of the community are considerate of their peers -- other Python users.
    • Being respectful. We're respectful of others, their positions, their skills, their commitments, and their efforts.
    • Gracefully accepting constructive criticism. When we disagree, we are courteous in raising our issues.
    • Using welcoming and inclusive language. We're accepting of all who wish to take part in our activities, fostering an environment where anyone can participate and everyone can make a difference.
    "},{"location":"code_of_conduct/#our-standards","title":"Our Standards","text":"

    Every member of our community has the right to have their identity respected. The Python community is dedicated to providing a positive experience for everyone, regardless of age, gender identity and expression, sexual orientation, disability, physical appearance, body size, ethnicity, nationality, race, or religion (or lack thereof), education, or socio-economic status.

    "},{"location":"code_of_conduct/#inappropriate-behavior","title":"Inappropriate Behavior","text":"

    Examples of unacceptable behavior by participants include:

    • Harassment of any participants in any form
    • Deliberate intimidation, stalking, or following
    • Logging or taking screenshots of online activity for harassment purposes
    • Publishing others' private information, such as a physical or electronic address, without explicit permission
    • Violent threats or language directed against another person
    • Incitement of violence or harassment towards any individual, including encouraging a person to commit suicide or to engage in self-harm
    • Creating additional online accounts in order to harass another person or circumvent a ban
    • Sexual language and imagery in online communities or in any conference venue, including talks
    • Insults, put downs, or jokes that are based upon stereotypes, that are exclusionary, or that hold others up for ridicule
    • Excessive swearing
    • Unwelcome sexual attention or advances
    • Unwelcome physical contact, including simulated physical contact (eg, textual descriptions like \"hug\" or \"backrub\") without consent or after a request to stop
    • Pattern of inappropriate social contact, such as requesting/assuming inappropriate levels of intimacy with others
    • Sustained disruption of online community discussions, in-person presentations, or other in-person events
    • Continued one-on-one communication after requests to cease
    • Other conduct that is inappropriate for a professional audience including people of many different backgrounds

    Community members asked to stop any inappropriate behavior are expected to comply immediately.

    "},{"location":"code_of_conduct/#enforcement","title":"Enforcement","text":"

    We take Code of Conduct violations seriously, and will act to ensure our spaces are welcoming, inclusive, and professional environments to communicate in.

    If you need to raise a Code of Conduct report, you may do so privately by email to tom@tomchristie.com.

    Reports will be treated confidentially.

    Alternatively, you may make a report to the Python Software Foundation.

    "},{"location":"compatibility/","title":"Requests Compatibility Guide","text":"

    HTTPX aims to be broadly compatible with the requests API, although there are a few design differences in places.

    This documentation outlines places where the API differs...

    "},{"location":"compatibility/#redirects","title":"Redirects","text":"

    Unlike requests, HTTPX does not follow redirects by default.

    We differ in behaviour here because auto-redirects can easily mask unnecessary network calls being made.

    You can still enable behaviour to automatically follow redirects, but you need to do so explicitly...

    response = client.get(url, follow_redirects=True)\n

    Or else instantiate a client, with redirect following enabled by default...

    client = httpx.Client(follow_redirects=True)\n
    "},{"location":"compatibility/#client-instances","title":"Client instances","text":"

    The HTTPX equivalent of requests.Session is httpx.Client.

    session = requests.Session(**kwargs)\n

    is generally equivalent to

    client = httpx.Client(**kwargs)\n
    "},{"location":"compatibility/#request-urls","title":"Request URLs","text":"

    Accessing response.url will return a URL instance, rather than a string.

    Use str(response.url) if you need a string instance.

    "},{"location":"compatibility/#determining-the-next-redirect-request","title":"Determining the next redirect request","text":"

    The requests library exposes an attribute response.next, which can be used to obtain the next redirect request.

    session = requests.Session()\nrequest = requests.Request(\"GET\", ...).prepare()\nwhile request is not None:\n    response = session.send(request, allow_redirects=False)\n    request = response.next\n

    In HTTPX, this attribute is instead named response.next_request. For example:

    client = httpx.Client()\nrequest = client.build_request(\"GET\", ...)\nwhile request is not None:\n    response = client.send(request)\n    request = response.next_request\n
    "},{"location":"compatibility/#request-content","title":"Request Content","text":"

    For uploading raw text or binary content we prefer to use a content parameter, in order to better separate this usage from the case of uploading form data.

    For example, using content=... to upload raw content:

    # Uploading text, bytes, or a bytes iterator.\nhttpx.post(..., content=b\"Hello, world\")\n

    And using data=... to send form data:

    # Uploading form data.\nhttpx.post(..., data={\"message\": \"Hello, world\"})\n

    Using data= with raw text or byte content will raise a deprecation warning, and support for it is expected to be fully removed with the HTTPX 1.0 release.

    "},{"location":"compatibility/#upload-files","title":"Upload files","text":"

    HTTPX strictly enforces that upload files must be opened in binary mode, in order to avoid character encoding issues that can result from attempting to upload files opened in text mode.

    "},{"location":"compatibility/#content-encoding","title":"Content encoding","text":"

    HTTPX uses utf-8 for encoding str request bodies. For example, when using content=<str> the request body will be encoded to utf-8 before being sent over the wire. This differs from Requests, which uses latin1. If you need an explicit encoding, pass encoded bytes explicitly, e.g. content=<str>.encode(\"latin1\"). For response bodies, assuming the server didn't send an explicit encoding, HTTPX will do its best to figure out an appropriate encoding. It makes a guess at the encoding to use for decoding the response using charset_normalizer. If that detection fails, or the content is less than 32 octets, the response will be decoded using utf-8 with the errors=\"replace\" decoder strategy.
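    A small sketch of the difference (the URL is illustrative): str content is sent as UTF-8 unless you encode it yourself:

    import httpx

    # A str body is encoded as UTF-8 before being sent.
    httpx.post("https://httpbin.org/post", content="café")

    # To send a different encoding, pass explicitly encoded bytes instead.
    httpx.post("https://httpbin.org/post", content="café".encode("latin1"))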

    "},{"location":"compatibility/#cookies","title":"Cookies","text":"

    If using a client instance, then cookies should always be set on the client rather than on a per-request basis.

    This usage is supported:

    client = httpx.Client(cookies=...)\nclient.post(...)\n

    This usage is not supported:

    client = httpx.Client()\nclient.post(..., cookies=...)\n

    We prefer enforcing a stricter API here because it provides clearer expectations around cookie persistence, particularly when redirects occur.

    "},{"location":"compatibility/#status-codes","title":"Status Codes","text":"

    In our documentation we prefer the uppercased versions, such as codes.NOT_FOUND, but also provide lower-cased versions for API compatibility with requests.

    Requests includes various synonyms for status codes that HTTPX does not support.
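    For example, a sketch showing that both spellings refer to the same status code values (the URL is illustrative):

    import httpx

    response = httpx.get("https://httpbin.org/status/404")
    assert response.status_code == httpx.codes.NOT_FOUND
    assert response.status_code == httpx.codes.not_found  # Lower-cased alias, for requests compatibility.
    assert httpx.codes.NOT_FOUND == 404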

    "},{"location":"compatibility/#streaming-responses","title":"Streaming responses","text":"

    HTTPX provides a .stream() interface rather than using stream=True. This ensures that streaming responses are always properly closed outside of the stream block, and makes it visually clearer at which points streaming I/O APIs may be used with a response.

    For example:

    with httpx.stream(\"GET\", \"https://www.example.com\") as response:\n    ...\n

    Within a stream() block request data is made available with:

    • .iter_bytes() - Instead of response.iter_content()
    • .iter_text() - Instead of response.iter_content(decode_unicode=True)
    • .iter_lines() - Corresponding to response.iter_lines()
    • .iter_raw() - Use this instead of response.raw
    • .read() - Read the entire response body, making response.text and response.content available.
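    A small sketch combining a few of the methods listed above within a stream() block (the URL is illustrative); .read() is useful when you only want to load the body conditionally:

    import httpx

    with httpx.stream("GET", "https://www.example.com") as response:
        if response.headers.get("Content-Type", "").startswith("text/"):
            response.read()  # Makes response.text and response.content available.
            print(response.text[:100])
        else:
            for chunk in response.iter_bytes():
                print(len(chunk))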
    "},{"location":"compatibility/#timeouts","title":"Timeouts","text":"

    HTTPX defaults to including reasonable timeouts for all network operations, while Requests has no timeouts by default.

    To get the same behavior as Requests, set the timeout parameter to None:

    httpx.get('https://www.example.com', timeout=None)\n
    "},{"location":"compatibility/#proxy-keys","title":"Proxy keys","text":"

    HTTPX uses the mounts argument for HTTP proxying and transport routing. It can do much more than proxies and allows you to configure more than just the proxy route. For more detailed documentation, see Mounting Transports.

    When using httpx.Client(mounts={...}) to map to a selection of different transports, we use full URL schemes, such as mounts={\"http://\": ..., \"https://\": ...}.

    This is different to the requests usage of proxies={\"http\": ..., \"https\": ...}.

    This change is for better consistency with more complex mappings, that might also include domain names, such as mounts={\"all://\": httpx.HTTPTransport(proxy=...), \"all://www.example.com\": None}, which maps all requests onto a proxy, except for requests to \"www.example.com\", which have an explicit exclusion.

    Also note that requests.Session.request(...) allows a proxies=... parameter, whereas httpx.Client.request(...) does not allow mounts=....
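    As a rough sketch of mount-based proxy routing (the proxy URLs are placeholders):

    import httpx

    proxy_mounts = {
        # Route all traffic through a proxy...
        "all://": httpx.HTTPTransport(proxy="http://localhost:8030"),
        # ...except requests to www.example.com, which connect directly.
        "all://www.example.com": None,
    }
    client = httpx.Client(mounts=proxy_mounts)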

    "},{"location":"compatibility/#ssl-configuration","title":"SSL configuration","text":"

    When using a Client instance, the ssl configurations should always be passed on client instantiation, rather than passed to the request method.

    If you need more than one different SSL configuration, you should use different client instances for each SSL configuration.
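    For instance, a minimal sketch passing a custom SSL context at client instantiation (the CA bundle path is a placeholder):

    import ssl
    import httpx

    # One SSL configuration per client instance.
    ctx = ssl.create_default_context(cafile="/path/to/ca-bundle.pem")
    client = httpx.Client(verify=ctx)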

    "},{"location":"compatibility/#request-body-on-http-methods","title":"Request body on HTTP methods","text":"

    The HTTP GET, DELETE, HEAD, and OPTIONS methods are specified as not supporting a request body. To stay in line with this, the .get, .delete, .head and .options functions do not support content, files, data, or json arguments.

    If you really do need to send request data using these HTTP methods, you should use the generic .request function instead.

    httpx.request(\n  method=\"DELETE\",\n  url=\"https://www.example.com/\",\n  content=b'A request body on a DELETE request.'\n)\n
    "},{"location":"compatibility/#checking-for-success-and-failure-responses","title":"Checking for success and failure responses","text":"

    We don't support response.is_ok since the naming is ambiguous there, and might incorrectly imply an equivalence to response.status_code == codes.OK. Instead we provide the response.is_success property, which can be used to check for a 2xx response.

    "},{"location":"compatibility/#request-instantiation","title":"Request instantiation","text":"

    There is no notion of prepared requests in HTTPX. If you need to customize request instantiation, see Request instances.

    Besides, httpx.Request() does not support the auth, timeout, follow_redirects, mounts, verify and cert parameters. However these are available in httpx.request, httpx.get, httpx.post etc., as well as on Client instances.

    "},{"location":"compatibility/#mocking","title":"Mocking","text":"

    If you need to mock HTTPX the same way that test utilities like responses and requests-mock do for requests, see RESPX.

    "},{"location":"compatibility/#caching","title":"Caching","text":"

    If you use cachecontrol or requests-cache to add HTTP Caching support to the requests library, you can use Hishel for HTTPX.

    "},{"location":"compatibility/#networking-layer","title":"Networking layer","text":"

    requests defers most of its HTTP networking code to the excellent urllib3 library.

    On the other hand, HTTPX uses HTTPCore as its core HTTP networking layer, which is a different project than urllib3.

    "},{"location":"compatibility/#query-parameters","title":"Query Parameters","text":"

    requests omits params whose values are None (e.g. requests.get(..., params={\"foo\": None})). This is not supported by HTTPX.

    For both query params (params=) and form data (data=), requests supports sending a list of tuples (e.g. requests.get(..., params=[('key1', 'value1'), ('key1', 'value2')])). This is not supported by HTTPX. Instead, use a dictionary with lists as values. E.g.: httpx.get(..., params={'key1': ['value1', 'value2']}) or with form data: httpx.post(..., data={'key1': ['value1', 'value2']}).

    "},{"location":"compatibility/#event-hooks","title":"Event Hooks","text":"

    requests allows event hooks to mutate Request and Response objects. See examples given in the documentation for requests.

    In HTTPX, event hooks may access properties of requests and responses, but event hook callbacks cannot mutate the original request/response.
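    A small sketch of read-only event hooks; the \"request\" and \"response\" hook names are the ones HTTPX uses, while the logging itself is illustrative:

    import httpx

    def log_request(request):
        print(f"Request: {request.method} {request.url}")

    def log_response(response):
        print(f"Response: {response.status_code} for {response.request.url}")

    client = httpx.Client(event_hooks={"request": [log_request], "response": [log_response]})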

    If you are looking for more control, consider checking out Custom Transports.

    "},{"location":"contributing/","title":"Contributing","text":"

    Thank you for being interested in contributing to HTTPX. There are many ways you can contribute to the project:

    • Try HTTPX and report bugs/issues you find
    • Implement new features
    • Review Pull Requests of others
    • Write documentation
    • Participate in discussions
    "},{"location":"contributing/#reporting-bugs-or-other-issues","title":"Reporting Bugs or Other Issues","text":"

    Found something that HTTPX should support? Stumbled upon some unexpected behaviour?

    Contributions should generally start out with a discussion. Possible bugs may be raised as a \"Potential Issue\" discussion, feature requests may be raised as an \"Ideas\" discussion. We can then determine if the discussion needs to be escalated into an \"Issue\" or not, or if we'd consider a pull request.

    Try to be as descriptive as you can, and in the case of a bug report, provide as much information as possible, such as:

    • OS platform
    • Python version
    • Installed dependencies and versions (python -m pip freeze)
    • Code snippet
    • Error traceback

    You should always try to reduce any examples to the simplest possible case that demonstrates the issue.

    Some possibly useful tips for narrowing down potential issues...

    • Does the issue exist on HTTP/1.1, or HTTP/2, or both?
    • Does the issue exist with Client, AsyncClient, or both?
    • When using AsyncClient does the issue exist when using asyncio or trio, or both?
    "},{"location":"contributing/#development","title":"Development","text":"

    To start developing HTTPX create a fork of the HTTPX repository on GitHub.

    Then clone your fork with the following command replacing YOUR-USERNAME with your GitHub username:

    $ git clone https://github.com/YOUR-USERNAME/httpx\n

    You can now install the project and its dependencies using:

    $ cd httpx\n$ scripts/install\n
    "},{"location":"contributing/#testing-and-linting","title":"Testing and Linting","text":"

    We use custom shell scripts to automate the testing, linting, and documentation building workflow.

    To run the tests, use:

    $ scripts/test\n

    Warning

    The test suite spawns testing servers on ports 8000 and 8001. Make sure these are not in use, so the tests can run properly.

    Any additional arguments will be passed to pytest. See the pytest documentation for more information.

    For example, to run a single test script:

    $ scripts/test tests/test_multipart.py\n

    To run the code auto-formatting:

    $ scripts/lint\n

    Lastly, to run code checks separately (they are also run as part of scripts/test), run:

    $ scripts/check\n
    "},{"location":"contributing/#documenting","title":"Documenting","text":"

    Documentation pages are located under the docs/ folder.

    To run the documentation site locally (useful for previewing changes), use:

    $ scripts/docs\n
    "},{"location":"contributing/#resolving-build-ci-failures","title":"Resolving Build / CI Failures","text":"

    Once you've submitted your pull request, the test suite will automatically run, and the results will show up in GitHub. If the test suite fails, you'll want to click through to the \"Details\" link, and try to identify why the test suite failed.

    Here are some common ways the test suite can fail:

    "},{"location":"contributing/#check-job-failed","title":"Check Job Failed","text":"

    This job failing means there is either a code formatting issue or a type-annotation issue. You can look at the job output to figure out why it failed, or run the following within a shell:

    $ scripts/check\n

    It may be worth running $ scripts/lint to attempt auto-formatting the code, and committing the changes if that succeeds.

    "},{"location":"contributing/#docs-job-failed","title":"Docs Job Failed","text":"

    This job failing means the documentation failed to build. This can happen for a variety of reasons like invalid markdown or missing configuration within mkdocs.yml.

    "},{"location":"contributing/#python-3x-job-failed","title":"Python 3.X Job Failed","text":"

    This job failing means the unit tests failed or not all code paths are covered by unit tests.

    If tests are failing you will see this message under the coverage report:

    === 1 failed, 435 passed, 1 skipped, 1 xfailed in 11.09s ===

    If tests succeed but coverage doesn't reach our current threshold, you will see this message under the coverage report:

    FAIL Required test coverage of 100% not reached. Total coverage: 99.00%

    "},{"location":"contributing/#releasing","title":"Releasing","text":"

    This section is targeted at HTTPX maintainers.

    Before releasing a new version, create a pull request that includes:

    • An update to the changelog:
      • We follow the format from keepachangelog.
      • Compare master with the tag of the latest release, and list all entries that are of interest to our users:
        • Things that must go in the changelog: added, changed, deprecated or removed features, and bug fixes.
        • Things that should not go in the changelog: changes to documentation, tests or tooling.
        • Try sorting entries in descending order of impact / importance.
        • Keep it concise and to-the-point. \ud83c\udfaf
    • A version bump: see __version__.py.

    For an example, see #1006.

    Once the release PR is merged, create a new release including:

    • Tag version like 0.13.3.
    • Release title Version 0.13.3
    • Description copied from the changelog.

    Once created this release will be automatically uploaded to PyPI.

    If something goes wrong with the PyPI job the release can be published using the scripts/publish script.

    "},{"location":"contributing/#development-proxy-setup","title":"Development proxy setup","text":"

    To test and debug requests via a proxy it's best to run a proxy server locally. Any server should do, but HTTPCore's test suite uses mitmproxy, which is written in Python, fully featured, and has an excellent UI and tools for inspecting requests.

    You can install mitmproxy using pip install mitmproxy or several other ways.

    mitmproxy does require setting up local TLS certificates for HTTPS requests, as its main purpose is to allow developers to inspect requests that pass through it. We can set them up as follows:

    1. pip install trustme-cli.
    2. trustme-cli -i example.org www.example.org, assuming you want to test connecting to that domain. This will create three files: server.pem, server.key and client.pem.
    3. mitmproxy requires a PEM file that includes the private key and the certificate so we need to concatenate them: cat server.key server.pem > server.withkey.pem.
    4. Start the proxy server mitmproxy --certs server.withkey.pem, or use the other mitmproxy commands with different UI options.

    At this point the server is ready to start serving requests. You'll need to configure HTTPX as described in the proxy section and the SSL certificates section; this is where our previously generated client.pem comes in:

    ctx = ssl.create_default_context(cafile=\"/path/to/client.pem\")\nclient = httpx.Client(proxy=\"http://127.0.0.1:8080/\", verify=ctx)\n

    Note, however, that HTTPS requests will only succeed to the host specified in the SSL/TLS certificate we generated. HTTPS requests to other hosts will raise an error like:

    ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate\nverify failed: Hostname mismatch, certificate is not valid for\n'duckduckgo.com'. (_ssl.c:1108)\n

    If you want to make requests to more hosts you'll need to regenerate the certificates and include all the hosts you intend to connect to in the second step, i.e.

    trustme-cli -i example.org www.example.org duckduckgo.com www.duckduckgo.com

    "},{"location":"environment_variables/","title":"Environment Variables","text":"

    The HTTPX library can be configured via environment variables. Environment variables are used by default. To ignore environment variables, trust_env has to be set to False. There are two ways to set trust_env to disable environment variables:

    • On the client via httpx.Client(trust_env=False).
    • Using the top-level API, such as httpx.get(\"<url>\", trust_env=False).

    Here is a list of environment variables that HTTPX recognizes and what function they serve:

    "},{"location":"environment_variables/#proxies","title":"Proxies","text":"

    The environment variables documented below are used as a convention by various HTTP tooling, including:

    • cURL
    • requests

    For more information on using proxies in HTTPX, see HTTP Proxying.

    "},{"location":"environment_variables/#http_proxy-https_proxy-all_proxy","title":"HTTP_PROXY, HTTPS_PROXY, ALL_PROXY","text":"

    Valid values: A URL to a proxy

    HTTP_PROXY, HTTPS_PROXY, ALL_PROXY set the proxy to be used for http, https, or all requests respectively.

    export HTTP_PROXY=http://my-external-proxy.com:1234\n\n# This request will be sent through the proxy\npython -c \"import httpx; httpx.get('http://example.com')\"\n\n# This request will be sent directly, as we set `trust_env=False`\npython -c \"import httpx; httpx.get('http://example.com', trust_env=False)\"\n
    "},{"location":"environment_variables/#no_proxy","title":"NO_PROXY","text":"

    Valid values: a comma-separated list of hostnames/URLs

    NO_PROXY disables the proxy for the specified URLs.

    export HTTP_PROXY=http://my-external-proxy.com:1234\nexport NO_PROXY=http://127.0.0.1,python-httpx.org\n\n# As in the previous example, this request will be sent through the proxy\npython -c \"import httpx; httpx.get('http://example.com')\"\n\n# These requests will be sent directly, bypassing the proxy\npython -c \"import httpx; httpx.get('http://127.0.0.1:5000/my-api')\"\npython -c \"import httpx; httpx.get('https://www.python-httpx.org')\"\n
    "},{"location":"exceptions/","title":"Exceptions","text":"

    This page lists exceptions that may be raised when using HTTPX.

    For an overview of how to work with HTTPX exceptions, see Exceptions (Quickstart).

    "},{"location":"exceptions/#the-exception-hierarchy","title":"The exception hierarchy","text":"
    • HTTPError
      • RequestError
        • TransportError
          • TimeoutException
            • ConnectTimeout
            • ReadTimeout
            • WriteTimeout
            • PoolTimeout
          • NetworkError
            • ConnectError
            • ReadError
            • WriteError
            • CloseError
          • ProtocolError
            • LocalProtocolError
            • RemoteProtocolError
          • ProxyError
          • UnsupportedProtocol
        • DecodingError
        • TooManyRedirects
      • HTTPStatusError
    • InvalidURL
    • CookieConflict
    • StreamError
      • StreamConsumed
      • ResponseNotRead
      • RequestNotRead
      • StreamClosed
    "},{"location":"exceptions/#exception-classes","title":"Exception classes","text":"class httpx.HTTPError(message)

    Base class for RequestError and HTTPStatusError.

    Useful for try...except blocks when issuing a request, and then calling .raise_for_status().

    For example:

    try:\n    response = httpx.get(\"https://www.example.com\")\n    response.raise_for_status()\nexcept httpx.HTTPError as exc:\n    print(f\"HTTP Exception for {exc.request.url} - {exc}\")\n
    class httpx.RequestError(message, *, request=None)

    Base class for all exceptions that may occur when issuing a .request().
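    For example, a sketch that distinguishes transport-level failures from error status codes (the URL is illustrative):

    import httpx

    try:
        response = httpx.get("https://www.example.com")
        response.raise_for_status()
    except httpx.RequestError as exc:
        # Covers transport errors such as timeouts and connection failures.
        print(f"An error occurred while requesting {exc.request.url!r}.")
    except httpx.HTTPStatusError as exc:
        # The request completed, but returned a 4xx or 5xx response.
        print(f"Error response {exc.response.status_code} while requesting {exc.request.url!r}.")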

    class httpx.TransportError(message, *, request=None)

    Base class for all exceptions that occur at the level of the Transport API.

    class httpx.TimeoutException(message, *, request=None)

    The base class for timeout errors.

    An operation has timed out.

    class httpx.ConnectTimeout(message, *, request=None)

    Timed out while connecting to the host.

    class httpx.ReadTimeout(message, *, request=None)

    Timed out while receiving data from the host.

    class httpx.WriteTimeout(message, *, request=None)

    Timed out while sending data to the host.

    class httpx.PoolTimeout(message, *, request=None)

    Timed out waiting to acquire a connection from the pool.

    class httpx.NetworkError(message, *, request=None)

    The base class for network-related errors.

    An error occurred while interacting with the network.

    class httpx.ConnectError(message, *, request=None)

    Failed to establish a connection.

    class httpx.ReadError(message, *, request=None)

    Failed to receive data from the network.

    class httpx.WriteError(message, *, request=None)

    Failed to send data through the network.

    class httpx.CloseError(message, *, request=None)

    Failed to close a connection.

    class httpx.ProtocolError(message, *, request=None)

    The protocol was violated.

    class httpx.LocalProtocolError(message, *, request=None)

    A protocol was violated by the client.

    For example if the user instantiated a Request instance explicitly, failed to include the mandatory Host: header, and then issued it directly using client.send().

    class httpx.RemoteProtocolError(message, *, request=None)

    The protocol was violated by the server.

    For example, returning malformed HTTP.

    class httpx.ProxyError(message, *, request=None)

    An error occurred while establishing a proxy connection.

    class httpx.UnsupportedProtocol(message, *, request=None)

    Attempted to make a request to an unsupported protocol.

    For example issuing a request to ftp://www.example.com.

    class httpx.DecodingError(message, *, request=None)

    Decoding of the response failed, due to a malformed encoding.

    class httpx.TooManyRedirects(message, *, request=None)

    Too many redirects.

    class httpx.HTTPStatusError(message, *, request, response)

    The response had an error HTTP status of 4xx or 5xx.

    May be raised when calling response.raise_for_status()

    class httpx.InvalidURL(message)

    URL is improperly formed or cannot be parsed.

    class httpx.CookieConflict(message)

    Attempted to lookup a cookie by name, but multiple cookies existed.

    Can occur when calling response.cookies.get(...).

    class httpx.StreamError(message)

    The base class for stream exceptions.

    The developer made an error in accessing the request stream in an invalid way.

    class httpx.StreamConsumed()

    Attempted to read or stream content, but the content has already been streamed.

    class httpx.StreamClosed()

    Attempted to read or stream response content, but the request has been closed.

    class httpx.ResponseNotRead()

    Attempted to access streaming response content, without having called read().

    class httpx.RequestNotRead()

    Attempted to access streaming request content, without having called read().

    "},{"location":"http2/","title":"HTTP/2","text":"

    HTTP/2 is a major new iteration of the HTTP protocol, that provides a far more efficient transport, with potential performance benefits. HTTP/2 does not change the core semantics of the request or response, but alters the way that data is sent to and from the server.

    Rather than the text format that HTTP/1.1 uses, HTTP/2 is a binary format. The binary format provides full request and response multiplexing, and efficient compression of HTTP headers. The stream multiplexing means that where HTTP/1.1 requires one TCP stream for each concurrent request, HTTP/2 allows a single TCP stream to handle multiple concurrent requests.

    HTTP/2 also provides support for functionality such as response prioritization, and server push.

    For a comprehensive guide to HTTP/2 you may want to check out \"http2 explained\".

    "},{"location":"http2/#enabling-http2","title":"Enabling HTTP/2","text":"

    When using the httpx client, HTTP/2 support is not enabled by default, because HTTP/1.1 is a mature, battle-hardened transport layer, and our HTTP/1.1 implementation may be considered the more robust option at this point in time. It is possible that a future version of httpx may enable HTTP/2 support by default.

    If you're issuing highly concurrent requests you might want to consider trying out our HTTP/2 support. You can do so by first making sure to install the optional HTTP/2 dependencies...

    $ pip install httpx[http2]\n

    And then instantiating a client with HTTP/2 support enabled:

    client = httpx.AsyncClient(http2=True)\n...\n

    You can also instantiate a client as a context manager, to ensure that all HTTP connections are nicely scoped, and will be closed once the context block is exited.

    async with httpx.AsyncClient(http2=True) as client:\n    ...\n

    HTTP/2 support is available on both Client and AsyncClient, although it's typically more useful in async contexts if you're issuing lots of concurrent requests.

    "},{"location":"http2/#inspecting-the-http-version","title":"Inspecting the HTTP version","text":"

    Enabling HTTP/2 support on the client does not necessarily mean that your requests and responses will be transported over HTTP/2, since both the client and the server need to support HTTP/2. If you connect to a server that only supports HTTP/1.1 the client will use a standard HTTP/1.1 connection instead.

    You can determine which version of the HTTP protocol was used by examining the .http_version property on the response.

    client = httpx.AsyncClient(http2=True)\nresponse = await client.get(...)\nprint(response.http_version)  # \"HTTP/1.0\", \"HTTP/1.1\", or \"HTTP/2\".\n
    "},{"location":"logging/","title":"Logging","text":"

    If you need to inspect the internal behaviour of httpx, you can use Python's standard logging to output information about the underlying network behaviour.

    For example, the following configuration...

    import logging\nimport httpx\n\nlogging.basicConfig(\n    format=\"%(levelname)s [%(asctime)s] %(name)s - %(message)s\",\n    datefmt=\"%Y-%m-%d %H:%M:%S\",\n    level=logging.DEBUG\n)\n\nhttpx.get(\"https://www.example.com\")\n

    Will send debug-level output to the console, or wherever stdout is directed...

    DEBUG [2024-09-28 17:27:40] httpx - load_ssl_context verify=True cert=None\nDEBUG [2024-09-28 17:27:40] httpx - load_verify_locations cafile='/Users/karenpetrosyan/oss/karhttpx/.venv/lib/python3.9/site-packages/certifi/cacert.pem'\nDEBUG [2024-09-28 17:27:40] httpcore.connection - connect_tcp.started host='www.example.com' port=443 local_address=None timeout=5.0 socket_options=None\nDEBUG [2024-09-28 17:27:41] httpcore.connection - connect_tcp.complete return_value=<httpcore._backends.sync.SyncStream object at 0x101f1e8e0>\nDEBUG [2024-09-28 17:27:41] httpcore.connection - start_tls.started ssl_context=SSLContext(verify=True) server_hostname='www.example.com' timeout=5.0\nDEBUG [2024-09-28 17:27:41] httpcore.connection - start_tls.complete return_value=<httpcore._backends.sync.SyncStream object at 0x1020f49a0>\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_headers.started request=<Request [b'GET']>\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_headers.complete\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_body.started request=<Request [b'GET']>\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_body.complete\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_headers.started request=<Request [b'GET']>\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK', [(b'Content-Encoding', b'gzip'), (b'Accept-Ranges', b'bytes'), (b'Age', b'407727'), (b'Cache-Control', b'max-age=604800'), (b'Content-Type', b'text/html; charset=UTF-8'), (b'Date', b'Sat, 28 Sep 2024 13:27:42 GMT'), (b'Etag', b'\"3147526947+gzip\"'), (b'Expires', b'Sat, 05 Oct 2024 13:27:42 GMT'), (b'Last-Modified', b'Thu, 17 Oct 2019 07:18:26 GMT'), (b'Server', b'ECAcc (dcd/7D43)'), (b'Vary', b'Accept-Encoding'), (b'X-Cache', b'HIT'), (b'Content-Length', b'648')])\nINFO [2024-09-28 17:27:41] httpx - HTTP Request: GET https://www.example.com \"HTTP/1.1 200 OK\"\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_body.started request=<Request [b'GET']>\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_body.complete\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - response_closed.started\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - response_closed.complete\nDEBUG [2024-09-28 17:27:41] httpcore.connection - close.started\nDEBUG [2024-09-28 17:27:41] httpcore.connection - close.complete\n

    Logging output includes information from both the high-level httpx logger, and the network-level httpcore logger, which can be configured separately.
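
    For instance, you could keep the httpx logger at INFO while silencing the more verbose httpcore logger. A minimal sketch using the standard logging module; the levels shown are just an illustration:

    import logging\nimport httpx\n\nlogging.basicConfig(level=logging.DEBUG)\nlogging.getLogger(\"httpx\").setLevel(logging.INFO)\nlogging.getLogger(\"httpcore\").setLevel(logging.WARNING)\n\nhttpx.get(\"https://www.example.com\")\n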

    For handling more complex logging configurations you might want to use the dictionary configuration style...

    import logging.config\nimport httpx\n\nLOGGING_CONFIG = {\n    \"version\": 1,\n    \"handlers\": {\n        \"default\": {\n            \"class\": \"logging.StreamHandler\",\n            \"formatter\": \"http\",\n            \"stream\": \"ext://sys.stderr\"\n        }\n    },\n    \"formatters\": {\n        \"http\": {\n            \"format\": \"%(levelname)s [%(asctime)s] %(name)s - %(message)s\",\n            \"datefmt\": \"%Y-%m-%d %H:%M:%S\",\n        }\n    },\n    'loggers': {\n        'httpx': {\n            'handlers': ['default'],\n            'level': 'DEBUG',\n        },\n        'httpcore': {\n            'handlers': ['default'],\n            'level': 'DEBUG',\n        },\n    }\n}\n\nlogging.config.dictConfig(LOGGING_CONFIG)\nhttpx.get('https://www.example.com')\n

    The exact formatting of the debug logging may be subject to change across different versions of httpx and httpcore. If you need to rely on a particular format it is recommended that you pin installation of these packages to fixed versions.

    "},{"location":"quickstart/","title":"QuickStart","text":"

    First, start by importing HTTPX:

    >>> import httpx\n

    Now, let\u2019s try to get a webpage.

    >>> r = httpx.get('https://httpbin.org/get')\n>>> r\n<Response [200 OK]>\n

    Similarly, to make an HTTP POST request:

    >>> r = httpx.post('https://httpbin.org/post', data={'key': 'value'})\n

    The PUT, DELETE, HEAD, and OPTIONS requests all follow the same style:

    >>> r = httpx.put('https://httpbin.org/put', data={'key': 'value'})\n>>> r = httpx.delete('https://httpbin.org/delete')\n>>> r = httpx.head('https://httpbin.org/get')\n>>> r = httpx.options('https://httpbin.org/get')\n
    "},{"location":"quickstart/#passing-parameters-in-urls","title":"Passing Parameters in URLs","text":"

    To include URL query parameters in the request, use the params keyword:

    >>> params = {'key1': 'value1', 'key2': 'value2'}\n>>> r = httpx.get('https://httpbin.org/get', params=params)\n

    To see how the values get encoded into the URL string, we can inspect the resulting URL that was used to make the request:

    >>> r.url\nURL('https://httpbin.org/get?key2=value2&key1=value1')\n

    You can also pass a list of items as a value:

    >>> params = {'key1': 'value1', 'key2': ['value2', 'value3']}\n>>> r = httpx.get('https://httpbin.org/get', params=params)\n>>> r.url\nURL('https://httpbin.org/get?key1=value1&key2=value2&key2=value3')\n
    "},{"location":"quickstart/#response-content","title":"Response Content","text":"

    HTTPX will automatically handle decoding the response content into Unicode text.

    >>> r = httpx.get('https://www.example.org/')\n>>> r.text\n'<!doctype html>\\n<html>\\n<head>\\n<title>Example Domain</title>...'\n

    You can inspect what encoding will be used to decode the response.

    >>> r.encoding\n'UTF-8'\n

    In some cases the response may not contain an explicit encoding, in which case HTTPX will attempt to automatically determine an encoding to use.

    >>> r.encoding\nNone\n>>> r.text\n'<!doctype html>\\n<html>\\n<head>\\n<title>Example Domain</title>...'\n

    If you need to override the standard behaviour and explicitly set the encoding to use, then you can do that too.

    >>> r.encoding = 'ISO-8859-1'\n
    "},{"location":"quickstart/#binary-response-content","title":"Binary Response Content","text":"

    The response content can also be accessed as bytes, for non-text responses:

    >>> r.content\nb'<!doctype html>\\n<html>\\n<head>\\n<title>Example Domain</title>...'\n

    Any gzip and deflate HTTP response encodings will automatically be decoded for you. If brotlipy is installed, then the brotli response encoding will be supported. If zstandard is installed, then zstd response encodings will also be supported.

    For example, to create an image from binary data returned by a request, you can use the following code:

    >>> from PIL import Image\n>>> from io import BytesIO\n>>> i = Image.open(BytesIO(r.content))\n
    "},{"location":"quickstart/#json-response-content","title":"JSON Response Content","text":"

    Often Web API responses will be encoded as JSON.

    >>> r = httpx.get('https://api.github.com/events')\n>>> r.json()\n[{'repository': {'open_issues': 0, 'url': 'https://github.com/...' ...  }}]\n
    "},{"location":"quickstart/#custom-headers","title":"Custom Headers","text":"

    To include additional headers in the outgoing request, use the headers keyword argument:

    >>> url = 'https://httpbin.org/headers'\n>>> headers = {'user-agent': 'my-app/0.0.1'}\n>>> r = httpx.get(url, headers=headers)\n
    "},{"location":"quickstart/#sending-form-encoded-data","title":"Sending Form Encoded Data","text":"

    Some types of HTTP requests, such as POST and PUT requests, can include data in the request body. One common way of including that is as form-encoded data, which is used for HTML forms.

    >>> data = {'key1': 'value1', 'key2': 'value2'}\n>>> r = httpx.post(\"https://httpbin.org/post\", data=data)\n>>> print(r.text)\n{\n  ...\n  \"form\": {\n    \"key2\": \"value2\",\n    \"key1\": \"value1\"\n  },\n  ...\n}\n

    Form encoded data can also include multiple values for a given key.

    >>> data = {'key1': ['value1', 'value2']}\n>>> r = httpx.post(\"https://httpbin.org/post\", data=data)\n>>> print(r.text)\n{\n  ...\n  \"form\": {\n    \"key1\": [\n      \"value1\",\n      \"value2\"\n    ]\n  },\n  ...\n}\n
    "},{"location":"quickstart/#sending-multipart-file-uploads","title":"Sending Multipart File Uploads","text":"

    You can also upload files, using HTTP multipart encoding:

    >>> files = {'upload-file': open('report.xls', 'rb')}\n>>> r = httpx.post(\"https://httpbin.org/post\", files=files)\n>>> print(r.text)\n{\n  ...\n  \"files\": {\n    \"upload-file\": \"<... binary content ...>\"\n  },\n  ...\n}\n

    You can also explicitly set the filename and content type, by using a tuple of items for the file value:

    >>> files = {'upload-file': ('report.xls', open('report.xls', 'rb'), 'application/vnd.ms-excel')}\n>>> r = httpx.post(\"https://httpbin.org/post\", files=files)\n>>> print(r.text)\n{\n  ...\n  \"files\": {\n    \"upload-file\": \"<... binary content ...>\"\n  },\n  ...\n}\n

    If you need to include non-file data fields in the multipart form, use the data=... parameter:

    >>> data = {'message': 'Hello, world!'}\n>>> files = {'file': open('report.xls', 'rb')}\n>>> r = httpx.post(\"https://httpbin.org/post\", data=data, files=files)\n>>> print(r.text)\n{\n  ...\n  \"files\": {\n    \"file\": \"<... binary content ...>\"\n  },\n  \"form\": {\n    \"message\": \"Hello, world!\",\n  },\n  ...\n}\n
    "},{"location":"quickstart/#sending-json-encoded-data","title":"Sending JSON Encoded Data","text":"

    Form encoded data is okay if all you need is a simple key-value data structure. For more complicated data structures you'll often want to use JSON encoding instead.

    >>> data = {'integer': 123, 'boolean': True, 'list': ['a', 'b', 'c']}\n>>> r = httpx.post(\"https://httpbin.org/post\", json=data)\n>>> print(r.text)\n{\n  ...\n  \"json\": {\n    \"boolean\": true,\n    \"integer\": 123,\n    \"list\": [\n      \"a\",\n      \"b\",\n      \"c\"\n    ]\n  },\n  ...\n}\n
    "},{"location":"quickstart/#sending-binary-request-data","title":"Sending Binary Request Data","text":"

    For other encodings, you should use the content=... parameter, passing either a bytes type or a generator that yields bytes.

    >>> content = b'Hello, world'\n>>> r = httpx.post(\"https://httpbin.org/post\", content=content)\n

    You may also want to set a custom Content-Type header when uploading binary data.
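
    For example, an explicit Content-Type can be supplied via the headers argument. A small sketch, reusing the httpbin.org endpoint above:

    >>> content = b'Hello, world'\n>>> headers = {'Content-Type': 'text/plain'}\n>>> r = httpx.post(\"https://httpbin.org/post\", content=content, headers=headers)\n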

    "},{"location":"quickstart/#response-status-codes","title":"Response Status Codes","text":"

    We can inspect the HTTP status code of the response:

    >>> r = httpx.get('https://httpbin.org/get')\n>>> r.status_code\n200\n

    HTTPX also includes an easy shortcut for accessing status codes by their text phrase.

    >>> r.status_code == httpx.codes.OK\nTrue\n

    We can raise an exception for any responses which are not a 2xx success code:

    >>> not_found = httpx.get('https://httpbin.org/status/404')\n>>> not_found.status_code\n404\n>>> not_found.raise_for_status()\nTraceback (most recent call last):\n  File \"/Users/tomchristie/GitHub/encode/httpcore/httpx/models.py\", line 837, in raise_for_status\n    raise HTTPStatusError(message, response=self)\nhttpx._exceptions.HTTPStatusError: 404 Client Error: Not Found for url: https://httpbin.org/status/404\nFor more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/404\n

    Any successful response codes will return the Response instance rather than raising an exception.

    >>> r.raise_for_status()\n

    The method returns the response instance, allowing you to use it inline. For example:

    >>> r = httpx.get('...').raise_for_status()\n>>> data = httpx.get('...').raise_for_status().json()\n
    "},{"location":"quickstart/#response-headers","title":"Response Headers","text":"

    The response headers are available as a dictionary-like interface.

    >>> r.headers\nHeaders({\n    'content-encoding': 'gzip',\n    'transfer-encoding': 'chunked',\n    'connection': 'close',\n    'server': 'nginx/1.0.4',\n    'x-runtime': '148ms',\n    'etag': '\"e1ca502697e5c9317743dc078f67693f\"',\n    'content-type': 'application/json'\n})\n

    The Headers data type is case-insensitive, so you can use any capitalization.

    >>> r.headers['Content-Type']\n'application/json'\n\n>>> r.headers.get('content-type')\n'application/json'\n

    Multiple values for a single response header are represented as a single comma-separated value, as per RFC 7230:

    A recipient MAY combine multiple header fields with the same field name into one \u201cfield-name: field-value\u201d pair, without changing the semantics of the message, by appending each subsequent field-value to the combined field value in order, separated by a comma.
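
    As an illustrative sketch, the Headers data type exposes both the combined value and the individual values (here constructed by hand rather than taken from a real response):

    >>> headers = httpx.Headers([('vary', 'Accept-Encoding'), ('vary', 'User-Agent')])\n>>> headers['vary']\n'Accept-Encoding, User-Agent'\n>>> headers.get_list('vary')\n['Accept-Encoding', 'User-Agent']\n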

    "},{"location":"quickstart/#streaming-responses","title":"Streaming Responses","text":"

    For large downloads you may want to use streaming responses that do not load the entire response body into memory at once.

    You can stream the binary content of the response...

    >>> with httpx.stream(\"GET\", \"https://www.example.com\") as r:\n...     for data in r.iter_bytes():\n...         print(data)\n

    Or the text of the response...

    >>> with httpx.stream(\"GET\", \"https://www.example.com\") as r:\n...     for text in r.iter_text():\n...         print(text)\n

    Or stream the text, on a line-by-line basis...

    >>> with httpx.stream(\"GET\", \"https://www.example.com\") as r:\n...     for line in r.iter_lines():\n...         print(line)\n

    HTTPX will use universal line endings, normalising all cases to \\n.

    In some cases you might want to access the raw bytes on the response without applying any HTTP content decoding. In this case any content encoding that the web server has applied such as gzip, deflate, brotli, or zstd will not be automatically decoded.

    >>> with httpx.stream(\"GET\", \"https://www.example.com\") as r:\n...     for chunk in r.iter_raw():\n...         print(chunk)\n

    If you're using streaming responses in any of these ways then the response.content and response.text attributes will not be available, and will raise errors if accessed. However you can also use the response streaming functionality to conditionally load the response body:

    >>> with httpx.stream(\"GET\", \"https://www.example.com\") as r:\n...     if int(r.headers['Content-Length']) < TOO_LONG:\n...         r.read()\n...         print(r.text)\n
    "},{"location":"quickstart/#cookies","title":"Cookies","text":"

    Any cookies that are set on the response can be easily accessed:

    >>> r = httpx.get('https://httpbin.org/cookies/set?chocolate=chip')\n>>> r.cookies['chocolate']\n'chip'\n

    To include cookies in an outgoing request, use the cookies parameter:

    >>> cookies = {\"peanut\": \"butter\"}\n>>> r = httpx.get('https://httpbin.org/cookies', cookies=cookies)\n>>> r.json()\n{'cookies': {'peanut': 'butter'}}\n

    Cookies are returned in a Cookies instance, which is a dict-like data structure with an additional API for accessing cookies by their domain or path.

    >>> cookies = httpx.Cookies()\n>>> cookies.set('cookie_on_domain', 'hello, there!', domain='httpbin.org')\n>>> cookies.set('cookie_off_domain', 'nope.', domain='example.org')\n>>> r = httpx.get('http://httpbin.org/cookies', cookies=cookies)\n>>> r.json()\n{'cookies': {'cookie_on_domain': 'hello, there!'}}\n
    "},{"location":"quickstart/#redirection-and-history","title":"Redirection and History","text":"

    By default, HTTPX does not follow redirects for any HTTP method, although this can be explicitly enabled.

    For example, GitHub redirects all HTTP requests to HTTPS.

    >>> r = httpx.get('http://github.com/')\n>>> r.status_code\n301\n>>> r.history\n[]\n>>> r.next_request\n<Request('GET', 'https://github.com/')>\n

    You can modify the default redirection handling with the follow_redirects parameter:

    >>> r = httpx.get('http://github.com/', follow_redirects=True)\n>>> r.url\nURL('https://github.com/')\n>>> r.status_code\n200\n>>> r.history\n[<Response [301 Moved Permanently]>]\n

    The history property of the response can be used to inspect any followed redirects. It contains a list of any redirect responses that were followed, in the order in which they were made.
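
    For example, you could iterate over the redirect chain from the request above. An illustrative sketch:

    >>> for redirect in r.history:\n...     print(redirect.status_code, redirect.url)\n301 http://github.com/\n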

    "},{"location":"quickstart/#timeouts","title":"Timeouts","text":"

    HTTPX defaults to including reasonable timeouts for all network operations, meaning that if a connection is not properly established then it should always raise an error rather than hanging indefinitely.

    The default timeout for network inactivity is five seconds. You can modify the value to be more or less strict:

    >>> httpx.get('https://github.com/', timeout=0.001)\n

    You can also disable the timeout behavior completely...

    >>> httpx.get('https://github.com/', timeout=None)\n

    For advanced timeout management, see Timeout fine-tuning.
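
    As a pointer towards that, the httpx.Timeout class lets you configure the individual timeout values. A minimal sketch:

    >>> timeout = httpx.Timeout(10.0, connect=5.0)\n>>> httpx.get('https://github.com/', timeout=timeout)\n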

    "},{"location":"quickstart/#authentication","title":"Authentication","text":"

    HTTPX supports Basic and Digest HTTP authentication.

    To provide Basic authentication credentials, pass a 2-tuple of plaintext str or bytes objects as the auth argument to the request functions:

    >>> httpx.get(\"https://example.com\", auth=(\"my_user\", \"password123\"))\n

    To provide credentials for Digest authentication you'll need to instantiate a DigestAuth object with the plaintext username and password as arguments. This object can then be passed as the auth argument to the request methods as above:

    >>> auth = httpx.DigestAuth(\"my_user\", \"password123\")\n>>> httpx.get(\"https://example.com\", auth=auth)\n<Response [200 OK]>\n
    "},{"location":"quickstart/#exceptions","title":"Exceptions","text":"

    HTTPX will raise exceptions if an error occurs.

    The most important exception classes in HTTPX are RequestError and HTTPStatusError.

    The RequestError class is a superclass that encompasses any exception that occurs while issuing an HTTP request. These exceptions include a .request attribute.

    try:\n    response = httpx.get(\"https://www.example.com/\")\nexcept httpx.RequestError as exc:\n    print(f\"An error occurred while requesting {exc.request.url!r}.\")\n

    The HTTPStatusError class is raised by response.raise_for_status() on responses which are not a 2xx success code. These exceptions include both a .request and a .response attribute.

    response = httpx.get(\"https://www.example.com/\")\ntry:\n    response.raise_for_status()\nexcept httpx.HTTPStatusError as exc:\n    print(f\"Error response {exc.response.status_code} while requesting {exc.request.url!r}.\")\n

    There is also a base class HTTPError that includes both of these categories, and can be used to catch either failed requests, or 4xx and 5xx responses.

    You can either use this base class to catch both categories...

    try:\n    response = httpx.get(\"https://www.example.com/\")\n    response.raise_for_status()\nexcept httpx.HTTPError as exc:\n    print(f\"Error while requesting {exc.request.url!r}.\")\n

    Or handle each case explicitly...

    try:\n    response = httpx.get(\"https://www.example.com/\")\n    response.raise_for_status()\nexcept httpx.RequestError as exc:\n    print(f\"An error occurred while requesting {exc.request.url!r}.\")\nexcept httpx.HTTPStatusError as exc:\n    print(f\"Error response {exc.response.status_code} while requesting {exc.request.url!r}.\")\n

    For a full list of available exceptions, see Exceptions (API Reference).

    "},{"location":"third_party_packages/","title":"Third Party Packages","text":"

    As HTTPX usage grows, there is an expanding community of developers building tools and libraries that integrate with HTTPX, or depend on HTTPX. Here are some of them.

    "},{"location":"third_party_packages/#plugins","title":"Plugins","text":""},{"location":"third_party_packages/#httpx-ws","title":"httpx-ws","text":"

    GitHub - Documentation

    WebSocket support for HTTPX.

    "},{"location":"third_party_packages/#httpx-socks","title":"httpx-socks","text":"

    GitHub

    Proxy (HTTP, SOCKS) transports for httpx.

    "},{"location":"third_party_packages/#hishel","title":"Hishel","text":"

    GitHub - Documentation

    An elegant HTTP Cache implementation for HTTPX and HTTP Core.

    "},{"location":"third_party_packages/#authlib","title":"Authlib","text":"

    GitHub - Documentation

    The ultimate Python library in building OAuth and OpenID Connect clients and servers. Includes an OAuth HTTPX client.

    "},{"location":"third_party_packages/#gidgethub","title":"Gidgethub","text":"

    GitHub - Documentation

    An asynchronous GitHub API library. Includes HTTPX support.

    "},{"location":"third_party_packages/#httpx-auth","title":"HTTPX-Auth","text":"

    GitHub - Documentation

    Provides authentication classes to be used with the HTTPX authentication parameter.

    "},{"location":"third_party_packages/#pytest-httpx","title":"pytest-HTTPX","text":"

    GitHub - Documentation

    Provides the httpx_mock pytest fixture to mock HTTPX within test cases.

    "},{"location":"third_party_packages/#respx","title":"RESPX","text":"

    GitHub - Documentation

    A utility for mocking out the Python HTTPX library.

    "},{"location":"third_party_packages/#rpcpy","title":"rpc.py","text":"

    GitHub - Documentation

    A fast and powerful RPC framework based on ASGI/WSGI. Uses HTTPX as the client of the RPC service.

    "},{"location":"third_party_packages/#vcrpy","title":"VCR.py","text":"

    GitHub - Documentation

    A utility for recording and replaying HTTP requests.

    "},{"location":"third_party_packages/#httpx-caching","title":"httpx-caching","text":"

    GitHub

    This package adds caching functionality to HTTPX.

    "},{"location":"third_party_packages/#httpx-sse","title":"httpx-sse","text":"

    GitHub

    Allows consuming Server-Sent Events (SSE) with HTTPX.

    "},{"location":"third_party_packages/#robox","title":"robox","text":"

    GitHub

    A library for scraping the web built on top of HTTPX.

    "},{"location":"third_party_packages/#gists","title":"Gists","text":""},{"location":"third_party_packages/#urllib3-transport","title":"urllib3-transport","text":"

    GitHub

    This public gist provides an example of a custom transport implementation on top of the battle-tested urllib3 library.

    "},{"location":"troubleshooting/","title":"Troubleshooting","text":"

    This page lists some common problems or issues you could encounter while developing with HTTPX, as well as possible solutions.

    "},{"location":"troubleshooting/#proxies","title":"Proxies","text":""},{"location":"troubleshooting/#the-handshake-operation-timed-out-on-https-requests-when-using-a-proxy","title":"\"The handshake operation timed out\" on HTTPS requests when using a proxy","text":"

    Description: When using a proxy and making an HTTPS request, you see an exception looking like this:

    httpx.ProxyError: _ssl.c:1091: The handshake operation timed out\n

    Similar issues: encode/httpx#1412, encode/httpx#1433

    Resolution: it is likely that you've set up your proxies like this...

    mounts = {\n  \"http://\": httpx.HTTPTransport(proxy=\"http://myproxy.org\"),\n  \"https://\": httpx.HTTPTransport(proxy=\"https://myproxy.org\"),\n}\n

    Using this setup, you're telling HTTPX to connect to the proxy using HTTP for HTTP requests, and using HTTPS for HTTPS requests.

    But if you get the error above, it is likely that your proxy doesn't support connecting via HTTPS. Don't worry: that's a common gotcha.

    Change the scheme of your HTTPS proxy to http://... instead of https://...:

    mounts = {\n  \"http://\": httpx.HTTPTransport(proxy=\"http://myproxy.org\"),\n  \"https://\": httpx.HTTPTransport(proxy=\"http://myproxy.org\"),\n}\n

    This can be simplified to:

    proxy = \"http://myproxy.org\"\nwith httpx.Client(proxy=proxy) as client:\n  ...\n

    For more information, see Proxies: FORWARD vs TUNNEL.

    "},{"location":"troubleshooting/#error-when-making-requests-to-an-https-proxy","title":"Error when making requests to an HTTPS proxy","text":"

    Description: your proxy does support connecting via HTTPS, but you are seeing errors along the lines of...

    httpx.ProxyError: [SSL: PRE_MAC_LENGTH_TOO_LONG] invalid alert (_ssl.c:1091)\n

    Similar issues: encode/httpx#1424.

    Resolution: HTTPX does not properly support HTTPS proxies at this time. If that's something you're interested in having, please see encode/httpx#1434 and consider lending a hand there.

    "},{"location":"advanced/authentication/","title":"Authentication","text":"

    Authentication can either be included on a per-request basis...

    >>> auth = httpx.BasicAuth(username=\"username\", password=\"secret\")\n>>> client = httpx.Client()\n>>> response = client.get(\"https://www.example.com/\", auth=auth)\n

    Or configured on the client instance, ensuring that all outgoing requests will include authentication credentials...

    >>> auth = httpx.BasicAuth(username=\"username\", password=\"secret\")\n>>> client = httpx.Client(auth=auth)\n>>> response = client.get(\"https://www.example.com/\")\n
    "},{"location":"advanced/authentication/#basic-authentication","title":"Basic authentication","text":"

    HTTP basic authentication is an unencrypted authentication scheme that uses a simple encoding of the username and password in the request Authorization header. Since it is unencrypted it should typically only be used over https, although this is not strictly enforced.

    >>> auth = httpx.BasicAuth(username=\"finley\", password=\"secret\")\n>>> client = httpx.Client(auth=auth)\n>>> response = client.get(\"https://httpbin.org/basic-auth/finley/secret\")\n>>> response\n<Response [200 OK]>\n
    "},{"location":"advanced/authentication/#digest-authentication","title":"Digest authentication","text":"

    HTTP digest authentication is a challenge-response authentication scheme. Unlike basic authentication it does not send the password in plaintext, so it can also be used over unencrypted http connections. It requires an additional round-trip in order to negotiate the authentication.

    >>> auth = httpx.DigestAuth(username=\"olivia\", password=\"secret\")\n>>> client = httpx.Client(auth=auth)\n>>> response = client.get(\"https://httpbin.org/digest-auth/auth/olivia/secret\")\n>>> response\n<Response [200 OK]>\n>>> response.history\n[<Response [401 UNAUTHORIZED]>]\n
    "},{"location":"advanced/authentication/#netrc-authentication","title":"NetRC authentication","text":"

    HTTPX can be configured to use a .netrc config file for authentication.

    The .netrc config file allows authentication credentials to be associated with specified hosts. When a request is made to a host that is found in the netrc file, the username and password will be included using HTTP basic authentication.

    Example .netrc file:

    machine example.org\nlogin example-username\npassword example-password\n\nmachine python-httpx.org\nlogin other-username\npassword other-password\n

    Here are some examples of configuring .netrc authentication with httpx.

    Use the default .netrc file in the user's home directory:

    >>> auth = httpx.NetRCAuth()\n>>> client = httpx.Client(auth=auth)\n

    Use an explicit path to a .netrc file:

    >>> auth = httpx.NetRCAuth(file=\"/path/to/.netrc\")\n>>> client = httpx.Client(auth=auth)\n

    Use the NETRC environment variable to configure a path to the .netrc file, or fall back to the default.

    >>> auth = httpx.NetRCAuth(file=os.environ.get(\"NETRC\"))\n>>> client = httpx.Client(auth=auth)\n

    The NetRCAuth() class uses the netrc.netrc() function from the Python standard library. See the documentation there for more details on exceptions that may be raised if the .netrc file is not found, or cannot be parsed.

    "},{"location":"advanced/authentication/#custom-authentication-schemes","title":"Custom authentication schemes","text":"

    When issuing requests or instantiating a client, the auth argument can be used to pass an authentication scheme to use. The auth argument may be one of the following...

    • A two-tuple of username/password, to be used with basic authentication.
    • An instance of httpx.BasicAuth(), httpx.DigestAuth(), or httpx.NetRCAuth().
    • A callable, accepting a request and returning an authenticated request instance (a minimal sketch follows this list).
    • An instance of subclasses of httpx.Auth.
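
    For the callable option, a plain function that mutates and returns the request is enough. A minimal sketch, using a made-up X-Authentication header value:

    def custom_auth(request):\n    # Add a (hypothetical) token header and return the authenticated request.\n    request.headers['X-Authentication'] = 'example-token'\n    return request\n\nclient = httpx.Client(auth=custom_auth)\n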

    The most involved of these is the last, which allows you to create authentication flows involving one or more requests. A subclass of httpx.Auth should implement def auth_flow(request), and yield any requests that need to be made...

    class MyCustomAuth(httpx.Auth):\n    def __init__(self, token):\n        self.token = token\n\n    def auth_flow(self, request):\n        # Send the request, with a custom `X-Authentication` header.\n        request.headers['X-Authentication'] = self.token\n        yield request\n

    If the auth flow requires more than one request, you can issue multiple yields, and obtain the response in each case...

    class MyCustomAuth(httpx.Auth):\n    def __init__(self, token):\n        self.token = token\n\n    def auth_flow(self, request):\n      response = yield request\n      if response.status_code == 401:\n          # If the server issues a 401 response then resend the request,\n          # with a custom `X-Authentication` header.\n          request.headers['X-Authentication'] = self.token\n          yield request\n

    Custom authentication classes are designed to not perform any I/O, so that they may be used with both sync and async client instances. If you are implementing an authentication scheme that requires the request body, then you need to indicate this on the class using a requires_request_body property.

    You will then be able to access request.content inside the .auth_flow() method.

    class MyCustomAuth(httpx.Auth):\n    requires_request_body = True\n\n    def __init__(self, token):\n        self.token = token\n\n    def auth_flow(self, request):\n      response = yield request\n      if response.status_code == 401:\n          # If the server issues a 401 response then resend the request,\n          # with a custom `X-Authentication` header.\n          request.headers['X-Authentication'] = self.sign_request(...)\n          yield request\n\n    def sign_request(self, request):\n        # Create a request signature, based on `request.method`, `request.url`,\n        # `request.headers`, and `request.content`.\n        ...\n

    Similarly, if you are implementing a scheme that requires access to the response body, then use the requires_response_body property. You will then be able to access response body properties and methods such as response.content, response.text, response.json(), etc.

    class MyCustomAuth(httpx.Auth):\n    requires_response_body = True\n\n    def __init__(self, access_token, refresh_token, refresh_url):\n        self.access_token = access_token\n        self.refresh_token = refresh_token\n        self.refresh_url = refresh_url\n\n    def auth_flow(self, request):\n        request.headers[\"X-Authentication\"] = self.access_token\n        response = yield request\n\n        if response.status_code == 401:\n            # If the server issues a 401 response, then issue a request to\n            # refresh tokens, and resend the request.\n            refresh_response = yield self.build_refresh_request()\n            self.update_tokens(refresh_response)\n\n            request.headers[\"X-Authentication\"] = self.access_token\n            yield request\n\n    def build_refresh_request(self):\n        # Return an `httpx.Request` for refreshing tokens.\n        ...\n\n    def update_tokens(self, response):\n        # Update the `.access_token` and `.refresh_token` tokens\n        # based on a refresh response.\n        data = response.json()\n        ...\n

    If you do need to perform I/O other than HTTP requests, such as accessing a disk-based cache, or you need to use concurrency primitives, such as locks, then you should override .sync_auth_flow() and .async_auth_flow() (instead of .auth_flow()). The former will be used by httpx.Client, while the latter will be used by httpx.AsyncClient.

    import asyncio\nimport threading\nimport httpx\n\n\nclass MyCustomAuth(httpx.Auth):\n    def __init__(self):\n        self._sync_lock = threading.RLock()\n        self._async_lock = asyncio.Lock()\n\n    def sync_get_token(self):\n        with self._sync_lock:\n            ...\n\n    def sync_auth_flow(self, request):\n        token = self.sync_get_token()\n        request.headers[\"Authorization\"] = f\"Token {token}\"\n        yield request\n\n    async def async_get_token(self):\n        async with self._async_lock:\n            ...\n\n    async def async_auth_flow(self, request):\n        token = await self.async_get_token()\n        request.headers[\"Authorization\"] = f\"Token {token}\"\n        yield request\n

    If you only want to support one of the two methods, then you should still override the other one, but have it raise an explicit RuntimeError.

    import httpx\nimport sync_only_library\n\n\nclass MyCustomAuth(httpx.Auth):\n    def sync_auth_flow(self, request):\n        token = sync_only_library.get_token(...)\n        request.headers[\"Authorization\"] = f\"Token {token}\"\n        yield request\n\n    async def async_auth_flow(self, request):\n        raise RuntimeError(\"Cannot use a sync authentication class with httpx.AsyncClient\")\n
    "},{"location":"advanced/clients/","title":"Clients","text":"

    Hint

    If you are coming from Requests, httpx.Client() is what you can use instead of requests.Session().

    "},{"location":"advanced/clients/#why-use-a-client","title":"Why use a Client?","text":"

    TL;DR

    If you do anything more than experimentation, one-off scripts, or prototypes, then you should use a Client instance.

    More efficient usage of network resources

    When you make requests using the top-level API as documented in the Quickstart guide, HTTPX has to establish a new connection for every single request (connections are not reused). As the number of requests to a host increases, this quickly becomes inefficient.

    On the other hand, a Client instance uses HTTP connection pooling. This means that when you make several requests to the same host, the Client will reuse the underlying TCP connection, instead of recreating one for every single request.

    This can bring significant performance improvements compared to using the top-level API, including:

    • Reduced latency across requests (no handshaking).
    • Reduced CPU usage and round-trips.
    • Reduced network congestion.

    Extra features

    Client instances also support features that aren't available at the top-level API, such as:

    • Cookie persistence across requests.
    • Applying configuration across all outgoing requests.
    • Sending requests through HTTP proxies.
    • Using HTTP/2.

    The other sections on this page go into further detail about what you can do with a Client instance.

    "},{"location":"advanced/clients/#usage","title":"Usage","text":"

    The recommended way to use a Client is as a context manager. This will ensure that connections are properly cleaned up when leaving the with block:

    with httpx.Client() as client:\n    ...\n

    Alternatively, if you are not using a with block, you can explicitly close the connection pool using .close():

    client = httpx.Client()\ntry:\n    ...\nfinally:\n    client.close()\n
    "},{"location":"advanced/clients/#making-requests","title":"Making requests","text":"

    Once you have a Client, you can send requests using .get(), .post(), etc. For example:

    >>> with httpx.Client() as client:\n...     r = client.get('https://example.com')\n...\n>>> r\n<Response [200 OK]>\n

    These methods accept the same arguments as httpx.get(), httpx.post(), etc. This means that all features documented in the Quickstart guide are also available at the client level.

    For example, to send a request with custom headers:

    >>> with httpx.Client() as client:\n...     headers = {'X-Custom': 'value'}\n...     r = client.get('https://example.com', headers=headers)\n...\n>>> r.request.headers['X-Custom']\n'value'\n
    "},{"location":"advanced/clients/#sharing-configuration-across-requests","title":"Sharing configuration across requests","text":"

    Clients allow you to apply configuration to all outgoing requests by passing parameters to the Client constructor.

    For example, to apply a set of custom headers on every request:

    >>> url = 'http://httpbin.org/headers'\n>>> headers = {'user-agent': 'my-app/0.0.1'}\n>>> with httpx.Client(headers=headers) as client:\n...     r = client.get(url)\n...\n>>> r.json()['headers']['User-Agent']\n'my-app/0.0.1'\n
    "},{"location":"advanced/clients/#merging-of-configuration","title":"Merging of configuration","text":"

    When a configuration option is provided at both the client-level and request-level, one of two things can happen:

    • For headers, query parameters and cookies, the values are combined together. For example:
    >>> headers = {'X-Auth': 'from-client'}\n>>> params = {'client_id': 'client1'}\n>>> with httpx.Client(headers=headers, params=params) as client:\n...     headers = {'X-Custom': 'from-request'}\n...     params = {'request_id': 'request1'}\n...     r = client.get('https://example.com', headers=headers, params=params)\n...\n>>> r.request.url\nURL('https://example.com?client_id=client1&request_id=request1')\n>>> r.request.headers['X-Auth']\n'from-client'\n>>> r.request.headers['X-Custom']\n'from-request'\n
    • For all other parameters, the request-level value takes priority. For example:
    >>> with httpx.Client(auth=('tom', 'mot123')) as client:\n...     r = client.get('https://example.com', auth=('alice', 'ecila123'))\n...\n>>> _, _, auth = r.request.headers['Authorization'].partition(' ')\n>>> import base64\n>>> base64.b64decode(auth)\nb'alice:ecila123'\n

    If you need finer-grained control on the merging of client-level and request-level parameters, see Request instances.

    "},{"location":"advanced/clients/#other-client-only-configuration-options","title":"Other Client-only configuration options","text":"

    Additionally, Client accepts some configuration options that aren't available at the request level.

    For example, base_url allows you to prepend a URL to all outgoing requests:

    >>> with httpx.Client(base_url='http://httpbin.org') as client:\n...     r = client.get('/headers')\n...\n>>> r.request.url\nURL('http://httpbin.org/headers')\n

    For a list of all available client parameters, see the Client API reference.

    "},{"location":"advanced/clients/#request-instances","title":"Request instances","text":"

    For maximum control on what gets sent over the wire, HTTPX supports building explicit Request instances:

    request = httpx.Request(\"GET\", \"https://example.com\")\n

    To dispatch a Request instance across to the network, create a Client instance and use .send():

    with httpx.Client() as client:\n    response = client.send(request)\n    ...\n

    If you need to mix client-level and request-level options in a way that is not supported by the default Merging of parameters, you can use .build_request() and then make arbitrary modifications to the Request instance. For example:

    headers = {\"X-Api-Key\": \"...\", \"X-Client-ID\": \"ABC123\"}\n\nwith httpx.Client(headers=headers) as client:\n    request = client.build_request(\"GET\", \"https://api.example.com\")\n\n    print(request.headers[\"X-Client-ID\"])  # \"ABC123\"\n\n    # Don't send the API key for this particular request.\n    del request.headers[\"X-Api-Key\"]\n\n    response = client.send(request)\n    ...\n
    "},{"location":"advanced/clients/#monitoring-download-progress","title":"Monitoring download progress","text":"

    If you need to monitor download progress of large responses, you can use response streaming and inspect the response.num_bytes_downloaded property.

    This interface is required for properly determining download progress, because the total number of bytes returned by response.content or response.iter_bytes() will not always correspond with the raw content length of the response if HTTP response compression is being used.

    For example, showing a progress bar using the tqdm library while a response is being downloaded could be done like this\u2026

    import tempfile\n\nimport httpx\nfrom tqdm import tqdm\n\nwith tempfile.NamedTemporaryFile() as download_file:\n    url = \"https://speed.hetzner.de/100MB.bin\"\n    with httpx.stream(\"GET\", url) as response:\n        total = int(response.headers[\"Content-Length\"])\n\n        with tqdm(total=total, unit_scale=True, unit_divisor=1024, unit=\"B\") as progress:\n            num_bytes_downloaded = response.num_bytes_downloaded\n            for chunk in response.iter_bytes():\n                download_file.write(chunk)\n                progress.update(response.num_bytes_downloaded - num_bytes_downloaded)\n                num_bytes_downloaded = response.num_bytes_downloaded\n

    Or an alternate example, this time using the rich library\u2026

    import tempfile\nimport httpx\nimport rich.progress\n\nwith tempfile.NamedTemporaryFile() as download_file:\n    url = \"https://speed.hetzner.de/100MB.bin\"\n    with httpx.stream(\"GET\", url) as response:\n        total = int(response.headers[\"Content-Length\"])\n\n        with rich.progress.Progress(\n            \"[progress.percentage]{task.percentage:>3.0f}%\",\n            rich.progress.BarColumn(bar_width=None),\n            rich.progress.DownloadColumn(),\n            rich.progress.TransferSpeedColumn(),\n        ) as progress:\n            download_task = progress.add_task(\"Download\", total=total)\n            for chunk in response.iter_bytes():\n                download_file.write(chunk)\n                progress.update(download_task, completed=response.num_bytes_downloaded)\n

    "},{"location":"advanced/clients/#monitoring-upload-progress","title":"Monitoring upload progress","text":"

    If you need to monitor upload progress of large requests, you can use request content generator streaming.

    For example, showing a progress bar using the tqdm library.

    import io\nimport random\n\nimport httpx\nfrom tqdm import tqdm\n\n\ndef gen():\n    \"\"\"\n    This is a complete example using generated random bytes.\n    You can replace `io.BytesIO` with a real file object.\n    \"\"\"\n    total = 32 * 1024 * 1024  # 32 MB\n    with tqdm(ascii=True, unit_scale=True, unit='B', unit_divisor=1024, total=total) as bar:\n        with io.BytesIO(random.randbytes(total)) as f:\n            while data := f.read(1024):\n                yield data\n                bar.update(len(data))\n\n\nhttpx.post(\"https://httpbin.org/post\", content=gen())\n

    "},{"location":"advanced/clients/#multipart-file-encoding","title":"Multipart file encoding","text":"

    As mentioned in the quickstart, multipart file encoding is available by passing a dictionary with the names of the payloads as keys, and either a tuple of elements, a file-like object, or a string as values.

    >>> files = {'upload-file': ('report.xls', open('report.xls', 'rb'), 'application/vnd.ms-excel')}\n>>> r = httpx.post(\"https://httpbin.org/post\", files=files)\n>>> print(r.text)\n{\n  ...\n  \"files\": {\n    \"upload-file\": \"<... binary content ...>\"\n  },\n  ...\n}\n

    More specifically, if a tuple is used as a value, it must have between 2 and 3 elements:

    • The first element is an optional file name which can be set to None.
    • The second element may be a file-like object or a string which will be automatically encoded in UTF-8.
    • An optional third element can be used to specify the MIME type of the file being uploaded. If not specified HTTPX will attempt to guess the MIME type based on the file name, with unknown file extensions defaulting to \"application/octet-stream\". If the file name is explicitly set to None then HTTPX will not include a content-type MIME header field.
    >>> files = {'upload-file': (None, 'text content', 'text/plain')}\n>>> r = httpx.post(\"https://httpbin.org/post\", files=files)\n>>> print(r.text)\n{\n  ...\n  \"files\": {},\n  \"form\": {\n    \"upload-file\": \"text content\"\n  },\n  ...\n}\n

    Tip

    It is safe to upload large files this way. File uploads are streaming by default, meaning that only one chunk will be loaded into memory at a time.

    Non-file data fields can be included in the multipart form by passing them to data=....

    You can also send multiple files in one go with a multiple file field form. To do that, pass a list of (field, <file>) items instead of a dictionary, allowing you to pass multiple items with the same field. For instance, this request sends two files, foo.png and bar.png, in one request on the images form field:

    >>> files = [('images', ('foo.png', open('foo.png', 'rb'), 'image/png')),\n                      ('images', ('bar.png', open('bar.png', 'rb'), 'image/png'))]\n>>> r = httpx.post(\"https://httpbin.org/post\", files=files)\n
    "},{"location":"advanced/event-hooks/","title":"Event Hooks","text":"

    HTTPX allows you to register \"event hooks\" with the client, that are called every time a particular type of event takes place.

    There are currently two event hooks:

    • request - Called after a request is fully prepared, but before it is sent to the network. Passed the request instance.
    • response - Called after the response has been fetched from the network, but before it is returned to the caller. Passed the response instance.

    These allow you to install client-wide functionality such as logging, monitoring or tracing.

    def log_request(request):\n    print(f\"Request event hook: {request.method} {request.url} - Waiting for response\")\n\ndef log_response(response):\n    request = response.request\n    print(f\"Response event hook: {request.method} {request.url} - Status {response.status_code}\")\n\nclient = httpx.Client(event_hooks={'request': [log_request], 'response': [log_response]})\n

    You can also use these hooks to install response processing code, such as this example, which creates a client instance that always raises httpx.HTTPStatusError on 4xx and 5xx responses.

    def raise_on_4xx_5xx(response):\n    response.raise_for_status()\n\nclient = httpx.Client(event_hooks={'response': [raise_on_4xx_5xx]})\n

    Note

    Response event hooks are called before determining if the response body should be read or not.

    If you need access to the response body inside an event hook, you'll need to call response.read(), or for AsyncClients, response.aread().
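
    For example, a response hook that needs the body might look like this. A minimal sketch for the sync client:

    def log_response_body(response):\n    # Read the body so that `response.text` is available inside the hook.\n    response.read()\n    print(f\"Response event hook: {len(response.text)} characters received\")\n\nclient = httpx.Client(event_hooks={'response': [log_response_body]})\n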

    The hooks are also allowed to modify request and response objects.

    from datetime import datetime, timezone\n\ndef add_timestamp(request):\n    request.headers['x-request-timestamp'] = datetime.now(tz=timezone.utc).isoformat()\n\nclient = httpx.Client(event_hooks={'request': [add_timestamp]})\n

    Event hooks must always be set as a list of callables, and you may register multiple event hooks for each type of event.

    As well as being able to set event hooks on instantiating the client, there is also an .event_hooks property, that allows you to inspect and modify the installed hooks.

    client = httpx.Client()\nclient.event_hooks['request'] = [log_request]\nclient.event_hooks['response'] = [log_response, raise_on_4xx_5xx]\n

    Note

    If you are using HTTPX's async support, then you need to be aware that hooks registered with httpx.AsyncClient MUST be async functions, rather than plain functions.

    "},{"location":"advanced/extensions/","title":"Extensions","text":"

    Request and response extensions provide an untyped space where additional information may be added.

    Extensions should be used for features that may not be available on all transports, and that do not fit neatly into the simplified request/response model that the underlying httpcore package uses as its API.

    Several extensions are supported on the request:

    # Request timeouts are actually implemented as an extension on\n# the request, ensuring that they are passed throughout the\n# entire call stack.\nclient = httpx.Client()\nresponse = client.get(\n    \"https://www.example.com\",\n    extensions={\"timeout\": {\"connect\": 5.0}}\n)\nresponse.request.extensions[\"timeout\"]\n{\"connect\": 5.0}\n

    And on the response:

    client = httpx.Client()\nresponse = client.get(\"https://www.example.com\")\nprint(response.extensions[\"http_version\"])  # b\"HTTP/1.1\"\n# Other server responses could have been\n# b\"HTTP/0.9\", b\"HTTP/1.0\", or b\"HTTP/1.1\"\n
    "},{"location":"advanced/extensions/#request-extensions","title":"Request Extensions","text":""},{"location":"advanced/extensions/#trace","title":"\"trace\"","text":"

    The trace extension allows a callback handler to be installed to monitor the internal flow of events within the underlying httpcore transport.

    The simplest way to explain this is with an example:

    import httpx\n\ndef log(event_name, info):\n    print(event_name, info)\n\nclient = httpx.Client()\nresponse = client.get(\"https://www.example.com/\", extensions={\"trace\": log})\n# connection.connect_tcp.started {'host': 'www.example.com', 'port': 443, 'local_address': None, 'timeout': None}\n# connection.connect_tcp.complete {'return_value': <httpcore.backends.sync.SyncStream object at 0x1093f94d0>}\n# connection.start_tls.started {'ssl_context': <ssl.SSLContext object at 0x1093ee750>, 'server_hostname': b'www.example.com', 'timeout': None}\n# connection.start_tls.complete {'return_value': <httpcore.backends.sync.SyncStream object at 0x1093f9450>}\n# http11.send_request_headers.started {'request': <Request [b'GET']>}\n# http11.send_request_headers.complete {'return_value': None}\n# http11.send_request_body.started {'request': <Request [b'GET']>}\n# http11.send_request_body.complete {'return_value': None}\n# http11.receive_response_headers.started {'request': <Request [b'GET']>}\n# http11.receive_response_headers.complete {'return_value': (b'HTTP/1.1', 200, b'OK', [(b'Age', b'553715'), (b'Cache-Control', b'max-age=604800'), (b'Content-Type', b'text/html; charset=UTF-8'), (b'Date', b'Thu, 21 Oct 2021 17:08:42 GMT'), (b'Etag', b'\"3147526947+ident\"'), (b'Expires', b'Thu, 28 Oct 2021 17:08:42 GMT'), (b'Last-Modified', b'Thu, 17 Oct 2019 07:18:26 GMT'), (b'Server', b'ECS (nyb/1DCD)'), (b'Vary', b'Accept-Encoding'), (b'X-Cache', b'HIT'), (b'Content-Length', b'1256')])}\n# http11.receive_response_body.started {'request': <Request [b'GET']>}\n# http11.receive_response_body.complete {'return_value': None}\n# http11.response_closed.started {}\n# http11.response_closed.complete {'return_value': None}\n

    The event_name and info arguments here will be one of the following:

    • {event_type}.{event_name}.started, <dictionary of keyword arguments>
    • {event_type}.{event_name}.complete, {\"return_value\": <...>}
    • {event_type}.{event_name}.failed, {\"exception\": <...>}

    Note that when using async code the handler function passed to \"trace\" must be an async def ... function.

    The following event types are currently exposed...

    Establishing the connection

    • \"connection.connect_tcp\"
    • \"connection.connect_unix_socket\"
    • \"connection.start_tls\"

    HTTP/1.1 events

    • \"http11.send_request_headers\"
    • \"http11.send_request_body\"
    • \"http11.receive_response\"
    • \"http11.receive_response_body\"
    • \"http11.response_closed\"

    HTTP/2 events

    • \"http2.send_connection_init\"
    • \"http2.send_request_headers\"
    • \"http2.send_request_body\"
    • \"http2.receive_response_headers\"
    • \"http2.receive_response_body\"
    • \"http2.response_closed\"

    The exact set of trace events may be subject to change across different versions of httpcore. If you need to rely on a particular set of events it is recommended that you pin installation of the package to a fixed version.

    "},{"location":"advanced/extensions/#sni_hostname","title":"\"sni_hostname\"","text":"

    The server's hostname, which is used to confirm the hostname supplied by the SSL certificate.

    If you want to connect to an explicit IP address rather than using the standard DNS hostname lookup, then you'll need to use this request extension.

    For example:

    # Connect to '185.199.108.153' but use 'www.encode.io' in the Host header,\n# and use 'www.encode.io' when SSL verifying the server hostname.\nclient = httpx.Client()\nheaders = {\"Host\": \"www.encode.io\"}\nextensions = {\"sni_hostname\": \"www.encode.io\"}\nresponse = client.get(\n    \"https://185.199.108.153/path\",\n    headers=headers,\n    extensions=extensions\n)\n
    "},{"location":"advanced/extensions/#timeout","title":"\"timeout\"","text":"

    A dictionary of str: Optional[float] timeout values.

    May include values for 'connect', 'read', 'write', or 'pool'.

    For example:

    # Timeout if a connection takes more than 5 seconds to be established, or if\n# we are blocked waiting on the connection pool for more than 10 seconds.\nclient = httpx.Client()\nresponse = client.get(\n    \"https://www.example.com\",\n    extensions={\"timeout\": {\"connect\": 5.0, \"pool\": 10.0}}\n)\n

    This extension is how the httpx timeouts are implemented, ensuring that the timeout values are associated with the request instance and passed throughout the stack. You shouldn't typically be working with this extension directly, but use the higher level timeout API instead.

    "},{"location":"advanced/extensions/#target","title":"\"target\"","text":"

    The value that is used as the HTTP request target, instead of the URL path.

    This enables support for constructing requests that would otherwise be unsupported.

    • URL paths with non-standard escaping applied.
    • Forward proxy requests using an absolute URI.
    • Tunneling proxy requests using CONNECT with hostname as the target.
    • Server-wide OPTIONS * requests.

    Some examples:

    Using the 'target' extension to send requests without the standard path escaping rules...

    # Typically a request to \"https://www.example.com/test^path\" would\n# connect to \"www.example.com\" and send an HTTP/1.1 request like...\n#\n# GET /test%5Epath HTTP/1.1\n#\n# Using the target extension we can include the literal '^'...\n#\n# GET /test^path HTTP/1.1\n#\n# Note that requests must still be valid HTTP requests.\n# For example including whitespace in the target will raise a `LocalProtocolError`.\nextensions = {\"target\": b\"/test^path\"}\nresponse = httpx.get(\"https://www.example.com\", extensions=extensions)\n

    The target extension also allows server-wide OPTIONS * requests to be constructed...

    # This will send the following request...\n#\n# OPTIONS * HTTP/1.1\nextensions = {\"target\": b\"*\"}\nresponse = httpx.request(\"OPTIONS\", \"https://www.example.com\", extensions=extensions)\n
    "},{"location":"advanced/extensions/#response-extensions","title":"Response Extensions","text":""},{"location":"advanced/extensions/#http_version","title":"\"http_version\"","text":"

    The HTTP version, as bytes. For example, b\"HTTP/1.1\".

    When using HTTP/1.1 the response line includes an explicit version, and the value of this key could feasibly be one of b\"HTTP/0.9\", b\"HTTP/1.0\", or b\"HTTP/1.1\".

    When using HTTP/2 there is no further response versioning included in the protocol, and the value of this key will always be b\"HTTP/2\".

    "},{"location":"advanced/extensions/#reason_phrase","title":"\"reason_phrase\"","text":"

    The reason-phrase of the HTTP response, as bytes. For example b\"OK\". Some servers may include a custom reason phrase, although this is not recommended.

    HTTP/2 onwards does not include a reason phrase on the wire.

    When no key is included, a default based on the status code may be used.
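
    A small illustrative sketch of reading the extension, using .get() since the key may be absent:

    response = httpx.get(\"https://www.example.com\")\nprint(response.extensions.get(\"reason_phrase\"))  # e.g. b\"OK\"\n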

    "},{"location":"advanced/extensions/#stream_id","title":"\"stream_id\"","text":"

    When HTTP/2 is being used the \"stream_id\" response extension can be accessed to determine the ID of the data stream that the response was sent on.
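
    For example, assuming an HTTP/2-enabled client and a server that negotiated HTTP/2, an illustrative sketch:

    client = httpx.Client(http2=True)\nresponse = client.get(\"https://www.example.com\")\nif response.extensions[\"http_version\"] == b\"HTTP/2\":\n    print(response.extensions.get(\"stream_id\"))\n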

    "},{"location":"advanced/extensions/#network_stream","title":"\"network_stream\"","text":"

    The \"network_stream\" extension allows developers to handle HTTP CONNECT and Upgrade requests, by providing an API that steps outside the standard request/response model, and can directly read or write to the network.

    The interface provided by the network stream:

    • read(max_bytes, timeout = None) -> bytes
    • write(buffer, timeout = None)
    • close()
    • start_tls(ssl_context, server_hostname = None, timeout = None) -> NetworkStream
    • get_extra_info(info) -> Any

    This API can be used as the foundation for working with HTTP proxies, WebSocket upgrades, and other advanced use-cases.
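
As a rough, hedged sketch of the Upgrade use-case (the header values and whatever protocol is spoken over the stream afterwards are illustrative assumptions, not part of any particular service):

client = httpx.Client()\nheaders = {\"Connection\": \"upgrade\", \"Upgrade\": \"example-protocol\"}\nrequest = client.build_request(\"GET\", \"https://www.example.com\", headers=headers)\nresponse = client.send(request, stream=True)\nif response.status_code == 101:\n    network_stream = response.extensions[\"network_stream\"]\n    network_stream.write(b\"...\")\n    data = network_stream.read(max_bytes=1024)\n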

    See the network backends documentation for more information on working directly with network streams.

    Extra network information

    The network stream abstraction also allows access to various low-level information that may be exposed by the underlying socket:

    response = httpx.get(\"https://www.example.com\")\nnetwork_stream = response.extensions[\"network_stream\"]\n\nclient_addr = network_stream.get_extra_info(\"client_addr\")\nserver_addr = network_stream.get_extra_info(\"server_addr\")\nprint(\"Client address\", client_addr)\nprint(\"Server address\", server_addr)\n

    The socket SSL information is also available through this interface, although you need to ensure that the underlying connection is still open, in order to access it...

    with httpx.stream(\"GET\", \"https://www.example.com\") as response:\n    network_stream = response.extensions[\"network_stream\"]\n\n    ssl_object = network_stream.get_extra_info(\"ssl_object\")\n    print(\"TLS version\", ssl_object.version())\n
    "},{"location":"advanced/proxies/","title":"Proxies","text":"

    HTTPX supports setting up HTTP proxies via the proxy parameter to be passed on client initialization or top-level API functions like httpx.get(..., proxy=...).

    Diagram of how a proxy works (source: Wikipedia). The left hand side \"Internet\" blob may be your HTTPX client requesting example.com through a proxy."},{"location":"advanced/proxies/#http-proxies","title":"HTTP Proxies","text":"

    To route all traffic (HTTP and HTTPS) to a proxy located at http://localhost:8030, pass the proxy URL to the client...

    with httpx.Client(proxy=\"http://localhost:8030\") as client:\n    ...\n

For more advanced use cases, pass a mounts dict. For example, to route HTTP and HTTPS requests to two different proxies, respectively located at http://localhost:8030 and http://localhost:8031, pass a dict mapping URL patterns to transports:

    proxy_mounts = {\n    \"http://\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n    \"https://\": httpx.HTTPTransport(proxy=\"http://localhost:8031\"),\n}\n\nwith httpx.Client(mounts=proxy_mounts) as client:\n    ...\n

    For detailed information about proxy routing, see the Routing section.

    Gotcha

    In most cases, the proxy URL for the https:// key should use the http:// scheme (that's not a typo!).

    This is because HTTP proxying requires initiating a connection with the proxy server. While it's possible that your proxy supports doing it via HTTPS, most proxies only support doing it via HTTP.

    For more information, see FORWARD vs TUNNEL.

    "},{"location":"advanced/proxies/#authentication","title":"Authentication","text":"

    Proxy credentials can be passed as the userinfo section of the proxy URL. For example:

    with httpx.Client(proxy=\"http://username:password@localhost:8030\") as client:\n    ...\n
    "},{"location":"advanced/proxies/#proxy-mechanisms","title":"Proxy mechanisms","text":"

    Note

    This section describes advanced proxy concepts and functionality.

    "},{"location":"advanced/proxies/#forward-vs-tunnel","title":"FORWARD vs TUNNEL","text":"

    In general, the flow for making an HTTP request through a proxy is as follows:

    1. The client connects to the proxy (initial connection request).
    2. The proxy transfers data to the server on your behalf.

How exactly step 2 is performed depends on which of two proxying mechanisms is used:

    • Forwarding: the proxy makes the request for you, and sends back the response it obtained from the server.
    • Tunnelling: the proxy establishes a TCP connection to the server on your behalf, and the client reuses this connection to send the request and receive the response. This is known as an HTTP Tunnel. This mechanism is how you can access websites that use HTTPS from an HTTP proxy (the client \"upgrades\" the connection to HTTPS by performing the TLS handshake with the server over the TCP connection provided by the proxy).
    "},{"location":"advanced/proxies/#troubleshooting-proxies","title":"Troubleshooting proxies","text":"

    If you encounter issues when setting up proxies, please refer to our Troubleshooting guide.

    "},{"location":"advanced/proxies/#socks","title":"SOCKS","text":"

    In addition to HTTP proxies, httpcore also supports proxies using the SOCKS protocol. This is an optional feature that requires an additional third-party library be installed before use.

    You can install SOCKS support using pip:

    $ pip install httpx[socks]\n

    You can now configure a client to make requests via a proxy using the SOCKS protocol:

    httpx.Client(proxy='socks5://user:pass@host:port')\n
    "},{"location":"advanced/resource-limits/","title":"Resource Limits","text":"

    You can control the connection pool size using the limits keyword argument on the client. It takes instances of httpx.Limits which define:

• max_keepalive_connections, number of allowable keep-alive connections, or None to always allow. (Default 20)
    • max_connections, maximum number of allowable connections, or None for no limits. (Default 100)
    • keepalive_expiry, time limit on idle keep-alive connections in seconds, or None for no limits. (Default 5)
    limits = httpx.Limits(max_keepalive_connections=5, max_connections=10)\nclient = httpx.Client(limits=limits)\n
    "},{"location":"advanced/ssl/","title":"SSL","text":"

    When making a request over HTTPS, HTTPX needs to verify the identity of the requested host. To do this, it uses a bundle of SSL certificates (a.k.a. CA bundle) delivered by a trusted certificate authority (CA).

    "},{"location":"advanced/ssl/#enabling-and-disabling-verification","title":"Enabling and disabling verification","text":"

    By default httpx will verify HTTPS connections, and raise an error for invalid SSL cases...

    >>> httpx.get(\"https://expired.badssl.com/\")\nhttpx.ConnectError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: certificate has expired (_ssl.c:997)\n

    You can disable SSL verification completely and allow insecure requests...

    >>> httpx.get(\"https://expired.badssl.com/\", verify=False)\n<Response [200 OK]>\n
    "},{"location":"advanced/ssl/#configuring-client-instances","title":"Configuring client instances","text":"

    If you're using a Client() instance you should pass any verify=<...> configuration when instantiating the client.

    By default the certifi CA bundle is used for SSL verification.
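
For instance, a minimal sketch of passing the configuration at instantiation time:

import httpx\n\n# Verify against the default certifi CA bundle...\nclient = httpx.Client(verify=True)\n\n# ...or disable verification entirely (not recommended outside of testing).\ninsecure_client = httpx.Client(verify=False)\n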

    For more complex configurations you can pass an SSL Context instance...

import certifi\nimport httpx\nimport ssl\n\n# This SSL context is equivalent to the default `verify=True`.\nctx = ssl.create_default_context(cafile=certifi.where())\nclient = httpx.Client(verify=ctx)\n

    Using the truststore package to support system certificate stores...

    import ssl\nimport truststore\nimport httpx\n\n# Use system certificate stores.\nctx = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)\nclient = httpx.Client(verify=ctx)\n

Loading an alternative certificate verification store using the standard SSL context API...

    import httpx\nimport ssl\n\n# Use an explicitly configured certificate store.\nctx = ssl.create_default_context(cafile=\"path/to/certs.pem\")  # Either cafile or capath.\nclient = httpx.Client(verify=ctx)\n
    "},{"location":"advanced/ssl/#client-side-certificates","title":"Client side certificates","text":"

    Client side certificates allow a remote server to verify the client. They tend to be used within private organizations to authenticate requests to remote servers.

    You can specify client-side certificates, using the .load_cert_chain() API...

    ctx = ssl.create_default_context()\nctx.load_cert_chain(certfile=\"path/to/client.pem\")  # Optionally also keyfile or password.\nclient = httpx.Client(verify=ctx)\n
    "},{"location":"advanced/ssl/#working-with-ssl_cert_file-and-ssl_cert_dir","title":"Working with SSL_CERT_FILE and SSL_CERT_DIR","text":"

    Unlike requests, the httpx package does not automatically pull in the environment variables SSL_CERT_FILE or SSL_CERT_DIR. If you want to use these they need to be enabled explicitly.

    For example...

    # Use `SSL_CERT_FILE` or `SSL_CERT_DIR` if configured.\n# Otherwise default to certifi.\nctx = ssl.create_default_context(\n    cafile=os.environ.get(\"SSL_CERT_FILE\", certifi.where()),\n    capath=os.environ.get(\"SSL_CERT_DIR\"),\n)\nclient = httpx.Client(verify=ctx)\n
    "},{"location":"advanced/ssl/#making-https-requests-to-a-local-server","title":"Making HTTPS requests to a local server","text":"

    When making requests to local servers, such as a development server running on localhost, you will typically be using unencrypted HTTP connections.

    If you do need to make HTTPS connections to a local server, for example to test an HTTPS-only service, you will need to create and use your own certificates. Here's one way to do it...

    1. Use trustme to generate a pair of server key/cert files, and a client cert file.
    2. Pass the server key/cert files when starting your local server. (This depends on the particular web server you're using. For example, Uvicorn provides the --ssl-keyfile and --ssl-certfile options.)
    3. Configure httpx to use the certificates stored in client.pem.
    ctx = ssl.create_default_context(cafile=\"client.pem\")\nclient = httpx.Client(verify=ctx)\n
    "},{"location":"advanced/text-encodings/","title":"Text Encodings","text":"

    When accessing response.text, we need to decode the response bytes into a unicode text representation.

    By default httpx will use \"charset\" information included in the response Content-Type header to determine how the response bytes should be decoded into text.

    In cases where no charset information is included on the response, the default behaviour is to assume \"utf-8\" encoding, which is by far the most widely used text encoding on the internet.

    "},{"location":"advanced/text-encodings/#using-the-default-encoding","title":"Using the default encoding","text":"

    To understand this better let's start by looking at the default behaviour for text decoding...

    import httpx\n# Instantiate a client with the default configuration.\nclient = httpx.Client()\n# Using the client...\nresponse = client.get(...)\nprint(response.encoding)  # This will either print the charset given in\n                          # the Content-Type charset, or else \"utf-8\".\nprint(response.text)  # The text will either be decoded with the Content-Type\n                      # charset, or using \"utf-8\".\n

    This is normally absolutely fine. Most servers will respond with a properly formatted Content-Type header, including a charset encoding. And in most cases where no charset encoding is included, UTF-8 is very likely to be used, since it is so widely adopted.

    "},{"location":"advanced/text-encodings/#using-an-explicit-encoding","title":"Using an explicit encoding","text":"

    In some cases we might be making requests to a site where no character set information is being set explicitly by the server, but we know what the encoding is. In this case it's best to set the default encoding explicitly on the client.

    import httpx\n# Instantiate a client with a Japanese character set as the default encoding.\nclient = httpx.Client(default_encoding=\"shift-jis\")\n# Using the client...\nresponse = client.get(...)\nprint(response.encoding)  # This will either print the charset given in\n                          # the Content-Type charset, or else \"shift-jis\".\nprint(response.text)  # The text will either be decoded with the Content-Type\n                      # charset, or using \"shift-jis\".\n
    "},{"location":"advanced/text-encodings/#using-auto-detection","title":"Using auto-detection","text":"

    In cases where the server is not reliably including character set information, and where we don't know what encoding is being used, we can enable auto-detection to make a best-guess attempt when decoding from bytes to text.

    To use auto-detection you need to set the default_encoding argument to a callable instead of a string. This callable should be a function which takes the input bytes as an argument and returns the character set to use for decoding those bytes to text.

    There are two widely used Python packages which both handle this functionality:

    • chardet - This is a well established package, and is a port of the auto-detection code in Mozilla.
    • charset-normalizer - A newer package, motivated by chardet, with a different approach.

    Let's take a look at installing autodetection using one of these packages...

    $ pip install httpx\n$ pip install chardet\n

    Once chardet is installed, we can configure a client to use character-set autodetection.

    import httpx\nimport chardet\n\ndef autodetect(content):\n    return chardet.detect(content).get(\"encoding\")\n\n# Using a client with character-set autodetection enabled.\nclient = httpx.Client(default_encoding=autodetect)\nresponse = client.get(...)\nprint(response.encoding)  # This will either print the charset given in\n                          # the Content-Type charset, or else the auto-detected\n                          # character set.\nprint(response.text)\n
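
If you prefer charset-normalizer, a similar sketch applies, assuming its chardet-compatible detect() helper:

import httpx\nfrom charset_normalizer import detect\n\ndef autodetect(content):\n    return detect(content).get(\"encoding\")\n\n# Using a client with character-set autodetection enabled.\nclient = httpx.Client(default_encoding=autodetect)\n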
    "},{"location":"advanced/timeouts/","title":"Timeouts","text":"

    HTTPX is careful to enforce timeouts everywhere by default.

    The default behavior is to raise a TimeoutException after 5 seconds of network inactivity.

    "},{"location":"advanced/timeouts/#setting-and-disabling-timeouts","title":"Setting and disabling timeouts","text":"

    You can set timeouts for an individual request:

    # Using the top-level API:\nhttpx.get('http://example.com/api/v1/example', timeout=10.0)\n\n# Using a client instance:\nwith httpx.Client() as client:\n    client.get(\"http://example.com/api/v1/example\", timeout=10.0)\n

    Or disable timeouts for an individual request:

    # Using the top-level API:\nhttpx.get('http://example.com/api/v1/example', timeout=None)\n\n# Using a client instance:\nwith httpx.Client() as client:\n    client.get(\"http://example.com/api/v1/example\", timeout=None)\n
    "},{"location":"advanced/timeouts/#setting-a-default-timeout-on-a-client","title":"Setting a default timeout on a client","text":"

    You can set a timeout on a client instance, which results in the given timeout being used as the default for requests made with this client:

    client = httpx.Client()              # Use a default 5s timeout everywhere.\nclient = httpx.Client(timeout=10.0)  # Use a default 10s timeout everywhere.\nclient = httpx.Client(timeout=None)  # Disable all timeouts by default.\n
    "},{"location":"advanced/timeouts/#fine-tuning-the-configuration","title":"Fine tuning the configuration","text":"

HTTPX also allows you to specify the timeout behavior in more fine-grained detail.

    There are four different types of timeouts that may occur. These are connect, read, write, and pool timeouts.

    • The connect timeout specifies the maximum amount of time to wait until a socket connection to the requested host is established. If HTTPX is unable to connect within this time frame, a ConnectTimeout exception is raised.
    • The read timeout specifies the maximum duration to wait for a chunk of data to be received (for example, a chunk of the response body). If HTTPX is unable to receive data within this time frame, a ReadTimeout exception is raised.
    • The write timeout specifies the maximum duration to wait for a chunk of data to be sent (for example, a chunk of the request body). If HTTPX is unable to send data within this time frame, a WriteTimeout exception is raised.
    • The pool timeout specifies the maximum duration to wait for acquiring a connection from the connection pool. If HTTPX is unable to acquire a connection within this time frame, a PoolTimeout exception is raised. A related configuration here is the maximum number of allowable connections in the connection pool, which is configured by the limits argument.

    You can configure the timeout behavior for any of these values...

    # A client with a 60s timeout for connecting, and a 10s timeout elsewhere.\ntimeout = httpx.Timeout(10.0, connect=60.0)\nclient = httpx.Client(timeout=timeout)\n\nresponse = client.get('http://example.com/')\n
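
All four timeout types can also be given explicit values. For example:

# Explicit values for the connect, read, write and pool timeouts.\ntimeout = httpx.Timeout(connect=60.0, read=10.0, write=10.0, pool=5.0)\nclient = httpx.Client(timeout=timeout)\n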
    "},{"location":"advanced/transports/","title":"Transports","text":"

    HTTPX's Client also accepts a transport argument. This argument allows you to provide a custom Transport object that will be used to perform the actual sending of the requests.

    "},{"location":"advanced/transports/#http-transport","title":"HTTP Transport","text":"

    For some advanced configuration you might need to instantiate a transport class directly, and pass it to the client instance. One example is the local_address configuration which is only available via this low-level API.

    >>> import httpx\n>>> transport = httpx.HTTPTransport(local_address=\"0.0.0.0\")\n>>> client = httpx.Client(transport=transport)\n

Connection retries are also available via this interface. Requests will be retried the given number of times in case an httpx.ConnectError or an httpx.ConnectTimeout occurs, allowing smoother operation under flaky networks. If you need other forms of retry behavior, such as handling read/write errors or reacting to 503 Service Unavailable, consider general-purpose tools such as tenacity (see the sketch further below).

    >>> import httpx\n>>> transport = httpx.HTTPTransport(retries=1)\n>>> client = httpx.Client(transport=transport)\n
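
As a hedged sketch of the tenacity approach mentioned above (the exception type and retry policy here are illustrative assumptions):

import httpx\nfrom tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential\n\n@retry(\n    retry=retry_if_exception_type(httpx.TransportError),\n    stop=stop_after_attempt(3),\n    wait=wait_exponential(),\n)\ndef fetch(url):\n    response = httpx.get(url)\n    response.raise_for_status()\n    return response\n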

    Similarly, instantiating a transport directly provides a uds option for connecting via a Unix Domain Socket that is only available via this low-level API:

    >>> import httpx\n>>> # Connect to the Docker API via a Unix Socket.\n>>> transport = httpx.HTTPTransport(uds=\"/var/run/docker.sock\")\n>>> client = httpx.Client(transport=transport)\n>>> response = client.get(\"http://docker/info\")\n>>> response.json()\n{\"ID\": \"...\", \"Containers\": 4, \"Images\": 74, ...}\n
    "},{"location":"advanced/transports/#wsgi-transport","title":"WSGI Transport","text":"

    You can configure an httpx client to call directly into a Python web application using the WSGI protocol.

    This is particularly useful for two main use-cases:

    • Using httpx as a client inside test cases.
    • Mocking out external services during tests or in dev or staging environments.
    "},{"location":"advanced/transports/#example","title":"Example","text":"

    Here's an example of integrating against a Flask application:

    from flask import Flask\nimport httpx\n\n\napp = Flask(__name__)\n\n@app.route(\"/\")\ndef hello():\n    return \"Hello World!\"\n\ntransport = httpx.WSGITransport(app=app)\nwith httpx.Client(transport=transport, base_url=\"http://testserver\") as client:\n    r = client.get(\"/\")\n    assert r.status_code == 200\n    assert r.text == \"Hello World!\"\n
    "},{"location":"advanced/transports/#configuration","title":"Configuration","text":"

    For some more complex cases you might need to customize the WSGI transport. This allows you to:

    • Inspect 500 error responses rather than raise exceptions by setting raise_app_exceptions=False.
    • Mount the WSGI application at a subpath by setting script_name (WSGI).
    • Use a given client address for requests by setting remote_addr (WSGI).

    For example:

    # Instantiate a client that makes WSGI requests with a client IP of \"1.2.3.4\".\ntransport = httpx.WSGITransport(app=app, remote_addr=\"1.2.3.4\")\nwith httpx.Client(transport=transport, base_url=\"http://testserver\") as client:\n    ...\n
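
A sketch combining the other options is shown below (the subpath used for script_name is an illustrative assumption):

# Mount the app at a subpath, and return 500 responses rather than raising.\ntransport = httpx.WSGITransport(\n    app=app,\n    raise_app_exceptions=False,\n    script_name=\"/submount\",\n)\nwith httpx.Client(transport=transport, base_url=\"http://testserver\") as client:\n    ...\n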
    "},{"location":"advanced/transports/#asgi-transport","title":"ASGI Transport","text":"

    You can configure an httpx client to call directly into an async Python web application using the ASGI protocol.

    This is particularly useful for two main use-cases:

    • Using httpx as a client inside test cases.
    • Mocking out external services during tests or in dev or staging environments.
    "},{"location":"advanced/transports/#example_1","title":"Example","text":"

    Let's take this Starlette application as an example:

    from starlette.applications import Starlette\nfrom starlette.responses import HTMLResponse\nfrom starlette.routing import Route\n\n\nasync def hello(request):\n    return HTMLResponse(\"Hello World!\")\n\n\napp = Starlette(routes=[Route(\"/\", hello)])\n

    We can make requests directly against the application, like so:

    transport = httpx.ASGITransport(app=app)\n\nasync with httpx.AsyncClient(transport=transport, base_url=\"http://testserver\") as client:\n    r = await client.get(\"/\")\n    assert r.status_code == 200\n    assert r.text == \"Hello World!\"\n
    "},{"location":"advanced/transports/#configuration_1","title":"Configuration","text":"

For some more complex cases you might need to customize the ASGI transport. This allows you to:

    • Inspect 500 error responses rather than raise exceptions by setting raise_app_exceptions=False.
    • Mount the ASGI application at a subpath by setting root_path.
    • Use a given client address for requests by setting client.

    For example:

    # Instantiate a client that makes ASGI requests with a client IP of \"1.2.3.4\",\n# on port 123.\ntransport = httpx.ASGITransport(app=app, client=(\"1.2.3.4\", 123))\nasync with httpx.AsyncClient(transport=transport, base_url=\"http://testserver\") as client:\n    ...\n
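
Similarly, a sketch using the other options (the subpath used for root_path is an illustrative assumption):

# Mount the app at a subpath, and return 500 responses rather than raising.\ntransport = httpx.ASGITransport(\n    app=app,\n    raise_app_exceptions=False,\n    root_path=\"/submount\",\n)\nasync with httpx.AsyncClient(transport=transport, base_url=\"http://testserver\") as client:\n    ...\n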

    See the ASGI documentation for more details on the client and root_path keys.

    "},{"location":"advanced/transports/#asgi-startup-and-shutdown","title":"ASGI startup and shutdown","text":"

    It is not in the scope of HTTPX to trigger ASGI lifespan events of your app.

However, it is suggested to use LifespanManager from asgi-lifespan together with AsyncClient.
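
A minimal sketch of that pairing, assuming the asgi-lifespan package is installed and app is the ASGI application from the example above:

import httpx\nfrom asgi_lifespan import LifespanManager\n\nasync with LifespanManager(app):\n    transport = httpx.ASGITransport(app=app)\n    async with httpx.AsyncClient(transport=transport, base_url=\"http://testserver\") as client:\n        r = await client.get(\"/\")\n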

    "},{"location":"advanced/transports/#custom-transports","title":"Custom transports","text":"

    A transport instance must implement the low-level Transport API which deals with sending a single request, and returning a response. You should either subclass httpx.BaseTransport to implement a transport to use with Client, or subclass httpx.AsyncBaseTransport to implement a transport to use with AsyncClient.

    At the layer of the transport API we're using the familiar Request and Response models.

    See the handle_request and handle_async_request docstrings for more details on the specifics of the Transport API.

    A complete example of a custom transport implementation would be:

    import json\nimport httpx\n\nclass HelloWorldTransport(httpx.BaseTransport):\n    \"\"\"\n    A mock transport that always returns a JSON \"Hello, world!\" response.\n    \"\"\"\n\n    def handle_request(self, request):\n        return httpx.Response(200, json={\"text\": \"Hello, world!\"})\n

    Or this example, which uses a custom transport and httpx.Mounts to always redirect http:// requests.

class HTTPSRedirect(httpx.BaseTransport):\n    \"\"\"\n    A transport that always redirects to HTTPS.\n    \"\"\"\n    def handle_request(self, request):\n        url = request.url.copy_with(scheme=\"https\")\n        return httpx.Response(303, headers={\"Location\": str(url)})\n\n# A client where any `http` requests are always redirected to `https`\ntransport = httpx.Mounts({\n    'http://': HTTPSRedirect(),\n    'https://': httpx.HTTPTransport()\n})\nclient = httpx.Client(transport=transport)\n

    A useful pattern here is custom transport classes that wrap the default HTTP implementation. For example...

    class DebuggingTransport(httpx.BaseTransport):\n    def __init__(self, **kwargs):\n        self._wrapper = httpx.HTTPTransport(**kwargs)\n\n    def handle_request(self, request):\n        print(f\">>> {request}\")\n        response = self._wrapper.handle_request(request)\n        print(f\"<<< {response}\")\n        return response\n\n    def close(self):\n        self._wrapper.close()\n\ntransport = DebuggingTransport()\nclient = httpx.Client(transport=transport)\n

    Here's another case, where we're using a round-robin across a number of different proxies...

    class ProxyRoundRobin(httpx.BaseTransport):\n    def __init__(self, proxies, **kwargs):\n        self._transports = [\n            httpx.HTTPTransport(proxy=proxy, **kwargs)\n            for proxy in proxies\n        ]\n        self._idx = 0\n\n    def handle_request(self, request):\n        transport = self._transports[self._idx]\n        self._idx = (self._idx + 1) % len(self._transports)\n        return transport.handle_request(request)\n\n    def close(self):\n        for transport in self._transports:\n            transport.close()\n\nproxies = [\n    httpx.Proxy(\"http://127.0.0.1:8081\"),\n    httpx.Proxy(\"http://127.0.0.1:8082\"),\n    httpx.Proxy(\"http://127.0.0.1:8083\"),\n]\ntransport = ProxyRoundRobin(proxies=proxies)\nclient = httpx.Client(transport=transport)\n
    "},{"location":"advanced/transports/#mock-transports","title":"Mock transports","text":"

    During testing it can often be useful to be able to mock out a transport, and return pre-determined responses, rather than making actual network requests.

    The httpx.MockTransport class accepts a handler function, which can be used to map requests onto pre-determined responses:

    def handler(request):\n    return httpx.Response(200, json={\"text\": \"Hello, world!\"})\n\n\n# Switch to a mock transport, if the TESTING environment variable is set.\nif os.environ.get('TESTING', '').upper() == \"TRUE\":\n    transport = httpx.MockTransport(handler)\nelse:\n    transport = httpx.HTTPTransport()\n\nclient = httpx.Client(transport=transport)\n

    For more advanced use-cases you might want to take a look at either the third-party mocking library, RESPX, or the pytest-httpx library.

    "},{"location":"advanced/transports/#mounting-transports","title":"Mounting transports","text":"

    You can also mount transports against given schemes or domains, to control which transport an outgoing request should be routed via, with the same style used for specifying proxy routing.

import httpx\n\nclass HTTPSRedirectTransport(httpx.BaseTransport):\n    \"\"\"\n    A transport that always redirects to HTTPS.\n    \"\"\"\n\n    def handle_request(self, request):\n        url = request.url.copy_with(scheme=\"https\")\n        return httpx.Response(303, headers={\"Location\": str(url)})\n\n\n# A client where any `http` requests are always redirected to `https`\nmounts = {'http://': HTTPSRedirectTransport()}\nclient = httpx.Client(mounts=mounts)\n

    A couple of other sketches of how you might take advantage of mounted transports...

    Disabling HTTP/2 on a single given domain...

    mounts = {\n    \"all://\": httpx.HTTPTransport(http2=True),\n    \"all://*example.org\": httpx.HTTPTransport()\n}\nclient = httpx.Client(mounts=mounts)\n

    Mocking requests to a given domain:

    # All requests to \"example.org\" should be mocked out.\n# Other requests occur as usual.\ndef handler(request):\n    return httpx.Response(200, json={\"text\": \"Hello, World!\"})\n\nmounts = {\"all://example.org\": httpx.MockTransport(handler)}\nclient = httpx.Client(mounts=mounts)\n

    Adding support for custom schemes:

    # Support URLs like \"file:///Users/sylvia_green/websites/new_client/index.html\"\nmounts = {\"file://\": FileSystemTransport()}\nclient = httpx.Client(mounts=mounts)\n
    "},{"location":"advanced/transports/#routing","title":"Routing","text":"

    HTTPX provides a powerful mechanism for routing requests, allowing you to write complex rules that specify which transport should be used for each request.

    The mounts dictionary maps URL patterns to HTTP transports. HTTPX matches requested URLs against URL patterns to decide which transport should be used, if any. Matching is done from most specific URL patterns (e.g. https://<domain>:<port>) to least specific ones (e.g. https://).

    HTTPX supports routing requests based on scheme, domain, port, or a combination of these.

    "},{"location":"advanced/transports/#wildcard-routing","title":"Wildcard routing","text":"

    Route everything through a transport...

    mounts = {\n    \"all://\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n
    "},{"location":"advanced/transports/#scheme-routing","title":"Scheme routing","text":"

    Route HTTP requests through one transport, and HTTPS requests through another...

    mounts = {\n    \"http://\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n    \"https://\": httpx.HTTPTransport(proxy=\"http://localhost:8031\"),\n}\n
    "},{"location":"advanced/transports/#domain-routing","title":"Domain routing","text":"

    Proxy all requests on domain \"example.com\", let other requests pass through...

    mounts = {\n    \"all://example.com\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n

    Proxy HTTP requests on domain \"example.com\", let HTTPS and other requests pass through...

    mounts = {\n    \"http://example.com\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n

    Proxy all requests to \"example.com\" and its subdomains, let other requests pass through...

    mounts = {\n    \"all://*example.com\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n

    Proxy all requests to strict subdomains of \"example.com\", let \"example.com\" and other requests pass through...

    mounts = {\n    \"all://*.example.com\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n
    "},{"location":"advanced/transports/#port-routing","title":"Port routing","text":"

    Proxy HTTPS requests on port 1234 to \"example.com\"...

    mounts = {\n    \"https://example.com:1234\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n

    Proxy all requests on port 1234...

    mounts = {\n    \"all://*:1234\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n
    "},{"location":"advanced/transports/#no-proxy-support","title":"No-proxy support","text":"

    It is also possible to define requests that shouldn't be routed through the transport.

    To do so, pass None as the proxy URL. For example...

    mounts = {\n    # Route requests through a proxy by default...\n    \"all://\": httpx.HTTPTransport(proxy=\"http://localhost:8031\"),\n    # Except those for \"example.com\".\n    \"all://example.com\": None,\n}\n
    "},{"location":"advanced/transports/#complex-configuration-example","title":"Complex configuration example","text":"

    You can combine the routing features outlined above to build complex proxy routing configurations. For example...

    mounts = {\n    # Route all traffic through a proxy by default...\n    \"all://\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n    # But don't use proxies for HTTPS requests to \"domain.io\"...\n    \"https://domain.io\": None,\n    # And use another proxy for requests to \"example.com\" and its subdomains...\n    \"all://*example.com\": httpx.HTTPTransport(proxy=\"http://localhost:8031\"),\n    # And yet another proxy if HTTP is used,\n    # and the \"internal\" subdomain on port 5550 is requested...\n    \"http://internal.example.com:5550\": httpx.HTTPTransport(proxy=\"http://localhost:8032\"),\n}\n
    "},{"location":"advanced/transports/#environment-variables","title":"Environment variables","text":"

There are also environment variables that can be used to configure the client's mounts dictionary. These are typically used to set up HTTP proxying for clients.

    See documentation on HTTP_PROXY, HTTPS_PROXY, ALL_PROXY and NO_PROXY for more information.
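
For instance, a minimal sketch of how the trust_env setting controls whether those variables are honored:

import httpx\n\n# With trust_env enabled (the default), proxy routing is read from\n# HTTP_PROXY / HTTPS_PROXY / ALL_PROXY / NO_PROXY in the environment.\nclient = httpx.Client(trust_env=True)\n\n# Setting trust_env=False ignores those environment variables.\nisolated_client = httpx.Client(trust_env=False)\n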

    "}]} \ No newline at end of file +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Introduction","text":"HTTPX A next-generation HTTP client for Python.

    HTTPX is a fully featured HTTP client for Python 3, which provides sync and async APIs, and support for both HTTP/1.1 and HTTP/2.

    Install HTTPX using pip:

    $ pip install httpx\n

    Now, let's get started:

    >>> import httpx\n>>> r = httpx.get('https://www.example.org/')\n>>> r\n<Response [200 OK]>\n>>> r.status_code\n200\n>>> r.headers['content-type']\n'text/html; charset=UTF-8'\n>>> r.text\n'<!doctype html>\\n<html>\\n<head>\\n<title>Example Domain</title>...'\n

    Or, using the command-line client.

    # The command line client is an optional dependency.\n$ pip install 'httpx[cli]'\n

    Which now allows us to use HTTPX directly from the command-line...

(Screenshot of the httpx command-line client sending a request and displaying the response.)

    "},{"location":"#features","title":"Features","text":"

    HTTPX builds on the well-established usability of requests, and gives you:

    • A broadly requests-compatible API.
    • Standard synchronous interface, but with async support if you need it.
    • HTTP/1.1 and HTTP/2 support.
    • Ability to make requests directly to WSGI applications or ASGI applications.
    • Strict timeouts everywhere.
    • Fully type annotated.
    • 100% test coverage.

    Plus all the standard features of requests...

    • International Domains and URLs
    • Keep-Alive & Connection Pooling
    • Sessions with Cookie Persistence
    • Browser-style SSL Verification
    • Basic/Digest Authentication
    • Elegant Key/Value Cookies
    • Automatic Decompression
    • Automatic Content Decoding
    • Unicode Response Bodies
    • Multipart File Uploads
    • HTTP(S) Proxy Support
    • Connection Timeouts
    • Streaming Downloads
    • .netrc Support
    • Chunked Requests
    "},{"location":"#documentation","title":"Documentation","text":"

    For a run-through of all the basics, head over to the QuickStart.

    For more advanced topics, see the Advanced section, the async support section, or the HTTP/2 section.

    The Developer Interface provides a comprehensive API reference.

    To find out about tools that integrate with HTTPX, see Third Party Packages.

    "},{"location":"#dependencies","title":"Dependencies","text":"

    The HTTPX project relies on these excellent libraries:

    • httpcore - The underlying transport implementation for httpx.
    • h11 - HTTP/1.1 support.
    • certifi - SSL certificates.
    • idna - Internationalized domain name support.
    • sniffio - Async library autodetection.

    As well as these optional installs:

    • h2 - HTTP/2 support. (Optional, with httpx[http2])
    • socksio - SOCKS proxy support. (Optional, with httpx[socks])
    • rich - Rich terminal support. (Optional, with httpx[cli])
    • click - Command line client support. (Optional, with httpx[cli])
    • brotli or brotlicffi - Decoding for \"brotli\" compressed responses. (Optional, with httpx[brotli])
    • zstandard - Decoding for \"zstd\" compressed responses. (Optional, with httpx[zstd])

    A huge amount of credit is due to requests for the API layout that much of this work follows, as well as to urllib3 for plenty of design inspiration around the lower-level networking details.

    "},{"location":"#installation","title":"Installation","text":"

    Install with pip:

    $ pip install httpx\n

    Or, to include the optional HTTP/2 support, use:

    $ pip install httpx[http2]\n

    To include the optional brotli and zstandard decoders support, use:

    $ pip install httpx[brotli,zstd]\n

HTTPX requires Python 3.9+.

    "},{"location":"api/","title":"Developer Interface","text":""},{"location":"api/#helper-functions","title":"Helper Functions","text":"

    Note

    Only use these functions if you're testing HTTPX in a console or making a small number of requests. Using a Client will enable HTTP/2 and connection pooling for more efficient and long-lived connections.

    httpx.request(method, url, *, params=None, content=None, data=None, files=None, json=None, headers=None, cookies=None, auth=None, proxy=None, timeout=Timeout(timeout=5.0), follow_redirects=False, verify=True, trust_env=True)

    Sends an HTTP request.

    Parameters:

    • method - HTTP method for the new Request object: GET, OPTIONS, HEAD, POST, PUT, PATCH, or DELETE.
    • url - URL for the new Request object.
    • params - (optional) Query parameters to include in the URL, as a string, dictionary, or sequence of two-tuples.
    • content - (optional) Binary content to include in the body of the request, as bytes or a byte iterator.
    • data - (optional) Form data to include in the body of the request, as a dictionary.
    • files - (optional) A dictionary of upload files to include in the body of the request.
    • json - (optional) A JSON serializable object to include in the body of the request.
    • headers - (optional) Dictionary of HTTP headers to include in the request.
    • cookies - (optional) Dictionary of Cookie items to include in the request.
    • auth - (optional) An authentication class to use when sending the request.
    • proxy - (optional) A proxy URL where all the traffic should be routed.
    • timeout - (optional) The timeout configuration to use when sending the request.
    • follow_redirects - (optional) Enables or disables HTTP redirects.
    • verify - (optional) Either True to use an SSL context with the default CA bundle, False to disable verification, or an instance of ssl.SSLContext to use a custom context.
    • trust_env - (optional) Enables or disables usage of environment variables for configuration.

    Returns: Response

    Usage:

    >>> import httpx\n>>> response = httpx.request('GET', 'https://httpbin.org/get')\n>>> response\n<Response [200 OK]>\n
    httpx.get(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends a GET request.

    Parameters: See httpx.request.

    Note that the data, files, json and content parameters are not available on this function, as GET requests should not include a request body.

    httpx.options(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends an OPTIONS request.

    Parameters: See httpx.request.

    Note that the data, files, json and content parameters are not available on this function, as OPTIONS requests should not include a request body.

    httpx.head(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends a HEAD request.

    Parameters: See httpx.request.

    Note that the data, files, json and content parameters are not available on this function, as HEAD requests should not include a request body.

    httpx.post(url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends a POST request.

    Parameters: See httpx.request.

    httpx.put(url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends a PUT request.

    Parameters: See httpx.request.

    httpx.patch(url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)

    Sends a PATCH request.

    Parameters: See httpx.request.

    httpx.delete(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, timeout=Timeout(timeout=5.0), verify=True, trust_env=True)

    Sends a DELETE request.

    Parameters: See httpx.request.

    Note that the data, files, json and content parameters are not available on this function, as DELETE requests should not include a request body.

    httpx.stream(method, url, *, params=None, content=None, data=None, files=None, json=None, headers=None, cookies=None, auth=None, proxy=None, timeout=Timeout(timeout=5.0), follow_redirects=False, verify=True, trust_env=True)

    Alternative to httpx.request() that streams the response body instead of loading it into memory at once.

    Parameters: See httpx.request.

    See also: Streaming Responses
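
A minimal sketch of usage:

import httpx\n\nwith httpx.stream(\"GET\", \"https://www.example.com\") as response:\n    for chunk in response.iter_bytes():\n        ...\n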

    "},{"location":"api/#client","title":"Client","text":"class httpx.Client(*, auth=None, params=None, headers=None, cookies=None, verify=True, cert=None, trust_env=True, http1=True, http2=False, proxy=None, mounts=None, timeout=Timeout(timeout=5.0), follow_redirects=False, limits=Limits(max_connections=100, max_keepalive_connections=20, keepalive_expiry=5.0), max_redirects=20, event_hooks=None, base_url='', transport=None, default_encoding='utf-8')

    An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc.

    It can be shared between threads.

    Usage:

    >>> client = httpx.Client()\n>>> response = client.get('https://example.org')\n

    Parameters:

    • auth - (optional) An authentication class to use when sending requests.
    • params - (optional) Query parameters to include in request URLs, as a string, dictionary, or sequence of two-tuples.
    • headers - (optional) Dictionary of HTTP headers to include when sending requests.
    • cookies - (optional) Dictionary of Cookie items to include when sending requests.
    • verify - (optional) Either True to use an SSL context with the default CA bundle, False to disable verification, or an instance of ssl.SSLContext to use a custom context.
    • http2 - (optional) A boolean indicating if HTTP/2 support should be enabled. Defaults to False.
    • proxy - (optional) A proxy URL where all the traffic should be routed.
    • timeout - (optional) The timeout configuration to use when sending requests.
    • limits - (optional) The limits configuration to use.
    • max_redirects - (optional) The maximum number of redirect responses that should be followed.
    • base_url - (optional) A URL to use as the base when building request URLs.
    • transport - (optional) A transport class to use for sending requests over the network.
    • trust_env - (optional) Enables or disables usage of environment variables for configuration.
    • default_encoding - (optional) The default encoding to use for decoding response text, if no charset information is included in a response Content-Type header. Set to a callable for automatic character set detection. Default: \"utf-8\".
    headers

    HTTP headers to include when sending requests.

    cookies

    Cookie values to include when sending requests.

    params

    Query parameters to include in the URL when sending requests.

    auth

    Authentication class used when none is passed at the request-level.

    See also Authentication.

    request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Build and send a request.

    Equivalent to:

    request = client.build_request(...)\nresponse = client.send(request, ...)\n

    See Client.build_request(), Client.send() and Merging of configuration for how the various parameters are merged with client-level configuration.

    get(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a GET request.

    Parameters: See httpx.request.

    head(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a HEAD request.

    Parameters: See httpx.request.

    options(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send an OPTIONS request.

    Parameters: See httpx.request.

    post(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a POST request.

    Parameters: See httpx.request.

    put(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a PUT request.

    Parameters: See httpx.request.

    patch(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a PATCH request.

    Parameters: See httpx.request.

    delete(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a DELETE request.

    Parameters: See httpx.request.

    stream(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Alternative to httpx.request() that streams the response body instead of loading it into memory at once.

    Parameters: See httpx.request.

    See also: Streaming Responses

    build_request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, timeout=, extensions=None)

    Build and return a request instance.

    • The params, headers and cookies arguments are merged with any values set on the client.
    • The url argument is merged with any base_url set on the client.

    See also: Request instances
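
A brief sketch of building a request with client-level configuration merged in, then sending it:

client = httpx.Client(base_url=\"https://www.example.com\", headers={\"User-Agent\": \"my-app\"})\nrequest = client.build_request(\"GET\", \"/path\", params={\"q\": \"search\"})\nresponse = client.send(request)\n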

    send(self, request, *, stream=False, auth=, follow_redirects=)

    Send a request.

    The request is sent as-is, unmodified.

    Typically you'll want to build one with Client.build_request() so that any client-level configuration is merged into the request, but passing an explicit httpx.Request() is supported as well.

    See also: Request instances

    close(self)

    Close transport and proxies.

    "},{"location":"api/#asyncclient","title":"AsyncClient","text":"class httpx.AsyncClient(*, auth=None, params=None, headers=None, cookies=None, verify=True, cert=None, http1=True, http2=False, proxy=None, mounts=None, timeout=Timeout(timeout=5.0), follow_redirects=False, limits=Limits(max_connections=100, max_keepalive_connections=20, keepalive_expiry=5.0), max_redirects=20, event_hooks=None, base_url='', transport=None, trust_env=True, default_encoding='utf-8')

    An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc.

    It can be shared between tasks.

    Usage:

    >>> async with httpx.AsyncClient() as client:\n>>>     response = await client.get('https://example.org')\n

    Parameters:

    • auth - (optional) An authentication class to use when sending requests.
    • params - (optional) Query parameters to include in request URLs, as a string, dictionary, or sequence of two-tuples.
    • headers - (optional) Dictionary of HTTP headers to include when sending requests.
    • cookies - (optional) Dictionary of Cookie items to include when sending requests.
    • verify - (optional) Either True to use an SSL context with the default CA bundle, False to disable verification, or an instance of ssl.SSLContext to use a custom context.
    • http2 - (optional) A boolean indicating if HTTP/2 support should be enabled. Defaults to False.
    • proxy - (optional) A proxy URL where all the traffic should be routed.
    • timeout - (optional) The timeout configuration to use when sending requests.
    • limits - (optional) The limits configuration to use.
    • max_redirects - (optional) The maximum number of redirect responses that should be followed.
    • base_url - (optional) A URL to use as the base when building request URLs.
    • transport - (optional) A transport class to use for sending requests over the network.
    • trust_env - (optional) Enables or disables usage of environment variables for configuration.
    • default_encoding - (optional) The default encoding to use for decoding response text, if no charset information is included in a response Content-Type header. Set to a callable for automatic character set detection. Default: \"utf-8\".
    headers

    HTTP headers to include when sending requests.

    cookies

    Cookie values to include when sending requests.

    params

    Query parameters to include in the URL when sending requests.

    auth

    Authentication class used when none is passed at the request-level.

    See also Authentication.

    async request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Build and send a request.

    Equivalent to:

    request = client.build_request(...)\nresponse = await client.send(request, ...)\n

    See AsyncClient.build_request(), AsyncClient.send() and Merging of configuration for how the various parameters are merged with client-level configuration.

    async get(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a GET request.

    Parameters: See httpx.request.

    async head(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a HEAD request.

    Parameters: See httpx.request.

    async options(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send an OPTIONS request.

    Parameters: See httpx.request.

    async post(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a POST request.

    Parameters: See httpx.request.

    async put(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a PUT request.

    Parameters: See httpx.request.

    async patch(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a PATCH request.

    Parameters: See httpx.request.

    async delete(self, url, *, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Send a DELETE request.

    Parameters: See httpx.request.

    stream(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=, follow_redirects=, timeout=, extensions=None)

    Alternative to httpx.request() that streams the response body instead of loading it into memory at once.

    Parameters: See httpx.request.

    See also: Streaming Responses

    build_request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, timeout=, extensions=None)

    Build and return a request instance.

    • The params, headers and cookies arguments are merged with any values set on the client.
    • The url argument is merged with any base_url set on the client.

    See also: Request instances

    async send(self, request, *, stream=False, auth=, follow_redirects=)

    Send a request.

    The request is sent as-is, unmodified.

    Typically you'll want to build one with AsyncClient.build_request() so that any client-level configuration is merged into the request, but passing an explicit httpx.Request() is supported as well.

    See also: Request instances

    async aclose(self)

    Close transport and proxies.

    "},{"location":"api/#response","title":"Response","text":"

    An HTTP response.

    • def __init__(...)
    • .status_code - int
    • .reason_phrase - str
    • .http_version - \"HTTP/2\" or \"HTTP/1.1\"
    • .url - URL
    • .headers - Headers
    • .content - bytes
    • .text - str
    • .encoding - str
    • .is_redirect - bool
    • .request - Request
    • .next_request - Optional[Request]
    • .cookies - Cookies
    • .history - List[Response]
    • .elapsed - timedelta
• The amount of time elapsed between sending the request and calling close() on the corresponding response received for that request. Use total_seconds() to correctly get the total elapsed seconds.
    • def .raise_for_status() - Response
    • def .json() - Any
    • def .read() - bytes
    • def .iter_raw([chunk_size]) - bytes iterator
    • def .iter_bytes([chunk_size]) - bytes iterator
    • def .iter_text([chunk_size]) - text iterator
    • def .iter_lines() - text iterator
    • def .close() - None
    • def .next() - Response
    • def .aread() - bytes
    • def .aiter_raw([chunk_size]) - async bytes iterator
    • def .aiter_bytes([chunk_size]) - async bytes iterator
    • def .aiter_text([chunk_size]) - async text iterator
    • def .aiter_lines() - async text iterator
    • def .aclose() - None
    • def .anext() - Response
    "},{"location":"api/#request","title":"Request","text":"

    An HTTP request. Can be constructed explicitly for more control over exactly what gets sent over the wire.

    >>> request = httpx.Request(\"GET\", \"https://example.org\", headers={'host': 'example.org'})\n>>> response = client.send(request)\n
    • def __init__(method, url, [params], [headers], [cookies], [content], [data], [files], [json], [stream])
    • .method - str
    • .url - URL
• .content - bytes, byte iterator, or async byte iterator
    • .headers - Headers
    • .cookies - Cookies
    "},{"location":"api/#url","title":"URL","text":"

    A normalized, IDNA supporting URL.

    >>> url = URL(\"https://example.org/\")\n>>> url.host\n'example.org'\n
    • def __init__(url, **kwargs)
    • .scheme - str
    • .authority - str
    • .host - str
    • .port - int
    • .path - str
    • .query - str
    • .raw_path - str
    • .fragment - str
    • .is_ssl - bool
    • .is_absolute_url - bool
    • .is_relative_url - bool
    • def .copy_with([scheme], [authority], [path], [query], [fragment]) - URL
    "},{"location":"api/#headers","title":"Headers","text":"

    A case-insensitive multi-dict.

    >>> headers = Headers({'Content-Type': 'application/json'})\n>>> headers['content-type']\n'application/json'\n
    • def __init__(self, headers, encoding=None)
    • def copy() - Headers
    "},{"location":"api/#cookies","title":"Cookies","text":"

    A dict-like cookie store.

    >>> cookies = Cookies()\n>>> cookies.set(\"name\", \"value\", domain=\"example.org\")\n
    • def __init__(cookies: [dict, Cookies, CookieJar])
    • .jar - CookieJar
    • def extract_cookies(response)
    • def set_cookie_header(request)
    • def set(name, value, [domain], [path])
    • def get(name, [domain], [path])
    • def delete(name, [domain], [path])
    • def clear([domain], [path])
    • Standard mutable mapping interface
    "},{"location":"api/#proxy","title":"Proxy","text":"

    A configuration of the proxy server.

    >>> proxy = Proxy(\"http://proxy.example.com:8030\")\n>>> client = Client(proxy=proxy)\n
    • def __init__(url, [ssl_context], [auth], [headers])
    • .url - URL
    • .auth - tuple[str, str]
    • .headers - Headers
    • .ssl_context - SSLContext
    "},{"location":"async/","title":"Async Support","text":"

    HTTPX offers a standard synchronous API by default, but also gives you the option of an async client if you need it.

    Async is a concurrency model that is far more efficient than multi-threading, and can provide significant performance benefits and enable the use of long-lived network connections such as WebSockets.

    If you're working with an async web framework then you'll also want to use an async client for sending outgoing HTTP requests.

    "},{"location":"async/#making-async-requests","title":"Making Async requests","text":"

    To make asynchronous requests, you'll need an AsyncClient.

    >>> async with httpx.AsyncClient() as client:\n...     r = await client.get('https://www.example.com/')\n...\n>>> r\n<Response [200 OK]>\n

    Tip

    Use IPython or Python 3.9+ with python -m asyncio to try this code interactively, as they support executing async/await expressions in the console.

    "},{"location":"async/#api-differences","title":"API Differences","text":"

    If you're using an async client then there are a few bits of API that use async methods.

    "},{"location":"async/#making-requests","title":"Making requests","text":"

    The request methods are all async, so you should use response = await client.get(...) style for all of the following:

    • AsyncClient.get(url, ...)
    • AsyncClient.options(url, ...)
    • AsyncClient.head(url, ...)
    • AsyncClient.post(url, ...)
    • AsyncClient.put(url, ...)
    • AsyncClient.patch(url, ...)
    • AsyncClient.delete(url, ...)
    • AsyncClient.request(method, url, ...)
    • AsyncClient.send(request, ...)
    "},{"location":"async/#opening-and-closing-clients","title":"Opening and closing clients","text":"

    Use async with httpx.AsyncClient() if you want a context-managed client...

    async with httpx.AsyncClient() as client:\n    ...\n

    Warning

    In order to get the most benefit from connection pooling, make sure you're not instantiating multiple client instances - for example by using async with inside a \"hot loop\". This can be achieved either by having a single scoped client that's passed throughout wherever it's needed, or by having a single global client instance.

    Alternatively, use await client.aclose() if you want to close a client explicitly:

    client = httpx.AsyncClient()\n...\nawait client.aclose()\n
    "},{"location":"async/#streaming-responses","title":"Streaming responses","text":"

    The AsyncClient.stream(method, url, ...) method is an async context block.

    >>> client = httpx.AsyncClient()\n>>> async with client.stream('GET', 'https://www.example.com/') as response:\n...     async for chunk in response.aiter_bytes():\n...         ...\n

    The async response streaming methods are:

    • Response.aread() - For conditionally reading a response inside a stream block (see the sketch after this list).
    • Response.aiter_bytes() - For streaming the response content as bytes.
    • Response.aiter_text() - For streaming the response content as text.
    • Response.aiter_lines() - For streaming the response content as lines of text.
    • Response.aiter_raw() - For streaming the raw response bytes, without applying content decoding.
    • Response.aclose() - For closing the response. You don't usually need this, since the .stream block closes the response automatically on exit.
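
    For example, a minimal sketch of conditionally reading a response inside an async stream block, mirroring the synchronous example from the QuickStart (TOO_LONG is a placeholder threshold):

    client = httpx.AsyncClient()\nasync with client.stream('GET', 'https://www.example.com/') as response:\n    if int(response.headers['Content-Length']) < TOO_LONG:\n        # Conditionally load the body, making `response.text` available.\n        await response.aread()\n        print(response.text)\n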

    For situations when context block usage is not practical, it is possible to enter \"manual mode\" by sending a Request instance using client.send(..., stream=True).

    Example in the context of forwarding the response to a streaming web endpoint with Starlette:

    import httpx\nfrom starlette.background import BackgroundTask\nfrom starlette.responses import StreamingResponse\n\nclient = httpx.AsyncClient()\n\nasync def home(request):\n    req = client.build_request(\"GET\", \"https://www.example.com/\")\n    r = await client.send(req, stream=True)\n    return StreamingResponse(r.aiter_text(), background=BackgroundTask(r.aclose))\n

    Warning

    When using this \"manual streaming mode\", it is your duty as a developer to make sure that Response.aclose() is called eventually. Failing to do so would leave connections open, most likely resulting in resource leaks down the line.

    "},{"location":"async/#streaming-requests","title":"Streaming requests","text":"

    When sending a streaming request body with an AsyncClient instance, you should use an async bytes generator instead of a bytes generator:

    async def upload_bytes():\n    ...  # yield byte content\n\nawait client.post(url, content=upload_bytes())\n
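
    As a fuller sketch, assuming you want to stream a local file as the request body (the filename is illustrative, and the synchronous file reads are kept only for brevity):

    async def upload_bytes():\n    # Yield the request body in chunks, rather than loading it all into memory.\n    with open(\"example.bin\", \"rb\") as f:\n        while chunk := f.read(64 * 1024):\n            yield chunk\n\nawait client.post(url, content=upload_bytes())\n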
    "},{"location":"async/#explicit-transport-instances","title":"Explicit transport instances","text":"

    When instantiating a transport instance directly, you need to use httpx.AsyncHTTPTransport.

    For instance:

    >>> import httpx\n>>> transport = httpx.AsyncHTTPTransport(retries=1)\n>>> async with httpx.AsyncClient(transport=transport) as client:\n>>>     ...\n
    "},{"location":"async/#supported-async-environments","title":"Supported async environments","text":"

    HTTPX supports either asyncio or trio as an async environment.

    It will auto-detect which of those two to use as the backend for socket operations and concurrency primitives.

    "},{"location":"async/#asyncio","title":"AsyncIO","text":"

    AsyncIO is Python's built-in library for writing concurrent code with the async/await syntax.

    import asyncio\nimport httpx\n\nasync def main():\n    async with httpx.AsyncClient() as client:\n        response = await client.get('https://www.example.com/')\n        print(response)\n\nasyncio.run(main())\n
    "},{"location":"async/#trio","title":"Trio","text":"

    Trio is an alternative async library, designed around the principles of structured concurrency.

    import httpx\nimport trio\n\nasync def main():\n    async with httpx.AsyncClient() as client:\n        response = await client.get('https://www.example.com/')\n        print(response)\n\ntrio.run(main)\n

    Important

    The trio package must be installed to use the Trio backend.

    "},{"location":"async/#anyio","title":"AnyIO","text":"

    AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio or trio. It blends in with native libraries of your chosen backend (defaults to asyncio).

    import httpx\nimport anyio\n\nasync def main():\n    async with httpx.AsyncClient() as client:\n        response = await client.get('https://www.example.com/')\n        print(response)\n\nanyio.run(main, backend='trio')\n
    "},{"location":"async/#calling-into-python-web-apps","title":"Calling into Python Web Apps","text":"

    For details on calling directly into ASGI applications, see the ASGITransport docs.

    "},{"location":"code_of_conduct/","title":"Code of Conduct","text":"

    We expect contributors to our projects and online spaces to follow the Python Software Foundation\u2019s Code of Conduct.

    The Python community is made up of members from around the globe with a diverse set of skills, personalities, and experiences. It is through these differences that our community experiences great successes and continued growth. When you're working with members of the community, this Code of Conduct will help steer your interactions and keep Python a positive, successful, and growing community.

    "},{"location":"code_of_conduct/#our-community","title":"Our Community","text":"

    Members of the Python community are open, considerate, and respectful. Behaviours that reinforce these values contribute to a positive environment, and include:

    • Being open. Members of the community are open to collaboration, whether it's on PEPs, patches, problems, or otherwise.
    • Focusing on what is best for the community. We're respectful of the processes set forth in the community, and we work within them.
    • Acknowledging time and effort. We're respectful of the volunteer efforts that permeate the Python community. We're thoughtful when addressing the efforts of others, keeping in mind that often times the labor was completed simply for the good of the community.
    • Being respectful of differing viewpoints and experiences. We're receptive to constructive comments and criticism, as the experiences and skill sets of other members contribute to the whole of our efforts.
    • Showing empathy towards other community members. We're attentive in our communications, whether in person or online, and we're tactful when approaching differing views.
    • Being considerate. Members of the community are considerate of their peers -- other Python users.
    • Being respectful. We're respectful of others, their positions, their skills, their commitments, and their efforts.
    • Gracefully accepting constructive criticism. When we disagree, we are courteous in raising our issues.
    • Using welcoming and inclusive language. We're accepting of all who wish to take part in our activities, fostering an environment where anyone can participate and everyone can make a difference.
    "},{"location":"code_of_conduct/#our-standards","title":"Our Standards","text":"

    Every member of our community has the right to have their identity respected. The Python community is dedicated to providing a positive experience for everyone, regardless of age, gender identity and expression, sexual orientation, disability, physical appearance, body size, ethnicity, nationality, race, or religion (or lack thereof), education, or socio-economic status.

    "},{"location":"code_of_conduct/#inappropriate-behavior","title":"Inappropriate Behavior","text":"

    Examples of unacceptable behavior by participants include:

    • Harassment of any participants in any form
    • Deliberate intimidation, stalking, or following
    • Logging or taking screenshots of online activity for harassment purposes
    • Publishing others' private information, such as a physical or electronic address, without explicit permission
    • Violent threats or language directed against another person
    • Incitement of violence or harassment towards any individual, including encouraging a person to commit suicide or to engage in self-harm
    • Creating additional online accounts in order to harass another person or circumvent a ban
    • Sexual language and imagery in online communities or in any conference venue, including talks
    • Insults, put downs, or jokes that are based upon stereotypes, that are exclusionary, or that hold others up for ridicule
    • Excessive swearing
    • Unwelcome sexual attention or advances
    • Unwelcome physical contact, including simulated physical contact (eg, textual descriptions like \"hug\" or \"backrub\") without consent or after a request to stop
    • Pattern of inappropriate social contact, such as requesting/assuming inappropriate levels of intimacy with others
    • Sustained disruption of online community discussions, in-person presentations, or other in-person events
    • Continued one-on-one communication after requests to cease
    • Other conduct that is inappropriate for a professional audience including people of many different backgrounds

    Community members asked to stop any inappropriate behavior are expected to comply immediately.

    "},{"location":"code_of_conduct/#enforcement","title":"Enforcement","text":"

    We take Code of Conduct violations seriously, and will act to ensure our spaces are welcoming, inclusive, and professional environments to communicate in.

    If you need to raise a Code of Conduct report, you may do so privately by email to tom@tomchristie.com.

    Reports will be treated confidentially.

    Alternately you may make a report to the Python Software Foundation.

    "},{"location":"compatibility/","title":"Requests Compatibility Guide","text":"

    HTTPX aims to be broadly compatible with the requests API, although there are a few design differences in places.

    This documentation outlines places where the API differs...

    "},{"location":"compatibility/#redirects","title":"Redirects","text":"

    Unlike requests, HTTPX does not follow redirects by default.

    We differ in behaviour here because auto-redirects can easily mask unnecessary network calls being made.

    You can still enable behaviour to automatically follow redirects, but you need to do so explicitly...

    response = client.get(url, follow_redirects=True)\n

    Or else instantiate a client, with redirect following enabled by default...

    client = httpx.Client(follow_redirects=True)\n
    "},{"location":"compatibility/#client-instances","title":"Client instances","text":"

    The HTTPX equivalent of requests.Session is httpx.Client.

    session = requests.Session(**kwargs)\n

    is generally equivalent to

    client = httpx.Client(**kwargs)\n
    "},{"location":"compatibility/#request-urls","title":"Request URLs","text":"

    Accessing response.url will return a URL instance, rather than a string.

    Use str(response.url) if you need a string instance.

    "},{"location":"compatibility/#determining-the-next-redirect-request","title":"Determining the next redirect request","text":"

    The requests library exposes an attribute response.next, which can be used to obtain the next redirect request.

    session = requests.Session()\nrequest = requests.Request(\"GET\", ...).prepare()\nwhile request is not None:\n    response = session.send(request, allow_redirects=False)\n    request = response.next\n

    In HTTPX, this attribute is instead named response.next_request. For example:

    client = httpx.Client()\nrequest = client.build_request(\"GET\", ...)\nwhile request is not None:\n    response = client.send(request)\n    request = response.next_request\n
    "},{"location":"compatibility/#request-content","title":"Request Content","text":"

    For uploading raw text or binary content we prefer to use a content parameter, in order to better separate this usage from the case of uploading form data.

    For example, using content=... to upload raw content:

    # Uploading text, bytes, or a bytes iterator.\nhttpx.post(..., content=b\"Hello, world\")\n

    And using data=... to send form data:

    # Uploading form data.\nhttpx.post(..., data={\"message\": \"Hello, world\"})\n

    Using data=<text/byte content> will raise a deprecation warning, and this usage is expected to be fully removed with the HTTPX 1.0 release.

    "},{"location":"compatibility/#upload-files","title":"Upload files","text":"

    HTTPX strictly enforces that upload files must be opened in binary mode, in order to avoid character encoding issues that can result from attempting to upload files opened in text mode.

    "},{"location":"compatibility/#content-encoding","title":"Content encoding","text":"

    HTTPX uses utf-8 for encoding str request bodies. For example, when using content=<str> the request body will be encoded to utf-8 before being sent over the wire. This differs from Requests, which uses latin1. If you need an explicit encoding, pass encoded bytes explicitly, e.g. content=<str>.encode(\"latin1\"). For response bodies, if the server didn't send an explicit encoding then HTTPX will do its best to figure out an appropriate one, using charset_normalizer to guess the encoding to use for decoding the response. If that guess fails, or if the content is less than 32 octets, the body will be decoded using utf-8 with the error=\"replace\" decoder strategy.
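
    As a minimal sketch, assuming you need a latin1-encoded request body rather than the utf-8 default (the URL is illustrative):

    # Request bodies given as `str` are encoded as utf-8 by default.\nhttpx.post(\"https://www.example.com/\", content=\"text body\")\n\n# Pass pre-encoded bytes explicitly if you need a different encoding.\nhttpx.post(\"https://www.example.com/\", content=\"text body\".encode(\"latin1\"))\n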

    "},{"location":"compatibility/#cookies","title":"Cookies","text":"

    If using a client instance, then cookies should always be set on the client rather than on a per-request basis.

    This usage is supported:

    client = httpx.Client(cookies=...)\nclient.post(...)\n

    This usage is not supported:

    client = httpx.Client()\nclient.post(..., cookies=...)\n

    We prefer enforcing a stricter API here because it provides clearer expectations around cookie persistence, particularly when redirects occur.

    "},{"location":"compatibility/#status-codes","title":"Status Codes","text":"

    In our documentation we prefer the uppercased versions, such as codes.NOT_FOUND, but also provide lower-cased versions for API compatibility with requests.

    Requests includes various synonyms for status codes that HTTPX does not support.

    "},{"location":"compatibility/#streaming-responses","title":"Streaming responses","text":"

    HTTPX provides a .stream() interface rather than using stream=True. This ensures that streaming responses are always properly closed outside of the stream block, and makes it visually clearer at which points streaming I/O APIs may be used with a response.

    For example:

    with httpx.stream(\"GET\", \"https://www.example.com\") as response:\n    ...\n

    Within a stream() block request data is made available with:

    • .iter_bytes() - Instead of response.iter_content()
    • .iter_text() - Instead of response.iter_content(decode_unicode=True)
    • .iter_lines() - Corresponding to response.iter_lines()
    • .iter_raw() - Use this instead of response.raw
    • .read() - Read the entire response body, making response.text and response.content available.
    "},{"location":"compatibility/#timeouts","title":"Timeouts","text":"

    HTTPX defaults to including reasonable timeouts for all network operations, while Requests has no timeouts by default.

    To get the same behavior as Requests, set the timeout parameter to None:

    httpx.get('https://www.example.com', timeout=None)\n
    "},{"location":"compatibility/#proxy-keys","title":"Proxy keys","text":"

    HTTPX uses the mounts argument for HTTP proxying and transport routing. It can do much more than configure proxies, allowing you to control which transport is used for any given route. For more detailed documentation, see Mounting Transports.

    When using httpx.Client(mounts={...}) to map to a selection of different transports, we use full URL schemes, such as mounts={\"http://\": ..., \"https://\": ...}.

    This is different to the requests usage of proxies={\"http\": ..., \"https\": ...}.

    This change is for better consistency with more complex mappings, that might also include domain names, such as mounts={\"all://\": ..., \"all://www.example.com\": None}, which maps all requests onto a proxy, except for requests to \"www.example.com\", which have an explicit exclusion.
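
    For example, a minimal sketch of routing all traffic through a proxy while excluding a single host (the proxy URL is illustrative):

    client = httpx.Client(\n    mounts={\n        \"all://\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n        \"all://www.example.com\": None,\n    }\n)\n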

    Also note that requests.Session.request(...) allows a proxies=... parameter, whereas httpx.Client.request(...) does not allow mounts=....

    "},{"location":"compatibility/#ssl-configuration","title":"SSL configuration","text":"

    When using a Client instance, the ssl configurations should always be passed on client instantiation, rather than passed to the request method.

    If you need more than one different SSL configuration, you should use different client instances for each SSL configuration.
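
    A minimal sketch, assuming you want one client verifying against a custom CA bundle and another using the defaults (the path is illustrative):

    import ssl\n\nctx = ssl.create_default_context(cafile=\"/path/to/ca-bundle.pem\")\ninternal_client = httpx.Client(verify=ctx)\npublic_client = httpx.Client()\n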

    "},{"location":"compatibility/#request-body-on-http-methods","title":"Request body on HTTP methods","text":"

    The HTTP GET, DELETE, HEAD, and OPTIONS methods are specified as not supporting a request body. To stay in line with this, the .get, .delete, .head and .options functions do not support content, files, data, or json arguments.

    If you really do need to send request data using these HTTP methods you should use the generic .request function instead.

    httpx.request(\n  method=\"DELETE\",\n  url=\"https://www.example.com/\",\n  content=b'A request body on a DELETE request.'\n)\n
    "},{"location":"compatibility/#checking-for-success-and-failure-responses","title":"Checking for success and failure responses","text":"

    We don't support response.is_ok since the naming is ambiguous there, and might incorrectly imply an equivalence to response.status_code == codes.OK. Instead we provide the response.is_success property, which can be used to check for a 2xx response.
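
    For example (a minimal sketch):

    response = httpx.get(\"https://www.example.com/\")\nif response.is_success:\n    # Any 2xx status code.\n    ...\n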

    "},{"location":"compatibility/#request-instantiation","title":"Request instantiation","text":"

    There is no notion of prepared requests in HTTPX. If you need to customize request instantiation, see Request instances.

    Besides, httpx.Request() does not support the auth, timeout, follow_redirects, mounts, verify and cert parameters. However these are available in httpx.request, httpx.get, httpx.post etc., as well as on Client instances.
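
    A minimal sketch of customising a request before it is sent, using build_request() and send() in place of prepared requests (the header name is illustrative):

    client = httpx.Client()\nrequest = client.build_request(\"GET\", \"https://www.example.com/\")\n# Modify the request as needed before sending it.\nrequest.headers[\"X-Example\"] = \"value\"\nresponse = client.send(request)\n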

    "},{"location":"compatibility/#mocking","title":"Mocking","text":"

    If you need to mock HTTPX the same way that test utilities like responses and requests-mock do for requests, see RESPX.

    "},{"location":"compatibility/#caching","title":"Caching","text":"

    If you use cachecontrol or requests-cache to add HTTP Caching support to the requests library, you can use Hishel for HTTPX.

    "},{"location":"compatibility/#networking-layer","title":"Networking layer","text":"

    requests defers most of its HTTP networking code to the excellent urllib3 library.

    On the other hand, HTTPX uses HTTPCore as its core HTTP networking layer, which is a different project than urllib3.

    "},{"location":"compatibility/#query-parameters","title":"Query Parameters","text":"

    requests omits params whose values are None (e.g. requests.get(..., params={\"foo\": None})). This is not supported by HTTPX.

    For both query params (params=) and form data (data=), requests supports sending a list of tuples (e.g. requests.get(..., params=[('key1', 'value1'), ('key1', 'value2')])). This is not supported by HTTPX. Instead, use a dictionary with lists as values. E.g.: httpx.get(..., params={'key1': ['value1', 'value2']}) or with form data: httpx.post(..., data={'key1': ['value1', 'value2']}).
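
    For example, a minimal sketch of the HTTPX equivalent of the requests list-of-tuples style:

    # Sends a query string of ?key1=value1&key1=value2\nhttpx.get(\"https://www.example.com/\", params={\"key1\": [\"value1\", \"value2\"]})\n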

    "},{"location":"compatibility/#event-hooks","title":"Event Hooks","text":"

    requests allows event hooks to mutate Request and Response objects. See examples given in the documentation for requests.

    In HTTPX, event hooks may access properties of requests and responses, but event hook callbacks cannot mutate the original request/response.
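
    For example, a minimal sketch of read-only event hooks that log each request and response:

    def log_request(request):\n    print(f\"Request: {request.method} {request.url}\")\n\ndef log_response(response):\n    print(f\"Response: {response.status_code} for {response.request.url}\")\n\nclient = httpx.Client(\n    event_hooks={\"request\": [log_request], \"response\": [log_response]}\n)\n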

    If you are looking for more control, consider checking out Custom Transports.

    "},{"location":"compatibility/#exceptions-and-errors","title":"Exceptions and Errors","text":"

    The requests exception hierarchy is slightly different to the httpx exception hierarchy. requests exposes a top-level RequestException, whereas httpx exposes a top-level HTTPError. See the exceptions exposed by requests here. See the httpx error hierarchy here.

    "},{"location":"contributing/","title":"Contributing","text":"

    Thank you for being interested in contributing to HTTPX. There are many ways you can contribute to the project:

    • Try HTTPX and report bugs/issues you find
    • Implement new features
    • Review Pull Requests of others
    • Write documentation
    • Participate in discussions
    "},{"location":"contributing/#reporting-bugs-or-other-issues","title":"Reporting Bugs or Other Issues","text":"

    Found something that HTTPX should support? Stumbled upon some unexpected behaviour?

    Contributions should generally start out with a discussion. Possible bugs may be raised as a \"Potential Issue\" discussion, feature requests may be raised as an \"Ideas\" discussion. We can then determine if the discussion needs to be escalated into an \"Issue\" or not, or if we'd consider a pull request.

    Try to be as descriptive as you can, and in the case of a bug report, provide as much information as possible, such as:

    • OS platform
    • Python version
    • Installed dependencies and versions (python -m pip freeze)
    • Code snippet
    • Error traceback

    You should always try to reduce any examples to the simplest possible case that demonstrates the issue.

    Some possibly useful tips for narrowing down potential issues...

    • Does the issue exist on HTTP/1.1, or HTTP/2, or both?
    • Does the issue exist with Client, AsyncClient, or both?
    • When using AsyncClient does the issue exist when using asyncio or trio, or both?
    "},{"location":"contributing/#development","title":"Development","text":"

    To start developing HTTPX create a fork of the HTTPX repository on GitHub.

    Then clone your fork with the following command replacing YOUR-USERNAME with your GitHub username:

    $ git clone https://github.com/YOUR-USERNAME/httpx\n

    You can now install the project and its dependencies using:

    $ cd httpx\n$ scripts/install\n
    "},{"location":"contributing/#testing-and-linting","title":"Testing and Linting","text":"

    We use custom shell scripts to automate testing, linting, and documentation building workflow.

    To run the tests, use:

    $ scripts/test\n

    Warning

    The test suite spawns testing servers on ports 8000 and 8001. Make sure these are not in use, so the tests can run properly.

    Any additional arguments will be passed to pytest. See the pytest documentation for more information.

    For example, to run a single test script:

    $ scripts/test tests/test_multipart.py\n

    To run the code auto-formatting:

    $ scripts/lint\n

    Lastly, to run code checks separately (they are also run as part of scripts/test), run:

    $ scripts/check\n
    "},{"location":"contributing/#documenting","title":"Documenting","text":"

    Documentation pages are located under the docs/ folder.

    To run the documentation site locally (useful for previewing changes), use:

    $ scripts/docs\n
    "},{"location":"contributing/#resolving-build-ci-failures","title":"Resolving Build / CI Failures","text":"

    Once you've submitted your pull request, the test suite will automatically run, and the results will show up in GitHub. If the test suite fails, you'll want to click through to the \"Details\" link, and try to identify why the test suite failed.

    Here are some common ways the test suite can fail:

    "},{"location":"contributing/#check-job-failed","title":"Check Job Failed","text":"

    This job failing means there is either a code formatting issue or a type-annotation issue. You can look at the job output to figure out why it failed, or run the following within a shell:

    $ scripts/check\n

    It may be worth running $ scripts/lint to attempt to auto-format the code, and if that succeeds, committing the changes.

    "},{"location":"contributing/#docs-job-failed","title":"Docs Job Failed","text":"

    This job failing means the documentation failed to build. This can happen for a variety of reasons like invalid markdown or missing configuration within mkdocs.yml.

    "},{"location":"contributing/#python-3x-job-failed","title":"Python 3.X Job Failed","text":"

    This job failing means the unit tests failed or not all code paths are covered by unit tests.

    If tests are failing you will see this message under the coverage report:

    === 1 failed, 435 passed, 1 skipped, 1 xfailed in 11.09s ===

    If tests succeed but coverage doesn't reach our current threshold, you will see this message under the coverage report:

    FAIL Required test coverage of 100% not reached. Total coverage: 99.00%

    "},{"location":"contributing/#releasing","title":"Releasing","text":"

    This section is targeted at HTTPX maintainers.

    Before releasing a new version, create a pull request that includes:

    • An update to the changelog:
      • We follow the format from keepachangelog.
      • Compare master with the tag of the latest release, and list all entries that are of interest to our users:
        • Things that must go in the changelog: added, changed, deprecated or removed features, and bug fixes.
        • Things that should not go in the changelog: changes to documentation, tests or tooling.
        • Try sorting entries in descending order of impact / importance.
        • Keep it concise and to-the-point. \ud83c\udfaf
    • A version bump: see __version__.py.

    For an example, see #1006.

    Once the release PR is merged, create a new release including:

    • Tag version like 0.13.3.
    • Release title Version 0.13.3
    • Description copied from the changelog.

    Once created this release will be automatically uploaded to PyPI.

    If something goes wrong with the PyPI job the release can be published using the scripts/publish script.

    "},{"location":"contributing/#development-proxy-setup","title":"Development proxy setup","text":"

    To test and debug requests via a proxy it's best to run a proxy server locally. Any server should do, but HTTPCore's test suite uses mitmproxy, which is written in Python, is fully featured, and has an excellent UI and tools for inspecting requests.

    You can install mitmproxy using pip install mitmproxy or several other ways.

    mitmproxy does require setting up local TLS certificates for HTTPS requests, as its main purpose is to allow developers to inspect requests that pass through it. We can set them up as follows:

    1. pip install trustme-cli.
    2. trustme-cli -i example.org www.example.org, assuming you want to test connecting to that domain. This will create three files: server.pem, server.key and client.pem.
    3. mitmproxy requires a PEM file that includes the private key and the certificate so we need to concatenate them: cat server.key server.pem > server.withkey.pem.
    4. Start the proxy server mitmproxy --certs server.withkey.pem, or use the other mitmproxy commands with different UI options.

    At this point the server is ready to start serving requests. You'll need to configure HTTPX as described in the proxy section and the SSL certificates section; this is where our previously generated client.pem comes in:

    ctx = ssl.create_default_context(cafile=\"/path/to/client.pem\")\nclient = httpx.Client(proxy=\"http://127.0.0.1:8080/\", verify=ctx)\n

    Note, however, that HTTPS requests will only succeed to the host specified in the SSL/TLS certificate we generated. HTTPS requests to other hosts will raise an error like:

    ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate\nverify failed: Hostname mismatch, certificate is not valid for\n'duckduckgo.com'. (_ssl.c:1108)\n

    If you want to make requests to more hosts you'll need to regenerate the certificates and include all the hosts you intend to connect to in the second step, i.e.

    trustme-cli -i example.org www.example.org duckduckgo.com www.duckduckgo.com

    "},{"location":"environment_variables/","title":"Environment Variables","text":"

    The HTTPX library can be configured via environment variables. Environment variables are used by default. To ignore environment variables, trust_env has to be set to False. There are two ways to set trust_env to disable environment variables:

    • On the client via httpx.Client(trust_env=False).
    • Using the top-level API, such as httpx.get(\"<url>\", trust_env=False).

    Here is a list of environment variables that HTTPX recognizes and what function they serve:

    "},{"location":"environment_variables/#proxies","title":"Proxies","text":"

    The environment variables documented below are used as a convention by various HTTP tooling, including:

    • cURL
    • requests

    For more information on using proxies in HTTPX, see HTTP Proxying.

    "},{"location":"environment_variables/#http_proxy-https_proxy-all_proxy","title":"HTTP_PROXY, HTTPS_PROXY, ALL_PROXY","text":"

    Valid values: A URL to a proxy

    HTTP_PROXY, HTTPS_PROXY, ALL_PROXY set the proxy to be used for http, https, or all requests respectively.

    export HTTP_PROXY=http://my-external-proxy.com:1234\n\n# This request will be sent through the proxy\npython -c \"import httpx; httpx.get('http://example.com')\"\n\n# This request will be sent directly, as we set `trust_env=False`\npython -c \"import httpx; httpx.get('http://example.com', trust_env=False)\"\n
    "},{"location":"environment_variables/#no_proxy","title":"NO_PROXY","text":"

    Valid values: a comma-separated list of hostnames/urls

    NO_PROXY disables the proxy for specific URLs.

    export HTTP_PROXY=http://my-external-proxy.com:1234\nexport NO_PROXY=http://127.0.0.1,python-httpx.org\n\n# As in the previous example, this request will be sent through the proxy\npython -c \"import httpx; httpx.get('http://example.com')\"\n\n# These requests will be sent directly, bypassing the proxy\npython -c \"import httpx; httpx.get('http://127.0.0.1:5000/my-api')\"\npython -c \"import httpx; httpx.get('https://www.python-httpx.org')\"\n
    "},{"location":"environment_variables/#ssl_cert_file","title":"SSL_CERT_FILE","text":"

    Valid values: a filename

    If this environment variable is set then HTTPX will load CA certificates from the specified file instead of the default location.

    Example:

    SSL_CERT_FILE=/path/to/ca-certs/ca-bundle.crt python -c \"import httpx; httpx.get('https://example.com')\"\n
    "},{"location":"environment_variables/#ssl_cert_dir","title":"SSL_CERT_DIR","text":"

    Valid values: a directory following an OpenSSL specific layout.

    If this environment variable is set and the directory follows an OpenSSL specific layout (i.e. you ran c_rehash) then HTTPX will load CA certificates from this directory instead of the default location.

    Example:

    SSL_CERT_DIR=/path/to/ca-certs/ python -c \"import httpx; httpx.get('https://example.com')\"\n
    "},{"location":"exceptions/","title":"Exceptions","text":"

    This page lists exceptions that may be raised when using HTTPX.

    For an overview of how to work with HTTPX exceptions, see Exceptions (Quickstart).

    "},{"location":"exceptions/#the-exception-hierarchy","title":"The exception hierarchy","text":"
    • HTTPError
      • RequestError
        • TransportError
          • TimeoutException
            • ConnectTimeout
            • ReadTimeout
            • WriteTimeout
            • PoolTimeout
          • NetworkError
            • ConnectError
            • ReadError
            • WriteError
            • CloseError
          • ProtocolError
            • LocalProtocolError
            • RemoteProtocolError
          • ProxyError
          • UnsupportedProtocol
        • DecodingError
        • TooManyRedirects
      • HTTPStatusError
    • InvalidURL
    • CookieConflict
    • StreamError
      • StreamConsumed
      • ResponseNotRead
      • RequestNotRead
      • StreamClosed
    "},{"location":"exceptions/#exception-classes","title":"Exception classes","text":"class httpx.HTTPError(message)

    Base class for RequestError and HTTPStatusError.

    Useful for try...except blocks when issuing a request, and then calling .raise_for_status().

    For example:

    try:\n    response = httpx.get(\"https://www.example.com\")\n    response.raise_for_status()\nexcept httpx.HTTPError as exc:\n    print(f\"HTTP Exception for {exc.request.url} - {exc}\")\n
    class httpx.RequestError(message, *, request=None)

    Base class for all exceptions that may occur when issuing a .request().

    class httpx.TransportError(message, *, request=None)

    Base class for all exceptions that occur at the level of the Transport API.

    class httpx.TimeoutException(message, *, request=None)

    The base class for timeout errors.

    An operation has timed out.

    class httpx.ConnectTimeout(message, *, request=None)

    Timed out while connecting to the host.

    class httpx.ReadTimeout(message, *, request=None)

    Timed out while receiving data from the host.

    class httpx.WriteTimeout(message, *, request=None)

    Timed out while sending data to the host.

    class httpx.PoolTimeout(message, *, request=None)

    Timed out waiting to acquire a connection from the pool.

    class httpx.NetworkError(message, *, request=None)

    The base class for network-related errors.

    An error occurred while interacting with the network.

    class httpx.ConnectError(message, *, request=None)

    Failed to establish a connection.

    class httpx.ReadError(message, *, request=None)

    Failed to receive data from the network.

    class httpx.WriteError(message, *, request=None)

    Failed to send data through the network.

    class httpx.CloseError(message, *, request=None)

    Failed to close a connection.

    class httpx.ProtocolError(message, *, request=None)

    The protocol was violated.

    class httpx.LocalProtocolError(message, *, request=None)

    A protocol was violated by the client.

    For example if the user instantiated a Request instance explicitly, failed to include the mandatory Host: header, and then issued it directly using client.send().

    class httpx.RemoteProtocolError(message, *, request=None)

    The protocol was violated by the server.

    For example, returning malformed HTTP.

    class httpx.ProxyError(message, *, request=None)

    An error occurred while establishing a proxy connection.

    class httpx.UnsupportedProtocol(message, *, request=None)

    Attempted to make a request to an unsupported protocol.

    For example issuing a request to ftp://www.example.com.

    class httpx.DecodingError(message, *, request=None)

    Decoding of the response failed, due to a malformed encoding.

    class httpx.TooManyRedirects(message, *, request=None)

    Too many redirects.

    class httpx.HTTPStatusError(message, *, request, response)

    The response had an error HTTP status of 4xx or 5xx.

    May be raised when calling response.raise_for_status()

    class httpx.InvalidURL(message)

    URL is improperly formed or cannot be parsed.

    class httpx.CookieConflict(message)

    Attempted to lookup a cookie by name, but multiple cookies existed.

    Can occur when calling response.cookies.get(...).

    class httpx.StreamError(message)

    The base class for stream exceptions.

    The developer made an error in accessing the request stream in an invalid way.

    class httpx.StreamConsumed()

    Attempted to read or stream content, but the content has already been streamed.

    class httpx.StreamClosed()

    Attempted to read or stream response content, but the request has been closed.

    class httpx.ResponseNotRead()

    Attempted to access streaming response content, without having called read().

    class httpx.RequestNotRead()

    Attempted to access streaming request content, without having called read().

    "},{"location":"http2/","title":"HTTP/2","text":"

    HTTP/2 is a major new iteration of the HTTP protocol, that provides a far more efficient transport, with potential performance benefits. HTTP/2 does not change the core semantics of the request or response, but alters the way that data is sent to and from the server.

    Rather than the text format that HTTP/1.1 uses, HTTP/2 is a binary format. The binary format provides full request and response multiplexing, and efficient compression of HTTP headers. The stream multiplexing means that where HTTP/1.1 requires one TCP stream for each concurrent request, HTTP/2 allows a single TCP stream to handle multiple concurrent requests.

    HTTP/2 also provides support for functionality such as response prioritization, and server push.

    For a comprehensive guide to HTTP/2 you may want to check out \"http2 explained\".

    "},{"location":"http2/#enabling-http2","title":"Enabling HTTP/2","text":"

    When using the httpx client, HTTP/2 support is not enabled by default, because HTTP/1.1 is a mature, battle-hardened transport layer, and our HTTP/1.1 implementation may be considered the more robust option at this point in time. It is possible that a future version of httpx may enable HTTP/2 support by default.

    If you're issuing highly concurrent requests you might want to consider trying out our HTTP/2 support. You can do so by first making sure to install the optional HTTP/2 dependencies...

    $ pip install httpx[http2]\n

    And then instantiating a client with HTTP/2 support enabled:

    client = httpx.AsyncClient(http2=True)\n...\n

    You can also instantiate a client as a context manager, to ensure that all HTTP connections are nicely scoped, and will be closed once the context block is exited.

    async with httpx.AsyncClient(http2=True) as client:\n    ...\n

    HTTP/2 support is available on both Client and AsyncClient, although it's typically more useful in async contexts if you're issuing lots of concurrent requests.

    "},{"location":"http2/#inspecting-the-http-version","title":"Inspecting the HTTP version","text":"

    Enabling HTTP/2 support on the client does not necessarily mean that your requests and responses will be transported over HTTP/2, since both the client and the server need to support HTTP/2. If you connect to a server that only supports HTTP/1.1 the client will use a standard HTTP/1.1 connection instead.

    You can determine which version of the HTTP protocol was used by examining the .http_version property on the response.

    client = httpx.AsyncClient(http2=True)\nresponse = await client.get(...)\nprint(response.http_version)  # \"HTTP/1.0\", \"HTTP/1.1\", or \"HTTP/2\".\n
    "},{"location":"logging/","title":"Logging","text":"

    If you need to inspect the internal behaviour of httpx, you can use Python's standard logging to output information about the underlying network behaviour.

    For example, the following configuration...

    import logging\nimport httpx\n\nlogging.basicConfig(\n    format=\"%(levelname)s [%(asctime)s] %(name)s - %(message)s\",\n    datefmt=\"%Y-%m-%d %H:%M:%S\",\n    level=logging.DEBUG\n)\n\nhttpx.get(\"https://www.example.com\")\n

    Will send debug-level output to the console, or wherever stdout is directed to...

    DEBUG [2024-09-28 17:27:40] httpcore.connection - connect_tcp.started host='www.example.com' port=443 local_address=None timeout=5.0 socket_options=None\nDEBUG [2024-09-28 17:27:41] httpcore.connection - connect_tcp.complete return_value=<httpcore._backends.sync.SyncStream object at 0x101f1e8e0>\nDEBUG [2024-09-28 17:27:41] httpcore.connection - start_tls.started ssl_context=SSLContext(verify=True) server_hostname='www.example.com' timeout=5.0\nDEBUG [2024-09-28 17:27:41] httpcore.connection - start_tls.complete return_value=<httpcore._backends.sync.SyncStream object at 0x1020f49a0>\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_headers.started request=<Request [b'GET']>\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_headers.complete\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_body.started request=<Request [b'GET']>\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - send_request_body.complete\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_headers.started request=<Request [b'GET']>\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK', [(b'Content-Encoding', b'gzip'), (b'Accept-Ranges', b'bytes'), (b'Age', b'407727'), (b'Cache-Control', b'max-age=604800'), (b'Content-Type', b'text/html; charset=UTF-8'), (b'Date', b'Sat, 28 Sep 2024 13:27:42 GMT'), (b'Etag', b'\"3147526947+gzip\"'), (b'Expires', b'Sat, 05 Oct 2024 13:27:42 GMT'), (b'Last-Modified', b'Thu, 17 Oct 2019 07:18:26 GMT'), (b'Server', b'ECAcc (dcd/7D43)'), (b'Vary', b'Accept-Encoding'), (b'X-Cache', b'HIT'), (b'Content-Length', b'648')])\nINFO [2024-09-28 17:27:41] httpx - HTTP Request: GET https://www.example.com \"HTTP/1.1 200 OK\"\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_body.started request=<Request [b'GET']>\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - receive_response_body.complete\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - response_closed.started\nDEBUG [2024-09-28 17:27:41] httpcore.http11 - response_closed.complete\nDEBUG [2024-09-28 17:27:41] httpcore.connection - close.started\nDEBUG [2024-09-28 17:27:41] httpcore.connection - close.complete\n

    Logging output includes information from both the high-level httpx logger, and the network-level httpcore logger, which can be configured separately.

    For handling more complex logging configurations you might want to use the dictionary configuration style...

    import logging.config\nimport httpx\n\nLOGGING_CONFIG = {\n    \"version\": 1,\n    \"handlers\": {\n        \"default\": {\n            \"class\": \"logging.StreamHandler\",\n            \"formatter\": \"http\",\n            \"stream\": \"ext://sys.stderr\"\n        }\n    },\n    \"formatters\": {\n        \"http\": {\n            \"format\": \"%(levelname)s [%(asctime)s] %(name)s - %(message)s\",\n            \"datefmt\": \"%Y-%m-%d %H:%M:%S\",\n        }\n    },\n    'loggers': {\n        'httpx': {\n            'handlers': ['default'],\n            'level': 'DEBUG',\n        },\n        'httpcore': {\n            'handlers': ['default'],\n            'level': 'DEBUG',\n        },\n    }\n}\n\nlogging.config.dictConfig(LOGGING_CONFIG)\nhttpx.get('https://www.example.com')\n

    The exact formatting of the debug logging may be subject to change across different versions of httpx and httpcore. If you need to rely on a particular format it is recommended that you pin installation of these packages to fixed versions.

    "},{"location":"quickstart/","title":"QuickStart","text":"

    First, start by importing HTTPX:

    >>> import httpx\n

    Now, let\u2019s try to get a webpage.

    >>> r = httpx.get('https://httpbin.org/get')\n>>> r\n<Response [200 OK]>\n

    Similarly, to make an HTTP POST request:

    >>> r = httpx.post('https://httpbin.org/post', data={'key': 'value'})\n

    The PUT, DELETE, HEAD, and OPTIONS requests all follow the same style:

    >>> r = httpx.put('https://httpbin.org/put', data={'key': 'value'})\n>>> r = httpx.delete('https://httpbin.org/delete')\n>>> r = httpx.head('https://httpbin.org/get')\n>>> r = httpx.options('https://httpbin.org/get')\n
    "},{"location":"quickstart/#passing-parameters-in-urls","title":"Passing Parameters in URLs","text":"

    To include URL query parameters in the request, use the params keyword:

    >>> params = {'key1': 'value1', 'key2': 'value2'}\n>>> r = httpx.get('https://httpbin.org/get', params=params)\n

    To see how the values get encoded into the URL string, we can inspect the resulting URL that was used to make the request:

    >>> r.url\nURL('https://httpbin.org/get?key2=value2&key1=value1')\n

    You can also pass a list of items as a value:

    >>> params = {'key1': 'value1', 'key2': ['value2', 'value3']}\n>>> r = httpx.get('https://httpbin.org/get', params=params)\n>>> r.url\nURL('https://httpbin.org/get?key1=value1&key2=value2&key2=value3')\n
    "},{"location":"quickstart/#response-content","title":"Response Content","text":"

    HTTPX will automatically handle decoding the response content into Unicode text.

    >>> r = httpx.get('https://www.example.org/')\n>>> r.text\n'<!doctype html>\\n<html>\\n<head>\\n<title>Example Domain</title>...'\n

    You can inspect what encoding will be used to decode the response.

    >>> r.encoding\n'UTF-8'\n

    In some cases the response may not contain an explicit encoding, in which case HTTPX will attempt to automatically determine an encoding to use.

    >>> r.encoding\nNone\n>>> r.text\n'<!doctype html>\\n<html>\\n<head>\\n<title>Example Domain</title>...'\n

    If you need to override the standard behaviour and explicitly set the encoding to use, then you can do that too.

    >>> r.encoding = 'ISO-8859-1'\n
    "},{"location":"quickstart/#binary-response-content","title":"Binary Response Content","text":"

    The response content can also be accessed as bytes, for non-text responses:

    >>> r.content\nb'<!doctype html>\\n<html>\\n<head>\\n<title>Example Domain</title>...'\n

    Any gzip and deflate HTTP response encodings will automatically be decoded for you. If brotlipy is installed, then the brotli response encoding will be supported. If zstandard is installed, then zstd response encodings will also be supported.

    For example, to create an image from binary data returned by a request, you can use the following code:

    >>> from PIL import Image\n>>> from io import BytesIO\n>>> i = Image.open(BytesIO(r.content))\n
    "},{"location":"quickstart/#json-response-content","title":"JSON Response Content","text":"

    Often Web API responses will be encoded as JSON.

    >>> r = httpx.get('https://api.github.com/events')\n>>> r.json()\n[{u'repository': {u'open_issues': 0, u'url': 'https://github.com/...' ...  }}]\n
    "},{"location":"quickstart/#custom-headers","title":"Custom Headers","text":"

    To include additional headers in the outgoing request, use the headers keyword argument:

    >>> url = 'https://httpbin.org/headers'\n>>> headers = {'user-agent': 'my-app/0.0.1'}\n>>> r = httpx.get(url, headers=headers)\n
    "},{"location":"quickstart/#sending-form-encoded-data","title":"Sending Form Encoded Data","text":"

    Some types of HTTP requests, such as POST and PUT requests, can include data in the request body. One common way of including that is as form-encoded data, which is used for HTML forms.

    >>> data = {'key1': 'value1', 'key2': 'value2'}\n>>> r = httpx.post(\"https://httpbin.org/post\", data=data)\n>>> print(r.text)\n{\n  ...\n  \"form\": {\n    \"key2\": \"value2\",\n    \"key1\": \"value1\"\n  },\n  ...\n}\n

    Form encoded data can also include multiple values from a given key.

    >>> data = {'key1': ['value1', 'value2']}\n>>> r = httpx.post(\"https://httpbin.org/post\", data=data)\n>>> print(r.text)\n{\n  ...\n  \"form\": {\n    \"key1\": [\n      \"value1\",\n      \"value2\"\n    ]\n  },\n  ...\n}\n
    "},{"location":"quickstart/#sending-multipart-file-uploads","title":"Sending Multipart File Uploads","text":"

    You can also upload files, using HTTP multipart encoding:

    >>> with open('report.xls', 'rb') as report_file:\n...     files = {'upload-file': report_file}\n...     r = httpx.post(\"https://httpbin.org/post\", files=files)\n>>> print(r.text)\n{\n  ...\n  \"files\": {\n    \"upload-file\": \"<... binary content ...>\"\n  },\n  ...\n}\n

    You can also explicitly set the filename and content type, by using a tuple of items for the file value:

    >>> with open('report.xls', 'rb') as report_file:\n...     files = {'upload-file': ('report.xls', report_file, 'application/vnd.ms-excel')}\n...     r = httpx.post(\"https://httpbin.org/post\", files=files)\n>>> print(r.text)\n{\n  ...\n  \"files\": {\n    \"upload-file\": \"<... binary content ...>\"\n  },\n  ...\n}\n

    If you need to include non-file data fields in the multipart form, use the data=... parameter:

    >>> data = {'message': 'Hello, world!'}\n>>> with open('report.xls', 'rb') as report_file:\n...     files = {'file': report_file}\n...     r = httpx.post(\"https://httpbin.org/post\", data=data, files=files)\n>>> print(r.text)\n{\n  ...\n  \"files\": {\n    \"file\": \"<... binary content ...>\"\n  },\n  \"form\": {\n    \"message\": \"Hello, world!\",\n  },\n  ...\n}\n
    "},{"location":"quickstart/#sending-json-encoded-data","title":"Sending JSON Encoded Data","text":"

    Form encoded data is okay if all you need is a simple key-value data structure. For more complicated data structures you'll often want to use JSON encoding instead.

    >>> data = {'integer': 123, 'boolean': True, 'list': ['a', 'b', 'c']}\n>>> r = httpx.post(\"https://httpbin.org/post\", json=data)\n>>> print(r.text)\n{\n  ...\n  \"json\": {\n    \"boolean\": true,\n    \"integer\": 123,\n    \"list\": [\n      \"a\",\n      \"b\",\n      \"c\"\n    ]\n  },\n  ...\n}\n
    "},{"location":"quickstart/#sending-binary-request-data","title":"Sending Binary Request Data","text":"

    For other encodings, you should use the content=... parameter, passing either a bytes type or a generator that yields bytes.

    >>> content = b'Hello, world'\n>>> r = httpx.post(\"https://httpbin.org/post\", content=content)\n
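
    A generator that yields bytes can be used in the same way, which is useful when the body is produced incrementally (a minimal sketch):

    >>> def gen_content():\n...     # Yield the request body in chunks.\n...     yield b'Hello, '\n...     yield b'world'\n...\n>>> r = httpx.post(\"https://httpbin.org/post\", content=gen_content())\n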

    You may also want to set a custom Content-Type header when uploading binary data.
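
    For example (a minimal sketch, with an illustrative content type):

    >>> headers = {'Content-Type': 'application/octet-stream'}\n>>> r = httpx.post(\"https://httpbin.org/post\", content=b'...', headers=headers)\n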

    "},{"location":"quickstart/#response-status-codes","title":"Response Status Codes","text":"

    We can inspect the HTTP status code of the response:

    >>> r = httpx.get('https://httpbin.org/get')\n>>> r.status_code\n200\n

    HTTPX also includes an easy shortcut for accessing status codes by their text phrase.

    >>> r.status_code == httpx.codes.OK\nTrue\n

    We can raise an exception for any responses which are not a 2xx success code:

    >>> not_found = httpx.get('https://httpbin.org/status/404')\n>>> not_found.status_code\n404\n>>> not_found.raise_for_status()\nTraceback (most recent call last):\n  File \"/Users/tomchristie/GitHub/encode/httpcore/httpx/models.py\", line 837, in raise_for_status\n    raise HTTPStatusError(message, response=self)\nhttpx._exceptions.HTTPStatusError: 404 Client Error: Not Found for url: https://httpbin.org/status/404\nFor more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/404\n

    Any successful response codes will return the Response instance rather than raising an exception.

    >>> r.raise_for_status()\n

    The method returns the response instance, allowing you to use it inline. For example:

    >>> r = httpx.get('...').raise_for_status()\n>>> data = httpx.get('...').raise_for_status().json()\n
    "},{"location":"quickstart/#response-headers","title":"Response Headers","text":"

    The response headers are available as a dictionary-like interface.

    >>> r.headers\nHeaders({\n    'content-encoding': 'gzip',\n    'transfer-encoding': 'chunked',\n    'connection': 'close',\n    'server': 'nginx/1.0.4',\n    'x-runtime': '148ms',\n    'etag': '\"e1ca502697e5c9317743dc078f67693f\"',\n    'content-type': 'application/json'\n})\n

    The Headers data type is case-insensitive, so you can use any capitalization.

    >>> r.headers['Content-Type']\n'application/json'\n\n>>> r.headers.get('content-type')\n'application/json'\n

    Multiple values for a single response header are represented as a single comma-separated value, as per RFC 7230:

    A recipient MAY combine multiple header fields with the same field name into one \u201cfield-name: field-value\u201d pair, without changing the semantics of the message, by appending each subsequent field-value to the combined field value in order, separated by a comma.
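
    If you need the individual values, the Headers interface provides a get_list() method; as a minimal sketch, constructing a Headers instance with a repeated key:

    >>> headers = httpx.Headers([('vary', 'Accept-Encoding'), ('vary', 'User-Agent')])\n>>> headers['vary']\n'Accept-Encoding, User-Agent'\n>>> headers.get_list('vary', split_commas=True)\n['Accept-Encoding', 'User-Agent']\n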

    "},{"location":"quickstart/#streaming-responses","title":"Streaming Responses","text":"

    For large downloads you may want to use streaming responses that do not load the entire response body into memory at once.

    You can stream the binary content of the response...

    >>> with httpx.stream(\"GET\", \"https://www.example.com\") as r:\n...     for data in r.iter_bytes():\n...         print(data)\n

    Or the text of the response...

    >>> with httpx.stream(\"GET\", \"https://www.example.com\") as r:\n...     for text in r.iter_text():\n...         print(text)\n

    Or stream the text, on a line-by-line basis...

    >>> with httpx.stream(\"GET\", \"https://www.example.com\") as r:\n...     for line in r.iter_lines():\n...         print(line)\n

    HTTPX will use universal line endings, normalising all cases to \\n.

    In some cases you might want to access the raw bytes on the response without applying any HTTP content decoding. In this case any content encoding that the web server has applied such as gzip, deflate, brotli, or zstd will not be automatically decoded.

    >>> with httpx.stream(\"GET\", \"https://www.example.com\") as r:\n...     for chunk in r.iter_raw():\n...         print(chunk)\n

    If you're using streaming responses in any of these ways then the response.content and response.text attributes will not be available, and will raise errors if accessed. However you can also use the response streaming functionality to conditionally load the response body:

    >>> with httpx.stream(\"GET\", \"https://www.example.com\") as r:\n...     if int(r.headers['Content-Length']) < TOO_LONG:\n...         r.read()\n...         print(r.text)\n
    "},{"location":"quickstart/#cookies","title":"Cookies","text":"

    Any cookies that are set on the response can be easily accessed:

    >>> r = httpx.get('https://httpbin.org/cookies/set?chocolate=chip')\n>>> r.cookies['chocolate']\n'chip'\n

    To include cookies in an outgoing request, use the cookies parameter:

    >>> cookies = {\"peanut\": \"butter\"}\n>>> r = httpx.get('https://httpbin.org/cookies', cookies=cookies)\n>>> r.json()\n{'cookies': {'peanut': 'butter'}}\n

    Cookies are returned in a Cookies instance, which is a dict-like data structure with additional API for accessing cookies by their domain or path.

    >>> cookies = httpx.Cookies()\n>>> cookies.set('cookie_on_domain', 'hello, there!', domain='httpbin.org')\n>>> cookies.set('cookie_off_domain', 'nope.', domain='example.org')\n>>> r = httpx.get('http://httpbin.org/cookies', cookies=cookies)\n>>> r.json()\n{'cookies': {'cookie_on_domain': 'hello, there!'}}\n
    "},{"location":"quickstart/#redirection-and-history","title":"Redirection and History","text":"

    By default, HTTPX will not follow redirects for any HTTP method, although this can be explicitly enabled.

    For example, GitHub redirects all HTTP requests to HTTPS.

    >>> r = httpx.get('http://github.com/')\n>>> r.status_code\n301\n>>> r.history\n[]\n>>> r.next_request\n<Request('GET', 'https://github.com/')>\n

    You can modify the default redirection handling with the follow_redirects parameter:

    >>> r = httpx.get('http://github.com/', follow_redirects=True)\n>>> r.url\nURL('https://github.com/')\n>>> r.status_code\n200\n>>> r.history\n[<Response [301 Moved Permanently]>]\n

    The history property of the response can be used to inspect any followed redirects. It contains a list of any redirect responses that were followed, in the order in which they were made.

    "},{"location":"quickstart/#timeouts","title":"Timeouts","text":"

    HTTPX defaults to including reasonable timeouts for all network operations, meaning that if a connection is not properly established then it should always raise an error rather than hanging indefinitely.

    The default timeout for network inactivity is five seconds. You can modify the value to be more or less strict:

    >>> httpx.get('https://github.com/', timeout=0.001)\n

    You can also disable the timeout behavior completely...

    >>> httpx.get('https://github.com/', timeout=None)\n

    For advanced timeout management, see Timeout fine-tuning.

    "},{"location":"quickstart/#authentication","title":"Authentication","text":"

    HTTPX supports Basic and Digest HTTP authentication.

    To provide Basic authentication credentials, pass a 2-tuple of plaintext str or bytes objects as the auth argument to the request functions:

    >>> httpx.get(\"https://example.com\", auth=(\"my_user\", \"password123\"))\n

    To provide credentials for Digest authentication you'll need to instantiate a DigestAuth object with the plaintext username and password as arguments. This object can then be passed as the auth argument to the request methods as above:

    >>> auth = httpx.DigestAuth(\"my_user\", \"password123\")\n>>> httpx.get(\"https://example.com\", auth=auth)\n<Response [200 OK]>\n
    "},{"location":"quickstart/#exceptions","title":"Exceptions","text":"

    HTTPX will raise exceptions if an error occurs.

    The most important exception classes in HTTPX are RequestError and HTTPStatusError.

    The RequestError class is a superclass that encompasses any exception that occurs while issuing an HTTP request. These exceptions include a .request attribute.

    try:\n    response = httpx.get(\"https://www.example.com/\")\nexcept httpx.RequestError as exc:\n    print(f\"An error occurred while requesting {exc.request.url!r}.\")\n

    The HTTPStatusError class is raised by response.raise_for_status() on responses which are not a 2xx success code. These exceptions include both a .request and a .response attribute.

    response = httpx.get(\"https://www.example.com/\")\ntry:\n    response.raise_for_status()\nexcept httpx.HTTPStatusError as exc:\n    print(f\"Error response {exc.response.status_code} while requesting {exc.request.url!r}.\")\n

    There is also a base class HTTPError that includes both of these categories, and can be used to catch either failed requests, or 4xx and 5xx responses.

    You can either use this base class to catch both categories...

    try:\n    response = httpx.get(\"https://www.example.com/\")\n    response.raise_for_status()\nexcept httpx.HTTPError as exc:\n    print(f\"Error while requesting {exc.request.url!r}.\")\n

    Or handle each case explicitly...

    try:\n    response = httpx.get(\"https://www.example.com/\")\n    response.raise_for_status()\nexcept httpx.RequestError as exc:\n    print(f\"An error occurred while requesting {exc.request.url!r}.\")\nexcept httpx.HTTPStatusError as exc:\n    print(f\"Error response {exc.response.status_code} while requesting {exc.request.url!r}.\")\n

    For a full list of available exceptions, see Exceptions (API Reference).

    "},{"location":"third_party_packages/","title":"Third Party Packages","text":"

    As HTTPX usage grows, there is an expanding community of developers building tools and libraries that integrate with HTTPX, or depend on HTTPX. Here are some of them.

    "},{"location":"third_party_packages/#plugins","title":"Plugins","text":""},{"location":"third_party_packages/#hishel","title":"Hishel","text":"

    GitHub - Documentation

    An elegant HTTP Cache implementation for HTTPX and HTTP Core.

    "},{"location":"third_party_packages/#httpx-auth","title":"HTTPX-Auth","text":"

    GitHub - Documentation

    Provides authentication classes to be used with HTTPX's authentication parameter.

    "},{"location":"third_party_packages/#httpx-caching","title":"httpx-caching","text":"

    GitHub

    This package adds caching functionality to HTTPX.

    "},{"location":"third_party_packages/#httpx-secure","title":"httpx-secure","text":"

    GitHub

    Drop-in SSRF protection for httpx with DNS caching and custom validation support.

    "},{"location":"third_party_packages/#httpx-socks","title":"httpx-socks","text":"

    GitHub

    Proxy (HTTP, SOCKS) transports for httpx.

    "},{"location":"third_party_packages/#httpx-sse","title":"httpx-sse","text":"

    GitHub

    Allows consuming Server-Sent Events (SSE) with HTTPX.

    "},{"location":"third_party_packages/#httpx-retries","title":"httpx-retries","text":"

    GitHub - Documentation

    A retry layer for HTTPX.

    "},{"location":"third_party_packages/#httpx-ws","title":"httpx-ws","text":"

    GitHub - Documentation

    WebSocket support for HTTPX.

    "},{"location":"third_party_packages/#pytest-httpx","title":"pytest-HTTPX","text":"

    GitHub - Documentation

    Provides a pytest fixture to mock HTTPX within test cases.

    "},{"location":"third_party_packages/#respx","title":"RESPX","text":"

    GitHub - Documentation

    A utility for mocking out HTTPX.

    "},{"location":"third_party_packages/#rpcpy","title":"rpc.py","text":"

    GitHub - Documentation

    A fast and powerful RPC framework based on ASGI/WSGI. Use HTTPX as the client of the RPC service.

    "},{"location":"third_party_packages/#libraries-with-httpx-support","title":"Libraries with HTTPX support","text":""},{"location":"third_party_packages/#authlib","title":"Authlib","text":"

    GitHub - Documentation

    A python library for building OAuth and OpenID Connect clients and servers. Includes an OAuth HTTPX client.

    "},{"location":"third_party_packages/#gidgethub","title":"Gidgethub","text":"

    GitHub - Documentation

    An asynchronous GitHub API library. Includes HTTPX support.

    "},{"location":"third_party_packages/#httpdbg","title":"httpdbg","text":"

    GitHub - Documentation

    A tool for python developers to easily debug the HTTP(S) client requests in a python program.

    "},{"location":"third_party_packages/#vcrpy","title":"VCR.py","text":"

    GitHub - Documentation

    Record and repeat requests.

    "},{"location":"third_party_packages/#gists","title":"Gists","text":""},{"location":"third_party_packages/#urllib3-transport","title":"urllib3-transport","text":"

    GitHub

    This public gist provides an example of a custom transport implementation built on top of the battle-tested urllib3 library.

    "},{"location":"troubleshooting/","title":"Troubleshooting","text":"

    This page lists some common problems or issues you could encounter while developing with HTTPX, as well as possible solutions.

    "},{"location":"troubleshooting/#proxies","title":"Proxies","text":""},{"location":"troubleshooting/#the-handshake-operation-timed-out-on-https-requests-when-using-a-proxy","title":"\"The handshake operation timed out\" on HTTPS requests when using a proxy","text":"

    Description: When using a proxy and making an HTTPS request, you see an exception looking like this:

    httpx.ProxyError: _ssl.c:1091: The handshake operation timed out\n

    Similar issues: encode/httpx#1412, encode/httpx#1433

    Resolution: it is likely that you've set up your proxies like this...

    mounts = {\n  \"http://\": httpx.HTTPTransport(proxy=\"http://myproxy.org\"),\n  \"https://\": httpx.HTTPTransport(proxy=\"https://myproxy.org\"),\n}\n

    Using this setup, you're telling HTTPX to connect to the proxy using HTTP for HTTP requests, and using HTTPS for HTTPS requests.

    But if you get the error above, it is likely that your proxy doesn't support connecting via HTTPS. Don't worry: that's a common gotcha.

    Change the scheme of your HTTPS proxy to http://... instead of https://...:

    mounts = {\n  \"http://\": httpx.HTTPTransport(proxy=\"http://myproxy.org\"),\n  \"https://\": httpx.HTTPTransport(proxy=\"http://myproxy.org\"),\n}\n

    This can be simplified to:

    proxy = \"http://myproxy.org\"\nwith httpx.Client(proxy=proxy) as client:\n  ...\n

    For more information, see Proxies: FORWARD vs TUNNEL.

    "},{"location":"troubleshooting/#error-when-making-requests-to-an-https-proxy","title":"Error when making requests to an HTTPS proxy","text":"

    Description: your proxy does support connecting via HTTPS, but you are seeing errors along the lines of...

    httpx.ProxyError: [SSL: PRE_MAC_LENGTH_TOO_LONG] invalid alert (_ssl.c:1091)\n

    Similar issues: encode/httpx#1424.

    Resolution: HTTPX does not properly support HTTPS proxies at this time. If that's something you're interested in having, please see encode/httpx#1434 and consider lending a hand there.

    "},{"location":"advanced/authentication/","title":"Authentication","text":"

    Authentication can either be included on a per-request basis...

    >>> auth = httpx.BasicAuth(username=\"username\", password=\"secret\")\n>>> client = httpx.Client()\n>>> response = client.get(\"https://www.example.com/\", auth=auth)\n

    Or configured on the client instance, ensuring that all outgoing requests will include authentication credentials...

    >>> auth = httpx.BasicAuth(username=\"username\", password=\"secret\")\n>>> client = httpx.Client(auth=auth)\n>>> response = client.get(\"https://www.example.com/\")\n
    "},{"location":"advanced/authentication/#basic-authentication","title":"Basic authentication","text":"

    HTTP basic authentication is an unencrypted authentication scheme that uses a simple encoding of the username and password in the request Authorization header. Since it is unencrypted it should typically only be used over https, although this is not strictly enforced.

    >>> auth = httpx.BasicAuth(username=\"finley\", password=\"secret\")\n>>> client = httpx.Client(auth=auth)\n>>> response = client.get(\"https://httpbin.org/basic-auth/finley/secret\")\n>>> response\n<Response [200 OK]>\n
    "},{"location":"advanced/authentication/#digest-authentication","title":"Digest authentication","text":"

    HTTP digest authentication is a challenge-response authentication scheme. Unlike basic authentication it avoids sending the password in cleartext, so it can be used over unencrypted http connections. It requires an additional round-trip in order to negotiate the authentication.

    >>> auth = httpx.DigestAuth(username=\"olivia\", password=\"secret\")\n>>> client = httpx.Client(auth=auth)\n>>> response = client.get(\"https://httpbin.org/digest-auth/auth/olivia/secret\")\n>>> response\n<Response [200 OK]>\n>>> response.history\n[<Response [401 UNAUTHORIZED]>]\n
    "},{"location":"advanced/authentication/#netrc-authentication","title":"NetRC authentication","text":"

    HTTPX can be configured to use a .netrc config file for authentication.

    The .netrc config file allows authentication credentials to be associated with specified hosts. When a request is made to a host that is found in the netrc file, the username and password will be included using HTTP basic authentication.

    Example .netrc file:

    machine example.org\nlogin example-username\npassword example-password\n\nmachine python-httpx.org\nlogin other-username\npassword other-password\n

    Here are some examples of configuring .netrc authentication with httpx.

    Use the default .netrc file in the user's home directory:

    >>> auth = httpx.NetRCAuth()\n>>> client = httpx.Client(auth=auth)\n

    Use an explicit path to a .netrc file:

    >>> auth = httpx.NetRCAuth(file=\"/path/to/.netrc\")\n>>> client = httpx.Client(auth=auth)\n

    Use the NETRC environment variable to configure a path to the .netrc file, or fall back to the default.

    >>> auth = httpx.NetRCAuth(file=os.environ.get(\"NETRC\"))\n>>> client = httpx.Client(auth=auth)\n

    The NetRCAuth() class uses the netrc.netrc() function from the Python standard library. See the documentation there for more details on exceptions that may be raised if the .netrc file is not found, or cannot be parsed.
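
    For example, a minimal sketch of handling a missing or malformed .netrc file (the exceptions here come from the standard-library netrc module, which NetRCAuth() uses internally)...

    import netrc\nimport httpx\n\ntry:\n    auth = httpx.NetRCAuth()  # Uses the default .netrc location.\nexcept FileNotFoundError:\n    auth = None  # No .netrc file present; continue without authentication.\nexcept netrc.NetrcParseError as exc:\n    raise RuntimeError(f\"Could not parse .netrc file: {exc}\")\n\nclient = httpx.Client(auth=auth)\n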

    "},{"location":"advanced/authentication/#custom-authentication-schemes","title":"Custom authentication schemes","text":"

    When issuing requests or instantiating a client, the auth argument can be used to pass an authentication scheme to use. The auth argument may be one of the following...

    • A two-tuple of username/password, to be used with basic authentication.
    • An instance of httpx.BasicAuth(), httpx.DigestAuth(), or httpx.NetRCAuth().
    • A callable, accepting a request and returning an authenticated request instance (see the sketch after this list).
    • An instance of subclasses of httpx.Auth.
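
    For example, a minimal sketch of the callable form, which simply mutates the outgoing request and returns it (the token value here is a placeholder)...

    def bearer_auth(request):\n    # Attach a bearer token to the outgoing request and return it.\n    request.headers[\"Authorization\"] = \"Bearer example-token\"\n    return request\n\nhttpx.get(\"https://www.example.com/\", auth=bearer_auth)\n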

    The most involved of these is the last, which allows you to create authentication flows involving one or more requests. A subclass of httpx.Auth should implement def auth_flow(request), and yield any requests that need to be made...

    class MyCustomAuth(httpx.Auth):\n    def __init__(self, token):\n        self.token = token\n\n    def auth_flow(self, request):\n        # Send the request, with a custom `X-Authentication` header.\n        request.headers['X-Authentication'] = self.token\n        yield request\n

    If the auth flow requires more than one request, you can issue multiple yields, and obtain the response in each case...

    class MyCustomAuth(httpx.Auth):\n    def __init__(self, token):\n        self.token = token\n\n    def auth_flow(self, request):\n        response = yield request\n        if response.status_code == 401:\n            # If the server issues a 401 response then resend the request,\n            # with a custom `X-Authentication` header.\n            request.headers['X-Authentication'] = self.token\n            yield request\n

    Custom authentication classes are designed to not perform any I/O, so that they may be used with both sync and async client instances. If you are implementing an authentication scheme that requires the request body, then you need to indicate this on the class using a requires_request_body property.

    You will then be able to access request.content inside the .auth_flow() method.

    class MyCustomAuth(httpx.Auth):\n    requires_request_body = True\n\n    def __init__(self, token):\n        self.token = token\n\n    def auth_flow(self, request):\n        response = yield request\n        if response.status_code == 401:\n            # If the server issues a 401 response then resend the request,\n            # with a custom `X-Authentication` header.\n            request.headers['X-Authentication'] = self.sign_request(...)\n            yield request\n\n    def sign_request(self, request):\n        # Create a request signature, based on `request.method`, `request.url`,\n        # `request.headers`, and `request.content`.\n        ...\n

    Similarly, if you are implementing a scheme that requires access to the response body, then use the requires_response_body property. You will then be able to access response body properties and methods such as response.content, response.text, response.json(), etc.

    class MyCustomAuth(httpx.Auth):\n    requires_response_body = True\n\n    def __init__(self, access_token, refresh_token, refresh_url):\n        self.access_token = access_token\n        self.refresh_token = refresh_token\n        self.refresh_url = refresh_url\n\n    def auth_flow(self, request):\n        request.headers[\"X-Authentication\"] = self.access_token\n        response = yield request\n\n        if response.status_code == 401:\n            # If the server issues a 401 response, then issue a request to\n            # refresh tokens, and resend the request.\n            refresh_response = yield self.build_refresh_request()\n            self.update_tokens(refresh_response)\n\n            request.headers[\"X-Authentication\"] = self.access_token\n            yield request\n\n    def build_refresh_request(self):\n        # Return an `httpx.Request` for refreshing tokens.\n        ...\n\n    def update_tokens(self, response):\n        # Update the `.access_token` and `.refresh_token` tokens\n        # based on a refresh response.\n        data = response.json()\n        ...\n

    If you do need to perform I/O other than HTTP requests, such as accessing a disk-based cache, or you need to use concurrency primitives, such as locks, then you should override .sync_auth_flow() and .async_auth_flow() (instead of .auth_flow()). The former will be used by httpx.Client, while the latter will be used by httpx.AsyncClient.

    import asyncio\nimport threading\nimport httpx\n\n\nclass MyCustomAuth(httpx.Auth):\n    def __init__(self):\n        self._sync_lock = threading.RLock()\n        self._async_lock = asyncio.Lock()\n\n    def sync_get_token(self):\n        with self._sync_lock:\n            ...\n\n    def sync_auth_flow(self, request):\n        token = self.sync_get_token()\n        request.headers[\"Authorization\"] = f\"Token {token}\"\n        yield request\n\n    async def async_get_token(self):\n        async with self._async_lock:\n            ...\n\n    async def async_auth_flow(self, request):\n        token = await self.async_get_token()\n        request.headers[\"Authorization\"] = f\"Token {token}\"\n        yield request\n

    If you only want to support one of the two methods, then you should still override the other, but have it raise an explicit RuntimeError.

    import httpx\nimport sync_only_library\n\n\nclass MyCustomAuth(httpx.Auth):\n    def sync_auth_flow(self, request):\n        token = sync_only_library.get_token(...)\n        request.headers[\"Authorization\"] = f\"Token {token}\"\n        yield request\n\n    async def async_auth_flow(self, request):\n        raise RuntimeError(\"Cannot use a sync authentication class with httpx.AsyncClient\")\n
    "},{"location":"advanced/clients/","title":"Clients","text":"

    Hint

    If you are coming from Requests, httpx.Client() is what you can use instead of requests.Session().

    "},{"location":"advanced/clients/#why-use-a-client","title":"Why use a Client?","text":"

    TL;DR

    If you do anything more than experimentation, one-off scripts, or prototypes, then you should use a Client instance.

    More efficient usage of network resources

    When you make requests using the top-level API as documented in the Quickstart guide, HTTPX has to establish a new connection for every single request (connections are not reused). As the number of requests to a host increases, this quickly becomes inefficient.

    On the other hand, a Client instance uses HTTP connection pooling. This means that when you make several requests to the same host, the Client will reuse the underlying TCP connection, instead of recreating one for every single request.

    This can bring significant performance improvements compared to using the top-level API, including:

    • Reduced latency across requests (no handshaking).
    • Reduced CPU usage and round-trips.
    • Reduced network congestion.

    Extra features

    Client instances also support features that aren't available at the top-level API, such as:

    • Cookie persistence across requests.
    • Applying configuration across all outgoing requests.
    • Sending requests through HTTP proxies.
    • Using HTTP/2.

    The other sections on this page go into further detail about what you can do with a Client instance.

    "},{"location":"advanced/clients/#usage","title":"Usage","text":"

    The recommended way to use a Client is as a context manager. This will ensure that connections are properly cleaned up when leaving the with block:

    with httpx.Client() as client:\n    ...\n

    Alternatively, if you aren't using the client as a context manager, you can explicitly close the connection pool using .close():

    client = httpx.Client()\ntry:\n    ...\nfinally:\n    client.close()\n
    "},{"location":"advanced/clients/#making-requests","title":"Making requests","text":"

    Once you have a Client, you can send requests using .get(), .post(), etc. For example:

    >>> with httpx.Client() as client:\n...     r = client.get('https://example.com')\n...\n>>> r\n<Response [200 OK]>\n

    These methods accept the same arguments as httpx.get(), httpx.post(), etc. This means that all features documented in the Quickstart guide are also available at the client level.

    For example, to send a request with custom headers:

    >>> with httpx.Client() as client:\n...     headers = {'X-Custom': 'value'}\n...     r = client.get('https://example.com', headers=headers)\n...\n>>> r.request.headers['X-Custom']\n'value'\n
    "},{"location":"advanced/clients/#sharing-configuration-across-requests","title":"Sharing configuration across requests","text":"

    Clients allow you to apply configuration to all outgoing requests by passing parameters to the Client constructor.

    For example, to apply a set of custom headers on every request:

    >>> url = 'http://httpbin.org/headers'\n>>> headers = {'user-agent': 'my-app/0.0.1'}\n>>> with httpx.Client(headers=headers) as client:\n...     r = client.get(url)\n...\n>>> r.json()['headers']['User-Agent']\n'my-app/0.0.1'\n
    "},{"location":"advanced/clients/#merging-of-configuration","title":"Merging of configuration","text":"

    When a configuration option is provided at both the client-level and request-level, one of two things can happen:

    • For headers, query parameters and cookies, the values are combined together. For example:
    >>> headers = {'X-Auth': 'from-client'}\n>>> params = {'client_id': 'client1'}\n>>> with httpx.Client(headers=headers, params=params) as client:\n...     headers = {'X-Custom': 'from-request'}\n...     params = {'request_id': 'request1'}\n...     r = client.get('https://example.com', headers=headers, params=params)\n...\n>>> r.request.url\nURL('https://example.com?client_id=client1&request_id=request1')\n>>> r.request.headers['X-Auth']\n'from-client'\n>>> r.request.headers['X-Custom']\n'from-request'\n
    • For all other parameters, the request-level value takes priority. For example:
    >>> with httpx.Client(auth=('tom', 'mot123')) as client:\n...     r = client.get('https://example.com', auth=('alice', 'ecila123'))\n...\n>>> _, _, auth = r.request.headers['Authorization'].partition(' ')\n>>> import base64\n>>> base64.b64decode(auth)\nb'alice:ecila123'\n

    If you need finer-grained control on the merging of client-level and request-level parameters, see Request instances.

    "},{"location":"advanced/clients/#other-client-only-configuration-options","title":"Other Client-only configuration options","text":"

    Additionally, Client accepts some configuration options that aren't available at the request level.

    For example, base_url allows you to prepend a URL to all outgoing requests:

    >>> with httpx.Client(base_url='http://httpbin.org') as client:\n...     r = client.get('/headers')\n...\n>>> r.request.url\nURL('http://httpbin.org/headers')\n

    For a list of all available client parameters, see the Client API reference.

    "},{"location":"advanced/clients/#request-instances","title":"Request instances","text":"

    For maximum control on what gets sent over the wire, HTTPX supports building explicit Request instances:

    request = httpx.Request(\"GET\", \"https://example.com\")\n

    To dispatch a Request instance across to the network, create a Client instance and use .send():

    with httpx.Client() as client:\n    response = client.send(request)\n    ...\n

    If you need to mix client-level and request-level options in a way that is not supported by the default merging of configuration, you can use .build_request() and then make arbitrary modifications to the Request instance. For example:

    headers = {\"X-Api-Key\": \"...\", \"X-Client-ID\": \"ABC123\"}\n\nwith httpx.Client(headers=headers) as client:\n    request = client.build_request(\"GET\", \"https://api.example.com\")\n\n    print(request.headers[\"X-Client-ID\"])  # \"ABC123\"\n\n    # Don't send the API key for this particular request.\n    del request.headers[\"X-Api-Key\"]\n\n    response = client.send(request)\n    ...\n
    "},{"location":"advanced/clients/#monitoring-download-progress","title":"Monitoring download progress","text":"

    If you need to monitor download progress of large responses, you can use response streaming and inspect the response.num_bytes_downloaded property.

    This interface is required for properly determining download progress, because the total number of bytes returned by response.content or response.iter_bytes() will not always correspond with the raw content length of the response if HTTP response compression is being used.

    For example, showing a progress bar using the tqdm library while a response is being downloaded could be done like this\u2026

    import tempfile\n\nimport httpx\nfrom tqdm import tqdm\n\nwith tempfile.NamedTemporaryFile() as download_file:\n    url = \"https://speed.hetzner.de/100MB.bin\"\n    with httpx.stream(\"GET\", url) as response:\n        total = int(response.headers[\"Content-Length\"])\n\n        with tqdm(total=total, unit_scale=True, unit_divisor=1024, unit=\"B\") as progress:\n            num_bytes_downloaded = response.num_bytes_downloaded\n            for chunk in response.iter_bytes():\n                download_file.write(chunk)\n                progress.update(response.num_bytes_downloaded - num_bytes_downloaded)\n                num_bytes_downloaded = response.num_bytes_downloaded\n

    Or an alternate example, this time using the rich library\u2026

    import tempfile\nimport httpx\nimport rich.progress\n\nwith tempfile.NamedTemporaryFile() as download_file:\n    url = \"https://speed.hetzner.de/100MB.bin\"\n    with httpx.stream(\"GET\", url) as response:\n        total = int(response.headers[\"Content-Length\"])\n\n        with rich.progress.Progress(\n            \"[progress.percentage]{task.percentage:>3.0f}%\",\n            rich.progress.BarColumn(bar_width=None),\n            rich.progress.DownloadColumn(),\n            rich.progress.TransferSpeedColumn(),\n        ) as progress:\n            download_task = progress.add_task(\"Download\", total=total)\n            for chunk in response.iter_bytes():\n                download_file.write(chunk)\n                progress.update(download_task, completed=response.num_bytes_downloaded)\n

    "},{"location":"advanced/clients/#monitoring-upload-progress","title":"Monitoring upload progress","text":"

    If you need to monitor upload progress of large request bodies, you can use request content generator streaming.

    For example, showing a progress bar using the tqdm library.

    import io\nimport random\n\nimport httpx\nfrom tqdm import tqdm\n\n\ndef gen():\n    \"\"\"\n    A complete example using generated random bytes.\n    You can replace `io.BytesIO` with a real file object to upload an actual file.\n    \"\"\"\n    total = 32 * 1024 * 1024  # 32 MiB\n    with tqdm(ascii=True, unit_scale=True, unit='B', unit_divisor=1024, total=total) as bar:\n        with io.BytesIO(random.randbytes(total)) as f:\n            while data := f.read(1024):\n                yield data\n                bar.update(len(data))\n\n\nhttpx.post(\"https://httpbin.org/post\", content=gen())\n

    "},{"location":"advanced/clients/#multipart-file-encoding","title":"Multipart file encoding","text":"

    As mentioned in the quickstart, multipart file encoding is available by passing a dictionary with the names of the payloads as keys, and either a tuple of elements, a file-like object, or a string as values.

    >>> with open('report.xls', 'rb') as report_file:\n...     files = {'upload-file': ('report.xls', report_file, 'application/vnd.ms-excel')}\n...     r = httpx.post(\"https://httpbin.org/post\", files=files)\n>>> print(r.text)\n{\n  ...\n  \"files\": {\n    \"upload-file\": \"<... binary content ...>\"\n  },\n  ...\n}\n

    More specifically, if a tuple is used as a value, it must have between 2 and 3 elements:

    • The first element is an optional file name which can be set to None.
    • The second element may be a file-like object or a string which will be automatically encoded in UTF-8.
    • An optional third element can be used to specify the MIME type of the file being uploaded. If not specified HTTPX will attempt to guess the MIME type based on the file name, with unknown file extensions defaulting to \"application/octet-stream\". If the file name is explicitly set to None then HTTPX will not include a content-type MIME header field.
    >>> files = {'upload-file': (None, 'text content', 'text/plain')}\n>>> r = httpx.post(\"https://httpbin.org/post\", files=files)\n>>> print(r.text)\n{\n  ...\n  \"files\": {},\n  \"form\": {\n    \"upload-file\": \"text content\"\n  },\n  ...\n}\n

    Tip

    It is safe to upload large files this way. File uploads are streaming by default, meaning that only one chunk will be loaded into memory at a time.

    Non-file data fields can be included in the multipart form by passing them to data=....
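
    For example, a minimal sketch sending an ordinary form field alongside a file (the field names and values here are illustrative)...

    >>> files = {'upload-file': ('report.txt', b'sample file content', 'text/plain')}\n>>> data = {'message': 'Hello, world!'}\n>>> r = httpx.post(\"https://httpbin.org/post\", data=data, files=files)\n>>> r.json()[\"form\"]\n{'message': 'Hello, world!'}\n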

    You can also send multiple files in one go using a multi-file form field. To do that, pass a list of (field, <file>) items instead of a dictionary, allowing you to pass multiple items with the same field name. For instance, this request sends two files, foo.png and bar.png, in one request on the images form field:

    >>> with open('foo.png', 'rb') as foo_file, open('bar.png', 'rb') as bar_file:\n...     files = [\n...         ('images', ('foo.png', foo_file, 'image/png')),\n...         ('images', ('bar.png', bar_file, 'image/png')),\n...     ]\n...     r = httpx.post(\"https://httpbin.org/post\", files=files)\n
    "},{"location":"advanced/event-hooks/","title":"Event Hooks","text":"

    HTTPX allows you to register "event hooks" with the client, which are called every time a particular type of event takes place.

    There are currently two event hooks:

    • request - Called after a request is fully prepared, but before it is sent to the network. Passed the request instance.
    • response - Called after the response has been fetched from the network, but before it is returned to the caller. Passed the response instance.

    These allow you to install client-wide functionality such as logging, monitoring or tracing.

    def log_request(request):\n    print(f\"Request event hook: {request.method} {request.url} - Waiting for response\")\n\ndef log_response(response):\n    request = response.request\n    print(f\"Response event hook: {request.method} {request.url} - Status {response.status_code}\")\n\nclient = httpx.Client(event_hooks={'request': [log_request], 'response': [log_response]})\n

    You can also use these hooks to install response processing code, such as this example, which creates a client instance that always raises httpx.HTTPStatusError on 4xx and 5xx responses.

    def raise_on_4xx_5xx(response):\n    response.raise_for_status()\n\nclient = httpx.Client(event_hooks={'response': [raise_on_4xx_5xx]})\n

    Note

    Response event hooks are called before determining if the response body should be read or not.

    If you need access to the response body inside an event hook, you'll need to call response.read(), or for AsyncClients, response.aread().
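
    For example, a minimal sketch of a response hook that reads the body before inspecting it...

    def log_response_body(response):\n    # The body has not been read yet when the hook runs, so read it explicitly.\n    response.read()\n    print(f\"Response body was {len(response.content)} bytes\")\n\nclient = httpx.Client(event_hooks={'response': [log_response_body]})\n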

    The hooks are also allowed to modify request and response objects.

    from datetime import datetime, timezone\n\ndef add_timestamp(request):\n    request.headers['x-request-timestamp'] = datetime.now(tz=timezone.utc).isoformat()\n\nclient = httpx.Client(event_hooks={'request': [add_timestamp]})\n

    Event hooks must always be set as a list of callables, and you may register multiple event hooks for each type of event.

    As well as being able to set event hooks on instantiating the client, there is also an .event_hooks property, that allows you to inspect and modify the installed hooks.

    client = httpx.Client()\nclient.event_hooks['request'] = [log_request]\nclient.event_hooks['response'] = [log_response, raise_on_4xx_5xx]\n

    Note

    If you are using HTTPX's async support, then you need to be aware that hooks registered with httpx.AsyncClient MUST be async functions, rather than plain functions.
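
    For example, a minimal sketch of the earlier logging hooks rewritten as async functions for use with httpx.AsyncClient...

    async def log_request(request):\n    print(f\"Request event hook: {request.method} {request.url} - Waiting for response\")\n\nasync def log_response(response):\n    request = response.request\n    print(f\"Response event hook: {request.method} {request.url} - Status {response.status_code}\")\n\nclient = httpx.AsyncClient(event_hooks={'request': [log_request], 'response': [log_response]})\n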

    "},{"location":"advanced/extensions/","title":"Extensions","text":"

    Request and response extensions provide an untyped space where additional information may be added.

    Extensions should be used for features that may not be available on all transports, and that do not fit neatly into the simplified request/response model that the underlying httpcore package uses as its API.

    Several extensions are supported on the request:

    # Request timeouts are actually implemented as an extension on\n# the request, ensuring that they are passed throughout the\n# entire call stack.\nclient = httpx.Client()\nresponse = client.get(\n    \"https://www.example.com\",\n    extensions={\"timeout\": {\"connect\": 5.0}}\n)\nprint(response.request.extensions[\"timeout\"])\n# {\"connect\": 5.0}\n

    And on the response:

    client = httpx.Client()\nresponse = client.get(\"https://www.example.com\")\nprint(response.extensions[\"http_version\"])  # b\"HTTP/1.1\"\n# Other server responses could have been\n# b\"HTTP/0.9\", b\"HTTP/1.0\", or b\"HTTP/1.1\"\n
    "},{"location":"advanced/extensions/#request-extensions","title":"Request Extensions","text":""},{"location":"advanced/extensions/#trace","title":"\"trace\"","text":"

    The trace extension allows a callback handler to be installed to monitor the internal flow of events within the underlying httpcore transport.

    The simplest way to explain this is with an example:

    import httpx\n\ndef log(event_name, info):\n    print(event_name, info)\n\nclient = httpx.Client()\nresponse = client.get(\"https://www.example.com/\", extensions={\"trace\": log})\n# connection.connect_tcp.started {'host': 'www.example.com', 'port': 443, 'local_address': None, 'timeout': None}\n# connection.connect_tcp.complete {'return_value': <httpcore.backends.sync.SyncStream object at 0x1093f94d0>}\n# connection.start_tls.started {'ssl_context': <ssl.SSLContext object at 0x1093ee750>, 'server_hostname': b'www.example.com', 'timeout': None}\n# connection.start_tls.complete {'return_value': <httpcore.backends.sync.SyncStream object at 0x1093f9450>}\n# http11.send_request_headers.started {'request': <Request [b'GET']>}\n# http11.send_request_headers.complete {'return_value': None}\n# http11.send_request_body.started {'request': <Request [b'GET']>}\n# http11.send_request_body.complete {'return_value': None}\n# http11.receive_response_headers.started {'request': <Request [b'GET']>}\n# http11.receive_response_headers.complete {'return_value': (b'HTTP/1.1', 200, b'OK', [(b'Age', b'553715'), (b'Cache-Control', b'max-age=604800'), (b'Content-Type', b'text/html; charset=UTF-8'), (b'Date', b'Thu, 21 Oct 2021 17:08:42 GMT'), (b'Etag', b'\"3147526947+ident\"'), (b'Expires', b'Thu, 28 Oct 2021 17:08:42 GMT'), (b'Last-Modified', b'Thu, 17 Oct 2019 07:18:26 GMT'), (b'Server', b'ECS (nyb/1DCD)'), (b'Vary', b'Accept-Encoding'), (b'X-Cache', b'HIT'), (b'Content-Length', b'1256')])}\n# http11.receive_response_body.started {'request': <Request [b'GET']>}\n# http11.receive_response_body.complete {'return_value': None}\n# http11.response_closed.started {}\n# http11.response_closed.complete {'return_value': None}\n

    The event_name and info arguments here will be one of the following:

    • {event_type}.{event_name}.started, <dictionary of keyword arguments>
    • {event_type}.{event_name}.complete, {\"return_value\": <...>}
    • {event_type}.{event_name}.failed, {\"exception\": <...>}

    Note that when using async code the handler function passed to \"trace\" must be an async def ... function.
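
    For example, a minimal sketch of the same trace logging with an async handler and httpx.AsyncClient...

    import asyncio\nimport httpx\n\nasync def log(event_name, info):\n    print(event_name, info)\n\nasync def main():\n    async with httpx.AsyncClient() as client:\n        await client.get(\"https://www.example.com/\", extensions={\"trace\": log})\n\nasyncio.run(main())\n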

    The following event types are currently exposed...

    Establishing the connection

    • \"connection.connect_tcp\"
    • \"connection.connect_unix_socket\"
    • \"connection.start_tls\"

    HTTP/1.1 events

    • \"http11.send_request_headers\"
    • \"http11.send_request_body\"
    • \"http11.receive_response\"
    • \"http11.receive_response_body\"
    • \"http11.response_closed\"

    HTTP/2 events

    • \"http2.send_connection_init\"
    • \"http2.send_request_headers\"
    • \"http2.send_request_body\"
    • \"http2.receive_response_headers\"
    • \"http2.receive_response_body\"
    • \"http2.response_closed\"

    The exact set of trace events may be subject to change across different versions of httpcore. If you need to rely on a particular set of events it is recommended that you pin installation of the package to a fixed version.

    "},{"location":"advanced/extensions/#sni_hostname","title":"\"sni_hostname\"","text":"

    The server's hostname, which is used to confirm the hostname supplied by the SSL certificate.

    If you want to connect to an explicit IP address rather than using the standard DNS hostname lookup, then you'll need to use this request extension.

    For example:

    # Connect to '185.199.108.153' but use 'www.encode.io' in the Host header,\n#\u00a0and use 'www.encode.io' when SSL verifying the server hostname.\nclient = httpx.Client()\nheaders = {\"Host\": \"www.encode.io\"}\nextensions = {\"sni_hostname\": \"www.encode.io\"}\nresponse = client.get(\n    \"https://185.199.108.153/path\",\n    headers=headers,\n    extensions=extensions\n)\n
    "},{"location":"advanced/extensions/#timeout","title":"\"timeout\"","text":"

    A dictionary of str: Optional[float] timeout values.

    May include values for 'connect', 'read', 'write', or 'pool'.

    For example:

    # Timeout if a connection takes more than 5 seconds to be established, or if\n# we are blocked waiting on the connection pool for more than 10 seconds.\nclient = httpx.Client()\nresponse = client.get(\n    \"https://www.example.com\",\n    extensions={\"timeout\": {\"connect\": 5.0, \"pool\": 10.0}}\n)\n

    This extension is how the httpx timeouts are implemented, ensuring that the timeout values are associated with the request instance and passed throughout the stack. You shouldn't typically be working with this extension directly, but use the higher level timeout API instead.

    "},{"location":"advanced/extensions/#target","title":"\"target\"","text":"

    The target that is used as the HTTP target instead of the URL path.

    This enables support for constructing requests that would otherwise be unsupported.

    • URL paths with non-standard escaping applied.
    • Forward proxy requests using an absolute URI.
    • Tunneling proxy requests using CONNECT with hostname as the target.
    • Server-wide OPTIONS * requests.

    Some examples:

    Using the 'target' extension to send requests without the standard path escaping rules...

    # Typically a request to \"https://www.example.com/test^path\" would\n# connect to \"www.example.com\" and send an HTTP/1.1 request like...\n#\n# GET /test%5Epath HTTP/1.1\n#\n# Using the target extension we can include the literal '^'...\n#\n# GET /test^path HTTP/1.1\n#\n# Note that requests must still be valid HTTP requests.\n# For example including whitespace in the target will raise a `LocalProtocolError`.\nextensions = {\"target\": b\"/test^path\"}\nresponse = httpx.get(\"https://www.example.com\", extensions=extensions)\n

    The target extension also allows server-wide OPTIONS * requests to be constructed...

    # This will send the following request...\n#\n# OPTIONS * HTTP/1.1\nextensions = {\"target\": b\"*\"}\nresponse = httpx.request(\"OPTIONS\", \"https://www.example.com\", extensions=extensions)\n
    "},{"location":"advanced/extensions/#response-extensions","title":"Response Extensions","text":""},{"location":"advanced/extensions/#http_version","title":"\"http_version\"","text":"

    The HTTP version, as bytes. For example b\"HTTP/1.1\".

    When using HTTP/1.1 the response line includes an explicit version, and the value of this key could feasibly be one of b\"HTTP/0.9\", b\"HTTP/1.0\", or b\"HTTP/1.1\".

    When using HTTP/2 there is no further response versioning included in the protocol, and the value of this key will always be b\"HTTP/2\".

    "},{"location":"advanced/extensions/#reason_phrase","title":"\"reason_phrase\"","text":"

    The reason-phrase of the HTTP response, as bytes. For example b\"OK\". Some servers may include a custom reason phrase, although this is not recommended.

    HTTP/2 onwards does not include a reason phrase on the wire.

    When no key is included, a default based on the status code may be used.
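
    As a minimal sketch (assuming a server responding over HTTP/1.1)...

    response = httpx.get(\"https://www.example.com\")\nprint(response.extensions.get(\"reason_phrase\"))  # e.g. b\"OK\"\n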

    "},{"location":"advanced/extensions/#stream_id","title":"\"stream_id\"","text":"

    When HTTP/2 is being used the \"stream_id\" response extension can be accessed to determine the ID of the data stream that the response was sent on.
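
    For example, a minimal sketch, assuming HTTP/2 support is installed (e.g. via pip install httpx[http2]) and that the server negotiates HTTP/2...

    client = httpx.Client(http2=True)\nresponse = client.get(\"https://www.example.com\")\nif response.extensions[\"http_version\"] == b\"HTTP/2\":\n    print(response.extensions[\"stream_id\"])  # The HTTP/2 stream ID, e.g. 1\n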

    "},{"location":"advanced/extensions/#network_stream","title":"\"network_stream\"","text":"

    The \"network_stream\" extension allows developers to handle HTTP CONNECT and Upgrade requests, by providing an API that steps outside the standard request/response model, and can directly read or write to the network.

    The interface provided by the network stream:

    • read(max_bytes, timeout = None) -> bytes
    • write(buffer, timeout = None)
    • close()
    • start_tls(ssl_context, server_hostname = None, timeout = None) -> NetworkStream
    • get_extra_info(info) -> Any

    This API can be used as the foundation for working with HTTP proxies, WebSocket upgrades, and other advanced use-cases.

    See the network backends documentation for more information on working directly with network streams.

    Extra network information

    The network stream abstraction also allows access to various low-level information that may be exposed by the underlying socket:

    response = httpx.get(\"https://www.example.com\")\nnetwork_stream = response.extensions[\"network_stream\"]\n\nclient_addr = network_stream.get_extra_info(\"client_addr\")\nserver_addr = network_stream.get_extra_info(\"server_addr\")\nprint(\"Client address\", client_addr)\nprint(\"Server address\", server_addr)\n

    The socket SSL information is also available through this interface, although you need to ensure that the underlying connection is still open, in order to access it...

    with httpx.stream(\"GET\", \"https://www.example.com\") as response:\n    network_stream = response.extensions[\"network_stream\"]\n\n    ssl_object = network_stream.get_extra_info(\"ssl_object\")\n    print(\"TLS version\", ssl_object.version())\n
    "},{"location":"advanced/proxies/","title":"Proxies","text":"

    HTTPX supports setting up HTTP proxies via the proxy parameter, which can be passed on client initialization or to top-level API functions like httpx.get(..., proxy=...).

    Diagram of how a proxy works (source: Wikipedia). The left hand side \"Internet\" blob may be your HTTPX client requesting example.com through a proxy."},{"location":"advanced/proxies/#http-proxies","title":"HTTP Proxies","text":"

    To route all traffic (HTTP and HTTPS) to a proxy located at http://localhost:8030, pass the proxy URL to the client...

    with httpx.Client(proxy=\"http://localhost:8030\") as client:\n    ...\n

    For more advanced use cases, pass a mounts dict. For example, to route HTTP and HTTPS requests to two different proxies, located at http://localhost:8030 and http://localhost:8031 respectively, mount a proxy transport for each scheme:

    proxy_mounts = {\n    \"http://\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n    \"https://\": httpx.HTTPTransport(proxy=\"http://localhost:8031\"),\n}\n\nwith httpx.Client(mounts=proxy_mounts) as client:\n    ...\n

    For detailed information about proxy routing, see the Routing section.

    Gotcha

    In most cases, the proxy URL for the https:// key should use the http:// scheme (that's not a typo!).

    This is because HTTP proxying requires initiating a connection with the proxy server. While it's possible that your proxy supports doing it via HTTPS, most proxies only support doing it via HTTP.

    For more information, see FORWARD vs TUNNEL.

    "},{"location":"advanced/proxies/#authentication","title":"Authentication","text":"

    Proxy credentials can be passed as the userinfo section of the proxy URL. For example:

    with httpx.Client(proxy=\"http://username:password@localhost:8030\") as client:\n    ...\n
    "},{"location":"advanced/proxies/#proxy-mechanisms","title":"Proxy mechanisms","text":"

    Note

    This section describes advanced proxy concepts and functionality.

    "},{"location":"advanced/proxies/#forward-vs-tunnel","title":"FORWARD vs TUNNEL","text":"

    In general, the flow for making an HTTP request through a proxy is as follows:

    1. The client connects to the proxy (initial connection request).
    2. The proxy transfers data to the server on your behalf.

    How exactly step 2 is performed depends on which of two proxying mechanisms is used:

    • Forwarding: the proxy makes the request for you, and sends back the response it obtained from the server.
    • Tunnelling: the proxy establishes a TCP connection to the server on your behalf, and the client reuses this connection to send the request and receive the response. This is known as an HTTP Tunnel. This mechanism is how you can access websites that use HTTPS from an HTTP proxy (the client \"upgrades\" the connection to HTTPS by performing the TLS handshake with the server over the TCP connection provided by the proxy).
    "},{"location":"advanced/proxies/#troubleshooting-proxies","title":"Troubleshooting proxies","text":"

    If you encounter issues when setting up proxies, please refer to our Troubleshooting guide.

    "},{"location":"advanced/proxies/#socks","title":"SOCKS","text":"

    In addition to HTTP proxies, httpcore also supports proxies using the SOCKS protocol. This is an optional feature that requires an additional third-party library be installed before use.

    You can install SOCKS support using pip:

    $ pip install httpx[socks]\n

    You can now configure a client to make requests via a proxy using the SOCKS protocol:

    httpx.Client(proxy='socks5://user:pass@host:port')\n
    "},{"location":"advanced/resource-limits/","title":"Resource Limits","text":"

    You can control the connection pool size using the limits keyword argument on the client. It takes instances of httpx.Limits which define:

    • max_keepalive_connections, the number of allowable keep-alive connections, or None to always allow. (Defaults to 20)
    • max_connections, the maximum number of allowable connections, or None for no limits. (Defaults to 100)
    • keepalive_expiry, the time limit on idle keep-alive connections, in seconds, or None for no limits. (Defaults to 5)
    limits = httpx.Limits(max_keepalive_connections=5, max_connections=10)\nclient = httpx.Client(limits=limits)\n
    "},{"location":"advanced/ssl/","title":"SSL","text":"

    When making a request over HTTPS, HTTPX needs to verify the identity of the requested host. To do this, it uses a bundle of SSL certificates (a.k.a. CA bundle) delivered by a trusted certificate authority (CA).

    "},{"location":"advanced/ssl/#enabling-and-disabling-verification","title":"Enabling and disabling verification","text":"

    By default httpx will verify HTTPS connections, and raise an error for invalid SSL cases...

    >>> httpx.get(\"https://expired.badssl.com/\")\nhttpx.ConnectError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: certificate has expired (_ssl.c:997)\n

    You can disable SSL verification completely and allow insecure requests...

    >>> httpx.get(\"https://expired.badssl.com/\", verify=False)\n<Response [200 OK]>\n
    "},{"location":"advanced/ssl/#configuring-client-instances","title":"Configuring client instances","text":"

    If you're using a Client() instance you should pass any verify=<...> configuration when instantiating the client.

    By default the certifi CA bundle is used for SSL verification.

    For more complex configurations you can pass an SSL Context instance...

    import certifi\nimport httpx\nimport ssl\n\n# This SSL context is equivalent to the default `verify=True`.\nctx = ssl.create_default_context(cafile=certifi.where())\nclient = httpx.Client(verify=ctx)\n

    Using the truststore package to support system certificate stores...

    import ssl\nimport truststore\nimport httpx\n\n# Use system certificate stores.\nctx = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)\nclient = httpx.Client(verify=ctx)\n

    Loading an alternative certificate verification store using the standard SSL context API...

    import httpx\nimport ssl\n\n# Use an explicitly configured certificate store.\nctx = ssl.create_default_context(cafile=\"path/to/certs.pem\")  # Either cafile or capath.\nclient = httpx.Client(verify=ctx)\n
    "},{"location":"advanced/ssl/#client-side-certificates","title":"Client side certificates","text":"

    Client side certificates allow a remote server to verify the client. They tend to be used within private organizations to authenticate requests to remote servers.

    You can specify client-side certificates, using the .load_cert_chain() API...

    ctx = ssl.create_default_context()\nctx.load_cert_chain(certfile=\"path/to/client.pem\")  # Optionally also keyfile or password.\nclient = httpx.Client(verify=ctx)\n
    "},{"location":"advanced/ssl/#working-with-ssl_cert_file-and-ssl_cert_dir","title":"Working with SSL_CERT_FILE and SSL_CERT_DIR","text":"

    httpx does respect the SSL_CERT_FILE and SSL_CERT_DIR environment variables by default. For details, refer to the section on the environment variables page.
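
    For example, a minimal sketch of pointing verification at a custom CA bundle via the environment (the path and script name here are placeholders)...

    $ SSL_CERT_FILE=/path/to/ca-bundle.pem python my_script.py\n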

    "},{"location":"advanced/ssl/#making-https-requests-to-a-local-server","title":"Making HTTPS requests to a local server","text":"

    When making requests to local servers, such as a development server running on localhost, you will typically be using unencrypted HTTP connections.

    If you do need to make HTTPS connections to a local server, for example to test an HTTPS-only service, you will need to create and use your own certificates. Here's one way to do it...

    1. Use trustme to generate a pair of server key/cert files, and a client cert file.
    2. Pass the server key/cert files when starting your local server. (This depends on the particular web server you're using. For example, Uvicorn provides the --ssl-keyfile and --ssl-certfile options.)
    3. Configure httpx to use the certificates stored in client.pem.
    ctx = ssl.create_default_context(cafile=\"client.pem\")\nclient = httpx.Client(verify=ctx)\n
    "},{"location":"advanced/text-encodings/","title":"Text Encodings","text":"

    When accessing response.text, we need to decode the response bytes into a unicode text representation.

    By default httpx will use \"charset\" information included in the response Content-Type header to determine how the response bytes should be decoded into text.

    In cases where no charset information is included on the response, the default behaviour is to assume \"utf-8\" encoding, which is by far the most widely used text encoding on the internet.

    "},{"location":"advanced/text-encodings/#using-the-default-encoding","title":"Using the default encoding","text":"

    To understand this better let's start by looking at the default behaviour for text decoding...

    import httpx\n# Instantiate a client with the default configuration.\nclient = httpx.Client()\n# Using the client...\nresponse = client.get(...)\nprint(response.encoding)  # This will either print the charset given in\n                          # the Content-Type charset, or else \"utf-8\".\nprint(response.text)  # The text will either be decoded with the Content-Type\n                      # charset, or using \"utf-8\".\n

    This is normally absolutely fine. Most servers will respond with a properly formatted Content-Type header, including a charset encoding. And in most cases where no charset encoding is included, UTF-8 is very likely to be used, since it is so widely adopted.

    "},{"location":"advanced/text-encodings/#using-an-explicit-encoding","title":"Using an explicit encoding","text":"

    In some cases we might be making requests to a site where no character set information is being set explicitly by the server, but we know what the encoding is. In this case it's best to set the default encoding explicitly on the client.

    import httpx\n# Instantiate a client with a Japanese character set as the default encoding.\nclient = httpx.Client(default_encoding=\"shift-jis\")\n# Using the client...\nresponse = client.get(...)\nprint(response.encoding)  # This will either print the charset given in\n                          # the Content-Type charset, or else \"shift-jis\".\nprint(response.text)  # The text will either be decoded with the Content-Type\n                      # charset, or using \"shift-jis\".\n
    "},{"location":"advanced/text-encodings/#using-auto-detection","title":"Using auto-detection","text":"

    In cases where the server is not reliably including character set information, and where we don't know what encoding is being used, we can enable auto-detection to make a best-guess attempt when decoding from bytes to text.

    To use auto-detection you need to set the default_encoding argument to a callable instead of a string. This callable should be a function which takes the input bytes as an argument and returns the character set to use for decoding those bytes to text.

    There are two widely used Python packages which both handle this functionality:

    • chardet - This is a well established package, and is a port of the auto-detection code in Mozilla.
    • charset-normalizer - A newer package, motivated by chardet, with a different approach.

    Let's take a look at installing autodetection using one of these packages...

    $ pip install httpx\n$ pip install chardet\n

    Once chardet is installed, we can configure a client to use character-set autodetection.

    import httpx\nimport chardet\n\ndef autodetect(content):\n    return chardet.detect(content).get(\"encoding\")\n\n# Using a client with character-set autodetection enabled.\nclient = httpx.Client(default_encoding=autodetect)\nresponse = client.get(...)\nprint(response.encoding)  # This will either print the charset given in\n                          # the Content-Type charset, or else the auto-detected\n                          # character set.\nprint(response.text)\n
    "},{"location":"advanced/timeouts/","title":"Timeouts","text":"

    HTTPX is careful to enforce timeouts everywhere by default.

    The default behavior is to raise a TimeoutException after 5 seconds of network inactivity.

    "},{"location":"advanced/timeouts/#setting-and-disabling-timeouts","title":"Setting and disabling timeouts","text":"

    You can set timeouts for an individual request:

    # Using the top-level API:\nhttpx.get('http://example.com/api/v1/example', timeout=10.0)\n\n# Using a client instance:\nwith httpx.Client() as client:\n    client.get(\"http://example.com/api/v1/example\", timeout=10.0)\n

    Or disable timeouts for an individual request:

    # Using the top-level API:\nhttpx.get('http://example.com/api/v1/example', timeout=None)\n\n# Using a client instance:\nwith httpx.Client() as client:\n    client.get(\"http://example.com/api/v1/example\", timeout=None)\n
    "},{"location":"advanced/timeouts/#setting-a-default-timeout-on-a-client","title":"Setting a default timeout on a client","text":"

    You can set a timeout on a client instance, which results in the given timeout being used as the default for requests made with this client:

    client = httpx.Client()              # Use a default 5s timeout everywhere.\nclient = httpx.Client(timeout=10.0)  # Use a default 10s timeout everywhere.\nclient = httpx.Client(timeout=None)  # Disable all timeouts by default.\n
    "},{"location":"advanced/timeouts/#fine-tuning-the-configuration","title":"Fine tuning the configuration","text":"

    HTTPX also allows you to specify the timeout behavior in more fine-grained detail.

    There are four different types of timeouts that may occur. These are connect, read, write, and pool timeouts.

    • The connect timeout specifies the maximum amount of time to wait until a socket connection to the requested host is established. If HTTPX is unable to connect within this time frame, a ConnectTimeout exception is raised.
    • The read timeout specifies the maximum duration to wait for a chunk of data to be received (for example, a chunk of the response body). If HTTPX is unable to receive data within this time frame, a ReadTimeout exception is raised.
    • The write timeout specifies the maximum duration to wait for a chunk of data to be sent (for example, a chunk of the request body). If HTTPX is unable to send data within this time frame, a WriteTimeout exception is raised.
    • The pool timeout specifies the maximum duration to wait for acquiring a connection from the connection pool. If HTTPX is unable to acquire a connection within this time frame, a PoolTimeout exception is raised. A related configuration here is the maximum number of allowable connections in the connection pool, which is configured by the limits argument.

    You can configure the timeout behavior for any of these values...

    # A client with a 60s timeout for connecting, and a 10s timeout elsewhere.\ntimeout = httpx.Timeout(10.0, connect=60.0)\nclient = httpx.Client(timeout=timeout)\n\nresponse = client.get('http://example.com/')\n
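
    Or, as a sketch, setting each of the four timeout types explicitly:

    # 5s to connect, 10s for reads, 10s for writes, 2s to acquire a pooled connection.\ntimeout = httpx.Timeout(connect=5.0, read=10.0, write=10.0, pool=2.0)\nclient = httpx.Client(timeout=timeout)\n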
    "},{"location":"advanced/transports/","title":"Transports","text":"

    HTTPX's Client also accepts a transport argument. This argument allows you to provide a custom Transport object that will be used to perform the actual sending of the requests.

    "},{"location":"advanced/transports/#http-transport","title":"HTTP Transport","text":"

    For some advanced configuration you might need to instantiate a transport class directly, and pass it to the client instance. One example is the local_address configuration which is only available via this low-level API.

    >>> import httpx\n>>> transport = httpx.HTTPTransport(local_address=\"0.0.0.0\")\n>>> client = httpx.Client(transport=transport)\n

    Connection retries are also available via this interface. Requests will be retried the given number of times if an httpx.ConnectError or httpx.ConnectTimeout occurs, which helps smooth over flaky network connections. If you need other forms of retry behavior, such as handling read/write errors or reacting to 503 Service Unavailable responses, consider a general-purpose tool such as tenacity (see the sketch further below).

    >>> import httpx\n>>> transport = httpx.HTTPTransport(retries=1)\n>>> client = httpx.Client(transport=transport)\n
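
    As an illustration, here's a sketch using the third-party tenacity package to retry on read and write errors with exponential backoff. The function name and the retry policy are placeholders to adapt to your own needs.

    import httpx\nfrom tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential\n\nclient = httpx.Client()\n\n# Retry up to 3 times on read/write errors, backing off exponentially between attempts.\n@retry(\n    retry=retry_if_exception_type((httpx.ReadError, httpx.WriteError)),\n    stop=stop_after_attempt(3),\n    wait=wait_exponential(multiplier=0.5),\n)\ndef get_with_retries(url):\n    return client.get(url)\n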

    Similarly, instantiating a transport directly provides a uds option for connecting via a Unix Domain Socket that is only available via this low-level API:

    >>> import httpx\n>>> # Connect to the Docker API via a Unix Socket.\n>>> transport = httpx.HTTPTransport(uds=\"/var/run/docker.sock\")\n>>> client = httpx.Client(transport=transport)\n>>> response = client.get(\"http://docker/info\")\n>>> response.json()\n{\"ID\": \"...\", \"Containers\": 4, \"Images\": 74, ...}\n
    "},{"location":"advanced/transports/#wsgi-transport","title":"WSGI Transport","text":"

    You can configure an httpx client to call directly into a Python web application using the WSGI protocol.

    This is particularly useful for two main use-cases:

    • Using httpx as a client inside test cases.
    • Mocking out external services during tests or in dev or staging environments.
    "},{"location":"advanced/transports/#example","title":"Example","text":"

    Here's an example of integrating against a Flask application:

    from flask import Flask\nimport httpx\n\n\napp = Flask(__name__)\n\n@app.route(\"/\")\ndef hello():\n    return \"Hello World!\"\n\ntransport = httpx.WSGITransport(app=app)\nwith httpx.Client(transport=transport, base_url=\"http://testserver\") as client:\n    r = client.get(\"/\")\n    assert r.status_code == 200\n    assert r.text == \"Hello World!\"\n
    "},{"location":"advanced/transports/#configuration","title":"Configuration","text":"

    For some more complex cases you might need to customize the WSGI transport. This allows you to:

    • Inspect 500 error responses rather than raise exceptions by setting raise_app_exceptions=False.
    • Mount the WSGI application at a subpath by setting script_name (WSGI).
    • Use a given client address for requests by setting remote_addr (WSGI).

    For example:

    # Instantiate a client that makes WSGI requests with a client IP of \"1.2.3.4\".\ntransport = httpx.WSGITransport(app=app, remote_addr=\"1.2.3.4\")\nwith httpx.Client(transport=transport, base_url=\"http://testserver\") as client:\n    ...\n
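
    Or, as a sketch, mounting the application at a subpath via script_name:

    # Present the WSGI app as mounted at the \"/api\" subpath (sets the SCRIPT_NAME environ key).\ntransport = httpx.WSGITransport(app=app, script_name=\"/api\")\nwith httpx.Client(transport=transport, base_url=\"http://testserver\") as client:\n    ...\n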
    "},{"location":"advanced/transports/#asgi-transport","title":"ASGI Transport","text":"

    You can configure an httpx client to call directly into an async Python web application using the ASGI protocol.

    This is particularly useful for two main use-cases:

    • Using httpx as a client inside test cases.
    • Mocking out external services during tests or in dev or staging environments.
    "},{"location":"advanced/transports/#example_1","title":"Example","text":"

    Let's take this Starlette application as an example:

    from starlette.applications import Starlette\nfrom starlette.responses import HTMLResponse\nfrom starlette.routing import Route\n\n\nasync def hello(request):\n    return HTMLResponse(\"Hello World!\")\n\n\napp = Starlette(routes=[Route(\"/\", hello)])\n

    We can make requests directly against the application, like so:

    transport = httpx.ASGITransport(app=app)\n\nasync with httpx.AsyncClient(transport=transport, base_url=\"http://testserver\") as client:\n    r = await client.get(\"/\")\n    assert r.status_code == 200\n    assert r.text == \"Hello World!\"\n
    "},{"location":"advanced/transports/#configuration_1","title":"Configuration","text":"

    For some more complex cases you might need to customize the ASGI transport. This allows you to:

    • Inspect 500 error responses rather than raise exceptions by setting raise_app_exceptions=False.
    • Mount the ASGI application at a subpath by setting root_path.
    • Use a given client address for requests by setting client.

    For example:

    # Instantiate a client that makes ASGI requests with a client IP of \"1.2.3.4\",\n# on port 123.\ntransport = httpx.ASGITransport(app=app, client=(\"1.2.3.4\", 123))\nasync with httpx.AsyncClient(transport=transport, base_url=\"http://testserver\") as client:\n    ...\n

    See the ASGI documentation for more details on the client and root_path keys.
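
    Or, as a sketch, mounting the application at a subpath via root_path:

    # Present the ASGI app as mounted at the \"/api\" subpath (sets the \"root_path\" scope key).\ntransport = httpx.ASGITransport(app=app, root_path=\"/api\")\nasync with httpx.AsyncClient(transport=transport, base_url=\"http://testserver\") as client:\n    ...\n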

    "},{"location":"advanced/transports/#asgi-startup-and-shutdown","title":"ASGI startup and shutdown","text":"

    It is not in the scope of HTTPX to trigger ASGI lifespan events of your app.

    However, it is suggested to use LifespanManager from asgi-lifespan together with AsyncClient, as sketched below.
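
    A minimal sketch, assuming the Starlette app from the example above and the third-party asgi-lifespan package:

    import httpx\nfrom asgi_lifespan import LifespanManager\n\nasync def main():\n    # Startup events run on entering the context, shutdown events on leaving it.\n    async with LifespanManager(app):\n        transport = httpx.ASGITransport(app=app)\n        async with httpx.AsyncClient(transport=transport, base_url=\"http://testserver\") as client:\n            r = await client.get(\"/\")\n            assert r.status_code == 200\n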

    "},{"location":"advanced/transports/#custom-transports","title":"Custom transports","text":"

    A transport instance must implement the low-level Transport API which deals with sending a single request, and returning a response. You should either subclass httpx.BaseTransport to implement a transport to use with Client, or subclass httpx.AsyncBaseTransport to implement a transport to use with AsyncClient.

    At the layer of the transport API we're using the familiar Request and Response models.

    See the handle_request and handle_async_request docstrings for more details on the specifics of the Transport API.

    A complete example of a custom transport implementation would be:

    import httpx\n\nclass HelloWorldTransport(httpx.BaseTransport):\n    \"\"\"\n    A mock transport that always returns a JSON \"Hello, world!\" response.\n    \"\"\"\n\n    def handle_request(self, request):\n        return httpx.Response(200, json={\"text\": \"Hello, world!\"})\n

    Or this example, which mounts a custom transport against the http:// scheme so that plain http:// requests are always redirected to https://.

    class HTTPSRedirect(httpx.BaseTransport):\n    \"\"\"\n    A transport that always redirects to HTTPS.\n    \"\"\"\n    def handle_request(self, request):\n        url = request.url.copy_with(scheme=\"https\")\n        return httpx.Response(303, headers={\"Location\": str(url)})\n\n# A client where any `http` requests are always redirected to `https`\nclient = httpx.Client(mounts={\n    'http://': HTTPSRedirect(),\n    'https://': httpx.HTTPTransport(),\n})\n

    A useful pattern here is custom transport classes that wrap the default HTTP implementation. For example...

    class DebuggingTransport(httpx.BaseTransport):\n    def __init__(self, **kwargs):\n        self._wrapper = httpx.HTTPTransport(**kwargs)\n\n    def handle_request(self, request):\n        print(f\">>> {request}\")\n        response = self._wrapper.handle_request(request)\n        print(f\"<<< {response}\")\n        return response\n\n    def close(self):\n        self._wrapper.close()\n\ntransport = DebuggingTransport()\nclient = httpx.Client(transport=transport)\n

    Here's another case, where we're using a round-robin across a number of different proxies...

    class ProxyRoundRobin(httpx.BaseTransport):\n    def __init__(self, proxies, **kwargs):\n        self._transports = [\n            httpx.HTTPTransport(proxy=proxy, **kwargs)\n            for proxy in proxies\n        ]\n        self._idx = 0\n\n    def handle_request(self, request):\n        transport = self._transports[self._idx]\n        self._idx = (self._idx + 1) % len(self._transports)\n        return transport.handle_request(request)\n\n    def close(self):\n        for transport in self._transports:\n            transport.close()\n\nproxies = [\n    httpx.Proxy(\"http://127.0.0.1:8081\"),\n    httpx.Proxy(\"http://127.0.0.1:8082\"),\n    httpx.Proxy(\"http://127.0.0.1:8083\"),\n]\ntransport = ProxyRoundRobin(proxies=proxies)\nclient = httpx.Client(transport=transport)\n
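
    The examples above all subclass httpx.BaseTransport for use with Client. A transport for AsyncClient follows the same shape, but subclasses httpx.AsyncBaseTransport and implements handle_async_request instead. A minimal sketch:

    import httpx\n\nclass AsyncHelloWorldTransport(httpx.AsyncBaseTransport):\n    \"\"\"\n    A mock async transport that always returns a JSON \"Hello, world!\" response.\n    \"\"\"\n\n    async def handle_async_request(self, request):\n        return httpx.Response(200, json={\"text\": \"Hello, world!\"})\n\ntransport = AsyncHelloWorldTransport()\nclient = httpx.AsyncClient(transport=transport)\n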
    "},{"location":"advanced/transports/#mock-transports","title":"Mock transports","text":"

    During testing it can often be useful to be able to mock out a transport, and return pre-determined responses, rather than making actual network requests.

    The httpx.MockTransport class accepts a handler function, which can be used to map requests onto pre-determined responses:

    import os\n\nimport httpx\n\ndef handler(request):\n    return httpx.Response(200, json={\"text\": \"Hello, world!\"})\n\n\n# Switch to a mock transport, if the TESTING environment variable is set.\nif os.environ.get('TESTING', '').upper() == \"TRUE\":\n    transport = httpx.MockTransport(handler)\nelse:\n    transport = httpx.HTTPTransport()\n\nclient = httpx.Client(transport=transport)\n

    For more advanced use-cases you might want to take a look at either the third-party mocking library, RESPX, or the pytest-httpx library.
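
    Purely as an illustration, here's a sketch of what a RESPX-based test can look like; treat the exact calls as an assumption and check the RESPX documentation for the authoritative interface.

    import httpx\nimport respx\n\n@respx.mock\ndef test_example():\n    respx.get(\"https://example.org/\").mock(return_value=httpx.Response(200, json={\"text\": \"Hello, world!\"}))\n    response = httpx.get(\"https://example.org/\")\n    assert response.json() == {\"text\": \"Hello, world!\"}\n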

    "},{"location":"advanced/transports/#mounting-transports","title":"Mounting transports","text":"

    You can also mount transports against given schemes or domains, to control which transport an outgoing request should be routed via, with the same style used for specifying proxy routing.

    import httpx\n\nclass HTTPSRedirectTransport(httpx.BaseTransport):\n    \"\"\"\n    A transport that always redirects to HTTPS.\n    \"\"\"\n\n    def handle_request(self, request):\n        url = request.url.copy_with(scheme=\"https\")\n        return httpx.Response(303, headers={\"Location\": str(url)})\n\n\n# A client where any `http` requests are always redirected to `https`\nmounts = {'http://': HTTPSRedirectTransport()}\nclient = httpx.Client(mounts=mounts)\n

    A couple of other sketches of how you might take advantage of mounted transports...

    Enabling HTTP/2 everywhere, except on a given domain and its subdomains...

    mounts = {\n    \"all://\": httpx.HTTPTransport(http2=True),\n    \"all://*example.org\": httpx.HTTPTransport()\n}\nclient = httpx.Client(mounts=mounts)\n

    Mocking requests to a given domain:

    # All requests to \"example.org\" should be mocked out.\n# Other requests occur as usual.\ndef handler(request):\n    return httpx.Response(200, json={\"text\": \"Hello, World!\"})\n\nmounts = {\"all://example.org\": httpx.MockTransport(handler)}\nclient = httpx.Client(mounts=mounts)\n

    Adding support for custom schemes:

    # Support URLs like \"file:///Users/sylvia_green/websites/new_client/index.html\"\nmounts = {\"file://\": FileSystemTransport()}\nclient = httpx.Client(mounts=mounts)\n
    "},{"location":"advanced/transports/#routing","title":"Routing","text":"

    HTTPX provides a powerful mechanism for routing requests, allowing you to write complex rules that specify which transport should be used for each request.

    The mounts dictionary maps URL patterns to HTTP transports. HTTPX matches requested URLs against URL patterns to decide which transport should be used, if any. Matching is done from most specific URL patterns (e.g. https://<domain>:<port>) to least specific ones (e.g. https://).

    HTTPX supports routing requests based on scheme, domain, port, or a combination of these.

    "},{"location":"advanced/transports/#wildcard-routing","title":"Wildcard routing","text":"

    Route everything through a transport...

    mounts = {\n    \"all://\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n
    "},{"location":"advanced/transports/#scheme-routing","title":"Scheme routing","text":"

    Route HTTP requests through one transport, and HTTPS requests through another...

    mounts = {\n    \"http://\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n    \"https://\": httpx.HTTPTransport(proxy=\"http://localhost:8031\"),\n}\n
    "},{"location":"advanced/transports/#domain-routing","title":"Domain routing","text":"

    Proxy all requests on domain \"example.com\", let other requests pass through...

    mounts = {\n    \"all://example.com\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n

    Proxy HTTP requests on domain \"example.com\", let HTTPS and other requests pass through...

    mounts = {\n    \"http://example.com\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n

    Proxy all requests to \"example.com\" and its subdomains, let other requests pass through...

    mounts = {\n    \"all://*example.com\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n

    Proxy all requests to strict subdomains of \"example.com\", let \"example.com\" and other requests pass through...

    mounts = {\n    \"all://*.example.com\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n
    "},{"location":"advanced/transports/#port-routing","title":"Port routing","text":"

    Proxy HTTPS requests on port 1234 to \"example.com\"...

    mounts = {\n    \"https://example.com:1234\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n

    Proxy all requests on port 1234...

    mounts = {\n    \"all://*:1234\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n}\n
    "},{"location":"advanced/transports/#no-proxy-support","title":"No-proxy support","text":"

    It is also possible to define requests that shouldn't be routed through the transport.

    To do so, map the URL pattern to None rather than to a transport. For example...

    mounts = {\n    # Route requests through a proxy by default...\n    \"all://\": httpx.HTTPTransport(proxy=\"http://localhost:8031\"),\n    # Except those for \"example.com\".\n    \"all://example.com\": None,\n}\n
    "},{"location":"advanced/transports/#complex-configuration-example","title":"Complex configuration example","text":"

    You can combine the routing features outlined above to build complex proxy routing configurations. For example...

    mounts = {\n    # Route all traffic through a proxy by default...\n    \"all://\": httpx.HTTPTransport(proxy=\"http://localhost:8030\"),\n    # But don't use proxies for HTTPS requests to \"domain.io\"...\n    \"https://domain.io\": None,\n    # And use another proxy for requests to \"example.com\" and its subdomains...\n    \"all://*example.com\": httpx.HTTPTransport(proxy=\"http://localhost:8031\"),\n    # And yet another proxy if HTTP is used,\n    # and the \"internal\" subdomain on port 5550 is requested...\n    \"http://internal.example.com:5550\": httpx.HTTPTransport(proxy=\"http://localhost:8032\"),\n}\n
    "},{"location":"advanced/transports/#environment-variables","title":"Environment variables","text":"

    There are also environment variables that can be used to configure the dictionary of client mounts, providing a way to set up HTTP proxying for clients.

    See documentation on HTTP_PROXY, HTTPS_PROXY, ALL_PROXY and NO_PROXY for more information.
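
    For example, a minimal sketch of how clients interact with these variables (they are honoured by default, and trust_env=False opts out):

    # With HTTP_PROXY / HTTPS_PROXY / ALL_PROXY / NO_PROXY set in the environment,\n# a default client picks the proxy configuration up automatically...\nclient = httpx.Client()\n\n# ...while trust_env=False ignores environment-based configuration entirely.\nclient = httpx.Client(trust_env=False)\n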

    "}]} \ No newline at end of file diff --git a/sitemap.xml b/sitemap.xml index 878d8acc..4e0f265c 100644 --- a/sitemap.xml +++ b/sitemap.xml @@ -2,94 +2,94 @@ https://www.python-httpx.org/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/api/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/async/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/code_of_conduct/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/compatibility/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/contributing/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/environment_variables/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/exceptions/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/http2/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/logging/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/quickstart/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/third_party_packages/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/troubleshooting/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/advanced/authentication/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/advanced/clients/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/advanced/event-hooks/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/advanced/extensions/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/advanced/proxies/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/advanced/resource-limits/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/advanced/ssl/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/advanced/text-encodings/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/advanced/timeouts/ - 2024-12-06 + 2025-09-11 https://www.python-httpx.org/advanced/transports/ - 2024-12-06 + 2025-09-11 \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz index 4e556eff..25a36fda 100644 Binary files a/sitemap.xml.gz and b/sitemap.xml.gz differ diff --git a/third_party_packages/index.html b/third_party_packages/index.html index 8bbbedc2..192d6018 100644 --- a/third_party_packages/index.html +++ b/third_party_packages/index.html @@ -914,54 +914,72 @@ + + + + +
  • + + + Libraries with HTTPX support + + + + + +
  • + +
  • + + + Libraries with HTTPX support + + + +