Authentication can either be included on a per-request basis...
+>>> auth = httpx.BasicAuth(username="username", password="secret")
+>>> client = httpx.Client()
+>>> response = client.get("https://www.example.com/", auth=auth)
+Or configured on the client instance, ensuring that all outgoing requests will include authentication credentials...
+>>> auth = httpx.BasicAuth(username="username", password="secret")
+>>> client = httpx.Client(auth=auth)
+>>> response = client.get("https://www.example.com/")
+HTTP basic authentication is an unencrypted authentication scheme that uses a simple encoding of the username and password in the request Authorization header. Since it is unencrypted it should typically only be used over https, although this is not strictly enforced.
>>> auth = httpx.BasicAuth(username="finley", password="secret")
+>>> client = httpx.Client(auth=auth)
+>>> response = client.get("https://httpbin.org/basic-auth/finley/secret")
+>>> response
+<Response [200 OK]>
HTTP digest authentication is a challenge-response authentication scheme. Unlike basic authentication, the credentials are never sent in cleartext (a hashed digest is sent instead), so it can be used over unencrypted http connections. It requires an additional round-trip in order to negotiate the authentication.
>>> auth = httpx.DigestAuth(username="olivia", password="secret")
+>>> client = httpx.Client(auth=auth)
+>>> response = client.get("https://httpbin.org/digest-auth/auth/olivia/secret")
+>>> response
+<Response [200 OK]>
+>>> response.history
+[<Response [401 UNAUTHORIZED]>]
+HTTPX can be configured to use a .netrc config file for authentication.
The .netrc config file allows authentication credentials to be associated with specified hosts. When a request is made to a host that is found in the netrc file, the username and password will be included using HTTP basic authentication.
Example .netrc file:
machine example.org
+login example-username
+password example-password
+
+machine python-httpx.org
+login other-username
+password other-password
+Some examples of configuring .netrc authentication with httpx.
Use the default .netrc file in the user's home directory:
>>> auth = httpx.NetRCAuth()
+>>> client = httpx.Client(auth=auth)
+Use an explicit path to a .netrc file:
>>> auth = httpx.NetRCAuth(file="/path/to/.netrc")
+>>> client = httpx.Client(auth=auth)
+Use the NETRC environment variable to configure a path to the .netrc file,
or fall back to the default.
>>> auth = httpx.NetRCAuth(file=os.environ.get("NETRC"))
+>>> client = httpx.Client(auth=auth)
+The NetRCAuth() class uses the netrc.netrc() function from the Python standard library. See the documentation there for more details on exceptions that may be raised if the .netrc file is not found, or cannot be parsed.
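For instance, a minimal sketch of handling those exceptions explicitly (falling back to unauthenticated requests here is purely an illustrative choice):

import netrc

import httpx

try:
    auth = httpx.NetRCAuth()
except FileNotFoundError:
    # No default .netrc file was found; fall back to unauthenticated requests.
    auth = None
except netrc.NetrcParseError:
    # A .netrc file exists but could not be parsed.
    auth = None

client = httpx.Client(auth=auth)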
When issuing requests or instantiating a client, the auth argument can be used to pass an authentication scheme to use. The auth argument may be one of the following...
- A two-tuple of username/password, to be used with basic authentication.
- An instance of httpx.BasicAuth(), httpx.DigestAuth(), or httpx.NetRCAuth().
- A subclass of httpx.Auth.

The most involved of these is the last, which allows you to create authentication flows involving one or more requests. A subclass of httpx.Auth should implement def auth_flow(request), and yield any requests that need to be made...
class MyCustomAuth(httpx.Auth):
+ def __init__(self, token):
+ self.token = token
+
+ def auth_flow(self, request):
+ # Send the request, with a custom `X-Authentication` header.
+ request.headers['X-Authentication'] = self.token
+ yield request
+If the auth flow requires more than one request, you can issue multiple yields, and obtain the response in each case...
+class MyCustomAuth(httpx.Auth):
+ def __init__(self, token):
+ self.token = token
+
+ def auth_flow(self, request):
+ response = yield request
+ if response.status_code == 401:
+ # If the server issues a 401 response then resend the request,
+ # with a custom `X-Authentication` header.
+ request.headers['X-Authentication'] = self.token
+ yield request
+Custom authentication classes are designed to not perform any I/O, so that they may be used with both sync and async client instances. If you are implementing an authentication scheme that requires the request body, then you need to indicate this on the class using a requires_request_body property.
You will then be able to access request.content inside the .auth_flow() method.
class MyCustomAuth(httpx.Auth):
+ requires_request_body = True
+
+ def __init__(self, token):
+ self.token = token
+
+ def auth_flow(self, request):
+ response = yield request
+ if response.status_code == 401:
+ # If the server issues a 401 response then resend the request,
+ # with a custom `X-Authentication` header.
+ request.headers['X-Authentication'] = self.sign_request(...)
+ yield request
+
+ def sign_request(self, request):
+ # Create a request signature, based on `request.method`, `request.url`,
+ # `request.headers`, and `request.content`.
+ ...
+Similarly, if you are implementing a scheme that requires access to the response body, then use the requires_response_body property. You will then be able to access response body properties and methods such as response.content, response.text, response.json(), etc.
class MyCustomAuth(httpx.Auth):
+ requires_response_body = True
+
+ def __init__(self, access_token, refresh_token, refresh_url):
+ self.access_token = access_token
+ self.refresh_token = refresh_token
+ self.refresh_url = refresh_url
+
+ def auth_flow(self, request):
+ request.headers["X-Authentication"] = self.access_token
+ response = yield request
+
+ if response.status_code == 401:
+ # If the server issues a 401 response, then issue a request to
+ # refresh tokens, and resend the request.
+ refresh_response = yield self.build_refresh_request()
+ self.update_tokens(refresh_response)
+
+ request.headers["X-Authentication"] = self.access_token
+ yield request
+
+ def build_refresh_request(self):
+ # Return an `httpx.Request` for refreshing tokens.
+ ...
+
+ def update_tokens(self, response):
+ # Update the `.access_token` and `.refresh_token` tokens
+ # based on a refresh response.
+ data = response.json()
+ ...
+If you do need to perform I/O other than HTTP requests, such as accessing a disk-based cache, or you need to use concurrency primitives, such as locks, then you should override .sync_auth_flow() and .async_auth_flow() (instead of .auth_flow()). The former will be used by httpx.Client, while the latter will be used by httpx.AsyncClient.
import asyncio
+import threading
+import httpx
+
+
+class MyCustomAuth(httpx.Auth):
+ def __init__(self):
+ self._sync_lock = threading.RLock()
+ self._async_lock = asyncio.Lock()
+
+ def sync_get_token(self):
+ with self._sync_lock:
+ ...
+
+ def sync_auth_flow(self, request):
+ token = self.sync_get_token()
+ request.headers["Authorization"] = f"Token {token}"
+ yield request
+
+ async def async_get_token(self):
+ async with self._async_lock:
+ ...
+
+ async def async_auth_flow(self, request):
+ token = await self.async_get_token()
+ request.headers["Authorization"] = f"Token {token}"
+ yield request
If you only want to support one of the two methods, then you should still override the unsupported one, but have it raise an explicit RuntimeError.
import httpx
+import sync_only_library
+
+
+class MyCustomAuth(httpx.Auth):
+ def sync_auth_flow(self, request):
+ token = sync_only_library.get_token(...)
+ request.headers["Authorization"] = f"Token {token}"
+ yield request
+
+ async def async_auth_flow(self, request):
+ raise RuntimeError("Cannot use a sync authentication class with httpx.AsyncClient")
+Hint
+If you are coming from Requests, httpx.Client() is what you can use instead of requests.Session().
TL;DR
+If you do anything more than experimentation, one-off scripts, or prototypes, then you should use a Client instance.
More efficient usage of network resources
+When you make requests using the top-level API as documented in the Quickstart guide, HTTPX has to establish a new connection for every single request (connections are not reused). As the number of requests to a host increases, this quickly becomes inefficient.
+On the other hand, a Client instance uses HTTP connection pooling. This means that when you make several requests to the same host, the Client will reuse the underlying TCP connection, instead of recreating one for every single request.
This can bring significant performance improvements compared to using the top-level API, including:

- Reduced latency across requests (no handshaking).
- Reduced CPU usage and round-trips.
- Reduced network congestion.

Extra features

Client instances also support features that aren't available at the top-level API, such as:

- Cookie persistence across requests.
- Applying configuration across all outgoing requests.
- Sending requests through HTTP proxies.
- Using HTTP/2.
The other sections on this page go into further detail about what you can do with a Client instance.
The recommended way to use a Client is as a context manager. This will ensure that connections are properly cleaned up when leaving the with block:
with httpx.Client() as client:
+ ...
Alternatively, you can explicitly close the connection pool without using a with block, by calling .close():
client = httpx.Client()
+try:
+ ...
+finally:
+ client.close()
+Once you have a Client, you can send requests using .get(), .post(), etc. For example:
>>> with httpx.Client() as client:
+... r = client.get('https://example.com')
+...
+>>> r
+<Response [200 OK]>
+These methods accept the same arguments as httpx.get(), httpx.post(), etc. This means that all features documented in the Quickstart guide are also available at the client level.
For example, to send a request with custom headers:
+>>> with httpx.Client() as client:
+... headers = {'X-Custom': 'value'}
+... r = client.get('https://example.com', headers=headers)
+...
+>>> r.request.headers['X-Custom']
+'value'
+Clients allow you to apply configuration to all outgoing requests by passing parameters to the Client constructor.
For example, to apply a set of custom headers on every request:
+>>> url = 'http://httpbin.org/headers'
+>>> headers = {'user-agent': 'my-app/0.0.1'}
+>>> with httpx.Client(headers=headers) as client:
+... r = client.get(url)
+...
+>>> r.json()['headers']['User-Agent']
+'my-app/0.0.1'
When a configuration option is provided at both the client-level and request-level, one of two things can happen:

- For headers, query parameters and cookies, the values are combined into one.
- For all other parameters, the request-level value is used.

For example:
+>>> headers = {'X-Auth': 'from-client'}
+>>> params = {'client_id': 'client1'}
+>>> with httpx.Client(headers=headers, params=params) as client:
+... headers = {'X-Custom': 'from-request'}
+... params = {'request_id': 'request1'}
+... r = client.get('https://example.com', headers=headers, params=params)
+...
+>>> r.request.url
+URL('https://example.com?client_id=client1&request_id=request1')
+>>> r.request.headers['X-Auth']
+'from-client'
+>>> r.request.headers['X-Custom']
+'from-request'
+>>> with httpx.Client(auth=('tom', 'mot123')) as client:
+... r = client.get('https://example.com', auth=('alice', 'ecila123'))
+...
+>>> _, _, auth = r.request.headers['Authorization'].partition(' ')
+>>> import base64
+>>> base64.b64decode(auth)
+b'alice:ecila123'
+If you need finer-grained control on the merging of client-level and request-level parameters, see Request instances.
+Additionally, Client accepts some configuration options that aren't available at the request level.
For example, base_url allows you to prepend a URL to all outgoing requests:
>>> with httpx.Client(base_url='http://httpbin.org') as client:
+... r = client.get('/headers')
+...
+>>> r.request.url
+URL('http://httpbin.org/headers')
+For a list of all available client parameters, see the Client API reference.
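As a quick sketch, several commonly used options can be combined on one client (the values below are illustrative assumptions, not recommendations):

import httpx

client = httpx.Client(
    base_url="https://api.example.com",      # hypothetical API root, prepended to request URLs
    headers={"user-agent": "my-app/0.0.1"},  # sent on every outgoing request
    timeout=10.0,                            # default timeout applied to every request
    follow_redirects=True,                   # automatically follow 3xx responses
)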
For maximum control on what gets sent over the wire, HTTPX supports building explicit Request instances:
request = httpx.Request("GET", "https://example.com")
+To dispatch a Request instance across to the network, create a Client instance and use .send():
with httpx.Client() as client:
+ response = client.send(request)
+ ...
+If you need to mix client-level and request-level options in a way that is not supported by the default Merging of parameters, you can use .build_request() and then make arbitrary modifications to the Request instance. For example:
headers = {"X-Api-Key": "...", "X-Client-ID": "ABC123"}
+
+with httpx.Client(headers=headers) as client:
+ request = client.build_request("GET", "https://api.example.com")
+
+ print(request.headers["X-Client-ID"]) # "ABC123"
+
+ # Don't send the API key for this particular request.
+ del request.headers["X-Api-Key"]
+
+ response = client.send(request)
+ ...
+If you need to monitor download progress of large responses, you can use response streaming and inspect the response.num_bytes_downloaded property.
This interface is required for properly determining download progress, because the total number of bytes returned by response.content or response.iter_bytes() will not always correspond with the raw content length of the response if HTTP response compression is being used.
For example, showing a progress bar using the tqdm library while a response is being downloaded could be done like this...
import tempfile
+
+import httpx
+from tqdm import tqdm
+
+with tempfile.NamedTemporaryFile() as download_file:
+ url = "https://speed.hetzner.de/100MB.bin"
+ with httpx.stream("GET", url) as response:
+ total = int(response.headers["Content-Length"])
+
+ with tqdm(total=total, unit_scale=True, unit_divisor=1024, unit="B") as progress:
+ num_bytes_downloaded = response.num_bytes_downloaded
+ for chunk in response.iter_bytes():
+ download_file.write(chunk)
+ progress.update(response.num_bytes_downloaded - num_bytes_downloaded)
+ num_bytes_downloaded = response.num_bytes_downloaded
+
Or an alternate example, this time using the rich library...
import tempfile
+import httpx
+import rich.progress
+
+with tempfile.NamedTemporaryFile() as download_file:
+ url = "https://speed.hetzner.de/100MB.bin"
+ with httpx.stream("GET", url) as response:
+ total = int(response.headers["Content-Length"])
+
+ with rich.progress.Progress(
+ "[progress.percentage]{task.percentage:>3.0f}%",
+ rich.progress.BarColumn(bar_width=None),
+ rich.progress.DownloadColumn(),
+ rich.progress.TransferSpeedColumn(),
+ ) as progress:
+ download_task = progress.add_task("Download", total=total)
+ for chunk in response.iter_bytes():
+ download_file.write(chunk)
+ progress.update(download_task, completed=response.num_bytes_downloaded)
+
If you need to monitor upload progress of a large request body, you can use request content generator streaming.
For example, showing a progress bar using the tqdm library:
import io
+import random
+
+import httpx
+from tqdm import tqdm
+
+
+def gen():
+ """
    This is a complete example using generated random bytes.
    You can replace `io.BytesIO` with a real file object.
+ """
+ total = 32 * 1024 * 1024 # 32m
+ with tqdm(ascii=True, unit_scale=True, unit='B', unit_divisor=1024, total=total) as bar:
+ with io.BytesIO(random.randbytes(total)) as f:
+ while data := f.read(1024):
+ yield data
+ bar.update(len(data))
+
+
+httpx.post("https://httpbin.org/post", content=gen())
+
As mentioned in the Quickstart, multipart file encoding is available by passing a dictionary with the names of the payloads as keys and either a tuple of elements, a file-like object, or a string as values.
+>>> files = {'upload-file': ('report.xls', open('report.xls', 'rb'), 'application/vnd.ms-excel')}
+>>> r = httpx.post("https://httpbin.org/post", files=files)
+>>> print(r.text)
+{
+ ...
+ "files": {
+ "upload-file": "<... binary content ...>"
+ },
+ ...
+}
+More specifically, if a tuple is used as a value, it must have between 2 and 3 elements:
- The first element is an optional filename, which can be set to None.
- The second element is the file content, as a file-like object or a string.
- The optional third element is the MIME content type of the file. If the filename is explicitly set to None then HTTPX will not include a content-type MIME header field.
>>> files = {'upload-file': (None, 'text content', 'text/plain')}
+>>> r = httpx.post("https://httpbin.org/post", files=files)
+>>> print(r.text)
+{
+ ...
+ "files": {},
+ "form": {
+ "upload-file": "text-content"
+ },
+ ...
+}
+Tip
+It is safe to upload large files this way. File uploads are streaming by default, meaning that only one chunk will be loaded into memory at a time.
Non-file data fields can be included in the multipart form by passing them to data=....
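For example, a short sketch combining an ordinary form field with a file upload in a single multipart request (the field names here are made up for illustration):

import httpx

# The ordinary form field is carried alongside the file part in the same multipart body.
data = {"message": "Quarterly report attached"}
files = {"upload-file": ("report.xls", open("report.xls", "rb"), "application/vnd.ms-excel")}
r = httpx.post("https://httpbin.org/post", data=data, files=files)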
You can also send multiple files in one go with a multiple file field form.
+To do that, pass a list of (field, <file>) items instead of a dictionary, allowing you to pass multiple items with the same field.
For instance, this request sends two files, foo.png and bar.png, in a single request on the images form field:
>>> files = [('images', ('foo.png', open('foo.png', 'rb'), 'image/png')),
+ ('images', ('bar.png', open('bar.png', 'rb'), 'image/png'))]
+>>> r = httpx.post("https://httpbin.org/post", files=files)
+HTTPX allows you to register "event hooks" with the client, that are called +every time a particular type of event takes place.
+There are currently two event hooks:
- request - Called after a request is fully prepared, but before it is sent to the network. Passed the request instance.
- response - Called after the response has been fetched from the network, but before it is returned to the caller. Passed the response instance.

These allow you to install client-wide functionality such as logging, monitoring or tracing.
+def log_request(request):
+ print(f"Request event hook: {request.method} {request.url} - Waiting for response")
+
+def log_response(response):
+ request = response.request
+ print(f"Response event hook: {request.method} {request.url} - Status {response.status_code}")
+
+client = httpx.Client(event_hooks={'request': [log_request], 'response': [log_response]})
+You can also use these hooks to install response processing code, such as this
+example, which creates a client instance that always raises httpx.HTTPStatusError
+on 4xx and 5xx responses.
def raise_on_4xx_5xx(response):
+ response.raise_for_status()
+
+client = httpx.Client(event_hooks={'response': [raise_on_4xx_5xx]})
+Note
+Response event hooks are called before determining if the response body +should be read or not.
+If you need access to the response body inside an event hook, you'll
+need to call response.read(), or for AsyncClients, response.aread().
The hooks are also allowed to modify request and response objects.
from datetime import datetime, timezone

def add_timestamp(request):
    request.headers['x-request-timestamp'] = datetime.now(tz=timezone.utc).isoformat()
+
+client = httpx.Client(event_hooks={'request': [add_timestamp]})
+Event hooks must always be set as a list of callables, and you may register +multiple event hooks for each type of event.
+As well as being able to set event hooks on instantiating the client, there
+is also an .event_hooks property, that allows you to inspect and modify
+the installed hooks.
client = httpx.Client()
+client.event_hooks['request'] = [log_request]
+client.event_hooks['response'] = [log_response, raise_on_4xx_5xx]
+Note
+If you are using HTTPX's async support, then you need to be aware that
+hooks registered with httpx.AsyncClient MUST be async functions,
+rather than plain functions.
Request and response extensions provide an untyped space where additional information may be added.
+Extensions should be used for features that may not be available on all transports, and that do not fit neatly into the simplified request/response model that the underlying httpcore package uses as its API.
Several extensions are supported on the request:
# Request timeouts are actually implemented as an extension on
+# the request, ensuring that they are passed throughout the
+# entire call stack.
+client = httpx.Client()
+response = client.get(
+ "https://www.example.com",
+ extensions={"timeout": {"connect": 5.0}}
+)
print(response.request.extensions["timeout"])  # {"connect": 5.0}
+And on the response:
+client = httpx.Client()
+response = client.get("https://www.example.com")
+print(response.extensions["http_version"]) # b"HTTP/1.1"
+# Other server responses could have been
+# b"HTTP/0.9", b"HTTP/1.0", or b"HTTP/1.1"
+"trace"The trace extension allows a callback handler to be installed to monitor the internal
+flow of events within the underlying httpcore transport.
The simplest way to explain this is with an example:
+import httpx
+
+def log(event_name, info):
+ print(event_name, info)
+
+client = httpx.Client()
+response = client.get("https://www.example.com/", extensions={"trace": log})
+# connection.connect_tcp.started {'host': 'www.example.com', 'port': 443, 'local_address': None, 'timeout': None}
+# connection.connect_tcp.complete {'return_value': <httpcore.backends.sync.SyncStream object at 0x1093f94d0>}
+# connection.start_tls.started {'ssl_context': <ssl.SSLContext object at 0x1093ee750>, 'server_hostname': b'www.example.com', 'timeout': None}
+# connection.start_tls.complete {'return_value': <httpcore.backends.sync.SyncStream object at 0x1093f9450>}
+# http11.send_request_headers.started {'request': <Request [b'GET']>}
+# http11.send_request_headers.complete {'return_value': None}
+# http11.send_request_body.started {'request': <Request [b'GET']>}
+# http11.send_request_body.complete {'return_value': None}
+# http11.receive_response_headers.started {'request': <Request [b'GET']>}
+# http11.receive_response_headers.complete {'return_value': (b'HTTP/1.1', 200, b'OK', [(b'Age', b'553715'), (b'Cache-Control', b'max-age=604800'), (b'Content-Type', b'text/html; charset=UTF-8'), (b'Date', b'Thu, 21 Oct 2021 17:08:42 GMT'), (b'Etag', b'"3147526947+ident"'), (b'Expires', b'Thu, 28 Oct 2021 17:08:42 GMT'), (b'Last-Modified', b'Thu, 17 Oct 2019 07:18:26 GMT'), (b'Server', b'ECS (nyb/1DCD)'), (b'Vary', b'Accept-Encoding'), (b'X-Cache', b'HIT'), (b'Content-Length', b'1256')])}
+# http11.receive_response_body.started {'request': <Request [b'GET']>}
+# http11.receive_response_body.complete {'return_value': None}
+# http11.response_closed.started {}
+# http11.response_closed.complete {'return_value': None}
+The event_name and info arguments here will be one of the following:
- {event_type}.{event_name}.started, <dictionary of keyword arguments>
- {event_type}.{event_name}.complete, {"return_value": <...>}
- {event_type}.{event_name}.failed, {"exception": <...>}

Note that when using async code the handler function passed to "trace" must be an async def ... function.
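For instance, a minimal sketch of the async variant, where the handler passed to "trace" is an async function used with an AsyncClient:

import asyncio
import httpx

async def log(event_name, info):
    # The handler may await other coroutines here if it needs to.
    print(event_name, info)

async def main():
    async with httpx.AsyncClient() as client:
        await client.get("https://www.example.com/", extensions={"trace": log})

asyncio.run(main())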
The following event types are currently exposed...
+Establishing the connection
+"connection.connect_tcp""connection.connect_unix_socket""connection.start_tls"HTTP/1.1 events
+"http11.send_request_headers""http11.send_request_body""http11.receive_response""http11.receive_response_body""http11.response_closed"HTTP/2 events
+"http2.send_connection_init""http2.send_request_headers""http2.send_request_body""http2.receive_response_headers""http2.receive_response_body""http2.response_closed"The exact set of trace events may be subject to change across different versions of httpcore. If you need to rely on a particular set of events it is recommended that you pin installation of the package to a fixed version.
"sni_hostname"The server's hostname, which is used to confirm the hostname supplied by the SSL certificate.
+If you want to connect to an explicit IP address rather than using the standard DNS hostname lookup, then you'll need to use this request extension.
+For example:
+# Connect to '185.199.108.153' but use 'www.encode.io' in the Host header,
# and use 'www.encode.io' when SSL verifying the server hostname.
+client = httpx.Client()
+headers = {"Host": "www.encode.io"}
+extensions = {"sni_hostname": "www.encode.io"}
+response = client.get(
+ "https://185.199.108.153/path",
+ headers=headers,
+ extensions=extensions
+)
+"timeout"A dictionary of str: Optional[float] timeout values.
May include values for 'connect', 'read', 'write', or 'pool'.
For example:
# Timeout if a connection takes more than 5 seconds to be established, or if
+# we are blocked waiting on the connection pool for more than 10 seconds.
+client = httpx.Client()
+response = client.get(
+ "https://www.example.com",
+ extensions={"timeout": {"connect": 5.0, "pool": 10.0}}
+)
+This extension is how the httpx timeouts are implemented, ensuring that the timeout values are associated with the request instance and passed throughout the stack. You shouldn't typically be working with this extension directly, but use the higher level timeout API instead.
"target"The target that is used as the HTTP target instead of the URL path.
+This enables support constructing requests that would otherwise be unsupported.
+CONNECT with hostname as the target.OPTIONS * requests.Some examples:
+Using the 'target' extension to send requests without the standard path escaping rules...
+# Typically a request to "https://www.example.com/test^path" would
+# connect to "www.example.com" and send an HTTP/1.1 request like...
+#
+# GET /test%5Epath HTTP/1.1
+#
+# Using the target extension we can include the literal '^'...
+#
+# GET /test^path HTTP/1.1
+#
+# Note that requests must still be valid HTTP requests.
+# For example including whitespace in the target will raise a `LocalProtocolError`.
+extensions = {"target": b"/test^path"}
+response = httpx.get("https://www.example.com", extensions=extensions)
+The target extension also allows server-wide OPTIONS * requests to be constructed...
# This will send the following request...
+#
# OPTIONS * HTTP/1.1
extensions = {"target": b"*"}
response = httpx.request("OPTIONS", "https://www.example.com", extensions=extensions)
+"http_version"The HTTP version, as bytes. Eg. b"HTTP/1.1".
When using HTTP/1.1 the response line includes an explicit version, and the value of this key could feasibly be one of b"HTTP/0.9", b"HTTP/1.0", or b"HTTP/1.1".
When using HTTP/2 there is no further response versioning included in the protocol, and the value of this key will always be b"HTTP/2".
"reason_phrase"The reason-phrase of the HTTP response, as bytes. For example b"OK". Some servers may include a custom reason phrase, although this is not recommended.
HTTP/2 onwards does not include a reason phrase on the wire.
+When no key is included, a default based on the status code may be used.
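For instance, a quick sketch of inspecting it on a response:

import httpx

client = httpx.Client()
response = client.get("https://www.example.com")
# e.g. b"OK" for an HTTP/1.1 response; the key may be absent for HTTP/2.
print(response.extensions.get("reason_phrase"))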
+"stream_id"When HTTP/2 is being used the "stream_id" response extension can be accessed to determine the ID of the data stream that the response was sent on.
"network_stream"The "network_stream" extension allows developers to handle HTTP CONNECT and Upgrade requests, by providing an API that steps outside the standard request/response model, and can directly read or write to the network.
The interface provided by the network stream:
- read(max_bytes, timeout = None) -> bytes
- write(buffer, timeout = None)
- close()
- start_tls(ssl_context, server_hostname = None, timeout = None) -> NetworkStream
- get_extra_info(info) -> Any

This API can be used as the foundation for working with HTTP proxies, WebSocket upgrades, and other advanced use-cases.
+See the network backends documentation for more information on working directly with network streams.
+Extra network information
+The network stream abstraction also allows access to various low-level information that may be exposed by the underlying socket:
+response = httpx.get("https://www.example.com")
+network_stream = response.extensions["network_stream"]
+
+client_addr = network_stream.get_extra_info("client_addr")
+server_addr = network_stream.get_extra_info("server_addr")
+print("Client address", client_addr)
+print("Server address", server_addr)
+The socket SSL information is also available through this interface, although you need to ensure that the underlying connection is still open, in order to access it...
+with httpx.stream("GET", "https://www.example.com") as response:
+ network_stream = response.extensions["network_stream"]
+
+ ssl_object = network_stream.get_extra_info("ssl_object")
+ print("TLS version", ssl_object.version())
+HTTPX supports setting up HTTP proxies via the proxy parameter to be passed on client initialization or top-level API functions like httpx.get(..., proxy=...).
(Diagram: sending requests to example.com through a proxy.)
To route all traffic (HTTP and HTTPS) to a proxy located at http://localhost:8030, pass the proxy URL to the client...
with httpx.Client(proxy="http://localhost:8030") as client:
+ ...
For more advanced use cases, pass a mounts dict. For example, to route HTTP and HTTPS requests to two different proxies, located at http://localhost:8030 and http://localhost:8031 respectively, pass a dict mapping URL patterns to transports configured with the proxy URLs:
proxy_mounts = {
+ "http://": httpx.HTTPTransport(proxy="http://localhost:8030"),
+ "https://": httpx.HTTPTransport(proxy="http://localhost:8031"),
+}
+
+with httpx.Client(mounts=proxy_mounts) as client:
+ ...
+For detailed information about proxy routing, see the Routing section.
+Gotcha
+In most cases, the proxy URL for the https:// key should use the http:// scheme (that's not a typo!).
This is because HTTP proxying requires initiating a connection with the proxy server. While it's possible that your proxy supports doing it via HTTPS, most proxies only support doing it via HTTP.
+For more information, see FORWARD vs TUNNEL.
+Proxy credentials can be passed as the userinfo section of the proxy URL. For example:
with httpx.Client(proxy="http://username:password@localhost:8030") as client:
+ ...
+Note
+This section describes advanced proxy concepts and functionality.
In general, the flow for making an HTTP request through a proxy is as follows:
1. The client connects to the proxy (initial connection request).
2. The proxy transfers data to the server on your behalf.
How exactly step 2/ is performed depends on which of two proxying mechanisms is used:
- Forwarding: the proxy makes the request for you, and sends back the response it obtained from the server.
- Tunnelling: the proxy establishes a TCP connection to the server on your behalf, and the client reuses this connection to send the request and receive the response. This is known as an HTTP tunnel, and is how HTTPS requests can be made through an HTTP proxy.
+If you encounter issues when setting up proxies, please refer to our Troubleshooting guide.
+In addition to HTTP proxies, httpcore also supports proxies using the SOCKS protocol.
+This is an optional feature that requires an additional third-party library be installed before use.
You can install SOCKS support using pip:
$ pip install httpx[socks]
+You can now configure a client to make requests via a proxy using the SOCKS protocol:
+httpx.Client(proxy='socks5://user:pass@host:port')
+You can control the connection pool size using the limits keyword
+argument on the client. It takes instances of httpx.Limits which define:
- max_keepalive_connections, number of allowable keep-alive connections, or None to always allow. (Defaults to 20)
- max_connections, maximum number of allowable connections, or None for no limits. (Defaults to 100)
- keepalive_expiry, time limit on idle keep-alive connections in seconds, or None for no limits. (Defaults to 5)

limits = httpx.Limits(max_keepalive_connections=5, max_connections=10)
+client = httpx.Client(limits=limits)
+When making a request over HTTPS, HTTPX needs to verify the identity of the requested host. To do this, it uses a bundle of SSL certificates (a.k.a. CA bundle) delivered by a trusted certificate authority (CA).
+By default httpx will verify HTTPS connections, and raise an error for invalid SSL cases...
+>>> httpx.get("https://expired.badssl.com/")
+httpx.ConnectError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: certificate has expired (_ssl.c:997)
+You can disable SSL verification completely and allow insecure requests...
+>>> httpx.get("https://expired.badssl.com/", verify=False)
+<Response [200 OK]>
If you're using a Client() instance, you should pass any verify=<...> configuration when instantiating the client.
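For example, a minimal sketch (disabling verification is shown only for illustration and is not recommended outside of testing):

import httpx

# Every request made through this client skips certificate verification.
client = httpx.Client(verify=False)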
By default the certifi CA bundle is used for SSL verification.
+For more complex configurations you can pass an SSL Context instance...
+import certifi
+import httpx
+import ssl
+
# This SSL context is equivalent to the default `verify=True`.
+ctx = ssl.create_default_context(cafile=certifi.where())
+client = httpx.Client(verify=ctx)
+Using the truststore package to support system certificate stores...
import ssl
+import truststore
+import httpx
+
+# Use system certificate stores.
+ctx = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+client = httpx.Client(verify=ctx)
Loading an alternative certificate verification store using the standard SSL context API...
+import httpx
+import ssl
+
+# Use an explicitly configured certificate store.
+ctx = ssl.create_default_context(cafile="path/to/certs.pem") # Either cafile or capath.
+client = httpx.Client(verify=ctx)
+Client side certificates allow a remote server to verify the client. They tend to be used within private organizations to authenticate requests to remote servers.
+You can specify client-side certificates, using the .load_cert_chain() API...
ctx = ssl.create_default_context()
+ctx.load_cert_chain(certfile="path/to/client.pem") # Optionally also keyfile or password.
+client = httpx.Client(verify=ctx)
SSL_CERT_FILE and SSL_CERT_DIR
Unlike requests, the httpx package does not automatically pull in the environment variables SSL_CERT_FILE or SSL_CERT_DIR. If you want to use these, they need to be enabled explicitly.
For example...
+# Use `SSL_CERT_FILE` or `SSL_CERT_DIR` if configured.
+# Otherwise default to certifi.
+ctx = ssl.create_default_context(
+ cafile=os.environ.get("SSL_CERT_FILE", certifi.where()),
+ capath=os.environ.get("SSL_CERT_DIR"),
+)
+client = httpx.Client(verify=ctx)
+When making requests to local servers, such as a development server running on localhost, you will typically be using unencrypted HTTP connections.
If you do need to make HTTPS connections to a local server, for example to test an HTTPS-only service, you will need to create and use your own certificates. Here's one way to do it...
1. Generate your own server key, server certificate, and client certificate files (for example, using a tool such as trustme).
2. Pass the server key and certificate when starting your local server. (The exact mechanism depends on the server; for example, uvicorn provides the --ssl-keyfile and --ssl-certfile options.)
3. Tell httpx to use the certificates stored in client.pem.

ctx = ssl.create_default_context(cafile="client.pem")
+client = httpx.Client(verify=ctx)
+When accessing response.text, we need to decode the response bytes into a unicode text representation.
By default httpx will use "charset" information included in the response Content-Type header to determine how the response bytes should be decoded into text.
In cases where no charset information is included on the response, the default behaviour is to assume "utf-8" encoding, which is by far the most widely used text encoding on the internet.
+To understand this better let's start by looking at the default behaviour for text decoding...
+import httpx
+# Instantiate a client with the default configuration.
+client = httpx.Client()
+# Using the client...
+response = client.get(...)
+print(response.encoding) # This will either print the charset given in
+ # the Content-Type charset, or else "utf-8".
+print(response.text) # The text will either be decoded with the Content-Type
+ # charset, or using "utf-8".
+This is normally absolutely fine. Most servers will respond with a properly formatted Content-Type header, including a charset encoding. And in most cases where no charset encoding is included, UTF-8 is very likely to be used, since it is so widely adopted.
+In some cases we might be making requests to a site where no character set information is being set explicitly by the server, but we know what the encoding is. In this case it's best to set the default encoding explicitly on the client.
+import httpx
+# Instantiate a client with a Japanese character set as the default encoding.
+client = httpx.Client(default_encoding="shift-jis")
+# Using the client...
+response = client.get(...)
+print(response.encoding) # This will either print the charset given in
+ # the Content-Type charset, or else "shift-jis".
+print(response.text) # The text will either be decoded with the Content-Type
+ # charset, or using "shift-jis".
+In cases where the server is not reliably including character set information, and where we don't know what encoding is being used, we can enable auto-detection to make a best-guess attempt when decoding from bytes to text.
+To use auto-detection you need to set the default_encoding argument to a callable instead of a string. This callable should be a function which takes the input bytes as an argument and returns the character set to use for decoding those bytes to text.
There are two widely used Python packages which both handle this functionality:
- chardet - This is a well established package, and is a port of the auto-detection code in Mozilla.
- charset-normalizer - A newer package, motivated by chardet, with a different approach.

Let's take a look at installing autodetection using one of these packages...
+$ pip install httpx
+$ pip install chardet
+Once chardet is installed, we can configure a client to use character-set autodetection.
import httpx
+import chardet
+
+def autodetect(content):
+ return chardet.detect(content).get("encoding")
+
+# Using a client with character-set autodetection enabled.
+client = httpx.Client(default_encoding=autodetect)
+response = client.get(...)
+print(response.encoding) # This will either print the charset given in
+ # the Content-Type charset, or else the auto-detected
+ # character set.
+print(response.text)
+HTTPX is careful to enforce timeouts everywhere by default.
+The default behavior is to raise a TimeoutException after 5 seconds of
+network inactivity.
You can set timeouts for an individual request:
+# Using the top-level API:
+httpx.get('http://example.com/api/v1/example', timeout=10.0)
+
+# Using a client instance:
+with httpx.Client() as client:
+ client.get("http://example.com/api/v1/example", timeout=10.0)
+Or disable timeouts for an individual request:
+# Using the top-level API:
+httpx.get('http://example.com/api/v1/example', timeout=None)
+
+# Using a client instance:
+with httpx.Client() as client:
+ client.get("http://example.com/api/v1/example", timeout=None)
+You can set a timeout on a client instance, which results in the given
+timeout being used as the default for requests made with this client:
client = httpx.Client() # Use a default 5s timeout everywhere.
+client = httpx.Client(timeout=10.0) # Use a default 10s timeout everywhere.
+client = httpx.Client(timeout=None) # Disable all timeouts by default.
+HTTPX also allows you to specify the timeout behavior in more fine grained detail.
+There are four different types of timeouts that may occur. These are connect, +read, write, and pool timeouts.
- The connect timeout is the maximum amount of time to wait until a socket connection to the requested host is established. If HTTPX is unable to connect within this time frame, a ConnectTimeout exception is raised.
- The read timeout is the maximum duration to wait for a chunk of data to be received (for example, a chunk of the response body). If HTTPX is unable to receive data within this time frame, a ReadTimeout exception is raised.
- The write timeout is the maximum duration to wait for a chunk of data to be sent (for example, a chunk of the request body). If HTTPX is unable to send data within this time frame, a WriteTimeout exception is raised.
- The pool timeout is the maximum duration to wait for acquiring a connection from the connection pool. If HTTPX is unable to acquire a connection within this time frame, a PoolTimeout exception is raised. A related configuration here is the maximum number of allowable connections in the connection pool, which is configured by the limits argument.
+# A client with a 60s timeout for connecting, and a 10s timeout elsewhere.
+timeout = httpx.Timeout(10.0, connect=60.0)
+client = httpx.Client(timeout=timeout)
+
+response = client.get('http://example.com/')
+HTTPX's Client also accepts a transport argument. This argument allows you
+to provide a custom Transport object that will be used to perform the actual
+sending of the requests.
For some advanced configuration you might need to instantiate a transport
+class directly, and pass it to the client instance. One example is the
+local_address configuration which is only available via this low-level API.
>>> import httpx
+>>> transport = httpx.HTTPTransport(local_address="0.0.0.0")
+>>> client = httpx.Client(transport=transport)
+Connection retries are also available via this interface. Requests will be retried the given number of times in case an httpx.ConnectError or an httpx.ConnectTimeout occurs, allowing smoother operation under flaky networks. If you need other forms of retry behaviors, such as handling read/write errors or reacting to 503 Service Unavailable, consider general-purpose tools such as tenacity.
>>> import httpx
+>>> transport = httpx.HTTPTransport(retries=1)
+>>> client = httpx.Client(transport=transport)
+Similarly, instantiating a transport directly provides a uds option for
+connecting via a Unix Domain Socket that is only available via this low-level API:
>>> import httpx
+>>> # Connect to the Docker API via a Unix Socket.
+>>> transport = httpx.HTTPTransport(uds="/var/run/docker.sock")
+>>> client = httpx.Client(transport=transport)
+>>> response = client.get("http://docker/info")
+>>> response.json()
+{"ID": "...", "Containers": 4, "Images": 74, ...}
+You can configure an httpx client to call directly into a Python web application using the WSGI protocol.
This is particularly useful for two main use-cases:
- Using httpx as a client inside test cases.
- Mocking out external services during tests or in dev or staging environments.

Here's an example of integrating against a Flask application:
+from flask import Flask
+import httpx
+
+
+app = Flask(__name__)
+
+@app.route("/")
+def hello():
+ return "Hello World!"
+
+transport = httpx.WSGITransport(app=app)
+with httpx.Client(transport=transport, base_url="http://testserver") as client:
+ r = client.get("/")
+ assert r.status_code == 200
+ assert r.text == "Hello World!"
For some more complex cases you might need to customize the WSGI transport. This allows you to:
- Inspect 500 error responses rather than raise exceptions by setting raise_app_exceptions=False.
- Mount the WSGI application at a subpath by setting script_name (WSGI).
- Use a given client address for requests by setting remote_addr (WSGI).

For example:
+# Instantiate a client that makes WSGI requests with a client IP of "1.2.3.4".
+transport = httpx.WSGITransport(app=app, remote_addr="1.2.3.4")
+with httpx.Client(transport=transport, base_url="http://testserver") as client:
+ ...
+You can configure an httpx client to call directly into an async Python web application using the ASGI protocol.
This is particularly useful for two main use-cases:
- Using httpx as a client inside test cases.
- Mocking out external services during tests or in dev or staging environments.

Let's take this Starlette application as an example:
+from starlette.applications import Starlette
+from starlette.responses import HTMLResponse
+from starlette.routing import Route
+
+
+async def hello(request):
+ return HTMLResponse("Hello World!")
+
+
+app = Starlette(routes=[Route("/", hello)])
+We can make requests directly against the application, like so:
+transport = httpx.ASGITransport(app=app)
+
+async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
+ r = await client.get("/")
+ assert r.status_code == 200
+ assert r.text == "Hello World!"
For some more complex cases you might need to customise the ASGI transport. This allows you to:
- Inspect 500 error responses rather than raise exceptions by setting raise_app_exceptions=False.
- Mount the ASGI application at a subpath by setting root_path.
- Use a given client address for requests by setting client.

For example:
+# Instantiate a client that makes ASGI requests with a client IP of "1.2.3.4",
+# on port 123.
+transport = httpx.ASGITransport(app=app, client=("1.2.3.4", 123))
+async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
+ ...
+See the ASGI documentation for more details on the client and root_path keys.
It is not in the scope of HTTPX to trigger ASGI lifespan events of your app.
However, it is suggested to use LifespanManager from asgi-lifespan together with AsyncClient.
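A minimal sketch of that pattern, assuming the asgi-lifespan package is installed and app is the Starlette application from above:

import httpx
from asgi_lifespan import LifespanManager

async def test_hello_world():
    async with LifespanManager(app):
        transport = httpx.ASGITransport(app=app)
        async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
            r = await client.get("/")
            assert r.status_code == 200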
A transport instance must implement the low-level Transport API which deals
+with sending a single request, and returning a response. You should either
+subclass httpx.BaseTransport to implement a transport to use with Client,
+or subclass httpx.AsyncBaseTransport to implement a transport to
+use with AsyncClient.
At the layer of the transport API we're using the familiar Request and
+Response models.
See the handle_request and handle_async_request docstrings for more details
+on the specifics of the Transport API.
A complete example of a custom transport implementation would be:
+import json
+import httpx
+
+class HelloWorldTransport(httpx.BaseTransport):
+ """
+ A mock transport that always returns a JSON "Hello, world!" response.
+ """
+
+ def handle_request(self, request):
+ return httpx.Response(200, json={"text": "Hello, world!"})
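A short usage sketch with the transport above:

import httpx

client = httpx.Client(transport=HelloWorldTransport())
response = client.get("https://example.org/")
print(response.json())  # {'text': 'Hello, world!'}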
+Or this example, which uses a custom transport and httpx.Mounts to always redirect http:// requests.
class HTTPSRedirect(httpx.BaseTransport):
+ """
+ A transport that always redirects to HTTPS.
+ """
+ def handle_request(self, request):
+ url = request.url.copy_with(scheme="https")
+ return httpx.Response(303, headers={"Location": str(url)})
+
+# A client where any `http` requests are always redirected to `https`
+transport = httpx.Mounts({
    'http://': HTTPSRedirect(),
+ 'https://': httpx.HTTPTransport()
+})
+client = httpx.Client(transport=transport)
+A useful pattern here is custom transport classes that wrap the default HTTP implementation. For example...
+class DebuggingTransport(httpx.BaseTransport):
+ def __init__(self, **kwargs):
+ self._wrapper = httpx.HTTPTransport(**kwargs)
+
+ def handle_request(self, request):
+ print(f">>> {request}")
+ response = self._wrapper.handle_request(request)
+ print(f"<<< {response}")
+ return response
+
+ def close(self):
+ self._wrapper.close()
+
+transport = DebuggingTransport()
+client = httpx.Client(transport=transport)
+Here's another case, where we're using a round-robin across a number of different proxies...
+class ProxyRoundRobin(httpx.BaseTransport):
+ def __init__(self, proxies, **kwargs):
+ self._transports = [
+ httpx.HTTPTransport(proxy=proxy, **kwargs)
+ for proxy in proxies
+ ]
+ self._idx = 0
+
+ def handle_request(self, request):
+ transport = self._transports[self._idx]
+ self._idx = (self._idx + 1) % len(self._transports)
+ return transport.handle_request(request)
+
+ def close(self):
+ for transport in self._transports:
+ transport.close()
+
+proxies = [
+ httpx.Proxy("http://127.0.0.1:8081"),
+ httpx.Proxy("http://127.0.0.1:8082"),
+ httpx.Proxy("http://127.0.0.1:8083"),
+]
+transport = ProxyRoundRobin(proxies=proxies)
+client = httpx.Client(transport=transport)
+During testing it can often be useful to be able to mock out a transport, +and return pre-determined responses, rather than making actual network requests.
+The httpx.MockTransport class accepts a handler function, which can be used
+to map requests onto pre-determined responses:
import os

import httpx


def handler(request):
+ return httpx.Response(200, json={"text": "Hello, world!"})
+
+
+# Switch to a mock transport, if the TESTING environment variable is set.
+if os.environ.get('TESTING', '').upper() == "TRUE":
+ transport = httpx.MockTransport(handler)
+else:
+ transport = httpx.HTTPTransport()
+
+client = httpx.Client(transport=transport)
+For more advanced use-cases you might want to take a look at either the third-party +mocking library, RESPX, or the pytest-httpx library.
+You can also mount transports against given schemes or domains, to control +which transport an outgoing request should be routed via, with the same style +used for specifying proxy routing.
+import httpx
+
+class HTTPSRedirectTransport(httpx.BaseTransport):
+ """
+ A transport that always redirects to HTTPS.
+ """
+
    def handle_request(self, request):
        url = request.url.copy_with(scheme="https")
        return httpx.Response(303, headers={"Location": str(url)})
+
+
+# A client where any `http` requests are always redirected to `https`
+mounts = {'http://': HTTPSRedirectTransport()}
+client = httpx.Client(mounts=mounts)
+A couple of other sketches of how you might take advantage of mounted transports...
+Disabling HTTP/2 on a single given domain...
+mounts = {
+ "all://": httpx.HTTPTransport(http2=True),
+ "all://*example.org": httpx.HTTPTransport()
+}
+client = httpx.Client(mounts=mounts)
+Mocking requests to a given domain:
+# All requests to "example.org" should be mocked out.
+# Other requests occur as usual.
+def handler(request):
+ return httpx.Response(200, json={"text": "Hello, World!"})
+
+mounts = {"all://example.org": httpx.MockTransport(handler)}
+client = httpx.Client(mounts=mounts)
+Adding support for custom schemes:
+# Support URLs like "file:///Users/sylvia_green/websites/new_client/index.html"
+mounts = {"file://": FileSystemTransport()}
+client = httpx.Client(mounts=mounts)
+HTTPX provides a powerful mechanism for routing requests, allowing you to write complex rules that specify which transport should be used for each request.
+The mounts dictionary maps URL patterns to HTTP transports. HTTPX matches requested URLs against URL patterns to decide which transport should be used, if any. Matching is done from most specific URL patterns (e.g. https://<domain>:<port>) to least specific ones (e.g. https://).
HTTPX supports routing requests based on scheme, domain, port, or a combination of these.
+Route everything through a transport...
+mounts = {
+ "all://": httpx.HTTPTransport(proxy="http://localhost:8030"),
+}
+Route HTTP requests through one transport, and HTTPS requests through another...
+mounts = {
+ "http://": httpx.HTTPTransport(proxy="http://localhost:8030"),
+ "https://": httpx.HTTPTransport(proxy="http://localhost:8031"),
+}
+Proxy all requests on domain "example.com", let other requests pass through...
+mounts = {
+ "all://example.com": httpx.HTTPTransport(proxy="http://localhost:8030"),
+}
+Proxy HTTP requests on domain "example.com", let HTTPS and other requests pass through...
+mounts = {
+ "http://example.com": httpx.HTTPTransport(proxy="http://localhost:8030"),
+}
+Proxy all requests to "example.com" and its subdomains, let other requests pass through...
+mounts = {
+ "all://*example.com": httpx.HTTPTransport(proxy="http://localhost:8030"),
+}
+Proxy all requests to strict subdomains of "example.com", let "example.com" and other requests pass through...
+mounts = {
+ "all://*.example.com": httpx.HTTPTransport(proxy="http://localhost:8030"),
+}
+Proxy HTTPS requests on port 1234 to "example.com"...
+mounts = {
+ "https://example.com:1234": httpx.HTTPTransport(proxy="http://localhost:8030"),
+}
+Proxy all requests on port 1234...
+mounts = {
+ "all://*:1234": httpx.HTTPTransport(proxy="http://localhost:8030"),
+}
+It is also possible to define requests that shouldn't be routed through the transport.
+To do so, pass None as the proxy URL. For example...
mounts = {
+ # Route requests through a proxy by default...
+ "all://": httpx.HTTPTransport(proxy="http://localhost:8031"),
+ # Except those for "example.com".
+ "all://example.com": None,
+}
+You can combine the routing features outlined above to build complex proxy routing configurations. For example...
+mounts = {
+ # Route all traffic through a proxy by default...
+ "all://": httpx.HTTPTransport(proxy="http://localhost:8030"),
+ # But don't use proxies for HTTPS requests to "domain.io"...
+ "https://domain.io": None,
+ # And use another proxy for requests to "example.com" and its subdomains...
+ "all://*example.com": httpx.HTTPTransport(proxy="http://localhost:8031"),
+ # And yet another proxy if HTTP is used,
+ # and the "internal" subdomain on port 5550 is requested...
+ "http://internal.example.com:5550": httpx.HTTPTransport(proxy="http://localhost:8032"),
+}
+There are also environment variables that can be used to control the dictionary of the client mounts. +They can be used to configure HTTP proxying for clients.
+See documentation on HTTP_PROXY, HTTPS_PROXY, ALL_PROXY
+and NO_PROXY for more information.
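As a sketch (the proxy address below is an illustrative assumption), these variables are honoured whenever trust_env is enabled, which is the default:

import os

import httpx

# Route HTTPS traffic through a (hypothetical) proxy at localhost:8030.
os.environ["HTTPS_PROXY"] = "http://localhost:8030"

client = httpx.Client()                        # trust_env defaults to True, so the proxy is picked up.
direct_client = httpx.Client(trust_env=False)  # Ignores proxy-related environment variables.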
Note
+Only use these functions if you're testing HTTPX in a console
+or making a small number of requests. Using a Client will
+enable HTTP/2 and connection pooling for more efficient and
+long-lived connections.
httpx.request(method, url, *, params=None, content=None, data=None, files=None, json=None, headers=None, cookies=None, auth=None, proxy=None, timeout=Timeout(timeout=5.0), follow_redirects=False, verify=True, trust_env=True)
Sends an HTTP request.
+Parameters:
- method - HTTP method for the new Request object: GET, OPTIONS, HEAD, POST, PUT, PATCH, or DELETE.
- url - URL for the new Request object.
- verify - Either True to use an SSL context with the default CA bundle, False to disable verification, or an instance of ssl.SSLContext to use a custom context.

Returns: Response
Usage:
+>>> import httpx
+>>> response = httpx.request('GET', 'https://httpbin.org/get')
+>>> response
+<Response [200 OK]>
httpx.get(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)
Sends a GET request.
Parameters: See httpx.request.
Note that the data, files, json and content parameters are not available
+on this function, as GET requests should not include a request body.
httpx.options(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)
Sends an OPTIONS request.
Parameters: See httpx.request.
Note that the data, files, json and content parameters are not available
+on this function, as OPTIONS requests should not include a request body.
httpx.head(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)
Sends a HEAD request.
Parameters: See httpx.request.
Note that the data, files, json and content parameters are not available
+on this function, as HEAD requests should not include a request body.
httpx.post(url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)
Sends a POST request.
Parameters: See httpx.request.
httpx.put(url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)
Sends a PUT request.
Parameters: See httpx.request.
httpx.patch(url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, verify=True, timeout=Timeout(timeout=5.0), trust_env=True)
Sends a PATCH request.
Parameters: See httpx.request.
httpx.delete(url, *, params=None, headers=None, cookies=None, auth=None, proxy=None, follow_redirects=False, timeout=Timeout(timeout=5.0), verify=True, trust_env=True)
Sends a DELETE request.
Parameters: See httpx.request.
Note that the data, files, json and content parameters are not available
+on this function, as DELETE requests should not include a request body.
httpx.stream(method, url, *, params=None, content=None, data=None, files=None, json=None, headers=None, cookies=None, auth=None, proxy=None, timeout=Timeout(timeout=5.0), follow_redirects=False, verify=True, trust_env=True)
Alternative to httpx.request() that streams the response body
+instead of loading it into memory at once.
Parameters: See httpx.request.
See also: Streaming Responses
Client
httpx.Client(*, auth=None, params=None, headers=None, cookies=None, verify=True, cert=None, trust_env=True, http1=True, http2=False, proxy=None, mounts=None, timeout=Timeout(timeout=5.0), follow_redirects=False, limits=Limits(max_connections=100, max_keepalive_connections=20, keepalive_expiry=5.0), max_redirects=20, event_hooks=None, base_url='', transport=None, default_encoding='utf-8')
An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc.
+It can be shared between threads.
+Usage:
+>>> client = httpx.Client()
+>>> response = client.get('https://example.org')
+Parameters:
- verify - Either True to use an SSL context with the default CA bundle, False to disable verification, or an instance of ssl.SSLContext to use a custom context.

headers
HTTP headers to include when sending requests.
cookies
Cookie values to include when sending requests.
params
Query parameters to include in the URL when sending requests.
auth
Authentication class used when none is passed at the request-level.
+See also Authentication.
request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=...)
Build and send a request.
+Equivalent to:
+request = client.build_request(...)
+response = client.send(request, ...)
+See Client.build_request(), Client.send() and
+Merging of configuration for how the various parameters
+are merged with client-level configuration.
get(self, url, *, params=None, headers=None, cookies=None, auth=...)
Send a GET request.
Parameters: See httpx.request.
head(self, url, *, params=None, headers=None, cookies=None, auth=...)
Send a HEAD request.
Parameters: See httpx.request.
options(self, url, *, params=None, headers=None, cookies=None, auth=...)
Send an OPTIONS request.
Parameters: See httpx.request.
post(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=...)
Send a POST request.
Parameters: See httpx.request.
put(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=...)
Send a PUT request.
Parameters: See httpx.request.
patch(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=...)
Send a PATCH request.
Parameters: See httpx.request.
delete(self, url, *, params=None, headers=None, cookies=None, auth=...)
Send a DELETE request.
Parameters: See httpx.request.
stream(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=...)
Alternative to httpx.request() that streams the response body instead of loading it into memory at once.
Parameters: See httpx.request.
See also: Streaming Responses
build_request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, timeout=...)
Build and return a request instance.
- The params, headers and cookies arguments are merged with any values set on the client.
- The url argument is merged with any base_url set on the client.
See also: Request instances
send(self, request, *, stream=False, auth=...)
Send a request.
The request is sent as-is, unmodified.
Typically you'll want to build one with Client.build_request() so that any client-level configuration is merged into the request, but passing an explicit httpx.Request() is supported as well.
See also: Request instances
close(self)
Close transport and proxies.
AsyncClient
httpx.AsyncClient(*, auth=None, params=None, headers=None, cookies=None, verify=True, cert=None, http1=True, http2=False, proxy=None, mounts=None, timeout=Timeout(timeout=5.0), follow_redirects=False, limits=Limits(max_connections=100, max_keepalive_connections=20, keepalive_expiry=5.0), max_redirects=20, event_hooks=None, base_url='', transport=None, trust_env=True, default_encoding='utf-8')
An asynchronous HTTP client, with connection pooling, HTTP/2, redirects,
+It can be shared between tasks.
+Usage:
+>>> async with httpx.AsyncClient() as client:
+>>> response = await client.get('https://example.org')
+Parameters:
- verify - Either True to use an SSL context with the default CA bundle, False to disable verification, or an instance of ssl.SSLContext to use a custom context.

headers
HTTP headers to include when sending requests.
cookies
Cookie values to include when sending requests.
params
Query parameters to include in the URL when sending requests.
auth
Authentication class used when none is passed at the request-level.
+See also Authentication.
request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=...)
Build and send a request.
+Equivalent to:
+request = client.build_request(...)
+response = await client.send(request, ...)
+See AsyncClient.build_request(), AsyncClient.send()
+and Merging of configuration for how the various parameters
+are merged with client-level configuration.
get(self, url, *, params=None, headers=None, cookies=None, auth=...)
Send a GET request.
Parameters: See httpx.request.
head(self, url, *, params=None, headers=None, cookies=None, auth=...)
Send a HEAD request.
Parameters: See httpx.request.
options(self, url, *, params=None, headers=None, cookies=None, auth=...)
Send an OPTIONS request.
Parameters: See httpx.request.
post(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=...)
Send a POST request.
Parameters: See httpx.request.
put(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=...)
Send a PUT request.
Parameters: See httpx.request.
patch(self, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=...)
Send a PATCH request.
Parameters: See httpx.request.
delete(self, url, *, params=None, headers=None, cookies=None, auth=...)
Send a DELETE request.
Parameters: See httpx.request.
stream(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, auth=...)
Alternative to httpx.request() that streams the response body instead of loading it into memory at once.
Parameters: See httpx.request.
See also: Streaming Responses
build_request(self, method, url, *, content=None, data=None, files=None, json=None, params=None, headers=None, cookies=None, timeout=...)
Build and return a request instance.
- The params, headers and cookies arguments are merged with any values set on the client.
- The url argument is merged with any base_url set on the client.
See also: Request instances
send(self, request, *, stream=False, auth=...)
Send a request.
The request is sent as-is, unmodified.
Typically you'll want to build one with AsyncClient.build_request() so that any client-level configuration is merged into the request, but passing an explicit httpx.Request() is supported as well.
See also: Request instances
aclose(self)
Close transport and proxies.
Response
An HTTP response.

- def __init__(...)
- .status_code - int
- .reason_phrase - str
- .http_version - "HTTP/2" or "HTTP/1.1"
- .url - URL
- .headers - Headers
- .content - bytes
- .text - str
- .encoding - str
- .is_redirect - bool
- .request - Request
- .next_request - Optional[Request]
- .cookies - Cookies
- .history - List[Response]
- .elapsed - timedelta - The time elapsed between sending the request and calling close() on the corresponding response received for that request. (Use total_seconds() to correctly get the total elapsed seconds.)
- def .raise_for_status() - Response
- def .json() - Any
- def .read() - bytes
- def .iter_raw([chunk_size]) - bytes iterator
- def .iter_bytes([chunk_size]) - bytes iterator
- def .iter_text([chunk_size]) - text iterator
- def .iter_lines() - text iterator
- def .close() - None
- def .next() - Response
- def .aread() - bytes
- def .aiter_raw([chunk_size]) - async bytes iterator
- def .aiter_bytes([chunk_size]) - async bytes iterator
- def .aiter_text([chunk_size]) - async text iterator
- def .aiter_lines() - async text iterator
- def .aclose() - None
- def .anext() - Response

Request
An HTTP request. Can be constructed explicitly for more control over exactly what gets sent over the wire.
+>>> request = httpx.Request("GET", "https://example.org", headers={'host': 'example.org'})
+>>> response = client.send(request)
- def __init__(method, url, [params], [headers], [cookies], [content], [data], [files], [json], [stream])
- .method - str
- .url - URL
- .content - bytes, byte iterator, or byte async iterator
- .headers - Headers
- .cookies - Cookies

URL
A normalized, IDNA supporting URL.
+>>> url = URL("https://example.org/")
+>>> url.host
+'example.org'
- def __init__(url, **kwargs)
- .scheme - str
- .authority - str
- .host - str
- .port - int
- .path - str
- .query - str
- .raw_path - str
- .fragment - str
- .is_ssl - bool
- .is_absolute_url - bool
- .is_relative_url - bool
- def .copy_with([scheme], [authority], [path], [query], [fragment]) - URL

Headers
A case-insensitive multi-dict.
+>>> headers = Headers({'Content-Type': 'application/json'})
+>>> headers['content-type']
+'application/json'
- def __init__(self, headers, encoding=None)
- def copy() - Headers

Cookies
A dict-like cookie store.
+>>> cookies = Cookies()
+>>> cookies.set("name", "value", domain="example.org")
- def __init__(cookies: [dict, Cookies, CookieJar])
- .jar - CookieJar
- def extract_cookies(response)
- def set_cookie_header(request)
- def set(name, value, [domain], [path])
- def get(name, [domain], [path])
- def delete(name, [domain], [path])
- def clear([domain], [path])