...
```
+
+## HTTP Proxying
+
+HTTPX supports setting up proxies the same way that Requests does, via the `proxies` parameter.
+For example, to forward all HTTP traffic to `http://127.0.0.1:3080` and all HTTPS traffic
+to `http://127.0.0.1:3081`, your `proxies` config would look like this:
+
+```python
+>>> client = httpx.Client(proxies={
+ "http": "http://127.0.0.1:3080",
+ "https": "http://127.0.0.1:3081"
+})
+```
+
+Proxies can be configured for a specific scheme and host, for all schemes of a host,
+for all hosts of a scheme, or for all requests. The same order of precedence is used
+when determining which proxy configuration applies to a given request.
+
+```python
+>>> client = httpx.Client(proxies={
+ "http://example.com": "...", # Host+Scheme
+ "all://example.com": "...", # Host
+ "http": "...", # Scheme
+ "all": "...", # All
+})
+>>> client = httpx.Client(proxies="...") # Shortcut for 'all'
+```
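+
+For instance, given the hypothetical configuration below (the proxy URLs are just
+placeholders), requests would be routed as follows:
+
+```python
+>>> client = httpx.Client(proxies={
+    "http://example.com": "http://localhost:8030",  # Host+Scheme
+    "all://example.com": "http://localhost:8031",   # Host
+    "http": "http://localhost:8032",                # Scheme
+    "all": "http://localhost:8033",                 # All
+})
+>>> client.get("http://example.com")   # Sent via http://localhost:8030
+>>> client.get("https://example.com")  # Sent via http://localhost:8031
+>>> client.get("http://other.com")     # Sent via http://localhost:8032
+>>> client.get("https://other.com")    # Sent via http://localhost:8033
+```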
+
+!!! warning
+ To make sure that a proxy cannot read your traffic, it is recommended
+ to use HTTPS and tunnel requests whenever possible, even if the
+ `proxy_url` itself uses HTTPS.
+
+By default `HTTPProxy` will operate as a forwarding proxy for `http://...` requests
+and will establish a `CONNECT` TCP tunnel for `https://...` requests. This behavior
+is the same regardless of whether the `proxy_url` is `http` or `https`.
+
+Proxies can be configured to have different behavior such as forwarding or tunneling all requests:
+
+```python
+proxy = httpx.HTTPProxy(
+ proxy_url="https://127.0.0.1",
+ proxy_mode=httpx.HTTPProxyMode.TUNNEL_ONLY
+)
+client = httpx.Client(proxies=proxy)
+
+# This request will be tunnelled instead of forwarded.
+client.get("http://example.com")
+```
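+
+Conversely, a proxy can be set up to forward every request rather than tunnel it,
+which can be useful when pointing traffic at a local debugging proxy that needs to
+see the requests. A minimal sketch, assuming a corresponding `FORWARD_ONLY` member
+of `httpx.HTTPProxyMode`:
+
+```python
+proxy = httpx.HTTPProxy(
+    proxy_url="http://127.0.0.1:8080",
+    proxy_mode=httpx.HTTPProxyMode.FORWARD_ONLY
+)
+client = httpx.Client(proxies=proxy)
+
+# This request will be forwarded instead of tunnelled.
+client.get("https://example.com")
+```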
enable HTTP/2 and connection pooling for more efficient and
long-lived connections.
-* `get(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `options(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `head(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `post(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `put(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `patch(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `delete(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `request(method, url, [data], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
+* `get(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `options(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `head(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `post(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `put(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `patch(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `delete(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `request(method, url, [data], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
* `build_request(method, url, [data], [files], [json], [params], [headers], [cookies])`
## `Client`
-* `def __init__([auth], [headers], [cookies], [verify], [cert], [timeout], [pool_limits], [max_redirects], [app], [dispatch])`
+* `def __init__([auth], [headers], [cookies], [verify], [cert], [timeout], [pool_limits], [max_redirects], [app], [dispatch], [proxies])`
* `.headers` - **Headers**
* `.cookies` - **Cookies**
-* `def .get(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `def .options(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `def .head(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `def .post(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `def .put(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `def .patch(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `def .delete(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
-* `def .request(method, url, [data], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout])`
+* `def .get(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `def .options(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `def .head(url, [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `def .post(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `def .put(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `def .patch(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `def .delete(url, [data], [json], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
+* `def .request(method, url, [data], [params], [headers], [cookies], [auth], [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
* `def .build_request(method, url, [data], [files], [json], [params], [headers], [cookies])`
-* `def .send(request, [stream], [allow_redirects], [verify], [cert], [timeout])`
+* `def .send(request, [stream], [allow_redirects], [verify], [cert], [timeout], [proxies])`
* `def .close()`
## `Response`
CLIENT_HANDSHAKE_TRAFFIC_SECRET XXXX
CLIENT_TRAFFIC_SECRET_0 XXXX
```
+
+`HTTP_PROXY`, `HTTPS_PROXY`, `ALL_PROXY`
+----------------------------------------
+
+Valid values: A URL to a proxy
+
+Sets the proxy to be used for `http`, `https`, or all requests respectively.
+
+```bash
+export HTTP_PROXY=http://127.0.0.1:3080
+
+# This request will be sent through the proxy
+python -c "import httpx; httpx.get('http://example.com')"
+```
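+
+These environment variables are only consulted when no explicit `proxies` argument
+is given and `trust_env` allows it. A minimal sketch of opting out, assuming the
+`trust_env` flag is accepted by the `Client` constructor:
+
+```python
+import httpx
+
+# Ignore HTTP_PROXY / HTTPS_PROXY / ALL_PROXY for this client.
+client = httpx.Client(trust_env=False)
+
+# An explicit `proxies` argument always takes precedence over the environment.
+client = httpx.Client(proxies="http://127.0.0.1:3080")
+```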
AuthTypes,
CookieTypes,
HeaderTypes,
+ ProxiesTypes,
QueryParamTypes,
RequestData,
RequestFiles,
verify: VerifyTypes = True,
stream: bool = False,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
with Client(http_versions=["HTTP/1.1"]) as client:
return client.request(
verify: VerifyTypes = True,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return request(
"GET",
verify: VerifyTypes = True,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return request(
"OPTIONS",
verify: VerifyTypes = True,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return request(
"HEAD",
verify: VerifyTypes = True,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return request(
"POST",
verify: VerifyTypes = True,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return request(
"PUT",
verify: VerifyTypes = True,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return request(
"PATCH",
verify: VerifyTypes = True,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return request(
"DELETE",
from .dispatch.asgi import ASGIDispatch
from .dispatch.base import AsyncDispatcher, Dispatcher
from .dispatch.connection_pool import ConnectionPool
+from .dispatch.proxy_http import HTTPProxy
from .dispatch.threaded import ThreadedDispatcher
from .dispatch.wsgi import WSGIDispatch
from .exceptions import HTTPError, InvalidURL
CookieTypes,
Headers,
HeaderTypes,
+ ProxiesTypes,
QueryParamTypes,
RequestData,
RequestFiles,
ResponseContent,
URLTypes,
)
-from .utils import ElapsedTimer, get_netrc_login
+from .utils import ElapsedTimer, get_environment_proxies, get_netrc_login
class BaseClient:
def __init__(
self,
+ *,
auth: AuthTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
verify: VerifyTypes = True,
cert: CertTypes = None,
http_versions: HTTPVersionTypes = None,
+ proxies: ProxiesTypes = None,
timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
pool_limits: PoolLimits = DEFAULT_POOL_LIMITS,
max_redirects: int = DEFAULT_MAX_REDIRECTS,
else:
self.base_url = URL(base_url)
+ if proxies is None and trust_env:
+ proxies = typing.cast(ProxiesTypes, get_environment_proxies())
+
+ self.proxies: typing.Dict[str, AsyncDispatcher] = _proxies_to_dispatchers(
+ proxies
+ )
+
self.auth = auth
self._headers = Headers(headers)
self._cookies = Cookies(cookies)
cert: CertTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> AsyncResponse:
if request.url.scheme not in ("http", "https"):
raise InvalidURL('URL scheme must be "http" or "https".')
+ if proxies is not None:
+ dispatch_proxies = _proxies_to_dispatchers(proxies)
+ else:
+ dispatch_proxies = self.proxies
+ dispatch = self._dispatcher_for_request(request, dispatch_proxies)
+
async def get_response(request: AsyncRequest) -> AsyncResponse:
try:
with ElapsedTimer() as timer:
- response = await self.dispatch.send(
+ response = await dispatch.send(
request, verify=verify, cert=cert, timeout=timeout
)
response.elapsed = timer.elapsed
except HTTPError as exc:
- # Add the original request to any HTTPError
- exc.request = request
+ # Add the original request to any HTTPError unless
+ # there's already a request attached, as is the case
+ # for a ProxyError.
+ if exc.request is None:
+ exc.request = request
raise
self.cookies.extract_cookies(response)
return None
+ def _dispatcher_for_request(
+ self, request: AsyncRequest, proxies: typing.Dict[str, AsyncDispatcher]
+ ) -> AsyncDispatcher:
+ """Gets the AsyncDispatcher instance that should be used for a given Request"""
+ if proxies:
+ url = request.url
+ is_default_port = (url.scheme == "http" and url.port == 80) or (
+ url.scheme == "https" and url.port == 443
+ )
+ hostname = f"{url.host}:{url.port}"
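+ # Try proxy keys from most to least specific: scheme://host, all://host,
+ # scheme, then 'all'. Host keys are checked with the explicit port first,
+ # then without it if the request uses the default port for its scheme.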
+ proxy_keys = (
+ f"{url.scheme}://{hostname}",
+ f"{url.scheme}://{url.host}" if is_default_port else None,
+ f"all://{hostname}",
+ f"all://{url.host}" if is_default_port else None,
+ url.scheme,
+ "all",
+ )
+ for proxy_key in proxy_keys:
+ if proxy_key and proxy_key in proxies:
+ dispatcher = proxies[proxy_key]
+ return dispatcher
+
+ return self.dispatch
+
def build_request(
self,
method: str,
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"GET",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
async def options(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"OPTIONS",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
async def head(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"HEAD",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
async def post(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"POST",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
async def put(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"PUT",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
async def patch(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"PATCH",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
async def delete(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self.request(
"DELETE",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
async def request(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> AsyncResponse:
request = self.build_request(
method=method,
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
return response
cert: CertTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> AsyncResponse:
return await self._get_response(
request=request,
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
async def close(self) -> None:
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
request = self.build_request(
method=method,
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
return response
cert: CertTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
concurrency_backend = self.concurrency_backend
"cert": cert,
"timeout": timeout,
"trust_env": trust_env,
+ "proxies": proxies,
}
async_response = concurrency_backend.run(coroutine, *args, **kwargs)
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"GET",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
def options(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"OPTIONS",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
def head(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"HEAD",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
def post(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"POST",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
def put(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"PUT",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
def patch(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"PATCH",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
def delete(
verify: VerifyTypes = None,
timeout: TimeoutTypes = None,
trust_env: bool = None,
+ proxies: ProxiesTypes = None,
) -> Response:
return self.request(
"DELETE",
cert=cert,
timeout=timeout,
trust_env=trust_env,
+ proxies=proxies,
)
def close(self) -> None:
traceback: TracebackType = None,
) -> None:
self.close()
+
+
+def _proxy_from_url(url: URLTypes) -> AsyncDispatcher:
+ url = URL(url)
+ if url.scheme in ("http", "https"):
+ return HTTPProxy(url)
+ raise ValueError(f"Unknown proxy for {url!r}")
+
+
+def _proxies_to_dispatchers(
+ proxies: typing.Optional[ProxiesTypes]
+) -> typing.Dict[str, AsyncDispatcher]:
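+ """Converts a `proxies` argument (proxy URL, AsyncDispatcher, or a mapping
+ of proxy keys to URLs/dispatchers) into a dict of AsyncDispatcher
+ instances keyed by proxy key."""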
+ if proxies is None:
+ return {}
+ elif isinstance(proxies, (str, URL)):
+ return {"all": _proxy_from_url(proxies)}
+ elif isinstance(proxies, AsyncDispatcher):
+ return {"all": proxies}
+ else:
+ new_proxies = {}
+ for key, dispatcher_or_url in proxies.items():
+ if isinstance(dispatcher_or_url, (str, URL)):
+ new_proxies[str(key)] = _proxy_from_url(dispatcher_or_url)
+ else:
+ new_proxies[str(key)] = dispatcher_or_url
+ return new_proxies
return response
async def acquire_connection(self, origin: Origin) -> HTTPConnection:
- logger.debug("acquire_connection origin={origin!r}")
+ logger.debug(f"acquire_connection origin={origin!r}")
connection = self.pop_connection(origin)
if connection is None:
if typing.TYPE_CHECKING:
from .middleware.base import BaseMiddleware # noqa: F401
+ from .dispatch.base import AsyncDispatcher # noqa: F401
PrimitiveData = typing.Optional[typing.Union[str, int, float, bool]]
"BaseMiddleware",
]
+ProxiesTypes = typing.Union[
+ URLTypes,
+ "AsyncDispatcher",
+ typing.Dict[URLTypes, typing.Union[URLTypes, "AsyncDispatcher"]],
+]
+
AsyncRequestData = typing.Union[dict, str, bytes, typing.AsyncIterator[bytes]]
RequestData = typing.Union[dict, str, bytes, typing.Iterator[bytes]]
from pathlib import Path
from time import perf_counter
from types import TracebackType
+from urllib.request import getproxies
def normalize_header_key(value: typing.AnyStr, encoding: str = None) -> bytes:
return logging.getLogger(name)
+def get_environment_proxies() -> typing.Dict[str, str]:
+ """Gets proxy information from the environment"""
+
+ # urllib.request.getproxies() falls back on System
+ # Registry and Config for proxies on Windows and macOS.
+ # We don't want to propagate non-HTTP proxies into
+ # our configuration such as 'TRAVIS_APT_PROXY'.
+ proxies = {
+ key: val
+ for key, val in getproxies().items()
+ if ("://" in key or key in ("http", "https"))
+ }
+
+ # The 'all' key is dropped by the filter above, so look up ALL_PROXY
+ # explicitly, favoring the lowercase environment variable over uppercase.
+ all_proxy = get_environ_lower_and_upper("ALL_PROXY")
+ if all_proxy is not None:
+ proxies["all"] = all_proxy
+
+ return proxies
+
+
+def get_environ_lower_and_upper(key: str) -> typing.Optional[str]:
+ """Gets a value from os.environ with both the lowercase and uppercase
+ environment variable. Prioritizes the lowercase environment variable.
+ """
+ for env_key in (key.lower(), key.upper()):
+ value = os.environ.get(env_key, None)
+ if value is not None and isinstance(value, str):
+ return value
+ return None
+
+
def to_bytes(value: typing.Union[str, bytes], encoding: str = "utf-8") -> bytes:
return value.encode(encoding) if isinstance(value, str) else value
-def to_str(str_or_bytes: typing.Union[str, bytes], encoding: str = "utf-8") -> str:
- return (
- str_or_bytes if isinstance(str_or_bytes, str) else str_or_bytes.decode(encoding)
- )
+def to_str(value: typing.Union[str, bytes], encoding: str = "utf-8") -> str:
+ return value if isinstance(value, str) else value.decode(encoding)
def unquote(value: str) -> str:
--- /dev/null
+import pytest
+
+import httpx
+
+
+@pytest.mark.parametrize(
+ ["proxies", "expected_proxies"],
+ [
+ ("http://127.0.0.1", [("all", "http://127.0.0.1")]),
+ ({"all": "http://127.0.0.1"}, [("all", "http://127.0.0.1")]),
+ (
+ {"http": "http://127.0.0.1", "https": "https://127.0.0.1"},
+ [("http", "http://127.0.0.1"), ("https", "https://127.0.0.1")],
+ ),
+ (httpx.HTTPProxy("http://127.0.0.1"), [("all", "http://127.0.0.1")]),
+ (
+ {"https": httpx.HTTPProxy("https://127.0.0.1"), "all": "http://127.0.0.1"},
+ [("all", "http://127.0.0.1"), ("https", "https://127.0.0.1")],
+ ),
+ ],
+)
+def test_proxies_parameter(proxies, expected_proxies):
+ client = httpx.Client(proxies=proxies)
+
+ for proxy_key, url in expected_proxies:
+ assert proxy_key in client.proxies
+ assert client.proxies[proxy_key].proxy_url == url
+
+ assert len(expected_proxies) == len(client.proxies)
import trustme
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import (
- load_pem_private_key,
BestAvailableEncryption,
Encoding,
PrivateFormat,
+ load_pem_private_key,
)
from uvicorn.config import Config
from uvicorn.main import Server
raw_io = MockRawSocketBackend(
data_to_send=(
[
+ # Tunnel Response
b"HTTP/1.1 200 OK\r\n"
b"Date: Sun, 10 Oct 2010 23:26:07 GMT\r\n"
b"Server: proxy-server\r\n"
b"\r\n",
+ # Response 1
b"HTTP/1.1 404 Not Found\r\n"
b"Date: Sun, 10 Oct 2010 23:26:07 GMT\r\n"
b"Server: origin-server\r\n"
+ b"Connection: keep-alive\r\n"
+ b"Content-Length: 0\r\n"
+ b"\r\n",
+ # Response 2
+ b"HTTP/1.1 200 OK\r\n"
+ b"Date: Sun, 10 Oct 2010 23:26:07 GMT\r\n"
+ b"Server: origin-server\r\n"
+ b"Connection: keep-alive\r\n"
+ b"Content-Length: 0\r\n"
b"\r\n",
]
),
backend=raw_io,
proxy_mode=httpx.HTTPProxyMode.TUNNEL_ONLY,
) as proxy:
- response = await proxy.request("GET", f"https://example.com")
+ resp = await proxy.request("GET", "https://example.com")
- assert response.status_code == 404
- assert response.headers["Server"] == "origin-server"
+ assert resp.status_code == 404
+ assert resp.headers["Server"] == "origin-server"
- assert response.request.method == "GET"
- assert response.request.url == "https://example.com"
- assert response.request.headers["Host"] == "example.com"
+ assert resp.request.method == "GET"
+ assert resp.request.url == "https://example.com"
+ assert resp.request.headers["Host"] == "example.com"
+
+ await resp.read()
+
+ # Make another request to see that the tunnel is re-used.
+ resp = await proxy.request("GET", "https://example.com/target")
+
+ assert resp.status_code == 200
+ assert resp.headers["Server"] == "origin-server"
+
+ assert resp.request.method == "GET"
+ assert resp.request.url == "https://example.com/target"
+ assert resp.request.headers["Host"] == "example.com"
+
+ await resp.read()
recv = raw_io.received_data
- assert len(recv) == 4
+ assert len(recv) == 5
assert recv[0] == b"--- CONNECT(127.0.0.1, 8000) ---"
assert recv[1].startswith(
b"CONNECT example.com:443 HTTP/1.1\r\nhost: 127.0.0.1:8000\r\n"
)
assert recv[2] == b"--- START_TLS(example.com) ---"
assert recv[3].startswith(b"GET / HTTP/1.1\r\nhost: example.com\r\n")
+ assert recv[4].startswith(b"GET /target HTTP/1.1\r\nhost: example.com\r\n")
@pytest.mark.parametrize(
return b""
return self.backend.data_to_send.pop(0)
+ def is_connection_dropped(self) -> bool:
+ return False
+
async def close(self) -> None:
pass
from httpx import utils
from httpx.utils import (
ElapsedTimer,
+ get_environment_proxies,
get_netrc_login,
guess_json_utf,
parse_header_links,
0.1
) # test to ensure time spent after timer exits isn't accounted for.
assert timer.elapsed.total_seconds() == pytest.approx(0.1, abs=0.05)
+
+
+@pytest.mark.parametrize(
+ ["environment", "proxies"],
+ [
+ ({}, {}),
+ ({"HTTP_PROXY": "http://127.0.0.1"}, {"http": "http://127.0.0.1"}),
+ (
+ {"https_proxy": "http://127.0.0.1", "HTTP_PROXY": "https://127.0.0.1"},
+ {"https": "http://127.0.0.1", "http": "https://127.0.0.1"},
+ ),
+ (
+ {"all_proxy": "http://127.0.0.1", "ALL_PROXY": "https://1.1.1.1"},
+ {"all": "http://127.0.0.1"},
+ ),
+ (
+ {"https_proxy": "http://127.0.0.1", "HTTPS_PROXY": "https://1.1.1.1"},
+ {"https": "http://127.0.0.1"},
+ ),
+ ({"TRAVIS_APT_PROXY": "http://127.0.0.1"}, {}),
+ ],
+)
+def test_get_environment_proxies(environment, proxies, monkeypatch):
+    # monkeypatch gives each parametrized case a clean environment and
+    # reverts the changes afterwards, so cases don't leak into each other.
+    for name in (
+        "HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY", "TRAVIS_APT_PROXY",
+        "http_proxy", "https_proxy", "all_proxy",
+    ):
+        monkeypatch.delenv(name, raising=False)
+    for name, value in environment.items():
+        monkeypatch.setenv(name, value)
+
+    assert get_environment_proxies() == proxies