git.ipfire.org Git - thirdparty/httpx.git/commitdiff
Rename URLMatcher -> URLPattern (#1109)
authorFlorimond Manca <florimond.manca@gmail.com>
Sat, 1 Aug 2020 09:07:31 +0000 (11:07 +0200)
committerGitHub <noreply@github.com>
Sat, 1 Aug 2020 09:07:31 +0000 (11:07 +0200)
httpx/_client.py
httpx/_utils.py
tests/client/test_proxies.py
tests/test_utils.py

index a718649e4ad0f5d8fb14ce62c679475325cf4487..c11b466c27ecddd4f4c1ca8a123f06716b5b63d0 100644 (file)
@@ -44,7 +44,7 @@ from ._types import (
 )
 from ._utils import (
     NetRCInfo,
-    URLMatcher,
+    URLPattern,
     enforce_http_url,
     get_environment_proxies,
     get_logger,
@@ -474,9 +474,9 @@ class Client(BaseClient):
             trust_env=trust_env,
         )
         self._proxies: typing.Dict[
-            URLMatcher, typing.Optional[httpcore.SyncHTTPTransport]
+            URLPattern, typing.Optional[httpcore.SyncHTTPTransport]
         ] = {
-            URLMatcher(key): None
+            URLPattern(key): None
             if proxy is None
             else self._init_proxy_transport(
                 proxy,
@@ -547,8 +547,8 @@ class Client(BaseClient):
         enforce_http_url(request)
 
         if self._proxies and not should_not_be_proxied(url):
-            for matcher, transport in self._proxies.items():
-                if matcher.matches(url):
+            for pattern, transport in self._proxies.items():
+                if pattern.matches(url):
                     return self._transport if transport is None else transport
 
         return self._transport
@@ -998,9 +998,9 @@ class AsyncClient(BaseClient):
             trust_env=trust_env,
         )
         self._proxies: typing.Dict[
-            URLMatcher, typing.Optional[httpcore.AsyncHTTPTransport]
+            URLPattern, typing.Optional[httpcore.AsyncHTTPTransport]
         ] = {
-            URLMatcher(key): None
+            URLPattern(key): None
             if proxy is None
             else self._init_proxy_transport(
                 proxy,
@@ -1071,8 +1071,8 @@ class AsyncClient(BaseClient):
         enforce_http_url(request)
 
         if self._proxies and not should_not_be_proxied(url):
-            for matcher, transport in self._proxies.items():
-                if matcher.matches(url):
+            for pattern, transport in self._proxies.items():
+                if pattern.matches(url):
                     return self._transport if transport is None else transport
 
         return self._transport
index 41b71c248f8e26bb48a334dee649c559dfcd3c6f..3f869810ac7282fd6e962f5bcafb46b45f4fb430 100644 (file)
@@ -434,24 +434,24 @@ class ElapsedTimer:
         return timedelta(seconds=self.end - self.start)
 
 
-class URLMatcher:
+class URLPattern:
     """
     A utility class currently used for making lookups against proxy keys...
 
     # Wildcard matching...
-    >>> pattern = URLMatcher("all")
+    >>> pattern = URLPattern("all")
     >>> pattern.matches(httpx.URL("http://example.com"))
     True
 
     # With scheme matching...
-    >>> pattern = URLMatcher("https")
+    >>> pattern = URLPattern("https")
     >>> pattern.matches(httpx.URL("https://example.com"))
     True
     >>> pattern.matches(httpx.URL("http://example.com"))
     False
 
     # With domain matching...
-    >>> pattern = URLMatcher("https://example.com")
+    >>> pattern = URLPattern("https://example.com")
     >>> pattern.matches(httpx.URL("https://example.com"))
     True
     >>> pattern.matches(httpx.URL("http://example.com"))
@@ -460,7 +460,7 @@ class URLMatcher:
     False
 
     # Wildcard scheme, with domain matching...
-    >>> pattern = URLMatcher("all://example.com")
+    >>> pattern = URLPattern("all://example.com")
     >>> pattern.matches(httpx.URL("https://example.com"))
     True
     >>> pattern.matches(httpx.URL("http://example.com"))
@@ -469,7 +469,7 @@ class URLMatcher:
     False
 
     # With port matching...
-    >>> pattern = URLMatcher("https://example.com:1234")
+    >>> pattern = URLPattern("https://example.com:1234")
     >>> pattern.matches(httpx.URL("https://example.com:1234"))
     True
     >>> pattern.matches(httpx.URL("https://example.com"))
@@ -500,7 +500,7 @@ class URLMatcher:
     @property
     def priority(self) -> tuple:
         """
-        The priority allows URLMatcher instances to be sortable, so that
+        The priority allows URLPattern instances to be sortable, so that
         we can match from most specific to least specific.
         """
         port_priority = -1 if self.port is not None else 0
@@ -511,11 +511,11 @@ class URLMatcher:
     def __hash__(self) -> int:
         return hash(self.pattern)
 
-    def __lt__(self, other: "URLMatcher") -> bool:
+    def __lt__(self, other: "URLPattern") -> bool:
         return self.priority < other.priority
 
     def __eq__(self, other: typing.Any) -> bool:
-        return isinstance(other, URLMatcher) and self.pattern == other.pattern
+        return isinstance(other, URLPattern) and self.pattern == other.pattern
 
 
 def warn_deprecated(message: str) -> None:  # pragma: nocover
index 8d012fe668b50419c5babf03a7fa4303e59be24a..4fc1831943aa32d776da838ceba776c0d2fffbbf 100644 (file)
@@ -2,7 +2,7 @@ import httpcore
 import pytest
 
 import httpx
-from httpx._utils import URLMatcher
+from httpx._utils import URLPattern
 
 
 def url_to_origin(url: str):
@@ -37,9 +37,9 @@ def test_proxies_parameter(proxies, expected_proxies):
     client = httpx.AsyncClient(proxies=proxies)
 
     for proxy_key, url in expected_proxies:
-        matcher = URLMatcher(proxy_key)
-        assert matcher in client._proxies
-        proxy = client._proxies[matcher]
+        pattern = URLPattern(proxy_key)
+        assert pattern in client._proxies
+        proxy = client._proxies[pattern]
         assert isinstance(proxy, httpcore.AsyncHTTPProxy)
         assert proxy.proxy_origin == url_to_origin(url)
 
index 88fb1000bbac33b5339cecbcd709008445d46909..c2ab33dfe13bf3791eff7daf9285d7247acf8c8c 100644 (file)
@@ -8,7 +8,7 @@ import httpx
 from httpx._utils import (
     ElapsedTimer,
     NetRCInfo,
-    URLMatcher,
+    URLPattern,
     get_ca_bundle_from_env,
     get_environment_proxies,
     guess_json_utf,
@@ -331,21 +331,21 @@ def test_not_same_origin():
     ],
 )
 def test_url_matches(pattern, url, expected):
-    matcher = URLMatcher(pattern)
-    assert matcher.matches(httpx.URL(url)) == expected
+    pattern = URLPattern(pattern)
+    assert pattern.matches(httpx.URL(url)) == expected
 
 
-def test_matcher_priority():
+def test_pattern_priority():
     matchers = [
-        URLMatcher("all://"),
-        URLMatcher("http://"),
-        URLMatcher("http://example.com"),
-        URLMatcher("http://example.com:123"),
+        URLPattern("all://"),
+        URLPattern("http://"),
+        URLPattern("http://example.com"),
+        URLPattern("http://example.com:123"),
     ]
     random.shuffle(matchers)
     assert sorted(matchers) == [
-        URLMatcher("http://example.com:123"),
-        URLMatcher("http://example.com"),
-        URLMatcher("http://"),
-        URLMatcher("all://"),
+        URLPattern("http://example.com:123"),
+        URLPattern("http://example.com"),
+        URLPattern("http://"),
+        URLPattern("all://"),
     ]