Move static functions in curl_httpclient to methods of CurlAsyncHTTPClient.
author Ben Darnell <ben@bendarnell.com>
Sun, 21 Sep 2014 16:18:16 +0000 (12:18 -0400)
committer Ben Darnell <ben@bendarnell.com>
Sun, 21 Sep 2014 16:19:49 +0000 (12:19 -0400)
No functional changes in this commit, just code movement.
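
The change is purely mechanical: each module-level helper (_curl_create, _curl_setup_request, _curl_header_callback, _curl_debug) becomes an instance method of CurlAsyncHTTPClient, and every call site gains a self. prefix; one likely benefit is that subclasses can now override these hooks. A minimal, self-contained sketch of the pattern follows (the WidgetPool/_make_widget names are illustrative, not from Tornado):

    # Before the move: a private module-level helper, invoked as a free function.
    def _make_widget():
        return {"configured": True}

    class WidgetPoolBefore(object):
        def __init__(self, size):
            self._widgets = [_make_widget() for _ in range(size)]

    # After the move: the same helper is an instance method, reached through
    # self, so it no longer sits in the module namespace and subclasses can
    # override it.
    class WidgetPoolAfter(object):
        def __init__(self, size):
            self._widgets = [self._make_widget() for _ in range(size)]

        def _make_widget(self):
            return {"configured": True}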

tornado/curl_httpclient.py

index 9c2aeb9f2e0c4e9d4b70522650ec234c57690bf9..406f80c1026a886e759cebc4a9682aafa648c8c8 100644 (file)
@@ -40,7 +40,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
         self._multi = pycurl.CurlMulti()
         self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
         self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
-        self._curls = [_curl_create() for i in range(max_clients)]
+        self._curls = [self._curl_create() for i in range(max_clients)]
         self._free_list = self._curls[:]
         self._requests = collections.deque()
         self._fds = {}
@@ -206,8 +206,8 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                         "callback": callback,
                         "curl_start_time": time.time(),
                     }
-                    _curl_setup_request(curl, request, curl.info["buffer"],
-                                        curl.info["headers"])
+                    self._curl_setup_request(curl, request, curl.info["buffer"],
+                                             curl.info["headers"])
                     self._multi.add_handle(curl)
 
                 if not started:
@@ -254,6 +254,210 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
     def handle_callback_exception(self, callback):
         self.io_loop.handle_callback_exception(callback)
 
+    def _curl_create(self):
+        curl = pycurl.Curl()
+        if gen_log.isEnabledFor(logging.DEBUG):
+            curl.setopt(pycurl.VERBOSE, 1)
+            curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug)
+        return curl
+
+    def _curl_setup_request(self, curl, request, buffer, headers):
+        curl.setopt(pycurl.URL, native_str(request.url))
+
+        # libcurl's magic "Expect: 100-continue" behavior causes delays
+        # with servers that don't support it (which include, among others,
+        # Google's OpenID endpoint).  Additionally, this behavior has
+        # a bug in conjunction with the curl_multi_socket_action API
+        # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
+        # which increases the delays.  It's more trouble than it's worth,
+        # so just turn off the feature (yes, setting Expect: to an empty
+        # value is the official way to disable this)
+        if "Expect" not in request.headers:
+            request.headers["Expect"] = ""
+
+        # libcurl adds Pragma: no-cache by default; disable that too
+        if "Pragma" not in request.headers:
+            request.headers["Pragma"] = ""
+
+        # Request headers may be either a regular dict or HTTPHeaders object
+        if isinstance(request.headers, httputil.HTTPHeaders):
+            curl.setopt(pycurl.HTTPHEADER,
+                        [native_str("%s: %s" % i) for i in request.headers.get_all()])
+        else:
+            curl.setopt(pycurl.HTTPHEADER,
+                        [native_str("%s: %s" % i) for i in request.headers.items()])
+
+        if request.header_callback:
+            curl.setopt(pycurl.HEADERFUNCTION,
+                        lambda line: request.header_callback(native_str(line)))
+        else:
+            curl.setopt(pycurl.HEADERFUNCTION,
+                        lambda line: self._curl_header_callback(
+                            headers, native_str(line)))
+        if request.streaming_callback:
+            write_function = request.streaming_callback
+        else:
+            write_function = buffer.write
+        if bytes is str:  # py2
+            curl.setopt(pycurl.WRITEFUNCTION, write_function)
+        else:  # py3
+            # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
+            # a fork/port.  That version has a bug in which it passes unicode
+            # strings instead of bytes to the WRITEFUNCTION.  This means that
+            # if you use a WRITEFUNCTION (which tornado always does), you cannot
+            # download arbitrary binary data.  This needs to be fixed in the
+            # ported pycurl package, but in the meantime this lambda will
+            # make it work for downloading (utf8) text.
+            curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
+        curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
+        curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
+        curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
+        curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
+        if request.user_agent:
+            curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
+        else:
+            curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
+        if request.network_interface:
+            curl.setopt(pycurl.INTERFACE, request.network_interface)
+        if request.decompress_response:
+            curl.setopt(pycurl.ENCODING, "gzip,deflate")
+        else:
+            curl.setopt(pycurl.ENCODING, "none")
+        if request.proxy_host and request.proxy_port:
+            curl.setopt(pycurl.PROXY, request.proxy_host)
+            curl.setopt(pycurl.PROXYPORT, request.proxy_port)
+            if request.proxy_username:
+                credentials = '%s:%s' % (request.proxy_username,
+                                         request.proxy_password)
+                curl.setopt(pycurl.PROXYUSERPWD, credentials)
+        else:
+            curl.setopt(pycurl.PROXY, '')
+            curl.unsetopt(pycurl.PROXYUSERPWD)
+        if request.validate_cert:
+            curl.setopt(pycurl.SSL_VERIFYPEER, 1)
+            curl.setopt(pycurl.SSL_VERIFYHOST, 2)
+        else:
+            curl.setopt(pycurl.SSL_VERIFYPEER, 0)
+            curl.setopt(pycurl.SSL_VERIFYHOST, 0)
+        if request.ca_certs is not None:
+            curl.setopt(pycurl.CAINFO, request.ca_certs)
+        else:
+            # There is no way to restore pycurl.CAINFO to its default value
+            # (Using unsetopt makes it reject all certificates).
+            # I don't see any way to read the default value from python so it
+            # can be restored later.  We'll have to just leave CAINFO untouched
+            # if no ca_certs file was specified, and require that if any
+            # request uses a custom ca_certs file, they all must.
+            pass
+
+        if request.allow_ipv6 is False:
+            # Curl behaves reasonably when DNS resolution gives an ipv6 address
+            # that we can't reach, so allow ipv6 unless the user asks to disable.
+            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
+        else:
+            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
+
+        # Set the request method through curl's irritating interface which makes
+        # up names for almost every single method
+        curl_options = {
+            "GET": pycurl.HTTPGET,
+            "POST": pycurl.POST,
+            "PUT": pycurl.UPLOAD,
+            "HEAD": pycurl.NOBODY,
+        }
+        custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
+        for o in curl_options.values():
+            curl.setopt(o, False)
+        if request.method in curl_options:
+            curl.unsetopt(pycurl.CUSTOMREQUEST)
+            curl.setopt(curl_options[request.method], True)
+        elif request.allow_nonstandard_methods or request.method in custom_methods:
+            curl.setopt(pycurl.CUSTOMREQUEST, request.method)
+        else:
+            raise KeyError('unknown method ' + request.method)
+
+        # Handle curl's cryptic options for every individual HTTP method
+        if request.method == "GET":
+            if request.body is not None:
+                raise AssertionError('Body must be empty for GET request')
+        elif request.method in ("POST", "PUT") or request.body:
+            if request.body is None:
+                raise AssertionError(
+                    'Body must not be empty for "%s" request'
+                    % request.method)
+
+            request_buffer = BytesIO(utf8(request.body))
+            def ioctl(cmd):
+                if cmd == curl.IOCMD_RESTARTREAD:
+                    request_buffer.seek(0)
+            curl.setopt(pycurl.READFUNCTION, request_buffer.read)
+            curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
+            if request.method == "POST":
+                curl.setopt(pycurl.POSTFIELDSIZE, len(request.body))
+            else:
+                curl.setopt(pycurl.UPLOAD, True)
+                curl.setopt(pycurl.INFILESIZE, len(request.body))
+
+        if request.auth_username is not None:
+            userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
+
+            if request.auth_mode is None or request.auth_mode == "basic":
+                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
+            elif request.auth_mode == "digest":
+                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
+            else:
+                raise ValueError("Unsupported auth_mode %s" % request.auth_mode)
+
+            curl.setopt(pycurl.USERPWD, native_str(userpwd))
+            gen_log.debug("%s %s (username: %r)", request.method, request.url,
+                          request.auth_username)
+        else:
+            curl.unsetopt(pycurl.USERPWD)
+            gen_log.debug("%s %s", request.method, request.url)
+
+        if request.client_cert is not None:
+            curl.setopt(pycurl.SSLCERT, request.client_cert)
+
+        if request.client_key is not None:
+            curl.setopt(pycurl.SSLKEY, request.client_key)
+
+        if threading.activeCount() > 1:
+            # libcurl/pycurl is not thread-safe by default.  When multiple threads
+            # are used, signals should be disabled.  This has the side effect
+            # of disabling DNS timeouts in some environments (when libcurl is
+            # not linked against ares), so we don't do it when there is only one
+            # thread.  Applications that use many short-lived threads may need
+            # to set NOSIGNAL manually in a prepare_curl_callback since
+            # there may not be any other threads running at the time we call
+            # threading.activeCount.
+            curl.setopt(pycurl.NOSIGNAL, 1)
+        if request.prepare_curl_callback is not None:
+            request.prepare_curl_callback(curl)
+
+    def _curl_header_callback(self, headers, header_line):
+        # header_line as returned by curl includes the end-of-line characters.
+        header_line = header_line.strip()
+        if header_line.startswith("HTTP/"):
+            headers.clear()
+            try:
+                (__, __, reason) = httputil.parse_response_start_line(header_line)
+                header_line = "X-Http-Reason: %s" % reason
+            except httputil.HTTPInputError:
+                return
+        if not header_line:
+            return
+        headers.parse_line(header_line)
+
+    def _curl_debug(self, debug_type, debug_msg):
+        debug_types = ('I', '<', '>', '<', '>')
+        if debug_type == 0:
+            gen_log.debug('%s', debug_msg.strip())
+        elif debug_type in (1, 2):
+            for line in debug_msg.splitlines():
+                gen_log.debug('%s %s', debug_types[debug_type], line)
+        elif debug_type == 4:
+            gen_log.debug('%s %r', debug_types[debug_type], debug_msg)
+
 
 class CurlError(HTTPError):
     def __init__(self, errno, message):
@@ -261,213 +465,6 @@ class CurlError(HTTPError):
         self.errno = errno
 
 
-def _curl_create():
-    curl = pycurl.Curl()
-    if gen_log.isEnabledFor(logging.DEBUG):
-        curl.setopt(pycurl.VERBOSE, 1)
-        curl.setopt(pycurl.DEBUGFUNCTION, _curl_debug)
-    return curl
-
-
-def _curl_setup_request(curl, request, buffer, headers):
-    curl.setopt(pycurl.URL, native_str(request.url))
-
-    # libcurl's magic "Expect: 100-continue" behavior causes delays
-    # with servers that don't support it (which include, among others,
-    # Google's OpenID endpoint).  Additionally, this behavior has
-    # a bug in conjunction with the curl_multi_socket_action API
-    # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
-    # which increases the delays.  It's more trouble than it's worth,
-    # so just turn off the feature (yes, setting Expect: to an empty
-    # value is the official way to disable this)
-    if "Expect" not in request.headers:
-        request.headers["Expect"] = ""
-
-    # libcurl adds Pragma: no-cache by default; disable that too
-    if "Pragma" not in request.headers:
-        request.headers["Pragma"] = ""
-
-    # Request headers may be either a regular dict or HTTPHeaders object
-    if isinstance(request.headers, httputil.HTTPHeaders):
-        curl.setopt(pycurl.HTTPHEADER,
-                    [native_str("%s: %s" % i) for i in request.headers.get_all()])
-    else:
-        curl.setopt(pycurl.HTTPHEADER,
-                    [native_str("%s: %s" % i) for i in request.headers.items()])
-
-    if request.header_callback:
-        curl.setopt(pycurl.HEADERFUNCTION,
-                    lambda line: request.header_callback(native_str(line)))
-    else:
-        curl.setopt(pycurl.HEADERFUNCTION,
-                    lambda line: _curl_header_callback(headers,
-                                                       native_str(line)))
-    if request.streaming_callback:
-        write_function = request.streaming_callback
-    else:
-        write_function = buffer.write
-    if bytes is str:  # py2
-        curl.setopt(pycurl.WRITEFUNCTION, write_function)
-    else:  # py3
-        # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
-        # a fork/port.  That version has a bug in which it passes unicode
-        # strings instead of bytes to the WRITEFUNCTION.  This means that
-        # if you use a WRITEFUNCTION (which tornado always does), you cannot
-        # download arbitrary binary data.  This needs to be fixed in the
-        # ported pycurl package, but in the meantime this lambda will
-        # make it work for downloading (utf8) text.
-        curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
-    curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
-    curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
-    curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
-    curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
-    if request.user_agent:
-        curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
-    else:
-        curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
-    if request.network_interface:
-        curl.setopt(pycurl.INTERFACE, request.network_interface)
-    if request.decompress_response:
-        curl.setopt(pycurl.ENCODING, "gzip,deflate")
-    else:
-        curl.setopt(pycurl.ENCODING, "none")
-    if request.proxy_host and request.proxy_port:
-        curl.setopt(pycurl.PROXY, request.proxy_host)
-        curl.setopt(pycurl.PROXYPORT, request.proxy_port)
-        if request.proxy_username:
-            credentials = '%s:%s' % (request.proxy_username,
-                                     request.proxy_password)
-            curl.setopt(pycurl.PROXYUSERPWD, credentials)
-    else:
-        curl.setopt(pycurl.PROXY, '')
-        curl.unsetopt(pycurl.PROXYUSERPWD)
-    if request.validate_cert:
-        curl.setopt(pycurl.SSL_VERIFYPEER, 1)
-        curl.setopt(pycurl.SSL_VERIFYHOST, 2)
-    else:
-        curl.setopt(pycurl.SSL_VERIFYPEER, 0)
-        curl.setopt(pycurl.SSL_VERIFYHOST, 0)
-    if request.ca_certs is not None:
-        curl.setopt(pycurl.CAINFO, request.ca_certs)
-    else:
-        # There is no way to restore pycurl.CAINFO to its default value
-        # (Using unsetopt makes it reject all certificates).
-        # I don't see any way to read the default value from python so it
-        # can be restored later.  We'll have to just leave CAINFO untouched
-        # if no ca_certs file was specified, and require that if any
-        # request uses a custom ca_certs file, they all must.
-        pass
-
-    if request.allow_ipv6 is False:
-        # Curl behaves reasonably when DNS resolution gives an ipv6 address
-        # that we can't reach, so allow ipv6 unless the user asks to disable.
-        curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
-    else:
-        curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
-
-    # Set the request method through curl's irritating interface which makes
-    # up names for almost every single method
-    curl_options = {
-        "GET": pycurl.HTTPGET,
-        "POST": pycurl.POST,
-        "PUT": pycurl.UPLOAD,
-        "HEAD": pycurl.NOBODY,
-    }
-    custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
-    for o in curl_options.values():
-        curl.setopt(o, False)
-    if request.method in curl_options:
-        curl.unsetopt(pycurl.CUSTOMREQUEST)
-        curl.setopt(curl_options[request.method], True)
-    elif request.allow_nonstandard_methods or request.method in custom_methods:
-        curl.setopt(pycurl.CUSTOMREQUEST, request.method)
-    else:
-        raise KeyError('unknown method ' + request.method)
-
-    # Handle curl's cryptic options for every individual HTTP method
-    if request.method == "GET":
-        if request.body is not None:
-            raise AssertionError('Body must be empty for GET request')
-    elif request.method in ("POST", "PUT") or request.body:
-        if request.body is None:
-            raise AssertionError(
-                'Body must not be empty for "%s" request'
-                % request.method)
-
-        request_buffer = BytesIO(utf8(request.body))
-        def ioctl(cmd):
-            if cmd == curl.IOCMD_RESTARTREAD:
-                request_buffer.seek(0)
-        curl.setopt(pycurl.READFUNCTION, request_buffer.read)
-        curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
-        if request.method == "POST":
-            curl.setopt(pycurl.POSTFIELDSIZE, len(request.body))
-        else:
-            curl.setopt(pycurl.UPLOAD, True)
-            curl.setopt(pycurl.INFILESIZE, len(request.body))
-
-    if request.auth_username is not None:
-        userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
-
-        if request.auth_mode is None or request.auth_mode == "basic":
-            curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
-        elif request.auth_mode == "digest":
-            curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
-        else:
-            raise ValueError("Unsupported auth_mode %s" % request.auth_mode)
-
-        curl.setopt(pycurl.USERPWD, native_str(userpwd))
-        gen_log.debug("%s %s (username: %r)", request.method, request.url,
-                      request.auth_username)
-    else:
-        curl.unsetopt(pycurl.USERPWD)
-        gen_log.debug("%s %s", request.method, request.url)
-
-    if request.client_cert is not None:
-        curl.setopt(pycurl.SSLCERT, request.client_cert)
-
-    if request.client_key is not None:
-        curl.setopt(pycurl.SSLKEY, request.client_key)
-
-    if threading.activeCount() > 1:
-        # libcurl/pycurl is not thread-safe by default.  When multiple threads
-        # are used, signals should be disabled.  This has the side effect
-        # of disabling DNS timeouts in some environments (when libcurl is
-        # not linked against ares), so we don't do it when there is only one
-        # thread.  Applications that use many short-lived threads may need
-        # to set NOSIGNAL manually in a prepare_curl_callback since
-        # there may not be any other threads running at the time we call
-        # threading.activeCount.
-        curl.setopt(pycurl.NOSIGNAL, 1)
-    if request.prepare_curl_callback is not None:
-        request.prepare_curl_callback(curl)
-
-
-def _curl_header_callback(headers, header_line):
-    # header_line as returned by curl includes the end-of-line characters.
-    header_line = header_line.strip()
-    if header_line.startswith("HTTP/"):
-        headers.clear()
-        try:
-            (__, __, reason) = httputil.parse_response_start_line(header_line)
-            header_line = "X-Http-Reason: %s" % reason
-        except httputil.HTTPInputError:
-            return
-    if not header_line:
-        return
-    headers.parse_line(header_line)
-
-
-def _curl_debug(debug_type, debug_msg):
-    debug_types = ('I', '<', '>', '<', '>')
-    if debug_type == 0:
-        gen_log.debug('%s', debug_msg.strip())
-    elif debug_type in (1, 2):
-        for line in debug_msg.splitlines():
-            gen_log.debug('%s %s', debug_types[debug_type], line)
-    elif debug_type == 4:
-        gen_log.debug('%s %r', debug_types[debug_type], debug_msg)
-
 if __name__ == "__main__":
     AsyncHTTPClient.configure(CurlAsyncHTTPClient)
     main()
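
For reference, application code selects the curl-backed client by configuring it as the AsyncHTTPClient implementation, which is also what the __main__ block above does. A minimal usage sketch (not part of this commit; assumes pycurl is installed and the target URL is reachable):

    from tornado.httpclient import AsyncHTTPClient, HTTPClient

    # Use the libcurl-based client for all AsyncHTTPClient instances.
    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")

    # HTTPClient is the blocking wrapper; it picks up the configured implementation.
    http_client = HTTPClient()
    response = http_client.fetch("http://example.com/")
    print("%d %d bytes" % (response.code, len(response.body)))
    http_client.close()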