]> git.ipfire.org Git - thirdparty/curl.git/commitdiff
http2: upgrade tests and add fix for non-existing stream
authorStefan Eissing <stefan@eissing.org>
Tue, 1 Aug 2023 08:31:58 +0000 (10:31 +0200)
committerDaniel Stenberg <daniel@haxx.se>
Thu, 3 Aug 2023 15:05:13 +0000 (17:05 +0200)
- check in h2 filter recv that stream actually exists
  and return error if not
- add test for parallel, extreme h2 upgrades that fail if
  connections get reused before fully switched
- add h2 upgrade upload test just for completeness

Closes #11563

lib/http2.c
tests/http/clients/.gitignore
tests/http/clients/Makefile.inc
tests/http/clients/h2-upgrade-extreme.c [new file with mode: 0644]
tests/http/test_02_download.py
tests/http/test_07_upload.py
tests/http/testenv/client.py
tests/http/testenv/httpd.py

index e48ef96f38950e114d905eefaca98695c431281e..6e1607cb7766ed337211b7a5b9eab917eead926f 100644 (file)
@@ -1826,7 +1826,19 @@ static ssize_t cf_h2_recv(struct Curl_cfilter *cf, struct Curl_easy *data,
   ssize_t nread = -1;
   CURLcode result;
   struct cf_call_data save;
-  DEBUGASSERT(stream);
+
+  if(!stream) {
+    /* Abnormal call sequence: either this transfer has never opened a stream
+     * (unlikely) or the transfer has been done, cleaned up its resources, but
+     * a read() is called anyway. It is not clear what the calling sequence
+     * is for such a case. */
+    failf(data, "[%zd-%zd], http/2 recv on a transfer never opened "
+          "or already cleared", (ssize_t)data->id,
+          (ssize_t)cf->conn->connection_id);
+    *err = CURLE_HTTP2;
+    return -1;
+  }
+
   CF_DATA_SAVE(save, cf, data);
 
   nread = stream_recv(cf, data, buf, len, err);
index 02084e18a4f5a3e4905e844bba6e4bbfeff01204..2f2d6a7095d9c7ec08c2c6190c59114e90440ef5 100644 (file)
@@ -5,4 +5,5 @@
 h2-serverpush
 h2-download
 ws-data
-ws-pingpong
\ No newline at end of file
+ws-pingpong
+h2-upgrade-extreme
\ No newline at end of file
index e0abf0a338a39335598eb0dba5a074f44a3e0190..1ce98d5b7bd40d81a33b8f440b77ccb0fc485c9f 100644 (file)
@@ -27,4 +27,5 @@ check_PROGRAMS = \
   h2-serverpush \
   h2-download \
   ws-data \
-  ws-pingpong
+  ws-pingpong \
+  h2-upgrade-extreme
diff --git a/tests/http/clients/h2-upgrade-extreme.c b/tests/http/clients/h2-upgrade-extreme.c
new file mode 100644 (file)
index 0000000..e15e34c
--- /dev/null
@@ -0,0 +1,249 @@
+/***************************************************************************
+ *                                  _   _ ____  _
+ *  Project                     ___| | | |  _ \| |
+ *                             / __| | | | |_) | |
+ *                            | (__| |_| |  _ <| |___
+ *                             \___|\___/|_| \_\_____|
+ *
+ * Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
+ *
+ * This software is licensed as described in the file COPYING, which
+ * you should have received as part of this distribution. The terms
+ * are also available at https://curl.se/docs/copyright.html.
+ *
+ * You may opt to use, copy, modify, merge, publish, distribute and/or sell
+ * copies of the Software, and permit persons to whom the Software is
+ * furnished to do so, under the terms of the COPYING file.
+ *
+ * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
+ * KIND, either express or implied.
+ *
+ * SPDX-License-Identifier: curl
+ *
+ ***************************************************************************/
+/* <DESC>
+ * HTTP/2 Upgrade test
+ * </DESC>
+ */
+#include <stdio.h>
+#include <stdlib.h>
+#include <stdint.h>
+#include <inttypes.h>
+/* #include <error.h> */
+#include <errno.h>
+#include <curl/curl.h>
+#include <curl/mprintf.h>
+
+
+static void log_line_start(FILE *log, const char *idsbuf, curl_infotype type)
+{
+  /*
+   * This is the trace look that is similar to what libcurl makes on its
+   * own.
+   */
+  static const char * const s_infotype[] = {
+    "* ", "< ", "> ", "{ ", "} ", "{ ", "} "
+  };
+  if(idsbuf && *idsbuf)
+    fprintf(log, "%s%s", idsbuf, s_infotype[type]);
+  else
+    fputs(s_infotype[type], log);
+}
+
+#define TRC_IDS_FORMAT_IDS_1  "[%" CURL_FORMAT_CURL_OFF_T "-x] "
+#define TRC_IDS_FORMAT_IDS_2  "[%" CURL_FORMAT_CURL_OFF_T "-%" \
+                                   CURL_FORMAT_CURL_OFF_T "] "
+/*
+** callback for CURLOPT_DEBUGFUNCTION
+*/
+static int debug_cb(CURL *handle, curl_infotype type,
+                    char *data, size_t size,
+                    void *userdata)
+{
+  FILE *output = stderr;
+  static int newl = 0;
+  static int traced_data = 0;
+  char idsbuf[60];
+  curl_off_t xfer_id, conn_id;
+
+  (void)handle; /* not used */
+  (void)userdata;
+
+  if(!curl_easy_getinfo(handle, CURLINFO_XFER_ID, &xfer_id) && xfer_id >= 0) {
+    if(!curl_easy_getinfo(handle, CURLINFO_CONN_ID, &conn_id) &&
+        conn_id >= 0) {
+      curl_msnprintf(idsbuf, sizeof(idsbuf), TRC_IDS_FORMAT_IDS_2,
+                     xfer_id, conn_id);
+    }
+    else {
+      curl_msnprintf(idsbuf, sizeof(idsbuf), TRC_IDS_FORMAT_IDS_1, xfer_id);
+    }
+  }
+  else
+    idsbuf[0] = 0;
+
+  switch(type) {
+  case CURLINFO_HEADER_OUT:
+    if(size > 0) {
+      size_t st = 0;
+      size_t i;
+      for(i = 0; i < size - 1; i++) {
+        if(data[i] == '\n') { /* LF */
+          if(!newl) {
+            log_line_start(output, idsbuf, type);
+          }
+          (void)fwrite(data + st, i - st + 1, 1, output);
+          st = i + 1;
+          newl = 0;
+        }
+      }
+      if(!newl)
+        log_line_start(output, idsbuf, type);
+      (void)fwrite(data + st, i - st + 1, 1, output);
+    }
+    newl = (size && (data[size - 1] != '\n')) ? 1 : 0;
+    traced_data = 0;
+    break;
+  case CURLINFO_TEXT:
+  case CURLINFO_HEADER_IN:
+    if(!newl)
+      log_line_start(output, idsbuf, type);
+    (void)fwrite(data, size, 1, output);
+    newl = (size && (data[size - 1] != '\n')) ? 1 : 0;
+    traced_data = 0;
+    break;
+  case CURLINFO_DATA_OUT:
+  case CURLINFO_DATA_IN:
+  case CURLINFO_SSL_DATA_IN:
+  case CURLINFO_SSL_DATA_OUT:
+    if(!traced_data) {
+      if(!newl)
+        log_line_start(output, idsbuf, type);
+      fprintf(output, "[%ld bytes data]\n", (long)size);
+      newl = 0;
+      traced_data = 1;
+    }
+    break;
+  default: /* nada */
+    newl = 0;
+    traced_data = 1;
+    break;
+  }
+
+  return 0;
+}
+
+static size_t write_cb(char *ptr, size_t size, size_t nmemb, void *opaque)
+{
+  (void)ptr;
+  (void)opaque;
+  return size * nmemb;
+}
+
+int main(int argc, char *argv[])
+{
+  const char *url;
+  CURLM *multi;
+  CURL *easy;
+  CURLMcode mc;
+  int running_handles = 0, start_count, numfds;
+  CURLMsg *msg;
+  int msgs_in_queue;
+  char range[128];
+
+  if(argc != 2) {
+    fprintf(stderr, "%s URL\n", argv[0]);
+    exit(2);
+  }
+
+  url = argv[1];
+  multi = curl_multi_init();
+  if(!multi) {
+    fprintf(stderr, "curl_multi_init failed\n");
+    exit(1);
+  }
+
+  start_count = 200;
+  do {
+    if(start_count) {
+      easy = curl_easy_init();
+      if(!easy) {
+        fprintf(stderr, "curl_easy_init failed\n");
+        exit(1);
+      }
+      curl_easy_setopt(easy, CURLOPT_VERBOSE, 1L);
+      curl_easy_setopt(easy, CURLOPT_DEBUGFUNCTION, debug_cb);
+      curl_easy_setopt(easy, CURLOPT_URL, url);
+      curl_easy_setopt(easy, CURLOPT_NOSIGNAL, 1L);
+      curl_easy_setopt(easy, CURLOPT_AUTOREFERER, 1L);
+      curl_easy_setopt(easy, CURLOPT_FAILONERROR, 1L);
+      curl_easy_setopt(easy, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2_0);
+      curl_easy_setopt(easy, CURLOPT_WRITEFUNCTION, write_cb);
+      curl_easy_setopt(easy, CURLOPT_WRITEDATA, NULL);
+      curl_easy_setopt(easy, CURLOPT_HTTPGET, 1L);
+      curl_msnprintf(range, sizeof(range), "%" PRIu64 "-%" PRIu64,
+                     UINT64_C(0), UINT64_C(16384));
+      curl_easy_setopt(easy, CURLOPT_RANGE, range);
+
+      mc = curl_multi_add_handle(multi, easy);
+      if(mc != CURLM_OK) {
+        fprintf(stderr, "curl_multi_add_handle: %s\n",
+               curl_multi_strerror(mc));
+        exit(1);
+      }
+      --start_count;
+    }
+
+    mc = curl_multi_perform(multi, &running_handles);
+    if(mc != CURLM_OK) {
+      fprintf(stderr, "curl_multi_perform: %s\n",
+             curl_multi_strerror(mc));
+      exit(1);
+    }
+
+    if(running_handles) {
+      mc = curl_multi_poll(multi, NULL, 0, 1000000, &numfds);
+      if(mc != CURLM_OK) {
+        fprintf(stderr, "curl_multi_poll: %s\n",
+               curl_multi_strerror(mc));
+        exit(1);
+      }
+    }
+
+    /* Check for finished handles and remove. */
+    while((msg = curl_multi_info_read(multi, &msgs_in_queue))) {
+      if(msg->msg == CURLMSG_DONE) {
+        long status = 0;
+        curl_off_t xfer_id;
+        curl_easy_getinfo(msg->easy_handle, CURLINFO_XFER_ID, &xfer_id);
+        curl_easy_getinfo(msg->easy_handle, CURLINFO_RESPONSE_CODE, &status);
+        if(msg->data.result == CURLE_SEND_ERROR ||
+            msg->data.result == CURLE_RECV_ERROR) {
+          /* We get these if the server had a GOAWAY in transit on
+           * re-using a connection */
+        }
+        else if(msg->data.result) {
+          fprintf(stderr, "transfer #%" CURL_FORMAT_CURL_OFF_T
+                  ": failed with %d\n", xfer_id, msg->data.result);
+          exit(1);
+        }
+        else if(status != 206) {
+          fprintf(stderr, "transfer #%" CURL_FORMAT_CURL_OFF_T
+                  ": wrong http status %ld (expected 206)\n", xfer_id, status);
+          exit(1);
+        }
+        curl_multi_remove_handle(multi, msg->easy_handle);
+        curl_easy_cleanup(msg->easy_handle);
+        fprintf(stderr, "transfer #%" CURL_FORMAT_CURL_OFF_T" retiring "
+                "(%d now running)\n", xfer_id, running_handles);
+      }
+    }
+
+    fprintf(stderr, "running_handles=%d, yet_to_start=%d\n",
+            running_handles, start_count);
+
+  } while(running_handles > 0 || start_count);
+
+  fprintf(stderr, "exiting\n");
+  exit(EXIT_SUCCESS);
+}
index 336b571c4e9cd03a2b97f24028cdb26d64e33f73..c6ea590aac5807c8c04ebe58014111cfcca59ba6 100644 (file)
@@ -350,6 +350,22 @@ class TestDownload:
         assert r.duration > timedelta(seconds=4), \
             f'rate limited transfer should take more than 4s, not {r.duration}'
 
+    # make extreme parallel h2 upgrades, check invalid conn reuse
+    # before protocol switch has happened
+    def test_02_25_h2_upgrade_x(self, env: Env, httpd, repeat):
+        # not locally reproducible timeouts with certain SSL libs
+        # Since this test is about connection reuse handling, we skip
+        # it on these builds. Although we would certainly like to understand
+        # why this happens.
+        if env.curl_uses_lib('bearssl'):
+            pytest.skip('CI workflows timeout on bearssl build')
+        url = f'http://localhost:{env.http_port}/data-100k'
+        client = LocalClient(name='h2-upgrade-extreme', env=env, timeout=15)
+        if not client.exists():
+            pytest.skip(f'example client not built: {client.name}')
+        r = client.run(args=[url])
+        assert r.exit_code == 0, f'{client.dump_logs()}'
+
     def check_downloads(self, client, srcfile: str, count: int,
                         complete: bool = True):
         for i in range(count):
index 84a23b261543e8a5e113ca00104e260dafefbb15..6804b6b89c5a8deae9210ab86d7a0d2c0bb37c0f 100644 (file)
@@ -305,6 +305,21 @@ class TestUpload:
         assert r.exit_code == 0, r.dump_logs()
         r.check_stats(1, 200)
 
+    # upload large data on a h1 to h2 upgrade
+    def test_07_35_h1_h2_upgrade_upload(self, env: Env, httpd, nghttpx, repeat):
+        fdata = os.path.join(env.gen_dir, 'data-100k')
+        curl = CurlClient(env=env)
+        url = f'http://{env.domain1}:{env.http_port}/curltest/echo?id=[0-0]'
+        r = curl.http_upload(urls=[url], data=f'@{fdata}', extra_args=[
+            '--http2'
+        ])
+        r.check_response(count=1, http_status=200)
+        # apache does not Upgrade on request with a body
+        assert r.stats[0]['http_version'] == '1.1', f'{r}'
+        indata = open(fdata).readlines()
+        respdata = open(curl.response_file(0)).readlines()
+        assert respdata == indata
+
     def check_download(self, count, srcfile, curl):
         for i in range(count):
             dfile = curl.download_file(i)
index 3b7ea0fc64cffa5d4de353af549e0d4d35b2500c..098e55b9cdcb85f82144b489410ae143f80f8934 100644 (file)
@@ -61,6 +61,10 @@ class LocalClient:
     def run_dir(self) -> str:
         return self._run_dir
 
+    @property
+    def stderr_file(self) -> str:
+        return self._stderrfile
+
     def exists(self) -> bool:
         return os.path.exists(self.path)
 
@@ -103,3 +107,12 @@ class LocalClient:
         return ExecResult(args=myargs, exit_code=exitcode, exception=exception,
                           stdout=coutput, stderr=cerrput,
                           duration=datetime.now() - start)
+
+    def dump_logs(self):
+        lines = []
+        lines.append('>>--stdout ----------------------------------------------\n')
+        lines.extend(open(self._stdoutfile).readlines())
+        lines.append('>>--stderr ----------------------------------------------\n')
+        lines.extend(open(self._stderrfile).readlines())
+        lines.append('<<-------------------------------------------------------\n')
+        return ''.join(lines)
index 4db1845bb4ccc3ac4e15c7fe5d67d3df3a00fd1e..ecc7c6c475c0384ee54797fc7dc2f76caabb5ca8 100644 (file)
@@ -242,8 +242,9 @@ class Httpd:
                 f'PidFile httpd.pid',
                 f'ErrorLog {self._error_log}',
                 f'LogLevel {self._get_log_level()}',
+                f'StartServers 4',
                 f'H2MinWorkers 16',
-                f'H2MaxWorkers 128',
+                f'H2MaxWorkers 256',
                 f'H2Direct on',
                 f'Listen {self.env.http_port}',
                 f'Listen {self.env.https_port}',