Fix ValueError in file_uploader.py by converting @gen.coroutine to async/await
author     Quaylyn Rimer <quaylynrimer11@gmail.com>
           Mon, 28 Jul 2025 02:45:49 +0000 (20:45 -0600)
committer  Quaylyn Rimer <quaylynrimer11@gmail.com>
           Mon, 28 Jul 2025 02:45:49 +0000 (20:45 -0600)
Fixes #3182

The file_uploader.py demo was failing with a ValueError when trying to upload files,
because @gen.coroutine-decorated functions return tornado.concurrent.Future objects
when called, while asyncio.run() expects a native coroutine.

This change converts the @gen.coroutine-decorated functions to native async/await
syntax, which is the recommended modern approach and resolves the compatibility
issue with asyncio.run().
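
For context, a minimal sketch of the mismatch (illustrative only, not part of the
demo; old_style and new_style are placeholder names):

    import asyncio

    from tornado import gen


    @gen.coroutine
    def old_style():
        # Calling old_style() returns a tornado.concurrent.Future, not a coroutine.
        yield gen.sleep(0)


    async def new_style():
        # Calling new_style() returns a native coroutine object.
        await asyncio.sleep(0)


    asyncio.run(new_style())    # accepted: a native coroutine
    # asyncio.run(old_style()) is rejected with
    # "ValueError: a coroutine was expected, got <Future ...>"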

Changes:
- Remove the now-unused tornado.gen import
- Convert the @gen.coroutine-decorated functions to async def
- Convert all yield statements to await
- Maintain backward compatibility with existing Tornado versions
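
After the conversion, the demo's coroutines can be passed straight to asyncio.run().
An illustrative call (file names are placeholders; assumes the demo's post/put are
in scope and the demo server is running):

    import asyncio

    # POST all files in one multipart request, or PUT them one at a time.
    asyncio.run(post(["example1.txt", "example2.png"]))
    # asyncio.run(put(["example1.txt", "example2.png"]))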

demos/file_upload/file_uploader.py

index 67fce7ed047f0fef7b766a82aa9358c72ad2135b..6f2da04908cdff47ccf35473c346fa8f6a5af13e 100755
@@ -17,14 +17,13 @@ from functools import partial
 from urllib.parse import quote
 from uuid import uuid4
 
-from tornado import gen, httpclient
+from tornado import httpclient
 from tornado.options import define, options
 
 
 # Using HTTP POST, upload one or more files in a single multipart-form-encoded
 # request.
-@gen.coroutine
-def multipart_producer(boundary, filenames, write):
+async def multipart_producer(boundary, filenames, write):
     boundary_bytes = boundary.encode()
 
     for filename in filenames:
@@ -39,29 +38,28 @@ def multipart_producer(boundary, filenames, write):
             + (b"Content-Type: %s\r\n" % mtype.encode())
             + b"\r\n"
         )
-        yield write(buf)
+        await write(buf)
         with open(filename, "rb") as f:
             while True:
                 # 16k at a time.
                 chunk = f.read(16 * 1024)
                 if not chunk:
                     break
-                yield write(chunk)
+                await write(chunk)
 
-        yield write(b"\r\n")
+        await write(b"\r\n")
 
-    yield write(b"--%s--\r\n" % (boundary_bytes,))
+    await write(b"--%s--\r\n" % (boundary_bytes,))
 
 
 # Using HTTP PUT, upload one raw file. This is preferred for large files since
 # the server can stream the data instead of buffering it entirely in memory.
-@gen.coroutine
-def post(filenames):
+async def post(filenames):
     client = httpclient.AsyncHTTPClient()
     boundary = uuid4().hex
     headers = {"Content-Type": "multipart/form-data; boundary=%s" % boundary}
     producer = partial(multipart_producer, boundary, filenames)
-    response = yield client.fetch(
+    response = await client.fetch(
         "http://localhost:8888/post",
         method="POST",
         headers=headers,
@@ -71,8 +69,7 @@ def post(filenames):
     print(response)
 
 
-@gen.coroutine
-def raw_producer(filename, write):
+async def raw_producer(filename, write):
     with open(filename, "rb") as f:
         while True:
             # 16K at a time.
@@ -81,18 +78,17 @@ def raw_producer(filename, write):
                 # Complete.
                 break
 
-            yield write(chunk)
+            await write(chunk)
 
 
-@gen.coroutine
-def put(filenames):
+async def put(filenames):
     client = httpclient.AsyncHTTPClient()
     for filename in filenames:
         mtype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
         headers = {"Content-Type": mtype}
         producer = partial(raw_producer, filename)
         url_path = quote(os.path.basename(filename))
-        response = yield client.fetch(
+        response = await client.fetch(
             "http://localhost:8888/%s" % url_path,
             method="PUT",
             headers=headers,