git.ipfire.org Git - pbs.git/commitdiff
jobs: Stream the log file faster
author Michael Tremer <michael.tremer@ipfire.org>
Fri, 24 Jan 2025 17:14:22 +0000 (17:14 +0000)
committer Michael Tremer <michael.tremer@ipfire.org>
Fri, 24 Jan 2025 17:14:22 +0000 (17:14 +0000)
When we are downloading the entire file, we only have to decompress it;
there is no need to search for line breaks before sending each chunk.

Signed-off-by: Michael Tremer <michael.tremer@ipfire.org>
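
For illustration only (this is not the pbs code; gzip and the 64 KiB chunk
size are assumptions), the difference boils down to how the decompressed
data is pulled out of the file object: iterating line by line forces a scan
for newlines in every decompressed block, whereas read(65536) simply passes
the next chunk through.

    import gzip

    def stream_lines(path):
        # Line-based: each decompressed block is scanned for b"\n"
        # before a line can be yielded
        with gzip.open(path, "rb") as f:
            for line in f:
                yield line

    def stream_chunks(path, size=65536):
        # Chunk-based: no newline search, just decompress and pass through
        with gzip.open(path, "rb") as f:
            while True:
                chunk = f.read(size)
                if not chunk:
                    break
                yield chunk
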
src/web/jobs.py

index ca660a04ca6e17800fb539fa2568cb2eb1f6c94d..178b5a8d257c4a34e57a591f745a18a83e70a27e 100644 (file)
@@ -174,23 +174,39 @@ class LogHandler(base.BaseHandler):
                        self.render("jobs/log-stream.html", job=job)
                        return
 
+               # Set Content-Type header
+               self.set_header("Content-Type", "text/plain")
+
+               # Should we tail the log file?
                tail = self.get_argument_int("tail", None)
 
                # Should we tail the log, or stream the entire file?
                try:
-                       log = await job.tail_log(tail) if tail else await job.open_log()
+                       if tail:
+                               log = await job.tail_log(tail)
+
+                               # Since we are processing this line-based,
+                               # we send all lines that were returned to us
+                               for line in log:
+                                       self.write(line)
+
+                               # Done!
+                               return
+
+                       # If we are sending the entire file, we won't split it into lines before sending
+                       log = await job.open_log()
+
+                       while True:
+                               chunk = log.read(65536)
+                               if not chunk:
+                                       break
+
+                               self.write(chunk)
 
                # Send 404 if there is no log file
                except FileNotFoundError as e:
                        raise tornado.web.HTTPError(404, "Could not find log for %s" % job) from e
 
-               # Set Content-Type header
-               self.set_header("Content-Type", "text/plain")
-
-               # Stream the entire log
-               for line in log:
-                       self.write(line)
-
 
 class AbortHandler(base.BaseHandler):
        @base.authenticated
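
A minimal, self-contained sketch of how the chunked branch could be streamed
from a Tornado handler (the handler name, the gzip-backed log file and the
flush after every chunk are assumptions for illustration, not part of this
commit):

    import gzip

    import tornado.web

    class ExampleLogHandler(tornado.web.RequestHandler):
        async def get(self, path):
            self.set_header("Content-Type", "text/plain")

            try:
                # Decompress transparently; no line splitting is needed
                # when the client wants the entire file
                log = gzip.open(path, "rb")
            except FileNotFoundError as e:
                raise tornado.web.HTTPError(404, "Could not find log %s" % path) from e

            with log:
                while True:
                    chunk = log.read(65536)
                    if not chunk:
                        break

                    self.write(chunk)

                    # Flush after each chunk so the response is actually
                    # streamed instead of buffered in memory
                    await self.flush()

Flushing per chunk keeps memory usage flat for very large logs, at the cost
of one flush per 64 KiB of decompressed data.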