def log_digest_blake2s(self):
    """Expose the BLAKE2s digest of the log recorded on the underlying data object."""
    digest = self.data.log_digest_blake2s
    return digest
- async def open_log(self, mode="r"):
+ async def open_log(self, mode="rb"):
    """
    Opens the log file, and returns an open file handle
    """
    else:
        # NOTE(review): the `if` branch matching this `else:` is not visible in
        # this diff hunk, so the full control flow cannot be confirmed here.
        # NOTE(review): `open(open, mode)` passes the *builtin* `open` function
        # as the file path — presumably this should be a path attribute
        # (e.g. a log-file path on self); confirm against the full file.
        return open(open, mode)
async def tail_log(self, limit):
    """
    Tails the log file (i.e. returns the N last lines)
    """
    # Acquire an open handle on the log, then do the line scan in a worker
    # thread so the event loop is not blocked by file I/O
    handle = await self.open_log()
    with handle as f:
        return await asyncio.to_thread(self._tail_log, f, limit)
+
+ def _tail_log(self, f, limit):
+ # Create a new queue
+ q = []
+
+ # Walk through each line of the log file
+ for line in f:
+ q.append(line)
+
+ # Truncate the queue to limit to always have up to the last "limit" lines
+ del q[:-limit]
+
+ # Return all lines
+ return q
+
async def _import_log(self, upload):
# Create some destination path
path = self.backend.path(
with await job.open_log() as f:
self.assertIsInstance(f, io.IOBase)
+ # Fetch the last 100 lines of the log
+ lines = await job.tail_log(100)
+
+ # Check if we received the correct type
+ self.assertIsInstance(lines, list)
+
+ # Check if the result had the correct length
+ self.assertEqual(len(lines), 100)
+
if __name__ == "__main__":