git.ipfire.org Git - thirdparty/paperless-ngx.git/commitdiff
better exception logging
author jonaswinkler <jonas.winkler@jpwinkler.de>
Thu, 11 Feb 2021 21:16:41 +0000 (22:16 +0100)
committer jonaswinkler <jonas.winkler@jpwinkler.de>
Thu, 11 Feb 2021 21:16:41 +0000 (22:16 +0100)
src/documents/classifier.py
src/documents/index.py
src/documents/management/commands/document_archiver.py
src/documents/management/commands/document_consumer.py
src/paperless_mail/tasks.py
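
For context (not part of the commit): logging.Logger.exception() logs at ERROR level and automatically appends the current traceback, so it is equivalent to logger.error(..., exc_info=True) and makes interpolating str(e) into the message unnecessary. Below is a minimal, self-contained sketch of the pattern the handlers in this diff switch to; the logger name, model path, and failing function are made up for illustration.

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("paperless.example")  # hypothetical logger name


    def load_model(path):
        # Stand-in failure for illustration only.
        raise EOFError("truncated model file")


    try:
        load_model("/tmp/classification_model.pickle")  # hypothetical path
    except EOFError:
        # Old pattern: message only, no stack trace:
        #     logger.error(f"Unrecoverable error while loading model: {e}")
        # New pattern: same message, plus the full traceback in the log output.
        logger.exception(
            "Unrecoverable error while loading document "
            "classification model, deleting model file."
        )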

index 81f8603149a64382ed6dab32037966f7f5fc1cf8..c880e50d6349a15aa858141f4aaf18847e282211 100755 (executable)
@@ -43,9 +43,9 @@ def load_classifier():
                       version=version, timeout=86400)
         except (EOFError, IncompatibleClassifierVersionError) as e:
             # there's something wrong with the model file.
-            logger.error(
+            logger.exception(
                 f"Unrecoverable error while loading document "
-                f"classification model: {str(e)}, deleting model file."
+                f"classification model, deleting model file."
             )
             os.unlink(settings.MODEL_FILE)
             classifier = None
index bc17daeb10bf94ba96e436ecd5ee6784ac805e28..ea788f4b37e10ae6a5512923c8fc9e832312514a 100644 (file)
@@ -78,8 +78,8 @@ def open_index(recreate=False):
     try:
         if exists_in(settings.INDEX_DIR) and not recreate:
             return open_dir(settings.INDEX_DIR, schema=get_schema())
-    except Exception as e:
-        logger.error(f"Error while opening the index: {e}, recreating.")
+    except Exception:
+        logger.exception(f"Error while opening the index, recreating.")
 
     if not os.path.isdir(settings.INDEX_DIR):
         os.makedirs(settings.INDEX_DIR, exist_ok=True)
index fe8c8b530b374bf31daa3eda35d64febffe93180..297c951218287a368df15ee4dbfdc28b61d1af7b 100644 (file)
@@ -60,7 +60,7 @@ def handle_document(document_id):
             index.update_document(writer, document)
 
     except Exception as e:
-        logger.error(f"Error while parsing document {document}: {str(e)}")
+        logger.exception(f"Error while parsing document {document}")
     finally:
         parser.cleanup()
 
index c01743628b7e630e375f879254c4817157d7e26d..aaa6448914a1e487aa44a3ae0603fde1aa8c05ca 100644 (file)
@@ -54,8 +54,7 @@ def _consume(filepath):
         if settings.CONSUMER_SUBDIRS_AS_TAGS:
             tag_ids = _tags_from_path(filepath)
     except Exception as e:
-        logger.error(
-            "Error creating tags from path: {}".format(e))
+        logger.exception("Error creating tags from path")
 
     try:
         async_task("documents.tasks.consume_file",
@@ -66,8 +65,7 @@ def _consume(filepath):
         # Catch all so that the consumer won't crash.
         # This is also what the test case is listening for to check for
         # errors.
-        logger.error(
-            "Error while consuming document: {}".format(e))
+        logger.exception("Error while consuming document")
 
 
 def _consume_wait_unmodified(file, num_tries=20, wait_time=1):
index 086edef7b27f2f7cc8ec85700e8b1650bbecc663..c591f04b90b31ff0185ebfeea52256f518e5faa3 100644 (file)
@@ -13,11 +13,8 @@ def process_mail_accounts():
         try:
             total_new_documents += MailAccountHandler().handle_mail_account(
                 account)
-        except MailError as e:
-            logger.error(
-                f"Error while processing mail account {account}: {e}",
-                exc_info=True
-            )
+        except MailError:
+            logger.exception(f"Error while processing mail account {account}")
 
     if total_new_documents > 0:
         return f"Added {total_new_documents} document(s)."