]> git.ipfire.org Git - thirdparty/paperless-ngx.git/commitdiff
Configures ruff as the one stop linter and resolves warnings it raised
authorTrenton H <797416+stumpylog@users.noreply.github.com>
Tue, 28 Mar 2023 16:39:30 +0000 (09:39 -0700)
committerTrenton H <797416+stumpylog@users.noreply.github.com>
Sun, 2 Apr 2023 00:03:52 +0000 (17:03 -0700)
110 files changed:
.github/scripts/cleanup-tags.py
.github/scripts/common.py
.github/scripts/get-build-json.py [changed mode: 0755->0644]
.github/scripts/github.py
.gitignore
.pre-commit-config.yaml
.ruff.toml [new file with mode: 0644]
Pipfile
Pipfile.lock
docker/wait-for-redis.py
gunicorn.conf.py
src/documents/barcodes.py
src/documents/bulk_download.py
src/documents/classifier.py
src/documents/consumer.py
src/documents/index.py
src/documents/management/commands/document_consumer.py
src/documents/management/commands/document_create_classifier.py
src/documents/management/commands/document_exporter.py
src/documents/management/commands/document_importer.py
src/documents/management/commands/document_retagger.py
src/documents/management/commands/document_thumbnails.py
src/documents/matching.py
src/documents/migrations/0001_initial.py
src/documents/migrations/0002_auto_20151226_1316.py
src/documents/migrations/0003_sender.py
src/documents/migrations/0004_auto_20160114_1844.py
src/documents/migrations/0005_auto_20160123_0313.py
src/documents/migrations/0006_auto_20160123_0430.py
src/documents/migrations/0007_auto_20160126_2114.py
src/documents/migrations/0008_document_file_type.py
src/documents/migrations/0009_auto_20160214_0040.py
src/documents/migrations/0010_log.py
src/documents/migrations/0011_auto_20160303_1929.py
src/documents/migrations/0012_auto_20160305_0040.py
src/documents/migrations/0013_auto_20160325_2111.py
src/documents/migrations/0014_document_checksum.py
src/documents/migrations/0015_add_insensitive_to_match.py
src/documents/migrations/0016_auto_20170325_1558.py
src/documents/migrations/0017_auto_20170512_0507.py
src/documents/migrations/0018_auto_20170715_1712.py
src/documents/migrations/0019_add_consumer_user.py
src/documents/migrations/0020_document_added.py
src/documents/migrations/0021_document_storage_type.py
src/documents/migrations/0023_document_current_filename.py
src/documents/migrations/1001_auto_20201109_1636.py
src/documents/migrations/1003_mime_types.py
src/documents/migrations/1007_savedview_savedviewfilterrule.py
src/documents/migrations/1011_auto_20210101_2340.py
src/documents/migrations/1012_fix_archive_files.py
src/documents/migrations/1013_migrate_tag_colour.py
src/documents/migrations/1015_remove_null_characters.py
src/documents/migrations/1016_auto_20210317_1351.py
src/documents/migrations/1018_alter_savedviewfilterrule_value.py
src/documents/migrations/1022_paperlesstask.py
src/documents/migrations/1026_transition_to_celery.py
src/documents/migrations/1033_alter_documenttype_options_alter_tag_options_and_more.py
src/documents/migrations/1035_rename_comment_note.py
src/documents/models.py
src/documents/parsers.py
src/documents/sanity_checker.py
src/documents/serialisers.py
src/documents/signals/handlers.py
src/documents/tasks.py
src/documents/tests/factories.py
src/documents/tests/test_api.py
src/documents/tests/test_barcodes.py
src/documents/tests/test_classifier.py
src/documents/tests/test_consumer.py
src/documents/tests/test_date_parsing.py
src/documents/tests/test_document_model.py
src/documents/tests/test_file_handling.py
src/documents/tests/test_importer.py
src/documents/tests/test_management_consumer.py
src/documents/tests/test_management_exporter.py
src/documents/tests/test_matchables.py
src/documents/tests/test_migration_archive_files.py
src/documents/tests/test_models.py
src/documents/tests/test_parsers.py
src/documents/tests/test_sanity_check.py
src/documents/tests/test_task_signals.py
src/documents/tests/test_views.py
src/documents/views.py
src/manage.py [changed mode: 0644->0755]
src/paperless/checks.py
src/paperless/consumers.py
src/paperless/serialisers.py
src/paperless/settings.py
src/paperless/signals.py
src/paperless/tests/__init__.py [new file with mode: 0644]
src/paperless/urls.py
src/paperless_mail/admin.py
src/paperless_mail/mail.py
src/paperless_mail/migrations/0005_help_texts.py
src/paperless_mail/migrations/0006_auto_20210101_2340.py
src/paperless_mail/migrations/0012_alter_mailrule_assign_tags.py
src/paperless_mail/migrations/0018_processedmail.py
src/paperless_mail/migrations/0019_mailrule_filter_to.py
src/paperless_mail/migrations/0020_mailaccount_is_token.py
src/paperless_mail/models.py
src/paperless_mail/parsers.py
src/paperless_mail/serialisers.py
src/paperless_mail/tests/test_live_mail.py
src/paperless_mail/tests/test_mail.py
src/paperless_mail/views.py
src/paperless_tesseract/parsers.py
src/paperless_tesseract/tests/test_checks.py
src/paperless_text/tests/__init__.py [new file with mode: 0644]
src/paperless_tika/parsers.py
src/paperless_tika/tests/__init__.py [new file with mode: 0644]

index 39d2f122e3ddd3a15d3af080cf038405c80edf40..ce2a32d27cf9b8e123083d13953d0b132395bbef 100644 (file)
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 import json
 import logging
 import os
@@ -390,8 +389,6 @@ class LibraryTagsCleaner(RegistryTagsCleaner):
     will need their own logic
     """
 
-    pass
-
 
 def _main():
     parser = ArgumentParser(
index bccd4fbbdc2067218aab63707c128a98f44f94ec..f7c91e7d7895933d8c6098b77045327af0258f3a 100644 (file)
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 import logging
 
 
old mode 100755 (executable)
new mode 100644 (file)
index 0bb140b..a37b31e
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 """
 This is a helper script for the mutli-stage Docker image builder.
 It provides a single point of configuration for package version control.
index b24e168f564dc4cfdacbe9f534e652272aa86448..ccbd6d11e1ffd75efe5f171c33747a69813a184e 100644 (file)
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 """
 This module contains some useful classes for interacting with the Github API.
 The full documentation for the API can be found here: https://docs.github.com/en/rest
@@ -162,10 +161,7 @@ class ContainerPackage(_EndpointResponse):
         Returns True if the image has at least one tag which matches the given regex,
         False otherwise
         """
-        for tag in self.tags:
-            if re.match(pattern, tag) is not None:
-                return True
-        return False
+        return any(re.match(pattern, tag) is not None for tag in self.tags)
 
     def __repr__(self):
         return f"Package {self.name}"
index ff17f9e33a5b70933aaf3e26a01e8c4dede2769d..3ac5cc0dcbd0716db5c4fbcbaaf15d07378a3c9f 100644 (file)
@@ -73,6 +73,7 @@ virtualenv
 .venv/
 /docker-compose.env
 /docker-compose.yml
+.ruff_cache/
 
 # Used for development
 scripts/import-for-development
index 5660371f8e1ebb12cc3654b438d97b57967eab7a..86aed9973638920e1414b3c0b8e2288fa9f3be9d 100644 (file)
@@ -36,39 +36,14 @@ repos:
           - markdown
         exclude: "(^Pipfile\\.lock$)"
   # Python hooks
-  - repo: https://github.com/asottile/reorder_python_imports
-    rev: v3.9.0
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    rev: 'v0.0.259'
     hooks:
-      - id: reorder-python-imports
-        exclude: "(migrations)"
-  - repo: https://github.com/asottile/yesqa
-    rev: "v1.4.0"
-    hooks:
-      - id: yesqa
-        exclude: "(migrations)"
-  - repo: https://github.com/asottile/add-trailing-comma
-    rev: "v2.4.0"
-    hooks:
-      - id: add-trailing-comma
-        exclude: "(migrations)"
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
-    hooks:
-      - id: flake8
-        files: ^src/
-        args:
-          - "--config=./src/setup.cfg"
+      - id: ruff
   - repo: https://github.com/psf/black
     rev: 22.12.0
     hooks:
       - id: black
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.3.1
-    hooks:
-      - id: pyupgrade
-        exclude: "(migrations)"
-        args:
-          - "--py38-plus"
   # Dockerfile hooks
   - repo: https://github.com/AleksaC/hadolint-py
     rev: v2.10.0
diff --git a/.ruff.toml b/.ruff.toml
new file mode 100644 (file)
index 0000000..030c02b
--- /dev/null
@@ -0,0 +1,23 @@
+# https://beta.ruff.rs/docs/settings/
+# https://beta.ruff.rs/docs/rules/
+select = ["F", "E", "W", "UP", "COM", "DJ", "EXE", "ISC", "ICN", "G201", "INP", "PIE", "RSE", "SIM", "TID", "PLC", "PLE", "RUF"]
+# TODO PTH
+ignore = ["DJ001", "SIM105"]
+fix = true
+line-length = 88
+respect-gitignore = true
+src = ["src"]
+target-version = "py38"
+format = "grouped"
+show-fixes = true
+
+[per-file-ignores]
+".github/scripts/*.py" = ["E501", "INP001", "SIM117"]
+"docker/wait-for-redis.py" = ["INP001"]
+"*/tests/*.py" = ["E501", "SIM117"]
+"*/migrations/*.py" = ["E501", "SIM"]
+"src/paperless_tesseract/tests/test_parser.py" = ["RUF001"]
+"src/documents/models.py" = ["SIM115"]
+
+[isort]
+force-single-line = true
diff --git a/Pipfile b/Pipfile
index b89eff575fd86e128b06aff67be8a925275a83d9..1308dc2a3bd93bcaca2331ed34438007c07ac7ae 100644 (file)
--- a/Pipfile
+++ b/Pipfile
@@ -78,6 +78,7 @@ black = "*"
 pre-commit = "*"
 imagehash = "*"
 mkdocs-material = "*"
+ruff = "*"
 
 [typing-dev]
 mypy = "*"
index 5ab143b3492d3ff8d03a395db94b0285ca845a04..4e738d14a00fa249b21fca750f5dc2895319e2f7 100644 (file)
             "markers": "python_version >= '3.7' and python_version < '4'",
             "version": "==2.28.2"
         },
+        "ruff": {
+            "hashes": [
+                "sha256:22e1e35bf5f12072cd644d22afd9203641ccf258bc14ff91aa1c43dc14f6047d",
+                "sha256:29e2b77b7d5da6a7dd5cf9b738b511355c5734ece56f78e500d4b5bffd58c1a0",
+                "sha256:38704f151323aa5858370a2f792e122cc25e5d1aabe7d42ceeab83da18f0b456",
+                "sha256:40ae87f2638484b7e8a7567b04a7af719f1c484c5bf132038b702bb32e1f6577",
+                "sha256:428507fb321b386dda70d66cd1a8aa0abf51d7c197983d83bb9e4fa5ee60300b",
+                "sha256:49e903bcda19f6bb0725a962c058eb5d61f40d84ef52ed53b61939b69402ab4e",
+                "sha256:5b3c1beacf6037e7f0781d4699d9a2dd4ba2462f475be5b1f45cf84c4ba3c69d",
+                "sha256:71f0ef1985e9a6696fa97da8459917fa34bdaa2c16bd33bd5edead585b7d44f7",
+                "sha256:79b02fa17ec1fd8d306ae302cb47fb614b71e1f539997858243769bcbe78c6d9",
+                "sha256:7cfef26619cba184d59aa7fa17b48af5891d51fc0b755a9bc533478a10d4d066",
+                "sha256:8b56496063ab3bfdf72339a5fbebb8bd46e5c5fee25ef11a9f03b208fa0562ec",
+                "sha256:aa9449b898287e621942cc71b9327eceb8f0c357e4065fecefb707ef2d978df8",
+                "sha256:c5fbaea9167f1852757f02133e5daacdb8c75b3431343205395da5b10499927a",
+                "sha256:d2fb20e89e85d147c85caa807707a1488bccc1f3854dc3d53533e89b52a0c5ff",
+                "sha256:daaea322e7e85f4c13d82be9536309e1c4b8b9851bb0cbc7eeb15d490fd46bf9",
+                "sha256:e4f39e18702de69faaaee3969934b92d7467285627f99a5b6ecd55a7d9f5d086",
+                "sha256:f3938dc45e2a3f818e9cbd53007265c22246fbfded8837b2c563bf0ebde1a226"
+            ],
+            "index": "pypi",
+            "version": "==0.0.259"
+        },
         "scipy": {
             "hashes": [
                 "sha256:02b567e722d62bddd4ac253dafb01ce7ed8742cf8031aea030a41414b86c1125",
index 86f35a7cf5dab118ed8d993d7ff9c45a9b275a05..cabfb1dc68c4cc8d3a983f11980db0a5bf091bea 100755 (executable)
@@ -18,7 +18,7 @@ if __name__ == "__main__":
 
     REDIS_URL: Final[str] = os.getenv("PAPERLESS_REDIS", "redis://localhost:6379")
 
-    print(f"Waiting for Redis...", flush=True)
+    print("Waiting for Redis...", flush=True)
 
     attempt = 0
     with Redis.from_url(url=REDIS_URL) as client:
@@ -37,8 +37,8 @@ if __name__ == "__main__":
                 attempt += 1
 
     if attempt >= MAX_RETRY_COUNT:
-        print(f"Failed to connect to redis using environment variable PAPERLESS_REDIS.")
+        print("Failed to connect to redis using environment variable PAPERLESS_REDIS.")
         sys.exit(os.EX_UNAVAILABLE)
     else:
-        print(f"Connected to Redis broker.")
+        print("Connected to Redis broker.")
         sys.exit(os.EX_OK)
index a3ada7a645b0fe2ad16415d5af5c7cb7baefad23..e3951ac5f470a2b64cbd22f987e94604a0183669 100644 (file)
@@ -30,7 +30,9 @@ def worker_int(worker):
     worker.log.info("worker received INT or QUIT signal")
 
     ## get traceback info
-    import threading, sys, traceback
+    import sys
+    import threading
+    import traceback
 
     id2name = {th.ident: th.name for th in threading.enumerate()}
     code = []
index 82b81fecd0be3b8f14a7886a46b1086df27ff102..8d114d68c818db4edb06844e0ddd6f56b87981fd 100644 (file)
@@ -136,9 +136,8 @@ def convert_from_tiff_to_pdf(filepath: Path) -> Path:
                 filepath,
             ],
         )
-    with filepath.open("rb") as img_file:
-        with newpath.open("wb") as pdf_file:
-            pdf_file.write(img2pdf.convert(img_file))
+    with filepath.open("rb") as img_file, newpath.open("wb") as pdf_file:
+        pdf_file.write(img2pdf.convert(img_file))
     return newpath
 
 
index 87d97afcce6541a871cae26ae0c9d66a25c41d6c..ecabd4515cf7a677c486a817ce6cbab3e5d6ac61 100644 (file)
@@ -52,7 +52,7 @@ class BulkArchiveStrategy:
         return in_archive_path
 
     def add_document(self, doc: Document):
-        raise NotImplementedError()  # pragma: no cover
+        raise NotImplementedError  # pragma: no cover
 
 
 class OriginalsOnlyStrategy(BulkArchiveStrategy):
index d2f5ed060bd7a9f2ad544488aa747bd9a269fadb..cbb8b1b906c618815e5e18f902d5dd013a9af67d 100644 (file)
@@ -104,7 +104,7 @@ class DocumentClassifier:
                         self.document_type_classifier = pickle.load(f)
                         self.storage_path_classifier = pickle.load(f)
                     except Exception as err:
-                        raise ClassifierModelCorruptError() from err
+                        raise ClassifierModelCorruptError from err
 
             # Check for the warning about unpickling from differing versions
             # and consider it incompatible
@@ -117,7 +117,7 @@ class DocumentClassifier:
                 if issubclass(warning.category, UserWarning):
                     w_msg = str(warning.message)
                     if sk_learn_warning_url in w_msg:
-                        raise IncompatibleClassifierVersionError()
+                        raise IncompatibleClassifierVersionError
 
     def save(self):
         target_file = settings.MODEL_FILE
index 797345ba6bf02892783b5b8357bdf37665734310..12993a75077e43e4bbbce8cc2753624f2dcef533 100644 (file)
@@ -590,9 +590,8 @@ class Consumer(LoggingMixin):
             )
 
     def _write(self, storage_type, source, target):
-        with open(source, "rb") as read_file:
-            with open(target, "wb") as write_file:
-                write_file.write(read_file.read())
+        with open(source, "rb") as read_file, open(target, "wb") as write_file:
+            write_file.write(read_file.read())
 
     def _log_script_outputs(self, completed_process: CompletedProcess):
         """
index 05d4d5d17912548c2102bdb1e976ea5ec525aac7..1bf6a921d4f490c90abf96546c6b158d0999d00c 100644 (file)
@@ -164,7 +164,7 @@ def remove_document_from_index(document):
 
 class DelayedQuery:
     def _get_query(self):
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def _get_query_filter(self):
         criterias = []
index 27749ea7c74232902fb24e163c8c829948bde26d..6cba1ea23f5adea76c660f301f2fc65020fa987b 100644 (file)
@@ -159,7 +159,7 @@ def _consume_wait_unmodified(file: str) -> None:
             new_size = stat_data.st_size
         except FileNotFoundError:
             logger.debug(
-                f"File {file} moved while waiting for it to remain " f"unmodified.",
+                f"File {file} moved while waiting for it to remain unmodified.",
             )
             return
         if new_mtime == mtime and new_size == size:
@@ -293,10 +293,7 @@ class Command(BaseCommand):
         while not finished:
             try:
                 for event in inotify.read(timeout=timeout):
-                    if recursive:
-                        path = inotify.get_path(event.wd)
-                    else:
-                        path = directory
+                    path = inotify.get_path(event.wd) if recursive else directory
                     filepath = os.path.join(path, event.name)
                     notified_files[filepath] = monotonic()
 
index 9610d50a09fbe23fd53127cb630eeae95fbb9e1f..04aa9ab2b9476e3403e055e849532192d3827024 100644 (file)
@@ -1,6 +1,6 @@
 from django.core.management.base import BaseCommand
 
-from ...tasks import train_classifier
+from documents.tasks import train_classifier
 
 
 class Command(BaseCommand):
index e92e09966b8a0d39696bf7ed206cc839b7ad685e..151868137fa8ace1a2ea06e242856c77a1c1eb0d 100644 (file)
@@ -35,8 +35,8 @@ from paperless.db import GnuPG
 from paperless_mail.models import MailAccount
 from paperless_mail.models import MailRule
 
-from ...file_handling import delete_empty_directories
-from ...file_handling import generate_filename
+from documents.file_handling import delete_empty_directories
+from documents.file_handling import generate_filename
 
 
 class Command(BaseCommand):
@@ -403,9 +403,10 @@ class Command(BaseCommand):
             if self.compare_checksums and source_checksum:
                 target_checksum = hashlib.md5(target.read_bytes()).hexdigest()
                 perform_copy = target_checksum != source_checksum
-            elif source_stat.st_mtime != target_stat.st_mtime:
-                perform_copy = True
-            elif source_stat.st_size != target_stat.st_size:
+            elif (
+                source_stat.st_mtime != target_stat.st_mtime
+                or source_stat.st_size != target_stat.st_size
+            ):
                 perform_copy = True
         else:
             # Copy if it does not exist
index eeae68e65268ad4bae2f27c53a5a4a733159127a..d68f04ac331c107f1c5756bfc604e31903a054b9 100644 (file)
@@ -22,8 +22,8 @@ from documents.settings import EXPORTER_THUMBNAIL_NAME
 from filelock import FileLock
 from paperless import version
 
-from ...file_handling import create_source_path_directory
-from ...signals.handlers import update_filename_and_move_files
+from documents.file_handling import create_source_path_directory
+from documents.signals.handlers import update_filename_and_move_files
 
 
 @contextmanager
@@ -111,37 +111,36 @@ class Command(BaseCommand):
             post_save,
             receiver=update_filename_and_move_files,
             sender=Document,
+        ), disable_signal(
+            m2m_changed,
+            receiver=update_filename_and_move_files,
+            sender=Document.tags.through,
         ):
-            with disable_signal(
-                m2m_changed,
-                receiver=update_filename_and_move_files,
-                sender=Document.tags.through,
-            ):
-                # Fill up the database with whatever is in the manifest
-                try:
-                    for manifest_path in manifest_paths:
-                        call_command("loaddata", manifest_path)
-                except (FieldDoesNotExist, DeserializationError) as e:
-                    self.stdout.write(self.style.ERROR("Database import failed"))
-                    if (
-                        self.version is not None
-                        and self.version != version.__full_version_str__
-                    ):
-                        self.stdout.write(
-                            self.style.ERROR(
-                                "Version mismatch: "
-                                f"Currently {version.__full_version_str__},"
-                                f" importing {self.version}",
-                            ),
-                        )
-                        raise e
-                    else:
-                        self.stdout.write(
-                            self.style.ERROR("No version information present"),
-                        )
-                        raise e
+            # Fill up the database with whatever is in the manifest
+            try:
+                for manifest_path in manifest_paths:
+                    call_command("loaddata", manifest_path)
+            except (FieldDoesNotExist, DeserializationError) as e:
+                self.stdout.write(self.style.ERROR("Database import failed"))
+                if (
+                    self.version is not None
+                    and self.version != version.__full_version_str__
+                ):
+                    self.stdout.write(
+                        self.style.ERROR(
+                            "Version mismatch: "
+                            f"Currently {version.__full_version_str__},"
+                            f" importing {self.version}",
+                        ),
+                    )
+                    raise e
+                else:
+                    self.stdout.write(
+                        self.style.ERROR("No version information present"),
+                    )
+                    raise e
 
-                self._import_files_from_manifest(options["no_progress_bar"])
+            self._import_files_from_manifest(options["no_progress_bar"])
 
         self.stdout.write("Updating search index...")
         call_command(
@@ -154,14 +153,14 @@ class Command(BaseCommand):
     def _check_manifest_exists(path):
         if not os.path.exists(path):
             raise CommandError(
-                "That directory doesn't appear to contain a manifest.json " "file.",
+                "That directory doesn't appear to contain a manifest.json file.",
             )
 
     def _check_manifest(self):
 
         for record in self.manifest:
 
-            if not record["model"] == "documents.document":
+            if record["model"] != "documents.document":
                 continue
 
             if EXPORTER_FILE_NAME not in record:
index c42357eb50b46579eabcae5adae5fa0914314171..aa61f06964c2c52c6e4617806cc3bf1e70491e82 100644 (file)
@@ -5,10 +5,10 @@ from django.core.management.base import BaseCommand
 from documents.classifier import load_classifier
 from documents.models import Document
 
-from ...signals.handlers import set_correspondent
-from ...signals.handlers import set_document_type
-from ...signals.handlers import set_storage_path
-from ...signals.handlers import set_tags
+from documents.signals.handlers import set_correspondent
+from documents.signals.handlers import set_document_type
+from documents.signals.handlers import set_storage_path
+from documents.signals.handlers import set_tags
 
 
 logger = logging.getLogger("paperless.management.retagger")
index b56bc00428a01bfe363ffdad8cc357ee87f900a8..462853f84d347345b8d7798c89fb5c574ca7faa2 100644 (file)
@@ -7,7 +7,7 @@ from django import db
 from django.core.management.base import BaseCommand
 from documents.models import Document
 
-from ...parsers import get_parser_class_for_mime_type
+from documents.parsers import get_parser_class_for_mime_type
 
 
 def _process_document(doc_in):
index c38761afa2a02c7d522687dad2ac9467aadea79f..63534ffe3aaa202870d472cebe4bcf1682812041 100644 (file)
@@ -20,10 +20,7 @@ def log_reason(matching_model, document, reason):
 
 
 def match_correspondents(document, classifier):
-    if classifier:
-        pred_id = classifier.predict_correspondent(document.content)
-    else:
-        pred_id = None
+    pred_id = classifier.predict_correspondent(document.content) if classifier else None
 
     correspondents = Correspondent.objects.all()
 
@@ -33,10 +30,7 @@ def match_correspondents(document, classifier):
 
 
 def match_document_types(document, classifier):
-    if classifier:
-        pred_id = classifier.predict_document_type(document.content)
-    else:
-        pred_id = None
+    pred_id = classifier.predict_document_type(document.content) if classifier else None
 
     document_types = DocumentType.objects.all()
 
@@ -46,10 +40,7 @@ def match_document_types(document, classifier):
 
 
 def match_tags(document, classifier):
-    if classifier:
-        predicted_tag_ids = classifier.predict_tags(document.content)
-    else:
-        predicted_tag_ids = []
+    predicted_tag_ids = classifier.predict_tags(document.content) if classifier else []
 
     tags = Tag.objects.all()
 
@@ -59,10 +50,7 @@ def match_tags(document, classifier):
 
 
 def match_storage_paths(document, classifier):
-    if classifier:
-        pred_id = classifier.predict_storage_path(document.content)
-    else:
-        pred_id = None
+    pred_id = classifier.predict_storage_path(document.content) if classifier else None
 
     storage_paths = StoragePath.objects.all()
 
@@ -80,7 +68,7 @@ def matches(matching_model, document):
     document_content = document.content
 
     # Check that match is not empty
-    if matching_model.match.strip() == "":
+    if not matching_model.match.strip():
         return False
 
     if matching_model.is_insensitive:
@@ -132,7 +120,7 @@ def matches(matching_model, document):
             )
         except re.error:
             logger.error(
-                f"Error while processing regular expression " f"{matching_model.match}",
+                f"Error while processing regular expression {matching_model.match}",
             )
             return False
         if match:
index a388ac7f26268a0bc35e2751de14840a4a7a9992..e1b2f2a8b6a56fd88216a410adbf54d97941de02 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2015-12-20 19:10
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 from django.conf import settings
@@ -32,7 +30,7 @@ class Migration(migrations.Migration):
                     models.TextField(
                         db_index=(
                             "mysql" not in settings.DATABASES["default"]["ENGINE"]
-                        )
+                        ),
                     ),
                 ),
                 ("created", models.DateTimeField(auto_now_add=True)),
index 05d97d2c2a9de4a7c1ce51f31e35f2e0f6795e58..b953d8008e1a78333af1a5e29ea1f01c1e1e22b6 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2015-12-26 13:16
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 import django.utils.timezone
@@ -21,7 +19,8 @@ class Migration(migrations.Migration):
             model_name="document",
             name="created",
             field=models.DateTimeField(
-                default=django.utils.timezone.now, editable=False
+                default=django.utils.timezone.now,
+                editable=False,
             ),
         ),
     ]
index 796571cd7a49e24d07930e5e70e7e02dd526ff8e..c2b274085afbbbc0e227aa09086725f6e2952134 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-11 12:21
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 from django.template.defaultfilters import slugify
@@ -23,7 +21,8 @@ def move_sender_strings_to_sender_model(apps, schema_editor):
                 DOCUMENT_SENDER_MAP[document.pk],
                 created,
             ) = sender_model.objects.get_or_create(
-                name=document.sender, defaults={"slug": slugify(document.sender)}
+                name=document.sender,
+                defaults={"slug": slugify(document.sender)},
             )
 
 
index 5d377a4a1897c10713a586c6ad5af325465ce8f4..b3ccaaa9b0097dfd70432f0af37856fc975cf936 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-14 18:44
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 import django.db.models.deletion
index 893bf1d1fb6a107c4084349ea6f36d5258b04e07..98e2c1b29a2d0b3a151f2dfd5d5a3f8c77697029 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-23 03:13
-from __future__ import unicode_literals
 
 from django.db import migrations
 
index e8530f39a0c37cac2423df1ab6f5dffed4298674..3f24992e2a0e0b3e9c06bbb3c9eafade7f226658 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-23 04:30
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 
index e7e2736111762e38f8b6197e13cd700b43494fbf..f4b3b913d8ad540bb405cf955229241a45309478 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-26 21:14
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 
index a6770d9f78718d41d8921d0f98bca57d8e20b192..c079f927d14e7ed94861f4824fdeeb07c31f2f75 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-29 22:58
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 
@@ -33,7 +31,9 @@ class Migration(migrations.Migration):
             model_name="document",
             name="tags",
             field=models.ManyToManyField(
-                blank=True, related_name="documents", to="documents.Tag"
+                blank=True,
+                related_name="documents",
+                to="documents.Tag",
             ),
         ),
     ]
index 5d833c1f2636b647b7fea27a80f74613d89b10e3..82e48ba240a379b96321eaeb2b6e4af09e5b32df 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-02-14 00:40
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 
index b51aebc62fe4d8bdf6fc5fcbe0b2e9366ce3f58d..9be7b18ed2ae6fc9a70ab890c9315b04541f34c6 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-02-27 17:54
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 
@@ -42,7 +40,7 @@ class Migration(migrations.Migration):
                 (
                     "component",
                     models.PositiveIntegerField(
-                        choices=[(1, "Consumer"), (2, "Mail Fetcher")]
+                        choices=[(1, "Consumer"), (2, "Mail Fetcher")],
                     ),
                 ),
                 ("created", models.DateTimeField(auto_now_add=True)),
index 6432128888ef07647a57a5a58a89e3054daae700..4c4fcd3adb04571cc91ee106139445bc7dbc2ceb 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-03-03 19:29
-from __future__ import unicode_literals
 
 from django.db import migrations
 
index 5ba3838d41e1dc34bb3cf0dc11075bf759a015bf..1470ace968ba64ba69878dc0e81178e736ecc7ca 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-03-05 00:40
-from __future__ import unicode_literals
 
 import gnupg
 import os
@@ -14,7 +12,7 @@ from django.db import migrations
 from django.utils.termcolors import colorize as colourise  # Spelling hurts me
 
 
-class GnuPG(object):
+class GnuPG:
     """
     A handy singleton to use when handling encrypted files.
     """
@@ -28,17 +26,22 @@ class GnuPG(object):
     @classmethod
     def encrypted(cls, file_handle):
         return cls.gpg.encrypt_file(
-            file_handle, recipients=None, passphrase=settings.PASSPHRASE, symmetric=True
+            file_handle,
+            recipients=None,
+            passphrase=settings.PASSPHRASE,
+            symmetric=True,
         ).data
 
 
 def move_documents_and_create_thumbnails(apps, schema_editor):
 
     os.makedirs(
-        os.path.join(settings.MEDIA_ROOT, "documents", "originals"), exist_ok=True
+        os.path.join(settings.MEDIA_ROOT, "documents", "originals"),
+        exist_ok=True,
     )
     os.makedirs(
-        os.path.join(settings.MEDIA_ROOT, "documents", "thumbnails"), exist_ok=True
+        os.path.join(settings.MEDIA_ROOT, "documents", "thumbnails"),
+        exist_ok=True,
     )
 
     documents = os.listdir(os.path.join(settings.MEDIA_ROOT, "documents"))
@@ -55,7 +58,7 @@ def move_documents_and_create_thumbnails(apps, schema_editor):
             "  in order."
             "\n",
             opts=("bold",),
-        )
+        ),
     )
 
     try:
@@ -73,7 +76,7 @@ def move_documents_and_create_thumbnails(apps, schema_editor):
                 colourise("*", fg="green"),
                 colourise("Generating a thumbnail for", fg="white"),
                 colourise(f, fg="cyan"),
-            )
+            ),
         )
 
         thumb_temp = tempfile.mkdtemp(prefix="paperless", dir=settings.SCRATCH_DIR)
@@ -95,7 +98,7 @@ def move_documents_and_create_thumbnails(apps, schema_editor):
                 "remove",
                 orig_target,
                 os.path.join(thumb_temp, "convert-%04d.png"),
-            )
+            ),
         ).wait()
 
         thumb_source = os.path.join(thumb_temp, "convert-0000.png")
index 58f39758a7b70a583854d027628f3417d244c958..6663edad8740439e90f91450c58ac37ae7ab809f 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9.4 on 2016-03-25 21:11
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 import django.utils.timezone
index 1ec8380f4b459f54c6944a2ee2f8082077ca92fd..a687e43efae00387c106edbec86df5f9d9701072 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9.4 on 2016-03-28 19:09
-from __future__ import unicode_literals
 
 import gnupg
 import hashlib
@@ -13,7 +11,7 @@ from django.template.defaultfilters import slugify
 from django.utils.termcolors import colorize as colourise  # Spelling hurts me
 
 
-class GnuPG(object):
+class GnuPG:
     """
     A handy singleton to use when handling encrypted files.
     """
@@ -27,11 +25,14 @@ class GnuPG(object):
     @classmethod
     def encrypted(cls, file_handle):
         return cls.gpg.encrypt_file(
-            file_handle, recipients=None, passphrase=settings.PASSPHRASE, symmetric=True
+            file_handle,
+            recipients=None,
+            passphrase=settings.PASSPHRASE,
+            symmetric=True,
         ).data
 
 
-class Document(object):
+class Document:
     """
     Django's migrations restrict access to model methods, so this is a snapshot
     of the methods that existed at the time this migration was written, since
@@ -49,9 +50,9 @@ class Document(object):
     def __str__(self):
         created = self.created.strftime("%Y%m%d%H%M%S")
         if self.correspondent and self.title:
-            return "{}: {} - {}".format(created, self.correspondent, self.title)
+            return f"{created}: {self.correspondent} - {self.title}"
         if self.correspondent or self.title:
-            return "{}: {}".format(created, self.correspondent or self.title)
+            return f"{created}: {self.correspondent or self.title}"
         return str(created)
 
     @property
@@ -60,7 +61,7 @@ class Document(object):
             settings.MEDIA_ROOT,
             "documents",
             "originals",
-            "{:07}.{}.gpg".format(self.pk, self.file_type),
+            f"{self.pk:07}.{self.file_type}.gpg",
         )
 
     @property
@@ -88,7 +89,7 @@ def set_checksums(apps, schema_editor):
             "  order."
             "\n",
             opts=("bold",),
-        )
+        ),
     )
 
     sums = {}
@@ -101,7 +102,7 @@ def set_checksums(apps, schema_editor):
                 colourise("*", fg="green"),
                 colourise("Generating a checksum for", fg="white"),
                 colourise(document.file_name, fg="cyan"),
-            )
+            ),
         )
 
         with document.source_file as encrypted:
@@ -122,15 +123,16 @@ def set_checksums(apps, schema_editor):
                     fg="yellow",
                 ),
                 doc1=colourise(
-                    "  * {} (id: {})".format(sums[checksum][1], sums[checksum][0]),
+                    f"  * {sums[checksum][1]} (id: {sums[checksum][0]})",
                     fg="red",
                 ),
                 doc2=colourise(
-                    "  * {} (id: {})".format(document.file_name, document.pk), fg="red"
+                    f"  * {document.file_name} (id: {document.pk})",
+                    fg="red",
                 ),
                 code=colourise(
                     "  $ echo 'DELETE FROM documents_document WHERE id = {pk};' | ./manage.py dbshell".format(
-                        pk=document.pk
+                        pk=document.pk,
                     ),
                     fg="green",
                 ),
@@ -171,7 +173,8 @@ class Migration(migrations.Migration):
             model_name="document",
             name="created",
             field=models.DateTimeField(
-                db_index=True, default=django.utils.timezone.now
+                db_index=True,
+                default=django.utils.timezone.now,
             ),
         ),
         migrations.AlterField(
index 351a4067bc868e5b48df4d348c2b400255e65cc8..79691844690ad5989d780b618a9b29bcf8b247c4 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.10.2 on 2016-10-05 21:38
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 
index ae95b83f688a277bfa317041a31a0ef6e219e733..26ab3a720c0e051019e218f1aa0035a70adbdf8f 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.10.5 on 2017-03-25 15:58
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 from django.conf import settings
index 603aece5ec14377ccc4d23e63271738d607026ca..f775cdfe09a34668d2f5d23b5547502c2fc53d86 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.10.5 on 2017-05-12 05:07
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 
index 492e016e338f2162aebc15ee9e4a7161355ccf17..047531feed21b1913ccddd225e0918f0ec6224e2 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.10.5 on 2017-07-15 17:12
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 import django.db.models.deletion
index 344297805d0579665f829c9ed5de31d2f0dd5343..b38d88538f38ad023fb70ffa24cde0ccdb41bdd9 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.10.5 on 2017-07-15 17:12
-from __future__ import unicode_literals
 
 from django.contrib.auth.models import User
 from django.db import migrations
index 66afa125848722493ae9248ea7a5ef2faeaa4353..67c4df4aa9c342b99050aa92fb91017a5a6a06e0 100644 (file)
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
 from django.db import migrations, models
 import django.utils.timezone
 
@@ -22,7 +19,9 @@ class Migration(migrations.Migration):
             model_name="document",
             name="added",
             field=models.DateTimeField(
-                db_index=True, default=django.utils.timezone.now, editable=False
+                db_index=True,
+                default=django.utils.timezone.now,
+                editable=False,
             ),
         ),
         migrations.RunPython(set_added_time_to_created_time),
index 0e7425fb62115c675e44157af0d1dd46a72a7d41..bde86ceea056e0616f10a4209d9fb54f177f84e5 100644 (file)
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.11.10 on 2018-02-04 13:07
-from __future__ import unicode_literals
 
 from django.db import migrations, models
 
index 8e9f65bb9b28efa6288f349b491b2482453b5989..65300c11e5bc571d90eb2d3dec192d0bd6c88101 100644 (file)
@@ -6,7 +6,7 @@ from django.db import migrations, models
 def set_filename(apps, schema_editor):
     Document = apps.get_model("documents", "Document")
     for doc in Document.objects.all():
-        file_name = "{:07}.{}".format(doc.pk, doc.file_type)
+        file_name = f"{doc.pk:07}.{doc.file_type}"
         if doc.storage_type == "gpg":
             file_name += ".gpg"
 
index 2558180bba636dcfcc97bee66573488329009735..9f6e152b62c5bee5b6938cde02d62d04cf820d45 100644 (file)
@@ -10,5 +10,5 @@ class Migration(migrations.Migration):
     ]
 
     operations = [
-        migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop)
+        migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop),
     ]
index 4eee1e0a2fd7cf0d1138a413d8587352a1456797..f5ac9475623183ef90b53b9a8d19c7221d8794cf 100644 (file)
@@ -1,5 +1,4 @@
 # Generated by Django 3.1.3 on 2020-11-20 11:21
-import mimetypes
 import os
 
 import magic
@@ -16,7 +15,7 @@ def source_path(self):
     if self.filename:
         fname = str(self.filename)
     else:
-        fname = "{:07}.{}".format(self.pk, self.file_type)
+        fname = f"{self.pk:07}.{self.file_type}"
         if self.storage_type == STORAGE_TYPE_GPG:
             fname += ".gpg"
 
index 401ab5adb8352050cc448c4e1e496d248b0a7e36..357d65c2d97493a1cbdff4ba06665cf59ad518e6 100644 (file)
@@ -73,7 +73,7 @@ class Migration(migrations.Migration):
                             (15, "Modified before"),
                             (16, "Modified after"),
                             (17, "Does not have tag"),
-                        ]
+                        ],
                     ),
                 ),
                 ("value", models.CharField(max_length=128)),
index d16051c21efcdfbef7551ed31d5d1cb5eb0000d0..c7c387226a36838ad86cecc370ac0b795c594a11 100644 (file)
@@ -165,7 +165,9 @@ class Migration(migrations.Migration):
             model_name="document",
             name="created",
             field=models.DateTimeField(
-                db_index=True, default=django.utils.timezone.now, verbose_name="created"
+                db_index=True,
+                default=django.utils.timezone.now,
+                verbose_name="created",
             ),
         ),
         migrations.AlterField(
@@ -196,14 +198,18 @@ class Migration(migrations.Migration):
             model_name="document",
             name="mime_type",
             field=models.CharField(
-                editable=False, max_length=256, verbose_name="mime type"
+                editable=False,
+                max_length=256,
+                verbose_name="mime type",
             ),
         ),
         migrations.AlterField(
             model_name="document",
             name="modified",
             field=models.DateTimeField(
-                auto_now=True, db_index=True, verbose_name="modified"
+                auto_now=True,
+                db_index=True,
+                verbose_name="modified",
             ),
         ),
         migrations.AlterField(
@@ -234,7 +240,10 @@ class Migration(migrations.Migration):
             model_name="document",
             name="title",
             field=models.CharField(
-                blank=True, db_index=True, max_length=128, verbose_name="title"
+                blank=True,
+                db_index=True,
+                max_length=128,
+                verbose_name="title",
             ),
         ),
         migrations.AlterField(
@@ -373,7 +382,10 @@ class Migration(migrations.Migration):
             model_name="savedviewfilterrule",
             name="value",
             field=models.CharField(
-                blank=True, max_length=128, null=True, verbose_name="value"
+                blank=True,
+                max_length=128,
+                null=True,
+                verbose_name="value",
             ),
         ),
         migrations.AlterField(
index 77e33afdd8222dc22caaa1bd1819845ecfdad7b1..51eb8ec2e05ceabcb3464af654f4d27b0e2e8ccb 100644 (file)
@@ -29,7 +29,7 @@ def archive_path_old(doc):
     if doc.filename:
         fname = archive_name_from_filename(doc.filename)
     else:
-        fname = "{:07}.pdf".format(doc.pk)
+        fname = f"{doc.pk:07}.pdf"
 
     return os.path.join(settings.ARCHIVE_DIR, fname)
 
@@ -48,7 +48,7 @@ def source_path(doc):
     if doc.filename:
         fname = str(doc.filename)
     else:
-        fname = "{:07}{}".format(doc.pk, doc.file_type)
+        fname = f"{doc.pk:07}{doc.file_type}"
         if doc.storage_type == STORAGE_TYPE_GPG:
             fname += ".gpg"  # pragma: no cover
 
@@ -67,7 +67,9 @@ def generate_unique_filename(doc, archive_filename=False):
 
     while True:
         new_filename = generate_filename(
-            doc, counter, archive_filename=archive_filename
+            doc,
+            counter,
+            archive_filename=archive_filename,
         )
         if new_filename == old_filename:
             # still the same as before.
@@ -93,14 +95,16 @@ def generate_filename(doc, counter=0, append_gpg=True, archive_filename=False):
 
             if doc.correspondent:
                 correspondent = pathvalidate.sanitize_filename(
-                    doc.correspondent.name, replacement_text="-"
+                    doc.correspondent.name,
+                    replacement_text="-",
                 )
             else:
                 correspondent = "none"
 
             if doc.document_type:
                 document_type = pathvalidate.sanitize_filename(
-                    doc.document_type.name, replacement_text="-"
+                    doc.document_type.name,
+                    replacement_text="-",
                 )
             else:
                 document_type = "none"
@@ -111,9 +115,7 @@ def generate_filename(doc, counter=0, append_gpg=True, archive_filename=False):
                 document_type=document_type,
                 created=datetime.date.isoformat(doc.created),
                 created_year=doc.created.year if doc.created else "none",
-                created_month=f"{doc.created.month:02}"
-                if doc.created
-                else "none",  # NOQA: E501
+                created_month=f"{doc.created.month:02}" if doc.created else "none",
                 created_day=f"{doc.created.day:02}" if doc.created else "none",
                 added=datetime.date.isoformat(doc.added),
                 added_year=doc.added.year if doc.added else "none",
@@ -128,7 +130,7 @@ def generate_filename(doc, counter=0, append_gpg=True, archive_filename=False):
     except (ValueError, KeyError, IndexError):
         logger.warning(
             f"Invalid PAPERLESS_FILENAME_FORMAT: "
-            f"{settings.FILENAME_FORMAT}, falling back to default"
+            f"{settings.FILENAME_FORMAT}, falling back to default",
         )
 
     counter_str = f"_{counter:02}" if counter else ""
@@ -170,13 +172,17 @@ def create_archive_version(doc, retry_count=3):
         parser: DocumentParser = parser_class(None, None)
         try:
             parse_wrapper(
-                parser, source_path(doc), doc.mime_type, os.path.basename(doc.filename)
+                parser,
+                source_path(doc),
+                doc.mime_type,
+                os.path.basename(doc.filename),
             )
             doc.content = parser.get_text()
 
             if parser.get_archive_path() and os.path.isfile(parser.get_archive_path()):
                 doc.archive_filename = generate_unique_filename(
-                    doc, archive_filename=True
+                    doc,
+                    archive_filename=True,
                 )
                 with open(parser.get_archive_path(), "rb") as f:
                     doc.archive_checksum = hashlib.md5(f.read()).hexdigest()
@@ -186,7 +192,7 @@ def create_archive_version(doc, retry_count=3):
                 doc.archive_checksum = None
                 logger.error(
                     f"Parser did not return an archive document for document "
-                    f"ID:{doc.id}. Removing archive document."
+                    f"ID:{doc.id}. Removing archive document.",
                 )
             doc.save()
             return
@@ -195,7 +201,7 @@ def create_archive_version(doc, retry_count=3):
                 logger.exception(
                     f"Unable to regenerate archive document for ID:{doc.id}. You "
                     f"need to invoke the document_archiver management command "
-                    f"manually for that document."
+                    f"manually for that document.",
                 )
                 doc.archive_checksum = None
                 doc.save()
@@ -233,7 +239,7 @@ def move_old_to_new_locations(apps, schema_editor):
         old_path = archive_path_old(doc)
         if doc.id not in affected_document_ids and not os.path.isfile(old_path):
             raise ValueError(
-                f"Archived document ID:{doc.id} does not exist at: " f"{old_path}"
+                f"Archived document ID:{doc.id} does not exist at: {old_path}",
             )
 
     # check that we can regenerate affected archive versions
@@ -245,7 +251,7 @@ def move_old_to_new_locations(apps, schema_editor):
         if not parser_class:
             raise ValueError(
                 f"Document ID:{doc.id} has an invalid archived document, "
-                f"but no parsers are available. Cannot migrate."
+                f"but no parsers are available. Cannot migrate.",
             )
 
     for doc in Document.objects.filter(archive_checksum__isnull=False):
@@ -260,7 +266,7 @@ def move_old_to_new_locations(apps, schema_editor):
             # Set archive path for unaffected files
             doc.archive_filename = archive_name_from_filename(doc.filename)
             Document.objects.filter(id=doc.id).update(
-                archive_filename=doc.archive_filename
+                archive_filename=doc.archive_filename,
             )
 
     # regenerate archive documents
@@ -281,13 +287,13 @@ def move_new_to_old_locations(apps, schema_editor):
             raise ValueError(
                 f"Cannot migrate: Archive file name {old_archive_path} of "
                 f"document {doc.filename} would clash with another archive "
-                f"filename."
+                f"filename.",
             )
         old_archive_paths.add(old_archive_path)
         if new_archive_path != old_archive_path and os.path.isfile(old_archive_path):
             raise ValueError(
                 f"Cannot migrate: Cannot move {new_archive_path} to "
-                f"{old_archive_path}: file already exists."
+                f"{old_archive_path}: file already exists.",
             )
 
     for doc in Document.objects.filter(archive_checksum__isnull=False):
index 4714f97c55a28ac4967bb6fde94ed2d6e15f625c..8346ff18405f8d34214d5711dde12ae5dd49c0c9 100644 (file)
@@ -61,7 +61,9 @@ class Migration(migrations.Migration):
             model_name="tag",
             name="color",
             field=models.CharField(
-                default="#a6cee3", max_length=7, verbose_name="color"
+                default="#a6cee3",
+                max_length=7,
+                verbose_name="color",
             ),
         ),
         migrations.RunPython(forward, reverse),
index accc41162911954a98fc2d7d2bb86143a734ed76..cea9d5a64b600d14b7edda870d7387fda5338387 100644 (file)
@@ -25,5 +25,5 @@ class Migration(migrations.Migration):
     ]
 
     operations = [
-        migrations.RunPython(remove_null_characters, migrations.RunPython.noop)
+        migrations.RunPython(remove_null_characters, migrations.RunPython.noop),
     ]
index 53994f916dd766fceb5c7d487681b4a8d6476d87..d41fae8494ff5449a029120f2d90cdecf78eac37 100644 (file)
@@ -14,7 +14,10 @@ class Migration(migrations.Migration):
             model_name="savedview",
             name="sort_field",
             field=models.CharField(
-                blank=True, max_length=128, null=True, verbose_name="sort field"
+                blank=True,
+                max_length=128,
+                null=True,
+                verbose_name="sort field",
             ),
         ),
         migrations.AlterField(
index aa32c9c5cc37dcd5abe91ee29dc891f4518a228a..8453a86d808c75a28979c5b17e287fb8aa952b58 100644 (file)
@@ -14,7 +14,10 @@ class Migration(migrations.Migration):
             model_name="savedviewfilterrule",
             name="value",
             field=models.CharField(
-                blank=True, max_length=255, null=True, verbose_name="value"
+                blank=True,
+                max_length=255,
+                null=True,
+                verbose_name="value",
             ),
         ),
     ]
index 5f1293a1eec3854eb3945ab135b9b8b98474ea4b..e398402f32d043752670e83c5027301716e12e88 100644 (file)
@@ -48,5 +48,5 @@ class Migration(migrations.Migration):
                     ),
                 ),
             ],
-        )
+        ),
     ]
index 76c6edf11aea5a9f78f4521830933bf4b0b7bbc8..786ca36c44a8a45c2ac7bfc8f45cb90d19786c70 100644 (file)
@@ -46,12 +46,15 @@ class Migration(migrations.Migration):
         # Drop the django-q tables entirely
         # Must be done last or there could be references here
         migrations.RunSQL(
-            "DROP TABLE IF EXISTS django_q_ormq", reverse_sql=migrations.RunSQL.noop
+            "DROP TABLE IF EXISTS django_q_ormq",
+            reverse_sql=migrations.RunSQL.noop,
         ),
         migrations.RunSQL(
-            "DROP TABLE IF EXISTS django_q_schedule", reverse_sql=migrations.RunSQL.noop
+            "DROP TABLE IF EXISTS django_q_schedule",
+            reverse_sql=migrations.RunSQL.noop,
         ),
         migrations.RunSQL(
-            "DROP TABLE IF EXISTS django_q_task", reverse_sql=migrations.RunSQL.noop
+            "DROP TABLE IF EXISTS django_q_task",
+            reverse_sql=migrations.RunSQL.noop,
         ),
     ]
index 543f3a492952014cd5642d2b0616f6322db5bec6..433363e9fa00defbd895a3717db0cc92193497cb 100644 (file)
@@ -79,7 +79,8 @@ class Migration(migrations.Migration):
         migrations.AddConstraint(
             model_name="storagepath",
             constraint=models.UniqueConstraint(
-                fields=("name", "owner"), name="documents_storagepath_unique_name_owner"
+                fields=("name", "owner"),
+                name="documents_storagepath_unique_name_owner",
             ),
         ),
         migrations.AddConstraint(
@@ -93,7 +94,8 @@ class Migration(migrations.Migration):
         migrations.AddConstraint(
             model_name="tag",
             constraint=models.UniqueConstraint(
-                fields=("name", "owner"), name="documents_tag_unique_name_owner"
+                fields=("name", "owner"),
+                name="documents_tag_unique_name_owner",
             ),
         ),
         migrations.AddConstraint(
index 5cd05b815e040a260621ad39198433132eaa5cbd..11c3da3800faa551452b6f419d303d3e2fa339d3 100644 (file)
@@ -43,7 +43,9 @@ class Migration(migrations.Migration):
             model_name="note",
             name="note",
             field=models.TextField(
-                blank=True, help_text="Note for the document", verbose_name="content"
+                blank=True,
+                help_text="Note for the document",
+                verbose_name="content",
             ),
         ),
         migrations.AlterField(
index deaa2c28adadde78b734080c089774030335c2ab..079459d0039828444d2e3d6fe04862671e4c33d6 100644 (file)
@@ -269,7 +269,7 @@ class Document(ModelWithOwner):
             MinValueValidator(ARCHIVE_SERIAL_NUMBER_MIN),
         ],
         help_text=_(
-            "The position of this document in your physical document " "archive.",
+            "The position of this document in your physical document archive.",
         ),
     )
 
@@ -470,6 +470,9 @@ class SavedViewFilterRule(models.Model):
         verbose_name = _("filter rule")
         verbose_name_plural = _("filter rules")
 
+    def __str__(self) -> str:
+        return f"SavedViewFilterRule: {self.rule_type} : {self.value}"
+
 
 # TODO: why is this in the models file?
 # TODO: how about, what is this and where is it documented?
@@ -483,7 +486,7 @@ class FileInfo:
             (
                 "created-title",
                 re.compile(
-                    r"^(?P<created>\d{8}(\d{6})?Z) - " r"(?P<title>.*)$",
+                    r"^(?P<created>\d{8}(\d{6})?Z) - (?P<title>.*)$",
                     flags=re.IGNORECASE,
                 ),
             ),
@@ -634,6 +637,9 @@ class PaperlessTask(models.Model):
         ),
     )
 
+    def __str__(self) -> str:
+        return f"Task {self.task_id}"
+
 
 class Note(models.Model):
     note = models.TextField(
index cd17a33b1dd5dbfd66da899ade9afacb789047cc..4bec79c617cb5919077e2e78254369c218c28851 100644 (file)
@@ -323,7 +323,7 @@ class DocumentParser(LoggingMixin):
         return []
 
     def parse(self, document_path, mime_type, file_name=None):
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def get_archive_path(self):
         return self.archive_path
@@ -332,7 +332,7 @@ class DocumentParser(LoggingMixin):
         """
         Returns the path to a file we can use as a thumbnail for this document.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def get_text(self):
         return self.text
index 578f1a936697b25fba67e008947f763495fdabee..b74d07bd5f872525dbff984912232239d8a3ba8b 100644 (file)
@@ -94,7 +94,7 @@ def check_sanity(progress=False) -> SanityCheckMessages:
             except OSError as e:
                 messages.error(doc.pk, f"Cannot read original file of document: {e}")
             else:
-                if not checksum == doc.checksum:
+                if checksum != doc.checksum:
                     messages.error(
                         doc.pk,
                         "Checksum mismatch. "
@@ -127,7 +127,7 @@ def check_sanity(progress=False) -> SanityCheckMessages:
                         f"Cannot read archive file of document : {e}",
                     )
                 else:
-                    if not checksum == doc.archive_checksum:
+                    if checksum != doc.archive_checksum:
                         messages.error(
                             doc.pk,
                             "Checksum mismatch of archived document. "
index 6111badaeab1f36e21012bb822fe1c1c97477ab3..2ad392f4c0f5cc38e178a23c23409295b8a0b25a 100644 (file)
@@ -7,7 +7,7 @@ from celery import states
 try:
     import zoneinfo
 except ImportError:
-    import backports.zoneinfo as zoneinfo
+    from backports import zoneinfo
 import magic
 from django.conf import settings
 from django.utils.text import slugify
@@ -152,7 +152,7 @@ class SetPermissionsMixin:
 class OwnedObjectSerializer(serializers.ModelSerializer, SetPermissionsMixin):
     def __init__(self, *args, **kwargs):
         self.user = kwargs.pop("user", None)
-        return super().__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
 
     def get_permissions(self, obj):
         view_codename = f"view_{obj.__class__.__name__.lower()}"
@@ -282,7 +282,7 @@ class ColorField(serializers.Field):
         for id, color in self.COLOURS:
             if id == data:
                 return color
-        raise serializers.ValidationError()
+        raise serializers.ValidationError
 
     def to_representation(self, value):
         for id, color in self.COLOURS:
@@ -513,12 +513,12 @@ class DocumentListSerializer(serializers.Serializer):
     def _validate_document_id_list(self, documents, name="documents"):
         if not type(documents) == list:
             raise serializers.ValidationError(f"{name} must be a list")
-        if not all([type(i) == int for i in documents]):
+        if not all(type(i) == int for i in documents):
             raise serializers.ValidationError(f"{name} must be a list of integers")
         count = Document.objects.filter(id__in=documents).count()
         if not count == len(documents):
             raise serializers.ValidationError(
-                f"Some documents in {name} don't exist or were " f"specified twice.",
+                f"Some documents in {name} don't exist or were specified twice.",
             )
 
     def validate_documents(self, documents):
@@ -549,7 +549,7 @@ class BulkEditSerializer(DocumentListSerializer, SetPermissionsMixin):
     def _validate_tag_id_list(self, tags, name="tags"):
         if not type(tags) == list:
             raise serializers.ValidationError(f"{name} must be a list")
-        if not all([type(i) == int for i in tags]):
+        if not all(type(i) == int for i in tags):
             raise serializers.ValidationError(f"{name} must be a list of integers")
         count = Tag.objects.filter(id__in=tags).count()
         if not count == len(tags):
@@ -826,8 +826,8 @@ class StoragePathSerializer(MatchingModelSerializer, OwnedObjectSerializer):
                 original_name="testfile",
             )
 
-        except (KeyError):
-            raise serializers.ValidationError(_("Invalid variable detected."))
+        except KeyError as err:
+            raise serializers.ValidationError(_("Invalid variable detected.")) from err
 
         return path
 
@@ -919,7 +919,7 @@ class AcknowledgeTasksViewSerializer(serializers.Serializer):
         pass
         if not type(tasks) == list:
             raise serializers.ValidationError(f"{name} must be a list")
-        if not all([type(i) == int for i in tasks]):
+        if not all(type(i) == int for i in tasks):
             raise serializers.ValidationError(f"{name} must be a list of integers")
         count = PaperlessTask.objects.filter(id__in=tasks).count()
         if not count == len(tasks):
index 92f8e61597323f11e360340a995e85e0f616d7c4..271fb459738e61919a3eed0bc658b88fd9838848 100644 (file)
@@ -19,14 +19,14 @@ from django.utils import termcolors
 from django.utils import timezone
 from filelock import FileLock
 
-from .. import matching
-from ..file_handling import create_source_path_directory
-from ..file_handling import delete_empty_directories
-from ..file_handling import generate_unique_filename
-from ..models import Document
-from ..models import MatchingModel
-from ..models import PaperlessTask
-from ..models import Tag
+from documents import matching
+from documents.file_handling import create_source_path_directory
+from documents.file_handling import delete_empty_directories
+from documents.file_handling import generate_unique_filename
+from documents.models import Document
+from documents.models import MatchingModel
+from documents.models import PaperlessTask
+from documents.models import Tag
 
 logger = logging.getLogger("paperless.handlers")
 
@@ -54,10 +54,7 @@ def set_correspondent(
     potential_correspondents = matching.match_correspondents(document, classifier)
 
     potential_count = len(potential_correspondents)
-    if potential_correspondents:
-        selected = potential_correspondents[0]
-    else:
-        selected = None
+    selected = potential_correspondents[0] if potential_correspondents else None
     if potential_count > 1:
         if use_first:
             logger.debug(
@@ -120,10 +117,7 @@ def set_document_type(
     potential_document_type = matching.match_document_types(document, classifier)
 
     potential_count = len(potential_document_type)
-    if potential_document_type:
-        selected = potential_document_type[0]
-    else:
-        selected = None
+    selected = potential_document_type[0] if potential_document_type else None
 
     if potential_count > 1:
         if use_first:
@@ -255,10 +249,7 @@ def set_storage_path(
     )
 
     potential_count = len(potential_storage_path)
-    if potential_storage_path:
-        selected = potential_storage_path[0]
-    else:
-        selected = None
+    selected = potential_storage_path[0] if potential_storage_path else None
 
     if potential_count > 1:
         if use_first:
@@ -370,7 +361,7 @@ def validate_move(instance, old_path, new_path):
     if not os.path.isfile(old_path):
         # Can't do anything if the old file does not exist anymore.
         logger.fatal(f"Document {str(instance)}: File {old_path} has gone.")
-        raise CannotMoveFilesException()
+        raise CannotMoveFilesException
 
     if os.path.isfile(new_path):
         # Can't do anything if the new file already exists. Skip updating file.
@@ -378,7 +369,7 @@ def validate_move(instance, old_path, new_path):
             f"Document {str(instance)}: Cannot rename file "
             f"since target path {new_path} already exists.",
         )
-        raise CannotMoveFilesException()
+        raise CannotMoveFilesException
 
 
 @receiver(models.signals.m2m_changed, sender=Document.tags.through)
@@ -546,10 +537,10 @@ def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs):
             date_started=None,
             date_done=None,
         )
-    except Exception as e:  # pragma: no cover
+    except Exception:  # pragma: no cover
         # Don't let an exception in the signal handlers prevent
         # a document from being consumed.
-        logger.error(f"Creating PaperlessTask failed: {e}", exc_info=True)
+        logger.exception("Creating PaperlessTask failed")
 
 
 @task_prerun.connect
@@ -568,15 +559,20 @@ def task_prerun_handler(sender=None, task_id=None, task=None, **kwargs):
             task_instance.status = states.STARTED
             task_instance.date_started = timezone.now()
             task_instance.save()
-    except Exception as e:  # pragma: no cover
+    except Exception:  # pragma: no cover
         # Don't let an exception in the signal handlers prevent
         # a document from being consumed.
-        logger.error(f"Setting PaperlessTask started failed: {e}", exc_info=True)
+        logger.exception("Setting PaperlessTask started failed")
 
 
 @task_postrun.connect
 def task_postrun_handler(
-    sender=None, task_id=None, task=None, retval=None, state=None, **kwargs
+    sender=None,
+    task_id=None,
+    task=None,
+    retval=None,
+    state=None,
+    **kwargs,
 ):
     """
     Updates the result of the PaperlessTask.
@@ -591,7 +587,7 @@ def task_postrun_handler(
             task_instance.result = retval
             task_instance.date_done = timezone.now()
             task_instance.save()
-    except Exception as e:  # pragma: no cover
+    except Exception:  # pragma: no cover
         # Don't let an exception in the signal handlers prevent
         # a document from being consumed.
-        logger.error(f"Updating PaperlessTask failed: {e}", exc_info=True)
+        logger.exception("Updating PaperlessTask failed")
index 5c300bca288260eb11dca82bd5cd4e83a1a7e77b..c2d72640581bbe85d8c724e39f4e71e189d46057 100644 (file)
@@ -297,7 +297,7 @@ def update_document_archive_file(document_id):
 
     except Exception:
         logger.exception(
-            f"Error while parsing document {document} " f"(ID: {document_id})",
+            f"Error while parsing document {document} (ID: {document_id})",
         )
     finally:
         parser.cleanup()
index 83644c41142e8bf6e46db51bf972e1a3649a1ace..de41bbd02fdd62699799f3deccb5e5712a3c0a98 100644 (file)
@@ -1,8 +1,8 @@
 from factory import Faker
 from factory.django import DjangoModelFactory
 
-from ..models import Correspondent
-from ..models import Document
+from documents.models import Correspondent
+from documents.models import Document
 
 
 class CorrespondentFactory(DjangoModelFactory):
index da60ab3c4f90ee0d24d32b6dfc8af0a703b79da4..6cd6b610a79a35a5181c5d95667e15c93c4d9a6d 100644 (file)
@@ -17,7 +17,7 @@ import celery
 try:
     import zoneinfo
 except ImportError:
-    import backports.zoneinfo as zoneinfo
+    from backports import zoneinfo
 
 import pytest
 from django.conf import settings
@@ -110,9 +110,9 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
     def test_document_fields(self):
         c = Correspondent.objects.create(name="c", pk=41)
         dt = DocumentType.objects.create(name="dt", pk=63)
-        tag = Tag.objects.create(name="t", pk=85)
+        Tag.objects.create(name="t", pk=85)
         storage_path = StoragePath.objects.create(name="sp", pk=77, path="p")
-        doc = Document.objects.create(
+        Document.objects.create(
             title="WOW",
             content="the content",
             correspondent=c,
@@ -877,7 +877,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             added=timezone.make_aware(datetime.datetime(2020, 7, 13)),
             content="test",
         )
-        d6 = Document.objects.create(checksum="6", content="test2")
+        Document.objects.create(checksum="6", content="test2")
         d7 = Document.objects.create(checksum="7", storage_path=sp, content="test")
 
         with AsyncWriter(index.open_index()) as writer:
@@ -1046,13 +1046,13 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             mime_type="application/pdf",
             content="abc",
         )
-        doc2 = Document.objects.create(
+        Document.objects.create(
             title="none2",
             checksum="B",
             mime_type="application/pdf",
             content="123",
         )
-        doc3 = Document.objects.create(
+        Document.objects.create(
             title="none3",
             checksum="C",
             mime_type="text/plain",
@@ -1546,14 +1546,14 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             show_on_dashboard=False,
             show_in_sidebar=False,
         )
-        v2 = SavedView.objects.create(
+        SavedView.objects.create(
             owner=u2,
             name="test2",
             sort_field="",
             show_on_dashboard=False,
             show_in_sidebar=False,
         )
-        v3 = SavedView.objects.create(
+        SavedView.objects.create(
             owner=u2,
             name="test3",
             sort_field="",
@@ -1594,7 +1594,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
     def test_create_update_patch(self):
 
-        u1 = User.objects.create_user("user1")
+        User.objects.create_user("user1")
 
         view = {
             "name": "test",
@@ -3020,7 +3020,7 @@ class TestBulkDownload(DirectoriesMixin, APITestCase):
                 self.assertEqual(f.read(), zipf.read("2021-01-01 document A_01.pdf"))
 
     def test_compression(self):
-        response = self.client.post(
+        self.client.post(
             self.ENDPOINT,
             json.dumps(
                 {"documents": [self.doc2.id, self.doc2b.id], "compression": "lzma"},
@@ -3271,7 +3271,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
         user = User.objects.create_user(username="test")
         self.client.force_authenticate(user)
 
-        d = Document.objects.create(title="Test")
+        Document.objects.create(title="Test")
 
         self.assertEqual(
             self.client.get("/api/documents/").status_code,
@@ -3305,7 +3305,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
         user.user_permissions.add(*Permission.objects.all())
         self.client.force_authenticate(user)
 
-        d = Document.objects.create(title="Test")
+        Document.objects.create(title="Test")
 
         self.assertEqual(
             self.client.get("/api/documents/").status_code,
@@ -3696,7 +3696,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         THEN:
             - No task data is returned
         """
-        task1 = PaperlessTask.objects.create(
+        PaperlessTask.objects.create(
             task_id=str(uuid.uuid4()),
             task_file_name="task_one.pdf",
         )
@@ -3746,7 +3746,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         THEN:
             - The returned data includes the task result
         """
-        task = PaperlessTask.objects.create(
+        PaperlessTask.objects.create(
             task_id=str(uuid.uuid4()),
             task_file_name="task_one.pdf",
             status=celery.states.SUCCESS,
@@ -3772,7 +3772,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         THEN:
             - The returned result is the exception info
         """
-        task = PaperlessTask.objects.create(
+        PaperlessTask.objects.create(
             task_id=str(uuid.uuid4()),
             task_file_name="task_one.pdf",
             status=celery.states.FAILURE,
@@ -3801,7 +3801,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         THEN:
             - Returned data include the filename
         """
-        task = PaperlessTask.objects.create(
+        PaperlessTask.objects.create(
             task_id=str(uuid.uuid4()),
             task_file_name="test.pdf",
             task_name="documents.tasks.some_task",
@@ -3827,7 +3827,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         THEN:
             - Returned data include the filename
         """
-        task = PaperlessTask.objects.create(
+        PaperlessTask.objects.create(
             task_id=str(uuid.uuid4()),
             task_file_name="anothertest.pdf",
             task_name="documents.tasks.some_task",
index 975a3cc1bb464599c942973e21a86a8a475fa423..c828b8afe795f9c15d31a2ef771e58a7d4ec893e 100644 (file)
@@ -1,7 +1,7 @@
-import os
 import shutil
 from pathlib import Path
 from unittest import mock
+import platform
 
 import pytest
 from django.conf import settings
@@ -11,19 +11,11 @@ from documents import barcodes
 from documents import tasks
 from documents.consumer import ConsumerError
 from documents.data_models import ConsumableDocument
-from documents.data_models import DocumentMetadataOverrides
 from documents.data_models import DocumentSource
 from documents.tests.utils import DirectoriesMixin
 from documents.tests.utils import FileSystemAssertsMixin
 from PIL import Image
 
-try:
-    import zxingcpp
-
-    ZXING_AVAILIBLE = True
-except ImportError:
-    ZXING_AVAILIBLE = False
-
 
 @override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
 class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@@ -459,7 +451,7 @@ class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertDictEqual(separator_page_numbers, {})
 
     @override_settings(CONSUMER_BARCODE_STRING="ADAR-NEXTDOC")
-    def test_scan_file_for_separating_qr_barcodes(self):
+    def test_scan_file_qr_barcodes_was_problem(self):
         """
         GIVEN:
             - Input PDF with certain QR codes that aren't detected at current size
@@ -1068,7 +1060,7 @@ class TestAsnBarcode(DirectoriesMixin, TestCase):
 
 
 @pytest.mark.skipif(
-    not ZXING_AVAILIBLE,
+    platform.machine().upper() not in {"AMD64"},
     reason="No zxingcpp",
 )
 @override_settings(CONSUMER_BARCODE_SCANNER="ZXING")
@@ -1077,7 +1069,7 @@ class TestBarcodeZxing(TestBarcode):
 
 
 @pytest.mark.skipif(
-    not ZXING_AVAILIBLE,
+    platform.machine().upper() not in {"AMD64"},
     reason="No zxingcpp",
 )
 @override_settings(CONSUMER_BARCODE_SCANNER="ZXING")
index 1dad8e128dbc7345e3d924e69841bcd7afe8b5eb..f0aa5894e4590e9c9631ccfb3a9ab1108ca54b52 100644 (file)
@@ -386,7 +386,6 @@ class TestClassifier(DirectoriesMixin, TestCase):
         # rebuilding the file and committing that.  Not developer friendly
         # Need to rethink how to pass the load through to a file with a single
         # old model?
-        pass
 
     def test_one_correspondent_predict(self):
         c1 = Correspondent.objects.create(
@@ -516,7 +515,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertListEqual(self.classifier.predict_tags(doc1.content), [t1.pk])
 
     def test_one_tag_predict_unassigned(self):
-        t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
+        Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
 
         doc1 = Document.objects.create(
             title="doc1",
@@ -643,7 +642,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertIsNotNone(classifier)
 
         with mock.patch("documents.classifier.DocumentClassifier.load") as load:
-            classifier2 = load_classifier()
+            load_classifier()
             load.assert_not_called()
 
     @mock.patch("documents.classifier.DocumentClassifier.load")
index b22870780b99ab89d5b92e14430c6541ad0cbeb9..cd06e9782d2a542dc4b7fee32361ba809d716583 100644 (file)
@@ -12,23 +12,23 @@ from dateutil import tz
 try:
     import zoneinfo
 except ImportError:
-    import backports.zoneinfo as zoneinfo
+    from backports import zoneinfo
 
 from django.conf import settings
 from django.utils import timezone
 from django.test import override_settings
 from django.test import TestCase
 
-from ..consumer import Consumer
-from ..consumer import ConsumerError
-from ..models import Correspondent
-from ..models import Document
-from ..models import DocumentType
-from ..models import FileInfo
-from ..models import Tag
-from ..parsers import DocumentParser
-from ..parsers import ParseError
-from ..tasks import sanity_check
+from documents.consumer import Consumer
+from documents.consumer import ConsumerError
+from documents.models import Correspondent
+from documents.models import Document
+from documents.models import DocumentType
+from documents.models import FileInfo
+from documents.models import Tag
+from documents.parsers import DocumentParser
+from documents.parsers import ParseError
+from documents.tasks import sanity_check
 from .utils import DirectoriesMixin
 from documents.tests.utils import FileSystemAssertsMixin
 
@@ -72,8 +72,8 @@ class TestFieldPermutations(TestCase):
         "20150102030405Z",
         "20150102Z",
     )
-    valid_correspondents = ["timmy", "Dr. McWheelie", "Dash Gor-don", "ο Θερμαστής", ""]
-    valid_titles = ["title", "Title w Spaces", "Title a-dash", "Τίτλος", ""]
+    valid_correspondents = ["timmy", "Dr. McWheelie", "Dash Gor-don", "o Θεpμaoτής", ""]
+    valid_titles = ["title", "Title w Spaces", "Title a-dash", "Tίτλoς", ""]
     valid_tags = ["tag", "tig,tag", "tag1,tag2,tag-3"]
 
     def _test_guessed_attributes(
@@ -135,9 +135,7 @@ class TestFieldPermutations(TestCase):
         filename = "tag1,tag2_20190908_180610_0001.pdf"
         all_patt = re.compile("^.*$")
         none_patt = re.compile("$a")
-        exact_patt = re.compile("^([a-z0-9,]+)_(\\d{8})_(\\d{6})_([0-9]+)\\.")
-        repl1 = " - \\4 - \\1."  # (empty) corrspondent, title and tags
-        repl2 = "\\2Z - " + repl1  # creation date + repl1
+        re.compile("^([a-z0-9,]+)_(\\d{8})_(\\d{6})_([0-9]+)\\.")
 
         # No transformations configured (= default)
         info = FileInfo.from_filename(filename)
@@ -177,10 +175,6 @@ class TestFieldPermutations(TestCase):
 
 
 class DummyParser(DocumentParser):
-    def get_thumbnail(self, document_path, mime_type, file_name=None):
-        # not important during tests
-        raise NotImplementedError()
-
     def __init__(self, logging_group, scratch_dir, archive_path):
         super().__init__(logging_group, None)
         _, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)
@@ -197,9 +191,6 @@ class CopyParser(DocumentParser):
     def get_thumbnail(self, document_path, mime_type, file_name=None):
         return self.fake_thumb
 
-    def get_thumbnail(self, document_path, mime_type, file_name=None):
-        return self.fake_thumb
-
     def __init__(self, logging_group, progress_callback=None):
         super().__init__(logging_group, progress_callback)
         _, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=self.tempdir)
@@ -211,10 +202,6 @@ class CopyParser(DocumentParser):
 
 
 class FaultyParser(DocumentParser):
-    def get_thumbnail(self, document_path, mime_type, file_name=None):
-        # not important during tests
-        raise NotImplementedError()
-
     def __init__(self, logging_group, scratch_dir):
         super().__init__(logging_group)
         _, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)
index 30466a83f05df6f47fdf7f67254f3f04b756d808..e1b1799760db71a8857e41fd6d42b1b3120733f8 100644 (file)
@@ -46,7 +46,7 @@ class TestDate(TestCase):
         )
 
     def test_date_format_5(self):
-        text = "lorem ipsum 130218, 2018, 20180213 and lorem 13.02.2018 lorem " "ipsum"
+        text = "lorem ipsum 130218, 2018, 20180213 and lorem 13.02.2018 lorem ipsum"
         date = parse_date("", text)
         self.assertEqual(
             date,
@@ -68,7 +68,7 @@ class TestDate(TestCase):
         self.assertEqual(parse_date("", text), None)
 
     def test_date_format_7(self):
-        text = "lorem ipsum\n" "März 2019\n" "lorem ipsum"
+        text = "lorem ipsum\nMärz 2019\nlorem ipsum"
         date = parse_date("", text)
         self.assertEqual(
             date,
@@ -95,7 +95,7 @@ class TestDate(TestCase):
 
     @override_settings(SCRATCH_DIR=SCRATCH)
     def test_date_format_9(self):
-        text = "lorem ipsum\n" "27. Nullmonth 2020\n" "März 2020\n" "lorem ipsum"
+        text = "lorem ipsum\n27. Nullmonth 2020\nMärz 2020\nlorem ipsum"
         self.assertEqual(
             parse_date("", text),
             datetime.datetime(2020, 3, 1, 0, 0, tzinfo=tz.gettz(settings.TIME_ZONE)),
@@ -262,7 +262,7 @@ class TestDate(TestCase):
         THEN:
             - Should parse the date non-ignored date from content
         """
-        text = "lorem ipsum 110319, 20200117 and lorem 13.02.2018 lorem " "ipsum"
+        text = "lorem ipsum 110319, 20200117 and lorem 13.02.2018 lorem ipsum"
         self.assertEqual(
             parse_date("", text),
             datetime.datetime(2018, 2, 13, 0, 0, tzinfo=tz.gettz(settings.TIME_ZONE)),
@@ -283,7 +283,7 @@ class TestDate(TestCase):
         THEN:
             - Should parse the date non-ignored date from content
         """
-        text = "lorem ipsum 190311, 20200117 and lorem 13.02.2018 lorem " "ipsum"
+        text = "lorem ipsum 190311, 20200117 and lorem 13.02.2018 lorem ipsum"
 
         self.assertEqual(
             parse_date("", text),
index 57455fb8fe167e69ffa58ea6b671b3dbe0dbb14d..763f5049ca4c848a7b592ac82062730f0c479825 100644 (file)
@@ -6,14 +6,14 @@ from unittest import mock
 try:
     import zoneinfo
 except ImportError:
-    import backports.zoneinfo as zoneinfo
+    from backports import zoneinfo
 
 from django.test import override_settings
 from django.test import TestCase
 from django.utils import timezone
 
-from ..models import Correspondent
-from ..models import Document
+from documents.models import Correspondent
+from documents.models import Document
 
 
 class TestDocument(TestCase):
index 04ef0a79fe97c21c825e825794cbcae085c3c87b..d2f61eb1c30abf6f640c2fa4dba88a899e045507 100644 (file)
@@ -10,17 +10,16 @@ from django.db import DatabaseError
 from django.test import override_settings
 from django.test import TestCase
 from django.utils import timezone
-from documents.tests.utils import FileSystemAssertsMixin
 
-from ..file_handling import create_source_path_directory
-from ..file_handling import delete_empty_directories
-from ..file_handling import generate_filename
-from ..models import Correspondent
-from ..models import Document
-from ..models import DocumentType
-from ..models import StoragePath
-from .utils import DirectoriesMixin
-from .utils import FileSystemAssertsMixin
+from documents.file_handling import create_source_path_directory
+from documents.file_handling import delete_empty_directories
+from documents.file_handling import generate_filename
+from documents.models import Correspondent
+from documents.models import Document
+from documents.models import DocumentType
+from documents.models import StoragePath
+from documents.tests.utils import DirectoriesMixin
+from documents.tests.utils import FileSystemAssertsMixin
 
 
 class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@@ -121,7 +120,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
     def test_file_renaming_database_error(self):
 
-        document1 = Document.objects.create(
+        Document.objects.create(
             mime_type="application/pdf",
             storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
             checksum="AAAAA",
@@ -171,7 +170,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         Path(document.source_path).touch()
 
         # Ensure file deletion after delete
-        pk = document.pk
         document.delete()
         self.assertIsNotFile(
             os.path.join(settings.ORIGINALS_DIR, "none", "none.pdf"),
@@ -440,7 +438,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         # Check proper handling of files
         self.assertIsDir(os.path.join(settings.ORIGINALS_DIR, "none/none"))
 
-        pk = document.pk
         document.delete()
 
         self.assertIsNotFile(
@@ -705,7 +702,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
     def test_move_archive_error(self, m):
         def fake_rename(src, dst):
             if "archive" in str(src):
-                raise OSError()
+                raise OSError
             else:
                 os.remove(src)
                 Path(dst).touch()
@@ -756,7 +753,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
     def test_move_file_error(self, m):
         def fake_rename(src, dst):
             if "original" in str(src):
-                raise OSError()
+                raise OSError
             else:
                 os.remove(src)
                 Path(dst).touch()
index 10146ff30575d08a026840836309d72894bbf22b..8a659dbb6947a9d4a8a187abd50087ac7ed32c30 100644 (file)
@@ -2,7 +2,7 @@ from django.core.management.base import CommandError
 from django.test import TestCase
 from documents.settings import EXPORTER_FILE_NAME
 
-from ..management.commands.document_importer import Command
+from documents.management.commands.document_importer import Command
 
 
 class TestImporter(TestCase):
index 637a8cb204170fef06a2e780501015f24a05386d..150880116d06b6f773a2def522342b0044ed7190 100644 (file)
@@ -13,7 +13,6 @@ from django.test import override_settings
 from django.test import TransactionTestCase
 from documents.consumer import ConsumerError
 from documents.data_models import ConsumableDocument
-from documents.data_models import DocumentMetadataOverrides
 from documents.management.commands import document_consumer
 from documents.models import Tag
 from documents.tests.utils import DirectoriesMixin
index 284151ffc16ca9c8c1ac74f0569472e02f77389a..05b6db5b3c844f1e1a9f75b78ce72554def4c77f 100644 (file)
@@ -204,7 +204,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
                 self.assertEqual(element["fields"]["document"], self.d1.id)
                 self.assertEqual(element["fields"]["user"], self.user.id)
 
-        with paperless_environment() as dirs:
+        with paperless_environment():
             self.assertEqual(Document.objects.count(), 4)
             Document.objects.all().delete()
             Correspondent.objects.all().delete()
@@ -345,7 +345,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             os.path.join(self.dirs.media_dir, "documents"),
         )
 
-        m = self._do_export(use_filename_format=True)
+        self._do_export(use_filename_format=True)
         self.assertIsFile(os.path.join(self.target, "wow1", "c.pdf"))
 
         self.assertIsFile(os.path.join(self.target, "manifest.json"))
@@ -537,7 +537,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
                 )
         self.assertFalse(has_archive)
 
-        with paperless_environment() as dirs:
+        with paperless_environment():
             self.assertEqual(Document.objects.count(), 4)
             Document.objects.all().delete()
             self.assertEqual(Document.objects.count(), 0)
@@ -580,7 +580,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
                 )
         self.assertFalse(has_thumbnail)
 
-        with paperless_environment() as dirs:
+        with paperless_environment():
             self.assertEqual(Document.objects.count(), 4)
             Document.objects.all().delete()
             self.assertEqual(Document.objects.count(), 0)
@@ -609,7 +609,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             has_document = has_document or element["model"] == "documents.document"
         self.assertFalse(has_document)
 
-        with paperless_environment() as dirs:
+        with paperless_environment():
             self.assertEqual(Document.objects.count(), 4)
             Document.objects.all().delete()
             self.assertEqual(Document.objects.count(), 0)
@@ -631,9 +631,9 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             os.path.join(self.dirs.media_dir, "documents"),
         )
 
-        manifest = self._do_export(use_folder_prefix=True)
+        self._do_export(use_folder_prefix=True)
 
-        with paperless_environment() as dirs:
+        with paperless_environment():
             self.assertEqual(Document.objects.count(), 4)
             Document.objects.all().delete()
             self.assertEqual(Document.objects.count(), 0)
index 8d5cd6695b3de657d00d430b82bed9f375372bab..56d47ee463425c29f7ae01f89bc00e6a36611a1e 100644 (file)
@@ -8,12 +8,12 @@ from django.contrib.auth.models import User
 from django.test import override_settings
 from django.test import TestCase
 
-from .. import matching
-from ..models import Correspondent
-from ..models import Document
-from ..models import DocumentType
-from ..models import Tag
-from ..signals import document_consumption_finished
+from documents import matching
+from documents.models import Correspondent
+from documents.models import Document
+from documents.models import DocumentType
+from documents.models import Tag
+from documents.signals import document_consumption_finished
 
 
 class _TestMatchingBase(TestCase):
index acf8761c74b49e42fd97895af46f6525fef8312e..32929d92c22911b656acc780ccf859f7a6506d44 100644 (file)
@@ -310,7 +310,7 @@ class TestMigrateArchiveFilesErrors(DirectoriesMixin, TestMigrations):
     def test_parser_missing(self):
         Document = self.apps.get_model("documents", "Document")
 
-        doc1 = make_test_document(
+        make_test_document(
             Document,
             "document",
             "invalid/typesss768",
@@ -318,7 +318,7 @@ class TestMigrateArchiveFilesErrors(DirectoriesMixin, TestMigrations):
             "document.png",
             simple_pdf,
         )
-        doc2 = make_test_document(
+        make_test_document(
             Document,
             "document",
             "invalid/typesss768",
@@ -462,7 +462,7 @@ class TestMigrateArchiveFilesBackwards(
 
         Document = apps.get_model("documents", "Document")
 
-        doc_unrelated = make_test_document(
+        make_test_document(
             Document,
             "unrelated",
             "application/pdf",
@@ -471,14 +471,14 @@ class TestMigrateArchiveFilesBackwards(
             simple_pdf2,
             "unrelated.pdf",
         )
-        doc_no_archive = make_test_document(
+        make_test_document(
             Document,
             "no_archive",
             "text/plain",
             simple_txt,
             "no_archive.txt",
         )
-        clashB = make_test_document(
+        make_test_document(
             Document,
             "clash",
             "image/jpeg",
index d230511ff285287c26106a3f42a6d444fa5fa7cf..ee882dd8491224308e887e45a72be13e924b1b1a 100644 (file)
@@ -1,14 +1,14 @@
 from django.test import TestCase
 
-from ..models import Correspondent
-from ..models import Document
+from documents.models import Correspondent
+from documents.models import Document
 from .factories import CorrespondentFactory
 from .factories import DocumentFactory
 
 
 class CorrespondentTestCase(TestCase):
     def test___str__(self):
-        for s in ("test", "οχι", "test with fun_charÅc'\"terß"):
+        for s in ("test", "oχi", "test with fun_charÅc'\"terß"):
             correspondent = CorrespondentFactory.create(name=s)
             self.assertEqual(str(correspondent), s)
 
index eda4bacf8db07002685f0fbe4f68c94c8d343620..7ec06d1a07cabea58eeb6221b22703798e7cc5ee 100644 (file)
@@ -94,7 +94,7 @@ class TestParserDiscovery(TestCase):
             - No parser class is returned
         """
         m.return_value = []
-        with TemporaryDirectory() as tmpdir:
+        with TemporaryDirectory():
             self.assertIsNone(get_parser_class_for_mime_type("application/pdf"))
 
     @mock.patch("documents.parsers.document_consumer_declaration.send")
index 9bb424cbc750cd7ff8427813cbbf34bb73d78a1b..5fb4adfccf20790d0fe20aed9924cf9f70fd1646 100644 (file)
@@ -149,7 +149,7 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
         )
 
     def test_orphaned_file(self):
-        doc = self.make_test_data()
+        self.make_test_data()
         Path(self.dirs.originals_dir, "orphaned").touch()
         messages = check_sanity()
         self.assertTrue(messages.has_warning)
index 18600d709a6d23863fb97c837488f3f2ffcc588d..a6befc25e03b05889b51d16da1ae4aab6ed6bc2b 100644 (file)
@@ -4,7 +4,6 @@ from unittest import mock
 import celery
 from django.test import TestCase
 from documents.data_models import ConsumableDocument
-from documents.data_models import DocumentMetadataOverrides
 from documents.data_models import DocumentSource
 from documents.models import PaperlessTask
 from documents.signals.handlers import before_task_publish_handler
index c23a25c04f42041f154e18e049b9fdfe6602e9ca..a8c6a67daf37cbd5c6773e4d34928b259f3ab7ca 100644 (file)
@@ -47,7 +47,7 @@ class TestViews(TestCase):
                 self.client.cookies.load(
                     {settings.LANGUAGE_COOKIE_NAME: language_given},
                 )
-            elif settings.LANGUAGE_COOKIE_NAME in self.client.cookies.keys():
+            elif settings.LANGUAGE_COOKIE_NAME in self.client.cookies:
                 self.client.cookies.pop(settings.LANGUAGE_COOKIE_NAME)
 
             response = self.client.get(
index a50d9f7f40afb6784a0a00d27d89102cbbd1d84b..597555be9983b908f4a44b34c9466beb01f8922b 100644 (file)
@@ -265,10 +265,7 @@ class DocumentViewSet(
     def get_serializer(self, *args, **kwargs):
         super().get_serializer(*args, **kwargs)
         fields_param = self.request.query_params.get("fields", None)
-        if fields_param:
-            fields = fields_param.split(",")
-        else:
-            fields = None
+        fields = fields_param.split(",") if fields_param else None
         truncate_content = self.request.query_params.get("truncate_content", "False")
         serializer_class = self.get_serializer_class()
         kwargs.setdefault("context", self.get_serializer_context())
@@ -358,7 +355,7 @@ class DocumentViewSet(
         try:
             doc = Document.objects.get(pk=pk)
         except Document.DoesNotExist:
-            raise Http404()
+            raise Http404
 
         meta = {
             "original_checksum": doc.checksum,
@@ -422,7 +419,7 @@ class DocumentViewSet(
             response = self.file_response(pk, request, "inline")
             return response
         except (FileNotFoundError, Document.DoesNotExist):
-            raise Http404()
+            raise Http404
 
     @action(methods=["get"], detail=True)
     @method_decorator(cache_control(public=False, max_age=315360000))
@@ -438,14 +435,14 @@ class DocumentViewSet(
 
             return HttpResponse(handle, content_type="image/webp")
         except (FileNotFoundError, Document.DoesNotExist):
-            raise Http404()
+            raise Http404
 
     @action(methods=["get"], detail=True)
     def download(self, request, pk=None):
         try:
             return self.file_response(pk, request, "attachment")
         except (FileNotFoundError, Document.DoesNotExist):
-            raise Http404()
+            raise Http404
 
     def getNotes(self, doc):
         return [
@@ -468,7 +465,7 @@ class DocumentViewSet(
         try:
             doc = Document.objects.get(pk=pk)
         except Document.DoesNotExist:
-            raise Http404()
+            raise Http404
 
         currentUser = request.user
 
@@ -569,7 +566,7 @@ class UnifiedSearchViewSet(DocumentViewSet):
             elif "more_like_id" in self.request.query_params:
                 query_class = index.DelayedMoreLikeThisQuery
             else:
-                raise ValueError()
+                raise ValueError
 
             return query_class(
                 self.searcher,
@@ -606,12 +603,12 @@ class LogViewSet(ViewSet):
 
     def retrieve(self, request, pk=None, *args, **kwargs):
         if pk not in self.log_files:
-            raise Http404()
+            raise Http404
 
         filename = self.get_log_filename(pk)
 
         if not os.path.isfile(filename):
-            raise Http404()
+            raise Http404
 
         with open(filename) as f:
             lines = [line.rstrip() for line in f.readlines()]
old mode 100644 (file)
new mode 100755 (executable)
index 3da37e264b64581e7bed93b72fe429c9b4bb2a71..b6a931ecc202434cf94f7952850efc99cc4e6f41 100644 (file)
@@ -42,7 +42,7 @@ def path_check(var, directory):
                     Error(
                         writeable_message.format(var),
                         writeable_hint.format(
-                            f"\n{dir_mode} {dir_owner} {dir_group} " f"{directory}\n",
+                            f"\n{dir_mode} {dir_owner} {dir_group} {directory}\n",
                         ),
                     ),
                 )
@@ -158,7 +158,7 @@ def settings_values_check(app_configs, **kwargs):
         try:
             import zoneinfo
         except ImportError:  # pragma: nocover
-            import backports.zoneinfo as zoneinfo
+            from backports import zoneinfo
         msgs = []
         if settings.TIME_ZONE not in zoneinfo.available_timezones():
             msgs.append(
index 7013a8e79ef5f2bc4c37ac71eaddaafd7d3fbd88..7c34c8c39d2dcdf278712ca752bec8db9cb470e3 100644 (file)
@@ -12,13 +12,13 @@ class StatusConsumer(WebsocketConsumer):
 
     def connect(self):
         if not self._authenticated():
-            raise DenyConnection()
+            raise DenyConnection
         else:
             async_to_sync(self.channel_layer.group_add)(
                 "status_updates",
                 self.channel_name,
             )
-            raise AcceptConnection()
+            raise AcceptConnection
 
     def disconnect(self, close_code):
         async_to_sync(self.channel_layer.group_discard)(
index ab41ab67c56d98e34c21d3e5204138432388ba2f..54bc2b35318618697595566e13ed54b5d6f0ac1b 100644 (file)
@@ -65,9 +65,11 @@ class UserSerializer(serializers.ModelSerializer):
         if "user_permissions" in validated_data:
             user_permissions = validated_data.pop("user_permissions")
         password = None
-        if "password" in validated_data:
-            if len(validated_data.get("password").replace("*", "")) > 0:
-                password = validated_data.pop("password")
+        if (
+            "password" in validated_data
+            and len(validated_data.get("password").replace("*", "")) > 0
+        ):
+            password = validated_data.pop("password")
         user = User.objects.create(**validated_data)
         # set groups
         if groups:
index b6ee75fdaa5e05e690d4877f5817405938fff127..b8abea5ff77ce165eeea877788e5fe278e1c7005 100644 (file)
@@ -282,7 +282,8 @@ INSTALLED_APPS = [
     "django_filters",
     "django_celery_results",
     "guardian",
-] + env_apps
+    *env_apps,
+]
 
 if DEBUG:
     INSTALLED_APPS.append("channels")
@@ -398,10 +399,7 @@ if ENABLE_HTTP_REMOTE_USER:
     )
 
 # X-Frame options for embedded PDF display:
-if DEBUG:
-    X_FRAME_OPTIONS = "ANY"
-else:
-    X_FRAME_OPTIONS = "SAMEORIGIN"
+X_FRAME_OPTIONS = "ANY" if DEBUG else "SAMEORIGIN"
 
 
 # The next 3 settings can also be set using just PAPERLESS_URL
@@ -424,7 +422,7 @@ if _paperless_url:
     _paperless_uri = urlparse(_paperless_url)
     CSRF_TRUSTED_ORIGINS.append(_paperless_url)
     CORS_ALLOWED_ORIGINS.append(_paperless_url)
-    if ALLOWED_HOSTS != ["*"]:
+    if ["*"] != ALLOWED_HOSTS:
         ALLOWED_HOSTS.append(_paperless_uri.hostname)
     else:
         # always allow localhost. Necessary e.g. for healthcheck in docker.
index cedad7f67ed0842afeeef09e6198383066a1c29e..40e2682ddea6383b9481578a67682861affd6b3a 100644 (file)
@@ -15,18 +15,18 @@ def handle_failed_login(sender, credentials, request, **kwargs):
     if client_ip is None:
         logger.info(
             f"Login failed for user `{credentials['username']}`."
-            " Unable to determine IP address.",
+            " Unable to determine IP address.",
         )
     else:
         if is_routable:
             # We got the client's IP address
             logger.info(
                 f"Login failed for user `{credentials['username']}`"
-                f" from IP `{client_ip}.`",
+                f" from IP `{client_ip}.`",
             )
         else:
             # The client's IP address is private
             logger.info(
                 f"Login failed for user `{credentials['username']}`"
-                f" from private IP `{client_ip}.`",
+                f" from private IP `{client_ip}.`",
             )
diff --git a/src/paperless/tests/__init__.py b/src/paperless/tests/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
index a2f4772f7f5c57e6dc5fb722a7b23ec6d40af18f..2172685087f1d2ee60557f7bf8e99c61ec0bfbd0 100644 (file)
@@ -56,61 +56,57 @@ urlpatterns = [
         include(
             [
                 re_path(
-                    r"^auth/",
+                    "^auth/",
                     include(
                         ("rest_framework.urls", "rest_framework"),
                         namespace="rest_framework",
                     ),
                 ),
                 re_path(
-                    r"^search/autocomplete/",
+                    "^search/autocomplete/",
                     SearchAutoCompleteView.as_view(),
                     name="autocomplete",
                 ),
-                re_path(r"^statistics/", StatisticsView.as_view(), name="statistics"),
+                re_path("^statistics/", StatisticsView.as_view(), name="statistics"),
                 re_path(
-                    r"^documents/post_document/",
+                    "^documents/post_document/",
                     PostDocumentView.as_view(),
                     name="post_document",
                 ),
                 re_path(
-                    r"^documents/bulk_edit/",
+                    "^documents/bulk_edit/",
                     BulkEditView.as_view(),
                     name="bulk_edit",
                 ),
                 re_path(
-                    r"^documents/selection_data/",
+                    "^documents/selection_data/",
                     SelectionDataView.as_view(),
                     name="selection_data",
                 ),
                 re_path(
-                    r"^documents/bulk_download/",
+                    "^documents/bulk_download/",
                     BulkDownloadView.as_view(),
                     name="bulk_download",
                 ),
                 re_path(
-                    r"^remote_version/",
+                    "^remote_version/",
                     RemoteVersionView.as_view(),
                     name="remoteversion",
                 ),
+                re_path("^ui_settings/", UiSettingsView.as_view(), name="ui_settings"),
                 re_path(
-                    r"^ui_settings/",
-                    UiSettingsView.as_view(),
-                    name="ui_settings",
-                ),
-                re_path(
-                    r"^acknowledge_tasks/",
+                    "^acknowledge_tasks/",
                     AcknowledgeTasksView.as_view(),
                     name="acknowledge_tasks",
                 ),
                 re_path(
-                    r"^mail_accounts/test/",
+                    "^mail_accounts/test/",
                     MailAccountTestView.as_view(),
                     name="mail_accounts_test",
                 ),
                 path("token/", views.obtain_auth_token),
-            ]
-            + api_router.urls,
+                *api_router.urls,
+            ],
         ),
     ),
     re_path(r"^favicon.ico$", FaviconView.as_view(), name="favicon"),
index e80f66d7c034c44406d6fd23501415aa43fff3a7..07582cc0ff469047834ed5b26b21249fdfbdca89 100644 (file)
@@ -18,7 +18,16 @@ class MailAccountAdminForm(forms.ModelForm):
         widgets = {
             "password": forms.PasswordInput(),
         }
-        fields = "__all__"
+        fields = [
+            "name",
+            "imap_server",
+            "username",
+            "imap_security",
+            "imap_port",
+            "password",
+            "is_token",
+            "character_set",
+        ]
 
 
 class MailAccountAdmin(admin.ModelAdmin):
@@ -27,7 +36,10 @@ class MailAccountAdmin(admin.ModelAdmin):
 
     fieldsets = [
         (None, {"fields": ["name", "imap_server", "imap_port"]}),
-        (_("Authentication"), {"fields": ["imap_security", "username", "password"]}),
+        (
+            _("Authentication"),
+            {"fields": ["imap_security", "username", "password", "is_token"]},
+        ),
         (_("Advanced settings"), {"fields": ["character_set"]}),
     ]
     form = MailAccountAdminForm
index 06dd3ac6cb387fabc85dec3f99909dc94adba03a..d792b5a9776d55b649d37a6e25522428794117cc 100644 (file)
@@ -94,7 +94,7 @@ class BaseMailAction:
         """
         Perform mail action on the given mail uid in the mailbox.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
 
 
 class DeleteMailAction(BaseMailAction):
@@ -152,7 +152,7 @@ class TagMailAction(BaseMailAction):
             _, self.color = parameter.split(":")
             self.color = self.color.strip()
 
-            if not self.color.lower() in APPLE_MAIL_TAG_COLORS.keys():
+            if self.color.lower() not in APPLE_MAIL_TAG_COLORS.keys():
                 raise MailError("Not a valid AppleMail tag color.")
 
             self.keyword = None
@@ -274,7 +274,7 @@ def apply_mail_action(
             status="SUCCESS",
         )
 
-    except Exception as e:
+    except Exception:
         ProcessedMail.objects.create(
             owner=rule.owner,
             rule=rule,
@@ -285,7 +285,7 @@ def apply_mail_action(
             status="FAILED",
             error=traceback.format_exc(),
         )
-        raise e
+        raise
 
 
 @shared_task
@@ -548,7 +548,7 @@ class MailAccountHandler(LoggingMixin):
 
         self.log(
             "debug",
-            f"Rule {rule}: Searching folder with criteria " f"{str(criterias)}",
+            f"Rule {rule}: Searching folder with criteria {str(criterias)}",
         )
 
         try:
@@ -582,7 +582,7 @@ class MailAccountHandler(LoggingMixin):
             except Exception as e:
                 self.log(
                     "error",
-                    f"Rule {rule}: Error while processing mail " f"{message.uid}: {e}",
+                    f"Rule {rule}: Error while processing mail {message.uid}: {e}",
                     exc_info=True,
                 )
 
@@ -653,7 +653,7 @@ class MailAccountHandler(LoggingMixin):
         for att in message.attachments:
 
             if (
-                not att.content_disposition == "attachment"
+                att.content_disposition != "attachment"
                 and rule.attachment_type
                 == MailRule.AttachmentProcessing.ATTACHMENTS_ONLY
             ):
@@ -665,14 +665,13 @@ class MailAccountHandler(LoggingMixin):
                 )
                 continue
 
-            if rule.filter_attachment_filename:
+            if rule.filter_attachment_filename and not fnmatch(
+                att.filename.lower(),
+                rule.filter_attachment_filename.lower(),
+            ):
                 # Force the filename and pattern to the lowercase
                 # as this is system dependent otherwise
-                if not fnmatch(
-                    att.filename.lower(),
-                    rule.filter_attachment_filename.lower(),
-                ):
-                    continue
+                continue
 
             title = self._get_title(message, att, rule)
 
index ba1ab397c764b8dd60b72579b51a82d3315739d6..2d7a543b369f30f05a05bba61f682fb7b45510c7 100644 (file)
@@ -27,7 +27,8 @@ class Migration(migrations.Migration):
             model_name="mailrule",
             name="maximum_age",
             field=models.PositiveIntegerField(
-                default=30, help_text="Specified in days."
+                default=30,
+                help_text="Specified in days.",
             ),
         ),
     ]
index 1776ce4d02596ae92e979e0c82ef0df3c532d939..90d76884222fff4e030fa18d37e84848a8d80288 100644 (file)
@@ -160,35 +160,48 @@ class Migration(migrations.Migration):
             model_name="mailrule",
             name="filter_body",
             field=models.CharField(
-                blank=True, max_length=256, null=True, verbose_name="filter body"
+                blank=True,
+                max_length=256,
+                null=True,
+                verbose_name="filter body",
             ),
         ),
         migrations.AlterField(
             model_name="mailrule",
             name="filter_from",
             field=models.CharField(
-                blank=True, max_length=256, null=True, verbose_name="filter from"
+                blank=True,
+                max_length=256,
+                null=True,
+                verbose_name="filter from",
             ),
         ),
         migrations.AlterField(
             model_name="mailrule",
             name="filter_subject",
             field=models.CharField(
-                blank=True, max_length=256, null=True, verbose_name="filter subject"
+                blank=True,
+                max_length=256,
+                null=True,
+                verbose_name="filter subject",
             ),
         ),
         migrations.AlterField(
             model_name="mailrule",
             name="folder",
             field=models.CharField(
-                default="INBOX", max_length=256, verbose_name="folder"
+                default="INBOX",
+                max_length=256,
+                verbose_name="folder",
             ),
         ),
         migrations.AlterField(
             model_name="mailrule",
             name="maximum_age",
             field=models.PositiveIntegerField(
-                default=30, help_text="Specified in days.", verbose_name="maximum age"
+                default=30,
+                help_text="Specified in days.",
+                verbose_name="maximum age",
             ),
         ),
         migrations.AlterField(
index 811c7d90a30a9a32871ca90d366d1e59091fd55c..610237f3b4ee9653cc2b67483da20955e9b4b640 100644 (file)
@@ -14,7 +14,9 @@ class Migration(migrations.Migration):
             model_name="mailrule",
             name="assign_tags",
             field=models.ManyToManyField(
-                blank=True, to="documents.Tag", verbose_name="assign this tag"
+                blank=True,
+                to="documents.Tag",
+                verbose_name="assign this tag",
             ),
         ),
     ]
index ded498a8721245ca76fbfba4f4d3f3e51fa82b36..93ca64ddaa1102b8ec3d02bd0a95bbc0fecd53bf 100644 (file)
@@ -29,19 +29,25 @@ class Migration(migrations.Migration):
                 (
                     "folder",
                     models.CharField(
-                        editable=False, max_length=256, verbose_name="folder"
+                        editable=False,
+                        max_length=256,
+                        verbose_name="folder",
                     ),
                 ),
                 (
                     "uid",
                     models.CharField(
-                        editable=False, max_length=256, verbose_name="uid"
+                        editable=False,
+                        max_length=256,
+                        verbose_name="uid",
                     ),
                 ),
                 (
                     "subject",
                     models.CharField(
-                        editable=False, max_length=256, verbose_name="subject"
+                        editable=False,
+                        max_length=256,
+                        verbose_name="subject",
                     ),
                 ),
                 (
@@ -59,13 +65,18 @@ class Migration(migrations.Migration):
                 (
                     "status",
                     models.CharField(
-                        editable=False, max_length=256, verbose_name="status"
+                        editable=False,
+                        max_length=256,
+                        verbose_name="status",
                     ),
                 ),
                 (
                     "error",
                     models.TextField(
-                        blank=True, editable=False, null=True, verbose_name="error"
+                        blank=True,
+                        editable=False,
+                        null=True,
+                        verbose_name="error",
                     ),
                 ),
                 (
index 42be99452da2843c93b55f2f2712baa196e2ec0d..5089670b0a047b98680b0ca09500821c3a3be500 100644 (file)
@@ -13,7 +13,10 @@ class Migration(migrations.Migration):
             model_name="mailrule",
             name="filter_to",
             field=models.CharField(
-                blank=True, max_length=256, null=True, verbose_name="filter to"
+                blank=True,
+                max_length=256,
+                null=True,
+                verbose_name="filter to",
             ),
         ),
     ]
index 5a2f8c5728c579ed4a54cd43ebb310759e0f56d0..98a48ec5108b22077a9b2bdaafa4f918ee3ecaaa 100644 (file)
@@ -13,7 +13,8 @@ class Migration(migrations.Migration):
             model_name="mailaccount",
             name="is_token",
             field=models.BooleanField(
-                default=False, verbose_name="Is token authentication"
+                default=False,
+                verbose_name="Is token authentication",
             ),
         ),
     ]
index 2b263166440d0f7b4cea237c2b58d12c88e46852..c0dc1916a5c408e15b531086fac0dc99bb16c6a2 100644 (file)
@@ -69,7 +69,7 @@ class MailRule(document_models.ModelWithOwner):
 
     class AttachmentProcessing(models.IntegerChoices):
         ATTACHMENTS_ONLY = 1, _("Only process attachments.")
-        EVERYTHING = 2, _("Process all files, including 'inline' " "attachments.")
+        EVERYTHING = 2, _("Process all files, including 'inline' attachments.")
 
     class MailAction(models.IntegerChoices):
         DELETE = 1, _("Delete")
index f1ee263aa7d236a31b9c7fa451d752348922ceff..4ce8a601958204e05f58b08375dcfaf9c44d7263 100644 (file)
@@ -38,7 +38,7 @@ class MailDocumentParser(DocumentParser):
             except Exception as err:
                 raise ParseError(
                     f"Could not parse {document_path}: {err}",
-                )
+                ) from err
             if not self._parsed.from_values:
                 self._parsed = None
                 raise ParseError(
@@ -65,7 +65,7 @@ class MailDocumentParser(DocumentParser):
         except ParseError as e:
             self.log(
                 "warning",
-                f"Error while fetching document metadata for " f"{document_path}: {e}",
+                f"Error while fetching document metadata for {document_path}: {e}",
             )
             return result
 
@@ -132,7 +132,7 @@ class MailDocumentParser(DocumentParser):
 
             self.text += f"Attachments: {', '.join(att)}\n\n"
 
-        if mail.html != "":
+        if mail.html:
             self.text += "HTML content: " + strip_text(self.tika_parse(mail.html))
 
         self.text += f"\n\n{strip_text(mail.text)}"
@@ -153,7 +153,7 @@ class MailDocumentParser(DocumentParser):
             raise ParseError(
                 f"Could not parse content with tika server at "
                 f"{self.tika_server}: {err}",
-            )
+            ) from err
         if parsed["content"]:
             return parsed["content"]
         else:
@@ -167,7 +167,7 @@ class MailDocumentParser(DocumentParser):
 
         pdf_collection.append(("1_mail.pdf", self.generate_pdf_from_mail(mail)))
 
-        if mail.html == "":
+        if not mail.html:
             with open(pdf_path, "wb") as file:
                 file.write(pdf_collection[0][1])
                 file.close()
@@ -188,7 +188,7 @@ class MailDocumentParser(DocumentParser):
             response = requests.post(url_merge, files=files, headers=headers)
             response.raise_for_status()  # ensure we notice bad responses
         except Exception as err:
-            raise ParseError(f"Error while converting document to PDF: {err}")
+            raise ParseError(f"Error while converting document to PDF: {err}") from err
 
         with open(pdf_path, "wb") as file:
             file.write(response.content)
@@ -212,26 +212,26 @@ class MailDocumentParser(DocumentParser):
             return text
 
         data["subject"] = clean_html(mail.subject)
-        if data["subject"] != "":
+        if data["subject"]:
             data["subject_label"] = "Subject"
         data["from"] = clean_html(mail.from_values.full)
-        if data["from"] != "":
+        if data["from"]:
             data["from_label"] = "From"
         data["to"] = clean_html(", ".join(address.full for address in mail.to_values))
-        if data["to"] != "":
+        if data["to"]:
             data["to_label"] = "To"
         data["cc"] = clean_html(", ".join(address.full for address in mail.cc_values))
-        if data["cc"] != "":
+        if data["cc"]:
             data["cc_label"] = "CC"
         data["bcc"] = clean_html(", ".join(address.full for address in mail.bcc_values))
-        if data["bcc"] != "":
+        if data["bcc"]:
             data["bcc_label"] = "BCC"
 
         att = []
         for a in mail.attachments:
             att.append(f"{a.filename} ({format_size(a.size, binary=True)})")
         data["attachments"] = clean_html(", ".join(att))
-        if data["attachments"] != "":
+        if data["attachments"]:
             data["attachments_label"] = "Attachments"
 
         data["date"] = clean_html(mail.date.astimezone().strftime("%Y-%m-%d %H:%M"))
@@ -290,7 +290,9 @@ class MailDocumentParser(DocumentParser):
                 )
                 response.raise_for_status()  # ensure we notice bad responses
             except Exception as err:
-                raise ParseError(f"Error while converting document to PDF: {err}")
+                raise ParseError(
+                    f"Error while converting document to PDF: {err}",
+                ) from err
 
         return response.content
 
@@ -344,6 +346,6 @@ class MailDocumentParser(DocumentParser):
             )
             response.raise_for_status()  # ensure we notice bad responses
         except Exception as err:
-            raise ParseError(f"Error while converting document to PDF: {err}")
+            raise ParseError(f"Error while converting document to PDF: {err}") from err
 
         return response.content
index 133227468e5094d37b3fc30e23cc65a9c86a5bbf..e04a5c066116fc6a084c2fb982fa803b0186ba86 100644 (file)
@@ -38,9 +38,11 @@ class MailAccountSerializer(OwnedObjectSerializer):
         ]
 
     def update(self, instance, validated_data):
-        if "password" in validated_data:
-            if len(validated_data.get("password").replace("*", "")) == 0:
-                validated_data.pop("password")
+        if (
+            "password" in validated_data
+            and len(validated_data.get("password").replace("*", "")) == 0
+        ):
+            validated_data.pop("password")
         super().update(instance, validated_data)
         return instance
 
index 1af870156b27c72d02f62097da454a38d4dd5e6c..757bc5f4f1ce30f9dac5682de356357fc9cab3e3 100644 (file)
@@ -47,7 +47,7 @@ class TestMailLiveServer(TestCase):
 
         except MailError as e:
             self.fail(f"Failure: {e}")
-        except Exception as e:
+        except Exception:
             pass
 
     def test_process_non_gmail_server_tag(self):
@@ -66,5 +66,5 @@ class TestMailLiveServer(TestCase):
 
         except MailError as e:
             self.fail(f"Failure: {e}")
-        except Exception as e:
+        except Exception:
             pass
index e08f0ad1866d8843dd2e26ae36dcf0a80f66f91f..1f482f3367c2a0ac724da0e607ee5e82a382a104 100644 (file)
@@ -12,11 +12,8 @@ from unittest import mock
 from django.core.management import call_command
 from django.db import DatabaseError
 from django.test import TestCase
-from documents.data_models import ConsumableDocument
-from documents.data_models import DocumentMetadataOverrides
 from documents.models import Correspondent
 from documents.tests.utils import DirectoriesMixin
-from documents.tests.utils import DocumentConsumeDelayMixin
 from documents.tests.utils import FileSystemAssertsMixin
 from imap_tools import EmailAddress
 from imap_tools import FolderInfo
@@ -183,7 +180,7 @@ class BogusMailBox(ContextManager):
             )
             self.messages = list(filter(lambda m: m.uid not in uid_list, self.messages))
         else:
-            raise Exception()
+            raise Exception
 
 
 def fake_magic_from_buffer(buffer, mime=False):
@@ -769,7 +766,7 @@ class TestMail(
         with self.assertRaisesRegex(
             MailError,
             "Error while authenticating account",
-        ) as context:
+        ):
             self.mail_account_handler.handle_mail_account(account)
 
     def test_error_skip_account(self):
index 2cabb4ca97d5a4a920fa5a2f721b08c63e39e705..e2bee34ff50abadf090300a05127c98547ca91bf 100644 (file)
@@ -65,9 +65,8 @@ class MailAccountTestView(GenericAPIView):
             try:
                 mailbox_login(M, account)
                 return Response({"success": True})
-            except MailError as e:
+            except MailError:
                 logger.error(
-                    f"Mail account {account} test failed: {e}",
-                    exc_info=False,
+                    f"Mail account {account} test failed",
                 )
                 return HttpResponseBadRequest("Unable to connect to server")
index bbb25feb95debfc86639d4de7099485403a80ee3..f3e8e21fd2fae98a19a3cc02af0697cf5bc43fac 100644 (file)
@@ -56,7 +56,7 @@ class RasterisedDocumentParser(DocumentParser):
                 except Exception as e:
                     self.log(
                         "warning",
-                        f"Error while reading metadata {key}: {value}. Error: " f"{e}",
+                        f"Error while reading metadata {key}: {value}. Error: {e}",
                     )
         return result
 
@@ -160,11 +160,10 @@ class RasterisedDocumentParser(DocumentParser):
             return post_process_text(text)
 
         except Exception:
-            # TODO catch all for various issues with PDFminer.six.
             #  If pdftotext fails, fall back to OCR.
             self.log(
                 "warning",
-                "Error while getting text from PDF document with " "pdfminer.six",
+                "Error while getting text from PDF document with pdftotext",
                 exc_info=True,
             )
             # probably not a PDF file.
@@ -284,10 +283,13 @@ class RasterisedDocumentParser(DocumentParser):
     def parse(self, document_path: Path, mime_type, file_name=None):
         # This forces tesseract to use one core per page.
         os.environ["OMP_THREAD_LIMIT"] = "1"
+        VALID_TEXT_LENGTH = 50
 
         if mime_type == "application/pdf":
             text_original = self.extract_text(None, document_path)
-            original_has_text = text_original is not None and len(text_original) > 50
+            original_has_text = (
+                text_original is not None and len(text_original) > VALID_TEXT_LENGTH
+            )
         else:
             text_original = None
             original_has_text = False
index 4d46ad9a398260ebb687823baa3cc9cf344a977f..fdcbf76565c1f6437afe1929e1ca7716cdfca308 100644 (file)
@@ -8,7 +8,7 @@ from paperless_tesseract import check_default_language_available
 
 class TestChecks(TestCase):
     def test_default_language(self):
-        msgs = check_default_language_available(None)
+        check_default_language_available(None)
 
     @override_settings(OCR_LANGUAGE="")
     def test_no_language(self):
diff --git a/src/paperless_text/tests/__init__.py b/src/paperless_text/tests/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
index 39ab75d7938452ed1ac983f6ad654ec5bd6f9924..ea6a83f6c6601080ed4237ba71edd2c5da94c830 100644 (file)
@@ -39,7 +39,7 @@ class TikaDocumentParser(DocumentParser):
         except Exception as e:
             self.log(
                 "warning",
-                f"Error while fetching document metadata for " f"{document_path}: {e}",
+                f"Error while fetching document metadata for {document_path}: {e}",
             )
             return []
 
@@ -76,7 +76,7 @@ class TikaDocumentParser(DocumentParser):
         except Exception as e:
             self.log(
                 "warning",
-                f"Unable to extract date for document " f"{document_path}: {e}",
+                f"Unable to extract date for document {document_path}: {e}",
             )
 
         self.archive_path = self.convert_to_pdf(document_path, file_name)
diff --git a/src/paperless_tika/tests/__init__.py b/src/paperless_tika/tests/__init__.py
new file mode 100644 (file)
index 0000000..e69de29