git.ipfire.org Git - thirdparty/paperless-ngx.git/commitdiff
Chore(mypy): Annotate `None` returns for typing improvements (#11213)
author    Sebastian Steinbeißer <33968289+gothicVI@users.noreply.github.com>
Mon, 2 Feb 2026 16:44:12 +0000 (17:44 +0100)
committer GitHub <noreply@github.com>
Mon, 2 Feb 2026 16:44:12 +0000 (08:44 -0800)
113 files changed:
src/documents/apps.py
src/documents/bulk_edit.py
src/documents/classifier.py
src/documents/consumer.py
src/documents/data_models.py
src/documents/filters.py
src/documents/index.py
src/documents/loggers.py
src/documents/management/commands/convert_mariadb_uuid.py
src/documents/management/commands/document_exporter.py
src/documents/management/commands/document_importer.py
src/documents/management/commands/document_thumbnails.py
src/documents/management/commands/manage_superuser.py
src/documents/management/commands/mixins.py
src/documents/matching.py
src/documents/models.py
src/documents/parsers.py
src/documents/permissions.py
src/documents/sanity_checker.py
src/documents/serialisers.py
src/documents/signals/handlers.py
src/documents/tasks.py
src/documents/tests/test_admin.py
src/documents/tests/test_api_app_config.py
src/documents/tests/test_api_bulk_download.py
src/documents/tests/test_api_bulk_edit.py
src/documents/tests/test_api_custom_fields.py
src/documents/tests/test_api_documents.py
src/documents/tests/test_api_email.py
src/documents/tests/test_api_filter_by_custom_fields.py
src/documents/tests/test_api_objects.py
src/documents/tests/test_api_permissions.py
src/documents/tests/test_api_profile.py
src/documents/tests/test_api_remote_version.py
src/documents/tests/test_api_schema.py
src/documents/tests/test_api_search.py
src/documents/tests/test_api_status.py
src/documents/tests/test_api_tasks.py
src/documents/tests/test_api_trash.py
src/documents/tests/test_api_uisettings.py
src/documents/tests/test_api_workflows.py
src/documents/tests/test_barcodes.py
src/documents/tests/test_bulk_edit.py
src/documents/tests/test_caching.py
src/documents/tests/test_checks.py
src/documents/tests/test_classifier.py
src/documents/tests/test_consumer.py
src/documents/tests/test_date_parsing.py
src/documents/tests/test_delayedquery.py
src/documents/tests/test_document_model.py
src/documents/tests/test_double_sided.py
src/documents/tests/test_file_handling.py
src/documents/tests/test_filters.py
src/documents/tests/test_index.py
src/documents/tests/test_management.py
src/documents/tests/test_management_exporter.py
src/documents/tests/test_management_fuzzy.py
src/documents/tests/test_management_importer.py
src/documents/tests/test_management_retagger.py
src/documents/tests/test_management_superuser.py
src/documents/tests/test_management_thumbnails.py
src/documents/tests/test_matchables.py
src/documents/tests/test_migration_share_link_bundle.py
src/documents/tests/test_models.py
src/documents/tests/test_parsers.py
src/documents/tests/test_sanity_check.py
src/documents/tests/test_share_link_bundles.py
src/documents/tests/test_tag_hierarchy.py
src/documents/tests/test_task_signals.py
src/documents/tests/test_tasks.py
src/documents/tests/test_views.py
src/documents/tests/test_workflows.py
src/documents/tests/utils.py
src/documents/views.py
src/paperless/apps.py
src/paperless/auth.py
src/paperless/consumers.py
src/paperless/db_cache.py
src/paperless/tests/test_adapter.py
src/paperless/tests/test_checks.py
src/paperless/tests/test_db_cache.py
src/paperless/tests/test_remote_user.py
src/paperless/tests/test_settings.py
src/paperless/tests/test_signals.py
src/paperless/tests/test_websockets.py
src/paperless_ai/client.py
src/paperless_ai/tests/test_ai_indexing.py
src/paperless_ai/tests/test_chat.py
src/paperless_ai/tests/test_matching.py
src/paperless_mail/apps.py
src/paperless_mail/mail.py
src/paperless_mail/parsers.py
src/paperless_mail/preprocessor.py
src/paperless_mail/tests/test_api.py
src/paperless_mail/tests/test_mail.py
src/paperless_mail/tests/test_mail_oauth.py
src/paperless_mail/tests/test_parsers.py
src/paperless_mail/tests/test_preprocessor.py
src/paperless_remote/apps.py
src/paperless_remote/tests/test_checks.py
src/paperless_remote/tests/test_parser.py
src/paperless_tesseract/apps.py
src/paperless_tesseract/parsers.py
src/paperless_tesseract/tests/test_checks.py
src/paperless_tesseract/tests/test_parser.py
src/paperless_tesseract/tests/test_parser_custom_settings.py
src/paperless_text/apps.py
src/paperless_text/parsers.py
src/paperless_text/tests/test_parser.py
src/paperless_tika/apps.py
src/paperless_tika/parsers.py
src/paperless_tika/tests/test_live_tika.py
src/paperless_tika/tests/test_tika_parser.py
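
The change is mechanical throughout: functions and methods that only ever return None get an explicit `-> None` return annotation. A minimal, self-contained sketch of why this matters for mypy follows; the function names are hypothetical and not taken from the paperless-ngx sources, and the project's actual mypy configuration is not shown in this commit.

    # Hypothetical illustration of the pattern applied across this commit.
    # Under mypy's default settings, the body of an unannotated def is not
    # type-checked at all, so errors inside it can go unnoticed.
    def mark_done_untyped(task):
        return None

    # With an explicit `-> None`, mypy checks the body, and any caller that
    # tries to use the (non-existent) result, e.g. `value = mark_done("x")`,
    # is reported (error code: func-returns-value).
    def mark_done(task: str) -> None:
        print(f"marking {task} as done")

    if __name__ == "__main__":
        mark_done("demo")

Stricter settings such as disallow_untyped_defs or disallow_incomplete_defs, if the project enables them, would flag every def that still lacks a return annotation, which is presumably what motivates a change of this breadth.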

index 32e49b160dd17fd406a802ee46fb9666e94baa78..d8200edac6b917a37e71610bd83c558bd0860d6b 100644 (file)
@@ -7,7 +7,7 @@ class DocumentsConfig(AppConfig):
 
     verbose_name = _("Documents")
 
-    def ready(self):
+    def ready(self) -> None:
         from documents.signals import document_consumption_finished
         from documents.signals import document_updated
         from documents.signals.handlers import add_inbox_tags
index 43cb132610ef0cfcb0f8123c80b2ba12baefd6c2..ec6217a0a5ff321eaf52bccf596b16f61c4e9e30 100644 (file)
@@ -721,7 +721,7 @@ def reflect_doclinks(
     document: Document,
     field: CustomField,
     target_doc_ids: list[int],
-):
+) -> None:
     """
     Add or remove 'symmetrical' links to `document` on all `target_doc_ids`
     """
@@ -784,7 +784,7 @@ def remove_doclink(
     document: Document,
     field: CustomField,
     target_doc_id: int,
-):
+) -> None:
     """
     Removes a 'symmetrical' link to `document` from the target document's existing custom field instance
     """
index 613c1d5adc8f39a97cc70f2013a73981860edf52..1e9da7ce6dcb9f8c1f37261159c280aa6b9f4a69 100644 (file)
@@ -122,7 +122,7 @@ class DocumentClassifier:
             )
         self._stop_words = None
 
-    def _update_data_vectorizer_hash(self):
+    def _update_data_vectorizer_hash(self) -> None:
         self.data_vectorizer_hash = sha256(
             pickle.dumps(self.data_vectorizer),
         ).hexdigest()
index 1ff60220bc414fbf8b5a60e9b3825223b67d1423..76aa293d09b36ce119590087762e65186756924e 100644 (file)
@@ -120,7 +120,7 @@ class ConsumerPluginMixin:
         status: ProgressStatusOptions,
         message: ConsumerStatusShortMessage | str | None = None,
         document_id=None,
-    ):  # pragma: no cover
+    ) -> None:  # pragma: no cover
         self.status_mgr.send_progress(
             status,
             message,
@@ -158,7 +158,7 @@ class ConsumerPlugin(
 ):
     logging_name = "paperless.consumer"
 
-    def run_pre_consume_script(self):
+    def run_pre_consume_script(self) -> None:
         """
         If one is configured and exists, run the pre-consume script and
         handle its output and/or errors
@@ -201,7 +201,7 @@ class ConsumerPlugin(
                 exception=e,
             )
 
-    def run_post_consume_script(self, document: Document):
+    def run_post_consume_script(self, document: Document) -> None:
         """
         If one is configured and exists, run the post-consume script and
         handle its output and/or errors
@@ -361,7 +361,10 @@ class ConsumerPlugin(
                 tempdir.cleanup()
             raise
 
-        def progress_callback(current_progress, max_progress):  # pragma: no cover
+        def progress_callback(
+            current_progress,
+            max_progress,
+        ) -> None:  # pragma: no cover
             # recalculate progress to be within 20 and 80
             p = int((current_progress / max_progress) * 50 + 20)
             self._send_progress(p, 100, ProgressStatusOptions.WORKING)
@@ -670,7 +673,7 @@ class ConsumerPlugin(
 
         return document
 
-    def apply_overrides(self, document):
+    def apply_overrides(self, document) -> None:
         if self.metadata.correspondent_id:
             document.correspondent = Correspondent.objects.get(
                 pk=self.metadata.correspondent_id,
@@ -730,7 +733,7 @@ class ConsumerPlugin(
                 }
                 CustomFieldInstance.objects.create(**args)  # adds to document
 
-    def _write(self, source, target):
+    def _write(self, source, target) -> None:
         with (
             Path(source).open("rb") as read_file,
             Path(target).open("wb") as write_file,
@@ -755,7 +758,7 @@ class ConsumerPreflightPlugin(
     NAME: str = "ConsumerPreflightPlugin"
     logging_name = "paperless.consumer"
 
-    def pre_check_file_exists(self):
+    def pre_check_file_exists(self) -> None:
         """
         Confirm the input file still exists where it should
         """
@@ -769,7 +772,7 @@ class ConsumerPreflightPlugin(
                 f"Cannot consume {self.input_doc.original_file}: File not found.",
             )
 
-    def pre_check_duplicate(self):
+    def pre_check_duplicate(self) -> None:
         """
         Using the MD5 of the file, check this exact file doesn't already exist
         """
@@ -819,7 +822,7 @@ class ConsumerPreflightPlugin(
                     failure_msg,
                 )
 
-    def pre_check_directories(self):
+    def pre_check_directories(self) -> None:
         """
         Ensure all required directories exist before attempting to use them
         """
@@ -828,7 +831,7 @@ class ConsumerPreflightPlugin(
         settings.ORIGINALS_DIR.mkdir(parents=True, exist_ok=True)
         settings.ARCHIVE_DIR.mkdir(parents=True, exist_ok=True)
 
-    def pre_check_asn_value(self):
+    def pre_check_asn_value(self) -> None:
         """
         Check that if override_asn is given, it is unique and within a valid range
         """
index a4b1150dd25d35b5bf9bc2d84862e71f5d679a54..7c023dc1354ae9ba988f39ac370fe56620b9a2b5 100644 (file)
@@ -164,7 +164,7 @@ class ConsumableDocument:
     mailrule_id: int | None = None
     mime_type: str = dataclasses.field(init=False, default=None)
 
-    def __post_init__(self):
+    def __post_init__(self) -> None:
         """
         After a dataclass is initialized, this is called to finalize some data
         1. Make sure the original path is an absolute, fully qualified path
index 9e53d01af4f749b1cad31f49a2c8ccb2a4510fab..f1713882c71677443e32e08bbe352edcf3c4e58c 100644 (file)
@@ -120,7 +120,7 @@ class StoragePathFilterSet(FilterSet):
 
 
 class ObjectFilter(Filter):
-    def __init__(self, *, exclude=False, in_list=False, field_name=""):
+    def __init__(self, *, exclude=False, in_list=False, field_name="") -> None:
         super().__init__()
         self.exclude = exclude
         self.in_list = in_list
@@ -255,7 +255,7 @@ class MimeTypeFilter(Filter):
 
 
 class SelectField(serializers.CharField):
-    def __init__(self, custom_field: CustomField):
+    def __init__(self, custom_field: CustomField) -> None:
         self._options = custom_field.extra_data["select_options"]
         super().__init__(max_length=16)
 
@@ -676,7 +676,7 @@ class CustomFieldQueryParser:
 
 @extend_schema_field(serializers.CharField)
 class CustomFieldQueryFilter(Filter):
-    def __init__(self, validation_prefix):
+    def __init__(self, validation_prefix) -> None:
         """
         A filter that filters documents based on custom field name and value.
 
index 8afc31fe9af8cfd5342e481faff2d5c3c98a9314..be944b48bff85971b17dd5721978a869a87b276d 100644 (file)
@@ -414,13 +414,13 @@ class DelayedQuery:
 
 
 class ManualResultsPage(list):
-    def __init__(self, hits):
+    def __init__(self, hits) -> None:
         super().__init__(hits)
         self.results = ManualResults(hits)
 
 
 class ManualResults:
-    def __init__(self, hits):
+    def __init__(self, hits) -> None:
         self._docnums = [hit.docnum for hit in hits]
 
     def docs(self):
index 87ee58868d21c469e99c54ee6d4dcbde10286931..f30c823f1673330195dbef17c7bf9b4c4570ec24 100644 (file)
@@ -3,7 +3,7 @@ import uuid
 
 
 class LoggingMixin:
-    def renew_logging_group(self):
+    def renew_logging_group(self) -> None:
         """
         Creates a new UUID to group subsequent log calls together with
         the extra data named group
index 76ccf9e76284edb25a42411f915fb76764257a04..3533d03f3e4caf0f9bc3dab025bc276b7e00c14a 100644 (file)
@@ -9,7 +9,7 @@ class Command(BaseCommand):
     # This code is taken almost entirely from https://github.com/wagtail/wagtail/pull/11912 with all credit to the original author.
     help = "Converts UUID columns from char type to the native UUID type used in MariaDB 10.7+ and Django 5.0+."
 
-    def convert_field(self, model, field_name, *, null=False):
+    def convert_field(self, model, field_name, *, null=False) -> None:
         if model._meta.get_field(field_name).model != model:  # pragma: no cover
             # Field is inherited from a parent model
             return
index 77b3b6416fbb66ca1426dfb1a30d69787979949e..bd962efc4df97af644f165e0d529f6ee8fa54635 100644 (file)
@@ -67,7 +67,7 @@ class Command(CryptMixin, BaseCommand):
         "easy import."
     )
 
-    def add_arguments(self, parser):
+    def add_arguments(self, parser) -> None:
         parser.add_argument("target")
 
         parser.add_argument(
@@ -186,7 +186,7 @@ class Command(CryptMixin, BaseCommand):
             help="If provided, is used to encrypt sensitive data in the export",
         )
 
-    def handle(self, *args, **options):
+    def handle(self, *args, **options) -> None:
         self.target = Path(options["target"]).resolve()
         self.split_manifest: bool = options["split_manifest"]
         self.compare_checksums: bool = options["compare_checksums"]
@@ -244,7 +244,7 @@ class Command(CryptMixin, BaseCommand):
             if self.zip_export and temp_dir is not None:
                 temp_dir.cleanup()
 
-    def dump(self):
+    def dump(self) -> None:
         # 1. Take a snapshot of what files exist in the current export folder
         for x in self.target.glob("**/*"):
             if x.is_file():
@@ -498,7 +498,7 @@ class Command(CryptMixin, BaseCommand):
         self,
         content: list[dict] | dict,
         target: Path,
-    ):
+    ) -> None:
         """
         Writes the source content to the target json file.
         If --compare-json arg was used, don't write to target file if
@@ -528,7 +528,7 @@ class Command(CryptMixin, BaseCommand):
         source: Path,
         source_checksum: str | None,
         target: Path,
-    ):
+    ) -> None:
         """
         Copies the source to the target, if target doesn't exist or the target doesn't seem to match
         the source attributes
index ba3d793b37e62030d82d114be499104804b2ba7c..5cd7435902f330365b08b1efdd654f64f79e0573 100644 (file)
@@ -246,7 +246,7 @@ class Command(CryptMixin, BaseCommand):
                 self.source = Path(tmp_dir)
             self._run_import()
 
-    def _run_import(self):
+    def _run_import(self) -> None:
         self.pre_check()
         self.load_metadata()
         self.load_manifest_files()
index d4653f0b3639e5a5c59d072a96949a6579ed25cc..e50c837d3e6af66b5121567ae6a2da97caf84d8b 100644 (file)
@@ -12,7 +12,7 @@ from documents.models import Document
 from documents.parsers import get_parser_class_for_mime_type
 
 
-def _process_document(doc_id):
+def _process_document(doc_id) -> None:
     document: Document = Document.objects.get(id=doc_id)
     parser_class = get_parser_class_for_mime_type(document.mime_type)
 
@@ -37,7 +37,7 @@ def _process_document(doc_id):
 class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
     help = "This will regenerate the thumbnails for all documents."
 
-    def add_arguments(self, parser):
+    def add_arguments(self, parser) -> None:
         parser.add_argument(
             "-d",
             "--document",
index e0d23843853afa37fbd3dfa6cc6bfc4059a60ebc..3a81a47c52f517f4f039de08442a94241dd04019 100644 (file)
@@ -25,7 +25,7 @@ class Command(BaseCommand):
         parser.formatter_class = RawTextHelpFormatter
         return parser
 
-    def handle(self, *args, **options):
+    def handle(self, *args, **options) -> None:
         username = os.getenv("PAPERLESS_ADMIN_USER", "admin")
         mail = os.getenv("PAPERLESS_ADMIN_MAIL", "root@localhost")
         password = os.getenv("PAPERLESS_ADMIN_PASSWORD")
index a2ad326e44df114bf35fcfb336c7f595360bfb65..109f3aea7276adef3e5c24d985bf8a8270f59803 100644 (file)
@@ -27,7 +27,7 @@ class MultiProcessMixin:
     for the use of multiple processes
     """
 
-    def add_argument_processes_mixin(self, parser: ArgumentParser):
+    def add_argument_processes_mixin(self, parser: ArgumentParser) -> None:
         parser.add_argument(
             "--processes",
             default=max(1, os.cpu_count() // 4),
@@ -35,7 +35,7 @@ class MultiProcessMixin:
             help="Number of processes to distribute work amongst",
         )
 
-    def handle_processes_mixin(self, *args, **options):
+    def handle_processes_mixin(self, *args, **options) -> None:
         self.process_count = options["processes"]
         if self.process_count < 1:
             raise CommandError("There must be at least 1 process")
@@ -47,7 +47,7 @@ class ProgressBarMixin:
     via this class
     """
 
-    def add_argument_progress_bar_mixin(self, parser: ArgumentParser):
+    def add_argument_progress_bar_mixin(self, parser: ArgumentParser) -> None:
         parser.add_argument(
             "--no-progress-bar",
             default=False,
@@ -55,7 +55,7 @@ class ProgressBarMixin:
             help="If set, the progress bar will not be shown",
         )
 
-    def handle_progress_bar_mixin(self, *args, **options):
+    def handle_progress_bar_mixin(self, *args, **options) -> None:
         self.no_progress_bar = options["no_progress_bar"]
         self.use_progress_bar = not self.no_progress_bar
 
@@ -120,7 +120,7 @@ class CryptMixin:
             },
         }
 
-    def load_crypt_params(self, metadata: dict):
+    def load_crypt_params(self, metadata: dict) -> None:
         # Load up the values for setting up decryption
         self.kdf_algorithm: str = metadata[EXPORTER_CRYPTO_SETTINGS_NAME][
             EXPORTER_CRYPTO_ALGO_NAME
@@ -135,7 +135,7 @@ class CryptMixin:
             EXPORTER_CRYPTO_SALT_NAME
         ]
 
-    def setup_crypto(self, *, passphrase: str, salt: str | None = None):
+    def setup_crypto(self, *, passphrase: str, salt: str | None = None) -> None:
         """
         Constructs a class for encryption or decryption using the specified passphrase and salt
 
index 9276ad5838f7a31cfd49974d81d522146207a150..a8cc798110a80c028d014b752fec8d501ada7693 100644 (file)
@@ -34,7 +34,7 @@ def log_reason(
     matching_model: MatchingModel | WorkflowTrigger,
     document: Document,
     reason: str,
-):
+) -> None:
     class_name = type(matching_model).__name__
     name = (
         matching_model.name if hasattr(matching_model, "name") else str(matching_model)
index 72470ef6e2c7153bb23e4a8f6b03ca1fbc30b3f5..2e187e98c23303c34839da8c31120e300720e5fe 100644 (file)
@@ -118,7 +118,7 @@ class Tag(MatchingModel, TreeNodeModel):
         verbose_name = _("tag")
         verbose_name_plural = _("tags")
 
-    def clean(self):
+    def clean(self) -> None:
         # Prevent self-parenting and assigning a descendant as parent
         parent = self.get_parent()
         if parent == self:
@@ -410,7 +410,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
     def created_date(self):
         return self.created
 
-    def add_nested_tags(self, tags):
+    def add_nested_tags(self, tags) -> None:
         tag_ids = set()
         for tag in tags:
             tag_ids.add(tag.id)
@@ -862,7 +862,7 @@ class ShareLinkBundle(models.Model):
             return None
         return (settings.SHARE_LINK_BUNDLE_DIR / Path(self.file_path)).resolve()
 
-    def remove_file(self):
+    def remove_file(self) -> None:
         if self.absolute_file_path is not None and self.absolute_file_path.exists():
             try:
                 self.absolute_file_path.unlink()
index f6417e285b11b2ba871b869a2671c149f27b50db..211fb61fe98a8a28647d111d4f2baf14d2f197d3 100644 (file)
@@ -340,7 +340,7 @@ class DocumentParser(LoggingMixin):
 
     logging_name = "paperless.parsing"
 
-    def __init__(self, logging_group, progress_callback=None):
+    def __init__(self, logging_group, progress_callback=None) -> None:
         super().__init__()
         self.renew_logging_group()
         self.logging_group = logging_group
@@ -355,7 +355,7 @@ class DocumentParser(LoggingMixin):
         self.date: datetime.datetime | None = None
         self.progress_callback = progress_callback
 
-    def progress(self, current_progress, max_progress):
+    def progress(self, current_progress, max_progress) -> None:
         if self.progress_callback:
             self.progress_callback(current_progress, max_progress)
 
@@ -380,7 +380,7 @@ class DocumentParser(LoggingMixin):
     def extract_metadata(self, document_path, mime_type):
         return []
 
-    def get_page_count(self, document_path, mime_type):
+    def get_page_count(self, document_path, mime_type) -> None:
         return None
 
     def parse(self, document_path, mime_type, file_name=None):
@@ -401,6 +401,6 @@ class DocumentParser(LoggingMixin):
     def get_date(self) -> datetime.datetime | None:
         return self.date
 
-    def cleanup(self):
+    def cleanup(self) -> None:
         self.log.debug(f"Deleting directory {self.tempdir}")
         shutil.rmtree(self.tempdir)
index 9d5c9eb68b6ea21e87d3e979b946fa7cadf95072..a47762c46570dfa94e7ab724cf1d13a4cdafc420 100644 (file)
@@ -61,7 +61,12 @@ def get_groups_with_only_permission(obj, codename):
     return Group.objects.filter(id__in=group_object_perm_group_ids).distinct()
 
 
-def set_permissions_for_object(permissions: dict, object, *, merge: bool = False):
+def set_permissions_for_object(
+    permissions: dict,
+    object,
+    *,
+    merge: bool = False,
+) -> None:
     """
     Set permissions for an object. The permissions are given as a mapping of actions
     to a dict of user / group id lists, e.g.
index 5e5510971d81a808b748b6f523ef6c9e318c184e..08763d937d591228cad98a679568eacac923b6e9 100644 (file)
@@ -16,23 +16,23 @@ from paperless.config import GeneralConfig
 
 
 class SanityCheckMessages:
-    def __init__(self):
+    def __init__(self) -> None:
         self._messages: dict[int, list[dict]] = defaultdict(list)
         self.has_error = False
         self.has_warning = False
 
-    def error(self, doc_pk, message):
+    def error(self, doc_pk, message) -> None:
         self._messages[doc_pk].append({"level": logging.ERROR, "message": message})
         self.has_error = True
 
-    def warning(self, doc_pk, message):
+    def warning(self, doc_pk, message) -> None:
         self._messages[doc_pk].append({"level": logging.WARNING, "message": message})
         self.has_warning = True
 
-    def info(self, doc_pk, message):
+    def info(self, doc_pk, message) -> None:
         self._messages[doc_pk].append({"level": logging.INFO, "message": message})
 
-    def log_messages(self):
+    def log_messages(self) -> None:
         logger = logging.getLogger("paperless.sanity_checker")
 
         if len(self._messages) == 0:
index f7ed197da797aa74e5b95194a8d87f8b86939e1f..cfd2ad3cfaef0975d7f8296ca611801fab63cded 100644 (file)
@@ -101,7 +101,7 @@ class DynamicFieldsModelSerializer(serializers.ModelSerializer):
     controls which fields should be displayed.
     """
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         # Don't pass the 'fields' arg up to the superclass
         fields = kwargs.pop("fields", None)
 
@@ -205,12 +205,12 @@ class SetPermissionsMixin:
                     del permissions_dict[action]
         return permissions_dict
 
-    def _set_permissions(self, permissions, object):
+    def _set_permissions(self, permissions, object) -> None:
         set_permissions_for_object(permissions, object)
 
 
 class SerializerWithPerms(serializers.Serializer):
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         self.user = kwargs.pop("user", None)
         self.full_perms = kwargs.pop("full_perms", False)
         self.all_fields = kwargs.pop("all_fields", False)
@@ -259,7 +259,7 @@ class OwnedObjectSerializer(
     serializers.ModelSerializer,
     SetPermissionsMixin,
 ):
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         super().__init__(*args, **kwargs)
 
         if not self.all_fields:
@@ -409,7 +409,7 @@ class OwnedObjectSerializer(
     )
     # other methods in mixin
 
-    def validate_unique_together(self, validated_data, instance=None):
+    def validate_unique_together(self, validated_data, instance=None) -> None:
         # workaround for https://github.com/encode/django-rest-framework/issues/9358
         if "owner" in validated_data and "name" in self.Meta.fields:
             name = validated_data.get("name", instance.name if instance else None)
@@ -720,7 +720,7 @@ class StoragePathField(serializers.PrimaryKeyRelatedField):
 
 
 class CustomFieldSerializer(serializers.ModelSerializer):
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         context = kwargs.get("context")
         self.api_version = int(
             context.get("request").version
@@ -846,7 +846,7 @@ class ReadWriteSerializerMethodField(serializers.SerializerMethodField):
     Based on https://stackoverflow.com/a/62579804
     """
 
-    def __init__(self, method_name=None, *args, **kwargs):
+    def __init__(self, method_name=None, *args, **kwargs) -> None:
         self.method_name = method_name
         kwargs["source"] = "*"
         super(serializers.SerializerMethodField, self).__init__(*args, **kwargs)
@@ -1261,7 +1261,7 @@ class DocumentSerializer(
         CustomFieldInstance.deleted_objects.filter(document=instance).delete()
         return instance
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         self.truncate_content = kwargs.pop("truncate_content", False)
 
         # return full permissions if we're doing a PATCH or PUT
@@ -1456,7 +1456,7 @@ class DocumentListSerializer(serializers.Serializer):
         child=serializers.IntegerField(),
     )
 
-    def _validate_document_id_list(self, documents, name="documents"):
+    def _validate_document_id_list(self, documents, name="documents") -> None:
         if not isinstance(documents, list):
             raise serializers.ValidationError(f"{name} must be a list")
         if not all(isinstance(i, int) for i in documents):
@@ -1502,7 +1502,7 @@ class BulkEditSerializer(
 
     parameters = serializers.DictField(allow_empty=True, default={}, write_only=True)
 
-    def _validate_tag_id_list(self, tags, name="tags"):
+    def _validate_tag_id_list(self, tags, name="tags") -> None:
         if not isinstance(tags, list):
             raise serializers.ValidationError(f"{name} must be a list")
         if not all(isinstance(i, int) for i in tags):
@@ -1517,7 +1517,7 @@ class BulkEditSerializer(
         self,
         custom_fields,
         name="custom_fields",
-    ):
+    ) -> None:
         ids = custom_fields
         if isinstance(custom_fields, dict):
             try:
@@ -1576,7 +1576,7 @@ class BulkEditSerializer(
             # This will never happen as it is handled by the ChoiceField
             raise serializers.ValidationError("Unsupported method.")
 
-    def _validate_parameters_tags(self, parameters):
+    def _validate_parameters_tags(self, parameters) -> None:
         if "tag" in parameters:
             tag_id = parameters["tag"]
             try:
@@ -1586,7 +1586,7 @@ class BulkEditSerializer(
         else:
             raise serializers.ValidationError("tag not specified")
 
-    def _validate_parameters_document_type(self, parameters):
+    def _validate_parameters_document_type(self, parameters) -> None:
         if "document_type" in parameters:
             document_type_id = parameters["document_type"]
             if document_type_id is None:
@@ -1599,7 +1599,7 @@ class BulkEditSerializer(
         else:
             raise serializers.ValidationError("document_type not specified")
 
-    def _validate_parameters_correspondent(self, parameters):
+    def _validate_parameters_correspondent(self, parameters) -> None:
         if "correspondent" in parameters:
             correspondent_id = parameters["correspondent"]
             if correspondent_id is None:
@@ -1611,7 +1611,7 @@ class BulkEditSerializer(
         else:
             raise serializers.ValidationError("correspondent not specified")
 
-    def _validate_storage_path(self, parameters):
+    def _validate_storage_path(self, parameters) -> None:
         if "storage_path" in parameters:
             storage_path_id = parameters["storage_path"]
             if storage_path_id is None:
@@ -1625,7 +1625,7 @@ class BulkEditSerializer(
         else:
             raise serializers.ValidationError("storage path not specified")
 
-    def _validate_parameters_modify_tags(self, parameters):
+    def _validate_parameters_modify_tags(self, parameters) -> None:
         if "add_tags" in parameters:
             self._validate_tag_id_list(parameters["add_tags"], "add_tags")
         else:
@@ -1636,7 +1636,7 @@ class BulkEditSerializer(
         else:
             raise serializers.ValidationError("remove_tags not specified")
 
-    def _validate_parameters_modify_custom_fields(self, parameters):
+    def _validate_parameters_modify_custom_fields(self, parameters) -> None:
         if "add_custom_fields" in parameters:
             self._validate_custom_field_id_list_or_dict(
                 parameters["add_custom_fields"],
@@ -1659,7 +1659,7 @@ class BulkEditSerializer(
             raise serializers.ValidationError("Specified owner cannot be found")
         return ownerUser
 
-    def _validate_parameters_set_permissions(self, parameters):
+    def _validate_parameters_set_permissions(self, parameters) -> None:
         parameters["set_permissions"] = self.validate_set_permissions(
             parameters["set_permissions"],
         )
@@ -1668,7 +1668,7 @@ class BulkEditSerializer(
         if "merge" not in parameters:
             parameters["merge"] = False
 
-    def _validate_parameters_rotate(self, parameters):
+    def _validate_parameters_rotate(self, parameters) -> None:
         try:
             if (
                 "degrees" not in parameters
@@ -1678,7 +1678,7 @@ class BulkEditSerializer(
         except ValueError:
             raise serializers.ValidationError("invalid rotation degrees")
 
-    def _validate_parameters_split(self, parameters):
+    def _validate_parameters_split(self, parameters) -> None:
         if "pages" not in parameters:
             raise serializers.ValidationError("pages not specified")
         try:
@@ -1707,7 +1707,7 @@ class BulkEditSerializer(
         else:
             parameters["delete_originals"] = False
 
-    def _validate_parameters_delete_pages(self, parameters):
+    def _validate_parameters_delete_pages(self, parameters) -> None:
         if "pages" not in parameters:
             raise serializers.ValidationError("pages not specified")
         if not isinstance(parameters["pages"], list):
@@ -1715,7 +1715,7 @@ class BulkEditSerializer(
         if not all(isinstance(i, int) for i in parameters["pages"]):
             raise serializers.ValidationError("pages must be a list of integers")
 
-    def _validate_parameters_merge(self, parameters):
+    def _validate_parameters_merge(self, parameters) -> None:
         if "delete_originals" in parameters:
             if not isinstance(parameters["delete_originals"], bool):
                 raise serializers.ValidationError("delete_originals must be a boolean")
@@ -1727,7 +1727,7 @@ class BulkEditSerializer(
         else:
             parameters["archive_fallback"] = False
 
-    def _validate_parameters_edit_pdf(self, parameters, document_id):
+    def _validate_parameters_edit_pdf(self, parameters, document_id) -> None:
         if "operations" not in parameters:
             raise serializers.ValidationError("operations not specified")
         if not isinstance(parameters["operations"], list):
@@ -2212,7 +2212,7 @@ class AcknowledgeTasksViewSerializer(serializers.Serializer):
         child=serializers.IntegerField(),
     )
 
-    def _validate_task_id_list(self, tasks, name="tasks"):
+    def _validate_task_id_list(self, tasks, name="tasks") -> None:
         if not isinstance(tasks, list):
             raise serializers.ValidationError(f"{name} must be a list")
         if not all(isinstance(i, int) for i in tasks):
@@ -2417,7 +2417,7 @@ class BulkEditObjectsSerializer(SerializerWithPerms, SetPermissionsMixin):
             )
         return objects
 
-    def _validate_permissions(self, permissions):
+    def _validate_permissions(self, permissions) -> None:
         self.validate_set_permissions(
             permissions,
         )
@@ -2529,7 +2529,7 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
         return attrs
 
     @staticmethod
-    def normalize_workflow_trigger_sources(trigger):
+    def normalize_workflow_trigger_sources(trigger) -> None:
         """
         Convert sources to strings to handle django-multiselectfield v1.0 changes
         """
@@ -2703,7 +2703,12 @@ class WorkflowSerializer(serializers.ModelSerializer):
             "actions",
         ]
 
-    def update_triggers_and_actions(self, instance: Workflow, triggers, actions):
+    def update_triggers_and_actions(
+        self,
+        instance: Workflow,
+        triggers,
+        actions,
+    ) -> None:
         set_triggers = []
         set_actions = []
 
@@ -2863,7 +2868,7 @@ class WorkflowSerializer(serializers.ModelSerializer):
             instance.actions.set(set_actions)
         instance.save()
 
-    def prune_triggers_and_actions(self):
+    def prune_triggers_and_actions(self) -> None:
         """
         ManyToMany fields dont support e.g. on_delete so we need to discard unattached
         triggers and actions manually
index cfd2f185b5d92c628e79e656f7d5ee90e6823d29..8ef5cad04530418b1c4f620ef8ee065e463d36d2 100644 (file)
@@ -64,7 +64,7 @@ if TYPE_CHECKING:
 logger = logging.getLogger("paperless.handlers")
 
 
-def add_inbox_tags(sender, document: Document, logging_group=None, **kwargs):
+def add_inbox_tags(sender, document: Document, logging_group=None, **kwargs) -> None:
     if document.owner is not None:
         tags = get_objects_for_user_owner_aware(
             document.owner,
@@ -84,7 +84,7 @@ def _suggestion_printer(
     document: Document,
     selected: MatchingModel,
     base_url: str | None = None,
-):
+) -> None:
     """
     Smaller helper to reduce duplication when just outputting suggestions to the console
     """
@@ -110,7 +110,7 @@ def set_correspondent(
     stdout=None,
     style_func=None,
     **kwargs,
-):
+) -> None:
     if document.correspondent and not replace:
         return
 
@@ -166,7 +166,7 @@ def set_document_type(
     stdout=None,
     style_func=None,
     **kwargs,
-):
+) -> None:
     if document.document_type and not replace:
         return
 
@@ -222,7 +222,7 @@ def set_tags(
     stdout=None,
     style_func=None,
     **kwargs,
-):
+) -> None:
     if replace:
         Document.tags.through.objects.filter(document=document).exclude(
             Q(tag__is_inbox_tag=True),
@@ -279,7 +279,7 @@ def set_storage_path(
     stdout=None,
     style_func=None,
     **kwargs,
-):
+) -> None:
     if document.storage_path and not replace:
         return
 
@@ -327,7 +327,7 @@ def set_storage_path(
 
 
 # see empty_trash in documents/tasks.py for signal handling
-def cleanup_document_deletion(sender, instance, **kwargs):
+def cleanup_document_deletion(sender, instance, **kwargs) -> None:
     with FileLock(settings.MEDIA_LOCK):
         if settings.EMPTY_TRASH_DIR:
             # Find a non-conflicting filename in case a document with the same
@@ -415,13 +415,13 @@ def update_filename_and_move_files(
     sender,
     instance: Document | CustomFieldInstance,
     **kwargs,
-):
+) -> None:
     if isinstance(instance, CustomFieldInstance):
         if not _filename_template_uses_custom_fields(instance.document):
             return
         instance = instance.document
 
-    def validate_move(instance, old_path: Path, new_path: Path, root: Path):
+    def validate_move(instance, old_path: Path, new_path: Path, root: Path) -> None:
         if not new_path.is_relative_to(root):
             msg = (
                 f"Document {instance!s}: Refusing to move file outside root {root}: "
@@ -594,7 +594,7 @@ def update_filename_and_move_files(
 
 
 @shared_task
-def process_cf_select_update(custom_field: CustomField):
+def process_cf_select_update(custom_field: CustomField) -> None:
     """
     Update documents tied to a select custom field:
 
@@ -620,7 +620,11 @@ def process_cf_select_update(custom_field: CustomField):
 
 # should be disabled in /src/documents/management/commands/document_importer.py handle
 @receiver(models.signals.post_save, sender=CustomField)
-def check_paths_and_prune_custom_fields(sender, instance: CustomField, **kwargs):
+def check_paths_and_prune_custom_fields(
+    sender,
+    instance: CustomField,
+    **kwargs,
+) -> None:
     """
     When a custom field is updated, check if we need to update any documents. Done async to avoid slowing down the save operation.
     """
@@ -633,7 +637,7 @@ def check_paths_and_prune_custom_fields(sender, instance: CustomField, **kwargs)
 
 
 @receiver(models.signals.post_delete, sender=CustomField)
-def cleanup_custom_field_deletion(sender, instance: CustomField, **kwargs):
+def cleanup_custom_field_deletion(sender, instance: CustomField, **kwargs) -> None:
     """
     When a custom field is deleted, ensure no saved views reference it.
     """
@@ -670,7 +674,7 @@ def update_llm_suggestions_cache(sender, instance, **kwargs):
 
 @receiver(models.signals.post_delete, sender=User)
 @receiver(models.signals.post_delete, sender=Group)
-def cleanup_user_deletion(sender, instance: User | Group, **kwargs):
+def cleanup_user_deletion(sender, instance: User | Group, **kwargs) -> None:
     """
     When a user or group is deleted, remove non-cascading references.
     At the moment, just the default permission settings in UiSettings.
@@ -713,7 +717,7 @@ def cleanup_user_deletion(sender, instance: User | Group, **kwargs):
             )
 
 
-def add_to_index(sender, document, **kwargs):
+def add_to_index(sender, document, **kwargs) -> None:
     from documents import index
 
     index.add_or_update_document(document)
@@ -725,7 +729,7 @@ def run_workflows_added(
     logging_group=None,
     original_file=None,
     **kwargs,
-):
+) -> None:
     run_workflows(
         trigger_type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
         document=document,
@@ -735,7 +739,12 @@ def run_workflows_added(
     )
 
 
-def run_workflows_updated(sender, document: Document, logging_group=None, **kwargs):
+def run_workflows_updated(
+    sender,
+    document: Document,
+    logging_group=None,
+    **kwargs,
+) -> None:
     run_workflows(
         trigger_type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
         document=document,
@@ -841,7 +850,7 @@ def run_workflows(
 
 
 @before_task_publish.connect
-def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs):
+def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs) -> None:
     """
     Creates the PaperlessTask object in a pending state.  This is sent before
     the task reaches the broker, but before it begins executing on a worker.
@@ -883,7 +892,7 @@ def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs):
 
 
 @task_prerun.connect
-def task_prerun_handler(sender=None, task_id=None, task=None, **kwargs):
+def task_prerun_handler(sender=None, task_id=None, task=None, **kwargs) -> None:
     """
 
     Updates the PaperlessTask to be started.  Sent before the task begins execution
@@ -913,7 +922,7 @@ def task_postrun_handler(
     retval=None,
     state=None,
     **kwargs,
-):
+) -> None:
     """
     Updates the result of the PaperlessTask.
 
@@ -942,7 +951,7 @@ def task_failure_handler(
     args=None,
     traceback=None,
     **kwargs,
-):
+) -> None:
     """
     Updates the result of a failed PaperlessTask.
 
@@ -962,7 +971,7 @@ def task_failure_handler(
 
 
 @worker_process_init.connect
-def close_connection_pool_on_worker_init(**kwargs):
+def close_connection_pool_on_worker_init(**kwargs) -> None:
     """
     Close the DB connection pool for each Celery child process after it starts.
 
index fc89117051c4c6bbade518ffed2e3fa822c1483a..91a266856588a0085620552c3aaec053e778a20c 100644 (file)
@@ -71,13 +71,13 @@ logger = logging.getLogger("paperless.tasks")
 
 
 @shared_task
-def index_optimize():
+def index_optimize() -> None:
     ix = index.open_index()
     writer = AsyncWriter(ix)
     writer.commit(optimize=True)
 
 
-def index_reindex(*, progress_bar_disable=False):
+def index_reindex(*, progress_bar_disable=False) -> None:
     documents = Document.objects.all()
 
     ix = index.open_index(recreate=True)
@@ -88,7 +88,7 @@ def index_reindex(*, progress_bar_disable=False):
 
 
 @shared_task
-def train_classifier(*, scheduled=True):
+def train_classifier(*, scheduled=True) -> None:
     task = PaperlessTask.objects.create(
         type=PaperlessTask.TaskType.SCHEDULED_TASK
         if scheduled
@@ -234,7 +234,7 @@ def sanity_check(*, scheduled=True, raise_on_error=True):
 
 
 @shared_task
-def bulk_update_documents(document_ids):
+def bulk_update_documents(document_ids) -> None:
     documents = Document.objects.filter(id__in=document_ids)
 
     ix = index.open_index()
@@ -261,7 +261,7 @@ def bulk_update_documents(document_ids):
 
 
 @shared_task
-def update_document_content_maybe_archive_file(document_id):
+def update_document_content_maybe_archive_file(document_id) -> None:
     """
     Re-creates OCR content and thumbnail for a document, and archive file if
     it exists.
@@ -373,7 +373,7 @@ def update_document_content_maybe_archive_file(document_id):
 
 
 @shared_task
-def empty_trash(doc_ids=None):
+def empty_trash(doc_ids=None) -> None:
     if doc_ids is None:
         logger.info("Emptying trash of all expired documents")
     documents = (
@@ -410,7 +410,7 @@ def empty_trash(doc_ids=None):
 
 
 @shared_task
-def check_scheduled_workflows():
+def check_scheduled_workflows() -> None:
     """
     Check and run all enabled scheduled workflows.
 
@@ -588,7 +588,7 @@ def llmindex_index(
     rebuild=False,
     scheduled=True,
     auto=False,
-):
+) -> None:
     ai_config = AIConfig()
     if ai_config.llm_index_enabled:
         task = PaperlessTask.objects.create(
@@ -624,17 +624,17 @@ def llmindex_index(
 
 
 @shared_task
-def update_document_in_llm_index(document):
+def update_document_in_llm_index(document) -> None:
     llm_index_add_or_update_document(document)
 
 
 @shared_task
-def remove_document_from_llm_index(document):
+def remove_document_from_llm_index(document) -> None:
     llm_index_remove_document(document)
 
 
 @shared_task
-def build_share_link_bundle(bundle_id: int):
+def build_share_link_bundle(bundle_id: int) -> None:
     try:
         bundle = (
             ShareLinkBundle.objects.filter(pk=bundle_id)
@@ -726,7 +726,7 @@ def build_share_link_bundle(bundle_id: int):
 
 
 @shared_task
-def cleanup_expired_share_link_bundles():
+def cleanup_expired_share_link_bundles() -> None:
     now = timezone.now()
     expired_qs = ShareLinkBundle.objects.filter(
         expiration__isnull=False,
index 61a579dc748c91125be46310eca4339f64dc4d07..de2f07df544298ac1cfa37ed686b4f602eb8462e 100644 (file)
@@ -27,7 +27,7 @@ class TestDocumentAdmin(DirectoriesMixin, TestCase):
         super().setUp()
         self.doc_admin = DocumentAdmin(model=Document, admin_site=AdminSite())
 
-    def test_save_model(self):
+    def test_save_model(self) -> None:
         doc = Document.objects.create(title="test")
 
         doc.title = "new title"
@@ -35,7 +35,7 @@ class TestDocumentAdmin(DirectoriesMixin, TestCase):
         self.assertEqual(Document.objects.get(id=doc.id).title, "new title")
         self.assertEqual(self.get_document_from_index(doc)["id"], doc.id)
 
-    def test_delete_model(self):
+    def test_delete_model(self) -> None:
         doc = Document.objects.create(title="test")
         index.add_or_update_document(doc)
         self.assertIsNotNone(self.get_document_from_index(doc))
@@ -45,7 +45,7 @@ class TestDocumentAdmin(DirectoriesMixin, TestCase):
         self.assertRaises(Document.DoesNotExist, Document.objects.get, id=doc.id)
         self.assertIsNone(self.get_document_from_index(doc))
 
-    def test_delete_queryset(self):
+    def test_delete_queryset(self) -> None:
         docs = []
         for i in range(42):
             doc = Document.objects.create(
@@ -67,7 +67,7 @@ class TestDocumentAdmin(DirectoriesMixin, TestCase):
         for doc in docs:
             self.assertIsNone(self.get_document_from_index(doc))
 
-    def test_created(self):
+    def test_created(self) -> None:
         doc = Document.objects.create(
             title="test",
             created=timezone.make_aware(timezone.datetime(2020, 4, 12)),
@@ -98,7 +98,7 @@ class TestPaperlessAdmin(DirectoriesMixin, TestCase):
         super().setUp()
         self.user_admin = PaperlessUserAdmin(model=User, admin_site=AdminSite())
 
-    def test_request_is_passed_to_form(self):
+    def test_request_is_passed_to_form(self) -> None:
         user = User.objects.create(username="test", is_superuser=False)
         non_superuser = User.objects.create(username="requestuser")
         request = types.SimpleNamespace(user=non_superuser)
@@ -106,7 +106,7 @@ class TestPaperlessAdmin(DirectoriesMixin, TestCase):
         form = formType(data={}, instance=user)
         self.assertEqual(form.request, request)
 
-    def test_only_superuser_can_change_superuser(self):
+    def test_only_superuser_can_change_superuser(self) -> None:
         superuser = User.objects.create_superuser(username="superuser", password="test")
         non_superuser = User.objects.create(username="requestuser")
         user = User.objects.create(username="test", is_superuser=False)
@@ -128,7 +128,7 @@ class TestPaperlessAdmin(DirectoriesMixin, TestCase):
         self.assertTrue(form.is_valid())
         self.assertEqual({}, form.errors)
 
-    def test_superuser_can_only_be_modified_by_superuser(self):
+    def test_superuser_can_only_be_modified_by_superuser(self) -> None:
         superuser = User.objects.create_superuser(username="superuser", password="test")
         user = User.objects.create(
             username="test",
index f2ed902f46a77cc4099d94f5575025097fdc8637..7717c3488ca8ae9df5bd4e94dfbcceaba05b0e11 100644 (file)
@@ -22,7 +22,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         user = User.objects.create_superuser(username="temp_admin")
         self.client.force_authenticate(user=user)
 
-    def test_api_get_config(self):
+    def test_api_get_config(self) -> None:
         """
         GIVEN:
             - API request to get app config
@@ -78,7 +78,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
             },
         )
 
-    def test_api_get_ui_settings_with_config(self):
+    def test_api_get_ui_settings_with_config(self) -> None:
         """
         GIVEN:
             - Existing config with app_title, app_logo specified
@@ -101,7 +101,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
             | response.data["settings"],
         )
 
-    def test_api_update_config(self):
+    def test_api_update_config(self) -> None:
         """
         GIVEN:
             - API request to update app config
@@ -124,7 +124,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         config = ApplicationConfiguration.objects.first()
         self.assertEqual(config.color_conversion_strategy, ColorConvertChoices.RGB)
 
-    def test_api_update_config_empty_fields(self):
+    def test_api_update_config_empty_fields(self) -> None:
         """
         GIVEN:
             - API request to update app config with empty string for user_args JSONField and language field
@@ -151,7 +151,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         self.assertEqual(config.language, None)
         self.assertEqual(config.barcode_tag_mapping, None)
 
-    def test_api_replace_app_logo(self):
+    def test_api_replace_app_logo(self) -> None:
         """
         GIVEN:
             - Existing config with app_logo specified
@@ -200,7 +200,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         )
         self.assertFalse(Path(old_logo.path).exists())
 
-    def test_api_rejects_malicious_svg_logo(self):
+    def test_api_rejects_malicious_svg_logo(self) -> None:
         """
         GIVEN:
             - An SVG logo containing a <script> tag
@@ -227,7 +227,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertIn("disallowed svg tag", str(response.data).lower())
 
-    def test_api_rejects_malicious_svg_with_style_javascript(self):
+    def test_api_rejects_malicious_svg_with_style_javascript(self) -> None:
         """
         GIVEN:
             - An SVG logo containing javascript: in style attribute
@@ -257,7 +257,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         )
         self.assertIn("style", str(response.data).lower())
 
-    def test_api_rejects_svg_with_style_expression(self):
+    def test_api_rejects_svg_with_style_expression(self) -> None:
         """
         GIVEN:
             - An SVG logo containing CSS expression() in style
@@ -283,7 +283,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertIn("disallowed", str(response.data).lower())
 
-    def test_api_rejects_svg_with_style_cdata_javascript(self):
+    def test_api_rejects_svg_with_style_cdata_javascript(self) -> None:
         """
         GIVEN:
             - An SVG logo with javascript: hidden in a CDATA style block
@@ -312,7 +312,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertIn("disallowed", str(response.data).lower())
 
-    def test_api_rejects_svg_with_style_import(self):
+    def test_api_rejects_svg_with_style_import(self) -> None:
         """
         GIVEN:
             - An SVG logo containing @import in style
@@ -338,7 +338,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertIn("disallowed", str(response.data).lower())
 
-    def test_api_accepts_valid_svg_with_safe_style(self):
+    def test_api_accepts_valid_svg_with_safe_style(self) -> None:
         """
         GIVEN:
             - A valid SVG logo with safe style attributes
@@ -364,7 +364,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
-    def test_api_accepts_valid_svg_with_safe_style_tag(self):
+    def test_api_accepts_valid_svg_with_safe_style_tag(self) -> None:
         """
         GIVEN:
             - A valid SVG logo with an embedded <style> tag
@@ -394,7 +394,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
-    def test_api_rejects_svg_with_disallowed_attribute(self):
+    def test_api_rejects_svg_with_disallowed_attribute(self) -> None:
         """
         GIVEN:
             - An SVG with a disallowed attribute (onclick)
@@ -421,7 +421,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         self.assertIn("disallowed", str(response.data).lower())
         self.assertIn("attribute", str(response.data).lower())
 
-    def test_api_rejects_svg_with_disallowed_tag(self):
+    def test_api_rejects_svg_with_disallowed_tag(self) -> None:
         """
         GIVEN:
             - An SVG with a disallowed tag (script)
@@ -449,7 +449,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         self.assertIn("disallowed", str(response.data).lower())
         self.assertIn("tag", str(response.data).lower())
 
-    def test_api_rejects_svg_with_javascript_href(self):
+    def test_api_rejects_svg_with_javascript_href(self) -> None:
         """
         GIVEN:
             - An SVG with javascript: in href attribute
@@ -478,7 +478,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         self.assertIn("disallowed", str(response.data).lower())
         self.assertIn("javascript", str(response.data).lower())
 
-    def test_api_rejects_svg_with_javascript_xlink_href(self):
+    def test_api_rejects_svg_with_javascript_xlink_href(self) -> None:
         """
         GIVEN:
             - An SVG with javascript: in xlink:href attribute
@@ -504,7 +504,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         self.assertIn("disallowed", str(response.data).lower())
         self.assertIn("javascript", str(response.data).lower())
 
-    def test_api_rejects_svg_with_data_text_html_href(self):
+    def test_api_rejects_svg_with_data_text_html_href(self) -> None:
         """
         GIVEN:
             - An SVG with data:text/html in href attribute
@@ -533,7 +533,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         # This will now catch "Disallowed URI scheme"
         self.assertIn("disallowed", str(response.data).lower())
 
-    def test_api_rejects_svg_with_unknown_namespace_attribute(self):
+    def test_api_rejects_svg_with_unknown_namespace_attribute(self) -> None:
         """
         GIVEN:
             - An SVG with an attribute in an unknown/custom namespace
@@ -599,7 +599,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         # Check for the error message raised by the safe_prefixes check
         self.assertIn("uri scheme not allowed", str(response.data).lower())
 
-    def test_create_not_allowed(self):
+    def test_create_not_allowed(self) -> None:
         """
         GIVEN:
             - API request to create a new app config
@@ -621,7 +621,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
         self.assertEqual(ApplicationConfiguration.objects.count(), 1)
 
-    def test_update_llm_api_key(self):
+    def test_update_llm_api_key(self) -> None:
         """
         GIVEN:
             - Existing config with llm_api_key specified
@@ -663,7 +663,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
         config.refresh_from_db()
         self.assertEqual(config.llm_api_key, None)
 
-    def test_enable_ai_index_triggers_update(self):
+    def test_enable_ai_index_triggers_update(self) -> None:
         """
         GIVEN:
             - Existing config with AI disabled
index a7e8f5df3224e902ecdcc0fed5a34a3100e60675..865f57247c829499c4ade16d884cbe14acf1f933 100644 (file)
@@ -20,7 +20,7 @@ from documents.tests.utils import SampleDirMixin
 class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
     ENDPOINT = "/api/documents/bulk_download/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(username="temp_admin")
@@ -56,7 +56,7 @@ class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
         shutil.copy(self.SAMPLE_DIR / "simple.jpg", self.doc3.source_path)
         shutil.copy(self.SAMPLE_DIR / "test_with_bom.pdf", self.doc3.archive_path)
 
-    def test_download_originals(self):
+    def test_download_originals(self) -> None:
         response = self.client.post(
             self.ENDPOINT,
             json.dumps(
@@ -79,7 +79,7 @@ class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
             with self.doc3.source_file as f:
                 self.assertEqual(f.read(), zipf.read("2020-03-21 document B.jpg"))
 
-    def test_download_default(self):
+    def test_download_default(self) -> None:
         response = self.client.post(
             self.ENDPOINT,
             json.dumps({"documents": [self.doc2.id, self.doc3.id]}),
@@ -100,7 +100,7 @@ class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
             with self.doc3.archive_file as f:
                 self.assertEqual(f.read(), zipf.read("2020-03-21 document B.pdf"))
 
-    def test_download_both(self):
+    def test_download_both(self) -> None:
         response = self.client.post(
             self.ENDPOINT,
             json.dumps({"documents": [self.doc2.id, self.doc3.id], "content": "both"}),
@@ -134,7 +134,7 @@ class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
                     zipf.read("originals/2020-03-21 document B.jpg"),
                 )
 
-    def test_filename_clashes(self):
+    def test_filename_clashes(self) -> None:
         response = self.client.post(
             self.ENDPOINT,
             json.dumps({"documents": [self.doc2.id, self.doc2b.id]}),
@@ -156,7 +156,7 @@ class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
             with self.doc2b.source_file as f:
                 self.assertEqual(f.read(), zipf.read("2021-01-01 document A_01.pdf"))
 
-    def test_compression(self):
+    def test_compression(self) -> None:
         self.client.post(
             self.ENDPOINT,
             json.dumps(
@@ -166,7 +166,7 @@ class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
         )
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
-    def test_formatted_download_originals(self):
+    def test_formatted_download_originals(self) -> None:
         """
         GIVEN:
             - Defined file naming format
@@ -218,7 +218,7 @@ class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
                 )
 
     @override_settings(FILENAME_FORMAT="somewhere/{title}")
-    def test_formatted_download_archive(self):
+    def test_formatted_download_archive(self) -> None:
         """
         GIVEN:
             - Defined file naming format
@@ -261,7 +261,7 @@ class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
                 self.assertEqual(f.read(), zipf.read("somewhere/Title 2 - Doc 3.pdf"))
 
     @override_settings(FILENAME_FORMAT="{document_type}/{title}")
-    def test_formatted_download_both(self):
+    def test_formatted_download_both(self) -> None:
         """
         GIVEN:
             - Defined file naming format
@@ -322,7 +322,7 @@ class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
                     zipf.read("originals/statement/Title 2 - Doc 3.jpg"),
                 )
 
-    def test_download_insufficient_permissions(self):
+    def test_download_insufficient_permissions(self) -> None:
         user = User.objects.create_user(username="temp_user")
         self.client.force_authenticate(user=user)
 
diff --git a/src/documents/tests/test_api_bulk_edit.py b/src/documents/tests/test_api_bulk_edit.py
index 2ba9f1af6369a2faca9759abcb58fd786e88000e..deb1d5586eaaf43b6b1af22994633a1c44c3d3a7 100644
--- a/src/documents/tests/test_api_bulk_edit.py
+++ b/src/documents/tests/test_api_bulk_edit.py
@@ -19,7 +19,7 @@ from documents.tests.utils import DirectoriesMixin
 
 
 class TestBulkEditAPI(DirectoriesMixin, APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         user = User.objects.create_superuser(username="temp_admin")
@@ -58,12 +58,12 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.cf1 = CustomField.objects.create(name="cf1", data_type="string")
         self.cf2 = CustomField.objects.create(name="cf2", data_type="string")
 
-    def setup_mock(self, m, method_name, return_value="OK"):
+    def setup_mock(self, m, method_name, return_value="OK") -> None:
         m.return_value = return_value
         m.__name__ = method_name
 
     @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
-    def test_api_set_correspondent(self, bulk_update_task_mock):
+    def test_api_set_correspondent(self, bulk_update_task_mock) -> None:
         self.assertNotEqual(self.doc1.correspondent, self.c1)
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -82,7 +82,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
 
     @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
-    def test_api_unset_correspondent(self, bulk_update_task_mock):
+    def test_api_unset_correspondent(self, bulk_update_task_mock) -> None:
         self.doc1.correspondent = self.c1
         self.doc1.save()
         self.assertIsNotNone(self.doc1.correspondent)
@@ -104,7 +104,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertIsNone(self.doc1.correspondent)
 
     @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
-    def test_api_set_type(self, bulk_update_task_mock):
+    def test_api_set_type(self, bulk_update_task_mock) -> None:
         self.assertNotEqual(self.doc1.document_type, self.dt1)
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -123,7 +123,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
 
     @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
-    def test_api_unset_type(self, bulk_update_task_mock):
+    def test_api_unset_type(self, bulk_update_task_mock) -> None:
         self.doc1.document_type = self.dt1
         self.doc1.save()
 
@@ -144,7 +144,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
 
     @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
-    def test_api_add_tag(self, bulk_update_task_mock):
+    def test_api_add_tag(self, bulk_update_task_mock) -> None:
         self.assertFalse(self.doc1.tags.filter(pk=self.t1.pk).exists())
 
         response = self.client.post(
@@ -166,7 +166,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
 
     @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
-    def test_api_remove_tag(self, bulk_update_task_mock):
+    def test_api_remove_tag(self, bulk_update_task_mock) -> None:
         self.doc1.tags.add(self.t1)
 
         response = self.client.post(
@@ -185,7 +185,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertFalse(self.doc1.tags.filter(pk=self.t1.pk).exists())
 
     @mock.patch("documents.serialisers.bulk_edit.modify_tags")
-    def test_api_modify_tags(self, m):
+    def test_api_modify_tags(self, m) -> None:
         self.setup_mock(m, "modify_tags")
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -209,7 +209,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(kwargs["remove_tags"], [self.t2.id])
 
     @mock.patch("documents.serialisers.bulk_edit.modify_tags")
-    def test_api_modify_tags_not_provided(self, m):
+    def test_api_modify_tags_not_provided(self, m) -> None:
         """
         GIVEN:
             - API data to modify tags is missing remove_tags field
@@ -237,7 +237,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         m.assert_not_called()
 
     @mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
-    def test_api_modify_custom_fields(self, m):
+    def test_api_modify_custom_fields(self, m) -> None:
         self.setup_mock(m, "modify_custom_fields")
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -263,7 +263,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(kwargs["remove_custom_fields"], [self.cf2.id])
 
     @mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
-    def test_api_modify_custom_fields_with_values(self, m):
+    def test_api_modify_custom_fields_with_values(self, m) -> None:
         self.setup_mock(m, "modify_custom_fields")
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -287,7 +287,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(kwargs["remove_custom_fields"], [self.cf2.id])
 
     @mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
-    def test_api_modify_custom_fields_invalid_params(self, m):
+    def test_api_modify_custom_fields_invalid_params(self, m) -> None:
         """
         GIVEN:
             - API data to modify custom fields is malformed
@@ -407,7 +407,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         m.assert_not_called()
 
     @mock.patch("documents.serialisers.bulk_edit.delete")
-    def test_api_delete(self, m):
+    def test_api_delete(self, m) -> None:
         self.setup_mock(m, "delete")
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -423,7 +423,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(len(kwargs), 0)
 
     @mock.patch("documents.serialisers.bulk_edit.set_storage_path")
-    def test_api_set_storage_path(self, m):
+    def test_api_set_storage_path(self, m) -> None:
         """
         GIVEN:
             - API data to set the storage path of a document
@@ -453,7 +453,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(kwargs["storage_path"], self.sp1.id)
 
     @mock.patch("documents.serialisers.bulk_edit.set_storage_path")
-    def test_api_unset_storage_path(self, m):
+    def test_api_unset_storage_path(self, m) -> None:
         """
         GIVEN:
             - API data to clear/unset the storage path of a document
@@ -482,7 +482,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertListEqual(args[0], [self.doc1.id])
         self.assertEqual(kwargs["storage_path"], None)
 
-    def test_api_invalid_storage_path(self):
+    def test_api_invalid_storage_path(self) -> None:
         """
         GIVEN:
             - API data to set the storage path of a document
@@ -507,7 +507,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.async_task.assert_not_called()
 
-    def test_api_set_storage_path_not_provided(self):
+    def test_api_set_storage_path_not_provided(self) -> None:
         """
         GIVEN:
             - API data to set the storage path of a document
@@ -532,7 +532,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.async_task.assert_not_called()
 
-    def test_api_invalid_doc(self):
+    def test_api_invalid_doc(self) -> None:
         self.assertEqual(Document.objects.count(), 5)
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -542,7 +542,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertEqual(Document.objects.count(), 5)
 
-    def test_api_invalid_method(self):
+    def test_api_invalid_method(self) -> None:
         self.assertEqual(Document.objects.count(), 5)
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -558,7 +558,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertEqual(Document.objects.count(), 5)
 
-    def test_api_invalid_correspondent(self):
+    def test_api_invalid_correspondent(self) -> None:
         self.assertEqual(self.doc2.correspondent, self.c1)
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -576,7 +576,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         doc2 = Document.objects.get(id=self.doc2.id)
         self.assertEqual(doc2.correspondent, self.c1)
 
-    def test_api_no_correspondent(self):
+    def test_api_no_correspondent(self) -> None:
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -590,7 +590,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_api_invalid_document_type(self):
+    def test_api_invalid_document_type(self) -> None:
         self.assertEqual(self.doc2.document_type, self.dt1)
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -608,7 +608,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         doc2 = Document.objects.get(id=self.doc2.id)
         self.assertEqual(doc2.document_type, self.dt1)
 
-    def test_api_no_document_type(self):
+    def test_api_no_document_type(self) -> None:
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -622,7 +622,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_api_add_invalid_tag(self):
+    def test_api_add_invalid_tag(self) -> None:
         self.assertEqual(list(self.doc2.tags.all()), [self.t1])
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -639,7 +639,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
 
         self.assertEqual(list(self.doc2.tags.all()), [self.t1])
 
-    def test_api_add_tag_no_tag(self):
+    def test_api_add_tag_no_tag(self) -> None:
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -649,7 +649,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_api_delete_invalid_tag(self):
+    def test_api_delete_invalid_tag(self) -> None:
         self.assertEqual(list(self.doc2.tags.all()), [self.t1])
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -666,7 +666,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
 
         self.assertEqual(list(self.doc2.tags.all()), [self.t1])
 
-    def test_api_delete_tag_no_tag(self):
+    def test_api_delete_tag_no_tag(self) -> None:
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -676,7 +676,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_api_modify_invalid_tags(self):
+    def test_api_modify_invalid_tags(self) -> None:
         self.assertEqual(list(self.doc2.tags.all()), [self.t1])
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -694,7 +694,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_api_modify_tags_no_tags(self):
+    def test_api_modify_tags_no_tags(self) -> None:
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -721,7 +721,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_api_selection_data_empty(self):
+    def test_api_selection_data_empty(self) -> None:
         response = self.client.post(
             "/api/documents/selection_data/",
             json.dumps({"documents": []}),
@@ -741,7 +741,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
                 map(lambda c: c["id"], Entity.objects.values("id")),
             )
 
-    def test_api_selection_data(self):
+    def test_api_selection_data(self) -> None:
         response = self.client.post(
             "/api/documents/selection_data/",
             json.dumps(
@@ -774,7 +774,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         )
 
     @mock.patch("documents.serialisers.bulk_edit.set_permissions")
-    def test_set_permissions(self, m):
+    def test_set_permissions(self, m) -> None:
         self.setup_mock(m, "set_permissions")
         user1 = User.objects.create(username="user1")
         user2 = User.objects.create(username="user2")
@@ -809,7 +809,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(len(kwargs["set_permissions"]["view"]["users"]), 2)
 
     @mock.patch("documents.serialisers.bulk_edit.set_permissions")
-    def test_set_permissions_merge(self, m):
+    def test_set_permissions_merge(self, m) -> None:
         self.setup_mock(m, "set_permissions")
         user1 = User.objects.create(username="user1")
         user2 = User.objects.create(username="user2")
@@ -862,7 +862,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
 
     @mock.patch("documents.serialisers.bulk_edit.set_storage_path")
     @mock.patch("documents.serialisers.bulk_edit.merge")
-    def test_insufficient_global_perms(self, mock_merge, mock_set_storage):
+    def test_insufficient_global_perms(self, mock_merge, mock_set_storage) -> None:
         """
         GIVEN:
             - User has no global permissions to change a document
@@ -929,7 +929,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         mock_merge.assert_not_called()
 
     @mock.patch("documents.serialisers.bulk_edit.set_permissions")
-    def test_insufficient_permissions_ownership(self, m):
+    def test_insufficient_permissions_ownership(self, m) -> None:
         """
         GIVEN:
             - Documents owned by user other than logged in user
@@ -983,7 +983,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         m.assert_called_once()
 
     @mock.patch("documents.serialisers.bulk_edit.set_storage_path")
-    def test_insufficient_permissions_edit(self, m):
+    def test_insufficient_permissions_edit(self, m) -> None:
         """
         GIVEN:
             - Documents for which current user only has view permissions
@@ -1037,7 +1037,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         m.assert_called_once()
 
     @mock.patch("documents.serialisers.bulk_edit.rotate")
-    def test_rotate(self, m):
+    def test_rotate(self, m) -> None:
         self.setup_mock(m, "rotate")
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -1059,7 +1059,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(kwargs["degrees"], 90)
 
     @mock.patch("documents.serialisers.bulk_edit.rotate")
-    def test_rotate_invalid_params(self, m):
+    def test_rotate_invalid_params(self, m) -> None:
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -1091,7 +1091,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         m.assert_not_called()
 
     @mock.patch("documents.serialisers.bulk_edit.merge")
-    def test_merge(self, m):
+    def test_merge(self, m) -> None:
         self.setup_mock(m, "merge")
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -1114,7 +1114,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(kwargs["user"], self.user)
 
     @mock.patch("documents.serialisers.bulk_edit.merge")
-    def test_merge_and_delete_insufficient_permissions(self, m):
+    def test_merge_and_delete_insufficient_permissions(self, m) -> None:
         self.doc1.owner = User.objects.get(username="temp_admin")
         self.doc1.save()
         user1 = User.objects.create(username="user1")
@@ -1161,7 +1161,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         m.assert_called_once()
 
     @mock.patch("documents.serialisers.bulk_edit.merge")
-    def test_merge_invalid_parameters(self, m):
+    def test_merge_invalid_parameters(self, m) -> None:
         """
         GIVEN:
             - API data for merging documents is called
@@ -1190,7 +1190,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         m.assert_not_called()
 
     @mock.patch("documents.serialisers.bulk_edit.split")
-    def test_split(self, m):
+    def test_split(self, m) -> None:
         self.setup_mock(m, "split")
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -1212,7 +1212,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(kwargs["pages"], [[1], [2, 3, 4], [5, 6], [7]])
         self.assertEqual(kwargs["user"], self.user)
 
-    def test_split_invalid_params(self):
+    def test_split_invalid_params(self) -> None:
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -1280,7 +1280,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertIn(b"delete_originals must be a boolean", response.content)
 
     @mock.patch("documents.serialisers.bulk_edit.delete_pages")
-    def test_delete_pages(self, m):
+    def test_delete_pages(self, m) -> None:
         self.setup_mock(m, "delete_pages")
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -1301,7 +1301,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertCountEqual(args[0], [self.doc2.id])
         self.assertEqual(kwargs["pages"], [1, 2, 3, 4])
 
-    def test_delete_pages_invalid_params(self):
+    def test_delete_pages_invalid_params(self) -> None:
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -1371,7 +1371,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertIn(b"pages must be a list of integers", response.content)
 
     @mock.patch("documents.serialisers.bulk_edit.edit_pdf")
-    def test_edit_pdf(self, m):
+    def test_edit_pdf(self, m) -> None:
         self.setup_mock(m, "edit_pdf")
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -1393,7 +1393,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(kwargs["operations"], [{"page": 1}])
         self.assertEqual(kwargs["user"], self.user)
 
-    def test_edit_pdf_invalid_params(self):
+    def test_edit_pdf_invalid_params(self) -> None:
         # multiple documents
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -1557,7 +1557,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         )
 
     @mock.patch("documents.serialisers.bulk_edit.edit_pdf")
-    def test_edit_pdf_page_out_of_bounds(self, m):
+    def test_edit_pdf_page_out_of_bounds(self, m) -> None:
         """
         GIVEN:
             - API data for editing PDF is called
@@ -1583,7 +1583,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertIn(b"out of bounds", response.content)
 
     @mock.patch("documents.serialisers.bulk_edit.remove_password")
-    def test_remove_password(self, m):
+    def test_remove_password(self, m) -> None:
         self.setup_mock(m, "remove_password")
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -1605,7 +1605,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertTrue(kwargs["update_document"])
         self.assertEqual(kwargs["user"], self.user)
 
-    def test_remove_password_invalid_params(self):
+    def test_remove_password_invalid_params(self) -> None:
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -1635,7 +1635,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertIn(b"password must be a string", response.content)
 
     @override_settings(AUDIT_LOG_ENABLED=True)
-    def test_bulk_edit_audit_log_enabled_simple_field(self):
+    def test_bulk_edit_audit_log_enabled_simple_field(self) -> None:
         """
         GIVEN:
             - Audit log is enabled
@@ -1661,7 +1661,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
 
     @override_settings(AUDIT_LOG_ENABLED=True)
-    def test_bulk_edit_audit_log_enabled_tags(self):
+    def test_bulk_edit_audit_log_enabled_tags(self) -> None:
         """
         GIVEN:
             - Audit log is enabled
@@ -1690,7 +1690,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
 
     @override_settings(AUDIT_LOG_ENABLED=True)
-    def test_bulk_edit_audit_log_enabled_custom_fields(self):
+    def test_bulk_edit_audit_log_enabled_custom_fields(self) -> None:
         """
         GIVEN:
             - Audit log is enabled
diff --git a/src/documents/tests/test_api_custom_fields.py b/src/documents/tests/test_api_custom_fields.py
index 8cc8f2cb2acf4a845461ba89101a7fab6c1ca33b..9d9014b298c48b51fc52b1234b8772455245e841 100644
--- a/src/documents/tests/test_api_custom_fields.py
+++ b/src/documents/tests/test_api_custom_fields.py
@@ -18,12 +18,12 @@ from documents.tests.utils import DirectoriesMixin
 class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/custom_fields/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         self.user = User.objects.create_superuser(username="temp_admin")
         self.client.force_authenticate(user=self.user)
         return super().setUp()
 
-    def test_create_custom_field(self):
+    def test_create_custom_field(self) -> None:
         """
         GIVEN:
             - Each of the supported data types is created
@@ -88,7 +88,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
             ],
         )
 
-    def test_create_custom_field_nonunique_name(self):
+    def test_create_custom_field_nonunique_name(self) -> None:
         """
         GIVEN:
             - Custom field exists
@@ -111,7 +111,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_create_custom_field_select_invalid_options(self):
+    def test_create_custom_field_select_invalid_options(self) -> None:
         """
         GIVEN:
             - Custom field does not exist
@@ -150,7 +150,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_custom_field_select_unique_ids(self):
+    def test_custom_field_select_unique_ids(self) -> None:
         """
         GIVEN:
             - Existing custom field
@@ -214,7 +214,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         )
 
     @override_settings(CELERY_TASK_ALWAYS_EAGER=True)
-    def test_custom_field_select_options_pruned(self):
+    def test_custom_field_select_options_pruned(self) -> None:
         """
         GIVEN:
             - Select custom field exists and document instance with one of the options
@@ -278,7 +278,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(doc.custom_fields.first().value, None)
 
     @mock.patch("documents.signals.handlers.process_cf_select_update.delay")
-    def test_custom_field_update_offloaded_once(self, mock_delay):
+    def test_custom_field_update_offloaded_once(self, mock_delay) -> None:
         """
         GIVEN:
             - A select custom field attached to multiple documents
@@ -323,7 +323,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
 
         mock_delay.assert_called_once_with(cf_select)
 
-    def test_custom_field_select_old_version(self):
+    def test_custom_field_select_old_version(self) -> None:
         """
         GIVEN:
             - Nothing
@@ -377,7 +377,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
             ],
         )
 
-    def test_custom_field_select_value_old_version(self):
+    def test_custom_field_select_value_old_version(self) -> None:
         """
         GIVEN:
             - Existing document with custom field select
@@ -430,7 +430,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         data = resp.json()
         self.assertEqual(data["custom_fields"][0]["value"], 1)
 
-    def test_create_custom_field_monetary_validation(self):
+    def test_create_custom_field_monetary_validation(self) -> None:
         """
         GIVEN:
             - Custom field does not exist
@@ -490,7 +490,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
 
-    def test_create_custom_field_instance(self):
+    def test_create_custom_field_instance(self) -> None:
         """
         GIVEN:
             - Field of each data type is created
@@ -634,7 +634,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         doc.refresh_from_db()
         self.assertEqual(len(doc.custom_fields.all()), 10)
 
-    def test_change_custom_field_instance_value(self):
+    def test_change_custom_field_instance_value(self) -> None:
         """
         GIVEN:
             - Custom field instance is created and attached to document
@@ -692,7 +692,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(CustomFieldInstance.objects.count(), 1)
         self.assertEqual(doc.custom_fields.first().value, "a new test value")
 
-    def test_delete_custom_field_instance(self):
+    def test_delete_custom_field_instance(self) -> None:
         """
         GIVEN:
             - Multiple custom field instances are created and attached to document
@@ -758,7 +758,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(len(doc.custom_fields.all()), 1)
         self.assertEqual(doc.custom_fields.first().value, date_value)
 
-    def test_custom_field_validation(self):
+    def test_custom_field_validation(self) -> None:
         """
         GIVEN:
             - Document exists with no fields
@@ -799,7 +799,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(CustomFieldInstance.objects.count(), 0)
         self.assertEqual(len(doc.custom_fields.all()), 0)
 
-    def test_custom_field_value_url_validation(self):
+    def test_custom_field_value_url_validation(self) -> None:
         """
         GIVEN:
             - Document & custom field exist
@@ -853,7 +853,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
 
         self.assertEqual(resp.status_code, status.HTTP_200_OK)
 
-    def test_custom_field_value_integer_validation(self):
+    def test_custom_field_value_integer_validation(self) -> None:
         """
         GIVEN:
             - Document & custom field exist
@@ -891,7 +891,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(CustomFieldInstance.objects.count(), 0)
         self.assertEqual(len(doc.custom_fields.all()), 0)
 
-    def test_custom_field_value_monetary_validation(self):
+    def test_custom_field_value_monetary_validation(self) -> None:
         """
         GIVEN:
             - Document & custom field exist
@@ -962,7 +962,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(CustomFieldInstance.objects.count(), 0)
         self.assertEqual(len(doc.custom_fields.all()), 0)
 
-    def test_custom_field_value_short_text_validation(self):
+    def test_custom_field_value_short_text_validation(self) -> None:
         """
         GIVEN:
             - Document & custom field exist
@@ -997,7 +997,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(CustomFieldInstance.objects.count(), 0)
         self.assertEqual(len(doc.custom_fields.all()), 0)
 
-    def test_custom_field_value_select_validation(self):
+    def test_custom_field_value_select_validation(self) -> None:
         """
         GIVEN:
             - Document & custom field exist
@@ -1038,7 +1038,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(CustomFieldInstance.objects.count(), 0)
         self.assertEqual(len(doc.custom_fields.all()), 0)
 
-    def test_custom_field_value_documentlink_validation(self):
+    def test_custom_field_value_documentlink_validation(self) -> None:
         """
         GIVEN:
             - Document & custom field exist
@@ -1088,7 +1088,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertEqual(CustomFieldInstance.objects.count(), 0)
 
-    def test_custom_field_not_null(self):
+    def test_custom_field_not_null(self) -> None:
         """
         GIVEN:
             - Existing document
@@ -1114,7 +1114,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
 
         self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_symmetric_doclink_fields(self):
+    def test_symmetric_doclink_fields(self) -> None:
         """
         GIVEN:
             - Existing document
@@ -1247,7 +1247,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(resp.status_code, status.HTTP_200_OK)
         self.assertEqual(doc5.custom_fields.first().value, [1])
 
-    def test_custom_field_filters(self):
+    def test_custom_field_filters(self) -> None:
         custom_field_string = CustomField.objects.create(
             name="Test Custom Field String",
             data_type=CustomField.FieldDataType.STRING,
@@ -1283,7 +1283,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(len(results), 1)
         self.assertEqual(results[0]["name"], custom_field_int.name)
 
-    def test_custom_fields_document_count(self):
+    def test_custom_fields_document_count(self) -> None:
         custom_field_string = CustomField.objects.create(
             name="Test Custom Field String",
             data_type=CustomField.FieldDataType.STRING,
diff --git a/src/documents/tests/test_api_documents.py b/src/documents/tests/test_api_documents.py
index ef06ce64813a446563faac8db25a200bc340a2a6..e3ccd8e4e3067ac955d73150452814a9a5970db2 100644
--- a/src/documents/tests/test_api_documents.py
+++ b/src/documents/tests/test_api_documents.py
@@ -50,14 +50,14 @@ from documents.tests.utils import DocumentConsumeDelayMixin
 
 
 class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(username="temp_admin")
         self.client.force_authenticate(user=self.user)
         cache.clear()
 
-    def testDocuments(self):
+    def testDocuments(self) -> None:
         response = self.client.get("/api/documents/").data
 
         self.assertEqual(response["count"], 0)
@@ -110,7 +110,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.assertEqual(len(Document.objects.all()), 0)
 
-    def test_document_fields(self):
+    def test_document_fields(self) -> None:
         c = Correspondent.objects.create(name="c", pk=41)
         dt = DocumentType.objects.create(name="dt", pk=63)
         Tag.objects.create(name="t", pk=85)
@@ -176,7 +176,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         results = response.data["results"]
         self.assertEqual(len(results[0]), 0)
 
-    def test_document_fields_api_version_8_respects_created(self):
+    def test_document_fields_api_version_8_respects_created(self) -> None:
         Document.objects.create(
             title="legacy",
             checksum="123",
@@ -205,7 +205,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIn("created", results[0])
         self.assertRegex(results[0]["created"], r"^2024-01-15T00:00:00.*$")
 
-    def test_document_legacy_created_format(self):
+    def test_document_legacy_created_format(self) -> None:
         """
         GIVEN:
             - Existing document
@@ -258,7 +258,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         doc.refresh_from_db()
         self.assertEqual(doc.created, date(2023, 6, 28))
 
-    def test_document_update_legacy_created_format(self):
+    def test_document_update_legacy_created_format(self) -> None:
         """
         GIVEN:
             - Existing document
@@ -286,7 +286,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         doc.refresh_from_db()
         self.assertEqual(doc.created, date(2023, 2, 1))
 
-    def test_document_update_with_created_date(self):
+    def test_document_update_with_created_date(self) -> None:
         """
         GIVEN:
             - Existing document
@@ -312,7 +312,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         doc.refresh_from_db()
         self.assertEqual(doc.created_date, created_date)
 
-    def test_document_actions(self):
+    def test_document_actions(self) -> None:
         _, filename = tempfile.mkstemp(dir=self.dirs.originals_dir)
 
         content = b"This is a test"
@@ -347,7 +347,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(response.content, content_thumbnail)
 
-    def test_document_actions_with_perms(self):
+    def test_document_actions_with_perms(self) -> None:
         """
         GIVEN:
             - Document with owner and without granted permissions
@@ -404,7 +404,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
     @override_settings(FILENAME_FORMAT="")
-    def test_download_with_archive(self):
+    def test_download_with_archive(self) -> None:
         content = b"This is a test"
         content_archive = b"This is the same test but archived"
 
@@ -445,7 +445,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(response.content, content)
 
-    def test_document_actions_not_existing_file(self):
+    def test_document_actions_not_existing_file(self) -> None:
         doc = Document.objects.create(
             title="none",
             filename=Path("asd").name,
@@ -461,7 +461,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         response = self.client.get(f"/api/documents/{doc.pk}/thumb/")
         self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
 
-    def test_document_history_action(self):
+    def test_document_history_action(self) -> None:
         """
         GIVEN:
             - Document
@@ -492,7 +492,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             {"title": ["First title", "New title"]},
         )
 
-    def test_document_history_action_w_custom_fields(self):
+    def test_document_history_action_w_custom_fields(self) -> None:
         """
         GIVEN:
             - Document with custom fields
@@ -555,7 +555,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.data[1]["action"], "create")
 
     @override_settings(AUDIT_LOG_ENABLED=False)
-    def test_document_history_action_disabled(self):
+    def test_document_history_action_disabled(self) -> None:
         """
         GIVEN:
             - Audit log is disabled
@@ -580,7 +580,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         response = self.client.get(f"/api/documents/{doc.pk}/history/")
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_document_history_insufficient_perms(self):
+    def test_document_history_insufficient_perms(self) -> None:
         """
         GIVEN:
             - Audit log is enabled
@@ -618,7 +618,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         response = self.client.get(f"/api/documents/{doc2.pk}/history/")
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
-    def test_document_filters(self):
+    def test_document_filters(self) -> None:
         doc1 = Document.objects.create(
             title="none1",
             checksum="A",
@@ -793,7 +793,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         results = response.data["results"]
         self.assertEqual(len(results), 3)
 
-    def test_custom_field_select_filter(self):
+    def test_custom_field_select_filter(self) -> None:
         """
         GIVEN:
             - Documents with select custom field values
@@ -828,7 +828,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(r.status_code, status.HTTP_200_OK)
         self.assertEqual(r.data["count"], 0)
 
-    def test_document_checksum_filter(self):
+    def test_document_checksum_filter(self) -> None:
         Document.objects.create(
             title="none1",
             checksum="A",
@@ -856,7 +856,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         results = response.data["results"]
         self.assertEqual(len(results), 0)
 
-    def test_document_original_filename_filter(self):
+    def test_document_original_filename_filter(self) -> None:
         doc1 = Document.objects.create(
             title="none1",
             checksum="A",
@@ -896,7 +896,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             [doc1.id, doc2.id, doc3.id],
         )
 
-    def test_documents_title_content_filter(self):
+    def test_documents_title_content_filter(self) -> None:
         doc1 = Document.objects.create(
             title="title A",
             content="content A",
@@ -945,7 +945,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         results = response.data["results"]
         self.assertEqual(len(results), 0)
 
-    def test_documents_title_content_filter_strips_boundary_whitespace(self):
+    def test_documents_title_content_filter_strips_boundary_whitespace(self) -> None:
         doc = Document.objects.create(
             title="Testwort",
             content="",
@@ -962,7 +962,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(len(results), 1)
         self.assertEqual(results[0]["id"], doc.id)
 
-    def test_document_permissions_filters(self):
+    def test_document_permissions_filters(self) -> None:
         """
         GIVEN:
             - Documents with owners, with and without granted permissions
@@ -1066,7 +1066,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             [u1_doc1.id],
         )
 
-    def test_pagination_all(self):
+    def test_pagination_all(self) -> None:
         """
         GIVEN:
             - A set of 50 documents
@@ -1091,7 +1091,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(len(response.data["all"]), 50)
         self.assertCountEqual(response.data["all"], [d.id for d in docs])
 
-    def test_statistics(self):
+    def test_statistics(self) -> None:
         doc1 = Document.objects.create(
             title="none1",
             checksum="A",
@@ -1141,7 +1141,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.data["document_type_count"], 1)
         self.assertEqual(response.data["storage_path_count"], 2)
 
-    def test_statistics_no_inbox_tag(self):
+    def test_statistics_no_inbox_tag(self) -> None:
         Document.objects.create(title="none1", checksum="A")
 
         response = self.client.get("/api/statistics/")
@@ -1149,7 +1149,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.data["documents_inbox"], None)
         self.assertEqual(response.data["inbox_tags"], None)
 
-    def test_statistics_multiple_users(self):
+    def test_statistics_multiple_users(self) -> None:
         """
         GIVEN:
             - Inbox tags with different owners and documents that are accessible to different users
@@ -1188,7 +1188,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(response.data["documents_inbox"], 0)
 
-    def test_upload(self):
+    def test_upload(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1212,7 +1212,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIsNone(overrides.document_type_id)
         self.assertIsNone(overrides.tag_ids)
 
-    def test_create_wrong_endpoint(self):
+    def test_create_wrong_endpoint(self) -> None:
         response = self.client.post(
             "/api/documents/",
             {},
@@ -1220,7 +1220,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
 
-    def test_upload_insufficient_permissions(self):
+    def test_upload_insufficient_permissions(self) -> None:
         self.client.force_authenticate(user=User.objects.create_user("testuser2"))
 
         with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
@@ -1231,7 +1231,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    def test_upload_empty_metadata(self):
+    def test_upload_empty_metadata(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1262,7 +1262,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIsNone(overrides.storage_path_id)
         self.assertIsNone(overrides.tag_ids)
 
-    def test_upload_invalid_form(self):
+    def test_upload_invalid_form(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1275,7 +1275,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.consume_file_mock.assert_not_called()
 
-    def test_upload_invalid_file(self):
+    def test_upload_invalid_file(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1288,7 +1288,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.consume_file_mock.assert_not_called()
 
-    def test_upload_with_title(self):
+    def test_upload_with_title(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1309,7 +1309,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIsNone(overrides.document_type_id)
         self.assertIsNone(overrides.tag_ids)
 
-    def test_upload_with_correspondent(self):
+    def test_upload_with_correspondent(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1331,7 +1331,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIsNone(overrides.document_type_id)
         self.assertIsNone(overrides.tag_ids)
 
-    def test_upload_with_invalid_correspondent(self):
+    def test_upload_with_invalid_correspondent(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1345,7 +1345,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.consume_file_mock.assert_not_called()
 
-    def test_upload_with_document_type(self):
+    def test_upload_with_document_type(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1367,7 +1367,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIsNone(overrides.title)
         self.assertIsNone(overrides.tag_ids)
 
-    def test_upload_with_invalid_document_type(self):
+    def test_upload_with_invalid_document_type(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1381,7 +1381,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.consume_file_mock.assert_not_called()
 
-    def test_upload_with_storage_path(self):
+    def test_upload_with_storage_path(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1403,7 +1403,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIsNone(overrides.title)
         self.assertIsNone(overrides.tag_ids)
 
-    def test_upload_with_invalid_storage_path(self):
+    def test_upload_with_invalid_storage_path(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1417,7 +1417,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.consume_file_mock.assert_not_called()
 
-    def test_upload_with_tags(self):
+    def test_upload_with_tags(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1440,7 +1440,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIsNone(overrides.correspondent_id)
         self.assertIsNone(overrides.title)
 
-    def test_upload_with_invalid_tags(self):
+    def test_upload_with_invalid_tags(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1456,7 +1456,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.consume_file_mock.assert_not_called()
 
-    def test_upload_with_created(self):
+    def test_upload_with_created(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1484,7 +1484,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.assertEqual(overrides.created, created.date())
 
-    def test_upload_with_asn(self):
+    def test_upload_with_asn(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1508,7 +1508,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIsNone(overrides.tag_ids)
         self.assertEqual(500, overrides.asn)
 
-    def test_upload_with_custom_fields(self):
+    def test_upload_with_custom_fields(self) -> None:
         self.consume_file_mock.return_value = celery.result.AsyncResult(
             id=str(uuid.uuid4()),
         )
@@ -1537,7 +1537,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(overrides.filename, "simple.pdf")
         self.assertEqual(overrides.custom_fields, {custom_field.id: None})
 
-    def test_upload_with_custom_fields_and_workflow(self):
+    def test_upload_with_custom_fields_and_workflow(self) -> None:
         """
         GIVEN: A document with a source file
         WHEN: Upload the document with custom fields and a workflow
@@ -1599,7 +1599,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         overrides.update(new_overrides)
         self.assertEqual(overrides.custom_fields, {cf.id: None, cf2.id: 123})
 
-    def test_upload_with_custom_field_values(self):
+    def test_upload_with_custom_field_values(self) -> None:
         """
         GIVEN: A document with a source file
         WHEN: Upload the document with custom fields and values
@@ -1645,7 +1645,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             {cf_string.id: "a string", cf_int.id: 123},
         )
 
-    def test_upload_with_custom_fields_errors(self):
+    def test_upload_with_custom_fields_errors(self) -> None:
         """
         GIVEN: A document with a source file
         WHEN: Upload the document with invalid custom fields payloads
@@ -1679,7 +1679,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.consume_file_mock.assert_not_called()
 
-    def test_patch_document_integer_custom_field_out_of_range(self):
+    def test_patch_document_integer_custom_field_out_of_range(self) -> None:
         """
         GIVEN:
             - An integer custom field
@@ -1717,7 +1717,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIn("custom_fields", response.data)
         self.assertEqual(CustomFieldInstance.objects.count(), 0)
 
-    def test_upload_with_webui_source(self):
+    def test_upload_with_webui_source(self) -> None:
         """
         GIVEN: A document with a source file
         WHEN: Upload the document with 'from_webui' flag
@@ -1741,7 +1741,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.assertEqual(input_doc.source, WorkflowTrigger.DocumentSourceChoices.WEB_UI)
 
-    def test_upload_invalid_pdf(self):
+    def test_upload_invalid_pdf(self) -> None:
         """
         GIVEN: Invalid PDF named "*.pdf" that mime_type is in settings.CONSUMER_PDF_RECOVERABLE_MIME_TYPES
         WHEN: Upload the file
@@ -1759,7 +1759,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
-    def test_get_metadata(self):
+    def test_get_metadata(self) -> None:
         doc = Document.objects.create(
             title="test",
             filename="file.pdf",
@@ -1797,11 +1797,11 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         response = self.client.get(f"/api/documents/{doc.pk}/metadata/")
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
-    def test_get_metadata_invalid_doc(self):
+    def test_get_metadata_invalid_doc(self) -> None:
         response = self.client.get("/api/documents/34576/metadata/")
         self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
 
-    def test_get_metadata_no_archive(self):
+    def test_get_metadata_no_archive(self) -> None:
         doc = Document.objects.create(
             title="test",
             filename="file.pdf",
@@ -1821,7 +1821,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIsNone(meta["archive_metadata"])
         self.assertIsNone(meta["archive_media_filename"])
 
-    def test_get_metadata_missing_files(self):
+    def test_get_metadata_missing_files(self) -> None:
         doc = Document.objects.create(
             title="test",
             filename="file.pdf",
@@ -1842,7 +1842,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertIsNone(meta["archive_metadata"])
         self.assertIsNone(meta["archive_size"])
 
-    def test_get_empty_suggestions(self):
+    def test_get_empty_suggestions(self) -> None:
         doc = Document.objects.create(title="test", mime_type="application/pdf")
 
         response = self.client.get(f"/api/documents/{doc.pk}/suggestions/")
@@ -1859,7 +1859,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             },
         )
 
-    def test_get_suggestions_invalid_doc(self):
+    def test_get_suggestions_invalid_doc(self) -> None:
         response = self.client.get("/api/documents/34676/suggestions/")
         self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
 
@@ -2012,7 +2012,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.client.get(f"/api/documents/{doc.pk}/suggestions/")
         self.assertFalse(parse_date_generator.called)
 
-    def test_saved_views(self):
+    def test_saved_views(self) -> None:
         u1 = User.objects.create_superuser("user1")
         u2 = User.objects.create_superuser("user2")
 
@@ -2069,7 +2069,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             status.HTTP_404_NOT_FOUND,
         )
 
-    def test_saved_view_create_update_patch(self):
+    def test_saved_view_create_update_patch(self) -> None:
         User.objects.create_user("user1")
 
         view = {
@@ -2116,7 +2116,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         v1 = SavedView.objects.get(id=v1.id)
         self.assertEqual(v1.filter_rules.count(), 0)
 
-    def test_saved_view_display_options(self):
+    def test_saved_view_display_options(self) -> None:
         """
         GIVEN:
             - Saved view
@@ -2207,7 +2207,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         v1.refresh_from_db()
         self.assertEqual(v1.display_fields, None)
 
-    def test_saved_view_display_customfields(self):
+    def test_saved_view_display_customfields(self) -> None:
         """
         GIVEN:
             - Saved view
@@ -2278,7 +2278,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_saved_view_cleanup_after_custom_field_deletion(self):
+    def test_saved_view_cleanup_after_custom_field_deletion(self) -> None:
         """
         GIVEN:
             - Saved view with custom field in display fields and as sort field
@@ -2314,7 +2314,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             [str(SavedView.DisplayFields.TITLE), str(SavedView.DisplayFields.CREATED)],
         )
 
-    def test_get_logs(self):
+    def test_get_logs(self) -> None:
         log_data = "test\ntest2\n"
         with (Path(settings.LOGGING_DIR) / "mail.log").open("w") as f:
             f.write(log_data)
@@ -2324,7 +2324,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertCountEqual(response.data, ["mail", "paperless"])
 
-    def test_get_logs_only_when_exist(self):
+    def test_get_logs_only_when_exist(self) -> None:
         log_data = "test\ntest2\n"
         with (Path(settings.LOGGING_DIR) / "paperless.log").open("w") as f:
             f.write(log_data)
@@ -2332,16 +2332,16 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertCountEqual(response.data, ["paperless"])
 
-    def test_get_invalid_log(self):
+    def test_get_invalid_log(self) -> None:
         response = self.client.get("/api/logs/bogus_log/")
         self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
 
     @override_settings(LOGGING_DIR="bogus_dir")
-    def test_get_nonexistent_log(self):
+    def test_get_nonexistent_log(self) -> None:
         response = self.client.get("/api/logs/paperless/")
         self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
 
-    def test_get_log(self):
+    def test_get_log(self) -> None:
         log_data = "test\ntest2\n"
         with (Path(settings.LOGGING_DIR) / "paperless.log").open("w") as f:
             f.write(log_data)
@@ -2349,7 +2349,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertListEqual(response.data, ["test", "test2"])
 
-    def test_get_log_with_limit(self):
+    def test_get_log_with_limit(self) -> None:
         log_data = "test1\ntest2\ntest3\n"
         with (Path(settings.LOGGING_DIR) / "paperless.log").open("w") as f:
             f.write(log_data)
@@ -2357,7 +2357,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertListEqual(response.data, ["test2", "test3"])
 
-    def test_get_log_with_invalid_limit(self):
+    def test_get_log_with_invalid_limit(self) -> None:
         log_data = "test1\ntest2\n"
         with (Path(settings.LOGGING_DIR) / "paperless.log").open("w") as f:
             f.write(log_data)
@@ -2366,7 +2366,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         response = self.client.get("/api/logs/paperless/", {"limit": -5})
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_invalid_regex_other_algorithm(self):
+    def test_invalid_regex_other_algorithm(self) -> None:
         for endpoint in ["correspondents", "tags", "document_types"]:
             response = self.client.post(
                 f"/api/{endpoint}/",
@@ -2379,7 +2379,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             )
             self.assertEqual(response.status_code, status.HTTP_201_CREATED, endpoint)
 
-    def test_invalid_regex(self):
+    def test_invalid_regex(self) -> None:
         for endpoint in ["correspondents", "tags", "document_types"]:
             response = self.client.post(
                 f"/api/{endpoint}/",
@@ -2396,7 +2396,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
                 endpoint,
             )
 
-    def test_valid_regex(self):
+    def test_valid_regex(self) -> None:
         for endpoint in ["correspondents", "tags", "document_types"]:
             response = self.client.post(
                 f"/api/{endpoint}/",
@@ -2409,7 +2409,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             )
             self.assertEqual(response.status_code, status.HTTP_201_CREATED, endpoint)
 
-    def test_regex_no_algorithm(self):
+    def test_regex_no_algorithm(self) -> None:
         for endpoint in ["correspondents", "tags", "document_types"]:
             response = self.client.post(
                 f"/api/{endpoint}/",
@@ -2418,7 +2418,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             )
             self.assertEqual(response.status_code, status.HTTP_201_CREATED, endpoint)
 
-    def test_tag_color_default(self):
+    def test_tag_color_default(self) -> None:
         response = self.client.post("/api/tags/", {"name": "tag"}, format="json")
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         self.assertEqual(Tag.objects.get(id=response.data["id"]).color, "#a6cee3")
@@ -2431,7 +2431,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             1,
         )
 
-    def test_tag_color(self):
+    def test_tag_color(self) -> None:
         response = self.client.post(
             "/api/tags/",
             data={"name": "tag", "colour": 3},
@@ -2449,7 +2449,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             3,
         )
 
-    def test_tag_color_invalid(self):
+    def test_tag_color_invalid(self) -> None:
         response = self.client.post(
             "/api/tags/",
             data={"name": "tag", "colour": 34},
@@ -2458,7 +2458,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_tag_color_custom(self):
+    def test_tag_color_custom(self) -> None:
         tag = Tag.objects.create(name="test", color="#abcdef")
         self.assertEqual(
             self.client.get(
@@ -2469,7 +2469,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             1,
         )
 
-    def test_get_existing_notes(self):
+    def test_get_existing_notes(self) -> None:
         """
         GIVEN:
             - A document with a single note
@@ -2531,7 +2531,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             },
         )
 
-    def test_docnote_serializer_v7(self):
+    def test_docnote_serializer_v7(self) -> None:
         doc = Document.objects.create(
             title="test",
             mime_type="application/pdf",
@@ -2551,7 +2551,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             self.user.id,
         )
 
-    def test_create_note(self):
+    def test_create_note(self) -> None:
         """
         GIVEN:
             - Existing document
@@ -2595,7 +2595,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         # modified was updated to today
         self.assertEqual(doc.modified.day, timezone.now().day)
 
-    def test_notes_permissions_aware(self):
+    def test_notes_permissions_aware(self) -> None:
         """
         GIVEN:
             - Existing document owned by user2 but with granted view perms for user1
@@ -2651,7 +2651,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(response.content, b"Insufficient permissions to delete notes")
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    def test_delete_note(self):
+    def test_delete_note(self) -> None:
         """
         GIVEN:
             - Existing document, existing note
@@ -2687,7 +2687,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         # modified was updated to today
         self.assertEqual(doc.modified.day, timezone.now().day)
 
-    def test_get_notes_no_doc(self):
+    def test_get_notes_no_doc(self) -> None:
         """
         GIVEN:
             - A request to get notes from a non-existent document
@@ -2702,7 +2702,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
 
-    def test_tag_unique_name_and_owner(self):
+    def test_tag_unique_name_and_owner(self) -> None:
         """
         GIVEN:
             - Multiple users
@@ -2762,7 +2762,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_tag_unique_name_and_owner_enforced_on_update(self):
+    def test_tag_unique_name_and_owner_enforced_on_update(self) -> None:
         """
         GIVEN:
             - Multiple users
@@ -2796,7 +2796,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_create_share_links(self):
+    def test_create_share_links(self) -> None:
         """
         GIVEN:
             - Existing document
@@ -2868,7 +2868,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
         self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
 
-    def test_share_links_permissions_aware(self):
+    def test_share_links_permissions_aware(self) -> None:
         """
         GIVEN:
             - Existing document owned by user2 but with granted view perms for user1
@@ -2909,7 +2909,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         )
         self.assertEqual(resp.status_code, status.HTTP_200_OK)
 
-    def test_next_asn(self):
+    def test_next_asn(self) -> None:
         """
         GIVEN:
             - Existing documents with ASNs, highest owned by user2
@@ -2953,7 +2953,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(resp.status_code, status.HTTP_200_OK)
         self.assertEqual(resp.content, b"1000")
 
-    def test_next_asn_no_documents_with_asn(self):
+    def test_next_asn_no_documents_with_asn(self) -> None:
         """
         GIVEN:
             - Existing document, but with no ASN assigned
@@ -2982,7 +2982,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(resp.status_code, status.HTTP_200_OK)
         self.assertEqual(resp.content, b"1")
 
-    def test_asn_not_unique_with_trashed_doc(self):
+    def test_asn_not_unique_with_trashed_doc(self) -> None:
         """
         GIVEN:
             - Existing document with ASN that is trashed
@@ -3026,7 +3026,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
             },
         )
 
-    def test_remove_inbox_tags(self):
+    def test_remove_inbox_tags(self) -> None:
         """
         GIVEN:
             - Existing document with or without inbox tags
@@ -3096,7 +3096,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         EMAIL_ENABLED=True,
         EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
     )
-    def test_email_document(self):
+    def test_email_document(self) -> None:
         """
         GIVEN:
             - Existing document
@@ -3156,7 +3156,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(mail.outbox[1].attachments[0][0], expected_filename2)
 
     @mock.patch("django.core.mail.message.EmailMessage.send", side_effect=Exception)
-    def test_email_document_errors(self, mocked_send):
+    def test_email_document_errors(self, mocked_send) -> None:
         """
         GIVEN:
             - Existing document
@@ -3242,7 +3242,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
 
     @mock.patch("django_softdelete.models.SoftDeleteModel.delete")
-    def test_warn_on_delete_with_old_uuid_field(self, mocked_delete):
+    def test_warn_on_delete_with_old_uuid_field(self, mocked_delete) -> None:
         """
         GIVEN:
             - Existing document in a (mocked) MariaDB database with an old UUID field
@@ -3272,7 +3272,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 
 
 class TestDocumentApiV2(DirectoriesMixin, APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(username="temp_admin")
@@ -3280,7 +3280,7 @@ class TestDocumentApiV2(DirectoriesMixin, APITestCase):
         self.client.force_authenticate(user=self.user)
         self.client.defaults["HTTP_ACCEPT"] = "application/json; version=2"
 
-    def test_tag_validate_color(self):
+    def test_tag_validate_color(self) -> None:
         self.assertEqual(
             self.client.post(
                 "/api/tags/",
@@ -3323,7 +3323,7 @@ class TestDocumentApiV2(DirectoriesMixin, APITestCase):
             status.HTTP_400_BAD_REQUEST,
         )
 
-    def test_tag_text_color(self):
+    def test_tag_text_color(self) -> None:
         t = Tag.objects.create(name="tag1", color="#000000")
         self.assertEqual(
             self.client.get(f"/api/tags/{t.id}/", format="json").data["text_color"],
@@ -3353,7 +3353,7 @@ class TestDocumentApiV2(DirectoriesMixin, APITestCase):
 
 
 class TestDocumentApiCustomFieldsSorting(DirectoriesMixin, APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(username="temp_admin")
@@ -3377,7 +3377,7 @@ class TestDocumentApiCustomFieldsSorting(DirectoriesMixin, APITestCase):
 
         cache.clear()
 
-    def test_document_custom_fields_sorting(self):
+    def test_document_custom_fields_sorting(self) -> None:
         """
         GIVEN:
             - Documents with custom fields
@@ -3497,7 +3497,7 @@ class TestDocumentApiCustomFieldsSorting(DirectoriesMixin, APITestCase):
                     [self.doc1.id, self.doc3.id, self.doc2.id],
                 )
 
-    def test_document_custom_fields_sorting_invalid(self):
+    def test_document_custom_fields_sorting_invalid(self) -> None:
         """
         GIVEN:
             - Documents with custom fields
@@ -3512,7 +3512,7 @@ class TestDocumentApiCustomFieldsSorting(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_document_custom_fields_sorting_invalid_data_type(self):
+    def test_document_custom_fields_sorting_invalid_data_type(self) -> None:
         """
         GIVEN:
             - Documents with custom fields
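
Among the endpoints touched in the hunks above are the log views. For orientation only, here is a minimal client-side sketch of the behaviour those tests pin down, assuming a local instance at a placeholder URL with a placeholder API token (neither appears in the diff):

import requests

BASE = "http://localhost:8000"  # placeholder base URL, not taken from the diff
HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

# /api/logs/ lists only the log files that actually exist, e.g. ["mail", "paperless"].
log_names = requests.get(f"{BASE}/api/logs/", headers=HEADERS).json()

# /api/logs/paperless/ returns the log split into a list of lines.
all_lines = requests.get(f"{BASE}/api/logs/paperless/", headers=HEADERS).json()

# "limit" keeps only the last N lines; a negative limit is rejected with HTTP 400.
last_two = requests.get(
    f"{BASE}/api/logs/paperless/",
    params={"limit": 2},
    headers=HEADERS,
).json()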
index 884553dba77363142e2a1ba20d72dfe25aea746e..f6f30a2260f603a7b9c84690348314b852f9e62c 100644 (file)
@@ -17,7 +17,7 @@ from documents.tests.utils import SampleDirMixin
 class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
     ENDPOINT = "/api/documents/email/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(username="temp_admin")
@@ -58,7 +58,7 @@ class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
         EMAIL_ENABLED=True,
         EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
     )
-    def test_email_success(self):
+    def test_email_success(self) -> None:
         """
         GIVEN:
             - Multiple existing documents (doc1 with archive, doc2 without)
@@ -113,7 +113,7 @@ class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
         EMAIL_ENABLED=True,
         EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
     )
-    def test_email_use_original_version(self):
+    def test_email_use_original_version(self) -> None:
         """
         GIVEN:
             - Documents with archive versions
@@ -145,7 +145,7 @@ class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
         original_size = self.doc1.source_path.stat().st_size
         self.assertEqual(len(attachment[1]), original_size)
 
-    def test_email_missing_required_fields(self):
+    def test_email_missing_required_fields(self) -> None:
         """
         GIVEN:
             - Request with missing required fields
@@ -210,7 +210,7 @@ class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_email_empty_document_list(self):
+    def test_email_empty_document_list(self) -> None:
         """
         GIVEN:
             - Request with empty document list
@@ -233,7 +233,7 @@ class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_email_invalid_document_id(self):
+    def test_email_invalid_document_id(self) -> None:
         """
         GIVEN:
             - Request with non-existent document ID
@@ -256,7 +256,7 @@ class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_email_invalid_email_address(self):
+    def test_email_invalid_email_address(self) -> None:
         """
         GIVEN:
             - Request with invalid email address
@@ -294,7 +294,7 @@ class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_email_insufficient_permissions(self):
+    def test_email_insufficient_permissions(self) -> None:
         """
         GIVEN:
             - User without permissions to view document
@@ -329,7 +329,7 @@ class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    def test_email_only_requires_view_permission(self):
+    def test_email_only_requires_view_permission(self) -> None:
         """
         GIVEN:
             - User having only view documents permission
@@ -361,7 +361,7 @@ class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
         EMAIL_ENABLED=True,
         EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
     )
-    def test_email_duplicate_filenames(self):
+    def test_email_duplicate_filenames(self) -> None:
         """
         GIVEN:
             - Multiple documents with the same title
@@ -414,7 +414,7 @@ class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
         "django.core.mail.message.EmailMessage.send",
         side_effect=Exception("Email error"),
     )
-    def test_email_send_error(self, mocked_send):
+    def test_email_send_error(self, mocked_send) -> None:
         """
         GIVEN:
             - Existing documents
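
All of the email tests above go through the ENDPOINT declared at the top of this file, /api/documents/email/. A rough request sketch follows; the payload field names are assumptions, since the hunks only show the endpoint and the validation behaviour (missing fields, empty document lists, unknown ids, and invalid addresses all return HTTP 400):

import requests

BASE = "http://localhost:8000"  # placeholder base URL
HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

# Field names below are assumed, not taken from the diff.
payload = {
    "documents": [1, 2],                 # assumed key for the document id list
    "addresses": "someone@example.com",  # assumed key for the recipient(s)
    "subject": "Scanned documents",      # assumed
    "message": "See attached.",          # assumed
}

response = requests.post(f"{BASE}/api/documents/email/", json=payload, headers=HEADERS)
response.raise_for_status()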
index 70d43dfde3c92f5bd5ca4317f0ee9a1060d1c9c0..2f9b5b6774bd68e852b8f2cd0832cbbc48919732 100644 (file)
@@ -29,7 +29,7 @@ class DocumentWrapper:
 
 
 class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(username="temp_admin")
@@ -167,7 +167,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
         reference_predicate: Callable[[DocumentWrapper], bool],
         *,
         match_nothing_ok=False,
-    ):
+    ) -> None:
         """
         Checks the results of the query against a callable reference predicate.
         """
@@ -208,7 +208,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
         ]
         self.assertEqual(reference_document_ids, response_document_ids)
 
-    def _assert_validation_error(self, query: str, path: list, keyword: str):
+    def _assert_validation_error(self, query: str, path: list, keyword: str) -> None:
         """
         Asserts that the query raises a validation error.
         Checks the message to make sure it points to the right place.
@@ -240,7 +240,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
     # ==========================================================#
     # Sanity checks                                             #
     # ==========================================================#
-    def test_name_value_association(self):
+    def test_name_value_association(self) -> None:
         """
         GIVEN:
             - A document with `{"string_field": "https://docs.paperless-ngx.com/",
@@ -256,7 +256,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and document["url_field"] == "https://docs.paperless-ngx.com/",
         )
 
-    def test_filter_by_multiple_fields(self):
+    def test_filter_by_multiple_fields(self) -> None:
         """
         GIVEN:
             - A document with `{"string_field": "https://docs.paperless-ngx.com/",
@@ -274,40 +274,40 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
     # ==========================================================#
     # Basic expressions supported by all custom field types     #
     # ==========================================================#
-    def test_exact(self):
+    def test_exact(self) -> None:
         self._assert_query_match_predicate(
             ["string_field", "exact", "paperless"],
             lambda document: "string_field" in document
             and document["string_field"] == "paperless",
         )
 
-    def test_in(self):
+    def test_in(self) -> None:
         self._assert_query_match_predicate(
             ["string_field", "in", ["paperless", "Paperless"]],
             lambda document: "string_field" in document
             and document["string_field"] in ("paperless", "Paperless"),
         )
 
-    def test_isnull(self):
+    def test_isnull(self) -> None:
         self._assert_query_match_predicate(
             ["string_field", "isnull", True],
             lambda document: "string_field" in document
             and document["string_field"] is None,
         )
 
-    def test_exists(self):
+    def test_exists(self) -> None:
         self._assert_query_match_predicate(
             ["string_field", "exists", True],
             lambda document: "string_field" in document,
         )
 
-    def test_exists_false(self):
+    def test_exists_false(self) -> None:
         self._assert_query_match_predicate(
             ["string_field", "exists", False],
             lambda document: "string_field" not in document,
         )
 
-    def test_select(self):
+    def test_select(self) -> None:
         # For select fields, you can either specify the id of the option
         # or the name of the option. They function exactly the same.
         self._assert_query_match_predicate(
@@ -325,7 +325,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
     # ==========================================================#
     # Expressions for string, URL, and monetary fields          #
     # ==========================================================#
-    def test_icontains(self):
+    def test_icontains(self) -> None:
         self._assert_query_match_predicate(
             ["string_field", "icontains", "aper"],
             lambda document: "string_field" in document
@@ -333,7 +333,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and "aper" in document["string_field"].lower(),
         )
 
-    def test_istartswith(self):
+    def test_istartswith(self) -> None:
         self._assert_query_match_predicate(
             ["string_field", "istartswith", "paper"],
             lambda document: "string_field" in document
@@ -341,7 +341,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and document["string_field"].lower().startswith("paper"),
         )
 
-    def test_iendswith(self):
+    def test_iendswith(self) -> None:
         self._assert_query_match_predicate(
             ["string_field", "iendswith", "less"],
             lambda document: "string_field" in document
@@ -349,7 +349,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and document["string_field"].lower().endswith("less"),
         )
 
-    def test_url_field_istartswith(self):
+    def test_url_field_istartswith(self) -> None:
         # URL fields support all of the expressions above.
         # Just showing one of them here.
         self._assert_query_match_predicate(
@@ -362,7 +362,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
     # ==========================================================#
     # Arithmetic comparisons                                    #
     # ==========================================================#
-    def test_gt(self):
+    def test_gt(self) -> None:
         self._assert_query_match_predicate(
             ["date_field", "gt", date(2024, 8, 22).isoformat()],
             lambda document: "date_field" in document
@@ -370,7 +370,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and document["date_field"] > date(2024, 8, 22),
         )
 
-    def test_gte(self):
+    def test_gte(self) -> None:
         self._assert_query_match_predicate(
             ["date_field", "gte", date(2024, 8, 22).isoformat()],
             lambda document: "date_field" in document
@@ -378,7 +378,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and document["date_field"] >= date(2024, 8, 22),
         )
 
-    def test_lt(self):
+    def test_lt(self) -> None:
         self._assert_query_match_predicate(
             ["integer_field", "lt", 0],
             lambda document: "integer_field" in document
@@ -386,7 +386,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and document["integer_field"] < 0,
         )
 
-    def test_lte(self):
+    def test_lte(self) -> None:
         self._assert_query_match_predicate(
             ["integer_field", "lte", 0],
             lambda document: "integer_field" in document
@@ -394,7 +394,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and document["integer_field"] <= 0,
         )
 
-    def test_range(self):
+    def test_range(self) -> None:
         self._assert_query_match_predicate(
             ["float_field", "range", [-0.05, 0.05]],
             lambda document: "float_field" in document
@@ -402,7 +402,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and -0.05 <= document["float_field"] <= 0.05,
         )
 
-    def test_date_modifier(self):
+    def test_date_modifier(self) -> None:
         # For date fields you can optionally prefix the operator
         # with the part of the date you are comparing with.
         self._assert_query_match_predicate(
@@ -412,7 +412,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and document["date_field"].year >= 2024,
         )
 
-    def test_gt_monetary(self):
+    def test_gt_monetary(self) -> None:
         self._assert_query_match_predicate(
             ["monetary_field", "gt", "99"],
             lambda document: "monetary_field" in document
@@ -426,7 +426,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
     # ==========================================================#
     # Subset check (document link field only)                   #
     # ==========================================================#
-    def test_document_link_contains(self):
+    def test_document_link_contains(self) -> None:
         # Document link field "contains" performs a subset check.
         self._assert_query_match_predicate(
             ["documentlink_field", "contains", [1, 2]],
@@ -442,7 +442,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and set(document["documentlink_field"]) >= {1, 2},
         )
 
-    def test_document_link_contains_empty_set(self):
+    def test_document_link_contains_empty_set(self) -> None:
         # An empty set is a subset of any set.
         self._assert_query_match_predicate(
             ["documentlink_field", "contains", []],
@@ -450,7 +450,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and document["documentlink_field"] is not None,
         )
 
-    def test_document_link_contains_no_reverse_link(self):
+    def test_document_link_contains_no_reverse_link(self) -> None:
         # An edge case is that the document in the value list
         # doesn't have a document link field and thus has no reverse link.
         self._assert_query_match_predicate(
@@ -464,7 +464,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
     # ==========================================================#
     # Logical expressions                                       #
     # ==========================================================#
-    def test_logical_and(self):
+    def test_logical_and(self) -> None:
         self._assert_query_match_predicate(
             [
                 "AND",
@@ -476,7 +476,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and document["date_field"].month < 9,
         )
 
-    def test_logical_or(self):
+    def test_logical_or(self) -> None:
         # This is also the recommended way to check for "empty" text, URL, and monetary fields.
         self._assert_query_match_predicate(
             [
@@ -487,7 +487,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             and not bool(document["string_field"]),
         )
 
-    def test_logical_not(self):
+    def test_logical_not(self) -> None:
         # This means `NOT ((document has string_field) AND (string_field iexact "paperless"))`,
         # not `(document has string_field) AND (NOT (string_field iexact "paperless"))`!
         self._assert_query_match_predicate(
@@ -504,63 +504,63 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
     # Tests for invalid queries                                 #
     # ==========================================================#
 
-    def test_invalid_json(self):
+    def test_invalid_json(self) -> None:
         self._assert_validation_error(
             "not valid json",
             ["custom_field_query"],
             "must be valid JSON",
         )
 
-    def test_invalid_expression(self):
+    def test_invalid_expression(self) -> None:
         self._assert_validation_error(
             json.dumps("valid json but not valid expr"),
             ["custom_field_query"],
             "Invalid custom field query expression",
         )
 
-    def test_invalid_custom_field_name(self):
+    def test_invalid_custom_field_name(self) -> None:
         self._assert_validation_error(
             json.dumps(["invalid name", "iexact", "foo"]),
             ["custom_field_query", "0"],
             "is not a valid custom field",
         )
 
-    def test_invalid_operator(self):
+    def test_invalid_operator(self) -> None:
         self._assert_validation_error(
             json.dumps(["integer_field", "iexact", "foo"]),
             ["custom_field_query", "1"],
             "does not support query expr",
         )
 
-    def test_invalid_value(self):
+    def test_invalid_value(self) -> None:
         self._assert_validation_error(
             json.dumps(["select_field", "exact", []]),
             ["custom_field_query", "2"],
             "string",
         )
 
-    def test_invalid_logical_operator(self):
+    def test_invalid_logical_operator(self) -> None:
         self._assert_validation_error(
             json.dumps(["invalid op", ["integer_field", "gt", 0]]),
             ["custom_field_query", "0"],
             "Invalid logical operator",
         )
 
-    def test_invalid_expr_list(self):
+    def test_invalid_expr_list(self) -> None:
         self._assert_validation_error(
             json.dumps(["AND", "not a list"]),
             ["custom_field_query", "1"],
             "Invalid expression list",
         )
 
-    def test_invalid_operator_prefix(self):
+    def test_invalid_operator_prefix(self) -> None:
         self._assert_validation_error(
             json.dumps(["integer_field", "foo__gt", 0]),
             ["custom_field_query", "1"],
             "does not support query expr",
         )
 
-    def test_query_too_deep(self):
+    def test_query_too_deep(self) -> None:
         query = ["string_field", "exact", "paperless"]
         for _ in range(10):
             query = ["NOT", query]
@@ -570,7 +570,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
             "Maximum nesting depth exceeded",
         )
 
-    def test_query_too_many_atoms(self):
+    def test_query_too_many_atoms(self) -> None:
         atom = ["string_field", "exact", "paperless"]
         query = ["AND", [atom for _ in range(21)]]
         self._assert_validation_error(
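
Taken together, the tests above describe a small query language for filtering by custom fields: an atom is ["<field name>", "<operator>", <value>], atoms combine with ["AND", [...]], ["OR", [...]] and ["NOT", expr], and the whole expression is capped in nesting depth and atom count. A client-side sketch of sending such a query, assuming it travels as the custom_field_query parameter of the documents list endpoint (the validation error paths above point at that name) and using placeholder connection details:

import json
import requests

BASE = "http://localhost:8000"  # placeholder base URL
HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

# An atom: ["<custom field name>", "<operator>", <value>].
has_docs_url = ["url_field", "istartswith", "https://docs.paperless-ngx.com"]

# Logical operators wrap a list of expressions (AND/OR) or a single expression (NOT).
query = [
    "AND",
    [
        ["string_field", "icontains", "paper"],
        ["NOT", ["string_field", "exact", "paperless"]],
        has_docs_url,
    ],
]

# The expression must be valid JSON, so it is serialised before being sent.
response = requests.get(
    f"{BASE}/api/documents/",
    params={"custom_field_query": json.dumps(query)},
    headers=HEADERS,
)
response.raise_for_status()
matching_ids = [doc["id"] for doc in response.json()["results"]]

As the last two tests check, a query wrapped in ten levels of NOT is rejected for exceeding the maximum nesting depth, and an AND over 21 atoms is rejected for containing too many atoms.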
index 0eb99f0232864bb2ccd801191a184fc66ac78bb3..867c10a1727c548ed154560ba9f33f5801a387a7 100644 (file)
@@ -34,7 +34,7 @@ class TestApiObjects(DirectoriesMixin, APITestCase):
         self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{title}")
         self.sp2 = StoragePath.objects.create(name="sp2", path="Something2/{title}")
 
-    def test_object_filters(self):
+    def test_object_filters(self) -> None:
         response = self.client.get(
             f"/api/tags/?id={self.tag2.id}",
         )
@@ -91,7 +91,7 @@ class TestApiObjects(DirectoriesMixin, APITestCase):
         results = response.data["results"]
         self.assertEqual(len(results), 2)
 
-    def test_correspondent_last_correspondence(self):
+    def test_correspondent_last_correspondence(self) -> None:
         """
         GIVEN:
             - Correspondent with documents
@@ -154,7 +154,7 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
 
         self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{checksum}")
 
-    def test_api_get_storage_path(self):
+    def test_api_get_storage_path(self) -> None:
         """
         GIVEN:
             - API request to get all storage paths
@@ -172,7 +172,7 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
         self.assertEqual(resp_storage_path["id"], self.sp1.id)
         self.assertEqual(resp_storage_path["path"], self.sp1.path)
 
-    def test_api_create_storage_path(self):
+    def test_api_create_storage_path(self) -> None:
         """
         GIVEN:
             - API request to create a storage path
@@ -195,7 +195,7 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         self.assertEqual(StoragePath.objects.count(), 2)
 
-    def test_api_create_invalid_storage_path(self):
+    def test_api_create_invalid_storage_path(self) -> None:
         """
         GIVEN:
             - API request to create a storage path
@@ -219,7 +219,7 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertEqual(StoragePath.objects.count(), 1)
 
-    def test_api_create_storage_path_rejects_traversal(self):
+    def test_api_create_storage_path_rejects_traversal(self) -> None:
         """
         GIVEN:
             - API request to create a storage path
@@ -243,7 +243,7 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertEqual(StoragePath.objects.count(), 1)
 
-    def test_api_storage_path_placeholders(self):
+    def test_api_storage_path_placeholders(self) -> None:
         """
         GIVEN:
             - API request to create a storage path with placeholders
@@ -273,7 +273,7 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
         self.assertEqual(StoragePath.objects.count(), 2)
 
     @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
-    def test_api_update_storage_path(self, bulk_update_mock):
+    def test_api_update_storage_path(self, bulk_update_mock) -> None:
         """
         GIVEN:
             - API request to update an existing storage path
@@ -302,7 +302,7 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
         self.assertCountEqual([document.pk], args[0])
 
     @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
-    def test_api_delete_storage_path(self, bulk_update_mock):
+    def test_api_delete_storage_path(self, bulk_update_mock) -> None:
         """
         GIVEN:
             - API request to delete a storage path
@@ -330,7 +330,7 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
         # only called once
         bulk_update_mock.assert_called_once_with([document.pk])
 
-    def test_test_storage_path(self):
+    def test_test_storage_path(self) -> None:
         """
         GIVEN:
             - API request to test a storage path
@@ -359,7 +359,7 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(response.data, "path/Something")
 
-    def test_test_storage_path_respects_none_placeholder_setting(self):
+    def test_test_storage_path_respects_none_placeholder_setting(self) -> None:
         """
         GIVEN:
             - A storage path template referencing an empty field
@@ -401,7 +401,7 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
 
 class TestBulkEditObjects(APITestCase):
     # See test_api_permissions.py for bulk tests on permissions
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.temp_admin = User.objects.create_superuser(username="temp_admin")
@@ -416,7 +416,7 @@ class TestBulkEditObjects(APITestCase):
         self.user2 = User.objects.create(username="user2")
         self.user3 = User.objects.create(username="user3")
 
-    def test_bulk_objects_delete(self):
+    def test_bulk_objects_delete(self) -> None:
         """
         GIVEN:
             - Existing objects
@@ -485,7 +485,7 @@ class TestBulkEditObjects(APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(StoragePath.objects.count(), 0)
 
-    def test_bulk_edit_object_permissions_insufficient_global_perms(self):
+    def test_bulk_edit_object_permissions_insufficient_global_perms(self) -> None:
         """
         GIVEN:
             - Existing objects, user does not have global delete permissions
@@ -511,7 +511,7 @@ class TestBulkEditObjects(APITestCase):
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
         self.assertEqual(response.content, b"Insufficient permissions")
 
-    def test_bulk_edit_object_permissions_sufficient_global_perms(self):
+    def test_bulk_edit_object_permissions_sufficient_global_perms(self) -> None:
         """
         GIVEN:
             - Existing objects, user does have global delete permissions
@@ -540,7 +540,7 @@ class TestBulkEditObjects(APITestCase):
 
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
-    def test_bulk_edit_object_permissions_insufficient_object_perms(self):
+    def test_bulk_edit_object_permissions_insufficient_object_perms(self) -> None:
         """
         GIVEN:
             - Objects owned by user other than logged in user
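
The storage path tests above revolve around path templates with placeholders such as {title} and {checksum}: templates with invalid placeholders or path traversal are rejected, and updating or deleting a path triggers a bulk re-filing of the affected documents. A minimal creation sketch, with the endpoint name and connection details assumed rather than taken from these hunks:

import requests

BASE = "http://localhost:8000"  # placeholder base URL
HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

# {title} is expanded per document when files are renamed or moved.
payload = {"name": "By title", "path": "Something/{title}"}

# Endpoint name assumed; only the serializer behaviour is visible in the diff.
response = requests.post(f"{BASE}/api/storage_paths/", json=payload, headers=HEADERS)
assert response.status_code == 201  # an invalid template or "../" would return 400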
index 31b8607452552432696d87ccafe87510ecf9a6b8..ada93033b369d21e1369db95a1280b75505e6fdb 100644 (file)
@@ -23,7 +23,7 @@ from documents.tests.utils import DirectoriesMixin
 
 
 class TestApiAuth(DirectoriesMixin, APITestCase):
-    def test_auth_required(self):
+    def test_auth_required(self) -> None:
         d = Document.objects.create(title="Test")
 
         self.assertEqual(
@@ -87,19 +87,19 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
             status.HTTP_401_UNAUTHORIZED,
         )
 
-    def test_api_version_no_auth(self):
+    def test_api_version_no_auth(self) -> None:
         response = self.client.get("/api/documents/")
         self.assertNotIn("X-Api-Version", response)
         self.assertNotIn("X-Version", response)
 
-    def test_api_version_with_auth(self):
+    def test_api_version_with_auth(self) -> None:
         user = User.objects.create_superuser(username="test")
         self.client.force_authenticate(user)
         response = self.client.get("/api/documents/")
         self.assertIn("X-Api-Version", response)
         self.assertIn("X-Version", response)
 
-    def test_api_insufficient_permissions(self):
+    def test_api_insufficient_permissions(self) -> None:
         user = User.objects.create_user(username="test")
         self.client.force_authenticate(user)
 
@@ -132,7 +132,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
             status.HTTP_403_FORBIDDEN,
         )
 
-    def test_api_sufficient_permissions(self):
+    def test_api_sufficient_permissions(self) -> None:
         user = User.objects.create_user(username="test")
         user.user_permissions.add(*Permission.objects.all())
         user.is_staff = True
@@ -161,7 +161,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
             status.HTTP_200_OK,
         )
 
-    def test_api_get_object_permissions(self):
+    def test_api_get_object_permissions(self) -> None:
         user1 = User.objects.create_user(username="test1")
         user2 = User.objects.create_user(username="test2")
         user1.user_permissions.add(*Permission.objects.filter(codename="view_document"))
@@ -192,7 +192,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
             status.HTTP_404_NOT_FOUND,
         )
 
-    def test_api_default_owner(self):
+    def test_api_default_owner(self) -> None:
         """
         GIVEN:
             - API request to create an object (Tag)
@@ -221,7 +221,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
         tag1 = Tag.objects.filter(name="test1").first()
         self.assertEqual(tag1.owner, user1)
 
-    def test_api_set_no_owner(self):
+    def test_api_set_no_owner(self) -> None:
         """
         GIVEN:
             - API request to create an object (Tag)
@@ -251,7 +251,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
         tag1 = Tag.objects.filter(name="test1").first()
         self.assertEqual(tag1.owner, None)
 
-    def test_api_set_owner_w_permissions(self):
+    def test_api_set_owner_w_permissions(self) -> None:
         """
         GIVEN:
             - API request to create an object (Tag) that supplies a set_permissions object
@@ -299,7 +299,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
         self.assertEqual(checker.has_perm("view_tag", tag1), True)
         self.assertIn("view_tag", get_perms(group1, tag1))
 
-    def test_api_set_other_owner_w_permissions(self):
+    def test_api_set_other_owner_w_permissions(self) -> None:
         """
         GIVEN:
             - API request to create an object (Tag)
@@ -344,7 +344,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
         self.assertEqual(tag1.owner, user2)
         self.assertIn("view_tag", get_perms(group1, tag1))
 
-    def test_api_set_doc_permissions(self):
+    def test_api_set_doc_permissions(self) -> None:
         """
         GIVEN:
             - API request to update doc permissions and owner
@@ -395,7 +395,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
         self.assertTrue(checker.has_perm("view_document", doc))
         self.assertIn("view_document", get_perms(group1, doc))
 
-    def test_patch_doesnt_remove_permissions(self):
+    def test_patch_doesnt_remove_permissions(self) -> None:
         """
         GIVEN:
             - existing document with permissions set
@@ -441,7 +441,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
         self.assertTrue(checker.has_perm("change_document", doc))
         self.assertIn("change_document", get_perms(group1, doc))
 
-    def test_document_permissions_change_requires_owner(self):
+    def test_document_permissions_change_requires_owner(self) -> None:
         owner = User.objects.create_user(username="owner")
         editor = User.objects.create_user(username="editor")
         editor.user_permissions.add(
@@ -494,7 +494,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
 
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    def test_dynamic_permissions_fields(self):
+    def test_dynamic_permissions_fields(self) -> None:
         user1 = User.objects.create_user(username="user1")
         user1.user_permissions.add(*Permission.objects.filter(codename="view_document"))
         user2 = User.objects.create_user(username="user2")
@@ -565,7 +565,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
         self.assertNotIn("is_shared_by_requester", results[0])
 
     @mock.patch("allauth.mfa.adapter.DefaultMFAAdapter.is_mfa_enabled")
-    def test_basic_auth_mfa_enabled(self, mock_is_mfa_enabled):
+    def test_basic_auth_mfa_enabled(self, mock_is_mfa_enabled) -> None:
         """
         GIVEN:
             - User with MFA enabled
@@ -589,7 +589,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
         self.assertEqual(response.data["detail"], "MFA required")
 
     @mock.patch("allauth.mfa.totp.internal.auth.TOTP.validate_code")
-    def test_get_token_mfa_enabled(self, mock_validate_code):
+    def test_get_token_mfa_enabled(self, mock_validate_code) -> None:
         """
         GIVEN:
             - User with MFA enabled
@@ -657,13 +657,13 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
 class TestApiUser(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/users/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(username="temp_admin")
         self.client.force_authenticate(user=self.user)
 
-    def test_get_users(self):
+    def test_get_users(self) -> None:
         """
         GIVEN:
             - Configured users
@@ -691,7 +691,7 @@ class TestApiUser(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_user2["first_name"], user1.first_name)
         self.assertEqual(returned_user2["last_name"], user1.last_name)
 
-    def test_create_user(self):
+    def test_create_user(self) -> None:
         """
         WHEN:
             - API request is made to add a user account
@@ -719,7 +719,7 @@ class TestApiUser(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_user1.first_name, user1["first_name"])
         self.assertEqual(returned_user1.last_name, user1["last_name"])
 
-    def test_delete_user(self):
+    def test_delete_user(self) -> None:
         """
         GIVEN:
             - Existing user account
@@ -746,7 +746,7 @@ class TestApiUser(DirectoriesMixin, APITestCase):
 
         self.assertEqual(User.objects.count(), nUsers - 1)
 
-    def test_update_user(self):
+    def test_update_user(self) -> None:
         """
         GIVEN:
             - Existing user accounts
@@ -793,7 +793,7 @@ class TestApiUser(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_user2.first_name, "Updated Name 2")
         self.assertNotEqual(returned_user2.password, initial_password)
 
-    def test_deactivate_totp(self):
+    def test_deactivate_totp(self) -> None:
         """
         GIVEN:
             - Existing user account with TOTP enabled
@@ -846,7 +846,7 @@ class TestApiUser(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    def test_only_superusers_can_create_or_alter_superuser_status(self):
+    def test_only_superusers_can_create_or_alter_superuser_status(self) -> None:
         """
         GIVEN:
             - Existing user account
@@ -924,13 +924,13 @@ class TestApiUser(DirectoriesMixin, APITestCase):
 class TestApiGroup(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/groups/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(username="temp_admin")
         self.client.force_authenticate(user=self.user)
 
-    def test_get_groups(self):
+    def test_get_groups(self) -> None:
         """
         GIVEN:
             - Configured groups
@@ -952,7 +952,7 @@ class TestApiGroup(DirectoriesMixin, APITestCase):
 
         self.assertEqual(returned_group1["name"], group1.name)
 
-    def test_create_group(self):
+    def test_create_group(self) -> None:
         """
         WHEN:
             - API request is made to add a group
@@ -975,7 +975,7 @@ class TestApiGroup(DirectoriesMixin, APITestCase):
 
         self.assertEqual(returned_group1.name, group1["name"])
 
-    def test_delete_group(self):
+    def test_delete_group(self) -> None:
         """
         GIVEN:
             - Existing group
@@ -997,7 +997,7 @@ class TestApiGroup(DirectoriesMixin, APITestCase):
 
         self.assertEqual(len(Group.objects.all()), 0)
 
-    def test_update_group(self):
+    def test_update_group(self) -> None:
         """
         GIVEN:
             - Existing groups
@@ -1025,7 +1025,7 @@ class TestApiGroup(DirectoriesMixin, APITestCase):
 
 
 class TestBulkEditObjectPermissions(APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.temp_admin = User.objects.create_superuser(username="temp_admin")
@@ -1040,7 +1040,7 @@ class TestBulkEditObjectPermissions(APITestCase):
         self.user2 = User.objects.create(username="user2")
         self.user3 = User.objects.create(username="user3")
 
-    def test_bulk_object_set_permissions(self):
+    def test_bulk_object_set_permissions(self) -> None:
         """
         GIVEN:
             - Existing objects
@@ -1156,7 +1156,7 @@ class TestBulkEditObjectPermissions(APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(StoragePath.objects.get(pk=self.sp1.id).owner, self.user3)
 
-    def test_bulk_object_set_permissions_merge(self):
+    def test_bulk_object_set_permissions_merge(self) -> None:
         """
         GIVEN:
             - Existing objects
@@ -1231,7 +1231,7 @@ class TestBulkEditObjectPermissions(APITestCase):
         # user3 should be removed
         self.assertNotIn(self.user3, get_users_with_perms(self.t1))
 
-    def test_bulk_edit_object_permissions_insufficient_perms(self):
+    def test_bulk_edit_object_permissions_insufficient_perms(self) -> None:
         """
         GIVEN:
             - Objects owned by user other than logged in user
@@ -1260,7 +1260,7 @@ class TestBulkEditObjectPermissions(APITestCase):
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
         self.assertEqual(response.content, b"Insufficient permissions")
 
-    def test_bulk_edit_object_permissions_validation(self):
+    def test_bulk_edit_object_permissions_validation(self) -> None:
         """
         GIVEN:
             - Existing objects
@@ -1334,12 +1334,12 @@ class TestBulkEditObjectPermissions(APITestCase):
 
 
 class TestFullPermissionsFlag(APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.admin = User.objects.create_superuser(username="admin")
 
-    def test_full_perms_flag(self):
+    def test_full_perms_flag(self) -> None:
         """
         GIVEN:
             - API request to list documents
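
The permissions tests above show that objects carry an owner plus per-action view/change grants for users and groups, that PATCHing other fields leaves existing grants untouched, and that only the owner (or a superuser) may change them. A sketch of granting view access on a document follows; the exact shape of the set_permissions object is assumed, since the hunks only assert the resulting guardian permissions:

import requests

BASE = "http://localhost:8000"  # placeholder base URL
HEADERS = {"Authorization": "Token 0123456789abcdef"}  # placeholder token

# Assumed layout: per-action id lists for users and groups.
payload = {
    "owner": 2,  # placeholder user id
    "set_permissions": {
        "view": {"users": [3], "groups": [1]},
        "change": {"users": [], "groups": []},
    },
}

response = requests.patch(f"{BASE}/api/documents/42/", json=payload, headers=HEADERS)
assert response.status_code == 200  # a non-owner would get HTTP 403 instead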
index 2eedf32971db8b533191f5ef53efcf6473123122..d642f52d88dda2d60bbc12bfeb4feeb633f26b72 100644 (file)
@@ -29,7 +29,7 @@ class MockOpenIDProvider:
 
 # see allauth.socialaccount.providers.openid_connect.provider.OpenIDConnectProviderAccount
 class MockOpenIDConnectProviderAccount:
-    def __init__(self, mock_social_account_dict):
+    def __init__(self, mock_social_account_dict) -> None:
         self.account = mock_social_account_dict
 
     def to_str(self):
@@ -41,7 +41,7 @@ class MockOpenIDConnectProvider:
     id = "openid_connect"
     name = "OpenID Connect"
 
-    def __init__(self, app=None):
+    def __init__(self, app=None) -> None:
         self.app = app
         self.name = app.name
 
@@ -52,7 +52,7 @@ class MockOpenIDConnectProvider:
 class TestApiProfile(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/profile/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(
@@ -62,7 +62,7 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
         )
         self.client.force_authenticate(user=self.user)
 
-    def setupSocialAccount(self):
+    def setupSocialAccount(self) -> None:
         SocialApp.objects.create(
             name="Keycloak",
             provider="openid_connect",
@@ -73,7 +73,7 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
             bulk=False,
         )
 
-    def test_get_profile(self):
+    def test_get_profile(self) -> None:
         """
         GIVEN:
             - Configured user
@@ -96,7 +96,11 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
     @mock.patch(
         "allauth.socialaccount.adapter.DefaultSocialAccountAdapter.list_providers",
     )
-    def test_get_profile_w_social(self, mock_list_providers, mock_get_provider_account):
+    def test_get_profile_w_social(
+        self,
+        mock_list_providers,
+        mock_get_provider_account,
+    ) -> None:
         """
         GIVEN:
             - Configured user and setup social account
@@ -136,7 +140,7 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
             ],
         )
 
-    def test_profile_w_social_removed_app(self):
+    def test_profile_w_social_removed_app(self) -> None:
         """
         GIVEN:
             - Configured user and setup social account
@@ -166,7 +170,7 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
             ],
         )
 
-    def test_update_profile(self):
+    def test_update_profile(self) -> None:
         """
         GIVEN:
             - Configured user
@@ -192,7 +196,7 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
         self.assertEqual(user.first_name, user_data["first_name"])
         self.assertEqual(user.last_name, user_data["last_name"])
 
-    def test_update_profile_invalid_password_returns_field_error(self):
+    def test_update_profile_invalid_password_returns_field_error(self) -> None:
         """
         GIVEN:
             - Configured user
@@ -220,7 +224,7 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
             ),
         )
 
-    def test_update_profile_placeholder_password_skips_validation(self):
+    def test_update_profile_placeholder_password_skips_validation(self) -> None:
         """
         GIVEN:
             - Configured user with existing password
@@ -251,7 +255,7 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
         self.assertEqual(user.first_name, user_data["first_name"])
         self.assertEqual(user.last_name, user_data["last_name"])
 
-    def test_update_auth_token(self):
+    def test_update_auth_token(self) -> None:
         """
         GIVEN:
             - Configured user
@@ -274,7 +278,7 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
 
         self.assertNotEqual(token1.key, token2.key)
 
-    def test_profile_not_logged_in(self):
+    def test_profile_not_logged_in(self) -> None:
         """
         GIVEN:
             - User not logged in
@@ -298,7 +302,7 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
     def test_get_social_account_providers(
         self,
         mock_list_providers,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Configured user
@@ -333,7 +337,7 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
     def test_get_social_account_providers_openid(
         self,
         mock_list_providers,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Configured user and openid social account provider
@@ -355,7 +359,7 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
             2,
         )
 
-    def test_disconnect_social_account(self):
+    def test_disconnect_social_account(self) -> None:
         """
         GIVEN:
             - Configured user
@@ -394,13 +398,13 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
 class TestApiTOTPViews(APITestCase):
     ENDPOINT = "/api/profile/totp/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(username="temp_admin")
         self.client.force_authenticate(user=self.user)
 
-    def test_get_totp(self):
+    def test_get_totp(self) -> None:
         """
         GIVEN:
             - Existing user account
@@ -418,7 +422,7 @@ class TestApiTOTPViews(APITestCase):
         self.assertIn("secret", response.data)
 
     @mock.patch("allauth.mfa.totp.internal.auth.validate_totp_code")
-    def test_activate_totp(self, mock_validate_totp_code):
+    def test_activate_totp(self, mock_validate_totp_code) -> None:
         """
         GIVEN:
             - Existing user account
@@ -441,7 +445,7 @@ class TestApiTOTPViews(APITestCase):
         self.assertTrue(Authenticator.objects.filter(user=self.user).exists())
         self.assertIn("recovery_codes", response.data)
 
-    def test_deactivate_totp(self):
+    def test_deactivate_totp(self) -> None:
         """
         GIVEN:
             - Existing user account with TOTP enabled
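Every hunk in this commit follows the same mechanical pattern: test fixtures (`setUp`, `tearDown`) and test methods that return nothing gain an explicit `-> None` annotation, and signatures that grow too long are reflowed across multiple lines. A minimal sketch of what the annotation buys under a strict mypy setup (assuming an option such as `disallow_untyped_defs` is enabled for the project, which this diff itself does not show):

import unittest


class ExampleTest(unittest.TestCase):
    def setUp(self) -> None:
        # With `-> None`, mypy treats this as a typed function and checks the body;
        # a fully unannotated def is skipped under default settings and rejected
        # when `disallow_untyped_defs` is on.
        self.value: int = 1

    def test_value(self) -> None:
        self.assertEqual(self.value, 1)


if __name__ == "__main__":
    unittest.main()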
diff --git a/src/documents/tests/test_api_remote_version.py b/src/documents/tests/test_api_remote_version.py
index 9ade7d2c36324b650ab472d5f53f588ac7ed7829..22dd390093476fc485ebc6987c27dca57a40c156 100644
@@ -9,14 +9,14 @@ from paperless import version
 class TestApiRemoteVersion:
     ENDPOINT = "/api/remote_version/"
 
-    def setup_method(self):
+    def setup_method(self) -> None:
         cache.clear()
 
     def test_remote_version_enabled_no_update_prefix(
         self,
         rest_api_client: APIClient,
         httpx_mock: HTTPXMock,
-    ):
+    ) -> None:
         httpx_mock.add_response(
             url="https://api.github.com/repos/paperless-ngx/paperless-ngx/releases/latest",
             json={"tag_name": "ngx-1.6.0"},
@@ -35,7 +35,7 @@ class TestApiRemoteVersion:
         self,
         rest_api_client: APIClient,
         httpx_mock: HTTPXMock,
-    ):
+    ) -> None:
         httpx_mock.add_response(
             url="https://api.github.com/repos/paperless-ngx/paperless-ngx/releases/latest",
             json={"tag_name": version.__full_version_str__},
@@ -54,7 +54,7 @@ class TestApiRemoteVersion:
         self,
         rest_api_client: APIClient,
         httpx_mock: HTTPXMock,
-    ):
+    ) -> None:
         new_version = (
             version.__version__[0],
             version.__version__[1],
@@ -80,7 +80,7 @@ class TestApiRemoteVersion:
         self,
         rest_api_client: APIClient,
         httpx_mock: HTTPXMock,
-    ):
+    ) -> None:
         httpx_mock.add_response(
             content=b'{ "blah":',
             headers={"Content-Type": "application/json"},
@@ -99,7 +99,7 @@ class TestApiRemoteVersion:
         self,
         rest_api_client: APIClient,
         httpx_mock: HTTPXMock,
-    ):
+    ) -> None:
         httpx_mock.add_response(status_code=503)
 
         response = rest_api_client.get(self.ENDPOINT)
diff --git a/src/documents/tests/test_api_schema.py b/src/documents/tests/test_api_schema.py
index fc2e0fdf306259fc7a0efe756797565ebe72f31b..24d44fbc0c90c7f19eea068a7f3e406c3585bcc7 100644
@@ -7,7 +7,7 @@ from rest_framework.test import APITestCase
 class TestApiSchema(APITestCase):
     ENDPOINT = "/api/schema/"
 
-    def test_valid_schema(self):
+    def test_valid_schema(self) -> None:
         """
         Test that the schema is valid
         """
@@ -16,7 +16,7 @@ class TestApiSchema(APITestCase):
         except CommandError as e:
             self.fail(f"Schema validation failed: {e}")
 
-    def test_get_schema_endpoints(self):
+    def test_get_schema_endpoints(self) -> None:
         """
         Test that the schema endpoints exist and return a 200 status code
         """
diff --git a/src/documents/tests/test_api_search.py b/src/documents/tests/test_api_search.py
index 19138172146f8190612de442e4fc6cc4202e42be..2aa3f1ae7ccb6af531b7a06e9debd9dd995b639c 100644
@@ -31,13 +31,13 @@ from paperless_mail.models import MailRule
 
 
 class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(username="temp_admin")
         self.client.force_authenticate(user=self.user)
 
-    def test_search(self):
+    def test_search(self) -> None:
         d1 = Document.objects.create(
             title="invoice",
             content="the thing i bought at a shop and paid with bank account",
@@ -89,7 +89,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         self.assertEqual(len(results), 0)
         self.assertCountEqual(response.data["all"], [])
 
-    def test_search_custom_field_ordering(self):
+    def test_search_custom_field_ordering(self) -> None:
         custom_field = CustomField.objects.create(
             name="Sortable field",
             data_type=CustomField.FieldDataType.INT,
@@ -148,7 +148,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
             [d1.id, d3.id, d2.id],
         )
 
-    def test_search_multi_page(self):
+    def test_search_multi_page(self) -> None:
         with AsyncWriter(index.open_index()) as writer:
             for i in range(55):
                 doc = Document.objects.create(
@@ -183,7 +183,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
             self.assertNotIn(result["id"], seen_ids)
             seen_ids.append(result["id"])
 
-    def test_search_invalid_page(self):
+    def test_search_invalid_page(self) -> None:
         with AsyncWriter(index.open_index()) as writer:
             for i in range(15):
                 doc = Document.objects.create(
@@ -202,7 +202,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
     @override_settings(
         TIME_ZONE="UTC",
     )
-    def test_search_added_in_last_week(self):
+    def test_search_added_in_last_week(self) -> None:
         """
         GIVEN:
             - Three documents added right now
@@ -254,7 +254,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
     @override_settings(
         TIME_ZONE="America/Chicago",
     )
-    def test_search_added_in_last_week_with_timezone_behind(self):
+    def test_search_added_in_last_week_with_timezone_behind(self) -> None:
         """
         GIVEN:
             - Two documents added right now
@@ -306,7 +306,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
     @override_settings(
         TIME_ZONE="Europe/Sofia",
     )
-    def test_search_added_in_last_week_with_timezone_ahead(self):
+    def test_search_added_in_last_week_with_timezone_ahead(self) -> None:
         """
         GIVEN:
             - Two documents added right now
@@ -355,7 +355,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
             # Assert subset in results
             self.assertDictEqual(result, {**result, **subset})
 
-    def test_search_added_in_last_month(self):
+    def test_search_added_in_last_month(self) -> None:
         """
         GIVEN:
             - One document added right now
@@ -410,7 +410,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
     @override_settings(
         TIME_ZONE="America/Denver",
     )
-    def test_search_added_in_last_month_timezone_behind(self):
+    def test_search_added_in_last_month_timezone_behind(self) -> None:
         """
         GIVEN:
             - One document added right now
@@ -466,7 +466,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
     @override_settings(
         TIME_ZONE="Europe/Sofia",
     )
-    def test_search_added_specific_date_with_timezone_ahead(self):
+    def test_search_added_specific_date_with_timezone_ahead(self) -> None:
         """
         GIVEN:
             - Two documents added right now
@@ -519,7 +519,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
             # Assert subset in results
             self.assertDictEqual(result, {**result, **subset})
 
-    def test_search_added_invalid_date(self):
+    def test_search_added_invalid_date(self) -> None:
         """
         GIVEN:
             - One document added right now
@@ -545,7 +545,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         self.assertEqual(len(results), 0)
 
     @mock.patch("documents.index.autocomplete")
-    def test_search_autocomplete_limits(self, m):
+    def test_search_autocomplete_limits(self, m) -> None:
         """
         GIVEN:
             - No pre-conditions
@@ -576,7 +576,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(len(response.data), 10)
 
-    def test_search_autocomplete_respect_permissions(self):
+    def test_search_autocomplete_respect_permissions(self) -> None:
         """
         GIVEN:
             - Multiple users and documents with & without permissions
@@ -636,7 +636,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(response.data, [b"apples", b"applebaum", b"appletini"])
 
-    def test_search_autocomplete_field_name_match(self):
+    def test_search_autocomplete_field_name_match(self) -> None:
         """
         GIVEN:
             - One document exists in index (must be one document to experience the crash)
@@ -659,7 +659,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(response.data, [])
 
-    def test_search_autocomplete_search_term(self):
+    def test_search_autocomplete_search_term(self) -> None:
         """
         GIVEN:
             - Search results for autocomplete include the exact search term
@@ -681,7 +681,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(response.data[0], b"auto")
 
-    def test_search_spelling_suggestion(self):
+    def test_search_spelling_suggestion(self) -> None:
         with AsyncWriter(index.open_index()) as writer:
             for i in range(55):
                 doc = Document.objects.create(
@@ -706,7 +706,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         "whoosh.searching.Searcher.correct_query",
         side_effect=Exception("Test error"),
     )
-    def test_corrected_query_error(self, mock_correct_query):
+    def test_corrected_query_error(self, mock_correct_query) -> None:
         """
         GIVEN:
             - A query that raises an error on correction
@@ -722,7 +722,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
             expected_str = "Error while correcting query '2025-06-04': Test error"
             self.assertIn(expected_str, error_str)
 
-    def test_search_more_like(self):
+    def test_search_more_like(self) -> None:
         """
         GIVEN:
             - Documents exist which have similar content
@@ -772,7 +772,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         self.assertEqual(results[0]["id"], d3.id)
         self.assertEqual(results[1]["id"], d1.id)
 
-    def test_search_filtering(self):
+    def test_search_filtering(self) -> None:
         t = Tag.objects.create(name="tag")
         t2 = Tag.objects.create(name="tag2")
         c = Correspondent.objects.create(name="correspondent")
@@ -1031,7 +1031,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
             ),
         )
 
-    def test_search_filtering_respect_owner(self):
+    def test_search_filtering_respect_owner(self) -> None:
         """
         GIVEN:
             - Documents with owners set & without
@@ -1087,7 +1087,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         r = self.client.get(f"/api/documents/?query=test&owner__id__none={u1.id}")
         self.assertEqual(r.data["count"], 3)
 
-    def test_search_filtering_with_object_perms(self):
+    def test_search_filtering_with_object_perms(self) -> None:
         """
         GIVEN:
             - Documents with granted view permissions to others
@@ -1148,7 +1148,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         r = self.client.get(f"/api/documents/?query=test&shared_by__id={u1.id}")
         self.assertEqual(r.data["count"], 1)
 
-    def test_search_sorting(self):
+    def test_search_sorting(self) -> None:
         u1 = User.objects.create_user("user1")
         u2 = User.objects.create_user("user2")
         c1 = Correspondent.objects.create(name="corres Ax")
@@ -1238,7 +1238,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         )
 
     @mock.patch("documents.bulk_edit.bulk_update_documents")
-    def test_global_search(self, m):
+    def test_global_search(self, m) -> None:
         """
         GIVEN:
             - Multiple documents and objects
@@ -1357,7 +1357,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
         self.assertEqual(results["custom_fields"][0]["id"], custom_field1.id)
         self.assertEqual(results["workflows"][0]["id"], workflow1.id)
 
-    def test_global_search_bad_request(self):
+    def test_global_search_bad_request(self) -> None:
         """
         WHEN:
             - Global search query is made without or with query < 3 characters
diff --git a/src/documents/tests/test_api_status.py b/src/documents/tests/test_api_status.py
index 8e29c53d2cfa8555363c1624f176e83be1432100..d2a092726afba527da7f2504a3994f7ca32d8576 100644
@@ -17,7 +17,7 @@ from paperless import version
 class TestSystemStatus(APITestCase):
     ENDPOINT = "/api/status/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.user = User.objects.create_superuser(
             username="temp_admin",
@@ -26,13 +26,13 @@ class TestSystemStatus(APITestCase):
         self.override = override_settings(MEDIA_ROOT=self.tmp_dir)
         self.override.enable()
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         super().tearDown()
 
         self.override.disable()
         shutil.rmtree(self.tmp_dir)
 
-    def test_system_status(self):
+    def test_system_status(self) -> None:
         """
         GIVEN:
             - A user is logged in
@@ -58,7 +58,7 @@ class TestSystemStatus(APITestCase):
         self.assertEqual(response.data["tasks"]["redis_status"], "ERROR")
         self.assertIsNotNone(response.data["tasks"]["redis_error"])
 
-    def test_system_status_insufficient_permissions(self):
+    def test_system_status_insufficient_permissions(self) -> None:
         """
         GIVEN:
             - A user is not logged in or does not have permissions
@@ -74,7 +74,7 @@ class TestSystemStatus(APITestCase):
         response = self.client.get(self.ENDPOINT)
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    def test_system_status_container_detection(self):
+    def test_system_status_container_detection(self) -> None:
         """
         GIVEN:
             - The application is running in a containerized environment
@@ -93,7 +93,7 @@ class TestSystemStatus(APITestCase):
         self.assertEqual(response.data["install_type"], "kubernetes")
 
     @mock.patch("redis.Redis.execute_command")
-    def test_system_status_redis_ping(self, mock_ping):
+    def test_system_status_redis_ping(self, mock_ping) -> None:
         """
         GIVEN:
            - Redis ping returns True
@@ -108,7 +108,7 @@ class TestSystemStatus(APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(response.data["tasks"]["redis_status"], "OK")
 
-    def test_system_status_redis_no_credentials(self):
+    def test_system_status_redis_no_credentials(self) -> None:
         """
         GIVEN:
             - Redis URL with credentials
@@ -128,7 +128,7 @@ class TestSystemStatus(APITestCase):
                 "redis://localhost:6379",
             )
 
-    def test_system_status_redis_socket(self):
+    def test_system_status_redis_socket(self) -> None:
         """
         GIVEN:
             - Redis URL is socket
@@ -148,7 +148,7 @@ class TestSystemStatus(APITestCase):
             )
 
     @mock.patch("celery.app.control.Inspect.ping")
-    def test_system_status_celery_ping(self, mock_ping):
+    def test_system_status_celery_ping(self, mock_ping) -> None:
         """
         GIVEN:
             - Celery ping returns pong
@@ -165,7 +165,7 @@ class TestSystemStatus(APITestCase):
 
     @override_settings(INDEX_DIR=Path("/tmp/index"))
     @mock.patch("whoosh.index.FileIndex.last_modified")
-    def test_system_status_index_ok(self, mock_last_modified):
+    def test_system_status_index_ok(self, mock_last_modified) -> None:
         """
         GIVEN:
             - The index last modified time is set
@@ -183,7 +183,7 @@ class TestSystemStatus(APITestCase):
 
     @override_settings(INDEX_DIR=Path("/tmp/index/"))
     @mock.patch("documents.index.open_index", autospec=True)
-    def test_system_status_index_error(self, mock_open_index):
+    def test_system_status_index_error(self, mock_open_index) -> None:
         """
         GIVEN:
             - The index is not found
@@ -201,7 +201,7 @@ class TestSystemStatus(APITestCase):
         self.assertEqual(response.data["tasks"]["index_status"], "ERROR")
         self.assertIsNotNone(response.data["tasks"]["index_error"])
 
-    def test_system_status_classifier_ok(self):
+    def test_system_status_classifier_ok(self) -> None:
         """
         GIVEN:
             - The classifier is found
@@ -221,7 +221,7 @@ class TestSystemStatus(APITestCase):
         self.assertEqual(response.data["tasks"]["classifier_status"], "OK")
         self.assertIsNone(response.data["tasks"]["classifier_error"])
 
-    def test_system_status_classifier_warning(self):
+    def test_system_status_classifier_warning(self) -> None:
         """
         GIVEN:
             - No classifier task is found
@@ -238,7 +238,7 @@ class TestSystemStatus(APITestCase):
             "WARNING",
         )
 
-    def test_system_status_classifier_error(self):
+    def test_system_status_classifier_error(self) -> None:
         """
         GIVEN:
             - An error occurred while loading the classifier
@@ -262,7 +262,7 @@ class TestSystemStatus(APITestCase):
         )
         self.assertIsNotNone(response.data["tasks"]["classifier_error"])
 
-    def test_system_status_sanity_check_ok(self):
+    def test_system_status_sanity_check_ok(self) -> None:
         """
         GIVEN:
             - The sanity check is successful
@@ -282,7 +282,7 @@ class TestSystemStatus(APITestCase):
         self.assertEqual(response.data["tasks"]["sanity_check_status"], "OK")
         self.assertIsNone(response.data["tasks"]["sanity_check_error"])
 
-    def test_system_status_sanity_check_warning(self):
+    def test_system_status_sanity_check_warning(self) -> None:
         """
         GIVEN:
             - No sanity check task is found
@@ -299,7 +299,7 @@ class TestSystemStatus(APITestCase):
             "WARNING",
         )
 
-    def test_system_status_sanity_check_error(self):
+    def test_system_status_sanity_check_error(self) -> None:
         """
         GIVEN:
             - The sanity check failed
@@ -323,7 +323,7 @@ class TestSystemStatus(APITestCase):
         )
         self.assertIsNotNone(response.data["tasks"]["sanity_check_error"])
 
-    def test_system_status_ai_disabled(self):
+    def test_system_status_ai_disabled(self) -> None:
         """
         GIVEN:
             - The AI feature is disabled
@@ -339,7 +339,7 @@ class TestSystemStatus(APITestCase):
             self.assertEqual(response.data["tasks"]["llmindex_status"], "DISABLED")
             self.assertIsNone(response.data["tasks"]["llmindex_error"])
 
-    def test_system_status_ai_enabled(self):
+    def test_system_status_ai_enabled(self) -> None:
         """
         GIVEN:
             - The AI index feature is enabled, but no tasks are found
@@ -367,7 +367,7 @@ class TestSystemStatus(APITestCase):
             self.assertEqual(response.data["tasks"]["llmindex_status"], "OK")
             self.assertIsNone(response.data["tasks"]["llmindex_error"])
 
-    def test_system_status_ai_error(self):
+    def test_system_status_ai_error(self) -> None:
         """
         GIVEN:
             - The AI index feature is enabled and a task is found with an error
diff --git a/src/documents/tests/test_api_tasks.py b/src/documents/tests/test_api_tasks.py
index 6429ef44f98092cbfa1958151c174d581fcc0596..5dd003565f1524d9833f975e9a8fd1c7c0086162 100644
@@ -16,13 +16,13 @@ from documents.views import TasksViewSet
 class TestTasks(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/tasks/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(username="temp_admin")
         self.client.force_authenticate(user=self.user)
 
-    def test_get_tasks(self):
+    def test_get_tasks(self) -> None:
         """
         GIVEN:
             - Attempted celery tasks
@@ -57,7 +57,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_task2["status"], celery.states.PENDING)
         self.assertEqual(returned_task2["task_file_name"], task2.task_file_name)
 
-    def test_get_single_task_status(self):
+    def test_get_single_task_status(self) -> None:
         """
        GIVEN:
             - Query parameter for a valid task ID
@@ -86,7 +86,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
 
         self.assertEqual(returned_task1["task_id"], task1.task_id)
 
-    def test_get_single_task_status_not_valid(self):
+    def test_get_single_task_status_not_valid(self) -> None:
         """
        GIVEN:
             - Query parameter for a non-existent task ID
@@ -110,7 +110,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(len(response.data), 0)
 
-    def test_acknowledge_tasks(self):
+    def test_acknowledge_tasks(self) -> None:
         """
         GIVEN:
             - Attempted celery tasks
@@ -136,7 +136,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         response = self.client.get(self.ENDPOINT + "?acknowledged=false")
         self.assertEqual(len(response.data), 0)
 
-    def test_acknowledge_tasks_requires_change_permission(self):
+    def test_acknowledge_tasks_requires_change_permission(self) -> None:
         """
         GIVEN:
             - A regular user initially without change permissions
@@ -174,7 +174,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
-    def test_tasks_owner_aware(self):
+    def test_tasks_owner_aware(self) -> None:
         """
         GIVEN:
             - Existing PaperlessTasks with owner and with no owner
@@ -220,7 +220,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         self.assertEqual(acknowledge_response.status_code, status.HTTP_200_OK)
         self.assertEqual(acknowledge_response.data, {"result": 2})
 
-    def test_task_result_no_error(self):
+    def test_task_result_no_error(self) -> None:
         """
         GIVEN:
             - A celery task completed without error
@@ -246,7 +246,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_data["result"], "Success. New document id 1 created")
         self.assertEqual(returned_data["related_document"], "1")
 
-    def test_task_result_with_error(self):
+    def test_task_result_with_error(self) -> None:
         """
         GIVEN:
             - A celery task completed with an exception
@@ -274,7 +274,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
             "test.pdf: Unexpected error during ingestion.",
         )
 
-    def test_task_name_webui(self):
+    def test_task_name_webui(self) -> None:
         """
         GIVEN:
             - Attempted celery task
@@ -300,7 +300,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
 
         self.assertEqual(returned_data["task_file_name"], "test.pdf")
 
-    def test_task_name_consume_folder(self):
+    def test_task_name_consume_folder(self) -> None:
         """
         GIVEN:
             - Attempted celery task
@@ -326,7 +326,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
 
         self.assertEqual(returned_data["task_file_name"], "anothertest.pdf")
 
-    def test_task_result_duplicate_warning_includes_count(self):
+    def test_task_result_duplicate_warning_includes_count(self) -> None:
         """
         GIVEN:
             - A celery task succeeds, but a duplicate exists
@@ -365,7 +365,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
 
         self.assertEqual(returned_data["related_document"], str(created_doc.pk))
 
-    def test_run_train_classifier_task(self):
+    def test_run_train_classifier_task(self) -> None:
         """
         GIVEN:
             - A superuser
@@ -402,7 +402,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         mock_train_classifier.assert_called_once_with(scheduled=False)
 
     @mock.patch("documents.tasks.sanity_check")
-    def test_run_task_requires_superuser(self, mock_check_sanity):
+    def test_run_task_requires_superuser(self, mock_check_sanity) -> None:
         """
         GIVEN:
             - A regular user
diff --git a/src/documents/tests/test_api_trash.py b/src/documents/tests/test_api_trash.py
index 757728690d3cc32a75d9cd29d15860ca12dbdee5..65fb4306ac8f185ff03b56c15a25cda4fd6b0391 100644
@@ -10,7 +10,7 @@ from documents.models import Document
 
 
 class TestTrashAPI(APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_user(username="temp_admin")
@@ -18,7 +18,7 @@ class TestTrashAPI(APITestCase):
         self.client.force_authenticate(user=self.user)
         cache.clear()
 
-    def test_api_trash(self):
+    def test_api_trash(self) -> None:
         """
         GIVEN:
             - Existing document
@@ -68,7 +68,7 @@ class TestTrashAPI(APITestCase):
         self.assertEqual(resp.status_code, status.HTTP_200_OK)
         self.assertEqual(Document.global_objects.count(), 0)
 
-    def test_trash_api_empty_all(self):
+    def test_trash_api_empty_all(self) -> None:
         """
         GIVEN:
             - Existing documents in trash
@@ -101,7 +101,7 @@ class TestTrashAPI(APITestCase):
         self.assertEqual(resp.status_code, status.HTTP_200_OK)
         self.assertEqual(Document.global_objects.count(), 0)
 
-    def test_api_trash_show_owned_only(self):
+    def test_api_trash_show_owned_only(self) -> None:
         """
         GIVEN:
             - Existing documents in trash
@@ -153,7 +153,7 @@ class TestTrashAPI(APITestCase):
         self.assertEqual(resp.status_code, status.HTTP_200_OK)
         self.assertEqual(resp.data["count"], 3)
 
-    def test_api_trash_insufficient_permissions(self):
+    def test_api_trash_insufficient_permissions(self) -> None:
         """
         GIVEN:
             - Existing document with owner = user2 in trash
@@ -180,7 +180,7 @@ class TestTrashAPI(APITestCase):
         self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
         self.assertEqual(Document.global_objects.count(), 1)
 
-    def test_api_trash_invalid_params(self):
+    def test_api_trash_invalid_params(self) -> None:
         """
         GIVEN:
             - Existing documents
diff --git a/src/documents/tests/test_api_uisettings.py b/src/documents/tests/test_api_uisettings.py
index c733315e6a7c09b08949ca4f50379156c9450f12..ec973e2f13d63aa563379c1f9bd89379c8808156 100644
@@ -13,7 +13,7 @@ from paperless.version import __full_version_str__
 class TestApiUiSettings(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/ui_settings/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.test_user = User.objects.create_superuser(username="test")
         self.test_user.first_name = "Test"
@@ -21,7 +21,7 @@ class TestApiUiSettings(DirectoriesMixin, APITestCase):
         self.test_user.save()
         self.client.force_authenticate(user=self.test_user)
 
-    def test_api_get_ui_settings(self):
+    def test_api_get_ui_settings(self) -> None:
         response = self.client.get(self.ENDPOINT, format="json")
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.maxDiff = None
@@ -53,7 +53,7 @@ class TestApiUiSettings(DirectoriesMixin, APITestCase):
             },
         )
 
-    def test_api_set_ui_settings(self):
+    def test_api_set_ui_settings(self) -> None:
         settings = {
             "settings": {
                 "dark_mode": {
@@ -76,7 +76,7 @@ class TestApiUiSettings(DirectoriesMixin, APITestCase):
             settings["settings"],
         )
 
-    def test_api_set_ui_settings_insufficient_global_permissions(self):
+    def test_api_set_ui_settings_insufficient_global_permissions(self) -> None:
         not_superuser = User.objects.create_user(username="test_not_superuser")
         self.client.force_authenticate(user=not_superuser)
 
@@ -96,7 +96,7 @@ class TestApiUiSettings(DirectoriesMixin, APITestCase):
 
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    def test_api_set_ui_settings_sufficient_global_permissions(self):
+    def test_api_set_ui_settings_sufficient_global_permissions(self) -> None:
         not_superuser = User.objects.create_user(username="test_not_superuser")
         not_superuser.user_permissions.add(
             *Permission.objects.filter(codename__contains="uisettings"),
@@ -120,7 +120,7 @@ class TestApiUiSettings(DirectoriesMixin, APITestCase):
 
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
-    def test_settings_must_be_dict(self):
+    def test_settings_must_be_dict(self) -> None:
         """
         GIVEN:
             - API request to update ui_settings with settings not being a dict
@@ -153,7 +153,7 @@ class TestApiUiSettings(DirectoriesMixin, APITestCase):
         OUTLOOK_OAUTH_CLIENT_SECRET="jkl012",
         OUTLOOK_OAUTH_ENABLED=True,
     )
-    def test_settings_includes_oauth_urls_if_enabled(self):
+    def test_settings_includes_oauth_urls_if_enabled(self) -> None:
         response = self.client.get(self.ENDPOINT, format="json")
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertIsNotNone(
diff --git a/src/documents/tests/test_api_workflows.py b/src/documents/tests/test_api_workflows.py
index 1d3efd4570290afc4b7dfbc8fdeaee89cf4dd4d0..a11cb490ad63fb6fd63413d89a9633c4a992950c 100644
@@ -78,7 +78,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
         self.workflow.actions.add(self.action)
         self.workflow.save()
 
-    def test_api_get_workflow(self):
+    def test_api_get_workflow(self) -> None:
         """
         GIVEN:
             - API request to get all workflows
@@ -99,7 +99,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
             self.action.assign_correspondent.pk,
         )
 
-    def test_api_create_workflow(self):
+    def test_api_create_workflow(self) -> None:
         """
         GIVEN:
             - API request to create a workflow, trigger and action separately
@@ -160,7 +160,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         self.assertEqual(Workflow.objects.count(), 2)
 
-    def test_api_create_workflow_nested(self):
+    def test_api_create_workflow_nested(self) -> None:
         """
         GIVEN:
             - API request to create a workflow with nested trigger and action
@@ -280,7 +280,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
             json.dumps(["AND", [[self.cf1.id, "exact", "value"]]]),
         )
 
-    def test_api_create_invalid_workflow_trigger(self):
+    def test_api_create_invalid_workflow_trigger(self) -> None:
         """
         GIVEN:
             - API request to create a workflow trigger
@@ -316,7 +316,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
 
         self.assertEqual(WorkflowTrigger.objects.count(), 1)
 
-    def test_api_create_invalid_assign_title(self):
+    def test_api_create_invalid_assign_title(self) -> None:
         """
         GIVEN:
             - API request to create a workflow
@@ -355,7 +355,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
 
         self.assertEqual(Workflow.objects.count(), 1)
 
-    def test_api_create_workflow_trigger_action_empty_fields(self):
+    def test_api_create_workflow_trigger_action_empty_fields(self) -> None:
         """
         GIVEN:
             - API request to create a workflow trigger and action
@@ -412,7 +412,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
         self.assertEqual(trigger2.filter_path, "*/test/*")
         self.assertIsNone(trigger2.filter_filename)
 
-    def test_api_update_workflow_nested_triggers_actions(self):
+    def test_api_update_workflow_nested_triggers_actions(self) -> None:
         """
         GIVEN:
             - Existing workflow with trigger and action
@@ -498,7 +498,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(workflow.actions.first().assign_title, "Action New Title")
 
-    def test_api_update_workflow_no_trigger_actions(self):
+    def test_api_update_workflow_no_trigger_actions(self) -> None:
         """
         GIVEN:
             - Existing workflow
@@ -542,7 +542,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
         self.assertEqual(workflow.triggers.count(), 1)
         self.assertEqual(workflow.actions.count(), 0)
 
-    def test_api_auto_remove_orphaned_triggers_actions(self):
+    def test_api_auto_remove_orphaned_triggers_actions(self) -> None:
         """
         GIVEN:
             - Existing trigger and action
@@ -582,7 +582,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
         self.assertEqual(WorkflowAction.objects.all().count(), 1)
         self.assertNotEqual(workflow.actions.first().id, self.action.id)
 
-    def test_email_action_validation(self):
+    def test_email_action_validation(self) -> None:
         """
         GIVEN:
             - API request to create a workflow with an email action
@@ -675,7 +675,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
 
-    def test_webhook_action_validation(self):
+    def test_webhook_action_validation(self) -> None:
         """
         GIVEN:
             - API request to create a workflow with a notification action
@@ -737,7 +737,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
 
-    def test_webhook_action_url_validation(self):
+    def test_webhook_action_url_validation(self) -> None:
         """
         GIVEN:
             - API request to create a workflow with a notification action
@@ -779,7 +779,7 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
             )
             self.assertEqual(response.status_code, expected_resp_code)
 
-    def test_patch_trigger_cannot_change_id(self):
+    def test_patch_trigger_cannot_change_id(self) -> None:
         """
         GIVEN:
             - An existing workflow trigger
diff --git a/src/documents/tests/test_barcodes.py b/src/documents/tests/test_barcodes.py
index f7d11db661457b53af7eccdc52581b008b9ef447..beb4e2a9a24ecbaafa518ce3f5c0fbed21ff9cdb 100644
@@ -55,7 +55,7 @@ class TestBarcode(
     GetReaderPluginMixin,
     TestCase,
 ):
-    def test_scan_file_for_separating_barcodes(self):
+    def test_scan_file_for_separating_barcodes(self) -> None:
         """
         GIVEN:
             - PDF containing barcodes
@@ -76,7 +76,7 @@ class TestBarcode(
     @override_settings(
         CONSUMER_BARCODE_TIFF_SUPPORT=True,
     )
-    def test_scan_tiff_for_separating_barcodes(self):
+    def test_scan_tiff_for_separating_barcodes(self) -> None:
         """
         GIVEN:
             - TIFF image containing barcodes
@@ -96,7 +96,7 @@ class TestBarcode(
     @override_settings(
         CONSUMER_BARCODE_TIFF_SUPPORT=True,
     )
-    def test_scan_tiff_with_alpha_for_separating_barcodes(self):
+    def test_scan_tiff_with_alpha_for_separating_barcodes(self) -> None:
         """
         GIVEN:
             - TIFF image containing barcodes
@@ -113,7 +113,7 @@ class TestBarcode(
 
             self.assertDictEqual(separator_page_numbers, {1: False})
 
-    def test_scan_file_for_separating_barcodes_none_present(self):
+    def test_scan_file_for_separating_barcodes_none_present(self) -> None:
         """
         GIVEN:
             - File with no barcodes
@@ -131,7 +131,7 @@ class TestBarcode(
             self.assertEqual(reader.pdf_file, test_file)
             self.assertDictEqual(separator_page_numbers, {})
 
-    def test_scan_file_for_separating_barcodes_middle_page(self):
+    def test_scan_file_for_separating_barcodes_middle_page(self) -> None:
         """
         GIVEN:
             - PDF file containing a separator on page 1 (zero indexed)
@@ -149,7 +149,7 @@ class TestBarcode(
             self.assertEqual(reader.pdf_file, test_file)
             self.assertDictEqual(separator_page_numbers, {1: False})
 
-    def test_scan_file_for_separating_barcodes_multiple_pages(self):
+    def test_scan_file_for_separating_barcodes_multiple_pages(self) -> None:
         """
         GIVEN:
             - PDF file containing a separator on pages 2 and 5 (zero indexed)
@@ -167,7 +167,7 @@ class TestBarcode(
             self.assertEqual(reader.pdf_file, test_file)
             self.assertDictEqual(separator_page_numbers, {2: False, 5: False})
 
-    def test_scan_file_for_separating_barcodes_hard_to_detect(self):
+    def test_scan_file_for_separating_barcodes_hard_to_detect(self) -> None:
         """
         GIVEN:
             - PDF file containing a separator on page 1 (zero indexed)
@@ -192,7 +192,7 @@ class TestBarcode(
                 self.assertEqual(reader.pdf_file, test_file)
                 self.assertDictEqual(separator_page_numbers, {1: False})
 
-    def test_scan_file_for_separating_barcodes_unreadable(self):
+    def test_scan_file_for_separating_barcodes_unreadable(self) -> None:
         """
         GIVEN:
             - PDF file containing a separator on page 1 (zero indexed)
@@ -211,7 +211,7 @@ class TestBarcode(
             self.assertEqual(reader.pdf_file, test_file)
             self.assertDictEqual(separator_page_numbers, {})
 
-    def test_scan_file_for_separating_barcodes_fax_decode(self):
+    def test_scan_file_for_separating_barcodes_fax_decode(self) -> None:
         """
         GIVEN:
             - A PDF containing an image encoded as CCITT Group 4 encoding
@@ -229,7 +229,7 @@ class TestBarcode(
             self.assertEqual(reader.pdf_file, test_file)
             self.assertDictEqual(separator_page_numbers, {1: False})
 
-    def test_scan_file_for_separating_qr_barcodes(self):
+    def test_scan_file_for_separating_qr_barcodes(self) -> None:
         """
         GIVEN:
             - PDF file containing a separator on page 0 (zero indexed)
@@ -249,7 +249,7 @@ class TestBarcode(
             self.assertDictEqual(separator_page_numbers, {0: False})
 
     @override_settings(CONSUMER_BARCODE_STRING="CUSTOM BARCODE")
-    def test_scan_file_for_separating_custom_barcodes(self):
+    def test_scan_file_for_separating_custom_barcodes(self) -> None:
         """
         GIVEN:
             - PDF file containing a separator on page 0 (zero indexed)
@@ -269,7 +269,7 @@ class TestBarcode(
             self.assertDictEqual(separator_page_numbers, {0: False})
 
     @override_settings(CONSUMER_BARCODE_STRING="CUSTOM BARCODE")
-    def test_scan_file_for_separating_custom_qr_barcodes(self):
+    def test_scan_file_for_separating_custom_qr_barcodes(self) -> None:
         """
         GIVEN:
             - PDF file containing a separator on page 0 (zero indexed)
@@ -290,7 +290,7 @@ class TestBarcode(
             self.assertDictEqual(separator_page_numbers, {0: False})
 
     @override_settings(CONSUMER_BARCODE_STRING="CUSTOM BARCODE")
-    def test_scan_file_for_separating_custom_128_barcodes(self):
+    def test_scan_file_for_separating_custom_128_barcodes(self) -> None:
         """
         GIVEN:
             - PDF file containing a separator on page 0 (zero indexed)
@@ -310,7 +310,7 @@ class TestBarcode(
             self.assertEqual(reader.pdf_file, test_file)
             self.assertDictEqual(separator_page_numbers, {0: False})
 
-    def test_scan_file_for_separating_wrong_qr_barcodes(self):
+    def test_scan_file_for_separating_wrong_qr_barcodes(self) -> None:
         """
         GIVEN:
             - PDF file containing a separator on page 0 (zero indexed)
@@ -331,7 +331,7 @@ class TestBarcode(
             self.assertDictEqual(separator_page_numbers, {})
 
     @override_settings(CONSUMER_BARCODE_STRING="ADAR-NEXTDOC")
-    def test_scan_file_qr_barcodes_was_problem(self):
+    def test_scan_file_qr_barcodes_was_problem(self) -> None:
         """
         GIVEN:
             - Input PDF with certain QR codes that aren't detected at current size
@@ -350,7 +350,7 @@ class TestBarcode(
             self.assertGreater(len(reader.barcodes), 0)
             self.assertDictEqual(separator_page_numbers, {1: False})
 
-    def test_scan_file_for_separating_barcodes_password(self):
+    def test_scan_file_for_separating_barcodes_password(self) -> None:
         """
         GIVEN:
             - Password protected PDF
@@ -372,7 +372,7 @@ class TestBarcode(
                 self.assertEqual(reader.pdf_file, test_file)
                 self.assertDictEqual(separator_page_numbers, {})
 
-    def test_separate_pages(self):
+    def test_separate_pages(self) -> None:
         """
         GIVEN:
             - Input PDF 2 pages after separation
@@ -389,7 +389,7 @@ class TestBarcode(
             self.assertEqual(reader.pdf_file, test_file)
             self.assertEqual(len(documents), 2)
 
-    def test_separate_pages_double_code(self):
+    def test_separate_pages_double_code(self) -> None:
         """
         GIVEN:
             - Input PDF with two patch code pages in a row
@@ -406,7 +406,7 @@ class TestBarcode(
             self.assertEqual(len(documents), 2)
 
     @override_settings(CONSUMER_ENABLE_BARCODES=True)
-    def test_separate_pages_no_list(self):
+    def test_separate_pages_no_list(self) -> None:
         """
         GIVEN:
             - Input file to separate
@@ -427,7 +427,7 @@ class TestBarcode(
         CONSUMER_ENABLE_BARCODES=True,
         CONSUMER_BARCODE_TIFF_SUPPORT=True,
     )
-    def test_consume_barcode_unsupported_jpg_file(self):
+    def test_consume_barcode_unsupported_jpg_file(self) -> None:
         """
         GIVEN:
             - JPEG image as input
@@ -446,7 +446,7 @@ class TestBarcode(
         CONSUMER_ENABLE_BARCODES=True,
         CONSUMER_ENABLE_ASN_BARCODE=True,
     )
-    def test_separate_pages_by_asn_barcodes_and_patcht(self):
+    def test_separate_pages_by_asn_barcodes_and_patcht(self) -> None:
         """
         GIVEN:
             - Input PDF with a patch code on page 3 and ASN barcodes on pages 1,5,6,9,11
@@ -483,7 +483,7 @@ class TestBarcode(
         CONSUMER_ENABLE_BARCODES=True,
         CONSUMER_ENABLE_ASN_BARCODE=True,
     )
-    def test_separate_pages_by_asn_barcodes(self):
+    def test_separate_pages_by_asn_barcodes(self) -> None:
         """
         GIVEN:
             - Input PDF with ASN barcodes on pages 1,3,4,7,9
@@ -517,7 +517,7 @@ class TestBarcode(
         CONSUMER_ENABLE_ASN_BARCODE=True,
         CONSUMER_BARCODE_RETAIN_SPLIT_PAGES=True,
     )
-    def test_separate_pages_by_asn_barcodes_and_patcht_retain_pages(self):
+    def test_separate_pages_by_asn_barcodes_and_patcht_retain_pages(self) -> None:
         """
         GIVEN:
             - Input PDF with a patch code on page 3 and ASN barcodes on pages 1,5,6,9,11
@@ -548,7 +548,7 @@ class TestBarcode(
                 },
             )
 
-    def test_barcode_config(self):
+    def test_barcode_config(self) -> None:
         """
         GIVEN:
             - Barcode app config is set (settings are not)
@@ -579,7 +579,7 @@ class TestBarcodeNewConsume(
     TestCase,
 ):
     @override_settings(CONSUMER_ENABLE_BARCODES=True)
-    def test_consume_barcode_file(self):
+    def test_consume_barcode_file(self) -> None:
         """
         GIVEN:
            - Incoming file with 1 barcode producing 2 documents
@@ -642,7 +642,7 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
         reader.cleanup()
 
     @override_settings(CONSUMER_ASN_BARCODE_PREFIX="CUSTOM-PREFIX-")
-    def test_scan_file_for_asn_custom_prefix(self):
+    def test_scan_file_for_asn_custom_prefix(self) -> None:
         """
         GIVEN:
             - PDF containing an ASN barcode with custom prefix
@@ -660,7 +660,7 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
             self.assertEqual(reader.pdf_file, test_file)
             self.assertEqual(asn, 123)
 
-    def test_scan_file_for_asn_barcode(self):
+    def test_scan_file_for_asn_barcode(self) -> None:
         """
         GIVEN:
             - PDF containing an ASN barcode
@@ -679,7 +679,7 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
             self.assertEqual(reader.pdf_file, test_file)
             self.assertEqual(asn, 123)
 
-    def test_scan_file_for_asn_not_found(self):
+    def test_scan_file_for_asn_not_found(self) -> None:
         """
         GIVEN:
             - PDF without an ASN barcode
@@ -696,7 +696,7 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
             self.assertEqual(reader.pdf_file, test_file)
             self.assertEqual(asn, None)
 
-    def test_scan_file_for_asn_barcode_invalid(self):
+    def test_scan_file_for_asn_barcode_invalid(self) -> None:
         """
         GIVEN:
             - PDF containing an ASN barcode
@@ -718,7 +718,7 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
             self.assertEqual(asn, None)
 
     @override_settings(CONSUMER_ENABLE_ASN_BARCODE=True)
-    def test_consume_barcode_file_asn_assignment(self):
+    def test_consume_barcode_file_asn_assignment(self) -> None:
         """
         GIVEN:
             - PDF containing an ASN barcode
@@ -749,7 +749,7 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
             self.assertEqual(document.archive_serial_number, 123)
 
     @override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
-    def test_scan_file_for_qrcode_without_upscale(self):
+    def test_scan_file_for_qrcode_without_upscale(self) -> None:
         """
         GIVEN:
             - A printed and scanned PDF document with a rather small QR code
@@ -769,7 +769,7 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
     @override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
     @override_settings(CONSUMER_BARCODE_DPI=600)
     @override_settings(CONSUMER_BARCODE_UPSCALE=1.5)
-    def test_scan_file_for_qrcode_with_upscale(self):
+    def test_scan_file_for_qrcode_with_upscale(self) -> None:
         """
         GIVEN:
             - A printed and scanned PDF document with a rather small QR code
@@ -826,7 +826,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
         CONSUMER_ENABLE_TAG_BARCODE=True,
         CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
     )
-    def test_barcode_without_tag_match(self):
+    def test_barcode_without_tag_match(self) -> None:
         """
         GIVEN:
             - Barcode that does not match any TAG mapping pattern
@@ -852,7 +852,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
             )
 
     @override_settings(CONSUMER_ENABLE_TAG_BARCODE=True)
-    def test_scan_file_without_matching_barcodes(self):
+    def test_scan_file_without_matching_barcodes(self) -> None:
         """
         GIVEN:
             - PDF containing tag barcodes but none with matching prefix (default "TAG:")
@@ -871,7 +871,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
         CONSUMER_ENABLE_TAG_BARCODE=False,
         CONSUMER_TAG_BARCODE_MAPPING={"CUSTOM-PREFIX-(.*)": "\\g<1>"},
     )
-    def test_scan_file_with_matching_barcode_but_function_disabled(self):
+    def test_scan_file_with_matching_barcode_but_function_disabled(self) -> None:
         """
         GIVEN:
             - PDF containing a tag barcode with matching custom prefix
@@ -891,7 +891,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
         CONSUMER_ENABLE_TAG_BARCODE=True,
         CONSUMER_TAG_BARCODE_MAPPING={"CUSTOM-PREFIX-(.*)": "\\g<1>"},
     )
-    def test_scan_file_for_tag_custom_prefix(self):
+    def test_scan_file_for_tag_custom_prefix(self) -> None:
         """
         GIVEN:
             - PDF containing a tag barcode with custom prefix
@@ -917,7 +917,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
         CONSUMER_ENABLE_TAG_BARCODE=True,
         CONSUMER_TAG_BARCODE_MAPPING={"ASN(.*)": "\\g<1>"},
     )
-    def test_scan_file_for_many_custom_tags(self):
+    def test_scan_file_for_many_custom_tags(self) -> None:
         """
         GIVEN:
             - PDF containing multiple tag barcode with custom prefix
@@ -944,7 +944,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
         CONSUMER_ENABLE_TAG_BARCODE=True,
         CONSUMER_TAG_BARCODE_MAPPING={"CUSTOM-PREFIX-(.*)": "\\g<3>"},
     )
-    def test_scan_file_for_tag_raises_value_error(self):
+    def test_scan_file_for_tag_raises_value_error(self) -> None:
         """
         GIVEN:
             - Any error occurs during tag barcode processing
@@ -963,7 +963,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
         CONSUMER_TAG_BARCODE_SPLIT=True,
         CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
     )
-    def test_split_on_tag_barcodes(self):
+    def test_split_on_tag_barcodes(self) -> None:
         """
         GIVEN:
             - PDF containing barcodes with TAG: prefix
@@ -989,7 +989,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
         CONSUMER_TAG_BARCODE_SPLIT=False,
         CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
     )
-    def test_no_split_when_tag_split_disabled(self):
+    def test_no_split_when_tag_split_disabled(self) -> None:
         """
         GIVEN:
             - PDF containing TAG barcodes (TAG:invoice, TAG:receipt)
@@ -1018,7 +1018,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
         CELERY_TASK_ALWAYS_EAGER=True,
         OCR_MODE="skip",
     )
-    def test_consume_barcode_file_tag_split_and_assignment(self):
+    def test_consume_barcode_file_tag_split_and_assignment(self) -> None:
         """
         GIVEN:
             - PDF containing TAG barcodes on pages 2 and 4 (TAG:invoice, TAG:receipt)
@@ -1066,7 +1066,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
         CONSUMER_TAG_BARCODE_SPLIT=True,
         CONSUMER_TAG_BARCODE_MAPPING={"ASN(.*)": "ASN_\\g<1>", "TAG:(.*)": "\\g<1>"},
     )
-    def test_split_by_mixed_asn_tag_backwards_compat(self):
+    def test_split_by_mixed_asn_tag_backwards_compat(self) -> None:
         """
         GIVEN:
             - PDF with mixed ASN and TAG barcodes
@@ -1095,7 +1095,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
         CONSUMER_TAG_BARCODE_SPLIT=True,
         CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
     )
-    def test_split_by_tag_multiple_per_page(self):
+    def test_split_by_tag_multiple_per_page(self) -> None:
         """
         GIVEN:
             - PDF with multiple TAG barcodes on same page
diff --git a/src/documents/tests/test_bulk_edit.py b/src/documents/tests/test_bulk_edit.py
index bf5033bdc7f92284bdaa42df756996f6cfa60561..0e83d94a84a4ae7d08386bfb3b2bde4ff7052a44 100644
@@ -23,7 +23,7 @@ from documents.tests.utils import DirectoriesMixin
 
 
 class TestBulkEdit(DirectoriesMixin, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.owner = User.objects.create(username="test_owner")
@@ -67,7 +67,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         self.doc4.tags.add(self.t1, self.t2)
         self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{checksum}")
 
-    def test_set_correspondent(self):
+    def test_set_correspondent(self) -> None:
         self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 1)
         bulk_edit.set_correspondent(
             [self.doc1.id, self.doc2.id, self.doc3.id],
@@ -78,7 +78,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
 
-    def test_unset_correspondent(self):
+    def test_unset_correspondent(self) -> None:
         self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 1)
         bulk_edit.set_correspondent([self.doc1.id, self.doc2.id, self.doc3.id], None)
         self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 0)
@@ -86,7 +86,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
 
-    def test_set_document_type(self):
+    def test_set_document_type(self) -> None:
         self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 1)
         bulk_edit.set_document_type(
             [self.doc1.id, self.doc2.id, self.doc3.id],
@@ -97,7 +97,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
 
-    def test_unset_document_type(self):
+    def test_unset_document_type(self) -> None:
         self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 1)
         bulk_edit.set_document_type([self.doc1.id, self.doc2.id, self.doc3.id], None)
         self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 0)
@@ -105,7 +105,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
 
-    def test_set_document_storage_path(self):
+    def test_set_document_storage_path(self) -> None:
         """
         GIVEN:
             - 5 documents without defined storage path
@@ -128,7 +128,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
 
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])
 
-    def test_unset_document_storage_path(self):
+    def test_unset_document_storage_path(self) -> None:
         """
         GIVEN:
             - 4 documents without defined storage path
@@ -159,7 +159,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
 
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])
 
-    def test_add_tag(self):
+    def test_add_tag(self) -> None:
         self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 2)
         bulk_edit.add_tag(
             [self.doc1.id, self.doc2.id, self.doc3.id, self.doc4.id],
@@ -170,7 +170,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc3.id])
 
-    def test_remove_tag(self):
+    def test_remove_tag(self) -> None:
         self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 2)
         bulk_edit.remove_tag([self.doc1.id, self.doc3.id, self.doc4.id], self.t1.id)
         self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 1)
@@ -178,7 +178,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc4.id])
 
-    def test_modify_tags(self):
+    def test_modify_tags(self) -> None:
         tag_unrelated = Tag.objects.create(name="unrelated")
         self.doc2.tags.add(tag_unrelated)
         self.doc3.tags.add(tag_unrelated)
@@ -196,7 +196,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         # TODO: doc3 should not be affected, but the query for that is rather complicated
         self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
 
-    def test_modify_custom_fields(self):
+    def test_modify_custom_fields(self) -> None:
         """
         GIVEN:
             - 2 documents with custom fields
@@ -252,7 +252,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
 
-    def test_modify_custom_fields_with_values(self):
+    def test_modify_custom_fields_with_values(self) -> None:
         """
         GIVEN:
             - 2 documents with custom fields
@@ -344,7 +344,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
             self.doc2.custom_fields.filter(field=cf3).first().value,
         )
 
-    def test_modify_custom_fields_doclink_self_link(self):
+    def test_modify_custom_fields_doclink_self_link(self) -> None:
         """
         GIVEN:
             - 2 existing documents
@@ -373,7 +373,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
             [self.doc1.id],
         )
 
-    def test_delete(self):
+    def test_delete(self) -> None:
         self.assertEqual(Document.objects.count(), 5)
         bulk_edit.delete([self.doc1.id, self.doc2.id])
         self.assertEqual(Document.objects.count(), 3)
@@ -383,7 +383,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         )
 
     @mock.patch("documents.tasks.bulk_update_documents.delay")
-    def test_set_permissions(self, m):
+    def test_set_permissions(self, m) -> None:
         doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
 
         assign_perm("view_document", self.group1, self.doc1)
@@ -422,7 +422,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         self.assertEqual(groups_with_perms.count(), 1)
 
     @mock.patch("documents.tasks.bulk_update_documents.delay")
-    def test_set_permissions_merge(self, m):
+    def test_set_permissions_merge(self, m) -> None:
         doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
 
         self.doc1.owner = self.user1
@@ -466,7 +466,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         self.assertEqual(groups_with_perms.count(), 2)
 
     @mock.patch("documents.models.Document.delete")
-    def test_delete_documents_old_uuid_field(self, m):
+    def test_delete_documents_old_uuid_field(self, m) -> None:
         m.side_effect = Exception("Data too long for column 'transaction_id' at row 1")
         doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
         bulk_edit.delete(doc_ids)
@@ -476,7 +476,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
 
 
 class TestPDFActions(DirectoriesMixin, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         sample1 = self.dirs.scratch_dir / "sample.pdf"
         shutil.copy(
@@ -572,7 +572,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         self.img_doc.save()
 
     @mock.patch("documents.tasks.consume_file.s")
-    def test_merge(self, mock_consume_file):
+    def test_merge(self, mock_consume_file) -> None:
         """
         GIVEN:
             - Existing documents
@@ -660,7 +660,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         )
 
     @mock.patch("documents.tasks.consume_file.s")
-    def test_merge_with_archive_fallback(self, mock_consume_file):
+    def test_merge_with_archive_fallback(self, mock_consume_file) -> None:
         """
         GIVEN:
             - Existing documents
@@ -687,7 +687,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
 
     @mock.patch("documents.tasks.consume_file.delay")
     @mock.patch("pikepdf.open")
-    def test_merge_with_errors(self, mock_open_pdf, mock_consume_file):
+    def test_merge_with_errors(self, mock_open_pdf, mock_consume_file) -> None:
         """
         GIVEN:
             - Existing documents
@@ -711,7 +711,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         mock_consume_file.assert_not_called()
 
     @mock.patch("documents.tasks.consume_file.s")
-    def test_split(self, mock_consume_file):
+    def test_split(self, mock_consume_file) -> None:
         """
         GIVEN:
             - Existing documents
@@ -770,7 +770,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
 
     @mock.patch("documents.tasks.consume_file.delay")
     @mock.patch("pikepdf.Pdf.save")
-    def test_split_with_errors(self, mock_save_pdf, mock_consume_file):
+    def test_split_with_errors(self, mock_save_pdf, mock_consume_file) -> None:
         """
         GIVEN:
             - Existing documents
@@ -795,7 +795,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
     @mock.patch("documents.tasks.bulk_update_documents.si")
     @mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
     @mock.patch("celery.chord.delay")
-    def test_rotate(self, mock_chord, mock_update_document, mock_update_documents):
+    def test_rotate(
+        self,
+        mock_chord,
+        mock_update_document,
+        mock_update_documents,
+    ) -> None:
         """
         GIVEN:
             - Existing documents
@@ -868,7 +873,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
 
     @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
     @mock.patch("pikepdf.Pdf.save")
-    def test_delete_pages(self, mock_pdf_save, mock_update_archive_file):
+    def test_delete_pages(self, mock_pdf_save, mock_update_archive_file) -> None:
         """
         GIVEN:
             - Existing documents
@@ -893,7 +898,11 @@ class TestPDFActions(DirectoriesMixin, TestCase):
 
     @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
     @mock.patch("pikepdf.Pdf.save")
-    def test_delete_pages_with_error(self, mock_pdf_save, mock_update_archive_file):
+    def test_delete_pages_with_error(
+        self,
+        mock_pdf_save,
+        mock_update_archive_file,
+    ) -> None:
         """
         GIVEN:
             - Existing documents
@@ -917,7 +926,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
 
     @mock.patch("documents.bulk_edit.group")
     @mock.patch("documents.tasks.consume_file.s")
-    def test_edit_pdf_basic_operations(self, mock_consume_file, mock_group):
+    def test_edit_pdf_basic_operations(self, mock_consume_file, mock_group) -> None:
         """
         GIVEN:
             - Existing document
@@ -936,7 +945,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
 
     @mock.patch("documents.bulk_edit.group")
     @mock.patch("documents.tasks.consume_file.s")
-    def test_edit_pdf_with_user_override(self, mock_consume_file, mock_group):
+    def test_edit_pdf_with_user_override(self, mock_consume_file, mock_group) -> None:
         """
         GIVEN:
             - Existing document
@@ -956,7 +965,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
 
     @mock.patch("documents.bulk_edit.chord")
     @mock.patch("documents.tasks.consume_file.s")
-    def test_edit_pdf_with_delete_original(self, mock_consume_file, mock_chord):
+    def test_edit_pdf_with_delete_original(self, mock_consume_file, mock_chord) -> None:
         """
         GIVEN:
             - Existing document
@@ -974,7 +983,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         mock_chord.assert_called_once()
 
     @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
-    def test_edit_pdf_with_update_document(self, mock_update_document):
+    def test_edit_pdf_with_update_document(self, mock_update_document) -> None:
         """
         GIVEN:
             - A single existing PDF document
@@ -1004,7 +1013,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
 
     @mock.patch("documents.bulk_edit.group")
     @mock.patch("documents.tasks.consume_file.s")
-    def test_edit_pdf_without_metadata(self, mock_consume_file, mock_group):
+    def test_edit_pdf_without_metadata(self, mock_consume_file, mock_group) -> None:
         """
         GIVEN:
             - Existing document
@@ -1023,7 +1032,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
 
     @mock.patch("documents.bulk_edit.group")
     @mock.patch("documents.tasks.consume_file.s")
-    def test_edit_pdf_open_failure(self, mock_consume_file, mock_group):
+    def test_edit_pdf_open_failure(self, mock_consume_file, mock_group) -> None:
         """
         GIVEN:
             - Existing document
index 4c8d2ead08c4f8dcc87947dd8e835cae3e0314ce..d75bda3c94f35621a283aaa9a4ea4e2dcbd90798 100644
@@ -3,7 +3,7 @@ import pickle
 from documents.caching import StoredLRUCache
 
 
-def test_lru_cache_entries():
+def test_lru_cache_entries() -> None:
     CACHE_TTL = 1
     # LRU cache with a capacity of 2 elements
     cache = StoredLRUCache("test_lru_cache_key", 2, backend_ttl=CACHE_TTL)
@@ -28,7 +28,7 @@ def test_lru_cache_entries():
     assert cache.get(1) == 1
 
 
-def test_stored_lru_cache_key_ttl(mocker):
+def test_stored_lru_cache_key_ttl(mocker) -> None:
     mock_backend = mocker.Mock()
     cache = StoredLRUCache("test_key", backend=mock_backend, backend_ttl=321)
 
index 304074e37c3788502d64effd754ade998c71250e..b78946ba9e704c62f0459e3a27752c6ef0f95ec9 100644
@@ -10,7 +10,7 @@ from documents.checks import parser_check
 
 
 class TestDocumentChecks(TestCase):
-    def test_parser_check(self):
+    def test_parser_check(self) -> None:
         self.assertEqual(parser_check(None), [])
 
         with mock.patch("documents.checks.document_consumer_declaration.send") as m:
@@ -26,7 +26,7 @@ class TestDocumentChecks(TestCase):
                 ],
             )
 
-    def test_filename_format_check(self):
+    def test_filename_format_check(self) -> None:
         self.assertEqual(filename_format_check(None), [])
 
         with override_settings(FILENAME_FORMAT="{created}/{title}"):
index 4f2ad85f54b944dcbfc3832ce891b044f70c8aff..f04152ae088cca0b6bf87378473c19767725beca 100644
@@ -31,14 +31,14 @@ def dummy_preprocess(content: str, **kwargs):
 
 
 class TestClassifier(DirectoriesMixin, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.classifier = DocumentClassifier()
         self.classifier.preprocess_content = mock.MagicMock(
             side_effect=dummy_preprocess,
         )
 
-    def generate_test_data(self):
+    def generate_test_data(self) -> None:
         self.c1 = Correspondent.objects.create(
             name="c1",
             matching_algorithm=Correspondent.MATCH_AUTO,
@@ -119,7 +119,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.doc2.tags.add(self.t3)
         self.doc_inbox.tags.add(self.t2)
 
-    def generate_train_and_save(self):
+    def generate_train_and_save(self) -> None:
         """
         Generates the training data, trains and saves the updated pickle
         file. This ensures the test is using the same scikit learn version
@@ -129,7 +129,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.classifier.train()
         self.classifier.save()
 
-    def test_no_training_data(self):
+    def test_no_training_data(self) -> None:
         """
         GIVEN:
             - No documents exist to train
@@ -141,7 +141,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         with self.assertRaisesMessage(ValueError, "No training data available."):
             self.classifier.train()
 
-    def test_no_non_inbox_tags(self):
+    def test_no_non_inbox_tags(self) -> None:
         """
         GIVEN:
             - No documents without an inbox tag exist
@@ -168,7 +168,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         with self.assertRaisesMessage(ValueError, "No training data available."):
             self.classifier.train()
 
-    def testEmpty(self):
+    def testEmpty(self) -> None:
         """
         GIVEN:
             - A document exists
@@ -189,7 +189,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertIsNone(self.classifier.predict_document_type(""))
         self.assertIsNone(self.classifier.predict_correspondent(""))
 
-    def testTrain(self):
+    def testTrain(self) -> None:
         """
         GIVEN:
             - Test data
@@ -211,7 +211,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
             [self.t1.pk, self.t3.pk],
         )
 
-    def testPredict(self):
+    def testPredict(self) -> None:
         """
         GIVEN:
             - Classifier trained against test data
@@ -265,7 +265,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
             self.assertEqual(mock_preprocess_content.call_count, 2)
             self.assertEqual(mock_transform.call_count, 2)
 
-    def test_no_retrain_if_no_change(self):
+    def test_no_retrain_if_no_change(self) -> None:
         """
         GIVEN:
             - Classifier trained with current data
@@ -280,7 +280,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertTrue(self.classifier.train())
         self.assertFalse(self.classifier.train())
 
-    def test_retrain_if_change(self):
+    def test_retrain_if_change(self) -> None:
         """
         GIVEN:
             - Classifier trained with current data
@@ -300,7 +300,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
 
         self.assertTrue(self.classifier.train())
 
-    def test_retrain_if_auto_match_set_changed(self):
+    def test_retrain_if_auto_match_set_changed(self) -> None:
         """
         GIVEN:
             - Classifier trained with current data
@@ -322,7 +322,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
 
         self.assertTrue(self.classifier.train())
 
-    def testVersionIncreased(self):
+    def testVersionIncreased(self) -> None:
         """
         GIVEN:
             - Existing classifier model saved at a version
@@ -348,7 +348,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
             # assure that we can load the classifier after saving it.
             classifier2.load()
 
-    def testSaveClassifier(self):
+    def testSaveClassifier(self) -> None:
         self.generate_train_and_save()
 
         new_classifier = DocumentClassifier()
@@ -357,7 +357,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
 
         self.assertFalse(new_classifier.train())
 
-    def test_load_and_classify(self):
+    def test_load_and_classify(self) -> None:
         self.generate_train_and_save()
 
         new_classifier = DocumentClassifier()
@@ -367,7 +367,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertCountEqual(new_classifier.predict_tags(self.doc2.content), [45, 12])
 
     @mock.patch("documents.classifier.pickle.load")
-    def test_load_corrupt_file(self, patched_pickle_load: mock.MagicMock):
+    def test_load_corrupt_file(self, patched_pickle_load: mock.MagicMock) -> None:
         """
         GIVEN:
             - Corrupted classifier pickle file
@@ -394,7 +394,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertIsNone(load_classifier())
         patched_pickle_load.assert_called()
 
-    def test_load_new_scikit_learn_version(self):
+    def test_load_new_scikit_learn_version(self) -> None:
         """
         GIVEN:
             - classifier pickle file created with a different scikit-learn version
@@ -409,7 +409,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         # Need to rethink how to pass the load through to a file with a single
         # old model?
 
-    def test_one_correspondent_predict(self):
+    def test_one_correspondent_predict(self) -> None:
         c1 = Correspondent.objects.create(
             name="c1",
             matching_algorithm=Correspondent.MATCH_AUTO,
@@ -424,7 +424,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.classifier.train()
         self.assertEqual(self.classifier.predict_correspondent(doc1.content), c1.pk)
 
-    def test_one_correspondent_predict_manydocs(self):
+    def test_one_correspondent_predict_manydocs(self) -> None:
         c1 = Correspondent.objects.create(
             name="c1",
             matching_algorithm=Correspondent.MATCH_AUTO,
@@ -445,7 +445,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertEqual(self.classifier.predict_correspondent(doc1.content), c1.pk)
         self.assertIsNone(self.classifier.predict_correspondent(doc2.content))
 
-    def test_one_type_predict(self):
+    def test_one_type_predict(self) -> None:
         dt = DocumentType.objects.create(
             name="dt",
             matching_algorithm=DocumentType.MATCH_AUTO,
@@ -461,7 +461,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.classifier.train()
         self.assertEqual(self.classifier.predict_document_type(doc1.content), dt.pk)
 
-    def test_one_type_predict_manydocs(self):
+    def test_one_type_predict_manydocs(self) -> None:
         dt = DocumentType.objects.create(
             name="dt",
             matching_algorithm=DocumentType.MATCH_AUTO,
@@ -484,7 +484,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertEqual(self.classifier.predict_document_type(doc1.content), dt.pk)
         self.assertIsNone(self.classifier.predict_document_type(doc2.content))
 
-    def test_one_path_predict(self):
+    def test_one_path_predict(self) -> None:
         sp = StoragePath.objects.create(
             name="sp",
             matching_algorithm=StoragePath.MATCH_AUTO,
@@ -500,7 +500,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.classifier.train()
         self.assertEqual(self.classifier.predict_storage_path(doc1.content), sp.pk)
 
-    def test_one_path_predict_manydocs(self):
+    def test_one_path_predict_manydocs(self) -> None:
         sp = StoragePath.objects.create(
             name="sp",
             matching_algorithm=StoragePath.MATCH_AUTO,
@@ -523,7 +523,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertEqual(self.classifier.predict_storage_path(doc1.content), sp.pk)
         self.assertIsNone(self.classifier.predict_storage_path(doc2.content))
 
-    def test_one_tag_predict(self):
+    def test_one_tag_predict(self) -> None:
         t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
 
         doc1 = Document.objects.create(
@@ -536,7 +536,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.classifier.train()
         self.assertListEqual(self.classifier.predict_tags(doc1.content), [t1.pk])
 
-    def test_one_tag_predict_unassigned(self):
+    def test_one_tag_predict_unassigned(self) -> None:
         Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
 
         doc1 = Document.objects.create(
@@ -548,7 +548,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.classifier.train()
         self.assertListEqual(self.classifier.predict_tags(doc1.content), [])
 
-    def test_two_tags_predict_singledoc(self):
+    def test_two_tags_predict_singledoc(self) -> None:
         t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
         t2 = Tag.objects.create(name="t2", matching_algorithm=Tag.MATCH_AUTO, pk=121)
 
@@ -563,7 +563,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.classifier.train()
         self.assertListEqual(self.classifier.predict_tags(doc4.content), [t1.pk, t2.pk])
 
-    def test_two_tags_predict(self):
+    def test_two_tags_predict(self) -> None:
         t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
         t2 = Tag.objects.create(name="t2", matching_algorithm=Tag.MATCH_AUTO, pk=121)
 
@@ -599,7 +599,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertListEqual(self.classifier.predict_tags(doc3.content), [])
         self.assertListEqual(self.classifier.predict_tags(doc4.content), [t1.pk, t2.pk])
 
-    def test_one_tag_predict_multi(self):
+    def test_one_tag_predict_multi(self) -> None:
         t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
 
         doc1 = Document.objects.create(
@@ -619,7 +619,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertListEqual(self.classifier.predict_tags(doc1.content), [t1.pk])
         self.assertListEqual(self.classifier.predict_tags(doc2.content), [t1.pk])
 
-    def test_one_tag_predict_multi_2(self):
+    def test_one_tag_predict_multi_2(self) -> None:
         t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
 
         doc1 = Document.objects.create(
@@ -638,12 +638,12 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertListEqual(self.classifier.predict_tags(doc1.content), [t1.pk])
         self.assertListEqual(self.classifier.predict_tags(doc2.content), [])
 
-    def test_load_classifier_not_exists(self):
+    def test_load_classifier_not_exists(self) -> None:
         self.assertFalse(Path(settings.MODEL_FILE).exists())
         self.assertIsNone(load_classifier())
 
     @mock.patch("documents.classifier.DocumentClassifier.load")
-    def test_load_classifier(self, load):
+    def test_load_classifier(self, load) -> None:
         Path(settings.MODEL_FILE).touch()
         self.assertIsNotNone(load_classifier())
         load.assert_called_once()
@@ -659,7 +659,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
     @pytest.mark.skip(
         reason="Disabled caching due to high memory usage - need to investigate.",
     )
-    def test_load_classifier_cached(self):
+    def test_load_classifier_cached(self) -> None:
         classifier = load_classifier()
         self.assertIsNotNone(classifier)
 
@@ -668,7 +668,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
             load.assert_not_called()
 
     @mock.patch("documents.classifier.DocumentClassifier.load")
-    def test_load_classifier_incompatible_version(self, load):
+    def test_load_classifier_incompatible_version(self, load) -> None:
         Path(settings.MODEL_FILE).touch()
         self.assertTrue(Path(settings.MODEL_FILE).exists())
 
@@ -677,7 +677,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertFalse(Path(settings.MODEL_FILE).exists())
 
     @mock.patch("documents.classifier.DocumentClassifier.load")
-    def test_load_classifier_os_error(self, load):
+    def test_load_classifier_os_error(self, load) -> None:
         Path(settings.MODEL_FILE).touch()
         self.assertTrue(Path(settings.MODEL_FILE).exists())
 
@@ -685,7 +685,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
         self.assertIsNone(load_classifier())
         self.assertTrue(Path(settings.MODEL_FILE).exists())
 
-    def test_load_old_classifier_version(self):
+    def test_load_old_classifier_version(self) -> None:
         shutil.copy(
             Path(__file__).parent / "data" / "v1.17.4.model.pickle",
             self.dirs.scratch_dir,
@@ -697,7 +697,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
             self.assertIsNone(classifier)
 
     @mock.patch("documents.classifier.DocumentClassifier.load")
-    def test_load_classifier_raise_exception(self, mock_load):
+    def test_load_classifier_raise_exception(self, mock_load) -> None:
         Path(settings.MODEL_FILE).touch()
         mock_load.side_effect = IncompatibleClassifierVersionError("Dummy Error")
         with self.assertRaises(IncompatibleClassifierVersionError):
@@ -719,7 +719,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
             load_classifier(raise_exception=True)
 
 
-def test_preprocess_content():
+def test_preprocess_content() -> None:
     """
     GIVEN:
         - Advanced text processing is enabled (default)
@@ -739,7 +739,7 @@ def test_preprocess_content():
     assert result == expected_preprocess_content
 
 
-def test_preprocess_content_nltk_disabled():
+def test_preprocess_content_nltk_disabled() -> None:
     """
     GIVEN:
         - Advanced text processing is disabled
@@ -760,7 +760,7 @@ def test_preprocess_content_nltk_disabled():
     assert result == expected_preprocess_content
 
 
-def test_preprocess_content_nltk_load_fail(mocker):
+def test_preprocess_content_nltk_load_fail(mocker) -> None:
     """
     GIVEN:
         - NLTK stop words fail to load
index 16fa2bf703d50ff62919b792982abac1a4a89408..6dc979b20e4fe57229658bd71570e9f87fba0e3f 100644
@@ -35,7 +35,7 @@ from paperless_mail.parsers import MailDocumentParser
 
 
 class _BaseTestParser(DocumentParser):
-    def get_settings(self):
+    def get_settings(self) -> None:
         """
         This parser does not implement additional settings yet
         """
@@ -43,7 +43,7 @@ class _BaseTestParser(DocumentParser):
 
 
 class DummyParser(_BaseTestParser):
-    def __init__(self, logging_group, scratch_dir, archive_path):
+    def __init__(self, logging_group, scratch_dir, archive_path) -> None:
         super().__init__(logging_group, None)
         _, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)
         self.archive_path = archive_path
@@ -51,7 +51,7 @@ class DummyParser(_BaseTestParser):
     def get_thumbnail(self, document_path, mime_type, file_name=None):
         return self.fake_thumb
 
-    def parse(self, document_path, mime_type, file_name=None):
+    def parse(self, document_path, mime_type, file_name=None) -> None:
         self.text = "The Text"
 
 
@@ -59,18 +59,18 @@ class CopyParser(_BaseTestParser):
     def get_thumbnail(self, document_path, mime_type, file_name=None):
         return self.fake_thumb
 
-    def __init__(self, logging_group, progress_callback=None):
+    def __init__(self, logging_group, progress_callback=None) -> None:
         super().__init__(logging_group, progress_callback)
         _, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=self.tempdir)
 
-    def parse(self, document_path, mime_type, file_name=None):
+    def parse(self, document_path, mime_type, file_name=None) -> None:
         self.text = "The text"
         self.archive_path = Path(self.tempdir / "archive.pdf")
         shutil.copy(document_path, self.archive_path)
 
 
 class FaultyParser(_BaseTestParser):
-    def __init__(self, logging_group, scratch_dir):
+    def __init__(self, logging_group, scratch_dir) -> None:
         super().__init__(logging_group)
         _, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)
 
@@ -82,7 +82,7 @@ class FaultyParser(_BaseTestParser):
 
 
 class FaultyGenericExceptionParser(_BaseTestParser):
-    def __init__(self, logging_group, scratch_dir):
+    def __init__(self, logging_group, scratch_dir) -> None:
         super().__init__(logging_group)
         _, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)
 
@@ -127,7 +127,7 @@ class TestConsumer(
         first_progress_max=100,
         last_progress=100,
         last_progress_max=100,
-    ):
+    ) -> None:
         self.assertGreaterEqual(len(self.status.payloads), 2)
 
         payload = self.status.payloads[0]
@@ -158,7 +158,7 @@ class TestConsumer(
     ):
         return FaultyGenericExceptionParser(logging_group, self.dirs.scratch_dir)
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         patcher = mock.patch("documents.parsers.document_consumer_declaration.send")
@@ -208,7 +208,7 @@ class TestConsumer(
         return dst
 
     @override_settings(FILENAME_FORMAT=None, TIME_ZONE="America/Chicago")
-    def testNormalOperation(self):
+    def testNormalOperation(self) -> None:
         filename = self.get_test_file()
 
         # Get the local time, as an aware datetime
@@ -250,7 +250,7 @@ class TestConsumer(
         self.assertEqual(document.created.day, rough_create_date_local.day)
 
     @override_settings(FILENAME_FORMAT=None)
-    def testDeleteMacFiles(self):
+    def testDeleteMacFiles(self) -> None:
         # https://github.com/jonaswinkler/paperless-ng/discussions/1037
 
         filename = self.get_test_file()
@@ -272,7 +272,7 @@ class TestConsumer(
         self.assertIsNotFile(shadow_file)
         self.assertIsNotFile(filename)
 
-    def testOverrideFilename(self):
+    def testOverrideFilename(self) -> None:
         filename = self.get_test_file()
         override_filename = "Statement for November.pdf"
 
@@ -290,7 +290,7 @@ class TestConsumer(
 
         self._assert_first_last_send_progress()
 
-    def testOverrideTitle(self):
+    def testOverrideTitle(self) -> None:
         with self.get_consumer(
             self.get_test_file(),
             DocumentMetadataOverrides(title="Override Title"),
@@ -304,7 +304,7 @@ class TestConsumer(
         self.assertEqual(document.title, "Override Title")
         self._assert_first_last_send_progress()
 
-    def testOverrideCorrespondent(self):
+    def testOverrideCorrespondent(self) -> None:
         c = Correspondent.objects.create(name="test")
 
         with self.get_consumer(
@@ -320,7 +320,7 @@ class TestConsumer(
         self.assertEqual(document.correspondent.id, c.id)
         self._assert_first_last_send_progress()
 
-    def testOverrideDocumentType(self):
+    def testOverrideDocumentType(self) -> None:
         dt = DocumentType.objects.create(name="test")
 
         with self.get_consumer(
@@ -334,7 +334,7 @@ class TestConsumer(
         self.assertEqual(document.document_type.id, dt.id)
         self._assert_first_last_send_progress()
 
-    def testOverrideStoragePath(self):
+    def testOverrideStoragePath(self) -> None:
         sp = StoragePath.objects.create(name="test")
 
         with self.get_consumer(
@@ -348,7 +348,7 @@ class TestConsumer(
         self.assertEqual(document.storage_path.id, sp.id)
         self._assert_first_last_send_progress()
 
-    def testOverrideTags(self):
+    def testOverrideTags(self) -> None:
         t1 = Tag.objects.create(name="t1")
         t2 = Tag.objects.create(name="t2")
         t3 = Tag.objects.create(name="t3")
@@ -366,7 +366,7 @@ class TestConsumer(
         self.assertIn(t3, document.tags.all())
         self._assert_first_last_send_progress()
 
-    def testOverrideCustomFields(self):
+    def testOverrideCustomFields(self) -> None:
         cf1 = CustomField.objects.create(name="Custom Field 1", data_type="string")
         cf2 = CustomField.objects.create(
             name="Custom Field 2",
@@ -400,7 +400,7 @@ class TestConsumer(
         )
         self._assert_first_last_send_progress()
 
-    def testOverrideAsn(self):
+    def testOverrideAsn(self) -> None:
         with self.get_consumer(
             self.get_test_file(),
             DocumentMetadataOverrides(asn=123),
@@ -412,7 +412,7 @@ class TestConsumer(
         self.assertEqual(document.archive_serial_number, 123)
         self._assert_first_last_send_progress()
 
-    def testMetadataOverridesSkipAsnPropagation(self):
+    def testMetadataOverridesSkipAsnPropagation(self) -> None:
         overrides = DocumentMetadataOverrides()
         incoming = DocumentMetadataOverrides(skip_asn=True)
 
@@ -420,7 +420,7 @@ class TestConsumer(
 
         self.assertTrue(overrides.skip_asn)
 
-    def testOverrideTitlePlaceholders(self):
+    def testOverrideTitlePlaceholders(self) -> None:
         c = Correspondent.objects.create(name="Correspondent Name")
         dt = DocumentType.objects.create(name="DocType Name")
 
@@ -440,7 +440,7 @@ class TestConsumer(
         self.assertEqual(document.title, f"{c.name}{dt.name} {now.strftime('%m-%y')}")
         self._assert_first_last_send_progress()
 
-    def testOverrideOwner(self):
+    def testOverrideOwner(self) -> None:
         testuser = User.objects.create(username="testuser")
 
         with self.get_consumer(
@@ -454,7 +454,7 @@ class TestConsumer(
         self.assertEqual(document.owner, testuser)
         self._assert_first_last_send_progress()
 
-    def testOverridePermissions(self):
+    def testOverridePermissions(self) -> None:
         testuser = User.objects.create(username="testuser")
         testgroup = Group.objects.create(name="testgroup")
 
@@ -475,13 +475,13 @@ class TestConsumer(
         self.assertTrue(group_checker.has_perm("view_document", document))
         self._assert_first_last_send_progress()
 
-    def testNotAFile(self):
+    def testNotAFile(self) -> None:
         with self.assertRaisesMessage(ConsumerError, "File not found"):
             with self.get_consumer(Path("non-existing-file")) as consumer:
                 consumer.run()
         self._assert_first_last_send_progress(last_status="FAILED")
 
-    def testDuplicates1(self):
+    def testDuplicates1(self) -> None:
         with self.get_consumer(self.get_test_file()) as consumer:
             consumer.run()
 
@@ -491,7 +491,7 @@ class TestConsumer(
         self.assertEqual(Document.objects.count(), 2)
         self._assert_first_last_send_progress()
 
-    def testDuplicates2(self):
+    def testDuplicates2(self) -> None:
         with self.get_consumer(self.get_test_file()) as consumer:
             consumer.run()
 
@@ -501,13 +501,13 @@ class TestConsumer(
         self.assertEqual(Document.objects.count(), 2)
         self._assert_first_last_send_progress()
 
-    def testDuplicates3(self):
+    def testDuplicates3(self) -> None:
         with self.get_consumer(self.get_test_archive_file()) as consumer:
             consumer.run()
         with self.get_consumer(self.get_test_file()) as consumer:
             consumer.run()
 
-    def testDuplicateInTrash(self):
+    def testDuplicateInTrash(self) -> None:
         with self.get_consumer(self.get_test_file()) as consumer:
             consumer.run()
 
@@ -518,7 +518,7 @@ class TestConsumer(
 
         self.assertEqual(Document.objects.count(), 1)
 
-    def testAsnExists(self):
+    def testAsnExists(self) -> None:
         with self.get_consumer(
             self.get_test_file(),
             DocumentMetadataOverrides(asn=123),
@@ -532,7 +532,7 @@ class TestConsumer(
             ) as consumer:
                 consumer.run()
 
-    def testAsnExistsInTrash(self):
+    def testAsnExistsInTrash(self) -> None:
         with self.get_consumer(
             self.get_test_file(),
             DocumentMetadataOverrides(asn=123),
@@ -550,7 +550,7 @@ class TestConsumer(
                 consumer.run()
 
     @mock.patch("documents.parsers.document_consumer_declaration.send")
-    def testNoParsers(self, m):
+    def testNoParsers(self, m) -> None:
         m.return_value = []
 
         with self.assertRaisesMessage(
@@ -563,7 +563,7 @@ class TestConsumer(
         self._assert_first_last_send_progress(last_status="FAILED")
 
     @mock.patch("documents.parsers.document_consumer_declaration.send")
-    def testFaultyParser(self, m):
+    def testFaultyParser(self, m) -> None:
         m.return_value = [
             (
                 None,
@@ -585,7 +585,7 @@ class TestConsumer(
         self._assert_first_last_send_progress(last_status="FAILED")
 
     @mock.patch("documents.parsers.document_consumer_declaration.send")
-    def testGenericParserException(self, m):
+    def testGenericParserException(self, m) -> None:
         m.return_value = [
             (
                 None,
@@ -607,7 +607,7 @@ class TestConsumer(
         self._assert_first_last_send_progress(last_status="FAILED")
 
     @mock.patch("documents.consumer.ConsumerPlugin._write")
-    def testPostSaveError(self, m):
+    def testPostSaveError(self, m) -> None:
         filename = self.get_test_file()
         m.side_effect = OSError("NO.")
 
@@ -627,7 +627,7 @@ class TestConsumer(
         self.assertEqual(Document.objects.all().count(), 0)
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
-    def testFilenameHandling(self):
+    def testFilenameHandling(self) -> None:
         with self.get_consumer(
             self.get_test_file(),
             DocumentMetadataOverrides(title="new docs"),
@@ -644,7 +644,7 @@ class TestConsumer(
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
     @mock.patch("documents.signals.handlers.generate_unique_filename")
-    def testFilenameHandlingUnstableFormat(self, m):
+    def testFilenameHandlingUnstableFormat(self, m) -> None:
         filenames = ["this", "that", "now this", "i cannot decide"]
 
         def get_filename():
@@ -672,7 +672,7 @@ class TestConsumer(
         self._assert_first_last_send_progress()
 
     @mock.patch("documents.consumer.load_classifier")
-    def testClassifyDocument(self, m):
+    def testClassifyDocument(self, m) -> None:
         correspondent = Correspondent.objects.create(
             name="test",
             matching_algorithm=Correspondent.MATCH_AUTO,
@@ -702,7 +702,7 @@ class TestConsumer(
         self._assert_first_last_send_progress()
 
     @override_settings(CONSUMER_DELETE_DUPLICATES=True)
-    def test_delete_duplicate(self):
+    def test_delete_duplicate(self) -> None:
         dst = self.get_test_file()
         self.assertIsFile(dst)
 
@@ -733,7 +733,7 @@ class TestConsumer(
         self._assert_first_last_send_progress(last_status=ProgressStatusOptions.FAILED)
 
     @override_settings(CONSUMER_DELETE_DUPLICATES=True)
-    def test_delete_duplicate_in_trash(self):
+    def test_delete_duplicate_in_trash(self) -> None:
         dst = self.get_test_file()
         with self.get_consumer(dst) as consumer:
             consumer.run()
@@ -760,7 +760,7 @@ class TestConsumer(
         self.assertEqual(Document.objects.count(), 0)
 
     @override_settings(CONSUMER_DELETE_DUPLICATES=False)
-    def test_no_delete_duplicate(self):
+    def test_no_delete_duplicate(self) -> None:
         dst = self.get_test_file()
         self.assertIsFile(dst)
 
@@ -786,7 +786,7 @@ class TestConsumer(
 
     @override_settings(FILENAME_FORMAT="{title}")
     @mock.patch("documents.parsers.document_consumer_declaration.send")
-    def test_similar_filenames(self, m):
+    def test_similar_filenames(self, m) -> None:
         shutil.copy(
             Path(__file__).parent / "samples" / "simple.pdf",
             settings.CONSUMPTION_DIR / "simple.pdf",
@@ -837,7 +837,7 @@ class TestConsumer(
         sanity_check()
 
     @mock.patch("documents.consumer.run_subprocess")
-    def test_try_to_clean_invalid_pdf(self, m):
+    def test_try_to_clean_invalid_pdf(self, m) -> None:
         shutil.copy(
             Path(__file__).parent / "samples" / "invalid_pdf.pdf",
             settings.CONSUMPTION_DIR / "invalid_pdf.pdf",
@@ -865,7 +865,7 @@ class TestConsumer(
         mock_consumer_declaration_send: mock.Mock,
         mock_mail_parser_parse: mock.Mock,
         mock_mailrule_get: mock.Mock,
-    ):
+    ) -> None:
         """
         GIVEN:
             - A mail document from a mail rule
@@ -913,10 +913,10 @@ class TestConsumer(
 
 @mock.patch("documents.consumer.magic.from_file", fake_magic_from_file)
 class TestConsumerCreatedDate(DirectoriesMixin, GetConsumerMixin, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
-    def test_consume_date_from_content(self):
+    def test_consume_date_from_content(self) -> None:
         """
         GIVEN:
             - File content with date in DMY (default) format
@@ -945,7 +945,7 @@ class TestConsumerCreatedDate(DirectoriesMixin, GetConsumerMixin, TestCase):
         )
 
     @override_settings(FILENAME_DATE_ORDER="YMD")
-    def test_consume_date_from_filename(self):
+    def test_consume_date_from_filename(self) -> None:
         """
         GIVEN:
             - File content with date in DMY (default) format
@@ -974,7 +974,7 @@ class TestConsumerCreatedDate(DirectoriesMixin, GetConsumerMixin, TestCase):
             datetime.date(2022, 2, 1),
         )
 
-    def test_consume_date_filename_date_use_content(self):
+    def test_consume_date_filename_date_use_content(self) -> None:
         """
         GIVEN:
             - File content with date in DMY (default) format
@@ -1007,7 +1007,7 @@ class TestConsumerCreatedDate(DirectoriesMixin, GetConsumerMixin, TestCase):
     @override_settings(
         IGNORE_DATES=(datetime.date(2010, 12, 13), datetime.date(2011, 11, 12)),
     )
-    def test_consume_date_use_content_with_ignore(self):
+    def test_consume_date_use_content_with_ignore(self) -> None:
         """
         GIVEN:
             - File content with dates in DMY (default) format
@@ -1052,20 +1052,20 @@ class PreConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
 
     @mock.patch("documents.consumer.run_subprocess")
     @override_settings(PRE_CONSUME_SCRIPT=None)
-    def test_no_pre_consume_script(self, m):
+    def test_no_pre_consume_script(self, m) -> None:
         with self.get_consumer(self.test_file) as c:
             c.run()
             m.assert_not_called()
 
     @mock.patch("documents.consumer.run_subprocess")
     @override_settings(PRE_CONSUME_SCRIPT="does-not-exist")
-    def test_pre_consume_script_not_found(self, m):
+    def test_pre_consume_script_not_found(self, m) -> None:
         with self.get_consumer(self.test_file) as c:
             self.assertRaises(ConsumerError, c.run)
             m.assert_not_called()
 
     @mock.patch("documents.consumer.run_subprocess")
-    def test_pre_consume_script(self, m):
+    def test_pre_consume_script(self, m) -> None:
         with tempfile.NamedTemporaryFile() as script:
             with override_settings(PRE_CONSUME_SCRIPT=script.name):
                 with self.get_consumer(self.test_file) as c:
@@ -1088,7 +1088,7 @@ class PreConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
                     }
                     self.assertDictEqual(environment, {**environment, **subset})
 
-    def test_script_with_output(self):
+    def test_script_with_output(self) -> None:
         """
         GIVEN:
             - A script which outputs to stdout and stderr
@@ -1121,7 +1121,7 @@ class PreConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
                         cm.output,
                     )
 
-    def test_script_exit_non_zero(self):
+    def test_script_exit_non_zero(self) -> None:
         """
         GIVEN:
             - A script which exits with a non-zero exit code
@@ -1163,7 +1163,7 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
 
     @mock.patch("documents.consumer.run_subprocess")
     @override_settings(POST_CONSUME_SCRIPT=None)
-    def test_no_post_consume_script(self, m):
+    def test_no_post_consume_script(self, m) -> None:
         doc = Document.objects.create(title="Test", mime_type="application/pdf")
         tag1 = Tag.objects.create(name="a")
         tag2 = Tag.objects.create(name="b")
@@ -1175,7 +1175,7 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
         m.assert_not_called()
 
     @override_settings(POST_CONSUME_SCRIPT="does-not-exist")
-    def test_post_consume_script_not_found(self):
+    def test_post_consume_script_not_found(self) -> None:
         doc = Document.objects.create(title="Test", mime_type="application/pdf")
 
         with self.get_consumer(self.test_file) as consumer:
@@ -1186,7 +1186,7 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
                 consumer.run_post_consume_script(doc)
 
     @mock.patch("documents.consumer.run_subprocess")
-    def test_post_consume_script_simple(self, m):
+    def test_post_consume_script_simple(self, m) -> None:
         with tempfile.NamedTemporaryFile() as script:
             with override_settings(POST_CONSUME_SCRIPT=script.name):
                 doc = Document.objects.create(title="Test", mime_type="application/pdf")
@@ -1197,7 +1197,7 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
                 m.assert_called_once()
 
     @mock.patch("documents.consumer.run_subprocess")
-    def test_post_consume_script_with_correspondent_and_type(self, m):
+    def test_post_consume_script_with_correspondent_and_type(self, m) -> None:
         with tempfile.NamedTemporaryFile() as script:
             with override_settings(POST_CONSUME_SCRIPT=script.name):
                 c = Correspondent.objects.create(name="my_bank")
@@ -1244,7 +1244,7 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
 
                 self.assertDictEqual(environment, {**environment, **subset})
 
-    def test_script_exit_non_zero(self):
+    def test_script_exit_non_zero(self) -> None:
         """
         GIVEN:
             - A script which exits with a non-zero exit code
index f565a954476f3032fb1198ceaadb2a64ac7aa73d..c9c1fb8376daf5ef30b6a1021b909468f35d942d 100644
@@ -10,29 +10,29 @@ from documents.parsers import parse_date_generator
 
 @pytest.mark.django_db()
 class TestDate:
-    def test_date_format_1(self):
+    def test_date_format_1(self) -> None:
         text = "lorem ipsum 130218 lorem ipsum"
         assert parse_date("", text) is None
 
-    def test_date_format_2(self):
+    def test_date_format_2(self) -> None:
         text = "lorem ipsum 2018 lorem ipsum"
         assert parse_date("", text) is None
 
-    def test_date_format_3(self):
+    def test_date_format_3(self) -> None:
         text = "lorem ipsum 20180213 lorem ipsum"
         assert parse_date("", text) is None
 
-    def test_date_format_4(self, settings_timezone: ZoneInfo):
+    def test_date_format_4(self, settings_timezone: ZoneInfo) -> None:
         text = "lorem ipsum 13.02.2018 lorem ipsum"
         date = parse_date("", text)
         assert date == datetime.datetime(2018, 2, 13, 0, 0, tzinfo=settings_timezone)
 
-    def test_date_format_5(self, settings_timezone: ZoneInfo):
+    def test_date_format_5(self, settings_timezone: ZoneInfo) -> None:
         text = "lorem ipsum 130218, 2018, 20180213 and lorem 13.02.2018 lorem ipsum"
         date = parse_date("", text)
         assert date == datetime.datetime(2018, 2, 13, 0, 0, tzinfo=settings_timezone)
 
-    def test_date_format_6(self):
+    def test_date_format_6(self) -> None:
         text = (
             "lorem ipsum\n"
             "Wohnort\n"
@@ -50,7 +50,7 @@ class TestDate:
         self,
         settings: SettingsWrapper,
         settings_timezone: ZoneInfo,
-    ):
+    ) -> None:
         settings.DATE_PARSER_LANGUAGES = ["de"]
         text = "lorem ipsum\nMärz 2019\nlorem ipsum"
         date = parse_date("", text)
@@ -60,7 +60,7 @@ class TestDate:
         self,
         settings: SettingsWrapper,
         settings_timezone: ZoneInfo,
-    ):
+    ) -> None:
         settings.DATE_PARSER_LANGUAGES = ["de"]
         text = (
             "lorem ipsum\n"
@@ -87,7 +87,7 @@ class TestDate:
         self,
         settings: SettingsWrapper,
         settings_timezone: ZoneInfo,
-    ):
+    ) -> None:
         settings.DATE_PARSER_LANGUAGES = ["de"]
         text = "lorem ipsum\n27. Nullmonth 2020\nMärz 2020\nlorem ipsum"
         assert parse_date("", text) == datetime.datetime(
@@ -99,7 +99,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_10(self, settings_timezone: ZoneInfo):
+    def test_date_format_10(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 22-MAR-2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -110,7 +110,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_11(self, settings_timezone: ZoneInfo):
+    def test_date_format_11(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 22 MAR 2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -121,7 +121,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_12(self, settings_timezone: ZoneInfo):
+    def test_date_format_12(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 22/MAR/2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -132,7 +132,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_13(self, settings_timezone: ZoneInfo):
+    def test_date_format_13(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 22.MAR.2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -143,7 +143,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_14(self, settings_timezone: ZoneInfo):
+    def test_date_format_14(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 22.MAR 2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -154,23 +154,23 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_15(self):
+    def test_date_format_15(self) -> None:
         text = "Customer Number Currency 22.MAR.22 Credit Card 1934829304"
         assert parse_date("", text) is None
 
-    def test_date_format_16(self):
+    def test_date_format_16(self) -> None:
         text = "Customer Number Currency 22.MAR,22 Credit Card 1934829304"
         assert parse_date("", text) is None
 
-    def test_date_format_17(self):
+    def test_date_format_17(self) -> None:
         text = "Customer Number Currency 22,MAR,2022 Credit Card 1934829304"
         assert parse_date("", text) is None
 
-    def test_date_format_18(self):
+    def test_date_format_18(self) -> None:
         text = "Customer Number Currency 22 MAR,2022 Credit Card 1934829304"
         assert parse_date("", text) is None
 
-    def test_date_format_19(self, settings_timezone: ZoneInfo):
+    def test_date_format_19(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 21st MAR 2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -181,7 +181,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_20(self, settings_timezone: ZoneInfo):
+    def test_date_format_20(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 22nd March 2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -192,7 +192,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_21(self, settings_timezone: ZoneInfo):
+    def test_date_format_21(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 2nd MAR 2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -203,7 +203,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_22(self, settings_timezone: ZoneInfo):
+    def test_date_format_22(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 23rd MAR 2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -214,7 +214,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_23(self, settings_timezone: ZoneInfo):
+    def test_date_format_23(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 24th MAR 2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -225,7 +225,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_24(self, settings_timezone: ZoneInfo):
+    def test_date_format_24(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 21-MAR-2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -236,7 +236,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_25(self, settings_timezone: ZoneInfo):
+    def test_date_format_25(self, settings_timezone: ZoneInfo) -> None:
         text = "Customer Number Currency 25TH MAR 2022 Credit Card 1934829304"
         assert parse_date("", text) == datetime.datetime(
             2022,
@@ -247,7 +247,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_date_format_26(self, settings_timezone: ZoneInfo):
+    def test_date_format_26(self, settings_timezone: ZoneInfo) -> None:
         text = "CHASE 0 September 25, 2019 JPMorgan Chase Bank, NA. P0 Box 182051"
         assert parse_date("", text) == datetime.datetime(
             2019,
@@ -258,20 +258,20 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_crazy_date_past(self):
+    def test_crazy_date_past(self) -> None:
         assert parse_date("", "01-07-0590 00:00:00") is None
 
-    def test_crazy_date_future(self):
+    def test_crazy_date_future(self) -> None:
         assert parse_date("", "01-07-2350 00:00:00") is None
 
-    def test_crazy_date_with_spaces(self):
+    def test_crazy_date_with_spaces(self) -> None:
         assert parse_date("", "20 408000l 2475") is None
 
     def test_utf_month_names(
         self,
         settings: SettingsWrapper,
         settings_timezone: ZoneInfo,
-    ):
+    ) -> None:
         settings.DATE_PARSER_LANGUAGES = ["fr", "de", "hr", "cs", "pl", "tr"]
         assert parse_date("", "13 décembre 2023") == datetime.datetime(
             2023,
@@ -378,7 +378,7 @@ class TestDate:
             tzinfo=settings_timezone,
         )
 
-    def test_multiple_dates(self, settings_timezone: ZoneInfo):
+    def test_multiple_dates(self, settings_timezone: ZoneInfo) -> None:
         text = """This text has multiple dates.
                   For example 02.02.2018, 22 July 2022 and December 2021.
                   But not 24-12-9999 because it's in the future..."""
@@ -408,7 +408,7 @@ class TestDate:
         self,
         settings: SettingsWrapper,
         settings_timezone: ZoneInfo,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Date parsing from the filename is enabled
@@ -429,7 +429,7 @@ class TestDate:
         self,
         settings: SettingsWrapper,
         settings_timezone: ZoneInfo,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Date parsing from the filename is enabled
@@ -445,7 +445,7 @@ class TestDate:
             "No date in here",
         ) == datetime.datetime(2021, 1, 10, 0, 0, tzinfo=settings_timezone)
 
-    def test_filename_date_parse_invalid(self, settings: SettingsWrapper):
+    def test_filename_date_parse_invalid(self, settings: SettingsWrapper) -> None:
         """
         GIVEN:
             - Date parsing from the filename is enabled
@@ -462,7 +462,7 @@ class TestDate:
         self,
         settings: SettingsWrapper,
         settings_timezone: ZoneInfo,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Date parsing from the filename is enabled
@@ -486,7 +486,7 @@ class TestDate:
         self,
         settings: SettingsWrapper,
         settings_timezone: ZoneInfo,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Ignore dates have been set
@@ -511,7 +511,7 @@ class TestDate:
         self,
         settings: SettingsWrapper,
         settings_timezone: ZoneInfo,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Ignore dates have been set
index 3ee4fb15d9b14004013c7dd632a0498420ce174d..6357d9030fbaf3e1112d48c17e0770b9b7b7fe12 100644
@@ -6,7 +6,7 @@ from documents.models import User
 
 
 class TestDelayedQuery(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         # all tests run without permission criteria, so has_no_owner query will always
         # be appended.
@@ -40,7 +40,7 @@ class TestDelayedQuery(TestCase):
             ),
         )
 
-    def test_get_permission_criteria(self):
+    def test_get_permission_criteria(self) -> None:
         # tests contains tuples of user instances and the expected filter
         tests = (
             (None, [query.Term("has_owner", text=False)]),
index 46b16d727d634f10a38707051cb29e3abfc445c4..39d6ce8c3eae4fdccbd1028242b6880a1b994e87 100644
@@ -30,7 +30,7 @@ class TestDocument(TestCase):
         shutil.rmtree(self.thumb_dir)
         self.overrides.disable()
 
-    def test_file_deletion(self):
+    def test_file_deletion(self) -> None:
         document = Document.objects.create(
             correspondent=Correspondent.objects.create(name="Test0"),
             title="Title",
@@ -50,7 +50,7 @@ class TestDocument(TestCase):
             empty_trash([document.pk])
             self.assertEqual(mock_unlink.call_count, 2)
 
-    def test_document_soft_delete(self):
+    def test_document_soft_delete(self) -> None:
         document = Document.objects.create(
             correspondent=Correspondent.objects.create(name="Test0"),
             title="Title",
@@ -78,7 +78,7 @@ class TestDocument(TestCase):
             empty_trash([document.pk])
             self.assertEqual(mock_unlink.call_count, 2)
 
-    def test_file_name(self):
+    def test_file_name(self) -> None:
         doc = Document(
             mime_type="application/pdf",
             title="test",
@@ -86,7 +86,7 @@ class TestDocument(TestCase):
         )
         self.assertEqual(doc.get_public_filename(), "2020-12-25 test.pdf")
 
-    def test_file_name_jpg(self):
+    def test_file_name_jpg(self) -> None:
         doc = Document(
             mime_type="image/jpeg",
             title="test",
@@ -94,7 +94,7 @@ class TestDocument(TestCase):
         )
         self.assertEqual(doc.get_public_filename(), "2020-12-25 test.jpg")
 
-    def test_file_name_unknown(self):
+    def test_file_name_unknown(self) -> None:
         doc = Document(
             mime_type="application/zip",
             title="test",
@@ -102,7 +102,7 @@ class TestDocument(TestCase):
         )
         self.assertEqual(doc.get_public_filename(), "2020-12-25 test.zip")
 
-    def test_file_name_invalid_type(self):
+    def test_file_name_invalid_type(self) -> None:
         doc = Document(
             mime_type="image/jpegasd",
             title="test",
@@ -111,7 +111,7 @@ class TestDocument(TestCase):
         self.assertEqual(doc.get_public_filename(), "2020-12-25 test")
 
 
-def test_suggestion_content():
+def test_suggestion_content() -> None:
     """
     Check that the document for suggestion is cropped, only if it exceeds the length limit.
     """
index 32ca5ceab8638d03325b110ac77437c833f2925d..9ae7ce63adc64ddd7b24fd6ced3844d5f307bbb9 100644 (file)
@@ -27,7 +27,7 @@ from documents.tests.utils import FileSystemAssertsMixin
 class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     SAMPLE_DIR = Path(__file__).parent / "samples"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.dirs.double_sided_dir = self.dirs.consumption_dir / "double-sided"
         self.dirs.double_sided_dir.mkdir()
@@ -56,13 +56,13 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsNotFile(dst)
         return msg
 
-    def create_staging_file(self, src="double-sided-odd.pdf", datetime=None):
+    def create_staging_file(self, src="double-sided-odd.pdf", datetime=None) -> None:
         shutil.copy(self.SAMPLE_DIR / src, self.staging_file)
         if datetime is None:
             datetime = dt.datetime.now()
         os.utime(str(self.staging_file), (datetime.timestamp(),) * 2)
 
-    def test_odd_numbered_moved_to_staging(self):
+    def test_odd_numbered_moved_to_staging(self) -> None:
         """
         GIVEN:
             - No staging file exists
@@ -85,7 +85,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
         self.assertIn("Received odd numbered pages", msg)
 
-    def test_collation(self):
+    def test_collation(self) -> None:
         """
         GIVEN:
             - A staging file not older than TIMEOUT_MINUTES with odd pages exists
@@ -113,7 +113,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             r"This is page 4.*This is page 5",
         )
 
-    def test_staging_file_expiration(self):
+    def test_staging_file_expiration(self) -> None:
         """
         GIVEN:
             - A staging file older than TIMEOUT_MINUTES exists
@@ -131,7 +131,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsFile(self.staging_file)
         self.assertIn("Received odd numbered pages", msg)
 
-    def test_less_odd_pages_then_even_fails(self):
+    def test_less_odd_pages_then_even_fails(self) -> None:
         """
         GIVEN:
             - A valid staging file
@@ -151,7 +151,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsNotFile(self.staging_file)
 
     @override_settings(CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT=True)
-    def test_tiff_upload_enabled(self):
+    def test_tiff_upload_enabled(self) -> None:
         """
         GIVEN:
             - CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT is true
@@ -169,7 +169,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         Pdf.open(self.staging_file)
 
     @override_settings(CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT=False)
-    def test_tiff_upload_disabled(self):
+    def test_tiff_upload_disabled(self) -> None:
         """
         GIVEN:
             - CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT is false
@@ -188,7 +188,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(CONSUMER_COLLATE_DOUBLE_SIDED_SUBDIR_NAME="quux")
-    def test_different_upload_dir_name(self):
+    def test_different_upload_dir_name(self) -> None:
         """
         GIVEN:
             - No staging file exists
@@ -201,7 +201,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.consume_file("double-sided-odd.pdf", Path("..") / "quux" / "foo.pdf")
         self.assertIsFile(self.staging_file)
 
-    def test_only_double_sided_dir_is_handled(self):
+    def test_only_double_sided_dir_is_handled(self) -> None:
         """
         GIVEN:
             - No staging file exists
@@ -214,7 +214,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsNotFile(self.staging_file)
         self.assertRegex(msg, r"Success. New document id \d+ created")
 
-    def test_subdirectory_upload(self):
+    def test_subdirectory_upload(self) -> None:
         """
         GIVEN:
             - A staging file exists
@@ -241,7 +241,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
                 )
 
     @override_settings(CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED=False)
-    def test_disabled_double_sided_dir_upload(self):
+    def test_disabled_double_sided_dir_upload(self) -> None:
         """
         GIVEN:
             - CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED is false
index f6764d3f87299839947d9d04fc0d844f23b2f23b..1d4be14fe4b7042ba609ab5bee84cf8bb7a88c48 100644 (file)
@@ -31,7 +31,7 @@ from documents.tests.utils import FileSystemAssertsMixin
 
 class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     @override_settings(FILENAME_FORMAT="")
-    def test_generate_source_filename(self):
+    def test_generate_source_filename(self) -> None:
         document = Document()
         document.mime_type = "application/pdf"
         document.save()
@@ -39,7 +39,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertEqual(generate_filename(document), Path(f"{document.pk:07d}.pdf"))
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
-    def test_file_renaming(self):
+    def test_file_renaming(self) -> None:
         document = Document()
         document.mime_type = "application/pdf"
         document.save()
@@ -78,7 +78,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
-    def test_file_renaming_missing_permissions(self):
+    def test_file_renaming_missing_permissions(self) -> None:
         document = Document()
         document.mime_type = "application/pdf"
 
@@ -112,7 +112,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         (settings.ORIGINALS_DIR / "none").chmod(0o777)
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
-    def test_file_renaming_database_error(self):
+    def test_file_renaming_database_error(self) -> None:
         Document.objects.create(
             mime_type="application/pdf",
             checksum="AAAAA",
@@ -153,7 +153,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             self.assertEqual(document.filename, "none/none.pdf")
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
-    def test_document_delete(self):
+    def test_document_delete(self) -> None:
         document = Document()
         document.mime_type = "application/pdf"
 
@@ -179,7 +179,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         FILENAME_FORMAT="{correspondent}/{correspondent}",
         EMPTY_TRASH_DIR=Path(tempfile.mkdtemp()),
     )
-    def test_document_delete_trash_dir(self):
+    def test_document_delete_trash_dir(self) -> None:
         document = Document()
         document.mime_type = "application/pdf"
 
@@ -218,7 +218,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsFile(Path(settings.EMPTY_TRASH_DIR) / "none_01.pdf")
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
-    def test_document_delete_nofile(self):
+    def test_document_delete_nofile(self) -> None:
         document = Document()
         document.mime_type = "application/pdf"
 
@@ -228,7 +228,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         empty_trash([document.pk])
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
-    def test_directory_not_empty(self):
+    def test_directory_not_empty(self) -> None:
         document = Document()
         document.mime_type = "application/pdf"
 
@@ -254,7 +254,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsFile(important_file)
 
     @override_settings(FILENAME_FORMAT="{document_type} - {title}")
-    def test_document_type(self):
+    def test_document_type(self) -> None:
         dt = DocumentType.objects.create(name="my_doc_type")
         d = Document.objects.create(title="the_doc", mime_type="application/pdf")
 
@@ -265,7 +265,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertEqual(generate_filename(d), Path("my_doc_type - the_doc.pdf"))
 
     @override_settings(FILENAME_FORMAT="{asn} - {title}")
-    def test_asn(self):
+    def test_asn(self) -> None:
         d1 = Document.objects.create(
             title="the_doc",
             mime_type="application/pdf",
@@ -282,7 +282,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertEqual(generate_filename(d2), Path("none - the_doc.pdf"))
 
     @override_settings(FILENAME_FORMAT="{title} {tag_list}")
-    def test_tag_list(self):
+    def test_tag_list(self) -> None:
         doc = Document.objects.create(title="doc1", mime_type="application/pdf")
         doc.tags.create(name="tag2")
         doc.tags.create(name="tag1")
@@ -298,7 +298,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertEqual(generate_filename(doc), Path("doc2.pdf"))
 
     @override_settings(FILENAME_FORMAT="//etc/something/{title}")
-    def test_filename_relative(self):
+    def test_filename_relative(self) -> None:
         doc = Document.objects.create(title="doc1", mime_type="application/pdf")
         doc.filename = generate_filename(doc)
         doc.save()
@@ -311,7 +311,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     @override_settings(
         FILENAME_FORMAT="{created_year}-{created_month}-{created_day}",
     )
-    def test_created_year_month_day(self):
+    def test_created_year_month_day(self) -> None:
         d1 = timezone.make_aware(datetime.datetime(2020, 3, 6, 1, 1, 1))
         doc1 = Document.objects.create(
             title="doc1",
@@ -328,7 +328,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     @override_settings(
         FILENAME_FORMAT="{added_year}-{added_month}-{added_day}",
     )
-    def test_added_year_month_day(self):
+    def test_added_year_month_day(self) -> None:
         d1 = timezone.make_aware(datetime.datetime(232, 1, 9, 1, 1, 1))
         doc1 = Document.objects.create(
             title="doc1",
@@ -345,7 +345,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     @override_settings(
         FILENAME_FORMAT="{correspondent}/{correspondent}/{correspondent}",
     )
-    def test_nested_directory_cleanup(self):
+    def test_nested_directory_cleanup(self) -> None:
         document = Document()
         document.mime_type = "application/pdf"
 
@@ -372,7 +372,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsDir(settings.ORIGINALS_DIR)
 
     @override_settings(FILENAME_FORMAT="{doc_pk}")
-    def test_format_doc_pk(self):
+    def test_format_doc_pk(self) -> None:
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
@@ -384,14 +384,14 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertEqual(generate_filename(document), Path("0013579.pdf"))
 
     @override_settings(FILENAME_FORMAT=None)
-    def test_format_none(self):
+    def test_format_none(self) -> None:
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
 
         self.assertEqual(generate_filename(document), Path("0000001.pdf"))
 
-    def test_try_delete_empty_directories(self):
+    def test_try_delete_empty_directories(self) -> None:
         # Create our working directory
         tmp: Path = settings.ORIGINALS_DIR / "test_delete_empty"
         tmp.mkdir(exist_ok=True, parents=True)
@@ -409,7 +409,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsNotDir(tmp / "notempty" / "empty")
 
     @override_settings(FILENAME_FORMAT="{% if x is None %}/{title]")
-    def test_invalid_format(self):
+    def test_invalid_format(self) -> None:
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
@@ -417,7 +417,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertEqual(generate_filename(document), Path("0000001.pdf"))
 
     @override_settings(FILENAME_FORMAT="{created__year}")
-    def test_invalid_format_key(self):
+    def test_invalid_format_key(self) -> None:
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
@@ -425,7 +425,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertEqual(generate_filename(document), Path("0000001.pdf"))
 
     @override_settings(FILENAME_FORMAT="{title}")
-    def test_duplicates(self):
+    def test_duplicates(self) -> None:
         document = Document.objects.create(
             mime_type="application/pdf",
             title="qwe",
@@ -479,7 +479,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     @override_settings(FILENAME_FORMAT="{title}")
     @mock.patch("documents.signals.handlers.Document.objects.filter")
     @mock.patch("documents.signals.handlers.shutil.move")
-    def test_no_move_only_save(self, mock_move, mock_filter):
+    def test_no_move_only_save(self, mock_move, mock_filter) -> None:
         """
         GIVEN:
             - A document with a filename
@@ -516,7 +516,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         CELERY_TASK_ALWAYS_EAGER=True,
     )
     @mock.patch("documents.signals.handlers.update_filename_and_move_files")
-    def test_select_cf_updated(self, m):
+    def test_select_cf_updated(self, m) -> None:
         """
         GIVEN:
             - A document with a select type custom field
@@ -569,7 +569,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
 class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     @override_settings(FILENAME_FORMAT=None)
-    def test_create_no_format(self):
+    def test_create_no_format(self) -> None:
         original = settings.ORIGINALS_DIR / "0000001.pdf"
         archive = settings.ARCHIVE_DIR / "0000001.pdf"
         Path(original).touch()
@@ -588,7 +588,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
         self.assertIsFile(doc.archive_path)
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
-    def test_create_with_format(self):
+    def test_create_with_format(self) -> None:
         original = settings.ORIGINALS_DIR / "0000001.pdf"
         archive = settings.ARCHIVE_DIR / "0000001.pdf"
         Path(original).touch()
@@ -616,7 +616,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
         )
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
-    def test_move_archive_gone(self):
+    def test_move_archive_gone(self) -> None:
         original = settings.ORIGINALS_DIR / "0000001.pdf"
         archive = settings.ARCHIVE_DIR / "0000001.pdf"
         Path(original).touch()
@@ -635,7 +635,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
         self.assertIsNotFile(doc.archive_path)
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
-    def test_move_archive_exists(self):
+    def test_move_archive_exists(self) -> None:
         original = settings.ORIGINALS_DIR / "0000001.pdf"
         archive = settings.ARCHIVE_DIR / "0000001.pdf"
         existing_archive_file = settings.ARCHIVE_DIR / "none" / "my_doc.pdf"
@@ -660,7 +660,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
         self.assertEqual(doc.archive_filename, "none/my_doc_01.pdf")
 
     @override_settings(FILENAME_FORMAT="{title}")
-    def test_move_original_only(self):
+    def test_move_original_only(self) -> None:
         original = settings.ORIGINALS_DIR / "document_01.pdf"
         archive = settings.ARCHIVE_DIR / "document.pdf"
         Path(original).touch()
@@ -682,7 +682,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
         self.assertIsFile(doc.archive_path)
 
     @override_settings(FILENAME_FORMAT="{title}")
-    def test_move_archive_only(self):
+    def test_move_archive_only(self) -> None:
         original = settings.ORIGINALS_DIR / "document.pdf"
         archive = settings.ARCHIVE_DIR / "document_01.pdf"
         Path(original).touch()
@@ -705,8 +705,8 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
     @mock.patch("documents.signals.handlers.shutil.move")
-    def test_move_archive_error(self, m):
-        def fake_rename(src, dst):
+    def test_move_archive_error(self, m) -> None:
+        def fake_rename(src, dst) -> None:
             if "archive" in str(src):
                 raise OSError
             else:
@@ -735,7 +735,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
         self.assertIsFile(doc.archive_path)
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
-    def test_move_file_gone(self):
+    def test_move_file_gone(self) -> None:
         original = settings.ORIGINALS_DIR / "0000001.pdf"
         archive = settings.ARCHIVE_DIR / "0000001.pdf"
         # Path(original).touch()
@@ -756,8 +756,8 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
     @mock.patch("documents.signals.handlers.shutil.move")
-    def test_move_file_error(self, m):
-        def fake_rename(src, dst):
+    def test_move_file_error(self, m) -> None:
+        def fake_rename(src, dst) -> None:
             if "original" in str(src):
                 raise OSError
             else:
@@ -786,7 +786,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
         self.assertIsFile(doc.archive_path)
 
     @override_settings(FILENAME_FORMAT="")
-    def test_archive_deleted(self):
+    def test_archive_deleted(self) -> None:
         original = settings.ORIGINALS_DIR / "0000001.pdf"
         archive = settings.ARCHIVE_DIR / "0000001.pdf"
         Path(original).touch()
@@ -814,7 +814,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
         self.assertIsNotFile(doc.archive_path)
 
     @override_settings(FILENAME_FORMAT="{title}")
-    def test_archive_deleted2(self):
+    def test_archive_deleted2(self) -> None:
         original = settings.ORIGINALS_DIR / "document.webp"
         original2 = settings.ORIGINALS_DIR / "0000001.pdf"
         archive = settings.ARCHIVE_DIR / "0000001.pdf"
@@ -849,7 +849,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
         self.assertIsNotFile(doc2.source_path)
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
-    def test_database_error(self):
+    def test_database_error(self) -> None:
         original = settings.ORIGINALS_DIR / "0000001.pdf"
         archive = settings.ARCHIVE_DIR / "0000001.pdf"
         Path(original).touch()
@@ -876,7 +876,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
 
 class TestFilenameGeneration(DirectoriesMixin, TestCase):
     @override_settings(FILENAME_FORMAT="{title}")
-    def test_invalid_characters(self):
+    def test_invalid_characters(self) -> None:
         doc = Document.objects.create(
             title="This. is the title.",
             mime_type="application/pdf",
@@ -894,7 +894,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         self.assertEqual(generate_filename(doc), Path("my-invalid-..-title-yay.pdf"))
 
     @override_settings(FILENAME_FORMAT="{created}")
-    def test_date(self):
+    def test_date(self) -> None:
         doc = Document.objects.create(
             title="does not matter",
             created=datetime.date(2020, 5, 21),
@@ -904,7 +904,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         )
         self.assertEqual(generate_filename(doc), Path("2020-05-21.pdf"))
 
-    def test_dynamic_path(self):
+    def test_dynamic_path(self) -> None:
         """
         GIVEN:
             - A document with a defined storage path
@@ -923,7 +923,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         )
         self.assertEqual(generate_filename(doc), Path("TestFolder/2020-06-25.pdf"))
 
-    def test_dynamic_path_with_none(self):
+    def test_dynamic_path_with_none(self) -> None:
         """
         GIVEN:
             - A document with a defined storage path
@@ -947,7 +947,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
     @override_settings(
         FILENAME_FORMAT_REMOVE_NONE=True,
     )
-    def test_dynamic_path_remove_none(self):
+    def test_dynamic_path_remove_none(self) -> None:
         """
         GIVEN:
             - A document with a defined storage path
@@ -981,7 +981,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         sp.save()
         self.assertEqual(generate_filename(doc), Path("2020/does not matter.pdf"))
 
-    def test_multiple_doc_paths(self):
+    def test_multiple_doc_paths(self) -> None:
         """
         GIVEN:
             - Two documents, each with different storage paths
@@ -1026,7 +1026,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
     @override_settings(
         FILENAME_FORMAT=None,
     )
-    def test_no_path_fallback(self):
+    def test_no_path_fallback(self) -> None:
         """
         GIVEN:
             - Two documents, one with defined storage path, the other not
@@ -1071,7 +1071,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             "{{ correspondent or 'missing' }}/{{ title }}"
         ),
     )
-    def test_placeholder_matches_none_variants_and_false(self):
+    def test_placeholder_matches_none_variants_and_false(self) -> None:
         """
         GIVEN:
             - Templates that compare against 'none', '-none-' and rely on truthiness
@@ -1106,7 +1106,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
     @override_settings(
         FILENAME_FORMAT="{created_year_short}/{created_month_name_short}/{created_month_name}/{title}",
     )
-    def test_short_names_created(self):
+    def test_short_names_created(self) -> None:
         doc = Document.objects.create(
             title="The Title",
             created=datetime.date(1989, 12, 2),
@@ -1119,7 +1119,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
     @override_settings(
         FILENAME_FORMAT="{added_year_short}/{added_month_name}/{added_month_name_short}/{title}",
     )
-    def test_short_names_added(self):
+    def test_short_names_added(self) -> None:
         doc = Document.objects.create(
             title="The Title",
             added=timezone.make_aware(datetime.datetime(1984, 8, 21, 7, 36, 51, 153)),
@@ -1132,7 +1132,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
     @override_settings(
         FILENAME_FORMAT="{owner_username}/{title}",
     )
-    def test_document_owner_string(self):
+    def test_document_owner_string(self) -> None:
         """
         GIVEN:
             - Document with an owner
@@ -1166,7 +1166,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
     @override_settings(
         FILENAME_FORMAT="{original_name}",
     )
-    def test_document_original_filename(self):
+    def test_document_original_filename(self) -> None:
         """
         GIVEN:
             - Document with an original filename
@@ -1226,7 +1226,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         FILENAME_FORMAT="XX{correspondent}/{title}",
         FILENAME_FORMAT_REMOVE_NONE=True,
     )
-    def test_remove_none_not_dir(self):
+    def test_remove_none_not_dir(self) -> None:
         """
         GIVEN:
             - A document with a filename format that includes the correspondent as part of the directory name
@@ -1247,7 +1247,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         document.filename = generate_filename(document)
         self.assertEqual(document.filename, Path("XX/doc1.pdf"))
 
-    def test_complex_template_strings(self):
+    def test_complex_template_strings(self) -> None:
         """
         GIVEN:
             - Storage paths with complex conditionals and logic
@@ -1326,7 +1326,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
     @override_settings(
         FILENAME_FORMAT="{{creation_date}}/{{ title_name_str }}",
     )
-    def test_template_with_undefined_var(self):
+    def test_template_with_undefined_var(self) -> None:
         """
         GIVEN:
             - Filename format with one or more undefined variables
@@ -1361,7 +1361,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
     @override_settings(
         FILENAME_FORMAT="{{created}}/{{ document.save() }}",
     )
-    def test_template_with_security(self):
+    def test_template_with_security(self) -> None:
         """
         GIVEN:
             - Filename format which attempts a restricted operation (calling a method on the document)
@@ -1393,7 +1393,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
                 "WARNING:paperless.templating:Template attempted restricted operation: <bound method Model.save of <Document: 2020-06-25 Does Matter>> is not safely callable",
             )
 
-    def test_template_with_custom_fields(self):
+    def test_template_with_custom_fields(self) -> None:
         """
         GIVEN:
             - Filename format which accesses custom field data
@@ -1499,7 +1499,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
                 Path("invoices/0.pdf"),
             )
 
-    def test_datetime_filter(self):
+    def test_datetime_filter(self) -> None:
         """
         GIVEN:
             - Filename format with datetime filter
@@ -1554,7 +1554,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
                 Path("2024-10-01/Some Title.pdf"),
             )
 
-    def test_slugify_filter(self):
+    def test_slugify_filter(self) -> None:
         """
         GIVEN:
             - Filename format with slugify filter
@@ -1638,7 +1638,7 @@ class TestCustomFieldFilenameUpdates(
         return super().setUp()
 
     @override_settings(FILENAME_FORMAT=None)
-    def test_custom_field_not_in_template_skips_filename_work(self):
+    def test_custom_field_not_in_template_skips_filename_work(self) -> None:
         storage_path = StoragePath.objects.create(path="{{created}}/{{ title }}")
         self.doc.storage_path = storage_path
         self.doc.save()
@@ -1658,7 +1658,7 @@ class TestCustomFieldFilenameUpdates(
         self.assertEqual(m.call_count, 0)
 
     @override_settings(FILENAME_FORMAT=None)
-    def test_custom_field_in_template_triggers_filename_update(self):
+    def test_custom_field_in_template_triggers_filename_update(self) -> None:
         storage_path = StoragePath.objects.create(
             path="{{ custom_fields|get_cf_value('flavor') }}/{{ title }}",
         )
@@ -1710,7 +1710,11 @@ class TestPathDateLocalization:
             ),
         ],
     )
-    def test_localize_date_path_building(self, filename_format, expected_filename):
+    def test_localize_date_path_building(
+        self,
+        filename_format,
+        expected_filename,
+    ) -> None:
         document = DocumentFactory.create(
             title="My Document",
             mime_type="application/pdf",
index 6283bed786a676ef34d35f86bd5439380bc808af..e14a0214e5dea6d168db14c99bfd3e7c60744249 100644 (file)
@@ -167,7 +167,7 @@ class TestDateLocalization:
 
         assert f"Unsupported type {type(invalid_value)}" in str(excinfo.value)
 
-    def test_localize_date_raises_error_for_invalid_locale(self):
+    def test_localize_date_raises_error_for_invalid_locale(self) -> None:
         with pytest.raises(ValueError) as excinfo:
             localize_date(self.TEST_DATE, "medium", "invalid_locale_code")
 
index ef6b535f7e5ee360528ffbfa6de3058949a856f9..5f1c7487dad79a10383310659fa222faedc8a8d2 100644 (file)
@@ -15,7 +15,7 @@ from documents.tests.utils import DirectoriesMixin
 
 
 class TestAutoComplete(DirectoriesMixin, TestCase):
-    def test_auto_complete(self):
+    def test_auto_complete(self) -> None:
         doc1 = Document.objects.create(
             title="doc1",
             checksum="A",
@@ -41,7 +41,7 @@ class TestAutoComplete(DirectoriesMixin, TestCase):
         self.assertListEqual(index.autocomplete(ix, "tes", limit=1), [b"test2"])
         self.assertListEqual(index.autocomplete(ix, "tes", limit=0), [])
 
-    def test_archive_serial_number_ranging(self):
+    def test_archive_serial_number_ranging(self) -> None:
         """
         GIVEN:
             - Document with an archive serial number above schema allowed size
@@ -74,7 +74,7 @@ class TestAutoComplete(DirectoriesMixin, TestCase):
                 expected_str = "ERROR:paperless.index:Not indexing Archive Serial Number 4294967296 of document 1"
                 self.assertIn(expected_str, error_str)
 
-    def test_archive_serial_number_is_none(self):
+    def test_archive_serial_number_is_none(self) -> None:
         """
         GIVEN:
             - Document with no archive serial number
@@ -99,7 +99,7 @@ class TestAutoComplete(DirectoriesMixin, TestCase):
             self.assertIsNone(kwargs["asn"])
 
     @override_settings(TIME_ZONE="Pacific/Auckland")
-    def test_added_today_respects_local_timezone_boundary(self):
+    def test_added_today_respects_local_timezone_boundary(self) -> None:
         tz = get_current_timezone()
         fixed_now = datetime(2025, 7, 20, 15, 0, 0, tzinfo=tz)
 
@@ -152,7 +152,7 @@ class TestRewriteNaturalDateKeywords(SimpleTestCase):
             self.assertIn(fragment, result)
         return result
 
-    def test_range_keywords(self):
+    def test_range_keywords(self) -> None:
         """
         Test various range keywords
         """
@@ -205,14 +205,14 @@ class TestRewriteNaturalDateKeywords(SimpleTestCase):
             with self.subTest(query=query):
                 self._assert_rewrite_contains(query, now_dt, *fragments)
 
-    def test_additional_fields(self):
+    def test_additional_fields(self) -> None:
         fixed_now = datetime(2025, 7, 20, 15, 30, 45, tzinfo=timezone.utc)
         # created
         self._assert_rewrite_contains("created:today", fixed_now, "created:[20250720")
         # modified
         self._assert_rewrite_contains("modified:today", fixed_now, "modified:[20250720")
 
-    def test_basic_syntax_variants(self):
+    def test_basic_syntax_variants(self) -> None:
         """
         Test that quoting, casing, and multi-clause queries are parsed.
         """
@@ -234,7 +234,7 @@ class TestRewriteNaturalDateKeywords(SimpleTestCase):
         self.assertIn("added:[20250720", result)
         self.assertIn("created:[20250719", result)
 
-    def test_no_match(self):
+    def test_no_match(self) -> None:
         """
         Test that queries without keywords are unchanged.
         """
@@ -243,7 +243,7 @@ class TestRewriteNaturalDateKeywords(SimpleTestCase):
         self.assertEqual(query, result)
 
     @override_settings(TIME_ZONE="Pacific/Auckland")
-    def test_timezone_awareness(self):
+    def test_timezone_awareness(self) -> None:
         """
         Test timezone conversion.
         """
@@ -255,13 +255,13 @@ class TestRewriteNaturalDateKeywords(SimpleTestCase):
 
 
 class TestIndexResilience(DirectoriesMixin, SimpleTestCase):
-    def _assert_recreate_called(self, mock_create_in):
+    def _assert_recreate_called(self, mock_create_in) -> None:
         mock_create_in.assert_called_once()
         path_arg, schema_arg = mock_create_in.call_args.args
         self.assertEqual(path_arg, settings.INDEX_DIR)
         self.assertEqual(schema_arg.__class__.__name__, "Schema")
 
-    def test_transient_missing_segment_does_not_force_recreate(self):
+    def test_transient_missing_segment_does_not_force_recreate(self) -> None:
         """
         GIVEN:
             - Index directory exists
@@ -298,7 +298,7 @@ class TestIndexResilience(DirectoriesMixin, SimpleTestCase):
         mock_create_in.assert_not_called()
         self.assertEqual(file_marker.read_text(), "keep")
 
-    def test_transient_errors_exhaust_retries_and_recreate(self):
+    def test_transient_errors_exhaust_retries_and_recreate(self) -> None:
         """
         GIVEN:
             - Index directory exists
@@ -335,7 +335,7 @@ class TestIndexResilience(DirectoriesMixin, SimpleTestCase):
             cm.output[0],
         )
 
-    def test_non_transient_error_recreates_index(self):
+    def test_non_transient_error_recreates_index(self) -> None:
         """
         GIVEN:
             - Index directory exists
index e1b88633c52ae8d58b4597d606d0dc495c7c119f..63c870e2374f40e47f411083a0d0e250d78add5c 100644 (file)
@@ -29,13 +29,13 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             mime_type="application/pdf",
         )
 
-    def test_archiver(self):
+    def test_archiver(self) -> None:
         doc = self.make_models()
         shutil.copy(sample_file, Path(self.dirs.originals_dir) / f"{doc.id:07}.pdf")
 
         call_command("document_archiver", "--processes", "1")
 
-    def test_handle_document(self):
+    def test_handle_document(self) -> None:
         doc = self.make_models()
         shutil.copy(sample_file, Path(self.dirs.originals_dir) / f"{doc.id:07}.pdf")
 
@@ -50,7 +50,7 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertTrue(filecmp.cmp(sample_file, doc.source_path))
         self.assertEqual(doc.archive_filename, "none/A.pdf")
 
-    def test_unknown_mime_type(self):
+    def test_unknown_mime_type(self) -> None:
         doc = self.make_models()
         doc.mime_type = "sdgfh"
         doc.save()
@@ -66,7 +66,7 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsFile(doc.source_path)
 
     @override_settings(FILENAME_FORMAT="{title}")
-    def test_naming_priorities(self):
+    def test_naming_priorities(self) -> None:
         doc1 = Document.objects.create(
             checksum="A",
             title="document",
@@ -96,19 +96,19 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
 class TestMakeIndex(TestCase):
     @mock.patch("documents.management.commands.document_index.index_reindex")
-    def test_reindex(self, m):
+    def test_reindex(self, m) -> None:
         call_command("document_index", "reindex")
         m.assert_called_once()
 
     @mock.patch("documents.management.commands.document_index.index_optimize")
-    def test_optimize(self, m):
+    def test_optimize(self, m) -> None:
         call_command("document_index", "optimize")
         m.assert_called_once()
 
 
 class TestRenamer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     @override_settings(FILENAME_FORMAT="")
-    def test_rename(self):
+    def test_rename(self) -> None:
         doc = Document.objects.create(title="test", mime_type="image/jpeg")
         doc.filename = generate_filename(doc)
         doc.archive_filename = generate_filename(doc, archive_filename=True)
@@ -134,21 +134,21 @@ class TestCreateClassifier(TestCase):
     @mock.patch(
         "documents.management.commands.document_create_classifier.train_classifier",
     )
-    def test_create_classifier(self, m):
+    def test_create_classifier(self, m) -> None:
         call_command("document_create_classifier")
 
         m.assert_called_once()
 
 
 class TestSanityChecker(DirectoriesMixin, TestCase):
-    def test_no_issues(self):
+    def test_no_issues(self) -> None:
         with self.assertLogs() as capture:
             call_command("document_sanity_checker")
 
         self.assertEqual(len(capture.output), 1)
         self.assertIn("Sanity checker detected no issues.", capture.output[0])
 
-    def test_errors(self):
+    def test_errors(self) -> None:
         doc = Document.objects.create(
             title="test",
             content="test",
@@ -167,7 +167,7 @@ class TestSanityChecker(DirectoriesMixin, TestCase):
 
 class TestConvertMariaDBUUID(TestCase):
     @mock.patch("django.db.connection.schema_editor")
-    def test_convert(self, m):
+    def test_convert(self, m) -> None:
         m.alter_field.return_value = None
 
         stdout = StringIO()
@@ -179,7 +179,7 @@ class TestConvertMariaDBUUID(TestCase):
 
 
 class TestPruneAuditLogs(TestCase):
-    def test_prune_audit_logs(self):
+    def test_prune_audit_logs(self) -> None:
         LogEntry.objects.create(
             content_type=ContentType.objects.get_for_model(Document),
             object_id=1,
index c2a1360cab1544ea6f10fb79c98d06682acd14f6..391d87f415c15c884bb73cf603dd7412852901e5 100644 (file)
@@ -186,7 +186,7 @@ class TestExportImport(
 
         return manifest
 
-    def test_exporter(self, *, use_filename_format=False):
+    def test_exporter(self, *, use_filename_format=False) -> None:
         shutil.rmtree(Path(self.dirs.media_dir) / "documents")
         shutil.copytree(
             Path(__file__).parent / "samples" / "documents",
@@ -288,7 +288,7 @@ class TestExportImport(
             # everything is alright after the test
             self.assertEqual(len(messages), 0)
 
-    def test_exporter_with_filename_format(self):
+    def test_exporter_with_filename_format(self) -> None:
         shutil.rmtree(Path(self.dirs.media_dir) / "documents")
         shutil.copytree(
             Path(__file__).parent / "samples" / "documents",
@@ -300,7 +300,7 @@ class TestExportImport(
         ):
             self.test_exporter(use_filename_format=True)
 
-    def test_update_export_changed_time(self):
+    def test_update_export_changed_time(self) -> None:
         shutil.rmtree(Path(self.dirs.media_dir) / "documents")
         shutil.copytree(
             Path(__file__).parent / "samples" / "documents",
@@ -339,7 +339,7 @@ class TestExportImport(
         st_mtime_4 = (self.target / "manifest.json").stat().st_mtime
         self.assertEqual(st_mtime_3, st_mtime_4)
 
-    def test_update_export_changed_checksum(self):
+    def test_update_export_changed_checksum(self) -> None:
         shutil.rmtree(Path(self.dirs.media_dir) / "documents")
         shutil.copytree(
             Path(__file__).parent / "samples" / "documents",
@@ -369,7 +369,7 @@ class TestExportImport(
 
         self.assertIsFile(self.target / "manifest.json")
 
-    def test_update_export_deleted_document(self):
+    def test_update_export_deleted_document(self) -> None:
         shutil.rmtree(Path(self.dirs.media_dir) / "documents")
         shutil.copytree(
             Path(__file__).parent / "samples" / "documents",
@@ -404,7 +404,7 @@ class TestExportImport(
         self.assertEqual(len(manifest), 6)
 
     @override_settings(FILENAME_FORMAT="{title}/{correspondent}")
-    def test_update_export_changed_location(self):
+    def test_update_export_changed_location(self) -> None:
         shutil.rmtree(Path(self.dirs.media_dir) / "documents")
         shutil.copytree(
             Path(__file__).parent / "samples" / "documents",
@@ -428,7 +428,7 @@ class TestExportImport(
             self.target / "wow2" / "none_01.pdf",
         )
 
-    def test_export_missing_files(self):
+    def test_export_missing_files(self) -> None:
         target = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, target)
         Document.objects.create(
@@ -440,7 +440,7 @@ class TestExportImport(
         self.assertRaises(FileNotFoundError, call_command, "document_exporter", target)
 
     @override_settings(PASSPHRASE="test")
-    def test_export_zipped(self):
+    def test_export_zipped(self) -> None:
         """
         GIVEN:
             - Request to export documents to zipfile
@@ -472,7 +472,7 @@ class TestExportImport(
             self.assertIn("metadata.json", zip.namelist())
 
     @override_settings(PASSPHRASE="test")
-    def test_export_zipped_format(self):
+    def test_export_zipped_format(self) -> None:
         """
         GIVEN:
             - Request to export documents to zipfile
@@ -509,7 +509,7 @@ class TestExportImport(
             self.assertIn("metadata.json", zip.namelist())
 
     @override_settings(PASSPHRASE="test")
-    def test_export_zipped_with_delete(self):
+    def test_export_zipped_with_delete(self) -> None:
         """
         GIVEN:
             - Request to export documents to zipfile
@@ -555,7 +555,7 @@ class TestExportImport(
             self.assertIn("manifest.json", zip.namelist())
             self.assertIn("metadata.json", zip.namelist())
 
-    def test_export_target_not_exists(self):
+    def test_export_target_not_exists(self) -> None:
         """
         GIVEN:
             - Request to export documents to directory that doesn't exist
@@ -571,7 +571,7 @@ class TestExportImport(
 
         self.assertEqual("That path doesn't exist", str(e.exception))
 
-    def test_export_target_exists_but_is_file(self):
+    def test_export_target_exists_but_is_file(self) -> None:
         """
         GIVEN:
             - Request to export documents to file instead of directory
@@ -589,7 +589,7 @@ class TestExportImport(
 
             self.assertEqual("That path isn't a directory", str(e.exception))
 
-    def test_export_target_not_writable(self):
+    def test_export_target_not_writable(self) -> None:
         """
         GIVEN:
             - Request to export documents to directory that's not writeable
@@ -611,7 +611,7 @@ class TestExportImport(
                 str(e.exception),
             )
 
-    def test_no_archive(self):
+    def test_no_archive(self) -> None:
         """
         GIVEN:
             - Request to export documents to directory
@@ -652,7 +652,7 @@ class TestExportImport(
             call_command("document_importer", "--no-progress-bar", self.target)
             self.assertEqual(Document.objects.count(), 4)
 
-    def test_no_thumbnail(self):
+    def test_no_thumbnail(self) -> None:
         """
         GIVEN:
             - Request to export documents to directory
@@ -695,7 +695,7 @@ class TestExportImport(
             call_command("document_importer", "--no-progress-bar", self.target)
             self.assertEqual(Document.objects.count(), 4)
 
-    def test_split_manifest(self):
+    def test_split_manifest(self) -> None:
         """
         GIVEN:
             - Request to export documents to directory
@@ -727,7 +727,7 @@ class TestExportImport(
             self.assertEqual(Document.objects.count(), 4)
             self.assertEqual(CustomFieldInstance.objects.count(), 1)
 
-    def test_folder_prefix(self):
+    def test_folder_prefix(self) -> None:
         """
         GIVEN:
             - Request to export documents to directory
@@ -751,7 +751,7 @@ class TestExportImport(
             call_command("document_importer", "--no-progress-bar", self.target)
             self.assertEqual(Document.objects.count(), 4)
 
-    def test_import_db_transaction_failed(self):
+    def test_import_db_transaction_failed(self) -> None:
         """
         GIVEN:
             - Import from manifest started
@@ -795,7 +795,7 @@ class TestExportImport(
             self.assertEqual(ContentType.objects.count(), num_content_type_objects)
             self.assertEqual(Permission.objects.count(), num_permission_objects + 1)
 
-    def test_exporter_with_auditlog_disabled(self):
+    def test_exporter_with_auditlog_disabled(self) -> None:
         shutil.rmtree(Path(self.dirs.media_dir) / "documents")
         shutil.copytree(
             Path(__file__).parent / "samples" / "documents",
@@ -809,7 +809,7 @@ class TestExportImport(
             for obj in manifest:
                 self.assertNotEqual(obj["model"], "auditlog.logentry")
 
-    def test_export_data_only(self):
+    def test_export_data_only(self) -> None:
         """
         GIVEN:
             - Request to export documents with data only
@@ -859,7 +859,7 @@ class TestCryptExportImport(
         shutil.rmtree(self.target, ignore_errors=True)
         return super().tearDown()
 
-    def test_export_passphrase(self):
+    def test_export_passphrase(self) -> None:
         """
         GIVEN:
             - A mail account exists
@@ -934,7 +934,7 @@ class TestCryptExportImport(
         social_token = SocialToken.objects.first()
         self.assertIsNotNone(social_token)
 
-    def test_import_crypt_no_passphrase(self):
+    def test_import_crypt_no_passphrase(self) -> None:
         """
         GIVEN:
             - A mail account exists
@@ -965,7 +965,7 @@ class TestCryptExportImport(
                 "No passphrase was given, but this export contains encrypted fields",
             )
 
-    def test_export_warn_plaintext(self):
+    def test_export_warn_plaintext(self) -> None:
         """
         GIVEN:
             - A mail account exists
index 2a4f280256cca30acd610c4149bc42d7a3616a85..5ba57b15b609956f163d12909371f6954f1bd3ae 100644 (file)
@@ -23,7 +23,7 @@ class TestFuzzyMatchCommand(TestCase):
         )
         return stdout.getvalue(), stderr.getvalue()
 
-    def test_invalid_ratio_lower_limit(self):
+    def test_invalid_ratio_lower_limit(self) -> None:
         """
         GIVEN:
             - Invalid ratio below lower limit
@@ -36,7 +36,7 @@ class TestFuzzyMatchCommand(TestCase):
             self.call_command("--ratio", "-1")
         self.assertIn("The ratio must be between 0 and 100", str(e.exception))
 
-    def test_invalid_ratio_upper_limit(self):
+    def test_invalid_ratio_upper_limit(self) -> None:
         """
         GIVEN:
             - Invalid ratio above the upper limit
@@ -49,7 +49,7 @@ class TestFuzzyMatchCommand(TestCase):
             self.call_command("--ratio", "101")
         self.assertIn("The ratio must be between 0 and 100", str(e.exception))
 
-    def test_invalid_process_count(self):
+    def test_invalid_process_count(self) -> None:
         """
         GIVEN:
             - Invalid process count of less than 1
@@ -62,7 +62,7 @@ class TestFuzzyMatchCommand(TestCase):
             self.call_command("--processes", "0")
         self.assertIn("There must be at least 1 process", str(e.exception))
 
-    def test_no_matches(self):
+    def test_no_matches(self) -> None:
         """
         GIVEN:
             - 2 documents exist
@@ -89,7 +89,7 @@ class TestFuzzyMatchCommand(TestCase):
         stdout, _ = self.call_command()
         self.assertIn("No matches found", stdout)
 
-    def test_with_matches(self):
+    def test_with_matches(self) -> None:
         """
         GIVEN:
             - 2 documents exist
@@ -118,7 +118,7 @@ class TestFuzzyMatchCommand(TestCase):
         stdout, _ = self.call_command("--processes", "1")
         self.assertRegex(stdout, self.MSG_REGEX)
 
-    def test_with_3_matches(self):
+    def test_with_3_matches(self) -> None:
         """
         GIVEN:
             - 3 documents exist
@@ -157,7 +157,7 @@ class TestFuzzyMatchCommand(TestCase):
         for line in lines:
             self.assertRegex(line, self.MSG_REGEX)
 
-    def test_document_deletion(self):
+    def test_document_deletion(self) -> None:
         """
         GIVEN:
             - 3 documents exist
@@ -207,7 +207,7 @@ class TestFuzzyMatchCommand(TestCase):
         self.assertIsNotNone(Document.objects.get(pk=1))
         self.assertIsNotNone(Document.objects.get(pk=2))
 
-    def test_empty_content(self):
+    def test_empty_content(self) -> None:
         """
         GIVEN:
             - 2 documents exist, content is empty (pw-protected)
index 004f5ac5f9a86a39b14e9a9c853f56b3978bd72e..8537716ee59f3e68fdfbf6de58fc67ddd347f034 100644 (file)
@@ -24,7 +24,7 @@ class TestCommandImport(
     SampleDirMixin,
     TestCase,
 ):
-    def test_check_manifest_exists(self):
+    def test_check_manifest_exists(self) -> None:
         """
         GIVEN:
             - Source directory exists
@@ -45,7 +45,7 @@ class TestCommandImport(
             str(e.exception),
         )
 
-    def test_check_manifest_malformed(self):
+    def test_check_manifest_malformed(self) -> None:
         """
         GIVEN:
             - Source directory exists
@@ -71,7 +71,7 @@ class TestCommandImport(
             str(e.exception),
         )
 
-    def test_check_manifest_file_not_found(self):
+    def test_check_manifest_file_not_found(self) -> None:
         """
         GIVEN:
             - Source directory exists
@@ -97,7 +97,7 @@ class TestCommandImport(
             )
         self.assertIn('The manifest file refers to "noexist.pdf"', str(e.exception))
 
-    def test_import_permission_error(self):
+    def test_import_permission_error(self) -> None:
         """
         GIVEN:
             - Original file which cannot be read from
@@ -138,7 +138,7 @@ class TestCommandImport(
                 cmd.check_manifest_validity()
             self.assertIn("Failed to read from archive file", str(cm.exception))
 
-    def test_import_source_not_existing(self):
+    def test_import_source_not_existing(self) -> None:
         """
         GIVEN:
             - Source given doesn't exist
@@ -151,7 +151,7 @@ class TestCommandImport(
             call_command("document_importer", Path("/tmp/notapath"))
         self.assertIn("That path doesn't exist", str(cm.exception))
 
-    def test_import_source_not_readable(self):
+    def test_import_source_not_readable(self) -> None:
         """
         GIVEN:
             - Source given isn't readable
@@ -170,7 +170,7 @@ class TestCommandImport(
                 str(cm.exception),
             )
 
-    def test_import_source_does_not_exist(self):
+    def test_import_source_does_not_exist(self) -> None:
         """
         GIVEN:
             - Source directory does not exist
@@ -187,7 +187,7 @@ class TestCommandImport(
             call_command("document_importer", "--no-progress-bar", str(path))
         self.assertIn("That path doesn't exist", str(e.exception))
 
-    def test_import_files_exist(self):
+    def test_import_files_exist(self) -> None:
         """
         GIVEN:
             - Source directory does exist
@@ -216,7 +216,7 @@ class TestCommandImport(
             str(stdout.read()),
         )
 
-    def test_import_with_user_exists(self):
+    def test_import_with_user_exists(self) -> None:
         """
         GIVEN:
             - Source directory does exist
@@ -244,7 +244,7 @@ class TestCommandImport(
             stdout.read(),
         )
 
-    def test_import_with_documents_exists(self):
+    def test_import_with_documents_exists(self) -> None:
         """
         GIVEN:
             - Source directory does exist
@@ -280,7 +280,7 @@ class TestCommandImport(
             str(stdout.read()),
         )
 
-    def test_import_no_metadata_or_version_file(self):
+    def test_import_no_metadata_or_version_file(self) -> None:
         """
         GIVEN:
             - A source directory with a manifest file only
@@ -306,7 +306,7 @@ class TestCommandImport(
 
         self.assertIn("No version.json or metadata.json file located", stdout_str)
 
-    def test_import_version_file(self):
+    def test_import_version_file(self) -> None:
         """
         GIVEN:
             - A source directory with a manifest file and version file
@@ -336,7 +336,7 @@ class TestCommandImport(
         self.assertIn("Version mismatch:", stdout_str)
         self.assertIn("importing 2.8.1", stdout_str)
 
-    def test_import_zipped_export(self):
+    def test_import_zipped_export(self) -> None:
         """
         GIVEN:
             - A zip file with correct content (manifest.json and version.json inside)
index 7b78b32c16e9d314edededbf75c39e05d3c5f67a..87912211b4bc63e1e3bd4c6193c6d154a95e5bb6 100644 (file)
@@ -11,7 +11,7 @@ from documents.tests.utils import DirectoriesMixin
 
 
 class TestRetagger(DirectoriesMixin, TestCase):
-    def make_models(self):
+    def make_models(self) -> None:
         self.sp1 = StoragePath.objects.create(
             name="dummy a",
             path="{created_data}/{title}",
@@ -109,7 +109,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
         super().setUp()
         self.make_models()
 
-    def test_add_tags(self):
+    def test_add_tags(self) -> None:
         call_command("document_retagger", "--tags")
         d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
 
@@ -121,21 +121,21 @@ class TestRetagger(DirectoriesMixin, TestCase):
         self.assertEqual(d_first.tags.first(), self.tag_first)
         self.assertEqual(d_second.tags.first(), self.tag_second)
 
-    def test_add_type(self):
+    def test_add_type(self) -> None:
         call_command("document_retagger", "--document_type")
         d_first, d_second, _, _ = self.get_updated_docs()
 
         self.assertEqual(d_first.document_type, self.doctype_first)
         self.assertEqual(d_second.document_type, self.doctype_second)
 
-    def test_add_correspondent(self):
+    def test_add_correspondent(self) -> None:
         call_command("document_retagger", "--correspondent")
         d_first, d_second, _, _ = self.get_updated_docs()
 
         self.assertEqual(d_first.correspondent, self.correspondent_first)
         self.assertEqual(d_second.correspondent, self.correspondent_second)
 
-    def test_overwrite_preserve_inbox(self):
+    def test_overwrite_preserve_inbox(self) -> None:
         self.d1.tags.add(self.tag_second)
 
         call_command("document_retagger", "--tags", "--overwrite")
@@ -158,7 +158,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
         )
         self.assertEqual(d_auto.tags.count(), 0)
 
-    def test_add_tags_suggest(self):
+    def test_add_tags_suggest(self) -> None:
         call_command("document_retagger", "--tags", "--suggest")
         d_first, d_second, _, d_auto = self.get_updated_docs()
 
@@ -166,21 +166,21 @@ class TestRetagger(DirectoriesMixin, TestCase):
         self.assertEqual(d_second.tags.count(), 0)
         self.assertEqual(d_auto.tags.count(), 1)
 
-    def test_add_type_suggest(self):
+    def test_add_type_suggest(self) -> None:
         call_command("document_retagger", "--document_type", "--suggest")
         d_first, d_second, _, _ = self.get_updated_docs()
 
         self.assertIsNone(d_first.document_type)
         self.assertIsNone(d_second.document_type)
 
-    def test_add_correspondent_suggest(self):
+    def test_add_correspondent_suggest(self) -> None:
         call_command("document_retagger", "--correspondent", "--suggest")
         d_first, d_second, _, _ = self.get_updated_docs()
 
         self.assertIsNone(d_first.correspondent)
         self.assertIsNone(d_second.correspondent)
 
-    def test_add_tags_suggest_url(self):
+    def test_add_tags_suggest_url(self) -> None:
         call_command(
             "document_retagger",
             "--tags",
@@ -193,7 +193,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
         self.assertEqual(d_second.tags.count(), 0)
         self.assertEqual(d_auto.tags.count(), 1)
 
-    def test_add_type_suggest_url(self):
+    def test_add_type_suggest_url(self) -> None:
         call_command(
             "document_retagger",
             "--document_type",
@@ -205,7 +205,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
         self.assertIsNone(d_first.document_type)
         self.assertIsNone(d_second.document_type)
 
-    def test_add_correspondent_suggest_url(self):
+    def test_add_correspondent_suggest_url(self) -> None:
         call_command(
             "document_retagger",
             "--correspondent",
@@ -217,7 +217,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
         self.assertIsNone(d_first.correspondent)
         self.assertIsNone(d_second.correspondent)
 
-    def test_add_storage_path(self):
+    def test_add_storage_path(self) -> None:
         """
         GIVEN:
             - 2 storage paths with documents which match them
@@ -240,7 +240,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
         self.assertIsNone(d_second.storage_path)
         self.assertEqual(d_unrelated.storage_path, self.sp3)
 
-    def test_overwrite_storage_path(self):
+    def test_overwrite_storage_path(self) -> None:
         """
         GIVEN:
             - 2 storage paths with documents which match them
@@ -260,7 +260,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
         self.assertIsNone(d_second.storage_path)
         self.assertEqual(d_unrelated.storage_path, self.sp2)
 
-    def test_id_range_parameter(self):
+    def test_id_range_parameter(self) -> None:
         commandOutput = ""
         Document.objects.create(
             checksum="E",
index 343d5f56843af94d9fa31e51acf2846502e41d6c..55484eb05064bd09c7f282c7b0fcb0e833af6e94 100644 (file)
@@ -21,7 +21,7 @@ class TestManageSuperUser(DirectoriesMixin, TestCase):
             )
         return out.getvalue()
 
-    def test_no_user(self):
+    def test_no_user(self) -> None:
         """
         GIVEN:
             - Environment does not contain admin user info
@@ -40,7 +40,7 @@ class TestManageSuperUser(DirectoriesMixin, TestCase):
             "Please check if PAPERLESS_ADMIN_PASSWORD has been set in the environment\n",
         )
 
-    def test_create(self):
+    def test_create(self) -> None:
         """
         GIVEN:
             - Environment does contain admin user password
@@ -58,7 +58,7 @@ class TestManageSuperUser(DirectoriesMixin, TestCase):
         self.assertEqual(user.email, "root@localhost")
         self.assertEqual(out, 'Created superuser "admin" with provided password.\n')
 
-    def test_some_superuser_exists(self):
+    def test_some_superuser_exists(self) -> None:
         """
         GIVEN:
             - A super user already exists
@@ -78,7 +78,7 @@ class TestManageSuperUser(DirectoriesMixin, TestCase):
             "Did not create superuser, the DB already contains superusers\n",
         )
 
-    def test_admin_superuser_exists(self):
+    def test_admin_superuser_exists(self) -> None:
         """
         GIVEN:
             - A super user already exists
@@ -96,7 +96,7 @@ class TestManageSuperUser(DirectoriesMixin, TestCase):
         self.assertTrue(user.check_password("password"))
         self.assertEqual(out, "Did not create superuser, a user admin already exists\n")
 
-    def test_admin_user_exists(self):
+    def test_admin_user_exists(self) -> None:
         """
         GIVEN:
             - A user already exists with the username admin
@@ -116,7 +116,7 @@ class TestManageSuperUser(DirectoriesMixin, TestCase):
         self.assertFalse(user.is_superuser)
         self.assertEqual(out, "Did not create superuser, a user admin already exists\n")
 
-    def test_no_password(self):
+    def test_no_password(self) -> None:
         """
         GIVEN:
             - No environment data is set
@@ -132,7 +132,7 @@ class TestManageSuperUser(DirectoriesMixin, TestCase):
             "Please check if PAPERLESS_ADMIN_PASSWORD has been set in the environment\n",
         )
 
-    def test_user_email(self):
+    def test_user_email(self) -> None:
         """
         GIVEN:
             - Environment does contain admin user password
@@ -155,7 +155,7 @@ class TestManageSuperUser(DirectoriesMixin, TestCase):
         self.assertEqual(user.username, "admin")
         self.assertEqual(out, 'Created superuser "admin" with provided password.\n')
 
-    def test_user_username(self):
+    def test_user_username(self) -> None:
         """
         GIVEN:
             - Environment does contain admin user password
index cb80e6c709c0a7542eb8ae0317b518f241f62236..0cb65e4d488988c5317828305b7adef089901178 100644 (file)
@@ -13,7 +13,7 @@ from documents.tests.utils import FileSystemAssertsMixin
 
 
 class TestMakeThumbnails(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
-    def make_models(self):
+    def make_models(self) -> None:
         self.d1 = Document.objects.create(
             checksum="A",
             title="A",
@@ -54,12 +54,12 @@ class TestMakeThumbnails(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         super().setUp()
         self.make_models()
 
-    def test_process_document(self):
+    def test_process_document(self) -> None:
         self.assertIsNotFile(self.d1.thumbnail_path)
         _process_document(self.d1.id)
         self.assertIsFile(self.d1.thumbnail_path)
 
-    def test_process_document_password_protected(self):
+    def test_process_document_password_protected(self) -> None:
         self.assertIsFile(get_default_thumbnail())
         self.assertIsNotFile(self.d3.thumbnail_path)
         _process_document(self.d3.id)
@@ -68,7 +68,7 @@ class TestMakeThumbnails(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsFile(self.d3.thumbnail_path)
 
     @mock.patch("documents.management.commands.document_thumbnails.shutil.move")
-    def test_process_document_invalid_mime_type(self, m: mock.Mock):
+    def test_process_document_invalid_mime_type(self, m: mock.Mock) -> None:
         self.d1.mime_type = "asdasdasd"
         self.d1.save()
 
@@ -80,14 +80,14 @@ class TestMakeThumbnails(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         # Not called during processing of document
         m.assert_not_called()
 
-    def test_command(self):
+    def test_command(self) -> None:
         self.assertIsNotFile(self.d1.thumbnail_path)
         self.assertIsNotFile(self.d2.thumbnail_path)
         call_command("document_thumbnails", "--processes", "1")
         self.assertIsFile(self.d1.thumbnail_path)
         self.assertIsFile(self.d2.thumbnail_path)
 
-    def test_command_documentid(self):
+    def test_command_documentid(self) -> None:
         self.assertIsNotFile(self.d1.thumbnail_path)
         self.assertIsNotFile(self.d2.thumbnail_path)
         call_command("document_thumbnails", "--processes", "1", "-d", f"{self.d1.id}")
index 8b2a7a463364505e8dac2906bd0d4f7cea86f98d..04ff3f6d346e4a477770cd2d8e651ae7abc59309 100644 (file)
@@ -25,7 +25,7 @@ class _TestMatchingBase(TestCase):
         no_match: Iterable[str],
         *,
         case_sensitive: bool = False,
-    ):
+    ) -> None:
         for klass in (Tag, Correspondent, DocumentType):
             instance = klass.objects.create(
                 name=str(randint(10000, 99999)),
@@ -48,7 +48,7 @@ class _TestMatchingBase(TestCase):
 
 
 class TestMatching(_TestMatchingBase):
-    def test_match_none(self):
+    def test_match_none(self) -> None:
         self._test_matching(
             "",
             "MATCH_NONE",
@@ -59,7 +59,7 @@ class TestMatching(_TestMatchingBase):
             ),
         )
 
-    def test_match_all(self):
+    def test_match_all(self) -> None:
         self._test_matching(
             "alpha charlie gamma",
             "MATCH_ALL",
@@ -105,7 +105,7 @@ class TestMatching(_TestMatchingBase):
             ),
         )
 
-    def test_match_any(self):
+    def test_match_any(self) -> None:
         self._test_matching(
             "alpha charlie gamma",
             "MATCH_ANY",
@@ -149,7 +149,7 @@ class TestMatching(_TestMatchingBase):
             ("the lazy fox jumped over the brown dogs",),
         )
 
-    def test_match_literal(self):
+    def test_match_literal(self) -> None:
         self._test_matching(
             "alpha charlie gamma",
             "MATCH_LITERAL",
@@ -183,7 +183,7 @@ class TestMatching(_TestMatchingBase):
             ),
         )
 
-    def test_match_regex(self):
+    def test_match_regex(self) -> None:
         self._test_matching(
             r"alpha\w+gamma",
             "MATCH_REGEX",
@@ -203,10 +203,10 @@ class TestMatching(_TestMatchingBase):
             ),
         )
 
-    def test_tach_invalid_regex(self):
+    def test_tach_invalid_regex(self) -> None:
         self._test_matching("[", "MATCH_REGEX", [], ["Don't match this"])
 
-    def test_match_regex_timeout_returns_false(self):
+    def test_match_regex_timeout_returns_false(self) -> None:
         tag = Tag.objects.create(
             name="slow",
             match=r"(a+)+$",
@@ -222,7 +222,7 @@ class TestMatching(_TestMatchingBase):
             f"Expected timeout log, got {cm.output}",
         )
 
-    def test_match_fuzzy(self):
+    def test_match_fuzzy(self) -> None:
         self._test_matching(
             "Springfield, Miss.",
             "MATCH_FUZZY",
@@ -237,7 +237,7 @@ class TestMatching(_TestMatchingBase):
 
 
 class TestCaseSensitiveMatching(_TestMatchingBase):
-    def test_match_all(self):
+    def test_match_all(self) -> None:
         self._test_matching(
             "alpha charlie gamma",
             "MATCH_ALL",
@@ -286,7 +286,7 @@ class TestCaseSensitiveMatching(_TestMatchingBase):
             case_sensitive=True,
         )
 
-    def test_match_any(self):
+    def test_match_any(self) -> None:
         self._test_matching(
             "alpha charlie gamma",
             "MATCH_ANY",
@@ -341,7 +341,7 @@ class TestCaseSensitiveMatching(_TestMatchingBase):
             case_sensitive=True,
         )
 
-    def test_match_literal(self):
+    def test_match_literal(self) -> None:
         self._test_matching(
             "alpha charlie gamma",
             "MATCH_LITERAL",
@@ -368,7 +368,7 @@ class TestCaseSensitiveMatching(_TestMatchingBase):
             case_sensitive=True,
         )
 
-    def test_match_regex(self):
+    def test_match_regex(self) -> None:
         self._test_matching(
             r"alpha\w+gamma",
             "MATCH_REGEX",
@@ -405,7 +405,7 @@ class TestDocumentConsumptionFinishedSignal(TestCase):
     doing what we expect wrt tag & correspondent matching.
     """
 
-    def setUp(self):
+    def setUp(self) -> None:
         TestCase.setUp(self)
         User.objects.create_user(username="test_consumer", password="12345")
         self.doc_contains = Document.objects.create(
@@ -420,7 +420,7 @@ class TestDocumentConsumptionFinishedSignal(TestCase):
     def tearDown(self) -> None:
         shutil.rmtree(self.index_dir, ignore_errors=True)
 
-    def test_tag_applied_any(self):
+    def test_tag_applied_any(self) -> None:
         t1 = Tag.objects.create(
             name="test",
             match="keyword",
@@ -432,7 +432,7 @@ class TestDocumentConsumptionFinishedSignal(TestCase):
         )
         self.assertTrue(list(self.doc_contains.tags.all()) == [t1])
 
-    def test_tag_not_applied(self):
+    def test_tag_not_applied(self) -> None:
         Tag.objects.create(
             name="test",
             match="no-match",
@@ -444,7 +444,7 @@ class TestDocumentConsumptionFinishedSignal(TestCase):
         )
         self.assertTrue(list(self.doc_contains.tags.all()) == [])
 
-    def test_correspondent_applied(self):
+    def test_correspondent_applied(self) -> None:
         correspondent = Correspondent.objects.create(
             name="test",
             match="keyword",
@@ -456,7 +456,7 @@ class TestDocumentConsumptionFinishedSignal(TestCase):
         )
         self.assertTrue(self.doc_contains.correspondent == correspondent)
 
-    def test_correspondent_not_applied(self):
+    def test_correspondent_not_applied(self) -> None:
         Tag.objects.create(
             name="test",
             match="no-match",
index 74aeca14cb90fab28000ad86ffeab5b2f2ccd5c7..cf52de9242e9014b0403f5017f38922bb25c42f6 100644 (file)
@@ -5,7 +5,7 @@ class TestMigrateShareLinkBundlePermissions(TestMigrations):
     migrate_from = "0007_document_content_length"
     migrate_to = "0008_sharelinkbundle"
 
-    def setUpBeforeMigration(self, apps):
+    def setUpBeforeMigration(self, apps) -> None:
         User = apps.get_model("auth", "User")
         Group = apps.get_model("auth", "Group")
         self.Permission = apps.get_model("auth", "Permission")
@@ -15,7 +15,7 @@ class TestMigrateShareLinkBundlePermissions(TestMigrations):
         self.user.user_permissions.add(add_document.id)
         self.group.permissions.add(add_document.id)
 
-    def test_share_link_permissions_granted_to_add_document_holders(self):
+    def test_share_link_permissions_granted_to_add_document_holders(self) -> None:
         share_perms = self.Permission.objects.filter(
             codename__contains="sharelinkbundle",
         )
@@ -27,7 +27,7 @@ class TestReverseMigrateShareLinkBundlePermissions(TestMigrations):
     migrate_from = "0008_sharelinkbundle"
     migrate_to = "0007_document_content_length"
 
-    def setUpBeforeMigration(self, apps):
+    def setUpBeforeMigration(self, apps) -> None:
         User = apps.get_model("auth", "User")
         Group = apps.get_model("auth", "Group")
         self.Permission = apps.get_model("auth", "Permission")
@@ -42,7 +42,7 @@ class TestReverseMigrateShareLinkBundlePermissions(TestMigrations):
         self.user.user_permissions.add(add_document.id, *self.share_perm_ids)
         self.group.permissions.add(add_document.id, *self.share_perm_ids)
 
-    def test_share_link_permissions_revoked_on_reverse(self):
+    def test_share_link_permissions_revoked_on_reverse(self) -> None:
         self.assertFalse(
             self.user.user_permissions.filter(pk__in=self.share_perm_ids).exists(),
         )
index 1c99be3f769ecfc91a51780a10ec0f8f651fb554..160aa77f9655a6201c9f1ba20c6e6cbf16e55839 100644 (file)
@@ -7,14 +7,14 @@ from documents.tests.factories import DocumentFactory
 
 
 class CorrespondentTestCase(TestCase):
-    def test___str__(self):
+    def test___str__(self) -> None:
         for s in ("test", "oχi", "test with fun_charÅc'\"terß"):
             correspondent = CorrespondentFactory.create(name=s)
             self.assertEqual(str(correspondent), s)
 
 
 class DocumentTestCase(TestCase):
-    def test_correspondent_deletion_does_not_cascade(self):
+    def test_correspondent_deletion_does_not_cascade(self) -> None:
         self.assertEqual(Correspondent.objects.all().count(), 0)
         correspondent = CorrespondentFactory.create()
         self.assertEqual(Correspondent.objects.all().count(), 1)
index fee7234e889c6915745eee8c82221aa3c2c69ec7..1447d5c301cb1980b4342edb358b1cccb8d5d74d 100644 (file)
@@ -16,7 +16,7 @@ from paperless_tika.parsers import TikaDocumentParser
 
 class TestParserDiscovery(TestCase):
     @mock.patch("documents.parsers.document_consumer_declaration.send")
-    def test_get_parser_class_1_parser(self, m, *args):
+    def test_get_parser_class_1_parser(self, m, *args) -> None:
         """
         GIVEN:
             - Parser declared for a given mimetype
@@ -43,7 +43,7 @@ class TestParserDiscovery(TestCase):
         self.assertEqual(get_parser_class_for_mime_type("application/pdf"), DummyParser)
 
     @mock.patch("documents.parsers.document_consumer_declaration.send")
-    def test_get_parser_class_n_parsers(self, m, *args):
+    def test_get_parser_class_n_parsers(self, m, *args) -> None:
         """
         GIVEN:
             - Two parsers declared for a given mimetype
@@ -85,7 +85,7 @@ class TestParserDiscovery(TestCase):
         )
 
     @mock.patch("documents.parsers.document_consumer_declaration.send")
-    def test_get_parser_class_0_parsers(self, m, *args):
+    def test_get_parser_class_0_parsers(self, m, *args) -> None:
         """
         GIVEN:
             - No parsers are declared
@@ -99,7 +99,7 @@ class TestParserDiscovery(TestCase):
             self.assertIsNone(get_parser_class_for_mime_type("application/pdf"))
 
     @mock.patch("documents.parsers.document_consumer_declaration.send")
-    def test_get_parser_class_no_valid_parser(self, m, *args):
+    def test_get_parser_class_no_valid_parser(self, m, *args) -> None:
         """
         GIVEN:
             - No parser declared for a given mimetype
@@ -128,7 +128,7 @@ class TestParserDiscovery(TestCase):
 
 
 class TestParserAvailability(TestCase):
-    def test_tesseract_parser(self):
+    def test_tesseract_parser(self) -> None:
         """
         GIVEN:
             - Various mime types
@@ -155,7 +155,7 @@ class TestParserAvailability(TestCase):
                 RasterisedDocumentParser,
             )
 
-    def test_text_parser(self):
+    def test_text_parser(self) -> None:
         """
         GIVEN:
             - Various mime types of a text form
@@ -179,7 +179,7 @@ class TestParserAvailability(TestCase):
                 TextDocumentParser,
             )
 
-    def test_tika_parser(self):
+    def test_tika_parser(self) -> None:
         """
         GIVEN:
             - Various mime types of a office document form
@@ -212,17 +212,17 @@ class TestParserAvailability(TestCase):
                 TikaDocumentParser,
             )
 
-    def test_no_parser_for_mime(self):
+    def test_no_parser_for_mime(self) -> None:
         self.assertIsNone(get_parser_class_for_mime_type("text/sdgsdf"))
 
-    def test_default_extension(self):
+    def test_default_extension(self) -> None:
         # Test no parser declared still returns an extension
         self.assertEqual(get_default_file_extension("application/zip"), ".zip")
 
         # Test invalid mimetype returns no extension
         self.assertEqual(get_default_file_extension("aasdasd/dgfgf"), "")
 
-    def test_file_extension_support(self):
+    def test_file_extension_support(self) -> None:
         self.assertTrue(is_file_ext_supported(".pdf"))
         self.assertFalse(is_file_ext_supported(".hsdfh"))
         self.assertFalse(is_file_ext_supported(""))
index fff5f25286a78e1e8c4806589c8ff2024a49b988..415b0967fcd5e36d2149389878a80ed70ae007e0 100644 (file)
@@ -58,7 +58,7 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
             archive_filename="0000001.pdf",
         )
 
-    def assertSanityError(self, doc: Document, messageRegex):
+    def assertSanityError(self, doc: Document, messageRegex) -> None:
         messages = check_sanity()
         self.assertTrue(messages.has_error)
         with self.assertLogs() as capture:
@@ -69,7 +69,7 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
             )
             self.assertRegex(capture.records[1].message, messageRegex)
 
-    def test_no_issues(self):
+    def test_no_issues(self) -> None:
         self.make_test_data()
         messages = check_sanity()
         self.assertFalse(messages.has_error)
@@ -83,59 +83,59 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
                 "Sanity checker detected no issues.",
             )
 
-    def test_no_docs(self):
+    def test_no_docs(self) -> None:
         self.assertEqual(len(check_sanity()), 0)
 
-    def test_success(self):
+    def test_success(self) -> None:
         self.make_test_data()
         self.assertEqual(len(check_sanity()), 0)
 
-    def test_no_thumbnail(self):
+    def test_no_thumbnail(self) -> None:
         doc = self.make_test_data()
         Path(doc.thumbnail_path).unlink()
         self.assertSanityError(doc, "Thumbnail of document does not exist")
 
-    def test_thumbnail_no_access(self):
+    def test_thumbnail_no_access(self) -> None:
         doc = self.make_test_data()
         Path(doc.thumbnail_path).chmod(0o000)
         self.assertSanityError(doc, "Cannot read thumbnail file of document")
         Path(doc.thumbnail_path).chmod(0o777)
 
-    def test_no_original(self):
+    def test_no_original(self) -> None:
         doc = self.make_test_data()
         Path(doc.source_path).unlink()
         self.assertSanityError(doc, "Original of document does not exist.")
 
-    def test_original_no_access(self):
+    def test_original_no_access(self) -> None:
         doc = self.make_test_data()
         Path(doc.source_path).chmod(0o000)
         self.assertSanityError(doc, "Cannot read original file of document")
         Path(doc.source_path).chmod(0o777)
 
-    def test_original_checksum_mismatch(self):
+    def test_original_checksum_mismatch(self) -> None:
         doc = self.make_test_data()
         doc.checksum = "WOW"
         doc.save()
         self.assertSanityError(doc, "Checksum mismatch. Stored: WOW, actual: ")
 
-    def test_no_archive(self):
+    def test_no_archive(self) -> None:
         doc = self.make_test_data()
         Path(doc.archive_path).unlink()
         self.assertSanityError(doc, "Archived version of document does not exist.")
 
-    def test_archive_no_access(self):
+    def test_archive_no_access(self) -> None:
         doc = self.make_test_data()
         Path(doc.archive_path).chmod(0o000)
         self.assertSanityError(doc, "Cannot read archive file of document")
         Path(doc.archive_path).chmod(0o777)
 
-    def test_archive_checksum_mismatch(self):
+    def test_archive_checksum_mismatch(self) -> None:
         doc = self.make_test_data()
         doc.archive_checksum = "WOW"
         doc.save()
         self.assertSanityError(doc, "Checksum mismatch of archived document")
 
-    def test_empty_content(self):
+    def test_empty_content(self) -> None:
         doc = self.make_test_data()
         doc.content = ""
         doc.save()
@@ -148,7 +148,7 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
             "Document contains no OCR data",
         )
 
-    def test_orphaned_file(self):
+    def test_orphaned_file(self) -> None:
         self.make_test_data()
         Path(self.dirs.originals_dir, "orphaned").touch()
         messages = check_sanity()
@@ -161,7 +161,7 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
     @override_settings(
         APP_LOGO="logo/logo.png",
     )
-    def test_ignore_logo(self):
+    def test_ignore_logo(self) -> None:
         self.make_test_data()
         logo_dir = Path(self.dirs.media_dir, "logo")
         logo_dir.mkdir(parents=True, exist_ok=True)
@@ -169,20 +169,20 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
         messages = check_sanity()
         self.assertFalse(messages.has_warning)
 
-    def test_ignore_ignorable_files(self):
+    def test_ignore_ignorable_files(self) -> None:
         self.make_test_data()
         Path(self.dirs.media_dir, ".DS_Store").touch()
         Path(self.dirs.media_dir, "desktop.ini").touch()
         messages = check_sanity()
         self.assertFalse(messages.has_warning)
 
-    def test_archive_filename_no_checksum(self):
+    def test_archive_filename_no_checksum(self) -> None:
         doc = self.make_test_data()
         doc.archive_checksum = None
         doc.save()
         self.assertSanityError(doc, "has an archive file, but its checksum is missing.")
 
-    def test_archive_checksum_no_filename(self):
+    def test_archive_checksum_no_filename(self) -> None:
         doc = self.make_test_data()
         doc.archive_filename = None
         doc.save()
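
The sanity-check tests annotate a custom assertion helper the same way; the sketch below (invented helper, message, and regex) shows why -> None is the natural choice there: the helper only performs assertions and returns nothing.

    # Illustrative sketch only; the helper and messages are examples.
    import unittest


    class ExampleSanityTests(unittest.TestCase):
        def assert_sanity_error(self, message: str, message_regex: str) -> None:
            # An assertion helper has no meaningful return value, so the
            # added "-> None" is exact, and mypy now checks its body too.
            self.assertRegex(message, message_regex)

        def test_missing_thumbnail_message(self) -> None:
            self.assert_sanity_error(
                "Thumbnail of document does not exist",
                r"Thumbnail .* does not exist",
            )


    if __name__ == "__main__":
        unittest.main()
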
index f7deed5c95420cdbf7bdbd879f14c2dda9a58490..c822608199d62577d2f74de845b92c1fb39710e1 100644 (file)
@@ -25,14 +25,14 @@ from documents.tests.utils import DirectoriesMixin
 class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/share_link_bundles/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.user = User.objects.create_superuser(username="bundle_admin")
         self.client.force_authenticate(self.user)
         self.document = DocumentFactory.create()
 
     @mock.patch("documents.views.build_share_link_bundle.delay")
-    def test_create_bundle_triggers_build_job(self, delay_mock):
+    def test_create_bundle_triggers_build_job(self, delay_mock) -> None:
         payload = {
             "document_ids": [self.document.pk],
             "file_version": ShareLink.FileVersion.ARCHIVE,
@@ -47,7 +47,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
         self.assertEqual(bundle.status, ShareLinkBundle.Status.PENDING)
         delay_mock.assert_called_once_with(bundle.pk)
 
-    def test_create_bundle_rejects_missing_documents(self):
+    def test_create_bundle_rejects_missing_documents(self) -> None:
         payload = {
             "document_ids": [9999],
             "file_version": ShareLink.FileVersion.ARCHIVE,
@@ -60,7 +60,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
         self.assertIn("document_ids", response.data)
 
     @mock.patch("documents.views.has_perms_owner_aware", return_value=False)
-    def test_create_bundle_rejects_insufficient_permissions(self, perms_mock):
+    def test_create_bundle_rejects_insufficient_permissions(self, perms_mock) -> None:
         payload = {
             "document_ids": [self.document.pk],
             "file_version": ShareLink.FileVersion.ARCHIVE,
@@ -74,7 +74,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
         perms_mock.assert_called()
 
     @mock.patch("documents.views.build_share_link_bundle.delay")
-    def test_rebuild_bundle_resets_state(self, delay_mock):
+    def test_rebuild_bundle_resets_state(self, delay_mock) -> None:
         bundle = ShareLinkBundle.objects.create(
             slug="rebuild-slug",
             file_version=ShareLink.FileVersion.ARCHIVE,
@@ -96,7 +96,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
         self.assertEqual(bundle.file_path, "")
         delay_mock.assert_called_once_with(bundle.pk)
 
-    def test_rebuild_bundle_rejects_processing_status(self):
+    def test_rebuild_bundle_rejects_processing_status(self) -> None:
         bundle = ShareLinkBundle.objects.create(
             slug="processing-slug",
             file_version=ShareLink.FileVersion.ARCHIVE,
@@ -109,7 +109,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertIn("detail", response.data)
 
-    def test_create_bundle_rejects_duplicate_documents(self):
+    def test_create_bundle_rejects_duplicate_documents(self) -> None:
         payload = {
             "document_ids": [self.document.pk, self.document.pk],
             "file_version": ShareLink.FileVersion.ARCHIVE,
@@ -121,7 +121,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertIn("document_ids", response.data)
 
-    def test_download_ready_bundle_streams_file(self):
+    def test_download_ready_bundle_streams_file(self) -> None:
         bundle_file = Path(self.dirs.media_dir) / "bundles" / "ready.zip"
         bundle_file.parent.mkdir(parents=True, exist_ok=True)
         bundle_file.write_bytes(b"binary-zip-content")
@@ -143,7 +143,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
         self.assertEqual(content, b"binary-zip-content")
         self.assertIn("attachment;", response["Content-Disposition"])
 
-    def test_download_pending_bundle_returns_202(self):
+    def test_download_pending_bundle_returns_202(self) -> None:
         bundle = ShareLinkBundle.objects.create(
             slug="pendingslug",
             file_version=ShareLink.FileVersion.ARCHIVE,
@@ -156,7 +156,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
 
         self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
 
-    def test_download_failed_bundle_returns_503(self):
+    def test_download_failed_bundle_returns_503(self) -> None:
         bundle = ShareLinkBundle.objects.create(
             slug="failedslug",
             file_version=ShareLink.FileVersion.ARCHIVE,
@@ -169,7 +169,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
 
         self.assertEqual(response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE)
 
-    def test_expired_share_link_redirects(self):
+    def test_expired_share_link_redirects(self) -> None:
         share_link = ShareLink.objects.create(
             slug="expiredlink",
             document=self.document,
@@ -183,7 +183,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_302_FOUND)
         self.assertIn("sharelink_expired=1", response["Location"])
 
-    def test_unknown_share_link_redirects(self):
+    def test_unknown_share_link_redirects(self) -> None:
         self.client.logout()
         response = self.client.get("/share/unknownsharelink/")
 
@@ -192,11 +192,11 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
 
 
 class ShareLinkBundleTaskTests(DirectoriesMixin, APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.document = DocumentFactory.create()
 
-    def test_cleanup_expired_share_link_bundles(self):
+    def test_cleanup_expired_share_link_bundles(self) -> None:
         expired_path = Path(self.dirs.media_dir) / "expired.zip"
         expired_path.parent.mkdir(parents=True, exist_ok=True)
         expired_path.write_bytes(b"expired")
@@ -229,7 +229,7 @@ class ShareLinkBundleTaskTests(DirectoriesMixin, APITestCase):
         self.assertFalse(expired_path.exists())
         self.assertTrue(active_path.exists())
 
-    def test_cleanup_expired_share_link_bundles_logs_on_failure(self):
+    def test_cleanup_expired_share_link_bundles_logs_on_failure(self) -> None:
         expired_bundle = ShareLinkBundle.objects.create(
             slug="expired-bundle",
             file_version=ShareLink.FileVersion.ARCHIVE,
@@ -255,7 +255,7 @@ class ShareLinkBundleTaskTests(DirectoriesMixin, APITestCase):
 
 
 class ShareLinkBundleBuildTaskTests(DirectoriesMixin, APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.document = DocumentFactory.create(
             mime_type="application/pdf",
@@ -284,7 +284,7 @@ class ShareLinkBundleBuildTaskTests(DirectoriesMixin, APITestCase):
         path.write_bytes(content)
         return path
 
-    def test_build_share_link_bundle_creates_zip_and_sets_metadata(self):
+    def test_build_share_link_bundle_creates_zip_and_sets_metadata(self) -> None:
         self._write_document_file(archive=False, content=b"source")
         archive_path = self._write_document_file(archive=True, content=b"archive")
         bundle = ShareLinkBundle.objects.create(
@@ -308,7 +308,7 @@ class ShareLinkBundleBuildTaskTests(DirectoriesMixin, APITestCase):
             self.assertEqual(len(names), 1)
             self.assertEqual(zipf.read(names[0]), archive_path.read_bytes())
 
-    def test_build_share_link_bundle_overwrites_existing_file(self):
+    def test_build_share_link_bundle_overwrites_existing_file(self) -> None:
         self._write_document_file(archive=False, content=b"source")
         bundle = ShareLinkBundle.objects.create(
             slug="overwrite",
@@ -328,7 +328,7 @@ class ShareLinkBundleBuildTaskTests(DirectoriesMixin, APITestCase):
         self.assertTrue(final_path.exists())
         self.assertNotEqual(final_path.read_bytes(), b"old")
 
-    def test_build_share_link_bundle_failure_marks_failed(self):
+    def test_build_share_link_bundle_failure_marks_failed(self) -> None:
         self._write_document_file(archive=False, content=b"source")
         bundle = ShareLinkBundle.objects.create(
             slug="fail-bundle",
@@ -359,13 +359,13 @@ class ShareLinkBundleBuildTaskTests(DirectoriesMixin, APITestCase):
         for path in scratch_zips:
             path.unlink(missing_ok=True)
 
-    def test_build_share_link_bundle_missing_bundle_noop(self):
+    def test_build_share_link_bundle_missing_bundle_noop(self) -> None:
         # Should not raise when bundle does not exist
         build_share_link_bundle(99999)
 
 
 class ShareLinkBundleFilterSetTests(DirectoriesMixin, APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.document = DocumentFactory.create()
         self.document.checksum = "doc1checksum"
@@ -384,7 +384,7 @@ class ShareLinkBundleFilterSetTests(DirectoriesMixin, APITestCase):
         )
         self.bundle_two.documents.set([self.other_document])
 
-    def test_filter_documents_returns_all_for_empty_value(self):
+    def test_filter_documents_returns_all_for_empty_value(self) -> None:
         filterset = ShareLinkBundleFilterSet(
             data={"documents": ""},
             queryset=ShareLinkBundle.objects.all(),
@@ -392,7 +392,7 @@ class ShareLinkBundleFilterSetTests(DirectoriesMixin, APITestCase):
 
         self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])
 
-    def test_filter_documents_handles_invalid_input(self):
+    def test_filter_documents_handles_invalid_input(self) -> None:
         filterset = ShareLinkBundleFilterSet(
             data={"documents": "invalid"},
             queryset=ShareLinkBundle.objects.all(),
@@ -400,7 +400,7 @@ class ShareLinkBundleFilterSetTests(DirectoriesMixin, APITestCase):
 
         self.assertFalse(filterset.qs.exists())
 
-    def test_filter_documents_filters_by_multiple_ids(self):
+    def test_filter_documents_filters_by_multiple_ids(self) -> None:
         filterset = ShareLinkBundleFilterSet(
             data={"documents": f"{self.document.pk},{self.other_document.pk}"},
             queryset=ShareLinkBundle.objects.all(),
@@ -408,7 +408,7 @@ class ShareLinkBundleFilterSetTests(DirectoriesMixin, APITestCase):
 
         self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])
 
-    def test_filter_documents_returns_queryset_for_empty_ids(self):
+    def test_filter_documents_returns_queryset_for_empty_ids(self) -> None:
         filterset = ShareLinkBundleFilterSet(
             data={"documents": ","},
             queryset=ShareLinkBundle.objects.all(),
@@ -418,7 +418,7 @@ class ShareLinkBundleFilterSetTests(DirectoriesMixin, APITestCase):
 
 
 class ShareLinkBundleModelTests(DirectoriesMixin, APITestCase):
-    def test_absolute_file_path_handles_relative_and_absolute(self):
+    def test_absolute_file_path_handles_relative_and_absolute(self) -> None:
         relative_path = Path("relative.zip")
         bundle = ShareLinkBundle.objects.create(
             slug="relative-bundle",
@@ -436,7 +436,7 @@ class ShareLinkBundleModelTests(DirectoriesMixin, APITestCase):
 
         self.assertEqual(bundle.absolute_file_path.resolve(), absolute_path.resolve())
 
-    def test_str_returns_translated_slug(self):
+    def test_str_returns_translated_slug(self) -> None:
         bundle = ShareLinkBundle.objects.create(
             slug="string-slug",
             file_version=ShareLink.FileVersion.ORIGINAL,
@@ -444,7 +444,7 @@ class ShareLinkBundleModelTests(DirectoriesMixin, APITestCase):
 
         self.assertIn("string-slug", str(bundle))
 
-    def test_remove_file_deletes_existing_file(self):
+    def test_remove_file_deletes_existing_file(self) -> None:
         bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "remove.zip"
         bundle_path.parent.mkdir(parents=True, exist_ok=True)
         bundle_path.write_bytes(b"remove-me")
@@ -458,7 +458,7 @@ class ShareLinkBundleModelTests(DirectoriesMixin, APITestCase):
 
         self.assertFalse(bundle_path.exists())
 
-    def test_remove_file_handles_oserror(self):
+    def test_remove_file_handles_oserror(self) -> None:
         bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "remove-error.zip"
         bundle_path.parent.mkdir(parents=True, exist_ok=True)
         bundle_path.write_bytes(b"remove-me")
@@ -473,7 +473,7 @@ class ShareLinkBundleModelTests(DirectoriesMixin, APITestCase):
 
         self.assertTrue(bundle_path.exists())
 
-    def test_delete_calls_remove_file(self):
+    def test_delete_calls_remove_file(self) -> None:
         bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "delete.zip"
         bundle_path.parent.mkdir(parents=True, exist_ok=True)
         bundle_path.write_bytes(b"remove-me")
@@ -488,11 +488,11 @@ class ShareLinkBundleModelTests(DirectoriesMixin, APITestCase):
 
 
 class ShareLinkBundleSerializerTests(DirectoriesMixin, APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
         self.document = DocumentFactory.create()
 
-    def test_validate_document_ids_rejects_duplicates(self):
+    def test_validate_document_ids_rejects_duplicates(self) -> None:
         serializer = ShareLinkBundleSerializer(
             data={
                 "document_ids": [self.document.pk, self.document.pk],
@@ -503,7 +503,7 @@ class ShareLinkBundleSerializerTests(DirectoriesMixin, APITestCase):
         self.assertFalse(serializer.is_valid())
         self.assertIn("document_ids", serializer.errors)
 
-    def test_create_assigns_documents_and_expiration(self):
+    def test_create_assigns_documents_and_expiration(self) -> None:
         serializer = ShareLinkBundleSerializer(
             data={
                 "document_ids": [self.document.pk],
@@ -523,7 +523,7 @@ class ShareLinkBundleSerializerTests(DirectoriesMixin, APITestCase):
             delta=timedelta(seconds=10),
         )
 
-    def test_create_raises_when_missing_documents(self):
+    def test_create_raises_when_missing_documents(self) -> None:
         serializer = ShareLinkBundleSerializer(
             data={
                 "document_ids": [self.document.pk, 9999],
index e748225cd6dd64f222916ced861017c371511c4a..2b2937672bd6bece3e91305c20c18bd0711e47f9 100644 (file)
@@ -14,7 +14,7 @@ from documents.signals.handlers import run_workflows
 
 
 class TestTagHierarchy(APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         self.user = User.objects.create_superuser(username="admin")
         self.client.force_authenticate(user=self.user)
 
@@ -32,7 +32,7 @@ class TestTagHierarchy(APITestCase):
             mime_type="application/pdf",
         )
 
-    def test_document_api_add_child_adds_parent(self):
+    def test_document_api_add_child_adds_parent(self) -> None:
         self.client.patch(
             f"/api/documents/{self.document.pk}/",
             {"tags": [self.child.pk]},
@@ -42,7 +42,7 @@ class TestTagHierarchy(APITestCase):
         tags = set(self.document.tags.values_list("pk", flat=True))
         assert tags == {self.parent.pk, self.child.pk}
 
-    def test_document_api_remove_parent_removes_children(self):
+    def test_document_api_remove_parent_removes_children(self) -> None:
         self.document.add_nested_tags([self.parent, self.child])
         self.client.patch(
             f"/api/documents/{self.document.pk}/",
@@ -52,7 +52,7 @@ class TestTagHierarchy(APITestCase):
         self.document.refresh_from_db()
         assert self.document.tags.count() == 0
 
-    def test_document_api_remove_parent_removes_child(self):
+    def test_document_api_remove_parent_removes_child(self) -> None:
         self.document.add_nested_tags([self.child])
         self.client.patch(
             f"/api/documents/{self.document.pk}/",
@@ -62,7 +62,7 @@ class TestTagHierarchy(APITestCase):
         self.document.refresh_from_db()
         assert self.document.tags.count() == 0
 
-    def test_bulk_edit_respects_hierarchy(self):
+    def test_bulk_edit_respects_hierarchy(self) -> None:
         bulk_edit.add_tag([self.document.pk], self.child.pk)
         self.document.refresh_from_db()
         tags = set(self.document.tags.values_list("pk", flat=True))
@@ -81,7 +81,7 @@ class TestTagHierarchy(APITestCase):
         self.document.refresh_from_db()
         assert self.document.tags.count() == 0
 
-    def test_workflow_actions(self):
+    def test_workflow_actions(self) -> None:
         workflow = Workflow.objects.create(name="wf", order=0)
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
@@ -108,7 +108,7 @@ class TestTagHierarchy(APITestCase):
         self.document.refresh_from_db()
         assert self.document.tags.count() == 0
 
-    def test_tag_view_parent_update_adds_parent_to_docs(self):
+    def test_tag_view_parent_update_adds_parent_to_docs(self) -> None:
         orphan = Tag.objects.create(name="Orphan")
         self.document.tags.add(orphan)
 
@@ -122,7 +122,7 @@ class TestTagHierarchy(APITestCase):
         tags = set(self.document.tags.values_list("pk", flat=True))
         assert tags == {self.parent.pk, orphan.pk}
 
-    def test_child_document_count_included_when_parent_paginated(self):
+    def test_child_document_count_included_when_parent_paginated(self) -> None:
         self.document.tags.add(self.child)
 
         response = self.client.get(
@@ -140,14 +140,14 @@ class TestTagHierarchy(APITestCase):
         assert child_entry["id"] == self.child.pk
         assert child_entry["document_count"] == 1
 
-    def test_tag_serializer_populates_document_filter_context(self):
+    def test_tag_serializer_populates_document_filter_context(self) -> None:
         context = {}
 
         serializer = TagSerializer(self.parent, context=context)
         assert serializer.data  # triggers serialization
         assert "document_count_filter" in context
 
-    def test_cannot_set_parent_to_self(self):
+    def test_cannot_set_parent_to_self(self) -> None:
         tag = Tag.objects.create(name="Selfie")
         resp = self.client.patch(
             f"/api/tags/{tag.pk}/",
@@ -157,7 +157,7 @@ class TestTagHierarchy(APITestCase):
         assert resp.status_code == 400
         assert "Cannot set itself as parent" in str(resp.data["parent"])
 
-    def test_cannot_set_parent_to_descendant(self):
+    def test_cannot_set_parent_to_descendant(self) -> None:
         a = Tag.objects.create(name="A")
         b = Tag.objects.create(name="B", tn_parent=a)
         c = Tag.objects.create(name="C", tn_parent=b)
@@ -171,7 +171,7 @@ class TestTagHierarchy(APITestCase):
         assert resp.status_code == 400
         assert "Cannot set parent to a descendant" in str(resp.data["parent"])
 
-    def test_max_depth_on_create(self):
+    def test_max_depth_on_create(self) -> None:
         a = Tag.objects.create(name="A1")
         b = Tag.objects.create(name="B1", tn_parent=a)
         c = Tag.objects.create(name="C1", tn_parent=b)
@@ -199,7 +199,7 @@ class TestTagHierarchy(APITestCase):
         assert "parent" in resp_fail.data
         assert "Maximum nesting depth exceeded" in str(resp_fail.data["parent"])
 
-    def test_max_depth_on_move_subtree(self):
+    def test_max_depth_on_move_subtree(self) -> None:
         a = Tag.objects.create(name="A2")
         b = Tag.objects.create(name="B2", tn_parent=a)
         c = Tag.objects.create(name="C2", tn_parent=b)
@@ -230,7 +230,7 @@ class TestTagHierarchy(APITestCase):
         x.refresh_from_db()
         assert x.parent_pk == c.id
 
-    def test_is_root_filter_returns_only_root_tags(self):
+    def test_is_root_filter_returns_only_root_tags(self) -> None:
         other_root = Tag.objects.create(name="Other parent")
 
         response = self.client.get(
index d94eb38480a3431e33ac90612a8329ff95e5db27..abe36087ecdc7c5b4cb70980a13fb8e2bd665533 100644 (file)
@@ -18,7 +18,11 @@ from documents.tests.utils import DirectoriesMixin
 
 @mock.patch("documents.consumer.magic.from_file", fake_magic_from_file)
 class TestTaskSignalHandler(DirectoriesMixin, TestCase):
-    def util_call_before_task_publish_handler(self, headers_to_use, body_to_use):
+    def util_call_before_task_publish_handler(
+        self,
+        headers_to_use,
+        body_to_use,
+    ) -> None:
         """
         Simple utility to call the pre-run handle and ensure it created a single task
         instance
@@ -29,7 +33,7 @@ class TestTaskSignalHandler(DirectoriesMixin, TestCase):
 
         self.assertEqual(PaperlessTask.objects.all().count(), 1)
 
-    def test_before_task_publish_handler_consume(self):
+    def test_before_task_publish_handler_consume(self) -> None:
         """
         GIVEN:
             - A celery task is started via the consume folder
@@ -72,7 +76,7 @@ class TestTaskSignalHandler(DirectoriesMixin, TestCase):
         self.assertEqual(1, task.owner_id)
         self.assertEqual(celery.states.PENDING, task.status)
 
-    def test_task_prerun_handler(self):
+    def test_task_prerun_handler(self) -> None:
         """
         GIVEN:
             - A celery task is started via the consume folder
@@ -112,7 +116,7 @@ class TestTaskSignalHandler(DirectoriesMixin, TestCase):
 
         self.assertEqual(celery.states.STARTED, task.status)
 
-    def test_task_postrun_handler(self):
+    def test_task_postrun_handler(self) -> None:
         """
         GIVEN:
             - A celery task is started via the consume folder
@@ -154,7 +158,7 @@ class TestTaskSignalHandler(DirectoriesMixin, TestCase):
 
         self.assertEqual(celery.states.SUCCESS, task.status)
 
-    def test_task_failure_handler(self):
+    def test_task_failure_handler(self) -> None:
         """
         GIVEN:
             - A celery task is started via the consume folder
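
Where the added annotation would push a one-line signature past the formatter's line-length limit (as with the long utility method above and a cached-LLM test further down), the parameters are wrapped one per line. A sketch of that layout, with made-up names and the exact length limit left as an assumption:

    # Illustrative sketch only; names are examples, and the line length
    # enforced by the project's formatter is assumed, not checked here.
    import unittest


    class ExampleWrappedSignature(unittest.TestCase):
        def util_call_handler_and_count(
            self,
            headers_to_use,
            body_to_use,
        ) -> None:
            # Only the return annotation is added; the parameters stay
            # unannotated, and the long signature is wrapped with one
            # parameter per line and a trailing comma.
            self.assertIsInstance(headers_to_use, dict)
            self.assertIsInstance(body_to_use, tuple)

        def test_wrapped(self) -> None:
            self.util_call_handler_and_count({"id": "abc"}, ((), {}, {}))


    if __name__ == "__main__":
        unittest.main()
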
index 475709dd0438a5fd044314cffb594016240cd348..4647c19ba8a773727b5c2445f4c98485d273b41e 100644 (file)
@@ -23,7 +23,7 @@ from documents.tests.utils import FileSystemAssertsMixin
 
 
 class TestIndexReindex(DirectoriesMixin, TestCase):
-    def test_index_reindex(self):
+    def test_index_reindex(self) -> None:
         Document.objects.create(
             title="test",
             content="my document",
@@ -35,7 +35,7 @@ class TestIndexReindex(DirectoriesMixin, TestCase):
 
         tasks.index_reindex()
 
-    def test_index_optimize(self):
+    def test_index_optimize(self) -> None:
         Document.objects.create(
             title="test",
             content="my document",
@@ -50,12 +50,12 @@ class TestIndexReindex(DirectoriesMixin, TestCase):
 
 class TestClassifier(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     @mock.patch("documents.tasks.load_classifier")
-    def test_train_classifier_no_auto_matching(self, load_classifier):
+    def test_train_classifier_no_auto_matching(self, load_classifier) -> None:
         tasks.train_classifier()
         load_classifier.assert_not_called()
 
     @mock.patch("documents.tasks.load_classifier")
-    def test_train_classifier_with_auto_tag(self, load_classifier):
+    def test_train_classifier_with_auto_tag(self, load_classifier) -> None:
         load_classifier.return_value = None
         Tag.objects.create(matching_algorithm=Tag.MATCH_AUTO, name="test")
         tasks.train_classifier()
@@ -63,7 +63,7 @@ class TestClassifier(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsNotFile(settings.MODEL_FILE)
 
     @mock.patch("documents.tasks.load_classifier")
-    def test_train_classifier_with_auto_type(self, load_classifier):
+    def test_train_classifier_with_auto_type(self, load_classifier) -> None:
         load_classifier.return_value = None
         DocumentType.objects.create(matching_algorithm=Tag.MATCH_AUTO, name="test")
         tasks.train_classifier()
@@ -71,14 +71,14 @@ class TestClassifier(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsNotFile(settings.MODEL_FILE)
 
     @mock.patch("documents.tasks.load_classifier")
-    def test_train_classifier_with_auto_correspondent(self, load_classifier):
+    def test_train_classifier_with_auto_correspondent(self, load_classifier) -> None:
         load_classifier.return_value = None
         Correspondent.objects.create(matching_algorithm=Tag.MATCH_AUTO, name="test")
         tasks.train_classifier()
         load_classifier.assert_called_once()
         self.assertIsNotFile(settings.MODEL_FILE)
 
-    def test_train_classifier(self):
+    def test_train_classifier(self) -> None:
         c = Correspondent.objects.create(matching_algorithm=Tag.MATCH_AUTO, name="test")
         doc = Document.objects.create(correspondent=c, content="test", title="test")
         self.assertIsNotFile(settings.MODEL_FILE)
@@ -107,13 +107,13 @@ class TestClassifier(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
 class TestSanityCheck(DirectoriesMixin, TestCase):
     @mock.patch("documents.tasks.sanity_checker.check_sanity")
-    def test_sanity_check_success(self, m):
+    def test_sanity_check_success(self, m) -> None:
         m.return_value = SanityCheckMessages()
         self.assertEqual(tasks.sanity_check(), "No issues detected.")
         m.assert_called_once()
 
     @mock.patch("documents.tasks.sanity_checker.check_sanity")
-    def test_sanity_check_error(self, m):
+    def test_sanity_check_error(self, m) -> None:
         messages = SanityCheckMessages()
         messages.error(None, "Some error")
         m.return_value = messages
@@ -121,7 +121,7 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
         m.assert_called_once()
 
     @mock.patch("documents.tasks.sanity_checker.check_sanity")
-    def test_sanity_check_error_no_raise(self, m):
+    def test_sanity_check_error_no_raise(self, m) -> None:
         messages = SanityCheckMessages()
         messages.error(None, "Some error")
         m.return_value = messages
@@ -134,7 +134,7 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
         m.assert_called_once()
 
     @mock.patch("documents.tasks.sanity_checker.check_sanity")
-    def test_sanity_check_warning(self, m):
+    def test_sanity_check_warning(self, m) -> None:
         messages = SanityCheckMessages()
         messages.warning(None, "Some warning")
         m.return_value = messages
@@ -145,7 +145,7 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
         m.assert_called_once()
 
     @mock.patch("documents.tasks.sanity_checker.check_sanity")
-    def test_sanity_check_info(self, m):
+    def test_sanity_check_info(self, m) -> None:
         messages = SanityCheckMessages()
         messages.info(None, "Some info")
         m.return_value = messages
@@ -157,7 +157,7 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
 
 
 class TestBulkUpdate(DirectoriesMixin, TestCase):
-    def test_bulk_update_documents(self):
+    def test_bulk_update_documents(self) -> None:
         doc1 = Document.objects.create(
             title="test",
             content="my document",
@@ -180,7 +180,7 @@ class TestEmptyTrashTask(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         - Document is only deleted if it has been in trash for more than the delay (default 30 days)
     """
 
-    def test_empty_trash(self):
+    def test_empty_trash(self) -> None:
         doc = Document.objects.create(
             title="test",
             content="my document",
@@ -204,7 +204,7 @@ class TestEmptyTrashTask(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
 
 class TestUpdateContent(DirectoriesMixin, TestCase):
-    def test_update_content_maybe_archive_file(self):
+    def test_update_content_maybe_archive_file(self) -> None:
         """
         GIVEN:
             - Existing document with archive file
@@ -245,7 +245,7 @@ class TestUpdateContent(DirectoriesMixin, TestCase):
         self.assertNotEqual(Document.objects.get(pk=doc.pk).content, "test")
         self.assertNotEqual(Document.objects.get(pk=doc.pk).archive_checksum, "wow")
 
-    def test_update_content_maybe_archive_file_no_archive(self):
+    def test_update_content_maybe_archive_file_no_archive(self) -> None:
         """
         GIVEN:
             - Existing document without archive file
@@ -280,7 +280,7 @@ class TestAIIndex(DirectoriesMixin, TestCase):
         AI_ENABLED=True,
         LLM_EMBEDDING_BACKEND="huggingface",
     )
-    def test_ai_index_success(self):
+    def test_ai_index_success(self) -> None:
         """
         GIVEN:
             - Document exists, AI is enabled, llm index backend is set
@@ -309,7 +309,7 @@ class TestAIIndex(DirectoriesMixin, TestCase):
         AI_ENABLED=True,
         LLM_EMBEDDING_BACKEND="huggingface",
     )
-    def test_ai_index_failure(self):
+    def test_ai_index_failure(self) -> None:
         """
         GIVEN:
             - Document exists, AI is enabled, llm index backend is set
@@ -334,7 +334,7 @@ class TestAIIndex(DirectoriesMixin, TestCase):
             self.assertEqual(task.status, states.FAILURE)
             self.assertIn("LLM index update failed.", task.result)
 
-    def test_update_document_in_llm_index(self):
+    def test_update_document_in_llm_index(self) -> None:
         """
         GIVEN:
             - Nothing
@@ -354,7 +354,7 @@ class TestAIIndex(DirectoriesMixin, TestCase):
             tasks.update_document_in_llm_index(doc)
             llm_index_add_or_update_document.assert_called_once_with(doc)
 
-    def test_remove_document_from_llm_index(self):
+    def test_remove_document_from_llm_index(self) -> None:
         """
         GIVEN:
             - Nothing
index a73016c26e3bc3efcd4f2069187111fbc07dda7c..14f0425b94c6be8126c27a274b160b22cb6af315 100644 (file)
@@ -35,12 +35,12 @@ class TestViews(DirectoriesMixin, TestCase):
         self.user = User.objects.create_user("testuser")
         super().setUp()
 
-    def test_login_redirect(self):
+    def test_login_redirect(self) -> None:
         response = self.client.get("/")
         self.assertEqual(response.status_code, status.HTTP_302_FOUND)
         self.assertEqual(response.url, "/accounts/login/?next=/")
 
-    def test_index(self):
+    def test_index(self) -> None:
         self.client.force_login(self.user)
         for language_given, language_actual in [
             ("", "en-US"),
@@ -84,7 +84,7 @@ class TestViews(DirectoriesMixin, TestCase):
             )
 
     @override_settings(BASE_URL="/paperless/")
-    def test_index_app_logo_with_base_url(self):
+    def test_index_app_logo_with_base_url(self) -> None:
         """
         GIVEN:
             - Existing config with app_logo specified
@@ -103,7 +103,7 @@ class TestViews(DirectoriesMixin, TestCase):
             f"/paperless{config.app_logo}",
         )
 
-    def test_share_link_views(self):
+    def test_share_link_views(self) -> None:
         """
         GIVEN:
             - Share link created
@@ -169,7 +169,7 @@ class TestViews(DirectoriesMixin, TestCase):
         self.assertEqual(response.request["PATH_INFO"], "/accounts/login/")
         self.assertContains(response, b"Share link has expired")
 
-    def test_list_with_full_permissions(self):
+    def test_list_with_full_permissions(self) -> None:
         """
         GIVEN:
             - Tags with different permissions
@@ -238,7 +238,7 @@ class TestViews(DirectoriesMixin, TestCase):
             else:
                 assert False, f"Unexpected tag found: {tag['name']}"
 
-    def test_list_no_n_plus_1_queries(self):
+    def test_list_no_n_plus_1_queries(self) -> None:
         """
         GIVEN:
             - Tags with different permissions
@@ -281,7 +281,7 @@ class TestViews(DirectoriesMixin, TestCase):
 
 
 class TestAISuggestions(DirectoriesMixin, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         self.user = User.objects.create_superuser(username="testuser")
         self.document = Document.objects.create(
             title="Test Document",
@@ -300,7 +300,11 @@ class TestAISuggestions(DirectoriesMixin, TestCase):
         AI_ENABLED=True,
         LLM_BACKEND="mock_backend",
     )
-    def test_suggestions_with_cached_llm(self, mock_refresh_cache, mock_get_cache):
+    def test_suggestions_with_cached_llm(
+        self,
+        mock_refresh_cache,
+        mock_get_cache,
+    ) -> None:
         mock_get_cache.return_value = MagicMock(suggestions={"tags": ["tag1", "tag2"]})
 
         self.client.force_login(user=self.user)
@@ -317,7 +321,7 @@ class TestAISuggestions(DirectoriesMixin, TestCase):
     def test_suggestions_with_ai_enabled(
         self,
         mock_get_ai_classification,
-    ):
+    ) -> None:
         mock_get_ai_classification.return_value = {
             "title": "AI Title",
             "tags": ["tag1", "tag2"],
@@ -346,7 +350,7 @@ class TestAISuggestions(DirectoriesMixin, TestCase):
             },
         )
 
-    def test_invalidate_suggestions_cache(self):
+    def test_invalidate_suggestions_cache(self) -> None:
         self.client.force_login(user=self.user)
         suggestions = {
             "title": "AI Title",
@@ -384,7 +388,7 @@ class TestAISuggestions(DirectoriesMixin, TestCase):
 class TestAIChatStreamingView(DirectoriesMixin, TestCase):
     ENDPOINT = "/api/documents/chat/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         self.user = User.objects.create_user(username="testuser", password="pass")
         self.client.force_login(user=self.user)
         self.document = Document.objects.create(
@@ -395,7 +399,7 @@ class TestAIChatStreamingView(DirectoriesMixin, TestCase):
         super().setUp()
 
     @override_settings(AI_ENABLED=False)
-    def test_post_ai_disabled(self):
+    def test_post_ai_disabled(self) -> None:
         response = self.client.post(
             self.ENDPOINT,
             data='{"q": "question"}',
@@ -407,7 +411,7 @@ class TestAIChatStreamingView(DirectoriesMixin, TestCase):
     @patch("documents.views.stream_chat_with_documents")
     @patch("documents.views.get_objects_for_user_owner_aware")
     @override_settings(AI_ENABLED=True)
-    def test_post_no_document_id(self, mock_get_objects, mock_stream_chat):
+    def test_post_no_document_id(self, mock_get_objects, mock_stream_chat) -> None:
         mock_get_objects.return_value = [self.document]
         mock_stream_chat.return_value = iter([b"data"])
         response = self.client.post(
@@ -420,7 +424,7 @@ class TestAIChatStreamingView(DirectoriesMixin, TestCase):
 
     @patch("documents.views.stream_chat_with_documents")
     @override_settings(AI_ENABLED=True)
-    def test_post_with_document_id(self, mock_stream_chat):
+    def test_post_with_document_id(self, mock_stream_chat) -> None:
         mock_stream_chat.return_value = iter([b"data"])
         response = self.client.post(
             self.ENDPOINT,
@@ -431,7 +435,7 @@ class TestAIChatStreamingView(DirectoriesMixin, TestCase):
         self.assertEqual(response["Content-Type"], "text/event-stream")
 
     @override_settings(AI_ENABLED=True)
-    def test_post_with_invalid_document_id(self):
+    def test_post_with_invalid_document_id(self) -> None:
         response = self.client.post(
             self.ENDPOINT,
             data='{"q": "question", "document_id": 999999}',
@@ -442,7 +446,7 @@ class TestAIChatStreamingView(DirectoriesMixin, TestCase):
 
     @patch("documents.views.has_perms_owner_aware")
     @override_settings(AI_ENABLED=True)
-    def test_post_with_document_id_no_permission(self, mock_has_perms):
+    def test_post_with_document_id_no_permission(self, mock_has_perms) -> None:
         mock_has_perms.return_value = False
         response = self.client.post(
             self.ENDPOINT,
index 75f9d5fe6dcd75d0912564791ef982a9382af1c2..964d7eef6838b6f27a1bf97f60eb085db3edebbd 100644 (file)
@@ -118,7 +118,7 @@ class TestWorkflows(
 
         return super().setUp()
 
-    def test_workflow_match(self):
+    def test_workflow_match(self) -> None:
         """
         GIVEN:
             - Existing workflow
@@ -237,7 +237,7 @@ class TestWorkflows(
         expected_str = f"Document matched {trigger} from {w}"
         self.assertIn(expected_str, info)
 
-    def test_workflow_match_mailrule(self):
+    def test_workflow_match_mailrule(self) -> None:
         """
         GIVEN:
             - Existing workflow
@@ -339,7 +339,7 @@ class TestWorkflows(
         expected_str = f"Document matched {trigger} from {w}"
         self.assertIn(expected_str, info)
 
-    def test_workflow_match_multiple(self):
+    def test_workflow_match_multiple(self) -> None:
         """
         GIVEN:
             - Multiple existing workflows
@@ -434,7 +434,7 @@ class TestWorkflows(
         expected_str = f"Document matched {trigger2} from {w2}"
         self.assertIn(expected_str, cm.output[1])
 
-    def test_workflow_fnmatch_path(self):
+    def test_workflow_fnmatch_path(self) -> None:
         """
         GIVEN:
             - Existing workflow
@@ -482,7 +482,7 @@ class TestWorkflows(
         expected_str = f"Document matched {trigger} from {w}"
         self.assertIn(expected_str, cm.output[0])
 
-    def test_workflow_no_match_filename(self):
+    def test_workflow_no_match_filename(self) -> None:
         """
         GIVEN:
             - Existing workflow
@@ -557,7 +557,7 @@ class TestWorkflows(
         expected_str = f"Document filename {test_file.name} does not match"
         self.assertIn(expected_str, cm.output[1])
 
-    def test_workflow_no_match_path(self):
+    def test_workflow_no_match_path(self) -> None:
         """
         GIVEN:
             - Existing workflow
@@ -641,7 +641,7 @@ class TestWorkflows(
         expected_str = f"Document path {test_file} does not match"
         self.assertIn(expected_str, cm.output[1])
 
-    def test_workflow_no_match_mail_rule(self):
+    def test_workflow_no_match_mail_rule(self) -> None:
         """
         GIVEN:
             - Existing workflow
@@ -726,7 +726,7 @@ class TestWorkflows(
         expected_str = "Document mail rule 99 !="
         self.assertIn(expected_str, cm.output[1])
 
-    def test_workflow_no_match_source(self):
+    def test_workflow_no_match_source(self) -> None:
         """
         GIVEN:
             - Existing workflow
@@ -810,7 +810,7 @@ class TestWorkflows(
         expected_str = f"Document source {DocumentSource.ApiUpload.name} not in ['{DocumentSource.ConsumeFolder.name}', '{DocumentSource.MailFetch.name}']"
         self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_no_match_trigger_type(self):
+    def test_document_added_no_match_trigger_type(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
         )
@@ -845,7 +845,7 @@ class TestWorkflows(
             expected_str = f"No matching triggers with type {WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED} found"
             self.assertIn(expected_str, cm.output[1])
 
-    def test_workflow_repeat_custom_fields(self):
+    def test_workflow_repeat_custom_fields(self) -> None:
         """
         GIVEN:
             - Existing workflows which assign the same custom field
@@ -898,7 +898,7 @@ class TestWorkflows(
         expected_str = f"Document matched {trigger} from {w}"
         self.assertIn(expected_str, cm.output[0])
 
-    def test_document_added_workflow(self):
+    def test_document_added_workflow(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             filter_filename="*sample*",
@@ -946,7 +946,7 @@ class TestWorkflows(
         self.assertEqual(doc.correspondent, self.c2)
         self.assertEqual(doc.title, f"Doc created in {created.year}")
 
-    def test_document_added_no_match_filename(self):
+    def test_document_added_no_match_filename(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             filter_filename="*foobar*",
@@ -982,7 +982,7 @@ class TestWorkflows(
             expected_str = f"Document filename {doc.original_filename} does not match"
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_match_content_matching(self):
+    def test_document_added_match_content_matching(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             matching_algorithm=MatchingModel.MATCH_LITERAL,
@@ -1020,7 +1020,7 @@ class TestWorkflows(
             expected_str = f"Document matched {trigger} from {w}"
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_no_match_content_matching(self):
+    def test_document_added_no_match_content_matching(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             matching_algorithm=MatchingModel.MATCH_LITERAL,
@@ -1057,7 +1057,7 @@ class TestWorkflows(
             expected_str = f"Document content matching settings for algorithm '{trigger.matching_algorithm}' did not match"
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_no_match_tags(self):
+    def test_document_added_no_match_tags(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
         )
@@ -1092,7 +1092,7 @@ class TestWorkflows(
             expected_str = f"Document tags {list(doc.tags.all())} do not include {list(trigger.filter_has_tags.all())}"
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_no_match_all_tags(self):
+    def test_document_added_no_match_all_tags(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
         )
@@ -1130,7 +1130,7 @@ class TestWorkflows(
             )
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_excluded_tags(self):
+    def test_document_added_excluded_tags(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
         )
@@ -1168,7 +1168,7 @@ class TestWorkflows(
             )
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_excluded_correspondent(self):
+    def test_document_added_excluded_correspondent(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
         )
@@ -1204,7 +1204,7 @@ class TestWorkflows(
             )
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_excluded_document_types(self):
+    def test_document_added_excluded_document_types(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
         )
@@ -1240,7 +1240,7 @@ class TestWorkflows(
             )
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_excluded_storage_paths(self):
+    def test_document_added_excluded_storage_paths(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
         )
@@ -1276,7 +1276,7 @@ class TestWorkflows(
             )
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_any_filters(self):
+    def test_document_added_any_filters(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
         )
@@ -1346,7 +1346,7 @@ class TestWorkflows(
         self.assertFalse(matched)
         self.assertIn("storage path", reason)
 
-    def test_document_added_custom_field_query_no_match(self):
+    def test_document_added_custom_field_query_no_match(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             filter_custom_field_query=json.dumps(
@@ -1388,7 +1388,7 @@ class TestWorkflows(
                 cm.output[1],
             )
 
-    def test_document_added_custom_field_query_match(self):
+    def test_document_added_custom_field_query_match(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             filter_custom_field_query=json.dumps(
@@ -1413,7 +1413,7 @@ class TestWorkflows(
         self.assertTrue(matched)
         self.assertIsNone(reason)
 
-    def test_prefilter_documents_custom_field_query(self):
+    def test_prefilter_documents_custom_field_query(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             filter_custom_field_query=json.dumps(
@@ -1454,7 +1454,7 @@ class TestWorkflows(
         self.assertIn(doc1, filtered)
         self.assertNotIn(doc2, filtered)
 
-    def test_prefilter_documents_any_filters(self):
+    def test_prefilter_documents_any_filters(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
         )
@@ -1487,7 +1487,7 @@ class TestWorkflows(
         self.assertIn(allowed_document, filtered)
         self.assertNotIn(blocked_document, filtered)
 
-    def test_consumption_trigger_requires_filter_configuration(self):
+    def test_consumption_trigger_requires_filter_configuration(self) -> None:
         serializer = WorkflowTriggerSerializer(
             data={
                 "type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
@@ -1501,7 +1501,7 @@ class TestWorkflows(
             [str(error) for error in errors],
         )
 
-    def test_workflow_trigger_serializer_clears_empty_custom_field_query(self):
+    def test_workflow_trigger_serializer_clears_empty_custom_field_query(self) -> None:
         serializer = WorkflowTriggerSerializer(
             data={
                 "type": WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
@@ -1512,7 +1512,7 @@ class TestWorkflows(
         self.assertTrue(serializer.is_valid(), serializer.errors)
         self.assertIsNone(serializer.validated_data.get("filter_custom_field_query"))
 
-    def test_existing_document_invalid_custom_field_query_configuration(self):
+    def test_existing_document_invalid_custom_field_query_configuration(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             filter_custom_field_query="{ not json",
@@ -1528,7 +1528,9 @@ class TestWorkflows(
         self.assertFalse(matched)
         self.assertEqual(reason, "Invalid custom field query configuration")
 
-    def test_prefilter_documents_returns_none_for_invalid_custom_field_query(self):
+    def test_prefilter_documents_returns_none_for_invalid_custom_field_query(
+        self,
+    ) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             filter_custom_field_query="{ not json",
@@ -1547,7 +1549,7 @@ class TestWorkflows(
 
         self.assertEqual(list(filtered), [])
 
-    def test_prefilter_documents_applies_all_filters(self):
+    def test_prefilter_documents_applies_all_filters(self) -> None:
         other_document_type = DocumentType.objects.create(name="Other Type")
         other_storage_path = StoragePath.objects.create(
             name="Blocked path",
@@ -1595,7 +1597,7 @@ class TestWorkflows(
         self.assertIn(allowed_document, filtered)
         self.assertNotIn(blocked_document, filtered)
 
-    def test_document_added_no_match_doctype(self):
+    def test_document_added_no_match_doctype(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             filter_has_document_type=self.dt,
@@ -1628,7 +1630,7 @@ class TestWorkflows(
             expected_str = f"Document doc type {doc.document_type} does not match {trigger.filter_has_document_type}"
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_no_match_correspondent(self):
+    def test_document_added_no_match_correspondent(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             filter_has_correspondent=self.c,
@@ -1662,7 +1664,7 @@ class TestWorkflows(
             expected_str = f"Document correspondent {doc.correspondent} does not match {trigger.filter_has_correspondent}"
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_no_match_storage_path(self):
+    def test_document_added_no_match_storage_path(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             filter_has_storage_path=self.sp,
@@ -1694,7 +1696,7 @@ class TestWorkflows(
             expected_str = f"Document storage path {doc.storage_path} does not match {trigger.filter_has_storage_path}"
             self.assertIn(expected_str, cm.output[1])
 
-    def test_document_added_invalid_title_placeholders(self):
+    def test_document_added_invalid_title_placeholders(self) -> None:
         """
         GIVEN:
             - Existing workflow with added trigger type
@@ -1735,7 +1737,7 @@ class TestWorkflows(
 
         self.assertEqual(doc.title, "Doc {created_year]")
 
-    def test_document_updated_workflow(self):
+    def test_document_updated_workflow(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
             filter_has_document_type=self.dt,
@@ -1767,7 +1769,7 @@ class TestWorkflows(
 
         self.assertEqual(doc.custom_fields.all().count(), 1)
 
-    def test_document_consumption_workflow_month_placeholder_addded(self):
+    def test_document_consumption_workflow_month_placeholder_addded(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
             sources=f"{DocumentSource.ApiUpload}",
@@ -1806,7 +1808,7 @@ class TestWorkflows(
                 r"Doc added in \w{3,}",
             )  # Match any 3-letter month name
 
-    def test_document_updated_workflow_existing_custom_field(self):
+    def test_document_updated_workflow_existing_custom_field(self) -> None:
         """
         GIVEN:
             - Existing workflow with UPDATED trigger and action that assigns a custom field with a value
@@ -1850,7 +1852,7 @@ class TestWorkflows(
         doc.refresh_from_db()
         self.assertEqual(doc.custom_fields.get(field=self.cf1).value, "new value")
 
-    def test_document_updated_workflow_merge_permissions(self):
+    def test_document_updated_workflow_merge_permissions(self) -> None:
         """
         GIVEN:
             - Existing workflow with UPDATED trigger and action that sets permissions
@@ -1918,7 +1920,7 @@ class TestWorkflows(
         # group2 should have been added
         self.assertIn(self.group2, group_perms)
 
-    def test_workflow_scheduled_trigger_created(self):
+    def test_workflow_scheduled_trigger_created(self) -> None:
         """
         GIVEN:
             - Existing workflow with SCHEDULED trigger against the created field and action that assigns owner
@@ -1961,7 +1963,7 @@ class TestWorkflows(
         doc.refresh_from_db()
         self.assertEqual(doc.owner, self.user2)
 
-    def test_workflow_scheduled_trigger_added(self):
+    def test_workflow_scheduled_trigger_added(self) -> None:
         """
         GIVEN:
             - Existing workflow with SCHEDULED trigger against the added field and action that assigns owner
@@ -2004,7 +2006,7 @@ class TestWorkflows(
         self.assertEqual(doc.owner, self.user2)
 
     @mock.patch("documents.models.Document.objects.filter", autospec=True)
-    def test_workflow_scheduled_trigger_modified(self, mock_filter):
+    def test_workflow_scheduled_trigger_modified(self, mock_filter) -> None:
         """
         GIVEN:
             - Existing workflow with SCHEDULED trigger against the modified field and action that assigns owner
@@ -2046,7 +2048,7 @@ class TestWorkflows(
         doc.refresh_from_db()
         self.assertEqual(doc.owner, self.user2)
 
-    def test_workflow_scheduled_trigger_custom_field(self):
+    def test_workflow_scheduled_trigger_custom_field(self) -> None:
         """
         GIVEN:
             - Existing workflow with SCHEDULED trigger against a custom field and action that assigns owner
@@ -2092,7 +2094,7 @@ class TestWorkflows(
         doc.refresh_from_db()
         self.assertEqual(doc.owner, self.user2)
 
-    def test_workflow_scheduled_already_run(self):
+    def test_workflow_scheduled_already_run(self) -> None:
         """
         GIVEN:
             - Existing workflow with SCHEDULED trigger
@@ -2143,7 +2145,7 @@ class TestWorkflows(
         doc.refresh_from_db()
         self.assertIsNone(doc.owner)
 
-    def test_workflow_scheduled_trigger_too_early(self):
+    def test_workflow_scheduled_trigger_too_early(self) -> None:
         """
         GIVEN:
             - Existing workflow with SCHEDULED trigger and recurring interval of 7 days
@@ -2197,7 +2199,7 @@ class TestWorkflows(
             doc.refresh_from_db()
             self.assertIsNone(doc.owner)
 
-    def test_workflow_scheduled_recurring_respects_latest_run(self):
+    def test_workflow_scheduled_recurring_respects_latest_run(self) -> None:
         """
         GIVEN:
             - Scheduled workflow marked as recurring with a 1-day interval
@@ -2259,7 +2261,7 @@ class TestWorkflows(
             2,
         )
 
-    def test_workflow_scheduled_trigger_negative_offset_customfield(self):
+    def test_workflow_scheduled_trigger_negative_offset_customfield(self) -> None:
         """
         GIVEN:
             - Workflow with offset -7 (i.e., 7 days *before* the date)
@@ -2321,7 +2323,7 @@ class TestWorkflows(
         doc2.refresh_from_db()
         self.assertIsNone(doc2.owner)
 
-    def test_workflow_scheduled_trigger_negative_offset_created(self):
+    def test_workflow_scheduled_trigger_negative_offset_created(self) -> None:
         """
         GIVEN:
             - Existing workflow with SCHEDULED trigger and negative offset of -7 days (so 7 days before date)
@@ -2372,7 +2374,7 @@ class TestWorkflows(
         doc2.refresh_from_db()
         self.assertIsNone(doc2.owner)  # has not triggered yet
 
-    def test_offset_positive_means_after(self):
+    def test_offset_positive_means_after(self) -> None:
         """
         GIVEN:
             - Document created 30 days ago
@@ -2406,7 +2408,7 @@ class TestWorkflows(
         doc.refresh_from_db()
         self.assertEqual(doc.owner, self.user2)
 
-    def test_workflow_scheduled_filters_queryset(self):
+    def test_workflow_scheduled_filters_queryset(self) -> None:
         """
         GIVEN:
             - Existing workflow with scheduled trigger
@@ -2457,7 +2459,7 @@ class TestWorkflows(
         )
         self.assertEqual(filtered_docs.count(), 5)
 
-    def test_workflow_enabled_disabled(self):
+    def test_workflow_enabled_disabled(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
             filter_filename="*sample*",
@@ -2503,7 +2505,7 @@ class TestWorkflows(
         self.assertEqual(doc.title, "Title assign owner")
         self.assertEqual(doc.owner, self.user2)
 
-    def test_new_trigger_type_raises_exception(self):
+    def test_new_trigger_type_raises_exception(self) -> None:
         trigger = WorkflowTrigger.objects.create(
             type=99,
         )
@@ -2523,7 +2525,7 @@ class TestWorkflows(
         )
         self.assertRaises(Exception, document_matches_workflow, doc, w, 99)
 
-    def test_removal_action_document_updated_workflow(self):
+    def test_removal_action_document_updated_workflow(self) -> None:
         """
         GIVEN:
             - Workflow with removal action
@@ -2597,7 +2599,7 @@ class TestWorkflows(
         group_perms: QuerySet = get_groups_with_perms(doc)
         self.assertNotIn(self.group1, group_perms)
 
-    def test_removal_action_document_updated_removeall(self):
+    def test_removal_action_document_updated_removeall(self) -> None:
         """
         GIVEN:
             - Workflow with removal action with remove all fields set
@@ -2668,7 +2670,7 @@ class TestWorkflows(
         group_perms: QuerySet = get_groups_with_perms(doc)
         self.assertNotIn(self.group1, group_perms)
 
-    def test_removal_action_document_consumed(self):
+    def test_removal_action_document_consumed(self) -> None:
         """
         GIVEN:
             - Workflow with assignment and removal actions
@@ -2799,7 +2801,7 @@ class TestWorkflows(
         expected_str = f"Document matched {trigger} from {w}"
         self.assertIn(expected_str, info)
 
-    def test_removal_action_document_consumed_remove_all(self):
+    def test_removal_action_document_consumed_remove_all(self) -> None:
         """
         GIVEN:
             - Workflow with assignment and removal actions with remove all fields set
@@ -2910,7 +2912,7 @@ class TestWorkflows(
         expected_str = f"Document matched {trigger} from {w}"
         self.assertIn(expected_str, info)
 
-    def test_workflow_with_tag_actions_doesnt_overwrite_other_actions(self):
+    def test_workflow_with_tag_actions_doesnt_overwrite_other_actions(self) -> None:
         """
         GIVEN:
             - Document updated workflow filtered by has tag with two actions, first adds owner, second removes a tag
@@ -3040,7 +3042,7 @@ class TestWorkflows(
     )
     @mock.patch("httpx.post")
     @mock.patch("django.core.mail.message.EmailMessage.send")
-    def test_workflow_email_action(self, mock_email_send, mock_post):
+    def test_workflow_email_action(self, mock_email_send, mock_post) -> None:
         """
         GIVEN:
             - Document updated workflow with email action
@@ -3093,7 +3095,7 @@ class TestWorkflows(
         PAPERLESS_URL="http://localhost:8000",
     )
     @mock.patch("django.core.mail.message.EmailMessage.send")
-    def test_workflow_email_include_file(self, mock_email_send):
+    def test_workflow_email_include_file(self, mock_email_send) -> None:
         """
         GIVEN:
             - Document updated workflow with email action
@@ -3165,7 +3167,7 @@ class TestWorkflows(
         PAPERLESS_URL="http://localhost:8000",
         EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
     )
-    def test_workflow_email_attachment_uses_storage_filename(self):
+    def test_workflow_email_attachment_uses_storage_filename(self) -> None:
         """
         GIVEN:
             - Document updated workflow with include document action
@@ -3226,7 +3228,7 @@ class TestWorkflows(
     @override_settings(
         EMAIL_ENABLED=False,
     )
-    def test_workflow_email_action_no_email_setup(self):
+    def test_workflow_email_action_no_email_setup(self) -> None:
         """
         GIVEN:
             - Document updated workflow with email action
@@ -3273,7 +3275,7 @@ class TestWorkflows(
         PAPERLESS_URL="http://localhost:8000",
     )
     @mock.patch("django.core.mail.message.EmailMessage.send")
-    def test_workflow_email_action_fail(self, mock_email_send):
+    def test_workflow_email_action_fail(self, mock_email_send) -> None:
         """
         GIVEN:
             - Document updated workflow with email action
@@ -3323,7 +3325,11 @@ class TestWorkflows(
     )
     @mock.patch("httpx.post")
     @mock.patch("django.core.mail.message.EmailMessage.send")
-    def test_workflow_email_consumption_started(self, mock_email_send, mock_post):
+    def test_workflow_email_consumption_started(
+        self,
+        mock_email_send,
+        mock_post,
+    ) -> None:
         """
         GIVEN:
             - Workflow with email action and consumption trigger
@@ -3382,7 +3388,7 @@ class TestWorkflows(
         BASE_URL="/paperless/",
     )
     @mock.patch("documents.workflows.webhooks.send_webhook.delay")
-    def test_workflow_webhook_action_body(self, mock_post):
+    def test_workflow_webhook_action_body(self, mock_post) -> None:
         """
         GIVEN:
             - Document updated workflow with webhook action which uses body
@@ -3444,7 +3450,7 @@ class TestWorkflows(
         PAPERLESS_URL="http://localhost:8000",
     )
     @mock.patch("documents.workflows.webhooks.send_webhook.delay")
-    def test_workflow_webhook_action_w_files(self, mock_post):
+    def test_workflow_webhook_action_w_files(self, mock_post) -> None:
         """
         GIVEN:
             - Document updated workflow with webhook action which includes document
@@ -3505,7 +3511,7 @@ class TestWorkflows(
     @override_settings(
         PAPERLESS_URL="http://localhost:8000",
     )
-    def test_workflow_webhook_action_fail(self):
+    def test_workflow_webhook_action_fail(self) -> None:
         """
         GIVEN:
             - Document updated workflow with webhook action
@@ -3552,7 +3558,7 @@ class TestWorkflows(
             expected_str = "Error occurred sending webhook"
             self.assertIn(expected_str, cm.output[0])
 
-    def test_workflow_webhook_action_url_invalid_params_headers(self):
+    def test_workflow_webhook_action_url_invalid_params_headers(self) -> None:
         """
         GIVEN:
             - Document updated workflow with webhook action
@@ -3598,7 +3604,7 @@ class TestWorkflows(
             self.assertIn(expected_str, cm.output[1])
 
     @mock.patch("httpx.Client.post")
-    def test_workflow_webhook_send_webhook_task(self, mock_post):
+    def test_workflow_webhook_send_webhook_task(self, mock_post) -> None:
         mock_post.return_value = mock.Mock(
             status_code=200,
             json=mock.Mock(return_value={"status": "ok"}),
@@ -3638,7 +3644,7 @@ class TestWorkflows(
             )
 
     @mock.patch("httpx.Client.post")
-    def test_workflow_webhook_send_webhook_retry(self, mock_http):
+    def test_workflow_webhook_send_webhook_retry(self, mock_http) -> None:
         mock_http.return_value.raise_for_status = mock.Mock(
             side_effect=HTTPStatusError(
                 "Error",
@@ -3664,7 +3670,7 @@ class TestWorkflows(
                 self.assertIn(expected_str, cm.output[0])
 
     @mock.patch("documents.workflows.webhooks.send_webhook.delay")
-    def test_workflow_webhook_action_consumption(self, mock_post):
+    def test_workflow_webhook_action_consumption(self, mock_post) -> None:
         """
         GIVEN:
             - Workflow with webhook action and consumption trigger
@@ -3721,7 +3727,7 @@ class TestWebhookSend:
     def test_send_webhook_data_or_json(
         self,
         httpx_mock: HTTPXMock,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Nothing
@@ -3773,7 +3779,7 @@ def resolve_to(monkeypatch):
 
 
 class TestWebhookSecurity:
-    def test_blocks_invalid_scheme_or_hostname(self, httpx_mock: HTTPXMock):
+    def test_blocks_invalid_scheme_or_hostname(self, httpx_mock: HTTPXMock) -> None:
         """
         GIVEN:
             - Invalid URL schemes or hostnames
@@ -3801,7 +3807,7 @@ class TestWebhookSecurity:
             )
 
     @override_settings(WEBHOOKS_ALLOWED_PORTS=[80, 443])
-    def test_blocks_disallowed_port(self, httpx_mock: HTTPXMock):
+    def test_blocks_disallowed_port(self, httpx_mock: HTTPXMock) -> None:
         """
         GIVEN:
             - URL with a disallowed port
@@ -3822,7 +3828,11 @@ class TestWebhookSecurity:
         assert httpx_mock.get_request() is None
 
     @override_settings(WEBHOOKS_ALLOW_INTERNAL_REQUESTS=False)
-    def test_blocks_private_loopback_linklocal(self, httpx_mock: HTTPXMock, resolve_to):
+    def test_blocks_private_loopback_linklocal(
+        self,
+        httpx_mock: HTTPXMock,
+        resolve_to,
+    ) -> None:
         """
         GIVEN:
             - URL with a private, loopback, or link-local IP address
@@ -3842,7 +3852,11 @@ class TestWebhookSecurity:
                 as_json=False,
             )
 
-    def test_allows_public_ip_and_sends(self, httpx_mock: HTTPXMock, resolve_to):
+    def test_allows_public_ip_and_sends(
+        self,
+        httpx_mock: HTTPXMock,
+        resolve_to,
+    ) -> None:
         """
         GIVEN:
             - URL with a public IP address
@@ -3866,7 +3880,7 @@ class TestWebhookSecurity:
         assert req.url.host == "52.207.186.75"
         assert req.headers["host"] == "paperless-ngx.com"
 
-    def test_follow_redirects_disabled(self, httpx_mock: HTTPXMock, resolve_to):
+    def test_follow_redirects_disabled(self, httpx_mock: HTTPXMock, resolve_to) -> None:
         """
         GIVEN:
             - A URL that redirects
@@ -3894,7 +3908,11 @@ class TestWebhookSecurity:
 
         assert len(httpx_mock.get_requests()) == 1
 
-    def test_strips_user_supplied_host_header(self, httpx_mock: HTTPXMock, resolve_to):
+    def test_strips_user_supplied_host_header(
+        self,
+        httpx_mock: HTTPXMock,
+        resolve_to,
+    ) -> None:
         """
         GIVEN:
             - A URL with a user-supplied Host header
index 88dddc557aae976b9ba93269e644d6fba03af0cf..dc89322c9bda6f4f3165d1c11285927869c087d1 100644 (file)
@@ -68,7 +68,7 @@ def setup_directories():
     return dirs
 
 
-def remove_dirs(dirs):
+def remove_dirs(dirs) -> None:
     shutil.rmtree(dirs.media_dir, ignore_errors=True)
     shutil.rmtree(dirs.data_dir, ignore_errors=True)
     shutil.rmtree(dirs.scratch_dir, ignore_errors=True)
@@ -169,23 +169,23 @@ class FileSystemAssertsMixin:
     Utilities for checking various state information of the file system
     """
 
-    def assertIsFile(self, path: PathLike | str):
+    def assertIsFile(self, path: PathLike | str) -> None:
         self.assertTrue(Path(path).resolve().is_file(), f"File does not exist: {path}")
 
-    def assertIsNotFile(self, path: PathLike | str):
+    def assertIsNotFile(self, path: PathLike | str) -> None:
         self.assertFalse(Path(path).resolve().is_file(), f"File does exist: {path}")
 
-    def assertIsDir(self, path: PathLike | str):
+    def assertIsDir(self, path: PathLike | str) -> None:
         self.assertTrue(Path(path).resolve().is_dir(), f"Dir does not exist: {path}")
 
-    def assertIsNotDir(self, path: PathLike | str):
+    def assertIsNotDir(self, path: PathLike | str) -> None:
         self.assertFalse(Path(path).resolve().is_dir(), f"Dir does exist: {path}")
 
     def assertFilesEqual(
         self,
         path1: PathLike | str,
         path2: PathLike | str,
-    ):
+    ) -> None:
         path1 = Path(path1)
         path2 = Path(path2)
         import hashlib
@@ -195,7 +195,7 @@ class FileSystemAssertsMixin:
 
         self.assertEqual(hash1, hash2, "File SHA256 mismatch")
 
-    def assertFileCountInDir(self, path: PathLike | str, count: int):
+    def assertFileCountInDir(self, path: PathLike | str, count: int) -> None:
         path = Path(path).resolve()
         self.assertTrue(path.is_dir(), f"Path {path} is not a directory")
         files = [x for x in path.iterdir() if x.is_file()]
@@ -293,7 +293,7 @@ class TestMigrations(TransactionTestCase):
     migrate_to = None
     auto_migrate = True
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         assert self.migrate_from and self.migrate_to, (
@@ -316,7 +316,7 @@ class TestMigrations(TransactionTestCase):
         if self.auto_migrate:
             self.performMigration()
 
-    def performMigration(self):
+    def performMigration(self) -> None:
         # Run the migration to test
         executor = MigrationExecutor(connection)
         executor.loader.build_graph()  # reload.
@@ -324,10 +324,10 @@ class TestMigrations(TransactionTestCase):
 
         self.apps = executor.loader.project_state(self.migrate_to).apps
 
-    def setUpBeforeMigration(self, apps):
+    def setUpBeforeMigration(self, apps) -> None:
         pass
 
-    def tearDown(self):
+    def tearDown(self) -> None:
         """
         Ensure the database schema is restored to the latest migration after
         each migration test, so subsequent tests run against HEAD.
@@ -404,7 +404,7 @@ class DummyProgressManager:
         self.open()
         return self
 
-    def __exit__(self, exc_type, exc_val, exc_tb):
+    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
         self.close()
 
     def open(self) -> None:
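
The `__exit__` annotation above is a case where `-> None` carries real information for mypy: the declared return type of `__exit__` is what mypy consults when deciding whether a context manager may suppress exceptions, so `-> None` (rather than `-> bool`) keeps exception-reachability analysis precise for callers. A minimal, self-contained sketch of the same pattern; the class and names below are hypothetical, not taken from the codebase:

    from types import TracebackType


    class NullProgressManager:
        """Illustrative context manager that never suppresses exceptions."""

        def __enter__(self) -> "NullProgressManager":
            return self

        def __exit__(
            self,
            exc_type: type[BaseException] | None,
            exc_val: BaseException | None,
            exc_tb: TracebackType | None,
        ) -> None:
            # Returning None (falsy) means exceptions raised inside the
            # with-block always propagate; mypy's reachability analysis
            # relies on this declared return type as well.
            return None
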
index df1fa982793ef9fcd591819771986cdd7c3d80f1..fbd72b10d13395cdb89a58619b42094d50169346 100644 (file)
@@ -1460,7 +1460,7 @@ class ChatStreamingView(GenericAPIView):
     ),
 )
 class UnifiedSearchViewSet(DocumentViewSet):
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         super().__init__(*args, **kwargs)
         self.searcher = None
 
@@ -1638,7 +1638,7 @@ class SavedViewViewSet(ModelViewSet, PassUserMixin):
             .prefetch_related("filter_rules")
         )
 
-    def perform_create(self, serializer):
+    def perform_create(self, serializer) -> None:
         serializer.save(owner=self.request.user)
 
 
index 819d8d5ff02d4986647ba6269af5c4ab286eb280..e95c77ae44d24bcf8de792ef62614cba3d620e56 100644 (file)
@@ -10,7 +10,7 @@ class PaperlessConfig(AppConfig):
 
     verbose_name = _("Paperless")
 
-    def ready(self):
+    def ready(self) -> None:
         from django.contrib.auth.signals import user_login_failed
 
         user_login_failed.connect(handle_failed_login)
index c68d63cf086e3cd2f957c78ba21e9ddfc39ec141..2503fb8cbbc298da3f38df1d13d5efbbd63e6af0 100644 (file)
@@ -14,7 +14,7 @@ logger = logging.getLogger("paperless.auth")
 
 
 class AutoLoginMiddleware(MiddlewareMixin):
-    def process_request(self, request: HttpRequest):
+    def process_request(self, request: HttpRequest) -> None:
         # Don't use auto-login with token requests
         if request.path.startswith("/api/token/") and request.method == "POST":
             return None
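
The `AutoLoginMiddleware.process_request` annotation documents more than the absence of a return value: Django allows a `process_request` hook to return either `None` (continue processing) or an `HttpResponse` (short-circuit the rest of the chain), so `-> None` asserts that this middleware never short-circuits. A hedged sketch of the wider variant, with a hypothetical middleware class and path:

    from django.http import HttpRequest, HttpResponse
    from django.utils.deprecation import MiddlewareMixin


    class HealthCheckMiddleware(MiddlewareMixin):
        """Illustrative only: a process_request hook that may short-circuit."""

        def process_request(self, request: HttpRequest) -> HttpResponse | None:
            if request.path == "/healthz":
                # Returning a response here skips later middleware and the view.
                return HttpResponse("ok")
            # Returning None tells Django to continue normal request handling.
            return None
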
index 3d045f444c80ce09b4a52cfd5c2dbe8bcbd8dce2..40f9a006f99b5a8798ce65ec1064aaa6a5225351 100644 (file)
@@ -34,20 +34,20 @@ class StatusConsumer(WebsocketConsumer):
             )
             raise AcceptConnection
 
-    def disconnect(self, close_code):
+    def disconnect(self, close_code) -> None:
         async_to_sync(self.channel_layer.group_discard)(
             "status_updates",
             self.channel_name,
         )
 
-    def status_update(self, event):
+    def status_update(self, event) -> None:
         if not self._authenticated():
             self.close()
         else:
             if self._can_view(event["data"]):
                 self.send(json.dumps(event))
 
-    def documents_deleted(self, event):
+    def documents_deleted(self, event) -> None:
         if not self._authenticated():
             self.close()
         else:
index b8268b5c0743a7c099e34942e1cc69d8ed955e2c..9a3c88415a121e305260aa8ab2dc073aaf88487b 100644 (file)
@@ -13,5 +13,5 @@ def custom_get_table_cache_key(db_alias, table):
     return PREFIX + get_table_cache_key(db_alias, table)
 
 
-def invalidate_db_cache():
+def invalidate_db_cache() -> None:
     return cachalot_invalidate(cache_alias="read-cache")
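
The `invalidate_db_cache` hunk shows a small but useful edge case: a function annotated `-> None` may still `return` an expression, provided mypy infers that expression's type as `None`, which is why `return cachalot_invalidate(...)` still type-checks. A minimal sketch with hypothetical helper names:

    def _flush_backend(alias: str) -> None:
        """Hypothetical stand-in for a cache-invalidation helper that returns None."""


    def invalidate(alias: str = "read-cache") -> None:
        # Accepted by mypy: the returned expression is itself typed as None,
        # so this does not violate the declared -> None contract. Returning
        # an actual value here would be flagged as [return-value].
        return _flush_backend(alias)

The same rule is what keeps patterns like `return super().setUp()` elsewhere in this diff mypy-clean.
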
index dbef3fde786443c7839ad7cf6fd10bd69e0d2d40..767749dd299ff5b8edbdfae4e248d8c53b3fd9fd 100644 (file)
@@ -18,7 +18,7 @@ from paperless.adapter import DrfTokenStrategy
 
 
 class TestCustomAccountAdapter(TestCase):
-    def test_is_open_for_signup(self):
+    def test_is_open_for_signup(self) -> None:
         adapter = get_adapter()
 
         # With no accounts, signups should be allowed
@@ -34,7 +34,7 @@ class TestCustomAccountAdapter(TestCase):
         settings.ACCOUNT_ALLOW_SIGNUPS = False
         self.assertFalse(adapter.is_open_for_signup(None))
 
-    def test_is_safe_url(self):
+    def test_is_safe_url(self) -> None:
         request = HttpRequest()
         request.get_host = mock.Mock(return_value="example.com")
         with context.request_context(request):
@@ -59,7 +59,7 @@ class TestCustomAccountAdapter(TestCase):
             self.assertFalse(adapter.is_safe_url(url))
 
     @mock.patch("allauth.core.internal.ratelimit.consume", return_value=True)
-    def test_pre_authenticate(self, mock_consume):
+    def test_pre_authenticate(self, mock_consume) -> None:
         adapter = get_adapter()
         request = HttpRequest()
         request.get_host = mock.Mock(return_value="example.com")
@@ -71,7 +71,7 @@ class TestCustomAccountAdapter(TestCase):
         with self.assertRaises(ValidationError):
             adapter.pre_authenticate(request)
 
-    def test_get_reset_password_from_key_url(self):
+    def test_get_reset_password_from_key_url(self) -> None:
         request = HttpRequest()
         request.get_host = mock.Mock(return_value="foo.org")
         with context.request_context(request):
@@ -93,7 +93,7 @@ class TestCustomAccountAdapter(TestCase):
                 )
 
     @override_settings(ACCOUNT_DEFAULT_GROUPS=["group1", "group2"])
-    def test_save_user_adds_groups(self):
+    def test_save_user_adds_groups(self) -> None:
         Group.objects.create(name="group1")
         user = User.objects.create_user("testuser")
         adapter = get_adapter()
@@ -110,7 +110,7 @@ class TestCustomAccountAdapter(TestCase):
         self.assertTrue(user.groups.filter(name="group1").exists())
         self.assertFalse(user.groups.filter(name="group2").exists())
 
-    def test_fresh_install_save_creates_superuser(self):
+    def test_fresh_install_save_creates_superuser(self) -> None:
         adapter = get_adapter()
         form = mock.Mock(
             cleaned_data={
@@ -133,7 +133,7 @@ class TestCustomAccountAdapter(TestCase):
 
 
 class TestCustomSocialAccountAdapter(TestCase):
-    def test_is_open_for_signup(self):
+    def test_is_open_for_signup(self) -> None:
         adapter = get_social_adapter()
 
         # Test when SOCIALACCOUNT_ALLOW_SIGNUPS is True
@@ -144,7 +144,7 @@ class TestCustomSocialAccountAdapter(TestCase):
         settings.SOCIALACCOUNT_ALLOW_SIGNUPS = False
         self.assertFalse(adapter.is_open_for_signup(None, None))
 
-    def test_get_connect_redirect_url(self):
+    def test_get_connect_redirect_url(self) -> None:
         adapter = get_social_adapter()
         request = None
         socialaccount = None
@@ -157,7 +157,7 @@ class TestCustomSocialAccountAdapter(TestCase):
         )
 
     @override_settings(SOCIAL_ACCOUNT_DEFAULT_GROUPS=["group1", "group2"])
-    def test_save_user_adds_groups(self):
+    def test_save_user_adds_groups(self) -> None:
         Group.objects.create(name="group1")
         adapter = get_social_adapter()
         request = HttpRequest()
@@ -172,7 +172,7 @@ class TestCustomSocialAccountAdapter(TestCase):
         self.assertTrue(user.groups.filter(name="group1").exists())
         self.assertFalse(user.groups.filter(name="group2").exists())
 
-    def test_error_logged_on_authentication_error(self):
+    def test_error_logged_on_authentication_error(self) -> None:
         adapter = get_social_adapter()
         request = HttpRequest()
         with self.assertLogs("paperless.auth", level="INFO") as log_cm:
@@ -188,7 +188,7 @@ class TestCustomSocialAccountAdapter(TestCase):
 
 
 class TestDrfTokenStrategy(TestCase):
-    def test_create_access_token_creates_new_token(self):
+    def test_create_access_token_creates_new_token(self) -> None:
         """
         GIVEN:
             - A user with no existing DRF token
@@ -213,7 +213,7 @@ class TestDrfTokenStrategy(TestCase):
         token = Token.objects.get(user=user)
         self.assertEqual(token_key, token.key)
 
-    def test_create_access_token_returns_existing_token(self):
+    def test_create_access_token_returns_existing_token(self) -> None:
         """
         GIVEN:
             - A user with an existing DRF token
@@ -238,7 +238,7 @@ class TestDrfTokenStrategy(TestCase):
         # Verify only one token exists (no duplicate created)
         self.assertEqual(Token.objects.filter(user=user).count(), 1)
 
-    def test_create_access_token_returns_none_for_unauthenticated_user(self):
+    def test_create_access_token_returns_none_for_unauthenticated_user(self) -> None:
         """
         GIVEN:
             - An unauthenticated request
index 781956ff6153c1ce193fdb53d722bd8af2a0b09f..30fdde182aab75648f528c9125339da0fb18f721 100644 (file)
@@ -15,14 +15,14 @@ from paperless.checks import settings_values_check
 
 
 class TestChecks(DirectoriesMixin, TestCase):
-    def test_binaries(self):
+    def test_binaries(self) -> None:
         self.assertEqual(binaries_check(None), [])
 
     @override_settings(CONVERT_BINARY="uuuhh")
-    def test_binaries_fail(self):
+    def test_binaries_fail(self) -> None:
         self.assertEqual(len(binaries_check(None)), 1)
 
-    def test_paths_check(self):
+    def test_paths_check(self) -> None:
         self.assertEqual(paths_check(None), [])
 
     @override_settings(
@@ -30,14 +30,14 @@ class TestChecks(DirectoriesMixin, TestCase):
         DATA_DIR=Path("whatever"),
         CONSUMPTION_DIR=Path("idontcare"),
     )
-    def test_paths_check_dont_exist(self):
+    def test_paths_check_dont_exist(self) -> None:
         msgs = paths_check(None)
         self.assertEqual(len(msgs), 3, str(msgs))
 
         for msg in msgs:
             self.assertTrue(msg.msg.endswith("is set but doesn't exist."))
 
-    def test_paths_check_no_access(self):
+    def test_paths_check_no_access(self) -> None:
         Path(self.dirs.data_dir).chmod(0o000)
         Path(self.dirs.media_dir).chmod(0o000)
         Path(self.dirs.consumption_dir).chmod(0o000)
@@ -53,16 +53,16 @@ class TestChecks(DirectoriesMixin, TestCase):
             self.assertTrue(msg.msg.endswith("is not writeable"))
 
     @override_settings(DEBUG=False)
-    def test_debug_disabled(self):
+    def test_debug_disabled(self) -> None:
         self.assertEqual(debug_mode_check(None), [])
 
     @override_settings(DEBUG=True)
-    def test_debug_enabled(self):
+    def test_debug_enabled(self) -> None:
         self.assertEqual(len(debug_mode_check(None)), 1)
 
 
 class TestSettingsChecksAgainstDefaults(DirectoriesMixin, TestCase):
-    def test_all_valid(self):
+    def test_all_valid(self) -> None:
         """
         GIVEN:
             - Default settings
@@ -77,7 +77,7 @@ class TestSettingsChecksAgainstDefaults(DirectoriesMixin, TestCase):
 
 class TestOcrSettingsChecks(DirectoriesMixin, TestCase):
     @override_settings(OCR_OUTPUT_TYPE="notapdf")
-    def test_invalid_output_type(self):
+    def test_invalid_output_type(self) -> None:
         """
         GIVEN:
             - Default settings
@@ -95,7 +95,7 @@ class TestOcrSettingsChecks(DirectoriesMixin, TestCase):
         self.assertIn('OCR output type "notapdf"', msg.msg)
 
     @override_settings(OCR_MODE="makeitso")
-    def test_invalid_ocr_type(self):
+    def test_invalid_ocr_type(self) -> None:
         """
         GIVEN:
             - Default settings
@@ -113,7 +113,7 @@ class TestOcrSettingsChecks(DirectoriesMixin, TestCase):
         self.assertIn('OCR output mode "makeitso"', msg.msg)
 
     @override_settings(OCR_MODE="skip_noarchive")
-    def test_deprecated_ocr_type(self):
+    def test_deprecated_ocr_type(self) -> None:
         """
         GIVEN:
             - Default settings
@@ -131,7 +131,7 @@ class TestOcrSettingsChecks(DirectoriesMixin, TestCase):
         self.assertIn("deprecated", msg.msg)
 
     @override_settings(OCR_SKIP_ARCHIVE_FILE="invalid")
-    def test_invalid_ocr_skip_archive_file(self):
+    def test_invalid_ocr_skip_archive_file(self) -> None:
         """
         GIVEN:
             - Default settings
@@ -149,7 +149,7 @@ class TestOcrSettingsChecks(DirectoriesMixin, TestCase):
         self.assertIn('OCR_SKIP_ARCHIVE_FILE setting "invalid"', msg.msg)
 
     @override_settings(OCR_CLEAN="cleanme")
-    def test_invalid_ocr_clean(self):
+    def test_invalid_ocr_clean(self) -> None:
         """
         GIVEN:
             - Default settings
@@ -169,7 +169,7 @@ class TestOcrSettingsChecks(DirectoriesMixin, TestCase):
 
 class TestTimezoneSettingsChecks(DirectoriesMixin, TestCase):
     @override_settings(TIME_ZONE="TheMoon\\MyCrater")
-    def test_invalid_timezone(self):
+    def test_invalid_timezone(self) -> None:
         """
         GIVEN:
             - Default settings
@@ -189,7 +189,7 @@ class TestTimezoneSettingsChecks(DirectoriesMixin, TestCase):
 
 class TestBarcodeSettingsChecks(DirectoriesMixin, TestCase):
     @override_settings(CONSUMER_BARCODE_SCANNER="Invalid")
-    def test_barcode_scanner_invalid(self):
+    def test_barcode_scanner_invalid(self) -> None:
         msgs = settings_values_check(None)
         self.assertEqual(len(msgs), 1)
 
@@ -198,7 +198,7 @@ class TestBarcodeSettingsChecks(DirectoriesMixin, TestCase):
         self.assertIn('Invalid Barcode Scanner "Invalid"', msg.msg)
 
     @override_settings(CONSUMER_BARCODE_SCANNER="")
-    def test_barcode_scanner_empty(self):
+    def test_barcode_scanner_empty(self) -> None:
         msgs = settings_values_check(None)
         self.assertEqual(len(msgs), 1)
 
@@ -207,14 +207,14 @@ class TestBarcodeSettingsChecks(DirectoriesMixin, TestCase):
         self.assertIn('Invalid Barcode Scanner ""', msg.msg)
 
     @override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
-    def test_barcode_scanner_valid(self):
+    def test_barcode_scanner_valid(self) -> None:
         msgs = settings_values_check(None)
         self.assertEqual(len(msgs), 0)
 
 
 class TestEmailCertSettingsChecks(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     @override_settings(EMAIL_CERTIFICATE_FILE=Path("/tmp/not_actually_here.pem"))
-    def test_not_valid_file(self):
+    def test_not_valid_file(self) -> None:
         """
         GIVEN:
             - Default settings
@@ -236,7 +236,7 @@ class TestEmailCertSettingsChecks(DirectoriesMixin, FileSystemAssertsMixin, Test
 
 
 class TestAuditLogChecks(TestCase):
-    def test_was_enabled_once(self):
+    def test_was_enabled_once(self) -> None:
         """
         GIVEN:
             - Audit log is not enabled
index ae9066ca1787b10292c45cfa592aa98f4e43fc4e..f190bb7d63b11910f97623c981ab20df746c2b36 100644 (file)
@@ -15,7 +15,7 @@ from paperless.settings import _parse_cachalot_settings
 from paperless.settings import _parse_caches
 
 
-def test_all_redis_caches_have_same_custom_prefix(monkeypatch):
+def test_all_redis_caches_have_same_custom_prefix(monkeypatch) -> None:
     """
     Check that when setting a custom Redis prefix,
     it is set for both the Django default cache and the read cache.
@@ -29,7 +29,7 @@ def test_all_redis_caches_have_same_custom_prefix(monkeypatch):
 
 
 class TestDbCacheSettings:
-    def test_cachalot_default_settings(self):
+    def test_cachalot_default_settings(self) -> None:
         # Cachalot must be installed even if disabled,
         # so the cache can be invalidated anytime
         assert "cachalot" not in settings.INSTALLED_APPS
@@ -62,7 +62,7 @@ class TestDbCacheSettings:
             "PAPERLESS_READ_CACHE_TTL": "7200",
         },
     )
-    def test_cachalot_custom_settings(self):
+    def test_cachalot_custom_settings(self) -> None:
         settings = _parse_cachalot_settings()
 
         assert settings["CACHALOT_ENABLED"]
@@ -95,7 +95,7 @@ class TestDbCacheSettings:
         self,
         env_var_ttl: int,
         expected_cachalot_timeout: int,
-    ):
+    ) -> None:
         with patch.dict(os.environ, {"PAPERLESS_READ_CACHE_TTL": f"{env_var_ttl}"}):
             cachalot_timeout = _parse_cachalot_settings()["CACHALOT_TIMEOUT"]
             assert cachalot_timeout == expected_cachalot_timeout
@@ -106,7 +106,7 @@ class TestDbCacheSettings:
     CACHALOT_TIMEOUT=1,
 )
 @pytest.mark.django_db(transaction=True)
-def test_cache_hit_when_enabled():
+def test_cache_hit_when_enabled() -> None:
     cachalot_settings.reload()
 
     assert cachalot_settings.CACHALOT_ENABLED
@@ -141,7 +141,7 @@ def test_cache_hit_when_enabled():
 
 
 @pytest.mark.django_db(transaction=True)
-def test_cache_is_disabled_by_default():
+def test_cache_is_disabled_by_default() -> None:
     cachalot_settings.reload()
     # Invalidate the cache just in case
     invalidate_db_cache()
index 78b3393d5665132f830f1dc4c8d6b808dbd7f645..f98097d03a7bb185821270d30d7c507a4daa36e4 100644 (file)
@@ -12,14 +12,14 @@ from paperless.settings import _parse_remote_user_settings
 
 
 class TestRemoteUser(DirectoriesMixin, APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_superuser(
             username="temp_admin",
         )
 
-    def test_remote_user(self):
+    def test_remote_user(self) -> None:
         """
         GIVEN:
             - Configured user
@@ -54,7 +54,7 @@ class TestRemoteUser(DirectoriesMixin, APITestCase):
 
             self.assertEqual(response.status_code, status.HTTP_200_OK)
 
-    def test_remote_user_api(self):
+    def test_remote_user_api(self) -> None:
         """
         GIVEN:
             - Configured user
@@ -100,7 +100,7 @@ class TestRemoteUser(DirectoriesMixin, APITestCase):
             ],
         },
     )
-    def test_remote_user_api_disabled(self):
+    def test_remote_user_api_disabled(self) -> None:
         """
         GIVEN:
             - Configured user
@@ -123,7 +123,7 @@ class TestRemoteUser(DirectoriesMixin, APITestCase):
             [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN],
         )
 
-    def test_remote_user_header_setting(self):
+    def test_remote_user_header_setting(self) -> None:
         """
         GIVEN:
             - Remote user header name is set
index 9957de4fc7521c3aa9af85f675be4e30cc7332b9..02db82ef228f8fe429111ddb390f023bb9251546 100644 (file)
@@ -21,7 +21,7 @@ class TestIgnoreDateParsing(TestCase):
     Tests the parsing of the PAPERLESS_IGNORE_DATES setting value
     """
 
-    def _parse_checker(self, test_cases):
+    def _parse_checker(self, test_cases) -> None:
         """
         Helper function to check ignore date parsing
 
@@ -34,7 +34,7 @@ class TestIgnoreDateParsing(TestCase):
                 expected_date_set,
             )
 
-    def test_no_ignore_dates_set(self):
+    def test_no_ignore_dates_set(self) -> None:
         """
         GIVEN:
             - No ignore dates are set
@@ -43,7 +43,7 @@ class TestIgnoreDateParsing(TestCase):
         """
         self.assertSetEqual(_parse_ignore_dates(""), set())
 
-    def test_single_ignore_dates_set(self):
+    def test_single_ignore_dates_set(self) -> None:
         """
         GIVEN:
             - Ignore dates are set per certain inputs
@@ -70,7 +70,7 @@ class TestIgnoreDateParsing(TestCase):
 
 
 class TestThreadCalculation(TestCase):
-    def test_workers_threads(self):
+    def test_workers_threads(self) -> None:
         """
         GIVEN:
             - Certain CPU counts
@@ -96,7 +96,7 @@ class TestThreadCalculation(TestCase):
 
 
 class TestRedisSocketConversion(TestCase):
-    def test_redis_socket_parsing(self):
+    def test_redis_socket_parsing(self) -> None:
         """
         GIVEN:
             - Various Redis connection URI formats
@@ -163,7 +163,7 @@ class TestCeleryScheduleParsing(TestCase):
     LLM_INDEX_EXPIRE_TIME = 23.0 * 60.0 * 60.0
     CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME = 23.0 * 60.0 * 60.0
 
-    def test_schedule_configuration_default(self):
+    def test_schedule_configuration_default(self) -> None:
         """
         GIVEN:
             - No configured task schedules
@@ -224,7 +224,7 @@ class TestCeleryScheduleParsing(TestCase):
             schedule,
         )
 
-    def test_schedule_configuration_changed(self):
+    def test_schedule_configuration_changed(self) -> None:
         """
         GIVEN:
             - Email task is configured non-default
@@ -290,7 +290,7 @@ class TestCeleryScheduleParsing(TestCase):
             schedule,
         )
 
-    def test_schedule_configuration_disabled(self):
+    def test_schedule_configuration_disabled(self) -> None:
         """
         GIVEN:
             - Search index task is disabled
@@ -348,7 +348,7 @@ class TestCeleryScheduleParsing(TestCase):
             schedule,
         )
 
-    def test_schedule_configuration_disabled_all(self):
+    def test_schedule_configuration_disabled_all(self) -> None:
         """
         GIVEN:
             - All tasks are disabled
@@ -379,7 +379,7 @@ class TestCeleryScheduleParsing(TestCase):
 
 
 class TestDBSettings(TestCase):
-    def test_db_timeout_with_sqlite(self):
+    def test_db_timeout_with_sqlite(self) -> None:
         """
         GIVEN:
             - PAPERLESS_DB_TIMEOUT is set
@@ -403,7 +403,7 @@ class TestDBSettings(TestCase):
                 databases["default"]["OPTIONS"],
             )
 
-    def test_db_timeout_with_not_sqlite(self):
+    def test_db_timeout_with_not_sqlite(self) -> None:
         """
         GIVEN:
             - PAPERLESS_DB_TIMEOUT is set but db is not sqlite
@@ -437,7 +437,7 @@ class TestDBSettings(TestCase):
 
 
 class TestPaperlessURLSettings(TestCase):
-    def test_paperless_url(self):
+    def test_paperless_url(self) -> None:
         """
         GIVEN:
             - PAPERLESS_URL is set
@@ -461,7 +461,7 @@ class TestPaperlessURLSettings(TestCase):
 
 
 class TestPathSettings(TestCase):
-    def test_default_paths(self):
+    def test_default_paths(self) -> None:
         """
         GIVEN:
             - PAPERLESS_FORCE_SCRIPT_NAME is not set
@@ -481,7 +481,7 @@ class TestPathSettings(TestCase):
         )  # LOGOUT_REDIRECT_URL
 
     @mock.patch("os.environ", {"PAPERLESS_FORCE_SCRIPT_NAME": "/paperless"})
-    def test_subpath(self):
+    def test_subpath(self) -> None:
         """
         GIVEN:
             - PAPERLESS_FORCE_SCRIPT_NAME is set
@@ -507,7 +507,7 @@ class TestPathSettings(TestCase):
             "PAPERLESS_LOGOUT_REDIRECT_URL": "/foobar/",
         },
     )
-    def test_subpath_with_explicit_logout_url(self):
+    def test_subpath_with_explicit_logout_url(self) -> None:
         """
         GIVEN:
             - PAPERLESS_FORCE_SCRIPT_NAME is set and so is PAPERLESS_LOGOUT_REDIRECT_URL
@@ -537,5 +537,5 @@ class TestPathSettings(TestCase):
         ("en+zh-Hans+zh-Hant", ["en", "zh-Hans", "zh-Hant", "zh"]),
     ],
 )
-def test_parser_date_parser_languages(languages, expected):
+def test_parser_date_parser_languages(languages, expected) -> None:
     assert sorted(_parse_dateparser_languages(languages)) == sorted(expected)
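
A note on the pattern in this file: every test method returns nothing, so each one gains an explicit `-> None`. Assuming the project's mypy configuration disallows untyped defs (an assumption, not shown in this commit), that single annotation is what moves a zero-argument method from "untyped, body not checked" to "annotated, body type-checked". A minimal sketch:

    # Sketch only -- the flag is an assumed mypy setting, not taken from this commit:
    #   mypy --disallow-untyped-defs test_example.py

    class TestExample:
        def test_untyped(self):  # error: Function is missing a return type annotation
            assert 1 + 1 == 2

        def test_typed(self) -> None:  # fully annotated; mypy now checks the body
            assert 1 + 1 == 2
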
index a21f3b6601e93e632dbccaca03d05a6c9621c43a..0cf087b1eb6de38dec878231afcff4dc7ae1ef0a 100644 (file)
@@ -12,14 +12,14 @@ from paperless.signals import handle_social_account_updated
 
 
 class TestFailedLoginLogging(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.creds = {
             "username": "john lennon",
         }
 
-    def test_unauthenticated(self):
+    def test_unauthenticated(self) -> None:
         """
         GIVEN:
             - Request with no authentication provided
@@ -39,7 +39,7 @@ class TestFailedLoginLogging(TestCase):
                 ],
             )
 
-    def test_none(self):
+    def test_none(self) -> None:
         """
         GIVEN:
             - Request with no IP possible
@@ -60,7 +60,7 @@ class TestFailedLoginLogging(TestCase):
                 ],
             )
 
-    def test_public(self):
+    def test_public(self) -> None:
         """
         GIVEN:
             - Request with publicly routeable IP
@@ -83,7 +83,7 @@ class TestFailedLoginLogging(TestCase):
                 ],
             )
 
-    def test_private(self):
+    def test_private(self) -> None:
         """
         GIVEN:
             - Request with private range IP
@@ -110,7 +110,7 @@ class TestFailedLoginLogging(TestCase):
 
 class TestSyncSocialLoginGroups(TestCase):
     @override_settings(SOCIAL_ACCOUNT_SYNC_GROUPS=True)
-    def test_sync_enabled(self):
+    def test_sync_enabled(self) -> None:
         """
         GIVEN:
             - Enabled group syncing, a user, and a social login
@@ -137,7 +137,7 @@ class TestSyncSocialLoginGroups(TestCase):
         self.assertEqual(list(user.groups.all()), [group])
 
     @override_settings(SOCIAL_ACCOUNT_SYNC_GROUPS=False)
-    def test_sync_disabled(self):
+    def test_sync_disabled(self) -> None:
         """
         GIVEN:
             - Disabled group syncing, a user, and a social login
@@ -164,7 +164,7 @@ class TestSyncSocialLoginGroups(TestCase):
         self.assertEqual(list(user.groups.all()), [])
 
     @override_settings(SOCIAL_ACCOUNT_SYNC_GROUPS=True)
-    def test_no_groups(self):
+    def test_no_groups(self) -> None:
         """
         GIVEN:
             - Enabled group syncing, a user, and a social login with no groups
@@ -193,7 +193,7 @@ class TestSyncSocialLoginGroups(TestCase):
         self.assertEqual(list(user.groups.all()), [])
 
     @override_settings(SOCIAL_ACCOUNT_SYNC_GROUPS=True)
-    def test_userinfo_groups(self):
+    def test_userinfo_groups(self) -> None:
         """
         GIVEN:
             - Enabled group syncing, and `groups` nested under `userinfo`
@@ -224,7 +224,7 @@ class TestSyncSocialLoginGroups(TestCase):
         self.assertEqual(list(user.groups.all()), [group])
 
     @override_settings(SOCIAL_ACCOUNT_SYNC_GROUPS=True)
-    def test_id_token_groups_fallback(self):
+    def test_id_token_groups_fallback(self) -> None:
         """
         GIVEN:
             - Enabled group syncing, and `groups` only under `id_token`
@@ -261,7 +261,7 @@ class TestUserGroupDeletionCleanup(TestCase):
     from ui_settings
     """
 
-    def test_user_group_deletion_cleanup(self):
+    def test_user_group_deletion_cleanup(self) -> None:
         """
         GIVEN:
             - Existing user
@@ -302,7 +302,7 @@ class TestUserGroupDeletionCleanup(TestCase):
         self.assertEqual(permissions.get("default_view_groups"), [])
         self.assertEqual(permissions.get("default_change_groups"), [])
 
-    def test_user_group_deletion_error_handling(self):
+    def test_user_group_deletion_error_handling(self) -> None:
         """
         GIVEN:
             - Existing user and group
index e5358d61139d97acbb828ee61bc25f823a0f9768..eef7d00f32a17f6d568013c82c8c3a727f5c0a86 100644 (file)
@@ -19,7 +19,7 @@ TEST_CHANNEL_LAYERS = {
 
 @override_settings(CHANNEL_LAYERS=TEST_CHANNEL_LAYERS)
 class TestWebSockets(TestCase):
-    async def test_no_auth(self):
+    async def test_no_auth(self) -> None:
         communicator = WebsocketCommunicator(application, "/ws/status/")
         connected, _ = await communicator.connect()
         self.assertFalse(connected)
@@ -27,7 +27,7 @@ class TestWebSockets(TestCase):
 
     @mock.patch("paperless.consumers.StatusConsumer.close")
     @mock.patch("paperless.consumers.StatusConsumer._authenticated")
-    async def test_close_on_no_auth(self, _authenticated, mock_close):
+    async def test_close_on_no_auth(self, _authenticated, mock_close) -> None:
         _authenticated.return_value = True
 
         communicator = WebsocketCommunicator(application, "/ws/status/")
@@ -59,7 +59,7 @@ class TestWebSockets(TestCase):
         mock_close.assert_called_once()
 
     @mock.patch("paperless.consumers.StatusConsumer._authenticated")
-    async def test_auth(self, _authenticated):
+    async def test_auth(self, _authenticated) -> None:
         _authenticated.return_value = True
 
         communicator = WebsocketCommunicator(application, "/ws/status/")
@@ -69,7 +69,7 @@ class TestWebSockets(TestCase):
         await communicator.disconnect()
 
     @mock.patch("paperless.consumers.StatusConsumer._authenticated")
-    async def test_receive_status_update(self, _authenticated):
+    async def test_receive_status_update(self, _authenticated) -> None:
         _authenticated.return_value = True
 
         communicator = WebsocketCommunicator(application, "/ws/status/")
@@ -90,7 +90,7 @@ class TestWebSockets(TestCase):
 
         await communicator.disconnect()
 
-    async def test_status_update_check_perms(self):
+    async def test_status_update_check_perms(self) -> None:
         communicator = WebsocketCommunicator(application, "/ws/status/")
 
         communicator.scope["user"] = mock.Mock()
@@ -137,7 +137,7 @@ class TestWebSockets(TestCase):
         await communicator.disconnect()
 
     @mock.patch("paperless.consumers.StatusConsumer._authenticated")
-    async def test_receive_documents_deleted(self, _authenticated):
+    async def test_receive_documents_deleted(self, _authenticated) -> None:
         _authenticated.return_value = True
 
         communicator = WebsocketCommunicator(application, "/ws/status/")
@@ -159,7 +159,7 @@ class TestWebSockets(TestCase):
         await communicator.disconnect()
 
     @mock.patch("channels.layers.InMemoryChannelLayer.group_send")
-    def test_manager_send_progress(self, mock_group_send):
+    def test_manager_send_progress(self, mock_group_send) -> None:
         with ProgressManager(task_id="test") as manager:
             manager.send_progress(
                 ProgressStatusOptions.STARTED,
@@ -190,7 +190,7 @@ class TestWebSockets(TestCase):
         )
 
     @mock.patch("channels.layers.InMemoryChannelLayer.group_send")
-    def test_manager_send_documents_deleted(self, mock_group_send):
+    def test_manager_send_documents_deleted(self, mock_group_send) -> None:
         with DocumentsStatusManager() as manager:
             manager.send_documents_deleted([1, 2, 3])
 
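
The websocket tests above are coroutines, so the new annotation describes the awaited result: `async def test_auth(self) -> None` declares a coroutine that resolves to `None`. A small, self-contained illustration, independent of the paperless code:

    import asyncio

    async def check_connection() -> None:
        # On an "async def", "-> None" annotates the value the caller gets by
        # awaiting the coroutine, not the coroutine object itself.
        await asyncio.sleep(0)

    asyncio.run(check_connection())
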
index 1f52c56c720be95c8dd543c8f47dcaa2f9cb902e..62ff5f5c83ad96e46c79d41fce73675b2a2ca6de 100644 (file)
@@ -16,7 +16,7 @@ class AIClient:
     A client for interacting with an LLM backend.
     """
 
-    def __init__(self):
+    def __init__(self) -> None:
         self.settings = AIConfig()
         self.llm = self.get_llm()
 
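
Annotating `__init__` is worth a short aside: mypy only type-checks the body of a function it considers annotated, and for an `__init__` whose only parameter is `self`, `-> None` is the conventional way to opt in. A sketch unrelated to the AIClient internals:

    class Counter:
        def __init__(self) -> None:
            # Without "-> None" this __init__ is untyped and its body is skipped
            # by default; with it, attribute assignments are checked against the
            # declared types.
            self.count: int = 0

        def increment(self) -> None:
            self.count += 1
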
index 7505d49b0dae8f3fb5016b743fe578de88dfa62f..c36655f4dc6b8f85716d1f70e9cf1594d8676e4d 100644 (file)
@@ -60,7 +60,7 @@ class FakeEmbedding(BaseEmbedding):
 
 
 @pytest.mark.django_db
-def test_build_document_node(real_document):
+def test_build_document_node(real_document) -> None:
     nodes = indexing.build_document_node(real_document)
     assert len(nodes) > 0
     assert nodes[0].metadata["document_id"] == str(real_document.id)
@@ -71,7 +71,7 @@ def test_update_llm_index(
     temp_llm_index_dir,
     real_document,
     mock_embed_model,
-):
+) -> None:
     with patch("documents.models.Document.objects.all") as mock_all:
         mock_queryset = MagicMock()
         mock_queryset.exists.return_value = True
@@ -87,7 +87,7 @@ def test_update_llm_index_removes_meta(
     temp_llm_index_dir,
     real_document,
     mock_embed_model,
-):
+) -> None:
     # Pre-create a meta.json with incorrect data
     (temp_llm_index_dir / "meta.json").write_text(
         json.dumps({"embedding_model": "old", "dim": 1}),
@@ -117,7 +117,7 @@ def test_update_llm_index_partial_update(
     temp_llm_index_dir,
     real_document,
     mock_embed_model,
-):
+) -> None:
     doc2 = Document.objects.create(
         title="Test Document 2",
         content="This is some test content 2.",
@@ -166,7 +166,7 @@ def test_update_llm_index_partial_update(
 def test_get_or_create_storage_context_raises_exception(
     temp_llm_index_dir,
     mock_embed_model,
-):
+) -> None:
     with pytest.raises(Exception):
         indexing.get_or_create_storage_context(rebuild=False)
 
@@ -178,7 +178,7 @@ def test_load_or_build_index_builds_when_nodes_given(
     temp_llm_index_dir,
     real_document,
     mock_embed_model,
-):
+) -> None:
     with (
         patch(
             "paperless_ai.indexing.load_index_from_storage",
@@ -203,7 +203,7 @@ def test_load_or_build_index_builds_when_nodes_given(
 def test_load_or_build_index_raises_exception_when_no_nodes(
     temp_llm_index_dir,
     mock_embed_model,
-):
+) -> None:
     with (
         patch(
             "paperless_ai.indexing.load_index_from_storage",
@@ -222,7 +222,7 @@ def test_load_or_build_index_raises_exception_when_no_nodes(
 def test_load_or_build_index_succeeds_when_nodes_given(
     temp_llm_index_dir,
     mock_embed_model,
-):
+) -> None:
     with (
         patch(
             "paperless_ai.indexing.load_index_from_storage",
@@ -249,7 +249,7 @@ def test_add_or_update_document_updates_existing_entry(
     temp_llm_index_dir,
     real_document,
     mock_embed_model,
-):
+) -> None:
     indexing.update_llm_index(rebuild=True)
     indexing.llm_index_add_or_update_document(real_document)
 
@@ -261,7 +261,7 @@ def test_remove_document_deletes_node_from_docstore(
     temp_llm_index_dir,
     real_document,
     mock_embed_model,
-):
+) -> None:
     indexing.update_llm_index(rebuild=True)
     index = indexing.load_or_build_index()
     assert len(index.docstore.docs) == 1
@@ -275,7 +275,7 @@ def test_remove_document_deletes_node_from_docstore(
 def test_update_llm_index_no_documents(
     temp_llm_index_dir,
     mock_embed_model,
-):
+) -> None:
     with patch("documents.models.Document.objects.all") as mock_all:
         mock_queryset = MagicMock()
         mock_queryset.exists.return_value = False
@@ -291,7 +291,7 @@ def test_update_llm_index_no_documents(
 
 
 @pytest.mark.django_db
-def test_queue_llm_index_update_if_needed_enqueues_when_idle_or_skips_recent():
+def test_queue_llm_index_update_if_needed_enqueues_when_idle_or_skips_recent() -> None:
     # No existing tasks
     with patch("documents.tasks.llmindex_index") as mock_task:
         result = indexing.queue_llm_index_update_if_needed(
@@ -327,7 +327,7 @@ def test_queue_llm_index_update_if_needed_enqueues_when_idle_or_skips_recent():
 def test_query_similar_documents(
     temp_llm_index_dir,
     real_document,
-):
+) -> None:
     with (
         patch("paperless_ai.indexing.get_or_create_storage_context") as mock_storage,
         patch("paperless_ai.indexing.load_or_build_index") as mock_load_or_build_index,
@@ -374,7 +374,7 @@ def test_query_similar_documents(
 def test_query_similar_documents_triggers_update_when_index_missing(
     temp_llm_index_dir,
     real_document,
-):
+) -> None:
     with (
         patch(
             "paperless_ai.indexing.vector_store_file_exists",
index 688d78058e4bafa8cbee8a0696501edccf45b9d7..0a14425cfc8639b76967d8162bf170cbd4bc3a25 100644 (file)
@@ -40,7 +40,7 @@ def mock_document():
     return doc
 
 
-def test_stream_chat_with_one_document_full_content(mock_document):
+def test_stream_chat_with_one_document_full_content(mock_document) -> None:
     with (
         patch("paperless_ai.chat.AIClient") as mock_client_cls,
         patch("paperless_ai.chat.load_or_build_index") as mock_load_index,
@@ -71,7 +71,7 @@ def test_stream_chat_with_one_document_full_content(mock_document):
         assert output == ["chunk1", "chunk2"]
 
 
-def test_stream_chat_with_multiple_documents_retrieval(patch_embed_nodes):
+def test_stream_chat_with_multiple_documents_retrieval(patch_embed_nodes) -> None:
     with (
         patch("paperless_ai.chat.AIClient") as mock_client_cls,
         patch("paperless_ai.chat.load_or_build_index") as mock_load_index,
@@ -121,7 +121,7 @@ def test_stream_chat_with_multiple_documents_retrieval(patch_embed_nodes):
         assert output == ["chunk1", "chunk2"]
 
 
-def test_stream_chat_no_matching_nodes():
+def test_stream_chat_no_matching_nodes() -> None:
     with (
         patch("paperless_ai.chat.AIClient") as mock_client_cls,
         patch("paperless_ai.chat.load_or_build_index") as mock_load_index,
index 87a42a1a49957ad435c52c9e1c7ade1f436116c2..83cfd8a41a8d13a3bf7067ee8c7f3ce0842450c9 100644 (file)
@@ -14,7 +14,7 @@ from paperless_ai.matching import match_tags_by_name
 
 
 class TestAIMatching(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         # Create test data for Tag
         self.tag1 = Tag.objects.create(name="Test Tag 1")
         self.tag2 = Tag.objects.create(name="Test Tag 2")
@@ -32,7 +32,7 @@ class TestAIMatching(TestCase):
         self.storage_path2 = StoragePath.objects.create(name="Test Storage Path 2")
 
     @patch("paperless_ai.matching.get_objects_for_user_owner_aware")
-    def test_match_tags_by_name(self, mock_get_objects):
+    def test_match_tags_by_name(self, mock_get_objects) -> None:
         mock_get_objects.return_value = Tag.objects.all()
         names = ["Test Tag 1", "Nonexistent Tag"]
         result = match_tags_by_name(names, user=None)
@@ -40,7 +40,7 @@ class TestAIMatching(TestCase):
         self.assertEqual(result[0].name, "Test Tag 1")
 
     @patch("paperless_ai.matching.get_objects_for_user_owner_aware")
-    def test_match_correspondents_by_name(self, mock_get_objects):
+    def test_match_correspondents_by_name(self, mock_get_objects) -> None:
         mock_get_objects.return_value = Correspondent.objects.all()
         names = ["Test Correspondent 1", "Nonexistent Correspondent"]
         result = match_correspondents_by_name(names, user=None)
@@ -48,7 +48,7 @@ class TestAIMatching(TestCase):
         self.assertEqual(result[0].name, "Test Correspondent 1")
 
     @patch("paperless_ai.matching.get_objects_for_user_owner_aware")
-    def test_match_document_types_by_name(self, mock_get_objects):
+    def test_match_document_types_by_name(self, mock_get_objects) -> None:
         mock_get_objects.return_value = DocumentType.objects.all()
         names = ["Test Document Type 1", "Nonexistent Document Type"]
         result = match_document_types_by_name(names, user=None)
@@ -56,28 +56,28 @@ class TestAIMatching(TestCase):
         self.assertEqual(result[0].name, "Test Document Type 1")
 
     @patch("paperless_ai.matching.get_objects_for_user_owner_aware")
-    def test_match_storage_paths_by_name(self, mock_get_objects):
+    def test_match_storage_paths_by_name(self, mock_get_objects) -> None:
         mock_get_objects.return_value = StoragePath.objects.all()
         names = ["Test Storage Path 1", "Nonexistent Storage Path"]
         result = match_storage_paths_by_name(names, user=None)
         self.assertEqual(len(result), 1)
         self.assertEqual(result[0].name, "Test Storage Path 1")
 
-    def test_extract_unmatched_names(self):
+    def test_extract_unmatched_names(self) -> None:
         llm_names = ["Test Tag 1", "Nonexistent Tag"]
         matched_objects = [self.tag1]
         unmatched_names = extract_unmatched_names(llm_names, matched_objects)
         self.assertEqual(unmatched_names, ["Nonexistent Tag"])
 
     @patch("paperless_ai.matching.get_objects_for_user_owner_aware")
-    def test_match_tags_by_name_with_empty_names(self, mock_get_objects):
+    def test_match_tags_by_name_with_empty_names(self, mock_get_objects) -> None:
         mock_get_objects.return_value = Tag.objects.all()
         names = [None, "", "   "]
         result = match_tags_by_name(names, user=None)
         self.assertEqual(result, [])
 
     @patch("paperless_ai.matching.get_objects_for_user_owner_aware")
-    def test_match_tags_with_fuzzy_matching(self, mock_get_objects):
+    def test_match_tags_with_fuzzy_matching(self, mock_get_objects) -> None:
         mock_get_objects.return_value = Tag.objects.all()
         names = ["Test Taag 1", "Teest Tag 2"]
         result = match_tags_by_name(names, user=None)
index a07a059b40ba2f3f5e8ff80cee860eb0df5fd826..dd3e71f82a4f2b36e22d361f2cc8ea5577e6657a 100644 (file)
@@ -10,7 +10,7 @@ class PaperlessMailConfig(AppConfig):
 
     verbose_name = _("Paperless mail")
 
-    def ready(self):
+    def ready(self) -> None:
         from documents.signals import document_consumer_declaration
 
         if settings.TIKA_ENABLED:
index edb266c51b89b8150534c39fcdc217b88b3c99a8..8acd5d8adb3dcd1befd532cf246dac633d13d8e0 100644 (file)
@@ -107,7 +107,7 @@ class DeleteMailAction(BaseMailAction):
     A mail action that deletes mails after processing.
     """
 
-    def post_consume(self, M: MailBox, message_uid: str, parameter: str):
+    def post_consume(self, M: MailBox, message_uid: str, parameter: str) -> None:
         M.delete(message_uid)
 
 
@@ -119,7 +119,7 @@ class MarkReadMailAction(BaseMailAction):
     def get_criteria(self):
         return {"seen": False}
 
-    def post_consume(self, M: MailBox, message_uid: str, parameter: str):
+    def post_consume(self, M: MailBox, message_uid: str, parameter: str) -> None:
         M.flag(message_uid, [MailMessageFlags.SEEN], value=True)
 
 
@@ -128,7 +128,7 @@ class MoveMailAction(BaseMailAction):
     A mail action that moves mails to a different folder after processing.
     """
 
-    def post_consume(self, M, message_uid, parameter):
+    def post_consume(self, M, message_uid, parameter) -> None:
         M.move(message_uid, parameter)
 
 
@@ -140,7 +140,7 @@ class FlagMailAction(BaseMailAction):
     def get_criteria(self):
         return {"flagged": False}
 
-    def post_consume(self, M: MailBox, message_uid: str, parameter: str):
+    def post_consume(self, M: MailBox, message_uid: str, parameter: str) -> None:
         M.flag(message_uid, [MailMessageFlags.FLAGGED], value=True)
 
 
@@ -149,7 +149,7 @@ class TagMailAction(BaseMailAction):
     A mail action that tags mails after processing.
     """
 
-    def __init__(self, parameter: str, *, supports_gmail_labels: bool):
+    def __init__(self, parameter: str, *, supports_gmail_labels: bool) -> None:
         # The custom tag should look like "apple:<color>"
         if "apple:" in parameter.lower():
             _, self.color = parameter.split(":")
@@ -177,7 +177,7 @@ class TagMailAction(BaseMailAction):
         else:  # pragma: no cover
             raise ValueError("This should never happen.")
 
-    def post_consume(self, M: MailBox, message_uid: str, parameter: str):
+    def post_consume(self, M: MailBox, message_uid: str, parameter: str) -> None:
         if self.supports_gmail_labels:
             M.client.uid("STORE", message_uid, "+X-GM-LABELS", self.keyword)
 
@@ -205,7 +205,7 @@ class TagMailAction(BaseMailAction):
             raise MailError("No keyword specified.")
 
 
-def mailbox_login(mailbox: MailBox, account: MailAccount):
+def mailbox_login(mailbox: MailBox, account: MailAccount) -> None:
     logger = logging.getLogger("paperless_mail")
 
     try:
@@ -241,7 +241,7 @@ def apply_mail_action(
     message_uid: str,
     message_subject: str,
     message_date: datetime.datetime,
-):
+) -> None:
     """
     This shared task applies the mail action of a particular mail rule to the
     given mail. Creates a ProcessedMail object, so that the mail won't be
@@ -310,7 +310,7 @@ def error_callback(
     message_uid: str,
     message_subject: str,
     message_date: datetime.datetime,
-):
+) -> None:
     """
     A shared task that is called whenever something goes wrong during
     consumption of a file. See queue_consumption_tasks.
@@ -333,7 +333,7 @@ def queue_consumption_tasks(
     consume_tasks: list[Signature],
     rule: MailRule,
     message: MailMessage,
-):
+) -> None:
     """
     Queue a list of consumption tasks (Signatures for the consume_file shared
     task) with celery.
@@ -450,12 +450,12 @@ class MailAccountHandler(LoggingMixin):
         self.renew_logging_group()
         self._init_preprocessors()
 
-    def _init_preprocessors(self):
+    def _init_preprocessors(self) -> None:
         self._message_preprocessors: list[MailMessagePreprocessor] = []
         for preprocessor_type in self._message_preprocessor_types:
             self._init_preprocessor(preprocessor_type)
 
-    def _init_preprocessor(self, preprocessor_type):
+    def _init_preprocessor(self, preprocessor_type) -> None:
         if preprocessor_type.able_to_run():
             try:
                 self._message_preprocessors.append(preprocessor_type())
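
Most of the functions annotated in this file are side-effect hooks: the `post_consume` actions, `mailbox_login`, and the Celery callbacks act on the mailbox or the database and hand nothing back, so `-> None` is the natural return type. It also lets mypy flag any caller that tries to use the non-existent result. A sketch with made-up names, not the paperless_mail classes:

    from typing import Any


    class MarkSeenAction:
        """Illustrative stand-in for a post-consume mail action."""

        def post_consume(self, mailbox: Any, message_uid: str, parameter: str) -> None:
            # Pure side effect: flag the message on the server; nothing is returned.
            mailbox.flag(message_uid, ["\\Seen"], value=True)


    # Trying to use the "result" is now a type error:
    #   seen = MarkSeenAction().post_consume(box, "42", "")
    #   error: "post_consume" does not return a value  [func-returns-value]
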
index 738b947b0fc3ab6c46c41c88f923276938710d21..22835de8961bb7ed64f9f89cdfc913b34a363900 100644 (file)
@@ -128,7 +128,7 @@ class MailDocumentParser(DocumentParser):
         mime_type: str,
         file_name=None,
         mailrule_id: int | None = None,
-    ):
+    ) -> None:
         """
         Parses the given .eml into formatted text, based on the decoded email.
 
@@ -471,7 +471,7 @@ class MailDocumentParser(DocumentParser):
         html_pdf.write_bytes(response.content)
         return html_pdf
 
-    def get_settings(self):
+    def get_settings(self) -> None:
         """
         This parser does not implement additional settings yet
         """
index d33f80c72b6402fed08a42d9fd4355d2784131d8..6afbbb4f81321c56856528fc5a84e0fd597f2a91 100644 (file)
@@ -39,7 +39,7 @@ class MailMessageDecryptor(MailMessagePreprocessor, LoggingMixin):
 
     NAME = "MailMessageDecryptor"
 
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
         self.renew_logging_group()
         self._gpg = GPG(gnupghome=settings.EMAIL_GNUPG_HOME)
index dd63c67abfd5159e0159a5f90530e921ec843b8d..dba8c840c83194a65c3f891a6cf743cdf2a25000 100644 (file)
@@ -21,7 +21,7 @@ from paperless_mail.tests.test_mail import BogusMailBox
 class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/mail_accounts/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         self.bogus_mailbox = BogusMailBox()
 
         patcher = mock.patch("paperless_mail.mail.MailBox")
@@ -36,7 +36,7 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
         self.user.save()
         self.client.force_authenticate(user=self.user)
 
-    def test_get_mail_accounts(self):
+    def test_get_mail_accounts(self) -> None:
         """
         GIVEN:
             - Configured mail accounts
@@ -73,7 +73,7 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_account1["imap_security"], account1.imap_security)
         self.assertEqual(returned_account1["character_set"], account1.character_set)
 
-    def test_create_mail_account(self):
+    def test_create_mail_account(self) -> None:
         """
         WHEN:
             - API request is made to add a mail account
@@ -108,7 +108,7 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_account1.imap_security, account1["imap_security"])
         self.assertEqual(returned_account1.character_set, account1["character_set"])
 
-    def test_delete_mail_account(self):
+    def test_delete_mail_account(self) -> None:
         """
         GIVEN:
             - Existing mail account
@@ -136,7 +136,7 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
 
         self.assertEqual(len(MailAccount.objects.all()), 0)
 
-    def test_update_mail_account(self):
+    def test_update_mail_account(self) -> None:
         """
         GIVEN:
             - Existing mail accounts
@@ -184,7 +184,7 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_account2.name, "Updated Name 2")
         self.assertEqual(returned_account2.password, "123xyz")
 
-    def test_mail_account_test_fail(self):
+    def test_mail_account_test_fail(self) -> None:
         """
         GIVEN:
             - Erroneous mail account details
@@ -210,7 +210,7 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
 
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
 
-    def test_mail_account_test_success(self):
+    def test_mail_account_test_success(self) -> None:
         """
         GIVEN:
             - Working mail account details
@@ -236,7 +236,7 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(response.data["success"], True)
 
-    def test_mail_account_test_existing(self):
+    def test_mail_account_test_existing(self) -> None:
         """
         GIVEN:
             - Testing server details for an existing account with obfuscated password (***)
@@ -272,7 +272,7 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(response.data["success"], True)
 
-    def test_get_mail_accounts_owner_aware(self):
+    def test_get_mail_accounts_owner_aware(self) -> None:
         """
         GIVEN:
             - Configured accounts with different users
@@ -343,7 +343,7 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
 class TestAPIMailRules(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/mail_rules/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_user(username="temp_admin")
@@ -351,7 +351,7 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
         self.user.save()
         self.client.force_authenticate(user=self.user)
 
-    def test_get_mail_rules(self):
+    def test_get_mail_rules(self) -> None:
         """
         GIVEN:
             - Configured mail accounts and rules
@@ -415,7 +415,7 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_rule1["order"], rule1.order)
         self.assertEqual(returned_rule1["attachment_type"], rule1.attachment_type)
 
-    def test_create_mail_rule(self):
+    def test_create_mail_rule(self) -> None:
         """
         GIVEN:
             - Configured mail account exists
@@ -520,7 +520,7 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
             rule1["assign_owner_from_rule"],
         )
 
-    def test_delete_mail_rule(self):
+    def test_delete_mail_rule(self) -> None:
         """
         GIVEN:
             - Existing mail rule
@@ -564,7 +564,7 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
 
         self.assertEqual(len(MailRule.objects.all()), 0)
 
-    def test_update_mail_rule(self):
+    def test_update_mail_rule(self) -> None:
         """
         GIVEN:
             - Existing mail rule
@@ -614,7 +614,7 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_rule1.name, "Updated Name 1")
         self.assertEqual(returned_rule1.action, MailRule.MailAction.DELETE)
 
-    def test_get_mail_rules_owner_aware(self):
+    def test_get_mail_rules_owner_aware(self) -> None:
         """
         GIVEN:
             - Configured rules with different users
@@ -683,7 +683,7 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
         self.assertEqual(response.data["results"][1]["name"], rule2.name)
         self.assertEqual(response.data["results"][2]["name"], rule4.name)
 
-    def test_mailrule_maxage_validation(self):
+    def test_mailrule_maxage_validation(self) -> None:
         """
         GIVEN:
             - An existing mail account
@@ -728,7 +728,7 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
 class TestAPIProcessedMails(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/processed_mail/"
 
-    def setUp(self):
+    def setUp(self) -> None:
         super().setUp()
 
         self.user = User.objects.create_user(username="temp_admin")
@@ -736,7 +736,7 @@ class TestAPIProcessedMails(DirectoriesMixin, APITestCase):
         self.user.save()
         self.client.force_authenticate(user=self.user)
 
-    def test_get_processed_mails_owner_aware(self):
+    def test_get_processed_mails_owner_aware(self) -> None:
         """
         GIVEN:
             - Configured processed mails with different users
@@ -821,7 +821,7 @@ class TestAPIProcessedMails(DirectoriesMixin, APITestCase):
         returned_ids = {r["id"] for r in response.data["results"]}
         self.assertSetEqual(returned_ids, {pm1.id, pm2.id, pm4.id})
 
-    def test_get_processed_mails_filter_by_rule(self):
+    def test_get_processed_mails_filter_by_rule(self) -> None:
         """
         GIVEN:
             - Processed mails belonging to two different rules
@@ -893,7 +893,7 @@ class TestAPIProcessedMails(DirectoriesMixin, APITestCase):
         returned_ids = {r["id"] for r in response.data["results"]}
         self.assertSetEqual(returned_ids, {pm1.id, pm2.id})
 
-    def test_bulk_delete_processed_mails(self):
+    def test_bulk_delete_processed_mails(self) -> None:
         """
         GIVEN:
             - Processed mails belonging to two different rules and different users
index 3dca97ca181e6ee183d8cbe6de6de6d9cb0e8edf..305c2854c89b17a1c6dee0ee89581969b0543b9c 100644 (file)
@@ -51,30 +51,30 @@ class _AttachmentDef:
 class BogusFolderManager:
     current_folder = "INBOX"
 
-    def set(self, new_folder):
+    def set(self, new_folder) -> None:
         if new_folder not in ["INBOX", "spam"]:
             raise MailboxFolderSelectError(None, "uhm")
         self.current_folder = new_folder
 
 
 class BogusClient:
-    def __init__(self, messages):
+    def __init__(self, messages) -> None:
         self.messages: list[MailMessage] = messages
         self.capabilities: list[str] = []
 
     def __enter__(self):
         return self
 
-    def __exit__(self, exc_type, exc_val, exc_tb):
+    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
         pass
 
-    def authenticate(self, mechanism, authobject):
+    def authenticate(self, mechanism, authobject) -> None:
         # authobject must be a callable object
         auth_bytes = authobject(None)
         if auth_bytes != b"\x00admin\x00w57\xc3\xa4\xc3\xb6\xc3\xbcw4b6huwb6nhu":
             raise MailboxLoginError("BAD", "OK")
 
-    def uid(self, command, *args):
+    def uid(self, command, *args) -> None:
         if command == "STORE":
             for message in self.messages:
                 if message.uid == args[0]:
@@ -94,7 +94,7 @@ class BogusMailBox(AbstractContextManager):
     # A dummy access token
     ACCESS_TOKEN = "ea7e075cd3acf2c54c48e600398d5d5a"
 
-    def __init__(self):
+    def __init__(self) -> None:
         self.messages: list[MailMessage] = []
         self.messages_spam: list[MailMessage] = []
         self.folder = BogusFolderManager()
@@ -104,25 +104,25 @@ class BogusMailBox(AbstractContextManager):
     def __enter__(self):
         return self
 
-    def __exit__(self, exc_type, exc_val, exc_tb):
+    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
         pass
 
-    def updateClient(self):
+    def updateClient(self) -> None:
         self.client = BogusClient(self.messages)
 
-    def login(self, username, password):
+    def login(self, username, password) -> None:
         # This will raise a UnicodeEncodeError if the password is not ASCII only
         password.encode("ascii")
         # Otherwise, check for correct values
         if username != self.USERNAME or password != self.ASCII_PASSWORD:
             raise MailboxLoginError("BAD", "OK")
 
-    def login_utf8(self, username, password):
+    def login_utf8(self, username, password) -> None:
         # Expected to only be called with the UTF-8 password
         if username != self.USERNAME or password != self.UTF_PASSWORD:
             raise MailboxLoginError("BAD", "OK")
 
-    def xoauth2(self, username: str, access_token: str):
+    def xoauth2(self, username: str, access_token: str) -> None:
         if username != self.USERNAME or access_token != self.ACCESS_TOKEN:
             raise MailboxLoginError("BAD", "OK")
 
@@ -166,10 +166,10 @@ class BogusMailBox(AbstractContextManager):
 
         return list(msg)
 
-    def delete(self, uid_list):
+    def delete(self, uid_list) -> None:
         self.messages = list(filter(lambda m: m.uid not in uid_list, self.messages))
 
-    def flag(self, uid_list, flag_set, value):
+    def flag(self, uid_list, flag_set, value) -> None:
         for message in self.messages:
             if message.uid in uid_list:
                 for flag in flag_set:
@@ -182,7 +182,7 @@ class BogusMailBox(AbstractContextManager):
                         if hasattr(message, "flags"):
                             del message.flags
 
-    def move(self, uid_list, folder):
+    def move(self, uid_list, folder) -> None:
         if folder == "spam":
             self.messages_spam += list(
                 filter(lambda m: m.uid in uid_list, self.messages),
@@ -203,7 +203,7 @@ def fake_magic_from_buffer(buffer, *, mime=False):
 
 
 class MessageBuilder:
-    def __init__(self):
+    def __init__(self) -> None:
         self._used_uids = set()
 
     def create_message(
@@ -274,7 +274,10 @@ class MessageBuilder:
         return imap_msg
 
 
-def reset_bogus_mailbox(bogus_mailbox: BogusMailBox, message_builder: MessageBuilder):
+def reset_bogus_mailbox(
+    bogus_mailbox: BogusMailBox,
+    message_builder: MessageBuilder,
+) -> None:
     bogus_mailbox.messages = []
     bogus_mailbox.messages_spam = []
     bogus_mailbox.messages.append(
@@ -310,7 +313,7 @@ def reset_bogus_mailbox(bogus_mailbox: BogusMailBox, message_builder: MessageBui
 
 
 class MailMocker(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         self.bogus_mailbox = BogusMailBox()
         self.messageBuilder = MessageBuilder()
 
@@ -330,7 +333,7 @@ class MailMocker(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def assert_queue_consumption_tasks_call_args(
         self,
         expected_call_args: list[list[dict[str, str]]],
-    ):
+    ) -> None:
         """
         Verifies that queue_consumption_tasks has been called with the expected arguments.
 
@@ -377,7 +380,7 @@ class MailMocker(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
                     else:
                         self.fail("No match for expected arg")
 
-    def apply_mail_actions(self):
+    def apply_mail_actions(self) -> None:
         """
         Applies pending actions to mails by inspecting calls to the queue_consumption_tasks method.
         """
@@ -387,7 +390,12 @@ class MailMocker(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             apply_mail_action([], rule.pk, message.uid, message.subject, message.date)
 
 
-def assert_eventually_equals(getter_fn, expected_value, timeout=1.0, interval=0.05):
+def assert_eventually_equals(
+    getter_fn,
+    expected_value,
+    timeout=1.0,
+    interval=0.05,
+) -> None:
     """
     Repeatedly calls `getter_fn()` until the result equals `expected_value`,
     or times out after `timeout` seconds.
@@ -395,7 +403,7 @@ def assert_eventually_equals(getter_fn, expected_value, timeout=1.0, interval=0.
     deadline = time.time() + timeout
     while time.time() < deadline:
         if getter_fn() == expected_value:
-            return
+            return None
         time.sleep(interval)
     actual = getter_fn()
     raise AssertionError(f"Expected {expected_value}, but got {actual}")
@@ -407,14 +415,14 @@ class TestMail(
     FileSystemAssertsMixin,
     TestCase,
 ):
-    def setUp(self):
+    def setUp(self) -> None:
         self.mailMocker = MailMocker()
         self.mailMocker.setUp()
         self.mail_account_handler = MailAccountHandler()
 
         super().setUp()
 
-    def test_get_correspondent(self):
+    def test_get_correspondent(self) -> None:
         message = namedtuple("MailMessage", [])
         message.from_ = "someone@somewhere.com"
         message.from_values = EmailAddress(
@@ -473,7 +481,7 @@ class TestMail(
         c = handler._get_correspondent(message, rule)
         self.assertEqual(c, someone_else)
 
-    def test_get_title(self):
+    def test_get_title(self) -> None:
         message = namedtuple("MailMessage", [])
         message.subject = "the message title"
         att = namedtuple("Attachment", [])
@@ -497,7 +505,7 @@ class TestMail(
         )
         self.assertEqual(handler._get_title(message, att, rule), None)
 
-    def test_handle_message(self):
+    def test_handle_message(self) -> None:
         message = self.mailMocker.messageBuilder.create_message(
             subject="the message title",
             from_="Myself",
@@ -526,7 +534,7 @@ class TestMail(
             ],
         )
 
-    def test_handle_empty_message(self):
+    def test_handle_empty_message(self) -> None:
         message = namedtuple("MailMessage", [])
 
         message.attachments = []
@@ -537,7 +545,7 @@ class TestMail(
         self.mailMocker._queue_consumption_tasks_mock.assert_not_called()
         self.assertEqual(result, 0)
 
-    def test_handle_unknown_mime_type(self):
+    def test_handle_unknown_mime_type(self) -> None:
         message = self.mailMocker.messageBuilder.create_message(
             attachments=[
                 _AttachmentDef(filename="f1.pdf"),
@@ -566,7 +574,7 @@ class TestMail(
             ],
         )
 
-    def test_handle_disposition(self):
+    def test_handle_disposition(self) -> None:
         message = self.mailMocker.messageBuilder.create_message(
             attachments=[
                 _AttachmentDef(
@@ -594,7 +602,7 @@ class TestMail(
             ],
         )
 
-    def test_handle_inline_files(self):
+    def test_handle_inline_files(self) -> None:
         message = self.mailMocker.messageBuilder.create_message(
             attachments=[
                 _AttachmentDef(
@@ -624,7 +632,7 @@ class TestMail(
             ],
         )
 
-    def test_filename_filter(self):
+    def test_filename_filter(self) -> None:
         """
         GIVEN:
             - Email with multiple similar named attachments
@@ -745,7 +753,7 @@ class TestMail(
                 )
 
     @pytest.mark.flaky(reruns=4)
-    def test_filename_filter_inline_no_consumption(self):
+    def test_filename_filter_inline_no_consumption(self) -> None:
         """
         GIVEN:
             - Rule that processes all attachments but filters by filename
@@ -787,7 +795,7 @@ class TestMail(
 
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 1)
 
-    def test_handle_mail_account_mark_read(self):
+    def test_handle_mail_account_mark_read(self) -> None:
         account = MailAccount.objects.create(
             name="test",
             imap_server="",
@@ -817,7 +825,7 @@ class TestMail(
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
 
     @pytest.mark.flaky(reruns=4)
-    def test_handle_mail_account_delete(self):
+    def test_handle_mail_account_delete(self) -> None:
         account = MailAccount.objects.create(
             name="test",
             imap_server="",
@@ -839,7 +847,7 @@ class TestMail(
 
         assert_eventually_equals(lambda: len(self.mailMocker.bogus_mailbox.messages), 1)
 
-    def test_handle_mail_account_delete_no_filters(self):
+    def test_handle_mail_account_delete_no_filters(self) -> None:
         account = MailAccount.objects.create(
             name="test",
             imap_server="",
@@ -862,7 +870,7 @@ class TestMail(
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 0)
 
     @pytest.mark.flaky(reruns=4)
-    def test_handle_mail_account_flag(self):
+    def test_handle_mail_account_flag(self) -> None:
         account = MailAccount.objects.create(
             name="test",
             imap_server="",
@@ -893,7 +901,7 @@ class TestMail(
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
 
     @pytest.mark.flaky(reruns=4)
-    def test_handle_mail_account_move(self):
+    def test_handle_mail_account_move(self) -> None:
         account = MailAccount.objects.create(
             name="test",
             imap_server="",
@@ -918,7 +926,7 @@ class TestMail(
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 2)
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 1)
 
-    def test_handle_mail_account_move_no_filters(self):
+    def test_handle_mail_account_move_no_filters(self) -> None:
         account = MailAccount.objects.create(
             name="test",
             imap_server="",
@@ -943,7 +951,7 @@ class TestMail(
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 0)
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 3)
 
-    def test_handle_mail_account_tag(self):
+    def test_handle_mail_account_tag(self) -> None:
         account = MailAccount.objects.create(
             name="test",
             imap_server="",
@@ -983,7 +991,7 @@ class TestMail(
             0,
         )
 
-    def test_handle_mail_account_tag_gmail(self):
+    def test_handle_mail_account_tag_gmail(self) -> None:
         self.mailMocker.bogus_mailbox._host = "imap.gmail.com"
         self.mailMocker.bogus_mailbox.client.capabilities = ["X-GM-EXT-1"]
 
@@ -1017,7 +1025,7 @@ class TestMail(
         )
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
 
-    def test_tag_mail_action_applemail_wrong_input(self):
+    def test_tag_mail_action_applemail_wrong_input(self) -> None:
         self.assertRaises(
             MailError,
             TagMailAction,
@@ -1025,7 +1033,7 @@ class TestMail(
             supports_gmail_labels=False,
         )
 
-    def test_handle_mail_account_tag_applemail(self):
+    def test_handle_mail_account_tag_applemail(self) -> None:
         # all mails will be FLAGGED afterwards
 
         account = MailAccount.objects.create(
@@ -1057,7 +1065,7 @@ class TestMail(
         )
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
 
-    def test_error_login(self):
+    def test_error_login(self) -> None:
         """
         GIVEN:
             - Account configured with incorrect password
@@ -1080,7 +1088,7 @@ class TestMail(
             self.mail_account_handler.handle_mail_account(account)
 
     @pytest.mark.flaky(reruns=4)
-    def test_error_skip_account(self):
+    def test_error_skip_account(self) -> None:
         _ = MailAccount.objects.create(
             name="test",
             imap_server="",
@@ -1109,7 +1117,7 @@ class TestMail(
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 1)
 
     @pytest.mark.flaky(reruns=4)
-    def test_error_skip_rule(self):
+    def test_error_skip_rule(self) -> None:
         account = MailAccount.objects.create(
             name="test2",
             imap_server="",
@@ -1140,7 +1148,7 @@ class TestMail(
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 2)
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 1)
 
-    def test_error_folder_set(self):
+    def test_error_folder_set(self) -> None:
         """
         GIVEN:
             - Mail rule with non-existent folder
@@ -1173,7 +1181,7 @@ class TestMail(
         self.mailMocker.bogus_mailbox.folder.list.assert_called_once()
         self.mailMocker._queue_consumption_tasks_mock.assert_not_called()
 
-    def test_error_folder_set_error_listing(self):
+    def test_error_folder_set_error_listing(self) -> None:
         """
         GIVEN:
             - Mail rule with non-existent folder
@@ -1208,8 +1216,8 @@ class TestMail(
 
     @pytest.mark.flaky(reruns=4)
     @mock.patch("paperless_mail.mail.MailAccountHandler._get_correspondent")
-    def test_error_skip_mail(self, m):
-        def get_correspondent_fake(message, rule):
+    def test_error_skip_mail(self, m) -> None:
+        def get_correspondent_fake(message, rule) -> None:
             if message.from_ == "amazon@amazon.de":
                 raise ValueError("Does not compute.")
             else:
@@ -1243,7 +1251,7 @@ class TestMail(
             "amazon@amazon.de",
         )
 
-    def test_error_create_correspondent(self):
+    def test_error_create_correspondent(self) -> None:
         account = MailAccount.objects.create(
             name="test2",
             imap_server="",
@@ -1292,7 +1300,7 @@ class TestMail(
         )
 
     @pytest.mark.flaky(reruns=4)
-    def test_filters(self):
+    def test_filters(self) -> None:
         account = MailAccount.objects.create(
             name="test3",
             imap_server="",
@@ -1342,7 +1350,7 @@ class TestMail(
                     expected_mail_count,
                 )
 
-    def test_auth_plain_fallback(self):
+    def test_auth_plain_fallback(self) -> None:
         """
         GIVEN:
             - Mail account with password containing non-ASCII characters
@@ -1382,7 +1390,7 @@ class TestMail(
         )
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
 
-    def test_auth_plain_fallback_fails_still(self):
+    def test_auth_plain_fallback_fails_still(self) -> None:
         """
         GIVEN:
             - Mail account with password containing non-ASCII characters
@@ -1413,7 +1421,7 @@ class TestMail(
             account,
         )
 
-    def test_auth_with_valid_token(self):
+    def test_auth_with_valid_token(self) -> None:
         """
         GIVEN:
             - Mail account configured with access token
@@ -1454,7 +1462,7 @@ class TestMail(
         )
         self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
 
-    def test_disabled_rule(self):
+    def test_disabled_rule(self) -> None:
         """
         GIVEN:
             - Mail rule is disabled
@@ -1494,7 +1502,7 @@ class TestMail(
 
 
 class TestPostConsumeAction(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         self.account = MailAccount.objects.create(
             name="test",
             imap_server="imap.test.com",
@@ -1586,7 +1594,7 @@ class TestManagementCommand(TestCase):
     @mock.patch(
         "paperless_mail.management.commands.mail_fetcher.tasks.process_mail_accounts",
     )
-    def test_mail_fetcher(self, m):
+    def test_mail_fetcher(self, m) -> None:
         call_command("mail_fetcher")
 
         m.assert_called_once()
@@ -1594,7 +1602,7 @@ class TestManagementCommand(TestCase):
 
 class TestTasks(TestCase):
     @mock.patch("paperless_mail.tasks.MailAccountHandler.handle_mail_account")
-    def test_all_accounts(self, m):
+    def test_all_accounts(self, m) -> None:
         m.side_effect = lambda account: 6
 
         MailAccount.objects.create(
@@ -1628,7 +1636,7 @@ class TestTasks(TestCase):
         self.assertIn("No new", result)
 
     @mock.patch("paperless_mail.tasks.MailAccountHandler.handle_mail_account")
-    def test_accounts_no_enabled_rules(self, m):
+    def test_accounts_no_enabled_rules(self, m) -> None:
         m.side_effect = lambda account: 6
 
         MailAccount.objects.create(
@@ -1658,7 +1666,7 @@ class TestTasks(TestCase):
         self.assertEqual(m.call_count, 0)
 
     @mock.patch("paperless_mail.tasks.MailAccountHandler.handle_mail_account")
-    def test_process_with_account_ids(self, m):
+    def test_process_with_account_ids(self, m) -> None:
         m.side_effect = lambda account: 6
 
         account_a = MailAccount.objects.create(
@@ -1693,7 +1701,7 @@ class TestTasks(TestCase):
 
 
 class TestMailAccountTestView(APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         self.mailMocker = MailMocker()
         self.mailMocker.setUp()
         self.user = User.objects.create_user(
@@ -1703,7 +1711,7 @@ class TestMailAccountTestView(APITestCase):
         self.client.force_authenticate(user=self.user)
         self.url = "/api/mail_accounts/test/"
 
-    def test_mail_account_test_view_success(self):
+    def test_mail_account_test_view_success(self) -> None:
         data = {
             "imap_server": "imap.example.com",
             "imap_port": 993,
@@ -1717,7 +1725,7 @@ class TestMailAccountTestView(APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(response.data, {"success": True})
 
-    def test_mail_account_test_view_mail_error(self):
+    def test_mail_account_test_view_mail_error(self) -> None:
         data = {
             "imap_server": "imap.example.com",
             "imap_port": 993,
@@ -1818,7 +1826,7 @@ class TestMailAccountTestView(APITestCase):
 
 
 class TestMailAccountProcess(APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         self.mailMocker = MailMocker()
         self.mailMocker.setUp()
         self.user = User.objects.create_superuser(
@@ -1838,14 +1846,14 @@ class TestMailAccountProcess(APITestCase):
         self.url = f"/api/mail_accounts/{self.account.pk}/process/"
 
     @mock.patch("paperless_mail.tasks.process_mail_accounts.delay")
-    def test_mail_account_process_view(self, m):
+    def test_mail_account_process_view(self, m) -> None:
         response = self.client.post(self.url)
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         m.assert_called_once()
 
 
 class TestMailRuleAPI(APITestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         self.user = User.objects.create_superuser(
             username="testuser",
             password="testpassword",
@@ -1862,7 +1870,7 @@ class TestMailRuleAPI(APITestCase):
         )
         self.url = "/api/mail_rules/"
 
-    def test_create_mail_rule(self):
+    def test_create_mail_rule(self) -> None:
         """
         GIVEN:
             - Valid data for creating a mail rule
@@ -1884,7 +1892,7 @@ class TestMailRuleAPI(APITestCase):
         rule = MailRule.objects.first()
         self.assertEqual(rule.name, "Test Rule")
 
-    def test_mail_rule_action_parameter_required_for_tag_or_move(self):
+    def test_mail_rule_action_parameter_required_for_tag_or_move(self) -> None:
         """
         GIVEN:
             - Valid data for creating a mail rule without action_parameter
index f8f28df65ae6e44fd98e2a4f541ace76fb8e1013..1f7033bddeee797d66083eca719864f13f50674b 100644 (file)
@@ -39,7 +39,7 @@ class TestMailOAuth(
         settings.OUTLOOK_OAUTH_CLIENT_SECRET = "test_outlook_client_secret"
         super().setUp()
 
-    def test_generate_paths(self):
+    def test_generate_paths(self) -> None:
         """
         GIVEN:
             - Mocked settings for OAuth callback and base URLs
@@ -148,7 +148,7 @@ class TestMailOAuth(
         )
 
     @mock.patch("httpx_oauth.oauth2.BaseOAuth2.get_access_token")
-    def test_oauth_callback_view_fails(self, mock_get_access_token):
+    def test_oauth_callback_view_fails(self, mock_get_access_token) -> None:
         """
         GIVEN:
             - Mocked settings for Gmail and Outlook OAuth client IDs and secrets
@@ -193,7 +193,7 @@ class TestMailOAuth(
 
             self.assertIn("Error getting access token: test_error", cm.output[0])
 
-    def test_oauth_callback_view_insufficient_permissions(self):
+    def test_oauth_callback_view_insufficient_permissions(self) -> None:
         """
         GIVEN:
             - Mocked settings for Gmail and Outlook OAuth client IDs and secrets
@@ -223,7 +223,7 @@ class TestMailOAuth(
             MailAccount.objects.filter(imap_server="outlook.office365.com").exists(),
         )
 
-    def test_oauth_callback_view_no_code(self):
+    def test_oauth_callback_view_no_code(self) -> None:
         """
         GIVEN:
             - Mocked settings for Gmail and Outlook OAuth client IDs and secrets
@@ -244,7 +244,7 @@ class TestMailOAuth(
             MailAccount.objects.filter(imap_server="outlook.office365.com").exists(),
         )
 
-    def test_oauth_callback_view_invalid_state(self):
+    def test_oauth_callback_view_invalid_state(self) -> None:
         """
         GIVEN:
             - Mocked settings for Gmail and Outlook OAuth client IDs and secrets
index 0d02b31073b36b8bdb234a8ff5d908e6cb76ae0b..061771d4784a3266083b1ca0739950411c4cb0c4 100644 (file)
@@ -24,7 +24,7 @@ class TestEmailFileParsing:
         self,
         mail_parser: MailDocumentParser,
         sample_dir: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Fresh parser
@@ -45,7 +45,7 @@ class TestEmailFileParsing:
         self,
         mail_parser: MailDocumentParser,
         broken_email_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Fresh parser
@@ -63,7 +63,7 @@ class TestEmailFileParsing:
         self,
         mail_parser: MailDocumentParser,
         simple_txt_email_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Fresh parser
@@ -97,7 +97,7 @@ class TestEmailMetadataExtraction:
         self,
         caplog: pytest.LogCaptureFixture,
         mail_parser: MailDocumentParser,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Fresh start
@@ -120,7 +120,7 @@ class TestEmailMetadataExtraction:
         self,
         mail_parser: MailDocumentParser,
         simple_txt_email_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Fresh start
@@ -234,7 +234,7 @@ class TestEmailThumbnailGenerate:
         mocker: MockerFixture,
         mail_parser: MailDocumentParser,
         simple_txt_email_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - An E-Mail was parsed
@@ -271,7 +271,7 @@ class TestTikaHtmlParse:
         self,
         httpx_mock: HTTPXMock,
         mail_parser: MailDocumentParser,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Fresh start
@@ -287,7 +287,11 @@ class TestTikaHtmlParse:
         parsed = mail_parser.tika_parse("None")
         assert parsed == ""
 
-    def test_tika_parse(self, httpx_mock: HTTPXMock, mail_parser: MailDocumentParser):
+    def test_tika_parse(
+        self,
+        httpx_mock: HTTPXMock,
+        mail_parser: MailDocumentParser,
+    ) -> None:
         """
         GIVEN:
             - Fresh start
@@ -314,7 +318,7 @@ class TestTikaHtmlParse:
         self,
         httpx_mock: HTTPXMock,
         mail_parser: MailDocumentParser,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Fresh start
@@ -334,7 +338,7 @@ class TestTikaHtmlParse:
         self,
         settings: SettingsWrapper,
         mail_parser: MailDocumentParser,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Fresh start
@@ -357,7 +361,7 @@ class TestParser:
         mocker: MockerFixture,
         mail_parser: MailDocumentParser,
         simple_txt_email_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Fresh start
@@ -403,7 +407,7 @@ class TestParser:
         httpx_mock: HTTPXMock,
         mail_parser: MailDocumentParser,
         html_email_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Fresh start
@@ -457,7 +461,7 @@ class TestParser:
         httpx_mock: HTTPXMock,
         mail_parser: MailDocumentParser,
         simple_txt_email_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Fresh start
@@ -477,7 +481,7 @@ class TestParser:
         mail_parser: MailDocumentParser,
         simple_txt_email_file: Path,
         simple_txt_email_pdf_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Simple text email with no HTML content
@@ -505,7 +509,7 @@ class TestParser:
         mail_parser: MailDocumentParser,
         html_email_file: Path,
         html_email_pdf_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - email with HTML content
@@ -545,7 +549,7 @@ class TestParser:
         mail_parser: MailDocumentParser,
         html_email_file: Path,
         html_email_pdf_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - email with HTML content
@@ -584,7 +588,7 @@ class TestParser:
         mail_parser: MailDocumentParser,
         html_email_file: Path,
         html_email_pdf_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - email with HTML content
@@ -621,7 +625,7 @@ class TestParser:
         mail_parser: MailDocumentParser,
         html_email_file: Path,
         html_email_html_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Email message with HTML content
@@ -643,7 +647,7 @@ class TestParser:
         httpx_mock: HTTPXMock,
         mail_parser: MailDocumentParser,
         html_email_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Email message with HTML content
@@ -675,7 +679,7 @@ class TestParser:
         mail_parser: MailDocumentParser,
         html_email_file: Path,
         html_email_pdf_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Email message
index 2ad9410f9295f50edb297dfe6596058fbf580ad9..5d665831feabae301bed606f8c069826076fb5cb 100644 (file)
@@ -21,7 +21,7 @@ from paperless_mail.tests.test_mail import _AttachmentDef
 
 
 class MessageEncryptor:
-    def __init__(self):
+    def __init__(self) -> None:
         self.gpg_home = tempfile.mkdtemp()
         self.gpg = gnupg.GPG(gnupghome=self.gpg_home)
         self._testUser = "testuser@example.com"
@@ -112,53 +112,53 @@ class MessageEncryptor:
 
 class TestMailMessageGpgDecryptor(TestMail):
     @classmethod
-    def setUpClass(cls):
+    def setUpClass(cls) -> None:
         """Create GPG encryptor once for all tests in this class."""
         super().setUpClass()
         cls.messageEncryptor = MessageEncryptor()
 
     @classmethod
-    def tearDownClass(cls):
+    def tearDownClass(cls) -> None:
         """Clean up GPG resources after all tests complete."""
         if hasattr(cls, "messageEncryptor"):
             cls.messageEncryptor.cleanup()
         super().tearDownClass()
 
-    def setUp(self):
+    def setUp(self) -> None:
         with override_settings(
             EMAIL_GNUPG_HOME=self.messageEncryptor.gpg_home,
             EMAIL_ENABLE_GPG_DECRYPTOR=True,
         ):
             super().setUp()
 
-    def test_preprocessor_is_able_to_run(self):
+    def test_preprocessor_is_able_to_run(self) -> None:
         with override_settings(
             EMAIL_GNUPG_HOME=self.messageEncryptor.gpg_home,
             EMAIL_ENABLE_GPG_DECRYPTOR=True,
         ):
             self.assertTrue(MailMessageDecryptor.able_to_run())
 
-    def test_preprocessor_is_able_to_run2(self):
+    def test_preprocessor_is_able_to_run2(self) -> None:
         with override_settings(
             EMAIL_GNUPG_HOME=None,
             EMAIL_ENABLE_GPG_DECRYPTOR=True,
         ):
             self.assertTrue(MailMessageDecryptor.able_to_run())
 
-    def test_is_not_able_to_run_disabled(self):
+    def test_is_not_able_to_run_disabled(self) -> None:
         with override_settings(
             EMAIL_ENABLE_GPG_DECRYPTOR=False,
         ):
             self.assertFalse(MailMessageDecryptor.able_to_run())
 
-    def test_is_not_able_to_run_bogus_path(self):
+    def test_is_not_able_to_run_bogus_path(self) -> None:
         with override_settings(
             EMAIL_ENABLE_GPG_DECRYPTOR=True,
             EMAIL_GNUPG_HOME="_)@# notapath &%#$",
         ):
             self.assertFalse(MailMessageDecryptor.able_to_run())
 
-    def test_fails_at_initialization(self):
+    def test_fails_at_initialization(self) -> None:
         with (
             mock.patch("gnupg.GPG.__init__") as mock_run,
             override_settings(
@@ -174,7 +174,7 @@ class TestMailMessageGpgDecryptor(TestMail):
             handler = MailAccountHandler()
             self.assertEqual(len(handler._message_preprocessors), 0)
 
-    def test_decrypt_fails(self):
+    def test_decrypt_fails(self) -> None:
         encrypted_message, _ = self.create_encrypted_unencrypted_message_pair()
         # This test creates its own empty GPG home to test decryption failure
         empty_gpg_home = tempfile.mkdtemp()
@@ -199,7 +199,7 @@ class TestMailMessageGpgDecryptor(TestMail):
                 pass
             shutil.rmtree(empty_gpg_home, ignore_errors=True)
 
-    def test_decrypt_encrypted_mail(self):
+    def test_decrypt_encrypted_mail(self) -> None:
         """
         Creates a mail with attachments. Then encrypts it with a new key.
         Verifies that this encrypted message can be decrypted with attachments intact.
@@ -241,7 +241,7 @@ class TestMailMessageGpgDecryptor(TestMail):
         encrypted_message = self.messageEncryptor.encrypt(message)
         return encrypted_message, message
 
-    def test_handle_encrypted_message(self):
+    def test_handle_encrypted_message(self) -> None:
         message = self.mailMocker.messageBuilder.create_message(
             subject="the message title",
             from_="Myself",
index 8cd3199f980721e18846055b8ed9e45d25ed338f..1997b0ae9b7e5f23f7f25b9a7c90a9d559d4e23e 100644 (file)
@@ -6,7 +6,7 @@ from paperless_remote.signals import remote_consumer_declaration
 class PaperlessRemoteParserConfig(AppConfig):
     name = "paperless_remote"
 
-    def ready(self):
+    def ready(self) -> None:
         from documents.signals import document_consumer_declaration
 
         document_consumer_declaration.connect(remote_consumer_declaration)
index 8a257952e6e657883465d9b387da9b472af76776..0512fb257715b7a0414a937961f25d8599c3ba8c 100644 (file)
@@ -7,14 +7,14 @@ from paperless_remote import check_remote_parser_configured
 
 class TestChecks(TestCase):
     @override_settings(REMOTE_OCR_ENGINE=None)
-    def test_no_engine(self):
+    def test_no_engine(self) -> None:
         msgs = check_remote_parser_configured(None)
         self.assertEqual(len(msgs), 0)
 
     @override_settings(REMOTE_OCR_ENGINE="azureai")
     @override_settings(REMOTE_OCR_API_KEY="somekey")
     @override_settings(REMOTE_OCR_ENDPOINT=None)
-    def test_azure_no_endpoint(self):
+    def test_azure_no_endpoint(self) -> None:
         msgs = check_remote_parser_configured(None)
         self.assertEqual(len(msgs), 1)
         self.assertTrue(
index 793778ec3b608f58276a3c18f4418fdcffb67c1b..0116365f42ef4b8d25f771d8604db4737bbb73c4 100644 (file)
@@ -14,7 +14,7 @@ from paperless_remote.signals import get_parser
 class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     SAMPLE_FILES = Path(__file__).resolve().parent / "samples"
 
-    def assertContainsStrings(self, content: str, strings: list[str]):
+    def assertContainsStrings(self, content: str, strings: list[str]) -> None:
         # Asserts that all strings appear in content, in the given order.
         indices = []
         for s in strings:
@@ -26,7 +26,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
     @mock.patch("paperless_tesseract.parsers.run_subprocess")
     @mock.patch("azure.ai.documentintelligence.DocumentIntelligenceClient")
-    def test_get_text_with_azure(self, mock_client_cls, mock_subprocess):
+    def test_get_text_with_azure(self, mock_client_cls, mock_subprocess) -> None:
         # Arrange mock Azure client
         mock_client = mock.Mock()
         mock_client_cls.return_value = mock_client
@@ -46,7 +46,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         ]
 
         # Simulate pdftotext by writing dummy text to sidecar file
-        def fake_run(cmd, *args, **kwargs):
+        def fake_run(cmd, *args, **kwargs) -> None:
             with Path(cmd[-1]).open("w", encoding="utf-8") as f:
                 f.write("This is a test document.")
 
@@ -69,7 +69,10 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             )
 
     @mock.patch("azure.ai.documentintelligence.DocumentIntelligenceClient")
-    def test_get_text_with_azure_error_logged_and_returns_none(self, mock_client_cls):
+    def test_get_text_with_azure_error_logged_and_returns_none(
+        self,
+        mock_client_cls,
+    ) -> None:
         mock_client = mock.Mock()
         mock_client.begin_analyze_document.side_effect = RuntimeError("fail")
         mock_client_cls.return_value = mock_client
@@ -100,7 +103,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         REMOTE_OCR_API_KEY="key",
         REMOTE_OCR_ENDPOINT="https://endpoint.cognitiveservices.azure.com",
     )
-    def test_supported_mime_types_valid_config(self):
+    def test_supported_mime_types_valid_config(self) -> None:
         parser = RemoteDocumentParser(uuid.uuid4())
         expected_types = {
             "application/pdf": ".pdf",
@@ -113,7 +116,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         }
         self.assertEqual(parser.supported_mime_types(), expected_types)
 
-    def test_supported_mime_types_invalid_config(self):
+    def test_supported_mime_types_invalid_config(self) -> None:
         parser = get_parser(uuid.uuid4())
         self.assertEqual(parser.supported_mime_types(), {})
 
@@ -122,7 +125,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         REMOTE_OCR_API_KEY=None,
         REMOTE_OCR_ENDPOINT=None,
     )
-    def test_parse_with_invalid_config(self):
+    def test_parse_with_invalid_config(self) -> None:
         parser = get_parser(uuid.uuid4())
         parser.parse(self.SAMPLE_FILES / "simple-digital.pdf", "application/pdf")
         self.assertEqual(parser.text, "")
index f634349fb3bcdd03e1465624897f906983d58711..8ade88400cc70843acdd8a5e7487e84d61a64845 100644 (file)
@@ -6,7 +6,7 @@ from paperless_tesseract.signals import tesseract_consumer_declaration
 class PaperlessTesseractConfig(AppConfig):
     name = "paperless_tesseract"
 
-    def ready(self):
+    def ready(self) -> None:
         from documents.signals import document_consumer_declaration
 
         document_consumer_declaration.connect(tesseract_consumer_declaration)
index e3b0deed0516dfcaa72a7ac6e2ef7e7c5e907759..0e0146421748922307db0f5e2842c9baa217593c 100644 (file)
@@ -330,7 +330,7 @@ class RasterisedDocumentParser(DocumentParser):
 
         return ocrmypdf_args
 
-    def parse(self, document_path: Path, mime_type, file_name=None):
+    def parse(self, document_path: Path, mime_type, file_name=None) -> None:
         # This forces tesseract to use one core per page.
         os.environ["OMP_THREAD_LIMIT"] = "1"
         VALID_TEXT_LENGTH = 50
index 79991bab1e1d195ffde1d93e034036cb8c5f90d1..ab3ba0c16ba72e1dfa96f59a8fc1660cd4b408aa 100644 (file)
@@ -8,11 +8,11 @@ from paperless_tesseract import check_default_language_available
 
 
 class TestChecks(TestCase):
-    def test_default_language(self):
+    def test_default_language(self) -> None:
         check_default_language_available(None)
 
     @override_settings(OCR_LANGUAGE="")
-    def test_no_language(self):
+    def test_no_language(self) -> None:
         msgs = check_default_language_available(None)
         self.assertEqual(len(msgs), 1)
         self.assertTrue(
@@ -23,7 +23,7 @@ class TestChecks(TestCase):
 
     @override_settings(OCR_LANGUAGE="ita")
     @mock.patch("paperless_tesseract.checks.get_tesseract_langs")
-    def test_invalid_language(self, m):
+    def test_invalid_language(self, m) -> None:
         m.return_value = ["deu", "eng"]
         msgs = check_default_language_available(None)
         self.assertEqual(len(msgs), 1)
@@ -31,7 +31,7 @@ class TestChecks(TestCase):
 
     @override_settings(OCR_LANGUAGE="chi_sim")
     @mock.patch("paperless_tesseract.checks.get_tesseract_langs")
-    def test_multi_part_language(self, m):
+    def test_multi_part_language(self, m) -> None:
         """
         GIVEN:
             - An OCR language which is multi-part (i.e. chi_sim)
@@ -49,7 +49,7 @@ class TestChecks(TestCase):
 
     @override_settings(OCR_LANGUAGE="chi-sim")
     @mock.patch("paperless_tesseract.checks.get_tesseract_langs")
-    def test_multi_part_language_bad_format(self, m):
+    def test_multi_part_language_bad_format(self, m) -> None:
         """
         GIVEN:
             - An OCR language which is multi-part (i.e. chi-sim)
index 57f1c8157a8e8228bc0370f3b5190d587f0b31c3..ce8ef4c9e5cae9c529e36cc483288effc0dd8bbd 100644 (file)
@@ -19,7 +19,7 @@ from paperless_tesseract.parsers import post_process_text
 class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     SAMPLE_FILES = Path(__file__).resolve().parent / "samples"
 
-    def assertContainsStrings(self, content, strings):
+    def assertContainsStrings(self, content, strings) -> None:
         # Asserts that all strings appear in content, in the given order.
         indices = []
         for s in strings:
@@ -29,7 +29,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
                 self.fail(f"'{s}' is not in '{content}'")
         self.assertListEqual(indices, sorted(indices))
 
-    def test_post_process_text(self):
+    def test_post_process_text(self) -> None:
         text_cases = [
             ("simple     string", "simple string"),
             ("simple    newline\n   testing string", "simple newline\ntesting string"),
@@ -47,7 +47,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
                 f"strip_exceess_whitespace({source}) != '{result}', but '{actual_result}'",
             )
 
-    def test_get_text_from_pdf(self):
+    def test_get_text_from_pdf(self) -> None:
         parser = RasterisedDocumentParser(uuid.uuid4())
         text = parser.extract_text(
             None,
@@ -56,7 +56,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
         self.assertContainsStrings(text.strip(), ["This is a test document."])
 
-    def test_get_page_count(self):
+    def test_get_page_count(self) -> None:
         """
         GIVEN:
             - PDF file with a single page
@@ -80,7 +80,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
         self.assertEqual(page_count, 6)
 
-    def test_get_page_count_password_protected(self):
+    def test_get_page_count_password_protected(self) -> None:
         """
         GIVEN:
             - Password protected PDF file
@@ -98,7 +98,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             self.assertEqual(page_count, None)
             self.assertIn("Unable to determine PDF page count", cm.output[0])
 
-    def test_thumbnail(self):
+    def test_thumbnail(self) -> None:
         parser = RasterisedDocumentParser(uuid.uuid4())
         thumb = parser.get_thumbnail(
             str(self.SAMPLE_FILES / "simple-digital.pdf"),
@@ -107,8 +107,8 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsFile(thumb)
 
     @mock.patch("documents.parsers.run_convert")
-    def test_thumbnail_fallback(self, m):
-        def call_convert(input_file, output_file, **kwargs):
+    def test_thumbnail_fallback(self, m) -> None:
+        def call_convert(input_file, output_file, **kwargs) -> None:
             if ".pdf" in str(input_file):
                 raise ParseError("Does not compute.")
             else:
@@ -123,7 +123,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
         self.assertIsFile(thumb)
 
-    def test_thumbnail_encrypted(self):
+    def test_thumbnail_encrypted(self) -> None:
         parser = RasterisedDocumentParser(uuid.uuid4())
         thumb = parser.get_thumbnail(
             str(self.SAMPLE_FILES / "encrypted.pdf"),
@@ -131,7 +131,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
         self.assertIsFile(thumb)
 
-    def test_get_dpi(self):
+    def test_get_dpi(self) -> None:
         parser = RasterisedDocumentParser(None)
 
         dpi = parser.get_dpi(str(self.SAMPLE_FILES / "simple-no-dpi.png"))
@@ -140,7 +140,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         dpi = parser.get_dpi(str(self.SAMPLE_FILES / "simple.png"))
         self.assertEqual(dpi, 72)
 
-    def test_simple_digital(self):
+    def test_simple_digital(self) -> None:
         parser = RasterisedDocumentParser(None)
 
         parser.parse(
@@ -152,7 +152,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
         self.assertContainsStrings(parser.get_text(), ["This is a test document."])
 
-    def test_with_form(self):
+    def test_with_form(self) -> None:
         parser = RasterisedDocumentParser(None)
 
         parser.parse(
@@ -168,7 +168,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_MODE="redo")
-    def test_with_form_error(self):
+    def test_with_form_error(self) -> None:
         parser = RasterisedDocumentParser(None)
 
         parser.parse(
@@ -183,7 +183,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_MODE="skip")
-    def test_signed(self):
+    def test_signed(self) -> None:
         parser = RasterisedDocumentParser(None)
 
         parser.parse(str(self.SAMPLE_FILES / "signed.pdf"), "application/pdf")
@@ -198,7 +198,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_MODE="skip")
-    def test_encrypted(self):
+    def test_encrypted(self) -> None:
         parser = RasterisedDocumentParser(None)
 
         parser.parse(
@@ -210,7 +210,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertEqual(parser.get_text(), "")
 
     @override_settings(OCR_MODE="redo")
-    def test_with_form_error_notext(self):
+    def test_with_form_error_notext(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(
             str(self.SAMPLE_FILES / "with-form.pdf"),
@@ -223,7 +223,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_MODE="force")
-    def test_with_form_force(self):
+    def test_with_form_force(self) -> None:
         parser = RasterisedDocumentParser(None)
 
         parser.parse(
@@ -236,7 +236,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             ["Please enter your name in here:", "This is a PDF document with a form."],
         )
 
-    def test_image_simple(self):
+    def test_image_simple(self) -> None:
         parser = RasterisedDocumentParser(None)
 
         parser.parse(str(self.SAMPLE_FILES / "simple.png"), "image/png")
@@ -245,7 +245,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
         self.assertContainsStrings(parser.get_text(), ["This is a test document."])
 
-    def test_image_simple_alpha(self):
+    def test_image_simple_alpha(self) -> None:
         parser = RasterisedDocumentParser(None)
 
         with tempfile.TemporaryDirectory() as tempdir:
@@ -261,7 +261,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
             self.assertContainsStrings(parser.get_text(), ["This is a test document."])
 
-    def test_image_calc_a4_dpi(self):
+    def test_image_calc_a4_dpi(self) -> None:
         parser = RasterisedDocumentParser(None)
 
         dpi = parser.calculate_a4_dpi(
@@ -271,11 +271,11 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertEqual(dpi, 62)
 
     @mock.patch("paperless_tesseract.parsers.RasterisedDocumentParser.calculate_a4_dpi")
-    def test_image_dpi_fail(self, m):
+    def test_image_dpi_fail(self, m) -> None:
         m.return_value = None
         parser = RasterisedDocumentParser(None)
 
-        def f():
+        def f() -> None:
             parser.parse(
                 str(self.SAMPLE_FILES / "simple-no-dpi.png"),
                 "image/png",
@@ -284,7 +284,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertRaises(ParseError, f)
 
     @override_settings(OCR_IMAGE_DPI=72, MAX_IMAGE_PIXELS=0)
-    def test_image_no_dpi_default(self):
+    def test_image_no_dpi_default(self) -> None:
         parser = RasterisedDocumentParser(None)
 
         parser.parse(str(self.SAMPLE_FILES / "simple-no-dpi.png"), "image/png")
@@ -296,7 +296,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             ["this is a test document."],
         )
 
-    def test_multi_page(self):
+    def test_multi_page(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(
             str(self.SAMPLE_FILES / "multi-page-digital.pdf"),
@@ -309,7 +309,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_PAGES=2, OCR_MODE="skip")
-    def test_multi_page_pages_skip(self):
+    def test_multi_page_pages_skip(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(
             str(self.SAMPLE_FILES / "multi-page-digital.pdf"),
@@ -322,7 +322,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_PAGES=2, OCR_MODE="redo")
-    def test_multi_page_pages_redo(self):
+    def test_multi_page_pages_redo(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(
             str(self.SAMPLE_FILES / "multi-page-digital.pdf"),
@@ -335,7 +335,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_PAGES=2, OCR_MODE="force")
-    def test_multi_page_pages_force(self):
+    def test_multi_page_pages_force(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(
             str(self.SAMPLE_FILES / "multi-page-digital.pdf"),
@@ -348,7 +348,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_MODE="skip")
-    def test_multi_page_analog_pages_skip(self):
+    def test_multi_page_analog_pages_skip(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(
             str(self.SAMPLE_FILES / "multi-page-images.pdf"),
@@ -361,7 +361,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_PAGES=2, OCR_MODE="redo")
-    def test_multi_page_analog_pages_redo(self):
+    def test_multi_page_analog_pages_redo(self) -> None:
         """
         GIVEN:
             - File with text contained in images but no text layer
@@ -383,7 +383,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertNotIn("page 3", parser.get_text().lower())
 
     @override_settings(OCR_PAGES=1, OCR_MODE="force")
-    def test_multi_page_analog_pages_force(self):
+    def test_multi_page_analog_pages_force(self) -> None:
         """
         GIVEN:
             - File with text contained in images but no text layer
@@ -406,7 +406,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertNotIn("page 3", parser.get_text().lower())
 
     @override_settings(OCR_MODE="skip_noarchive")
-    def test_skip_noarchive_withtext(self):
+    def test_skip_noarchive_withtext(self) -> None:
         """
         GIVEN:
             - File with existing text layer
@@ -429,7 +429,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_MODE="skip_noarchive")
-    def test_skip_noarchive_notext(self):
+    def test_skip_noarchive_notext(self) -> None:
         """
         GIVEN:
             - File with text contained in images but no text layer
@@ -454,7 +454,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIsNotNone(parser.archive_path)
 
     @override_settings(OCR_SKIP_ARCHIVE_FILE="never")
-    def test_skip_archive_never_withtext(self):
+    def test_skip_archive_never_withtext(self) -> None:
         """
         GIVEN:
             - File with existing text layer
@@ -477,7 +477,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_SKIP_ARCHIVE_FILE="never")
-    def test_skip_archive_never_withimages(self):
+    def test_skip_archive_never_withimages(self) -> None:
         """
         GIVEN:
             - File with text contained in images but no text layer
@@ -500,7 +500,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_SKIP_ARCHIVE_FILE="with_text")
-    def test_skip_archive_withtext_withtext(self):
+    def test_skip_archive_withtext_withtext(self) -> None:
         """
         GIVEN:
             - File with existing text layer
@@ -523,7 +523,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_SKIP_ARCHIVE_FILE="with_text")
-    def test_skip_archive_withtext_withimages(self):
+    def test_skip_archive_withtext_withimages(self) -> None:
         """
         GIVEN:
             - File with text contained in images but no text layer
@@ -546,7 +546,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_SKIP_ARCHIVE_FILE="always")
-    def test_skip_archive_always_withtext(self):
+    def test_skip_archive_always_withtext(self) -> None:
         """
         GIVEN:
             - File with existing text layer
@@ -569,7 +569,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_SKIP_ARCHIVE_FILE="always")
-    def test_skip_archive_always_withimages(self):
+    def test_skip_archive_always_withimages(self) -> None:
         """
         GIVEN:
             - File with text contained in images but no text layer
@@ -592,7 +592,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_MODE="skip")
-    def test_multi_page_mixed(self):
+    def test_multi_page_mixed(self) -> None:
         """
         GIVEN:
             - File with some text contained in images and some in text layer
@@ -621,7 +621,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIn("[OCR skipped on page(s) 4-6]", sidecar)
 
     @override_settings(OCR_MODE="redo")
-    def test_single_page_mixed(self):
+    def test_single_page_mixed(self) -> None:
         """
         GIVEN:
             - File with some text contained in images and some in text layer
@@ -660,7 +660,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_MODE="skip_noarchive")
-    def test_multi_page_mixed_no_archive(self):
+    def test_multi_page_mixed_no_archive(self) -> None:
         """
         GIVEN:
             - File with some text contained in images and some in text layer
@@ -683,7 +683,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
 
     @override_settings(OCR_MODE="skip", OCR_ROTATE_PAGES=True)
-    def test_rotate(self):
+    def test_rotate(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(str(self.SAMPLE_FILES / "rotated.pdf"), "application/pdf")
         self.assertContainsStrings(
@@ -696,7 +696,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             ],
         )
 
-    def test_multi_page_tiff(self):
+    def test_multi_page_tiff(self) -> None:
         """
         GIVEN:
             - Multi-page TIFF image
@@ -716,7 +716,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             ["page 1", "page 2", "page 3"],
         )
 
-    def test_multi_page_tiff_alpha(self):
+    def test_multi_page_tiff_alpha(self) -> None:
         """
         GIVEN:
             - Multi-page TIFF image
@@ -740,7 +740,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
                 ["page 1", "page 2", "page 3"],
             )
 
-    def test_multi_page_tiff_alpha_srgb(self):
+    def test_multi_page_tiff_alpha_srgb(self) -> None:
         """
         GIVEN:
             - Multi-page TIFF image
@@ -767,7 +767,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
                 ["page 1", "page 2", "page 3"],
             )
 
-    def test_ocrmypdf_parameters(self):
+    def test_ocrmypdf_parameters(self) -> None:
         parser = RasterisedDocumentParser(None)
         params = parser.construct_ocrmypdf_parameters(
             input_file="input.pdf",
@@ -831,7 +831,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             params = parser.construct_ocrmypdf_parameters("", "", "", "")
             self.assertNotIn("max_image_mpixels", params)
 
-    def test_rtl_language_detection(self):
+    def test_rtl_language_detection(self) -> None:
         """
         GIVEN:
             - File with text in an RTL language
@@ -851,7 +851,7 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertIn("ةﯾﻠﺧﺎدﻻ ةرازو", parser.get_text())
 
     @mock.patch("ocrmypdf.ocr")
-    def test_gs_rendering_error(self, m):
+    def test_gs_rendering_error(self, m) -> None:
         m.side_effect = SubprocessOutputError("Ghostscript PDF/A rendering failed")
         parser = RasterisedDocumentParser(None)
 
@@ -866,39 +866,39 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 class TestParserFileTypes(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     SAMPLE_FILES = Path(__file__).parent / "samples"
 
-    def test_bmp(self):
+    def test_bmp(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(str(self.SAMPLE_FILES / "simple.bmp"), "image/bmp")
         self.assertIsFile(parser.archive_path)
         self.assertIn("this is a test document", parser.get_text().lower())
 
-    def test_jpg(self):
+    def test_jpg(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(str(self.SAMPLE_FILES / "simple.jpg"), "image/jpeg")
         self.assertIsFile(parser.archive_path)
         self.assertIn("this is a test document", parser.get_text().lower())
 
-    def test_heic(self):
+    def test_heic(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(str(self.SAMPLE_FILES / "simple.heic"), "image/heic")
         self.assertIsFile(parser.archive_path)
         self.assertIn("pizza", parser.get_text().lower())
 
     @override_settings(OCR_IMAGE_DPI=200)
-    def test_gif(self):
+    def test_gif(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(str(self.SAMPLE_FILES / "simple.gif"), "image/gif")
         self.assertIsFile(parser.archive_path)
         self.assertIn("this is a test document", parser.get_text().lower())
 
-    def test_tiff(self):
+    def test_tiff(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(str(self.SAMPLE_FILES / "simple.tif"), "image/tiff")
         self.assertIsFile(parser.archive_path)
         self.assertIn("this is a test document", parser.get_text().lower())
 
     @override_settings(OCR_IMAGE_DPI=72)
-    def test_webp(self):
+    def test_webp(self) -> None:
         parser = RasterisedDocumentParser(None)
         parser.parse(
             str(self.SAMPLE_FILES / "document.webp"),
index 7621092dc171893f897af3247841ddeb4d3c9da5..da2912a8b305d7533c020eb32589193faf10af8a 100644 (file)
@@ -27,7 +27,7 @@ class TestParserSettingsFromDb(DirectoriesMixin, FileSystemAssertsMixin, TestCas
             safe_fallback=False,
         )
 
-    def test_db_settings_ocr_pages(self):
+    def test_db_settings_ocr_pages(self) -> None:
         """
         GIVEN:
             - Django settings defines different value for OCR_PAGES than
@@ -45,7 +45,7 @@ class TestParserSettingsFromDb(DirectoriesMixin, FileSystemAssertsMixin, TestCas
             params = self.get_params()
         self.assertEqual(params["pages"], "1-5")
 
-    def test_db_settings_ocr_language(self):
+    def test_db_settings_ocr_language(self) -> None:
         """
         GIVEN:
             - Django settings defines different value for OCR_LANGUAGE than
@@ -63,7 +63,7 @@ class TestParserSettingsFromDb(DirectoriesMixin, FileSystemAssertsMixin, TestCas
             params = self.get_params()
         self.assertEqual(params["language"], "fra+ita")
 
-    def test_db_settings_ocr_output_type(self):
+    def test_db_settings_ocr_output_type(self) -> None:
         """
         GIVEN:
             - Django settings defines different value for OCR_OUTPUT_TYPE than
@@ -81,7 +81,7 @@ class TestParserSettingsFromDb(DirectoriesMixin, FileSystemAssertsMixin, TestCas
             params = self.get_params()
         self.assertEqual(params["output_type"], "pdfa")
 
-    def test_db_settings_ocr_mode(self):
+    def test_db_settings_ocr_mode(self) -> None:
         """
         GIVEN:
             - Django settings defines different value for OCR_MODE than
@@ -101,7 +101,7 @@ class TestParserSettingsFromDb(DirectoriesMixin, FileSystemAssertsMixin, TestCas
         self.assertNotIn("redo_ocr", params)
         self.assertNotIn("force_ocr", params)
 
-    def test_db_settings_ocr_clean(self):
+    def test_db_settings_ocr_clean(self) -> None:
         """
         GIVEN:
             - Django settings defines different value for OCR_CLEAN than
@@ -129,7 +129,7 @@ class TestParserSettingsFromDb(DirectoriesMixin, FileSystemAssertsMixin, TestCas
         self.assertTrue(params["clean_final"])
         self.assertNotIn("clean", params)
 
-    def test_db_settings_ocr_deskew(self):
+    def test_db_settings_ocr_deskew(self) -> None:
         """
         GIVEN:
             - Django settings defines different value for OCR_DESKEW than
@@ -147,7 +147,7 @@ class TestParserSettingsFromDb(DirectoriesMixin, FileSystemAssertsMixin, TestCas
             params = self.get_params()
         self.assertTrue(params["deskew"])
 
-    def test_db_settings_ocr_rotate(self):
+    def test_db_settings_ocr_rotate(self) -> None:
         """
         GIVEN:
             - Django settings defines different value for OCR_ROTATE_PAGES
@@ -167,7 +167,7 @@ class TestParserSettingsFromDb(DirectoriesMixin, FileSystemAssertsMixin, TestCas
         self.assertTrue(params["rotate_pages"])
         self.assertAlmostEqual(params["rotate_pages_threshold"], 15.0)
 
-    def test_db_settings_ocr_max_pixels(self):
+    def test_db_settings_ocr_max_pixels(self) -> None:
         """
         GIVEN:
             - Django settings defines different value for OCR_MAX_IMAGE_PIXELS than
@@ -185,7 +185,7 @@ class TestParserSettingsFromDb(DirectoriesMixin, FileSystemAssertsMixin, TestCas
             params = self.get_params()
         self.assertAlmostEqual(params["max_image_mpixels"], 1.0)
 
-    def test_db_settings_ocr_color_convert(self):
+    def test_db_settings_ocr_color_convert(self) -> None:
         """
         GIVEN:
             - Django settings defines different value for OCR_COLOR_CONVERSION_STRATEGY than
@@ -206,7 +206,7 @@ class TestParserSettingsFromDb(DirectoriesMixin, FileSystemAssertsMixin, TestCas
             "UseDeviceIndependentColor",
         )
 
-    def test_ocr_user_args(self):
+    def test_ocr_user_args(self) -> None:
         """
         GIVEN:
             - Django settings defines different value for OCR_USER_ARGS than
index 0dd7b22064a23604915aa6548c12ed1af5b8187e..619d718861e3dfc3c72cdd8e5b1561e9c954b934 100644 (file)
@@ -6,7 +6,7 @@ from paperless_text.signals import text_consumer_declaration
 class PaperlessTextConfig(AppConfig):
     name = "paperless_text"
 
-    def ready(self):
+    def ready(self) -> None:
         from documents.signals import document_consumer_declaration
 
         document_consumer_declaration.connect(text_consumer_declaration)
index 4e37ccd8224aefe8bcde6cd9b49ab925c3633cc1..a6c149a0a794b3eafa6571137192bad477fb0463 100644 (file)
@@ -40,10 +40,10 @@ class TextDocumentParser(DocumentParser):
 
         return out_path
 
-    def parse(self, document_path, mime_type, file_name=None):
+    def parse(self, document_path, mime_type, file_name=None) -> None:
         self.text = self.read_file_handle_unicode_errors(document_path)
 
-    def get_settings(self):
+    def get_settings(self) -> None:
         """
         This parser does not implement additional settings yet
         """
index 5fea36744402358619d340efc219d08491b3087b..b1086bc3d310212f52a1d4f844ae26a4c7ade009 100644 (file)
@@ -5,13 +5,21 @@ from paperless_text.parsers import TextDocumentParser
 
 
 class TestTextParser:
-    def test_thumbnail(self, text_parser: TextDocumentParser, sample_txt_file: Path):
+    def test_thumbnail(
+        self,
+        text_parser: TextDocumentParser,
+        sample_txt_file: Path,
+    ) -> None:
         # just make sure that it does not crash
         f = text_parser.get_thumbnail(sample_txt_file, "text/plain")
         assert f.exists()
         assert f.is_file()
 
-    def test_parse(self, text_parser: TextDocumentParser, sample_txt_file: Path):
+    def test_parse(
+        self,
+        text_parser: TextDocumentParser,
+        sample_txt_file: Path,
+    ) -> None:
         text_parser.parse(sample_txt_file, "text/plain")
 
         assert text_parser.get_text() == "This is a test file.\n"
@@ -21,7 +29,7 @@ class TestTextParser:
         self,
         text_parser: TextDocumentParser,
         malformed_txt_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Text file which contains invalid UTF bytes
@@ -37,7 +45,7 @@ class TestTextParser:
         assert text_parser.get_text() == "Pantothens�ure\n"
         assert text_parser.get_archive_path() is None
 
-    def test_thumbnail_large_file(self, text_parser: TextDocumentParser):
+    def test_thumbnail_large_file(self, text_parser: TextDocumentParser) -> None:
         """
         GIVEN:
             - A very large text file (>50MB)
index 6fad68df8694ef2742c8717677071b22eb2829c4..714a051883f3edbecd3b3ff0c9ffec542f49025f 100644 (file)
@@ -7,7 +7,7 @@ from paperless_tika.signals import tika_consumer_declaration
 class PaperlessTikaConfig(AppConfig):
     name = "paperless_tika"
 
-    def ready(self):
+    def ready(self) -> None:
         from documents.signals import document_consumer_declaration
 
         if settings.TIKA_ENABLED:
index 40aa8e581e13d32a11cdd109d7e57e178d6c561d..22a5bc1c6e8e0ba9941d19b697954d9101581bfd 100644 (file)
@@ -53,7 +53,7 @@ class TikaDocumentParser(DocumentParser):
             )
             return []
 
-    def parse(self, document_path: Path, mime_type: str, file_name=None):
+    def parse(self, document_path: Path, mime_type: str, file_name=None) -> None:
         self.log.info(f"Sending {document_path} to Tika server")
 
         try:
index 432f7482ef75b19b5fdd83aeaa784fa0039c2644..05315cf240180faaf9b9d6e1d960319249b01738 100644 (file)
@@ -26,7 +26,7 @@ class TestTikaParserAgainstServer:
         self,
         tika_parser: TikaDocumentParser,
         sample_odt_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - An input ODT format document
@@ -55,7 +55,7 @@ class TestTikaParserAgainstServer:
         self,
         tika_parser: TikaDocumentParser,
         sample_docx_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - An input DOCX format document
@@ -87,7 +87,7 @@ class TestTikaParserAgainstServer:
         self,
         tika_parser: TikaDocumentParser,
         sample_doc_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - An input DOC format document
@@ -114,7 +114,7 @@ class TestTikaParserAgainstServer:
         self,
         tika_parser: TikaDocumentParser,
         sample_broken_odt: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - An input ODT format document
index a84adb7b183c005b317ff128c12c634d7ed0722c..e1c71c1311af9bd7f68ed3f159d4f60e5a40ee83 100644 (file)
@@ -20,7 +20,7 @@ class TestTikaParser:
         settings: SettingsWrapper,
         tika_parser: TikaDocumentParser,
         sample_odt_file: Path,
-    ):
+    ) -> None:
         settings.TIME_ZONE = "America/Chicago"
         # Pretend parse response
         httpx_mock.add_response(
@@ -53,7 +53,7 @@ class TestTikaParser:
         httpx_mock: HTTPXMock,
         tika_parser: TikaDocumentParser,
         sample_odt_file: Path,
-    ):
+    ) -> None:
         httpx_mock.add_response(
             json={
                 "Content-Type": "application/vnd.oasis.opendocument.text",
@@ -76,7 +76,7 @@ class TestTikaParser:
         httpx_mock: HTTPXMock,
         tika_parser: TikaDocumentParser,
         sample_odt_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Document needs to be converted to PDF
@@ -108,7 +108,7 @@ class TestTikaParser:
         settings: SettingsWrapper,
         tika_parser: TikaDocumentParser,
         sample_odt_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
             - Document needs to be converted to PDF