verbose_name = _("Documents")
- def ready(self):
+ def ready(self) -> None:
from documents.signals import document_consumption_finished
from documents.signals import document_updated
from documents.signals.handlers import add_inbox_tags
document: Document,
field: CustomField,
target_doc_ids: list[int],
-):
+) -> None:
"""
Add or remove 'symmetrical' links to `document` on all `target_doc_ids`
"""
document: Document,
field: CustomField,
target_doc_id: int,
-):
+) -> None:
"""
Removes a 'symmetrical' link to `document` from the target document's existing custom field instance
"""
)
self._stop_words = None
- def _update_data_vectorizer_hash(self):
+ def _update_data_vectorizer_hash(self) -> None:
self.data_vectorizer_hash = sha256(
pickle.dumps(self.data_vectorizer),
).hexdigest()
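A minimal sketch of how a stored digest like this is typically consumed (an assumption, not shown in this diff): compare it on the next run to detect a changed vectorizer.

    # Hedged sketch, not the verbatim call site:
    new_hash = sha256(pickle.dumps(self.data_vectorizer)).hexdigest()
    if new_hash != self.data_vectorizer_hash:
        ...  # vectorizer changed; any cached vectorized data is stale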
status: ProgressStatusOptions,
message: ConsumerStatusShortMessage | str | None = None,
document_id=None,
- ): # pragma: no cover
+ ) -> None: # pragma: no cover
self.status_mgr.send_progress(
status,
message,
):
logging_name = "paperless.consumer"
- def run_pre_consume_script(self):
+ def run_pre_consume_script(self) -> None:
"""
If one is configured and exists, run the pre-consume script and
handle its output and/or errors
exception=e,
)
- def run_post_consume_script(self, document: Document):
+ def run_post_consume_script(self, document: Document) -> None:
"""
If one is configured and exists, run the post-consume script and
handle its output and/or errors
tempdir.cleanup()
raise
- def progress_callback(current_progress, max_progress): # pragma: no cover
+ def progress_callback(
+ current_progress,
+ max_progress,
+ ) -> None: # pragma: no cover
# recalculate progress to fall between 20 and 70
p = int((current_progress / max_progress) * 50 + 20)
self._send_progress(p, 100, ProgressStatusOptions.WORKING)
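A quick worked example of the rescaling:

    # current_progress=50, max_progress=100
    # -> p = int((50 / 100) * 50 + 20) = int(45.0) = 45
    # The parser's own 0-100% is mapped onto the 20-70 band of the overall
    # consume task, leaving room for the steps before and after parsing.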
return document
- def apply_overrides(self, document):
+ def apply_overrides(self, document) -> None:
if self.metadata.correspondent_id:
document.correspondent = Correspondent.objects.get(
pk=self.metadata.correspondent_id,
}
CustomFieldInstance.objects.create(**args) # adds to document
- def _write(self, source, target):
+ def _write(self, source, target) -> None:
with (
Path(source).open("rb") as read_file,
Path(target).open("wb") as write_file,
NAME: str = "ConsumerPreflightPlugin"
logging_name = "paperless.consumer"
- def pre_check_file_exists(self):
+ def pre_check_file_exists(self) -> None:
"""
Confirm the input file still exists where it should
"""
f"Cannot consume {self.input_doc.original_file}: File not found.",
)
- def pre_check_duplicate(self):
+ def pre_check_duplicate(self) -> None:
"""
Using the MD5 of the file, check that this exact file doesn't already exist
"""
failure_msg,
)
- def pre_check_directories(self):
+ def pre_check_directories(self) -> None:
"""
Ensure all required directories exist before attempting to use them
"""
settings.ORIGINALS_DIR.mkdir(parents=True, exist_ok=True)
settings.ARCHIVE_DIR.mkdir(parents=True, exist_ok=True)
- def pre_check_asn_value(self):
+ def pre_check_asn_value(self) -> None:
"""
Check that if override_asn is given, it is unique and within a valid range
"""
mailrule_id: int | None = None
mime_type: str = dataclasses.field(init=False, default=None)
- def __post_init__(self):
+ def __post_init__(self) -> None:
"""
After a dataclass is initialized, this is called to finalize some data
1. Make sure the original path is an absolute, fully qualified path
class ObjectFilter(Filter):
- def __init__(self, *, exclude=False, in_list=False, field_name=""):
+ def __init__(self, *, exclude=False, in_list=False, field_name="") -> None:
super().__init__()
self.exclude = exclude
self.in_list = in_list
class SelectField(serializers.CharField):
- def __init__(self, custom_field: CustomField):
+ def __init__(self, custom_field: CustomField) -> None:
self._options = custom_field.extra_data["select_options"]
super().__init__(max_length=16)
@extend_schema_field(serializers.CharField)
class CustomFieldQueryFilter(Filter):
- def __init__(self, validation_prefix):
+ def __init__(self, validation_prefix) -> None:
"""
A filter that matches documents against a custom field name and value.
class ManualResultsPage(list):
- def __init__(self, hits):
+ def __init__(self, hits) -> None:
super().__init__(hits)
self.results = ManualResults(hits)
class ManualResults:
- def __init__(self, hits):
+ def __init__(self, hits) -> None:
self._docnums = [hit.docnum for hit in hits]
def docs(self):
class LoggingMixin:
- def renew_logging_group(self):
+ def renew_logging_group(self) -> None:
"""
Creates a new UUID to group subsequent log calls together with
the extra data named group
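A minimal sketch of the idea, assuming the group is a fresh `uuid.uuid4()` handed to the logger via `extra`:

    import logging
    import uuid

    logging_group = uuid.uuid4()
    logger = logging.getLogger("paperless")
    # every record carrying the same group id can be correlated later
    logger.debug("consume started", extra={"group": logging_group})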
# This code is taken almost entirely from https://github.com/wagtail/wagtail/pull/11912 with all credit to the original author.
help = "Converts UUID columns from char type to the native UUID type used in MariaDB 10.7+ and Django 5.0+."
- def convert_field(self, model, field_name, *, null=False):
+ def convert_field(self, model, field_name, *, null=False) -> None:
if model._meta.get_field(field_name).model != model: # pragma: no cover
# Field is inherited from a parent model
return
"easy import."
)
- def add_arguments(self, parser):
+ def add_arguments(self, parser) -> None:
parser.add_argument("target")
parser.add_argument(
help="If provided, is used to encrypt sensitive data in the export",
)
- def handle(self, *args, **options):
+ def handle(self, *args, **options) -> None:
self.target = Path(options["target"]).resolve()
self.split_manifest: bool = options["split_manifest"]
self.compare_checksums: bool = options["compare_checksums"]
if self.zip_export and temp_dir is not None:
temp_dir.cleanup()
- def dump(self):
+ def dump(self) -> None:
# 1. Take a snapshot of what files exist in the current export folder
for x in self.target.glob("**/*"):
if x.is_file():
self,
content: list[dict] | dict,
target: Path,
- ):
+ ) -> None:
"""
Writes the source content to the target json file.
If --compare-json arg was used, don't write to target file if
source: Path,
source_checksum: str | None,
target: Path,
- ):
+ ) -> None:
"""
Copies the source to the target if the target doesn't exist or doesn't seem to match
the source's attributes
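A plausible standalone sketch of that behavior (the helper name and the md5 comparison are assumptions):

    import hashlib
    import shutil
    from pathlib import Path

    def copy_if_changed(source: Path, source_checksum: str | None, target: Path) -> None:
        # Skip the copy when the target already matches the source checksum
        if target.exists() and source_checksum is not None:
            if hashlib.md5(target.read_bytes()).hexdigest() == source_checksum:
                return
        target.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(source, target)  # copy2 keeps the basic stat attributes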
self.source = Path(tmp_dir)
self._run_import()
- def _run_import(self):
+ def _run_import(self) -> None:
self.pre_check()
self.load_metadata()
self.load_manifest_files()
from documents.parsers import get_parser_class_for_mime_type
-def _process_document(doc_id):
+def _process_document(doc_id) -> None:
document: Document = Document.objects.get(id=doc_id)
parser_class = get_parser_class_for_mime_type(document.mime_type)
class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
help = "This will regenerate the thumbnails for all documents."
- def add_arguments(self, parser):
+ def add_arguments(self, parser) -> None:
parser.add_argument(
"-d",
"--document",
parser.formatter_class = RawTextHelpFormatter
return parser
- def handle(self, *args, **options):
+ def handle(self, *args, **options) -> None:
username = os.getenv("PAPERLESS_ADMIN_USER", "admin")
mail = os.getenv("PAPERLESS_ADMIN_MAIL", "root@localhost")
password = os.getenv("PAPERLESS_ADMIN_PASSWORD")
for the use of multiple processes
"""
- def add_argument_processes_mixin(self, parser: ArgumentParser):
+ def add_argument_processes_mixin(self, parser: ArgumentParser) -> None:
parser.add_argument(
"--processes",
default=max(1, (os.cpu_count() or 1) // 4),
help="Number of processes to distribute work amongst",
)
- def handle_processes_mixin(self, *args, **options):
+ def handle_processes_mixin(self, *args, **options) -> None:
self.process_count = options["processes"]
if self.process_count < 1:
raise CommandError("There must be at least 1 process")
via this class
"""
- def add_argument_progress_bar_mixin(self, parser: ArgumentParser):
+ def add_argument_progress_bar_mixin(self, parser: ArgumentParser) -> None:
parser.add_argument(
"--no-progress-bar",
default=False,
help="If set, the progress bar will not be shown",
)
- def handle_progress_bar_mixin(self, *args, **options):
+ def handle_progress_bar_mixin(self, *args, **options) -> None:
self.no_progress_bar = options["no_progress_bar"]
self.use_progress_bar = not self.no_progress_bar
},
}
- def load_crypt_params(self, metadata: dict):
+ def load_crypt_params(self, metadata: dict) -> None:
# Load up the values for setting up decryption
self.kdf_algorithm: str = metadata[EXPORTER_CRYPTO_SETTINGS_NAME][
EXPORTER_CRYPTO_ALGO_NAME
EXPORTER_CRYPTO_SALT_NAME
]
- def setup_crypto(self, *, passphrase: str, salt: str | None = None):
+ def setup_crypto(self, *, passphrase: str, salt: str | None = None) -> None:
"""
Constructs a class for encryption or decryption using the specified passphrase and salt
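A minimal sketch of such a construction, assuming the `cryptography` package's PBKDF2HMAC and Fernet (the real KDF algorithm, iteration count, and salt come from the exporter metadata loaded above):

    import base64
    from cryptography.fernet import Fernet
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

    def build_fernet(passphrase: str, salt: bytes) -> Fernet:
        # Derive a 32-byte key from the passphrase, then wrap it for Fernet
        kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=480_000)
        return Fernet(base64.urlsafe_b64encode(kdf.derive(passphrase.encode("utf-8"))))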
matching_model: MatchingModel | WorkflowTrigger,
document: Document,
reason: str,
-):
+) -> None:
class_name = type(matching_model).__name__
name = (
matching_model.name if hasattr(matching_model, "name") else str(matching_model)
verbose_name = _("tag")
verbose_name_plural = _("tags")
- def clean(self):
+ def clean(self) -> None:
# Prevent self-parenting and assigning a descendant as parent
parent = self.get_parent()
if parent == self:
def created_date(self):
return self.created
- def add_nested_tags(self, tags):
+ def add_nested_tags(self, tags) -> None:
tag_ids = set()
for tag in tags:
tag_ids.add(tag.id)
return None
return (settings.SHARE_LINK_BUNDLE_DIR / Path(self.file_path)).resolve()
- def remove_file(self):
+ def remove_file(self) -> None:
if self.absolute_file_path is not None and self.absolute_file_path.exists():
try:
self.absolute_file_path.unlink()
logging_name = "paperless.parsing"
- def __init__(self, logging_group, progress_callback=None):
+ def __init__(self, logging_group, progress_callback=None) -> None:
super().__init__()
self.renew_logging_group()
self.logging_group = logging_group
self.date: datetime.datetime | None = None
self.progress_callback = progress_callback
- def progress(self, current_progress, max_progress):
+ def progress(self, current_progress, max_progress) -> None:
if self.progress_callback:
self.progress_callback(current_progress, max_progress)
def extract_metadata(self, document_path, mime_type):
return []
- def get_page_count(self, document_path, mime_type):
- def get_page_count(self, document_path, mime_type):
+ def get_page_count(self, document_path, mime_type) -> int | None:
def parse(self, document_path, mime_type, file_name=None):
def get_date(self) -> datetime.datetime | None:
return self.date
- def cleanup(self):
+ def cleanup(self) -> None:
self.log.debug(f"Deleting directory {self.tempdir}")
shutil.rmtree(self.tempdir)
return Group.objects.filter(id__in=group_object_perm_group_ids).distinct()
-def set_permissions_for_object(permissions: dict, object, *, merge: bool = False):
+def set_permissions_for_object(
+ permissions: dict,
+ object,
+ *,
+ merge: bool = False,
+) -> None:
"""
Set permissions for an object. The permissions are given as a mapping of actions
to a dict of user / group id lists, e.g.
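An illustration of that mapping, with hypothetical ids:

    permissions = {
        "view": {"users": [1, 2], "groups": [3]},
        "change": {"users": [1], "groups": []},
    }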
class SanityCheckMessages:
- def __init__(self):
+ def __init__(self) -> None:
self._messages: dict[int, list[dict]] = defaultdict(list)
self.has_error = False
self.has_warning = False
- def error(self, doc_pk, message):
+ def error(self, doc_pk, message) -> None:
self._messages[doc_pk].append({"level": logging.ERROR, "message": message})
self.has_error = True
- def warning(self, doc_pk, message):
+ def warning(self, doc_pk, message) -> None:
self._messages[doc_pk].append({"level": logging.WARNING, "message": message})
self.has_warning = True
- def info(self, doc_pk, message):
+ def info(self, doc_pk, message) -> None:
self._messages[doc_pk].append({"level": logging.INFO, "message": message})
- def log_messages(self):
+ def log_messages(self) -> None:
logger = logging.getLogger("paperless.sanity_checker")
if len(self._messages) == 0:
controls which fields should be displayed.
"""
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args, **kwargs) -> None:
# Don't pass the 'fields' arg up to the superclass
fields = kwargs.pop("fields", None)
del permissions_dict[action]
return permissions_dict
- def _set_permissions(self, permissions, object):
+ def _set_permissions(self, permissions, object) -> None:
set_permissions_for_object(permissions, object)
class SerializerWithPerms(serializers.Serializer):
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args, **kwargs) -> None:
self.user = kwargs.pop("user", None)
self.full_perms = kwargs.pop("full_perms", False)
self.all_fields = kwargs.pop("all_fields", False)
serializers.ModelSerializer,
SetPermissionsMixin,
):
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
if not self.all_fields:
)
# other methods in mixin
- def validate_unique_together(self, validated_data, instance=None):
+ def validate_unique_together(self, validated_data, instance=None) -> None:
# workaround for https://github.com/encode/django-rest-framework/issues/9358
if "owner" in validated_data and "name" in self.Meta.fields:
name = validated_data.get("name", instance.name if instance else None)
class CustomFieldSerializer(serializers.ModelSerializer):
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args, **kwargs) -> None:
context = kwargs.get("context")
self.api_version = int(
context.get("request").version
Based on https://stackoverflow.com/a/62579804
"""
- def __init__(self, method_name=None, *args, **kwargs):
+ def __init__(self, method_name=None, *args, **kwargs) -> None:
self.method_name = method_name
kwargs["source"] = "*"
super(serializers.SerializerMethodField, self).__init__(*args, **kwargs)
CustomFieldInstance.deleted_objects.filter(document=instance).delete()
return instance
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args, **kwargs) -> None:
self.truncate_content = kwargs.pop("truncate_content", False)
# return full permissions if we're doing a PATCH or PUT
child=serializers.IntegerField(),
)
- def _validate_document_id_list(self, documents, name="documents"):
+ def _validate_document_id_list(self, documents, name="documents") -> None:
if not isinstance(documents, list):
raise serializers.ValidationError(f"{name} must be a list")
if not all(isinstance(i, int) for i in documents):
parameters = serializers.DictField(allow_empty=True, default={}, write_only=True)
- def _validate_tag_id_list(self, tags, name="tags"):
+ def _validate_tag_id_list(self, tags, name="tags") -> None:
if not isinstance(tags, list):
raise serializers.ValidationError(f"{name} must be a list")
if not all(isinstance(i, int) for i in tags):
self,
custom_fields,
name="custom_fields",
- ):
+ ) -> None:
ids = custom_fields
if isinstance(custom_fields, dict):
try:
# This will never happen as it is handled by the ChoiceField
raise serializers.ValidationError("Unsupported method.")
- def _validate_parameters_tags(self, parameters):
+ def _validate_parameters_tags(self, parameters) -> None:
if "tag" in parameters:
tag_id = parameters["tag"]
try:
else:
raise serializers.ValidationError("tag not specified")
- def _validate_parameters_document_type(self, parameters):
+ def _validate_parameters_document_type(self, parameters) -> None:
if "document_type" in parameters:
document_type_id = parameters["document_type"]
if document_type_id is None:
else:
raise serializers.ValidationError("document_type not specified")
- def _validate_parameters_correspondent(self, parameters):
+ def _validate_parameters_correspondent(self, parameters) -> None:
if "correspondent" in parameters:
correspondent_id = parameters["correspondent"]
if correspondent_id is None:
else:
raise serializers.ValidationError("correspondent not specified")
- def _validate_storage_path(self, parameters):
+ def _validate_storage_path(self, parameters) -> None:
if "storage_path" in parameters:
storage_path_id = parameters["storage_path"]
if storage_path_id is None:
else:
raise serializers.ValidationError("storage path not specified")
- def _validate_parameters_modify_tags(self, parameters):
+ def _validate_parameters_modify_tags(self, parameters) -> None:
if "add_tags" in parameters:
self._validate_tag_id_list(parameters["add_tags"], "add_tags")
else:
else:
raise serializers.ValidationError("remove_tags not specified")
- def _validate_parameters_modify_custom_fields(self, parameters):
+ def _validate_parameters_modify_custom_fields(self, parameters) -> None:
if "add_custom_fields" in parameters:
self._validate_custom_field_id_list_or_dict(
parameters["add_custom_fields"],
raise serializers.ValidationError("Specified owner cannot be found")
return ownerUser
- def _validate_parameters_set_permissions(self, parameters):
+ def _validate_parameters_set_permissions(self, parameters) -> None:
parameters["set_permissions"] = self.validate_set_permissions(
parameters["set_permissions"],
)
if "merge" not in parameters:
parameters["merge"] = False
- def _validate_parameters_rotate(self, parameters):
+ def _validate_parameters_rotate(self, parameters) -> None:
try:
if (
"degrees" not in parameters
except ValueError:
raise serializers.ValidationError("invalid rotation degrees")
- def _validate_parameters_split(self, parameters):
+ def _validate_parameters_split(self, parameters) -> None:
if "pages" not in parameters:
raise serializers.ValidationError("pages not specified")
try:
else:
parameters["delete_originals"] = False
- def _validate_parameters_delete_pages(self, parameters):
+ def _validate_parameters_delete_pages(self, parameters) -> None:
if "pages" not in parameters:
raise serializers.ValidationError("pages not specified")
if not isinstance(parameters["pages"], list):
if not all(isinstance(i, int) for i in parameters["pages"]):
raise serializers.ValidationError("pages must be a list of integers")
- def _validate_parameters_merge(self, parameters):
+ def _validate_parameters_merge(self, parameters) -> None:
if "delete_originals" in parameters:
if not isinstance(parameters["delete_originals"], bool):
raise serializers.ValidationError("delete_originals must be a boolean")
else:
parameters["archive_fallback"] = False
- def _validate_parameters_edit_pdf(self, parameters, document_id):
+ def _validate_parameters_edit_pdf(self, parameters, document_id) -> None:
if "operations" not in parameters:
raise serializers.ValidationError("operations not specified")
if not isinstance(parameters["operations"], list):
child=serializers.IntegerField(),
)
- def _validate_task_id_list(self, tasks, name="tasks"):
+ def _validate_task_id_list(self, tasks, name="tasks") -> None:
if not isinstance(tasks, list):
raise serializers.ValidationError(f"{name} must be a list")
if not all(isinstance(i, int) for i in tasks):
)
return objects
- def _validate_permissions(self, permissions):
+ def _validate_permissions(self, permissions) -> None:
self.validate_set_permissions(
permissions,
)
return attrs
@staticmethod
- def normalize_workflow_trigger_sources(trigger):
+ def normalize_workflow_trigger_sources(trigger) -> None:
"""
Convert sources to strings to handle django-multiselectfield v1.0 changes
"""
"actions",
]
- def update_triggers_and_actions(self, instance: Workflow, triggers, actions):
+ def update_triggers_and_actions(
+ self,
+ instance: Workflow,
+ triggers,
+ actions,
+ ) -> None:
set_triggers = []
set_actions = []
instance.actions.set(set_actions)
instance.save()
- def prune_triggers_and_actions(self):
+ def prune_triggers_and_actions(self) -> None:
"""
ManyToMany fields don't support e.g. on_delete, so we need to discard unattached
triggers and actions manually
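A plausible sketch of the manual pruning (the reverse accessor name `workflows` is an assumption):

    # Delete triggers/actions no longer referenced by any workflow
    WorkflowTrigger.objects.filter(workflows__isnull=True).delete()
    WorkflowAction.objects.filter(workflows__isnull=True).delete()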
logger = logging.getLogger("paperless.handlers")
-def add_inbox_tags(sender, document: Document, logging_group=None, **kwargs):
+def add_inbox_tags(sender, document: Document, logging_group=None, **kwargs) -> None:
if document.owner is not None:
tags = get_objects_for_user_owner_aware(
document.owner,
document: Document,
selected: MatchingModel,
base_url: str | None = None,
-):
+) -> None:
"""
Small helper to reduce duplication when printing suggestions to the console
"""
stdout=None,
style_func=None,
**kwargs,
-):
+) -> None:
if document.correspondent and not replace:
return
stdout=None,
style_func=None,
**kwargs,
-):
+) -> None:
if document.document_type and not replace:
return
stdout=None,
style_func=None,
**kwargs,
-):
+) -> None:
if replace:
Document.tags.through.objects.filter(document=document).exclude(
Q(tag__is_inbox_tag=True),
stdout=None,
style_func=None,
**kwargs,
-):
+) -> None:
if document.storage_path and not replace:
return
# see empty_trash in documents/tasks.py for signal handling
-def cleanup_document_deletion(sender, instance, **kwargs):
+def cleanup_document_deletion(sender, instance, **kwargs) -> None:
with FileLock(settings.MEDIA_LOCK):
if settings.EMPTY_TRASH_DIR:
# Find a non-conflicting filename in case a document with the same
sender,
instance: Document | CustomFieldInstance,
**kwargs,
-):
+) -> None:
if isinstance(instance, CustomFieldInstance):
if not _filename_template_uses_custom_fields(instance.document):
return
instance = instance.document
- def validate_move(instance, old_path: Path, new_path: Path, root: Path):
+ def validate_move(instance, old_path: Path, new_path: Path, root: Path) -> None:
if not new_path.is_relative_to(root):
msg = (
f"Document {instance!s}: Refusing to move file outside root {root}: "
@shared_task
-def process_cf_select_update(custom_field: CustomField):
+def process_cf_select_update(custom_field: CustomField) -> None:
"""
Update documents tied to a select custom field:
# should be disabled in /src/documents/management/commands/document_importer.py handle
@receiver(models.signals.post_save, sender=CustomField)
-def check_paths_and_prune_custom_fields(sender, instance: CustomField, **kwargs):
+def check_paths_and_prune_custom_fields(
+ sender,
+ instance: CustomField,
+ **kwargs,
+) -> None:
"""
When a custom field is updated, check if we need to update any documents. Done async to avoid slowing down the save operation.
"""
@receiver(models.signals.post_delete, sender=CustomField)
-def cleanup_custom_field_deletion(sender, instance: CustomField, **kwargs):
+def cleanup_custom_field_deletion(sender, instance: CustomField, **kwargs) -> None:
"""
When a custom field is deleted, ensure no saved views reference it.
"""
@receiver(models.signals.post_delete, sender=User)
@receiver(models.signals.post_delete, sender=Group)
-def cleanup_user_deletion(sender, instance: User | Group, **kwargs):
+def cleanup_user_deletion(sender, instance: User | Group, **kwargs) -> None:
"""
When a user or group is deleted, remove non-cascading references.
At the moment, just the default permission settings in UiSettings.
)
-def add_to_index(sender, document, **kwargs):
+def add_to_index(sender, document, **kwargs) -> None:
from documents import index
index.add_or_update_document(document)
logging_group=None,
original_file=None,
**kwargs,
-):
+) -> None:
run_workflows(
trigger_type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
document=document,
)
-def run_workflows_updated(sender, document: Document, logging_group=None, **kwargs):
+def run_workflows_updated(
+ sender,
+ document: Document,
+ logging_group=None,
+ **kwargs,
+) -> None:
run_workflows(
trigger_type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
document=document,
@before_task_publish.connect
-def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs):
+def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs) -> None:
"""
Creates the PaperlessTask object in a pending state. This is sent before
the task reaches the broker, and thus before it begins executing on a worker.
@task_prerun.connect
-def task_prerun_handler(sender=None, task_id=None, task=None, **kwargs):
+def task_prerun_handler(sender=None, task_id=None, task=None, **kwargs) -> None:
"""
Updates the PaperlessTask to be started. Sent before the task begins execution
retval=None,
state=None,
**kwargs,
-):
+) -> None:
"""
Updates the result of the PaperlessTask.
args=None,
traceback=None,
**kwargs,
-):
+) -> None:
"""
Updates the result of a failed PaperlessTask.
@worker_process_init.connect
-def close_connection_pool_on_worker_init(**kwargs):
+def close_connection_pool_on_worker_init(**kwargs) -> None:
"""
Close the DB connection pool for each Celery child process after it starts.
@shared_task
-def index_optimize():
+def index_optimize() -> None:
ix = index.open_index()
writer = AsyncWriter(ix)
writer.commit(optimize=True)
-def index_reindex(*, progress_bar_disable=False):
+def index_reindex(*, progress_bar_disable=False) -> None:
documents = Document.objects.all()
ix = index.open_index(recreate=True)
@shared_task
-def train_classifier(*, scheduled=True):
+def train_classifier(*, scheduled=True) -> None:
task = PaperlessTask.objects.create(
type=PaperlessTask.TaskType.SCHEDULED_TASK
if scheduled
@shared_task
-def bulk_update_documents(document_ids):
+def bulk_update_documents(document_ids) -> None:
documents = Document.objects.filter(id__in=document_ids)
ix = index.open_index()
@shared_task
-def update_document_content_maybe_archive_file(document_id):
+def update_document_content_maybe_archive_file(document_id) -> None:
"""
Re-creates the OCR content and thumbnail for a document, and the archive file if
one exists.
@shared_task
-def empty_trash(doc_ids=None):
+def empty_trash(doc_ids=None) -> None:
if doc_ids is None:
logger.info("Emptying trash of all expired documents")
documents = (
@shared_task
-def check_scheduled_workflows():
+def check_scheduled_workflows() -> None:
"""
Check and run all enabled scheduled workflows.
rebuild=False,
scheduled=True,
auto=False,
-):
+) -> None:
ai_config = AIConfig()
if ai_config.llm_index_enabled:
task = PaperlessTask.objects.create(
@shared_task
-def update_document_in_llm_index(document):
+def update_document_in_llm_index(document) -> None:
llm_index_add_or_update_document(document)
@shared_task
-def remove_document_from_llm_index(document):
+def remove_document_from_llm_index(document) -> None:
llm_index_remove_document(document)
@shared_task
-def build_share_link_bundle(bundle_id: int):
+def build_share_link_bundle(bundle_id: int) -> None:
try:
bundle = (
ShareLinkBundle.objects.filter(pk=bundle_id)
@shared_task
-def cleanup_expired_share_link_bundles():
+def cleanup_expired_share_link_bundles() -> None:
now = timezone.now()
expired_qs = ShareLinkBundle.objects.filter(
expiration__isnull=False,
super().setUp()
self.doc_admin = DocumentAdmin(model=Document, admin_site=AdminSite())
- def test_save_model(self):
+ def test_save_model(self) -> None:
doc = Document.objects.create(title="test")
doc.title = "new title"
self.assertEqual(Document.objects.get(id=doc.id).title, "new title")
self.assertEqual(self.get_document_from_index(doc)["id"], doc.id)
- def test_delete_model(self):
+ def test_delete_model(self) -> None:
doc = Document.objects.create(title="test")
index.add_or_update_document(doc)
self.assertIsNotNone(self.get_document_from_index(doc))
self.assertRaises(Document.DoesNotExist, Document.objects.get, id=doc.id)
self.assertIsNone(self.get_document_from_index(doc))
- def test_delete_queryset(self):
+ def test_delete_queryset(self) -> None:
docs = []
for i in range(42):
doc = Document.objects.create(
for doc in docs:
self.assertIsNone(self.get_document_from_index(doc))
- def test_created(self):
+ def test_created(self) -> None:
doc = Document.objects.create(
title="test",
created=timezone.make_aware(timezone.datetime(2020, 4, 12)),
super().setUp()
self.user_admin = PaperlessUserAdmin(model=User, admin_site=AdminSite())
- def test_request_is_passed_to_form(self):
+ def test_request_is_passed_to_form(self) -> None:
user = User.objects.create(username="test", is_superuser=False)
non_superuser = User.objects.create(username="requestuser")
request = types.SimpleNamespace(user=non_superuser)
form = formType(data={}, instance=user)
self.assertEqual(form.request, request)
- def test_only_superuser_can_change_superuser(self):
+ def test_only_superuser_can_change_superuser(self) -> None:
superuser = User.objects.create_superuser(username="superuser", password="test")
non_superuser = User.objects.create(username="requestuser")
user = User.objects.create(username="test", is_superuser=False)
self.assertTrue(form.is_valid())
self.assertEqual({}, form.errors)
- def test_superuser_can_only_be_modified_by_superuser(self):
+ def test_superuser_can_only_be_modified_by_superuser(self) -> None:
superuser = User.objects.create_superuser(username="superuser", password="test")
user = User.objects.create(
username="test",
user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=user)
- def test_api_get_config(self):
+ def test_api_get_config(self) -> None:
"""
GIVEN:
- API request to get app config
},
)
- def test_api_get_ui_settings_with_config(self):
+ def test_api_get_ui_settings_with_config(self) -> None:
"""
GIVEN:
- Existing config with app_title, app_logo specified
| response.data["settings"],
)
- def test_api_update_config(self):
+ def test_api_update_config(self) -> None:
"""
GIVEN:
- API request to update app config
config = ApplicationConfiguration.objects.first()
self.assertEqual(config.color_conversion_strategy, ColorConvertChoices.RGB)
- def test_api_update_config_empty_fields(self):
+ def test_api_update_config_empty_fields(self) -> None:
"""
GIVEN:
- API request to update app config with empty string for user_args JSONField and language field
self.assertEqual(config.language, None)
self.assertEqual(config.barcode_tag_mapping, None)
- def test_api_replace_app_logo(self):
+ def test_api_replace_app_logo(self) -> None:
"""
GIVEN:
- Existing config with app_logo specified
)
self.assertFalse(Path(old_logo.path).exists())
- def test_api_rejects_malicious_svg_logo(self):
+ def test_api_rejects_malicious_svg_logo(self) -> None:
"""
GIVEN:
- An SVG logo containing a <script> tag
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("disallowed svg tag", str(response.data).lower())
- def test_api_rejects_malicious_svg_with_style_javascript(self):
+ def test_api_rejects_malicious_svg_with_style_javascript(self) -> None:
"""
GIVEN:
- An SVG logo containing javascript: in style attribute
)
self.assertIn("style", str(response.data).lower())
- def test_api_rejects_svg_with_style_expression(self):
+ def test_api_rejects_svg_with_style_expression(self) -> None:
"""
GIVEN:
- An SVG logo containing CSS expression() in style
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("disallowed", str(response.data).lower())
- def test_api_rejects_svg_with_style_cdata_javascript(self):
+ def test_api_rejects_svg_with_style_cdata_javascript(self) -> None:
"""
GIVEN:
- An SVG logo with javascript: hidden in a CDATA style block
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("disallowed", str(response.data).lower())
- def test_api_rejects_svg_with_style_import(self):
+ def test_api_rejects_svg_with_style_import(self) -> None:
"""
GIVEN:
- An SVG logo containing @import in style
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("disallowed", str(response.data).lower())
- def test_api_accepts_valid_svg_with_safe_style(self):
+ def test_api_accepts_valid_svg_with_safe_style(self) -> None:
"""
GIVEN:
- A valid SVG logo with safe style attributes
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
- def test_api_accepts_valid_svg_with_safe_style_tag(self):
+ def test_api_accepts_valid_svg_with_safe_style_tag(self) -> None:
"""
GIVEN:
- A valid SVG logo with an embedded <style> tag
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
- def test_api_rejects_svg_with_disallowed_attribute(self):
+ def test_api_rejects_svg_with_disallowed_attribute(self) -> None:
"""
GIVEN:
- An SVG with a disallowed attribute (onclick)
self.assertIn("disallowed", str(response.data).lower())
self.assertIn("attribute", str(response.data).lower())
- def test_api_rejects_svg_with_disallowed_tag(self):
+ def test_api_rejects_svg_with_disallowed_tag(self) -> None:
"""
GIVEN:
- An SVG with a disallowed tag (script)
self.assertIn("disallowed", str(response.data).lower())
self.assertIn("tag", str(response.data).lower())
- def test_api_rejects_svg_with_javascript_href(self):
+ def test_api_rejects_svg_with_javascript_href(self) -> None:
"""
GIVEN:
- An SVG with javascript: in href attribute
self.assertIn("disallowed", str(response.data).lower())
self.assertIn("javascript", str(response.data).lower())
- def test_api_rejects_svg_with_javascript_xlink_href(self):
+ def test_api_rejects_svg_with_javascript_xlink_href(self) -> None:
"""
GIVEN:
- An SVG with javascript: in xlink:href attribute
self.assertIn("disallowed", str(response.data).lower())
self.assertIn("javascript", str(response.data).lower())
- def test_api_rejects_svg_with_data_text_html_href(self):
+ def test_api_rejects_svg_with_data_text_html_href(self) -> None:
"""
GIVEN:
- An SVG with data:text/html in href attribute
# This will now catch "Disallowed URI scheme"
self.assertIn("disallowed", str(response.data).lower())
- def test_api_rejects_svg_with_unknown_namespace_attribute(self):
+ def test_api_rejects_svg_with_unknown_namespace_attribute(self) -> None:
"""
GIVEN:
- An SVG with an attribute in an unknown/custom namespace
# Check for the error message raised by the safe_prefixes check
self.assertIn("uri scheme not allowed", str(response.data).lower())
- def test_create_not_allowed(self):
+ def test_create_not_allowed(self) -> None:
"""
GIVEN:
- API request to create a new app config
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
self.assertEqual(ApplicationConfiguration.objects.count(), 1)
- def test_update_llm_api_key(self):
+ def test_update_llm_api_key(self) -> None:
"""
GIVEN:
- Existing config with llm_api_key specified
config.refresh_from_db()
self.assertEqual(config.llm_api_key, None)
- def test_enable_ai_index_triggers_update(self):
+ def test_enable_ai_index_triggers_update(self) -> None:
"""
GIVEN:
- Existing config with AI disabled
class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
ENDPOINT = "/api/documents/bulk_download/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
shutil.copy(self.SAMPLE_DIR / "simple.jpg", self.doc3.source_path)
shutil.copy(self.SAMPLE_DIR / "test_with_bom.pdf", self.doc3.archive_path)
- def test_download_originals(self):
+ def test_download_originals(self) -> None:
response = self.client.post(
self.ENDPOINT,
json.dumps(
with self.doc3.source_file as f:
self.assertEqual(f.read(), zipf.read("2020-03-21 document B.jpg"))
- def test_download_default(self):
+ def test_download_default(self) -> None:
response = self.client.post(
self.ENDPOINT,
json.dumps({"documents": [self.doc2.id, self.doc3.id]}),
with self.doc3.archive_file as f:
self.assertEqual(f.read(), zipf.read("2020-03-21 document B.pdf"))
- def test_download_both(self):
+ def test_download_both(self) -> None:
response = self.client.post(
self.ENDPOINT,
json.dumps({"documents": [self.doc2.id, self.doc3.id], "content": "both"}),
zipf.read("originals/2020-03-21 document B.jpg"),
)
- def test_filename_clashes(self):
+ def test_filename_clashes(self) -> None:
response = self.client.post(
self.ENDPOINT,
json.dumps({"documents": [self.doc2.id, self.doc2b.id]}),
with self.doc2b.source_file as f:
self.assertEqual(f.read(), zipf.read("2021-01-01 document A_01.pdf"))
- def test_compression(self):
+ def test_compression(self) -> None:
self.client.post(
self.ENDPOINT,
json.dumps(
)
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
- def test_formatted_download_originals(self):
+ def test_formatted_download_originals(self) -> None:
"""
GIVEN:
- Defined file naming format
)
@override_settings(FILENAME_FORMAT="somewhere/{title}")
- def test_formatted_download_archive(self):
+ def test_formatted_download_archive(self) -> None:
"""
GIVEN:
- Defined file naming format
self.assertEqual(f.read(), zipf.read("somewhere/Title 2 - Doc 3.pdf"))
@override_settings(FILENAME_FORMAT="{document_type}/{title}")
- def test_formatted_download_both(self):
+ def test_formatted_download_both(self) -> None:
"""
GIVEN:
- Defined file naming format
zipf.read("originals/statement/Title 2 - Doc 3.jpg"),
)
- def test_download_insufficient_permissions(self):
+ def test_download_insufficient_permissions(self) -> None:
user = User.objects.create_user(username="temp_user")
self.client.force_authenticate(user=user)
class TestBulkEditAPI(DirectoriesMixin, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
user = User.objects.create_superuser(username="temp_admin")
self.cf1 = CustomField.objects.create(name="cf1", data_type="string")
self.cf2 = CustomField.objects.create(name="cf2", data_type="string")
- def setup_mock(self, m, method_name, return_value="OK"):
+ def setup_mock(self, m, method_name, return_value="OK") -> None:
m.return_value = return_value
m.__name__ = method_name
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
- def test_api_set_correspondent(self, bulk_update_task_mock):
+ def test_api_set_correspondent(self, bulk_update_task_mock) -> None:
self.assertNotEqual(self.doc1.correspondent, self.c1)
response = self.client.post(
"/api/documents/bulk_edit/",
bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
- def test_api_unset_correspondent(self, bulk_update_task_mock):
+ def test_api_unset_correspondent(self, bulk_update_task_mock) -> None:
self.doc1.correspondent = self.c1
self.doc1.save()
self.assertIsNotNone(self.doc1.correspondent)
self.assertIsNone(self.doc1.correspondent)
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
- def test_api_set_type(self, bulk_update_task_mock):
+ def test_api_set_type(self, bulk_update_task_mock) -> None:
self.assertNotEqual(self.doc1.document_type, self.dt1)
response = self.client.post(
"/api/documents/bulk_edit/",
bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
- def test_api_unset_type(self, bulk_update_task_mock):
+ def test_api_unset_type(self, bulk_update_task_mock) -> None:
self.doc1.document_type = self.dt1
self.doc1.save()
bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
- def test_api_add_tag(self, bulk_update_task_mock):
+ def test_api_add_tag(self, bulk_update_task_mock) -> None:
self.assertFalse(self.doc1.tags.filter(pk=self.t1.pk).exists())
response = self.client.post(
bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
- def test_api_remove_tag(self, bulk_update_task_mock):
+ def test_api_remove_tag(self, bulk_update_task_mock) -> None:
self.doc1.tags.add(self.t1)
response = self.client.post(
self.assertFalse(self.doc1.tags.filter(pk=self.t1.pk).exists())
@mock.patch("documents.serialisers.bulk_edit.modify_tags")
- def test_api_modify_tags(self, m):
+ def test_api_modify_tags(self, m) -> None:
self.setup_mock(m, "modify_tags")
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(kwargs["remove_tags"], [self.t2.id])
@mock.patch("documents.serialisers.bulk_edit.modify_tags")
- def test_api_modify_tags_not_provided(self, m):
+ def test_api_modify_tags_not_provided(self, m) -> None:
"""
GIVEN:
- API data to modify tags is missing remove_tags field
m.assert_not_called()
@mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
- def test_api_modify_custom_fields(self, m):
+ def test_api_modify_custom_fields(self, m) -> None:
self.setup_mock(m, "modify_custom_fields")
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(kwargs["remove_custom_fields"], [self.cf2.id])
@mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
- def test_api_modify_custom_fields_with_values(self, m):
+ def test_api_modify_custom_fields_with_values(self, m) -> None:
self.setup_mock(m, "modify_custom_fields")
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(kwargs["remove_custom_fields"], [self.cf2.id])
@mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
- def test_api_modify_custom_fields_invalid_params(self, m):
+ def test_api_modify_custom_fields_invalid_params(self, m) -> None:
"""
GIVEN:
- API data to modify custom fields is malformed
m.assert_not_called()
@mock.patch("documents.serialisers.bulk_edit.delete")
- def test_api_delete(self, m):
+ def test_api_delete(self, m) -> None:
self.setup_mock(m, "delete")
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(len(kwargs), 0)
@mock.patch("documents.serialisers.bulk_edit.set_storage_path")
- def test_api_set_storage_path(self, m):
+ def test_api_set_storage_path(self, m) -> None:
"""
GIVEN:
- API data to set the storage path of a document
self.assertEqual(kwargs["storage_path"], self.sp1.id)
@mock.patch("documents.serialisers.bulk_edit.set_storage_path")
- def test_api_unset_storage_path(self, m):
+ def test_api_unset_storage_path(self, m) -> None:
"""
GIVEN:
- API data to clear/unset the storage path of a document
self.assertListEqual(args[0], [self.doc1.id])
self.assertEqual(kwargs["storage_path"], None)
- def test_api_invalid_storage_path(self):
+ def test_api_invalid_storage_path(self) -> None:
"""
GIVEN:
- API data to set the storage path of a document
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.async_task.assert_not_called()
- def test_api_set_storage_path_not_provided(self):
+ def test_api_set_storage_path_not_provided(self) -> None:
"""
GIVEN:
- API data to set the storage path of a document
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.async_task.assert_not_called()
- def test_api_invalid_doc(self):
+ def test_api_invalid_doc(self) -> None:
self.assertEqual(Document.objects.count(), 5)
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(Document.objects.count(), 5)
- def test_api_invalid_method(self):
+ def test_api_invalid_method(self) -> None:
self.assertEqual(Document.objects.count(), 5)
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(Document.objects.count(), 5)
- def test_api_invalid_correspondent(self):
+ def test_api_invalid_correspondent(self) -> None:
self.assertEqual(self.doc2.correspondent, self.c1)
response = self.client.post(
"/api/documents/bulk_edit/",
doc2 = Document.objects.get(id=self.doc2.id)
self.assertEqual(doc2.correspondent, self.c1)
- def test_api_no_correspondent(self):
+ def test_api_no_correspondent(self) -> None:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_api_invalid_document_type(self):
+ def test_api_invalid_document_type(self) -> None:
self.assertEqual(self.doc2.document_type, self.dt1)
response = self.client.post(
"/api/documents/bulk_edit/",
doc2 = Document.objects.get(id=self.doc2.id)
self.assertEqual(doc2.document_type, self.dt1)
- def test_api_no_document_type(self):
+ def test_api_no_document_type(self) -> None:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_api_add_invalid_tag(self):
+ def test_api_add_invalid_tag(self) -> None:
self.assertEqual(list(self.doc2.tags.all()), [self.t1])
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(list(self.doc2.tags.all()), [self.t1])
- def test_api_add_tag_no_tag(self):
+ def test_api_add_tag_no_tag(self) -> None:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_api_delete_invalid_tag(self):
+ def test_api_delete_invalid_tag(self) -> None:
self.assertEqual(list(self.doc2.tags.all()), [self.t1])
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(list(self.doc2.tags.all()), [self.t1])
- def test_api_delete_tag_no_tag(self):
+ def test_api_delete_tag_no_tag(self) -> None:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_api_modify_invalid_tags(self):
+ def test_api_modify_invalid_tags(self) -> None:
self.assertEqual(list(self.doc2.tags.all()), [self.t1])
response = self.client.post(
"/api/documents/bulk_edit/",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_api_modify_tags_no_tags(self):
+ def test_api_modify_tags_no_tags(self) -> None:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_api_selection_data_empty(self):
+ def test_api_selection_data_empty(self) -> None:
response = self.client.post(
"/api/documents/selection_data/",
json.dumps({"documents": []}),
map(lambda c: c["id"], Entity.objects.values("id")),
)
- def test_api_selection_data(self):
+ def test_api_selection_data(self) -> None:
response = self.client.post(
"/api/documents/selection_data/",
json.dumps(
)
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
- def test_set_permissions(self, m):
+ def test_set_permissions(self, m) -> None:
self.setup_mock(m, "set_permissions")
user1 = User.objects.create(username="user1")
user2 = User.objects.create(username="user2")
self.assertEqual(len(kwargs["set_permissions"]["view"]["users"]), 2)
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
- def test_set_permissions_merge(self, m):
+ def test_set_permissions_merge(self, m) -> None:
self.setup_mock(m, "set_permissions")
user1 = User.objects.create(username="user1")
user2 = User.objects.create(username="user2")
@mock.patch("documents.serialisers.bulk_edit.set_storage_path")
@mock.patch("documents.serialisers.bulk_edit.merge")
- def test_insufficient_global_perms(self, mock_merge, mock_set_storage):
+ def test_insufficient_global_perms(self, mock_merge, mock_set_storage) -> None:
"""
GIVEN:
- User has no global permissions to change a document
mock_merge.assert_not_called()
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
- def test_insufficient_permissions_ownership(self, m):
+ def test_insufficient_permissions_ownership(self, m) -> None:
"""
GIVEN:
- Documents owned by user other than logged in user
m.assert_called_once()
@mock.patch("documents.serialisers.bulk_edit.set_storage_path")
- def test_insufficient_permissions_edit(self, m):
+ def test_insufficient_permissions_edit(self, m) -> None:
"""
GIVEN:
- Documents for which current user only has view permissions
m.assert_called_once()
@mock.patch("documents.serialisers.bulk_edit.rotate")
- def test_rotate(self, m):
+ def test_rotate(self, m) -> None:
self.setup_mock(m, "rotate")
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(kwargs["degrees"], 90)
@mock.patch("documents.serialisers.bulk_edit.rotate")
- def test_rotate_invalid_params(self, m):
+ def test_rotate_invalid_params(self, m) -> None:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
m.assert_not_called()
@mock.patch("documents.serialisers.bulk_edit.merge")
- def test_merge(self, m):
+ def test_merge(self, m) -> None:
self.setup_mock(m, "merge")
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(kwargs["user"], self.user)
@mock.patch("documents.serialisers.bulk_edit.merge")
- def test_merge_and_delete_insufficient_permissions(self, m):
+ def test_merge_and_delete_insufficient_permissions(self, m) -> None:
self.doc1.owner = User.objects.get(username="temp_admin")
self.doc1.save()
user1 = User.objects.create(username="user1")
m.assert_called_once()
@mock.patch("documents.serialisers.bulk_edit.merge")
- def test_merge_invalid_parameters(self, m):
+ def test_merge_invalid_parameters(self, m) -> None:
"""
GIVEN:
- API data for merging documents is called
m.assert_not_called()
@mock.patch("documents.serialisers.bulk_edit.split")
- def test_split(self, m):
+ def test_split(self, m) -> None:
self.setup_mock(m, "split")
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(kwargs["pages"], [[1], [2, 3, 4], [5, 6], [7]])
self.assertEqual(kwargs["user"], self.user)
- def test_split_invalid_params(self):
+ def test_split_invalid_params(self) -> None:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
self.assertIn(b"delete_originals must be a boolean", response.content)
@mock.patch("documents.serialisers.bulk_edit.delete_pages")
- def test_delete_pages(self, m):
+ def test_delete_pages(self, m) -> None:
self.setup_mock(m, "delete_pages")
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertCountEqual(args[0], [self.doc2.id])
self.assertEqual(kwargs["pages"], [1, 2, 3, 4])
- def test_delete_pages_invalid_params(self):
+ def test_delete_pages_invalid_params(self) -> None:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
self.assertIn(b"pages must be a list of integers", response.content)
@mock.patch("documents.serialisers.bulk_edit.edit_pdf")
- def test_edit_pdf(self, m):
+ def test_edit_pdf(self, m) -> None:
self.setup_mock(m, "edit_pdf")
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertEqual(kwargs["operations"], [{"page": 1}])
self.assertEqual(kwargs["user"], self.user)
- def test_edit_pdf_invalid_params(self):
+ def test_edit_pdf_invalid_params(self) -> None:
# multiple documents
response = self.client.post(
"/api/documents/bulk_edit/",
)
@mock.patch("documents.serialisers.bulk_edit.edit_pdf")
- def test_edit_pdf_page_out_of_bounds(self, m):
+ def test_edit_pdf_page_out_of_bounds(self, m) -> None:
"""
GIVEN:
- API data for editing PDF is called
self.assertIn(b"out of bounds", response.content)
@mock.patch("documents.serialisers.bulk_edit.remove_password")
- def test_remove_password(self, m):
+ def test_remove_password(self, m) -> None:
self.setup_mock(m, "remove_password")
response = self.client.post(
"/api/documents/bulk_edit/",
self.assertTrue(kwargs["update_document"])
self.assertEqual(kwargs["user"], self.user)
- def test_remove_password_invalid_params(self):
+ def test_remove_password_invalid_params(self) -> None:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
self.assertIn(b"password must be a string", response.content)
@override_settings(AUDIT_LOG_ENABLED=True)
- def test_bulk_edit_audit_log_enabled_simple_field(self):
+ def test_bulk_edit_audit_log_enabled_simple_field(self) -> None:
"""
GIVEN:
- Audit log is enabled
self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
@override_settings(AUDIT_LOG_ENABLED=True)
- def test_bulk_edit_audit_log_enabled_tags(self):
+ def test_bulk_edit_audit_log_enabled_tags(self) -> None:
"""
GIVEN:
- Audit log is enabled
self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
@override_settings(AUDIT_LOG_ENABLED=True)
- def test_bulk_edit_audit_log_enabled_custom_fields(self):
+ def test_bulk_edit_audit_log_enabled_custom_fields(self) -> None:
"""
GIVEN:
- Audit log is enabled
class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/custom_fields/"
- def setUp(self):
+ def setUp(self) -> None:
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
return super().setUp()
- def test_create_custom_field(self):
+ def test_create_custom_field(self) -> None:
"""
GIVEN:
- Each of the supported data types is created
],
)
- def test_create_custom_field_nonunique_name(self):
+ def test_create_custom_field_nonunique_name(self) -> None:
"""
GIVEN:
- Custom field exists
)
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
- def test_create_custom_field_select_invalid_options(self):
+ def test_create_custom_field_select_invalid_options(self) -> None:
"""
GIVEN:
- Custom field does not exist
)
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
- def test_custom_field_select_unique_ids(self):
+ def test_custom_field_select_unique_ids(self) -> None:
"""
GIVEN:
- Existing custom field
)
@override_settings(CELERY_TASK_ALWAYS_EAGER=True)
- def test_custom_field_select_options_pruned(self):
+ def test_custom_field_select_options_pruned(self) -> None:
"""
GIVEN:
- Select custom field exists and document instance with one of the options
self.assertEqual(doc.custom_fields.first().value, None)
@mock.patch("documents.signals.handlers.process_cf_select_update.delay")
- def test_custom_field_update_offloaded_once(self, mock_delay):
+ def test_custom_field_update_offloaded_once(self, mock_delay) -> None:
"""
GIVEN:
- A select custom field attached to multiple documents
mock_delay.assert_called_once_with(cf_select)
- def test_custom_field_select_old_version(self):
+ def test_custom_field_select_old_version(self) -> None:
"""
GIVEN:
- Nothing
],
)
- def test_custom_field_select_value_old_version(self):
+ def test_custom_field_select_value_old_version(self) -> None:
"""
GIVEN:
- Existing document with custom field select
data = resp.json()
self.assertEqual(data["custom_fields"][0]["value"], 1)
- def test_create_custom_field_monetary_validation(self):
+ def test_create_custom_field_monetary_validation(self) -> None:
"""
GIVEN:
- Custom field does not exist
)
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
- def test_create_custom_field_instance(self):
+ def test_create_custom_field_instance(self) -> None:
"""
GIVEN:
- Field of each data type is created
doc.refresh_from_db()
self.assertEqual(len(doc.custom_fields.all()), 10)
- def test_change_custom_field_instance_value(self):
+ def test_change_custom_field_instance_value(self) -> None:
"""
GIVEN:
- Custom field instance is created and attached to document
self.assertEqual(CustomFieldInstance.objects.count(), 1)
self.assertEqual(doc.custom_fields.first().value, "a new test value")
- def test_delete_custom_field_instance(self):
+ def test_delete_custom_field_instance(self) -> None:
"""
GIVEN:
- Multiple custom field instances are created and attached to document
self.assertEqual(len(doc.custom_fields.all()), 1)
self.assertEqual(doc.custom_fields.first().value, date_value)
- def test_custom_field_validation(self):
+ def test_custom_field_validation(self) -> None:
"""
GIVEN:
- Document exists with no fields
self.assertEqual(CustomFieldInstance.objects.count(), 0)
self.assertEqual(len(doc.custom_fields.all()), 0)
- def test_custom_field_value_url_validation(self):
+ def test_custom_field_value_url_validation(self) -> None:
"""
GIVEN:
- Document & custom field exist
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- def test_custom_field_value_integer_validation(self):
+ def test_custom_field_value_integer_validation(self) -> None:
"""
GIVEN:
- Document & custom field exist
self.assertEqual(CustomFieldInstance.objects.count(), 0)
self.assertEqual(len(doc.custom_fields.all()), 0)
- def test_custom_field_value_monetary_validation(self):
+ def test_custom_field_value_monetary_validation(self) -> None:
"""
GIVEN:
- Document & custom field exist
self.assertEqual(CustomFieldInstance.objects.count(), 0)
self.assertEqual(len(doc.custom_fields.all()), 0)
- def test_custom_field_value_short_text_validation(self):
+ def test_custom_field_value_short_text_validation(self) -> None:
"""
GIVEN:
- Document & custom field exist
self.assertEqual(CustomFieldInstance.objects.count(), 0)
self.assertEqual(len(doc.custom_fields.all()), 0)
- def test_custom_field_value_select_validation(self):
+ def test_custom_field_value_select_validation(self) -> None:
"""
GIVEN:
- Document & custom field exist
self.assertEqual(CustomFieldInstance.objects.count(), 0)
self.assertEqual(len(doc.custom_fields.all()), 0)
- def test_custom_field_value_documentlink_validation(self):
+ def test_custom_field_value_documentlink_validation(self) -> None:
"""
GIVEN:
- Document & custom field exist
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(CustomFieldInstance.objects.count(), 0)
- def test_custom_field_not_null(self):
+ def test_custom_field_not_null(self) -> None:
"""
GIVEN:
- Existing document
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
- def test_symmetric_doclink_fields(self):
+ def test_symmetric_doclink_fields(self) -> None:
"""
GIVEN:
- Existing document
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(doc5.custom_fields.first().value, [1])
- def test_custom_field_filters(self):
+ def test_custom_field_filters(self) -> None:
custom_field_string = CustomField.objects.create(
name="Test Custom Field String",
data_type=CustomField.FieldDataType.STRING,
self.assertEqual(len(results), 1)
self.assertEqual(results[0]["name"], custom_field_int.name)
- def test_custom_fields_document_count(self):
+ def test_custom_fields_document_count(self) -> None:
custom_field_string = CustomField.objects.create(
name="Test Custom Field String",
data_type=CustomField.FieldDataType.STRING,
class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
cache.clear()
- def testDocuments(self):
+ def testDocuments(self) -> None:
response = self.client.get("/api/documents/").data
self.assertEqual(response["count"], 0)
self.assertEqual(len(Document.objects.all()), 0)
- def test_document_fields(self):
+ def test_document_fields(self) -> None:
c = Correspondent.objects.create(name="c", pk=41)
dt = DocumentType.objects.create(name="dt", pk=63)
Tag.objects.create(name="t", pk=85)
results = response.data["results"]
self.assertEqual(len(results[0]), 0)
- def test_document_fields_api_version_8_respects_created(self):
+ def test_document_fields_api_version_8_respects_created(self) -> None:
Document.objects.create(
title="legacy",
checksum="123",
self.assertIn("created", results[0])
self.assertRegex(results[0]["created"], r"^2024-01-15T00:00:00.*$")
- def test_document_legacy_created_format(self):
+ def test_document_legacy_created_format(self) -> None:
"""
GIVEN:
- Existing document
doc.refresh_from_db()
self.assertEqual(doc.created, date(2023, 6, 28))
- def test_document_update_legacy_created_format(self):
+ def test_document_update_legacy_created_format(self) -> None:
"""
GIVEN:
- Existing document
doc.refresh_from_db()
self.assertEqual(doc.created, date(2023, 2, 1))
- def test_document_update_with_created_date(self):
+ def test_document_update_with_created_date(self) -> None:
"""
GIVEN:
- Existing document
doc.refresh_from_db()
self.assertEqual(doc.created_date, created_date)
- def test_document_actions(self):
+ def test_document_actions(self) -> None:
_, filename = tempfile.mkstemp(dir=self.dirs.originals_dir)
content = b"This is a test"
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.content, content_thumbnail)
- def test_document_actions_with_perms(self):
+ def test_document_actions_with_perms(self) -> None:
"""
GIVEN:
- Document with owner and without granted permissions
self.assertEqual(response.status_code, status.HTTP_200_OK)
@override_settings(FILENAME_FORMAT="")
- def test_download_with_archive(self):
+ def test_download_with_archive(self) -> None:
content = b"This is a test"
content_archive = b"This is the same test but archived"
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.content, content)
- def test_document_actions_not_existing_file(self):
+ def test_document_actions_not_existing_file(self) -> None:
doc = Document.objects.create(
title="none",
filename=Path("asd").name,
response = self.client.get(f"/api/documents/{doc.pk}/thumb/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
- def test_document_history_action(self):
+ def test_document_history_action(self) -> None:
"""
GIVEN:
- Document
{"title": ["First title", "New title"]},
)
- def test_document_history_action_w_custom_fields(self):
+ def test_document_history_action_w_custom_fields(self) -> None:
"""
GIVEN:
- Document with custom fields
self.assertEqual(response.data[1]["action"], "create")
@override_settings(AUDIT_LOG_ENABLED=False)
- def test_document_history_action_disabled(self):
+ def test_document_history_action_disabled(self) -> None:
"""
GIVEN:
- Audit log is disabled
response = self.client.get(f"/api/documents/{doc.pk}/history/")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_document_history_insufficient_perms(self):
+ def test_document_history_insufficient_perms(self) -> None:
"""
GIVEN:
- Audit log is enabled
response = self.client.get(f"/api/documents/{doc2.pk}/history/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
- def test_document_filters(self):
+ def test_document_filters(self) -> None:
doc1 = Document.objects.create(
title="none1",
checksum="A",
results = response.data["results"]
self.assertEqual(len(results), 3)
- def test_custom_field_select_filter(self):
+ def test_custom_field_select_filter(self) -> None:
"""
GIVEN:
- Documents with select custom field values
self.assertEqual(r.status_code, status.HTTP_200_OK)
self.assertEqual(r.data["count"], 0)
- def test_document_checksum_filter(self):
+ def test_document_checksum_filter(self) -> None:
Document.objects.create(
title="none1",
checksum="A",
results = response.data["results"]
self.assertEqual(len(results), 0)
- def test_document_original_filename_filter(self):
+ def test_document_original_filename_filter(self) -> None:
doc1 = Document.objects.create(
title="none1",
checksum="A",
[doc1.id, doc2.id, doc3.id],
)
- def test_documents_title_content_filter(self):
+ def test_documents_title_content_filter(self) -> None:
doc1 = Document.objects.create(
title="title A",
content="content A",
results = response.data["results"]
self.assertEqual(len(results), 0)
- def test_documents_title_content_filter_strips_boundary_whitespace(self):
+ def test_documents_title_content_filter_strips_boundary_whitespace(self) -> None:
doc = Document.objects.create(
title="Testwort",
content="",
self.assertEqual(len(results), 1)
self.assertEqual(results[0]["id"], doc.id)
- def test_document_permissions_filters(self):
+ def test_document_permissions_filters(self) -> None:
"""
GIVEN:
- Documents with owners, with and without granted permissions
[u1_doc1.id],
)
- def test_pagination_all(self):
+ def test_pagination_all(self) -> None:
"""
GIVEN:
- A set of 50 documents
self.assertEqual(len(response.data["all"]), 50)
self.assertCountEqual(response.data["all"], [d.id for d in docs])
- def test_statistics(self):
+ def test_statistics(self) -> None:
doc1 = Document.objects.create(
title="none1",
checksum="A",
self.assertEqual(response.data["document_type_count"], 1)
self.assertEqual(response.data["storage_path_count"], 2)
- def test_statistics_no_inbox_tag(self):
+ def test_statistics_no_inbox_tag(self) -> None:
Document.objects.create(title="none1", checksum="A")
response = self.client.get("/api/statistics/")
self.assertEqual(response.data["documents_inbox"], None)
self.assertEqual(response.data["inbox_tags"], None)
- def test_statistics_multiple_users(self):
+ def test_statistics_multiple_users(self) -> None:
"""
GIVEN:
- Inbox tags with different owners and documents that are accessible to different users
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["documents_inbox"], 0)
- def test_upload(self):
+ def test_upload(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertIsNone(overrides.document_type_id)
self.assertIsNone(overrides.tag_ids)
- def test_create_wrong_endpoint(self):
+ def test_create_wrong_endpoint(self) -> None:
response = self.client.post(
"/api/documents/",
{},
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
- def test_upload_insufficient_permissions(self):
+ def test_upload_insufficient_permissions(self) -> None:
self.client.force_authenticate(user=User.objects.create_user("testuser2"))
with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
- def test_upload_empty_metadata(self):
+ def test_upload_empty_metadata(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertIsNone(overrides.storage_path_id)
self.assertIsNone(overrides.tag_ids)
- def test_upload_invalid_form(self):
+ def test_upload_invalid_form(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.consume_file_mock.assert_not_called()
- def test_upload_invalid_file(self):
+ def test_upload_invalid_file(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.consume_file_mock.assert_not_called()
- def test_upload_with_title(self):
+ def test_upload_with_title(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertIsNone(overrides.document_type_id)
self.assertIsNone(overrides.tag_ids)
- def test_upload_with_correspondent(self):
+ def test_upload_with_correspondent(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertIsNone(overrides.document_type_id)
self.assertIsNone(overrides.tag_ids)
- def test_upload_with_invalid_correspondent(self):
+ def test_upload_with_invalid_correspondent(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.consume_file_mock.assert_not_called()
- def test_upload_with_document_type(self):
+ def test_upload_with_document_type(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertIsNone(overrides.title)
self.assertIsNone(overrides.tag_ids)
- def test_upload_with_invalid_document_type(self):
+ def test_upload_with_invalid_document_type(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.consume_file_mock.assert_not_called()
- def test_upload_with_storage_path(self):
+ def test_upload_with_storage_path(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertIsNone(overrides.title)
self.assertIsNone(overrides.tag_ids)
- def test_upload_with_invalid_storage_path(self):
+ def test_upload_with_invalid_storage_path(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.consume_file_mock.assert_not_called()
- def test_upload_with_tags(self):
+ def test_upload_with_tags(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertIsNone(overrides.correspondent_id)
self.assertIsNone(overrides.title)
- def test_upload_with_invalid_tags(self):
+ def test_upload_with_invalid_tags(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.consume_file_mock.assert_not_called()
- def test_upload_with_created(self):
+ def test_upload_with_created(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertEqual(overrides.created, created.date())
- def test_upload_with_asn(self):
+ def test_upload_with_asn(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertIsNone(overrides.tag_ids)
self.assertEqual(500, overrides.asn)
- def test_upload_with_custom_fields(self):
+ def test_upload_with_custom_fields(self) -> None:
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
self.assertEqual(overrides.filename, "simple.pdf")
self.assertEqual(overrides.custom_fields, {custom_field.id: None})
- def test_upload_with_custom_fields_and_workflow(self):
+ def test_upload_with_custom_fields_and_workflow(self) -> None:
"""
GIVEN: A document with a source file
WHEN: Upload the document with custom fields and a workflow
overrides.update(new_overrides)
self.assertEqual(overrides.custom_fields, {cf.id: None, cf2.id: 123})
- def test_upload_with_custom_field_values(self):
+ def test_upload_with_custom_field_values(self) -> None:
"""
GIVEN: A document with a source file
WHEN: Upload the document with custom fields and values
{cf_string.id: "a string", cf_int.id: 123},
)
- def test_upload_with_custom_fields_errors(self):
+ def test_upload_with_custom_fields_errors(self) -> None:
"""
GIVEN: A document with a source file
WHEN: Upload the document with invalid custom fields payloads
self.consume_file_mock.assert_not_called()
- def test_patch_document_integer_custom_field_out_of_range(self):
+ def test_patch_document_integer_custom_field_out_of_range(self) -> None:
"""
GIVEN:
- An integer custom field
self.assertIn("custom_fields", response.data)
self.assertEqual(CustomFieldInstance.objects.count(), 0)
- def test_upload_with_webui_source(self):
+ def test_upload_with_webui_source(self) -> None:
"""
GIVEN: A document with a source file
WHEN: Upload the document with 'from_webui' flag
self.assertEqual(input_doc.source, WorkflowTrigger.DocumentSourceChoices.WEB_UI)
- def test_upload_invalid_pdf(self):
+ def test_upload_invalid_pdf(self) -> None:
"""
GIVEN: Invalid PDF named "*.pdf" whose mime_type is in settings.CONSUMER_PDF_RECOVERABLE_MIME_TYPES
WHEN: Upload the file
self.assertEqual(response.status_code, status.HTTP_200_OK)
- def test_get_metadata(self):
+ def test_get_metadata(self) -> None:
doc = Document.objects.create(
title="test",
filename="file.pdf",
response = self.client.get(f"/api/documents/{doc.pk}/metadata/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
- def test_get_metadata_invalid_doc(self):
+ def test_get_metadata_invalid_doc(self) -> None:
response = self.client.get("/api/documents/34576/metadata/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
- def test_get_metadata_no_archive(self):
+ def test_get_metadata_no_archive(self) -> None:
doc = Document.objects.create(
title="test",
filename="file.pdf",
self.assertIsNone(meta["archive_metadata"])
self.assertIsNone(meta["archive_media_filename"])
- def test_get_metadata_missing_files(self):
+ def test_get_metadata_missing_files(self) -> None:
doc = Document.objects.create(
title="test",
filename="file.pdf",
self.assertIsNone(meta["archive_metadata"])
self.assertIsNone(meta["archive_size"])
- def test_get_empty_suggestions(self):
+ def test_get_empty_suggestions(self) -> None:
doc = Document.objects.create(title="test", mime_type="application/pdf")
response = self.client.get(f"/api/documents/{doc.pk}/suggestions/")
},
)
- def test_get_suggestions_invalid_doc(self):
+ def test_get_suggestions_invalid_doc(self) -> None:
response = self.client.get("/api/documents/34676/suggestions/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.client.get(f"/api/documents/{doc.pk}/suggestions/")
self.assertFalse(parse_date_generator.called)
- def test_saved_views(self):
+ def test_saved_views(self) -> None:
u1 = User.objects.create_superuser("user1")
u2 = User.objects.create_superuser("user2")
status.HTTP_404_NOT_FOUND,
)
- def test_saved_view_create_update_patch(self):
+ def test_saved_view_create_update_patch(self) -> None:
User.objects.create_user("user1")
view = {
v1 = SavedView.objects.get(id=v1.id)
self.assertEqual(v1.filter_rules.count(), 0)
- def test_saved_view_display_options(self):
+ def test_saved_view_display_options(self) -> None:
"""
GIVEN:
- Saved view
v1.refresh_from_db()
self.assertEqual(v1.display_fields, None)
- def test_saved_view_display_customfields(self):
+ def test_saved_view_display_customfields(self) -> None:
"""
GIVEN:
- Saved view
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_saved_view_cleanup_after_custom_field_deletion(self):
+ def test_saved_view_cleanup_after_custom_field_deletion(self) -> None:
"""
GIVEN:
- Saved view with custom field in display fields and as sort field
[str(SavedView.DisplayFields.TITLE), str(SavedView.DisplayFields.CREATED)],
)
- def test_get_logs(self):
+ def test_get_logs(self) -> None:
log_data = "test\ntest2\n"
with (Path(settings.LOGGING_DIR) / "mail.log").open("w") as f:
f.write(log_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertCountEqual(response.data, ["mail", "paperless"])
- def test_get_logs_only_when_exist(self):
+ def test_get_logs_only_when_exist(self) -> None:
log_data = "test\ntest2\n"
with (Path(settings.LOGGING_DIR) / "paperless.log").open("w") as f:
f.write(log_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertCountEqual(response.data, ["paperless"])
- def test_get_invalid_log(self):
+ def test_get_invalid_log(self) -> None:
response = self.client.get("/api/logs/bogus_log/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
@override_settings(LOGGING_DIR="bogus_dir")
- def test_get_nonexistent_log(self):
+ def test_get_nonexistent_log(self) -> None:
response = self.client.get("/api/logs/paperless/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
- def test_get_log(self):
+ def test_get_log(self) -> None:
log_data = "test\ntest2\n"
with (Path(settings.LOGGING_DIR) / "paperless.log").open("w") as f:
f.write(log_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertListEqual(response.data, ["test", "test2"])
- def test_get_log_with_limit(self):
+ def test_get_log_with_limit(self) -> None:
log_data = "test1\ntest2\ntest3\n"
with (Path(settings.LOGGING_DIR) / "paperless.log").open("w") as f:
f.write(log_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertListEqual(response.data, ["test2", "test3"])
- def test_get_log_with_invalid_limit(self):
+ def test_get_log_with_invalid_limit(self) -> None:
log_data = "test1\ntest2\n"
with (Path(settings.LOGGING_DIR) / "paperless.log").open("w") as f:
f.write(log_data)
response = self.client.get("/api/logs/paperless/", {"limit": -5})
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_invalid_regex_other_algorithm(self):
+ def test_invalid_regex_other_algorithm(self) -> None:
for endpoint in ["correspondents", "tags", "document_types"]:
response = self.client.post(
f"/api/{endpoint}/",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED, endpoint)
- def test_invalid_regex(self):
+ def test_invalid_regex(self) -> None:
for endpoint in ["correspondents", "tags", "document_types"]:
response = self.client.post(
f"/api/{endpoint}/",
endpoint,
)
- def test_valid_regex(self):
+ def test_valid_regex(self) -> None:
for endpoint in ["correspondents", "tags", "document_types"]:
response = self.client.post(
f"/api/{endpoint}/",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED, endpoint)
- def test_regex_no_algorithm(self):
+ def test_regex_no_algorithm(self) -> None:
for endpoint in ["correspondents", "tags", "document_types"]:
response = self.client.post(
f"/api/{endpoint}/",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED, endpoint)
- def test_tag_color_default(self):
+ def test_tag_color_default(self) -> None:
response = self.client.post("/api/tags/", {"name": "tag"}, format="json")
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Tag.objects.get(id=response.data["id"]).color, "#a6cee3")
1,
)
- def test_tag_color(self):
+ def test_tag_color(self) -> None:
response = self.client.post(
"/api/tags/",
data={"name": "tag", "colour": 3},
3,
)
- def test_tag_color_invalid(self):
+ def test_tag_color_invalid(self) -> None:
response = self.client.post(
"/api/tags/",
data={"name": "tag", "colour": 34},
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_tag_color_custom(self):
+ def test_tag_color_custom(self) -> None:
tag = Tag.objects.create(name="test", color="#abcdef")
self.assertEqual(
self.client.get(
1,
)
- def test_get_existing_notes(self):
+ def test_get_existing_notes(self) -> None:
"""
GIVEN:
- A document with a single note
},
)
- def test_docnote_serializer_v7(self):
+ def test_docnote_serializer_v7(self) -> None:
doc = Document.objects.create(
title="test",
mime_type="application/pdf",
self.user.id,
)
- def test_create_note(self):
+ def test_create_note(self) -> None:
"""
GIVEN:
- Existing document
# modified was updated to today
self.assertEqual(doc.modified.day, timezone.now().day)
- def test_notes_permissions_aware(self):
+ def test_notes_permissions_aware(self) -> None:
"""
GIVEN:
- Existing document owned by user2 but with granted view perms for user1
self.assertEqual(response.content, b"Insufficient permissions to delete notes")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
- def test_delete_note(self):
+ def test_delete_note(self) -> None:
"""
GIVEN:
- Existing document, existing note
# modified was updated to today
self.assertEqual(doc.modified.day, timezone.now().day)
- def test_get_notes_no_doc(self):
+ def test_get_notes_no_doc(self) -> None:
"""
GIVEN:
- A request to get notes from a non-existent document
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
- def test_tag_unique_name_and_owner(self):
+ def test_tag_unique_name_and_owner(self) -> None:
"""
GIVEN:
- Multiple users
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_tag_unique_name_and_owner_enforced_on_update(self):
+ def test_tag_unique_name_and_owner_enforced_on_update(self) -> None:
"""
GIVEN:
- Multiple users
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_create_share_links(self):
+ def test_create_share_links(self) -> None:
"""
GIVEN:
- Existing document
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
- def test_share_links_permissions_aware(self):
+ def test_share_links_permissions_aware(self) -> None:
"""
GIVEN:
- Existing document owned by user2 but with granted view perms for user1
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- def test_next_asn(self):
+ def test_next_asn(self) -> None:
"""
GIVEN:
- Existing documents with ASNs, highest owned by user2
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(resp.content, b"1000")
- def test_next_asn_no_documents_with_asn(self):
+ def test_next_asn_no_documents_with_asn(self) -> None:
"""
GIVEN:
- Existing document, but with no ASN assigned
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(resp.content, b"1")
- def test_asn_not_unique_with_trashed_doc(self):
+ def test_asn_not_unique_with_trashed_doc(self) -> None:
"""
GIVEN:
- Existing document with ASN that is trashed
},
)
- def test_remove_inbox_tags(self):
+ def test_remove_inbox_tags(self) -> None:
"""
GIVEN:
- Existing document with or without inbox tags
EMAIL_ENABLED=True,
EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
)
- def test_email_document(self):
+ def test_email_document(self) -> None:
"""
GIVEN:
- Existing document
self.assertEqual(mail.outbox[1].attachments[0][0], expected_filename2)
@mock.patch("django.core.mail.message.EmailMessage.send", side_effect=Exception)
- def test_email_document_errors(self, mocked_send):
+ def test_email_document_errors(self, mocked_send) -> None:
"""
GIVEN:
- Existing document
self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
@mock.patch("django_softdelete.models.SoftDeleteModel.delete")
- def test_warn_on_delete_with_old_uuid_field(self, mocked_delete):
+ def test_warn_on_delete_with_old_uuid_field(self, mocked_delete) -> None:
"""
GIVEN:
- Existing document in a (mocked) MariaDB database with an old UUID field
class TestDocumentApiV2(DirectoriesMixin, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
self.client.defaults["HTTP_ACCEPT"] = "application/json; version=2"
- def test_tag_validate_color(self):
+ def test_tag_validate_color(self) -> None:
self.assertEqual(
self.client.post(
"/api/tags/",
status.HTTP_400_BAD_REQUEST,
)
- def test_tag_text_color(self):
+ def test_tag_text_color(self) -> None:
t = Tag.objects.create(name="tag1", color="#000000")
self.assertEqual(
self.client.get(f"/api/tags/{t.id}/", format="json").data["text_color"],
class TestDocumentApiCustomFieldsSorting(DirectoriesMixin, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
cache.clear()
- def test_document_custom_fields_sorting(self):
+ def test_document_custom_fields_sorting(self) -> None:
"""
GIVEN:
- Documents with custom fields
[self.doc1.id, self.doc3.id, self.doc2.id],
)
- def test_document_custom_fields_sorting_invalid(self):
+ def test_document_custom_fields_sorting_invalid(self) -> None:
"""
GIVEN:
- Documents with custom fields
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_document_custom_fields_sorting_invalid_data_type(self):
+ def test_document_custom_fields_sorting_invalid_data_type(self) -> None:
"""
GIVEN:
- Documents with custom fields
class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
ENDPOINT = "/api/documents/email/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
EMAIL_ENABLED=True,
EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
)
- def test_email_success(self):
+ def test_email_success(self) -> None:
"""
GIVEN:
- Multiple existing documents (doc1 with archive, doc2 without)
EMAIL_ENABLED=True,
EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
)
- def test_email_use_original_version(self):
+ def test_email_use_original_version(self) -> None:
"""
GIVEN:
- Documents with archive versions
original_size = self.doc1.source_path.stat().st_size
self.assertEqual(len(attachment[1]), original_size)
- def test_email_missing_required_fields(self):
+ def test_email_missing_required_fields(self) -> None:
"""
GIVEN:
- Request with missing required fields
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_email_empty_document_list(self):
+ def test_email_empty_document_list(self) -> None:
"""
GIVEN:
- Request with empty document list
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_email_invalid_document_id(self):
+ def test_email_invalid_document_id(self) -> None:
"""
GIVEN:
- Request with non-existent document ID
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_email_invalid_email_address(self):
+ def test_email_invalid_email_address(self) -> None:
"""
GIVEN:
- Request with invalid email address
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_email_insufficient_permissions(self):
+ def test_email_insufficient_permissions(self) -> None:
"""
GIVEN:
- User without permissions to view document
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
- def test_email_only_requires_view_permission(self):
+ def test_email_only_requires_view_permission(self) -> None:
"""
GIVEN:
- User with only the view documents permission
EMAIL_ENABLED=True,
EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
)
- def test_email_duplicate_filenames(self):
+ def test_email_duplicate_filenames(self) -> None:
"""
GIVEN:
- Multiple documents with the same title
"django.core.mail.message.EmailMessage.send",
side_effect=Exception("Email error"),
)
- def test_email_send_error(self, mocked_send):
+ def test_email_send_error(self, mocked_send) -> None:
"""
GIVEN:
- Existing documents
class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
reference_predicate: Callable[[DocumentWrapper], bool],
*,
match_nothing_ok=False,
- ):
+ ) -> None:
"""
Checks the results of the query against a callable reference predicate.
"""
]
self.assertEqual(reference_document_ids, response_document_ids)
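For context, a minimal sketch of how a helper like this might be implemented; the `self.documents` fixture name and the exact filter parameter spelling are assumptions, not taken from the diff:

    def _assert_query_match_predicate_sketch(
        self,
        query: list,
        reference_predicate,
        *,
        match_nothing_ok=False,
    ) -> None:
        # Compute the expected ids by applying the predicate locally.
        reference_document_ids = [
            d.id for d in self.documents if reference_predicate(DocumentWrapper(d))
        ]
        if not match_nothing_ok:
            # Guard against vacuously passing tests that match no documents.
            self.assertTrue(reference_document_ids)
        # Run the same query through the API and compare the returned ids.
        response = self.client.get(
            "/api/documents/",
            {"custom_field_query": json.dumps(query)},
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        response_document_ids = [result["id"] for result in response.data["results"]]
        self.assertEqual(reference_document_ids, response_document_ids)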
- def _assert_validation_error(self, query: str, path: list, keyword: str):
+ def _assert_validation_error(self, query: str, path: list, keyword: str) -> None:
"""
Asserts that the query raises a validation error.
Checks the message to make sure it points to the right place.
# ==========================================================#
# Sanity checks #
# ==========================================================#
- def test_name_value_association(self):
+ def test_name_value_association(self) -> None:
"""
GIVEN:
- A document with `{"string_field": "https://docs.paperless-ngx.com/",
and document["url_field"] == "https://docs.paperless-ngx.com/",
)
- def test_filter_by_multiple_fields(self):
+ def test_filter_by_multiple_fields(self) -> None:
"""
GIVEN:
- A document with `{"string_field": "https://docs.paperless-ngx.com/",
# ==========================================================#
# Basic expressions supported by all custom field types #
# ==========================================================#
- def test_exact(self):
+ def test_exact(self) -> None:
self._assert_query_match_predicate(
["string_field", "exact", "paperless"],
lambda document: "string_field" in document
and document["string_field"] == "paperless",
)
- def test_in(self):
+ def test_in(self) -> None:
self._assert_query_match_predicate(
["string_field", "in", ["paperless", "Paperless"]],
lambda document: "string_field" in document
and document["string_field"] in ("paperless", "Paperless"),
)
- def test_isnull(self):
+ def test_isnull(self) -> None:
self._assert_query_match_predicate(
["string_field", "isnull", True],
lambda document: "string_field" in document
and document["string_field"] is None,
)
- def test_exists(self):
+ def test_exists(self) -> None:
self._assert_query_match_predicate(
["string_field", "exists", True],
lambda document: "string_field" in document,
)
- def test_exists_false(self):
+ def test_exists_false(self) -> None:
self._assert_query_match_predicate(
["string_field", "exists", False],
lambda document: "string_field" not in document,
)
- def test_select(self):
+ def test_select(self) -> None:
# For select fields, you can either specify the id of the option
# or the name of the option. They function exactly the same.
self._assert_query_match_predicate(
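As an illustration of the comment above, both query forms below would be expected to match the same documents; the option label "A" and the id value are placeholders, not values from the diff:

    import json

    # By option name:
    query_by_name = json.dumps(["select_field", "exact", "A"])
    # By option id (whatever identifier the option was created with):
    query_by_id = json.dumps(["select_field", "exact", "abc123"])
    # Either string can then be passed as the custom_field_query parameter,
    # e.g. GET /api/documents/?custom_field_query=<query>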
# ==========================================================#
# Expressions for string, URL, and monetary fields #
# ==========================================================#
- def test_icontains(self):
+ def test_icontains(self) -> None:
self._assert_query_match_predicate(
["string_field", "icontains", "aper"],
lambda document: "string_field" in document
and "aper" in document["string_field"].lower(),
)
- def test_istartswith(self):
+ def test_istartswith(self) -> None:
self._assert_query_match_predicate(
["string_field", "istartswith", "paper"],
lambda document: "string_field" in document
and document["string_field"].lower().startswith("paper"),
)
- def test_iendswith(self):
+ def test_iendswith(self) -> None:
self._assert_query_match_predicate(
["string_field", "iendswith", "less"],
lambda document: "string_field" in document
and document["string_field"].lower().endswith("less"),
)
- def test_url_field_istartswith(self):
+ def test_url_field_istartswith(self) -> None:
# URL fields support all of the expressions above.
# Just showing one of them here.
self._assert_query_match_predicate(
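A sketch of another expression the same field could support, reusing the `url_field` fixture named earlier in this class (the substring is an assumption chosen to match the fixture value):

    self._assert_query_match_predicate(
        ["url_field", "icontains", "paperless"],
        lambda document: "url_field" in document
        and "paperless" in document["url_field"].lower(),
    )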
# ==========================================================#
# Arithmetic comparisons #
# ==========================================================#
- def test_gt(self):
+ def test_gt(self) -> None:
self._assert_query_match_predicate(
["date_field", "gt", date(2024, 8, 22).isoformat()],
lambda document: "date_field" in document
and document["date_field"] > date(2024, 8, 22),
)
- def test_gte(self):
+ def test_gte(self) -> None:
self._assert_query_match_predicate(
["date_field", "gte", date(2024, 8, 22).isoformat()],
lambda document: "date_field" in document
and document["date_field"] >= date(2024, 8, 22),
)
- def test_lt(self):
+ def test_lt(self) -> None:
self._assert_query_match_predicate(
["integer_field", "lt", 0],
lambda document: "integer_field" in document
and document["integer_field"] < 0,
)
- def test_lte(self):
+ def test_lte(self) -> None:
self._assert_query_match_predicate(
["integer_field", "lte", 0],
lambda document: "integer_field" in document
and document["integer_field"] <= 0,
)
- def test_range(self):
+ def test_range(self) -> None:
self._assert_query_match_predicate(
["float_field", "range", [-0.05, 0.05]],
lambda document: "float_field" in document
and -0.05 <= document["float_field"] <= 0.05,
)
- def test_date_modifier(self):
+ def test_date_modifier(self) -> None:
# For date fields you can optionally prefix the operator
# with the part of the date you are comparing with.
self._assert_query_match_predicate(
and document["date_field"].year >= 2024,
)
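Given the assertion above (and the `foo__gt` prefix rejected later in test_invalid_operator_prefix), the elided query presumably takes the `<date part>__<operator>` form; a hedged reconstruction:

    self._assert_query_match_predicate(
        ["date_field", "year__gte", 2024],
        lambda document: "date_field" in document
        and document["date_field"].year >= 2024,
    )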
- def test_gt_monetary(self):
+ def test_gt_monetary(self) -> None:
self._assert_query_match_predicate(
["monetary_field", "gt", "99"],
lambda document: "monetary_field" in document
# ==========================================================#
# Subset check (document link field only) #
# ==========================================================#
- def test_document_link_contains(self):
+ def test_document_link_contains(self) -> None:
# Document link field "contains" performs a subset check.
self._assert_query_match_predicate(
["documentlink_field", "contains", [1, 2]],
and set(document["documentlink_field"]) >= {1, 2},
)
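The subset semantics can be stated in plain Python (illustrative only, not part of the test suite):

    links = {1, 2, 3}             # a document's documentlink_field values
    assert links >= {1, 2}        # ["documentlink_field", "contains", [1, 2]] matches
    assert not (links >= {1, 4})  # any missing linked id means no match
    assert links >= set()         # an empty value list matches any non-null field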
- def test_document_link_contains_empty_set(self):
+ def test_document_link_contains_empty_set(self) -> None:
# An empty set is a subset of any set.
self._assert_query_match_predicate(
["documentlink_field", "contains", []],
and document["documentlink_field"] is not None,
)
- def test_document_link_contains_no_reverse_link(self):
+ def test_document_link_contains_no_reverse_link(self) -> None:
# An edge case is that the document in the value list
# doesn't have a document link field and thus has no reverse link.
self._assert_query_match_predicate(
# ==========================================================#
# Logical expressions #
# ==========================================================#
- def test_logical_and(self):
+ def test_logical_and(self) -> None:
self._assert_query_match_predicate(
[
"AND",
and document["date_field"].month < 9,
)
- def test_logical_or(self):
+ def test_logical_or(self) -> None:
# This is also the recommended way to check for "empty" text, URL, and monetary fields.
self._assert_query_match_predicate(
[
and not bool(document["string_field"]),
)
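Following the comment above, an "empty field" check would presumably OR an exact-empty match with an isnull match; a hedged sketch consistent with the predicate shown:

    self._assert_query_match_predicate(
        [
            "OR",
            [
                ["string_field", "exact", ""],
                ["string_field", "isnull", True],
            ],
        ],
        lambda document: "string_field" in document
        and not bool(document["string_field"]),
    )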
- def test_logical_not(self):
+ def test_logical_not(self) -> None:
# This means `NOT ((document has string_field) AND (string_field iexact "paperless"))`,
# not `(document has string_field) AND (NOT (string_field iexact "paperless"))`!
self._assert_query_match_predicate(
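To make the distinction in the comment concrete (a sketch; the combined form below is an assumption, not taken from the diff):

    # Matches documents that lack string_field entirely, as well as those
    # where its value differs from "paperless":
    ["NOT", ["string_field", "iexact", "paperless"]]

    # To additionally require the field to exist, AND the negation with an
    # explicit exists check:
    ["AND", [
        ["string_field", "exists", True],
        ["NOT", ["string_field", "iexact", "paperless"]],
    ]]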
# Tests for invalid queries #
# ==========================================================#
- def test_invalid_json(self):
+ def test_invalid_json(self) -> None:
self._assert_validation_error(
"not valid json",
["custom_field_query"],
"must be valid JSON",
)
- def test_invalid_expression(self):
+ def test_invalid_expression(self) -> None:
self._assert_validation_error(
json.dumps("valid json but not valid expr"),
["custom_field_query"],
"Invalid custom field query expression",
)
- def test_invalid_custom_field_name(self):
+ def test_invalid_custom_field_name(self) -> None:
self._assert_validation_error(
json.dumps(["invalid name", "iexact", "foo"]),
["custom_field_query", "0"],
"is not a valid custom field",
)
- def test_invalid_operator(self):
+ def test_invalid_operator(self) -> None:
self._assert_validation_error(
json.dumps(["integer_field", "iexact", "foo"]),
["custom_field_query", "1"],
"does not support query expr",
)
- def test_invalid_value(self):
+ def test_invalid_value(self) -> None:
self._assert_validation_error(
json.dumps(["select_field", "exact", []]),
["custom_field_query", "2"],
"string",
)
- def test_invalid_logical_operator(self):
+ def test_invalid_logical_operator(self) -> None:
self._assert_validation_error(
json.dumps(["invalid op", ["integer_field", "gt", 0]]),
["custom_field_query", "0"],
"Invalid logical operator",
)
- def test_invalid_expr_list(self):
+ def test_invalid_expr_list(self) -> None:
self._assert_validation_error(
json.dumps(["AND", "not a list"]),
["custom_field_query", "1"],
"Invalid expression list",
)
- def test_invalid_operator_prefix(self):
+ def test_invalid_operator_prefix(self) -> None:
self._assert_validation_error(
json.dumps(["integer_field", "foo__gt", 0]),
["custom_field_query", "1"],
"does not support query expr",
)
- def test_query_too_deep(self):
+ def test_query_too_deep(self) -> None:
query = ["string_field", "exact", "paperless"]
for _ in range(10):
query = ["NOT", query]
"Maximum nesting depth exceeded",
)
- def test_query_too_many_atoms(self):
+ def test_query_too_many_atoms(self) -> None:
atom = ["string_field", "exact", "paperless"]
query = ["AND", [atom for _ in range(21)]]
self._assert_validation_error(
self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{title}")
self.sp2 = StoragePath.objects.create(name="sp2", path="Something2/{title}")
- def test_object_filters(self):
+ def test_object_filters(self) -> None:
response = self.client.get(
f"/api/tags/?id={self.tag2.id}",
)
results = response.data["results"]
self.assertEqual(len(results), 2)
- def test_correspondent_last_correspondence(self):
+ def test_correspondent_last_correspondence(self) -> None:
"""
GIVEN:
- Correspondent with documents
self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{checksum}")
- def test_api_get_storage_path(self):
+ def test_api_get_storage_path(self) -> None:
"""
GIVEN:
- API request to get all storage paths
self.assertEqual(resp_storage_path["id"], self.sp1.id)
self.assertEqual(resp_storage_path["path"], self.sp1.path)
- def test_api_create_storage_path(self):
+ def test_api_create_storage_path(self) -> None:
"""
GIVEN:
- API request to create a storage path
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(StoragePath.objects.count(), 2)
- def test_api_create_invalid_storage_path(self):
+ def test_api_create_invalid_storage_path(self) -> None:
"""
GIVEN:
- API request to create a storage path
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(StoragePath.objects.count(), 1)
- def test_api_create_storage_path_rejects_traversal(self):
+ def test_api_create_storage_path_rejects_traversal(self) -> None:
"""
GIVEN:
- API request to create a storage path
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(StoragePath.objects.count(), 1)
- def test_api_storage_path_placeholders(self):
+ def test_api_storage_path_placeholders(self) -> None:
"""
GIVEN:
- API request to create a storage path with placeholders
self.assertEqual(StoragePath.objects.count(), 2)
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
- def test_api_update_storage_path(self, bulk_update_mock):
+ def test_api_update_storage_path(self, bulk_update_mock) -> None:
"""
GIVEN:
- API request to update a storage path
self.assertCountEqual([document.pk], args[0])
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
- def test_api_delete_storage_path(self, bulk_update_mock):
+ def test_api_delete_storage_path(self, bulk_update_mock) -> None:
"""
GIVEN:
- API request to delete a storage path
# only called once
bulk_update_mock.assert_called_once_with([document.pk])
- def test_test_storage_path(self):
+ def test_test_storage_path(self) -> None:
"""
GIVEN:
- API request to test a storage path
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, "path/Something")
- def test_test_storage_path_respects_none_placeholder_setting(self):
+ def test_test_storage_path_respects_none_placeholder_setting(self) -> None:
"""
GIVEN:
- A storage path template referencing an empty field
class TestBulkEditObjects(APITestCase):
# See test_api_permissions.py for bulk tests on permissions
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.temp_admin = User.objects.create_superuser(username="temp_admin")
self.user2 = User.objects.create(username="user2")
self.user3 = User.objects.create(username="user3")
- def test_bulk_objects_delete(self):
+ def test_bulk_objects_delete(self) -> None:
"""
GIVEN:
- Existing objects
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(StoragePath.objects.count(), 0)
- def test_bulk_edit_object_permissions_insufficient_global_perms(self):
+ def test_bulk_edit_object_permissions_insufficient_global_perms(self) -> None:
"""
GIVEN:
- Existing objects, user does not have global delete permissions
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.content, b"Insufficient permissions")
- def test_bulk_edit_object_permissions_sufficient_global_perms(self):
+ def test_bulk_edit_object_permissions_sufficient_global_perms(self) -> None:
"""
GIVEN:
- Existing objects, user does have global delete permissions
self.assertEqual(response.status_code, status.HTTP_200_OK)
- def test_bulk_edit_object_permissions_insufficient_object_perms(self):
+ def test_bulk_edit_object_permissions_insufficient_object_perms(self) -> None:
"""
GIVEN:
- Objects owned by user other than logged in user
class TestApiAuth(DirectoriesMixin, APITestCase):
- def test_auth_required(self):
+ def test_auth_required(self) -> None:
d = Document.objects.create(title="Test")
self.assertEqual(
status.HTTP_401_UNAUTHORIZED,
)
- def test_api_version_no_auth(self):
+ def test_api_version_no_auth(self) -> None:
response = self.client.get("/api/documents/")
self.assertNotIn("X-Api-Version", response)
self.assertNotIn("X-Version", response)
- def test_api_version_with_auth(self):
+ def test_api_version_with_auth(self) -> None:
user = User.objects.create_superuser(username="test")
self.client.force_authenticate(user)
response = self.client.get("/api/documents/")
self.assertIn("X-Api-Version", response)
self.assertIn("X-Version", response)
- def test_api_insufficient_permissions(self):
+ def test_api_insufficient_permissions(self) -> None:
user = User.objects.create_user(username="test")
self.client.force_authenticate(user)
status.HTTP_403_FORBIDDEN,
)
- def test_api_sufficient_permissions(self):
+ def test_api_sufficient_permissions(self) -> None:
user = User.objects.create_user(username="test")
user.user_permissions.add(*Permission.objects.all())
user.is_staff = True
status.HTTP_200_OK,
)
- def test_api_get_object_permissions(self):
+ def test_api_get_object_permissions(self) -> None:
user1 = User.objects.create_user(username="test1")
user2 = User.objects.create_user(username="test2")
user1.user_permissions.add(*Permission.objects.filter(codename="view_document"))
status.HTTP_404_NOT_FOUND,
)
- def test_api_default_owner(self):
+ def test_api_default_owner(self) -> None:
"""
GIVEN:
- API request to create an object (Tag)
tag1 = Tag.objects.filter(name="test1").first()
self.assertEqual(tag1.owner, user1)
- def test_api_set_no_owner(self):
+ def test_api_set_no_owner(self) -> None:
"""
GIVEN:
- API request to create an object (Tag)
tag1 = Tag.objects.filter(name="test1").first()
self.assertEqual(tag1.owner, None)
- def test_api_set_owner_w_permissions(self):
+ def test_api_set_owner_w_permissions(self) -> None:
"""
GIVEN:
- API request to create an object (Tag) that supplies set_permissions object
self.assertEqual(checker.has_perm("view_tag", tag1), True)
self.assertIn("view_tag", get_perms(group1, tag1))
- def test_api_set_other_owner_w_permissions(self):
+ def test_api_set_other_owner_w_permissions(self) -> None:
"""
GIVEN:
- API request to create an object (Tag)
self.assertEqual(tag1.owner, user2)
self.assertIn("view_tag", get_perms(group1, tag1))
- def test_api_set_doc_permissions(self):
+ def test_api_set_doc_permissions(self) -> None:
"""
GIVEN:
- API request to update doc permissions and owner
self.assertTrue(checker.has_perm("view_document", doc))
self.assertIn("view_document", get_perms(group1, doc))
- def test_patch_doesnt_remove_permissions(self):
+ def test_patch_doesnt_remove_permissions(self) -> None:
"""
GIVEN:
- existing document with permissions set
self.assertTrue(checker.has_perm("change_document", doc))
self.assertIn("change_document", get_perms(group1, doc))
- def test_document_permissions_change_requires_owner(self):
+ def test_document_permissions_change_requires_owner(self) -> None:
owner = User.objects.create_user(username="owner")
editor = User.objects.create_user(username="editor")
editor.user_permissions.add(
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
- def test_dynamic_permissions_fields(self):
+ def test_dynamic_permissions_fields(self) -> None:
user1 = User.objects.create_user(username="user1")
user1.user_permissions.add(*Permission.objects.filter(codename="view_document"))
user2 = User.objects.create_user(username="user2")
self.assertNotIn("is_shared_by_requester", results[0])
@mock.patch("allauth.mfa.adapter.DefaultMFAAdapter.is_mfa_enabled")
- def test_basic_auth_mfa_enabled(self, mock_is_mfa_enabled):
+ def test_basic_auth_mfa_enabled(self, mock_is_mfa_enabled) -> None:
"""
GIVEN:
- User with MFA enabled
self.assertEqual(response.data["detail"], "MFA required")
@mock.patch("allauth.mfa.totp.internal.auth.TOTP.validate_code")
- def test_get_token_mfa_enabled(self, mock_validate_code):
+ def test_get_token_mfa_enabled(self, mock_validate_code) -> None:
"""
GIVEN:
- User with MFA enabled
class TestApiUser(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/users/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
- def test_get_users(self):
+ def test_get_users(self) -> None:
"""
GIVEN:
- Configured users
self.assertEqual(returned_user2["first_name"], user1.first_name)
self.assertEqual(returned_user2["last_name"], user1.last_name)
- def test_create_user(self):
+ def test_create_user(self) -> None:
"""
WHEN:
- API request is made to add a user account
self.assertEqual(returned_user1.first_name, user1["first_name"])
self.assertEqual(returned_user1.last_name, user1["last_name"])
- def test_delete_user(self):
+ def test_delete_user(self) -> None:
"""
GIVEN:
- Existing user account
self.assertEqual(User.objects.count(), nUsers - 1)
- def test_update_user(self):
+ def test_update_user(self) -> None:
"""
GIVEN:
- Existing user accounts
self.assertEqual(returned_user2.first_name, "Updated Name 2")
self.assertNotEqual(returned_user2.password, initial_password)
- def test_deactivate_totp(self):
+ def test_deactivate_totp(self) -> None:
"""
GIVEN:
- Existing user account with TOTP enabled
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
- def test_only_superusers_can_create_or_alter_superuser_status(self):
+ def test_only_superusers_can_create_or_alter_superuser_status(self) -> None:
"""
GIVEN:
- Existing user account
class TestApiGroup(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/groups/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
- def test_get_groups(self):
+ def test_get_groups(self) -> None:
"""
GIVEN:
- Configured groups
self.assertEqual(returned_group1["name"], group1.name)
- def test_create_group(self):
+ def test_create_group(self) -> None:
"""
WHEN:
- API request is made to add a group
self.assertEqual(returned_group1.name, group1["name"])
- def test_delete_group(self):
+ def test_delete_group(self) -> None:
"""
GIVEN:
- Existing group
self.assertEqual(len(Group.objects.all()), 0)
- def test_update_group(self):
+ def test_update_group(self) -> None:
"""
GIVEN:
- Existing groups
class TestBulkEditObjectPermissions(APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.temp_admin = User.objects.create_superuser(username="temp_admin")
self.user2 = User.objects.create(username="user2")
self.user3 = User.objects.create(username="user3")
- def test_bulk_object_set_permissions(self):
+ def test_bulk_object_set_permissions(self) -> None:
"""
GIVEN:
- Existing objects
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(StoragePath.objects.get(pk=self.sp1.id).owner, self.user3)
- def test_bulk_object_set_permissions_merge(self):
+ def test_bulk_object_set_permissions_merge(self) -> None:
"""
GIVEN:
- Existing objects
# user3 should be removed
self.assertNotIn(self.user3, get_users_with_perms(self.t1))
- def test_bulk_edit_object_permissions_insufficient_perms(self):
+ def test_bulk_edit_object_permissions_insufficient_perms(self) -> None:
"""
GIVEN:
- Objects owned by user other than logged in user
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.content, b"Insufficient permissions")
- def test_bulk_edit_object_permissions_validation(self):
+ def test_bulk_edit_object_permissions_validation(self) -> None:
"""
GIVEN:
- Existing objects
class TestFullPermissionsFlag(APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.admin = User.objects.create_superuser(username="admin")
- def test_full_perms_flag(self):
+ def test_full_perms_flag(self) -> None:
"""
GIVEN:
- API request to list documents
# see allauth.socialaccount.providers.openid_connect.provider.OpenIDConnectProviderAccount
class MockOpenIDConnectProviderAccount:
- def __init__(self, mock_social_account_dict):
+ def __init__(self, mock_social_account_dict) -> None:
self.account = mock_social_account_dict
def to_str(self):
id = "openid_connect"
name = "OpenID Connect"
- def __init__(self, app=None):
+ def __init__(self, app=None) -> None:
self.app = app
self.name = app.name
class TestApiProfile(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/profile/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(
)
self.client.force_authenticate(user=self.user)
- def setupSocialAccount(self):
+ def setupSocialAccount(self) -> None:
SocialApp.objects.create(
name="Keycloak",
provider="openid_connect",
bulk=False,
)
- def test_get_profile(self):
+ def test_get_profile(self) -> None:
"""
GIVEN:
- Configured user
@mock.patch(
"allauth.socialaccount.adapter.DefaultSocialAccountAdapter.list_providers",
)
- def test_get_profile_w_social(self, mock_list_providers, mock_get_provider_account):
+ def test_get_profile_w_social(
+ self,
+ mock_list_providers,
+ mock_get_provider_account,
+ ) -> None:
"""
GIVEN:
- Configured user with a social account set up
],
)
- def test_profile_w_social_removed_app(self):
+ def test_profile_w_social_removed_app(self) -> None:
"""
GIVEN:
- Configured user with a social account set up
],
)
- def test_update_profile(self):
+ def test_update_profile(self) -> None:
"""
GIVEN:
- Configured user
self.assertEqual(user.first_name, user_data["first_name"])
self.assertEqual(user.last_name, user_data["last_name"])
- def test_update_profile_invalid_password_returns_field_error(self):
+ def test_update_profile_invalid_password_returns_field_error(self) -> None:
"""
GIVEN:
- Configured user
),
)
- def test_update_profile_placeholder_password_skips_validation(self):
+ def test_update_profile_placeholder_password_skips_validation(self) -> None:
"""
GIVEN:
- Configured user with existing password
self.assertEqual(user.first_name, user_data["first_name"])
self.assertEqual(user.last_name, user_data["last_name"])
- def test_update_auth_token(self):
+ def test_update_auth_token(self) -> None:
"""
GIVEN:
- Configured user
self.assertNotEqual(token1.key, token2.key)
- def test_profile_not_logged_in(self):
+ def test_profile_not_logged_in(self) -> None:
"""
GIVEN:
- User not logged in
def test_get_social_account_providers(
self,
mock_list_providers,
- ):
+ ) -> None:
"""
GIVEN:
- Configured user
def test_get_social_account_providers_openid(
self,
mock_list_providers,
- ):
+ ) -> None:
"""
GIVEN:
- Configured user and openid social account provider
2,
)
- def test_disconnect_social_account(self):
+ def test_disconnect_social_account(self) -> None:
"""
GIVEN:
- Configured user
class TestApiTOTPViews(APITestCase):
ENDPOINT = "/api/profile/totp/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
- def test_get_totp(self):
+ def test_get_totp(self) -> None:
"""
GIVEN:
- Existing user account
self.assertIn("secret", response.data)
@mock.patch("allauth.mfa.totp.internal.auth.validate_totp_code")
- def test_activate_totp(self, mock_validate_totp_code):
+ def test_activate_totp(self, mock_validate_totp_code) -> None:
"""
GIVEN:
- Existing user account
self.assertTrue(Authenticator.objects.filter(user=self.user).exists())
self.assertIn("recovery_codes", response.data)
- def test_deactivate_totp(self):
+ def test_deactivate_totp(self) -> None:
"""
GIVEN:
- Existing user account with TOTP enabled
class TestApiRemoteVersion:
ENDPOINT = "/api/remote_version/"
- def setup_method(self):
+ def setup_method(self) -> None:
cache.clear()
def test_remote_version_enabled_no_update_prefix(
self,
rest_api_client: APIClient,
httpx_mock: HTTPXMock,
- ):
+ ) -> None:
httpx_mock.add_response(
url="https://api.github.com/repos/paperless-ngx/paperless-ngx/releases/latest",
json={"tag_name": "ngx-1.6.0"},
self,
rest_api_client: APIClient,
httpx_mock: HTTPXMock,
- ):
+ ) -> None:
httpx_mock.add_response(
url="https://api.github.com/repos/paperless-ngx/paperless-ngx/releases/latest",
json={"tag_name": version.__full_version_str__},
self,
rest_api_client: APIClient,
httpx_mock: HTTPXMock,
- ):
+ ) -> None:
new_version = (
version.__version__[0],
version.__version__[1],
self,
rest_api_client: APIClient,
httpx_mock: HTTPXMock,
- ):
+ ) -> None:
httpx_mock.add_response(
content=b'{ "blah":',
headers={"Content-Type": "application/json"},
self,
rest_api_client: APIClient,
httpx_mock: HTTPXMock,
- ):
+ ) -> None:
httpx_mock.add_response(status_code=503)
response = rest_api_client.get(self.ENDPOINT)
class TestApiSchema(APITestCase):
ENDPOINT = "/api/schema/"
- def test_valid_schema(self):
+ def test_valid_schema(self) -> None:
"""
Test that the schema is valid
"""
except CommandError as e:
self.fail(f"Schema validation failed: {e}")
- def test_get_schema_endpoints(self):
+ def test_get_schema_endpoints(self) -> None:
"""
Test that the schema endpoints exist and return a 200 status code
"""
class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
- def test_search(self):
+ def test_search(self) -> None:
d1 = Document.objects.create(
title="invoice",
content="the thing i bought at a shop and paid with bank account",
self.assertEqual(len(results), 0)
self.assertCountEqual(response.data["all"], [])
- def test_search_custom_field_ordering(self):
+ def test_search_custom_field_ordering(self) -> None:
custom_field = CustomField.objects.create(
name="Sortable field",
data_type=CustomField.FieldDataType.INT,
[d1.id, d3.id, d2.id],
)
- def test_search_multi_page(self):
+ def test_search_multi_page(self) -> None:
with AsyncWriter(index.open_index()) as writer:
for i in range(55):
doc = Document.objects.create(
self.assertNotIn(result["id"], seen_ids)
seen_ids.append(result["id"])
- def test_search_invalid_page(self):
+ def test_search_invalid_page(self) -> None:
with AsyncWriter(index.open_index()) as writer:
for i in range(15):
doc = Document.objects.create(
@override_settings(
TIME_ZONE="UTC",
)
- def test_search_added_in_last_week(self):
+ def test_search_added_in_last_week(self) -> None:
"""
GIVEN:
- Three documents added right now
@override_settings(
TIME_ZONE="America/Chicago",
)
- def test_search_added_in_last_week_with_timezone_behind(self):
+ def test_search_added_in_last_week_with_timezone_behind(self) -> None:
"""
GIVEN:
- Two documents added right now
@override_settings(
TIME_ZONE="Europe/Sofia",
)
- def test_search_added_in_last_week_with_timezone_ahead(self):
+ def test_search_added_in_last_week_with_timezone_ahead(self) -> None:
"""
GIVEN:
- Two documents added right now
# Assert subset in results
self.assertDictEqual(result, {**result, **subset})
- def test_search_added_in_last_month(self):
+ def test_search_added_in_last_month(self) -> None:
"""
GIVEN:
- One document added right now
@override_settings(
TIME_ZONE="America/Denver",
)
- def test_search_added_in_last_month_timezone_behind(self):
+ def test_search_added_in_last_month_timezone_behind(self) -> None:
"""
GIVEN:
- One document added right now
@override_settings(
TIME_ZONE="Europe/Sofia",
)
- def test_search_added_specific_date_with_timezone_ahead(self):
+ def test_search_added_specific_date_with_timezone_ahead(self) -> None:
"""
GIVEN:
- Two documents added right now
# Assert subset in results
self.assertDictEqual(result, {**result, **subset})
- def test_search_added_invalid_date(self):
+ def test_search_added_invalid_date(self) -> None:
"""
GIVEN:
- One document added right now
self.assertEqual(len(results), 0)
@mock.patch("documents.index.autocomplete")
- def test_search_autocomplete_limits(self, m):
+ def test_search_autocomplete_limits(self, m) -> None:
"""
GIVEN:
- No pre-conditions
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 10)
- def test_search_autocomplete_respect_permissions(self):
+ def test_search_autocomplete_respect_permissions(self) -> None:
"""
GIVEN:
- Multiple users and documents with & without permissions
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, [b"apples", b"applebaum", b"appletini"])
- def test_search_autocomplete_field_name_match(self):
+ def test_search_autocomplete_field_name_match(self) -> None:
"""
GIVEN:
- One document exists in index (must be one document to experience the crash)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, [])
- def test_search_autocomplete_search_term(self):
+ def test_search_autocomplete_search_term(self) -> None:
"""
GIVEN:
- Search results for autocomplete include the exact search term
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data[0], b"auto")
- def test_search_spelling_suggestion(self):
+ def test_search_spelling_suggestion(self) -> None:
with AsyncWriter(index.open_index()) as writer:
for i in range(55):
doc = Document.objects.create(
"whoosh.searching.Searcher.correct_query",
side_effect=Exception("Test error"),
)
- def test_corrected_query_error(self, mock_correct_query):
+ def test_corrected_query_error(self, mock_correct_query) -> None:
"""
GIVEN:
- A query that raises an error on correction
expected_str = "Error while correcting query '2025-06-04': Test error"
self.assertIn(expected_str, error_str)
- def test_search_more_like(self):
+ def test_search_more_like(self) -> None:
"""
GIVEN:
- Documents exist which have similar content
self.assertEqual(results[0]["id"], d3.id)
self.assertEqual(results[1]["id"], d1.id)
- def test_search_filtering(self):
+ def test_search_filtering(self) -> None:
t = Tag.objects.create(name="tag")
t2 = Tag.objects.create(name="tag2")
c = Correspondent.objects.create(name="correspondent")
),
)
- def test_search_filtering_respect_owner(self):
+ def test_search_filtering_respect_owner(self) -> None:
"""
GIVEN:
- Documents with owners set & without
r = self.client.get(f"/api/documents/?query=test&owner__id__none={u1.id}")
self.assertEqual(r.data["count"], 3)
- def test_search_filtering_with_object_perms(self):
+ def test_search_filtering_with_object_perms(self) -> None:
"""
GIVEN:
- Documents with granted view permissions to others
r = self.client.get(f"/api/documents/?query=test&shared_by__id={u1.id}")
self.assertEqual(r.data["count"], 1)
- def test_search_sorting(self):
+ def test_search_sorting(self) -> None:
u1 = User.objects.create_user("user1")
u2 = User.objects.create_user("user2")
c1 = Correspondent.objects.create(name="corres Ax")
)
@mock.patch("documents.bulk_edit.bulk_update_documents")
- def test_global_search(self, m):
+ def test_global_search(self, m) -> None:
"""
GIVEN:
- Multiple documents and objects
self.assertEqual(results["custom_fields"][0]["id"], custom_field1.id)
self.assertEqual(results["workflows"][0]["id"], workflow1.id)
- def test_global_search_bad_request(self):
+ def test_global_search_bad_request(self) -> None:
"""
WHEN:
- Global search query is made without or with query < 3 characters
class TestSystemStatus(APITestCase):
ENDPOINT = "/api/status/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(
username="temp_admin",
self.override = override_settings(MEDIA_ROOT=self.tmp_dir)
self.override.enable()
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
self.override.disable()
shutil.rmtree(self.tmp_dir)
- def test_system_status(self):
+ def test_system_status(self) -> None:
"""
GIVEN:
- A user is logged in
self.assertEqual(response.data["tasks"]["redis_status"], "ERROR")
self.assertIsNotNone(response.data["tasks"]["redis_error"])
- def test_system_status_insufficient_permissions(self):
+ def test_system_status_insufficient_permissions(self) -> None:
"""
GIVEN:
- A user is not logged in or does not have permissions
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
- def test_system_status_container_detection(self):
+ def test_system_status_container_detection(self) -> None:
"""
GIVEN:
- The application is running in a containerized environment
self.assertEqual(response.data["install_type"], "kubernetes")
@mock.patch("redis.Redis.execute_command")
- def test_system_status_redis_ping(self, mock_ping):
+ def test_system_status_redis_ping(self, mock_ping) -> None:
"""
GIVEN:
- Redis ping returns True
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["tasks"]["redis_status"], "OK")
- def test_system_status_redis_no_credentials(self):
+ def test_system_status_redis_no_credentials(self) -> None:
"""
GIVEN:
- Redis URL with credentials
"redis://localhost:6379",
)
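# A minimal sketch of credential redaction for the reported Redis URL, the
# behaviour the test above appears to check (assumption: credentials are
# stripped before the URL lands in the status payload):
from urllib.parse import urlparse


def redact_redis_url(url: str) -> str:
    parsed = urlparse(url)
    port = f":{parsed.port}" if parsed.port else ""
    return f"{parsed.scheme}://{parsed.hostname or ''}{port}"


assert redact_redis_url("redis://user:secret@localhost:6379") == "redis://localhost:6379"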
- def test_system_status_redis_socket(self):
+ def test_system_status_redis_socket(self) -> None:
"""
GIVEN:
- Redis URL is socket
)
@mock.patch("celery.app.control.Inspect.ping")
- def test_system_status_celery_ping(self, mock_ping):
+ def test_system_status_celery_ping(self, mock_ping) -> None:
"""
GIVEN:
- Celery ping returns pong
@override_settings(INDEX_DIR=Path("/tmp/index"))
@mock.patch("whoosh.index.FileIndex.last_modified")
- def test_system_status_index_ok(self, mock_last_modified):
+ def test_system_status_index_ok(self, mock_last_modified) -> None:
"""
GIVEN:
- The index last modified time is set
@override_settings(INDEX_DIR=Path("/tmp/index/"))
@mock.patch("documents.index.open_index", autospec=True)
- def test_system_status_index_error(self, mock_open_index):
+ def test_system_status_index_error(self, mock_open_index) -> None:
"""
GIVEN:
- The index is not found
self.assertEqual(response.data["tasks"]["index_status"], "ERROR")
self.assertIsNotNone(response.data["tasks"]["index_error"])
- def test_system_status_classifier_ok(self):
+ def test_system_status_classifier_ok(self) -> None:
"""
GIVEN:
- The classifier is found
self.assertEqual(response.data["tasks"]["classifier_status"], "OK")
self.assertIsNone(response.data["tasks"]["classifier_error"])
- def test_system_status_classifier_warning(self):
+ def test_system_status_classifier_warning(self) -> None:
"""
GIVEN:
- No classifier task is found
"WARNING",
)
- def test_system_status_classifier_error(self):
+ def test_system_status_classifier_error(self) -> None:
"""
GIVEN:
- An error occurred while loading the classifier
)
self.assertIsNotNone(response.data["tasks"]["classifier_error"])
- def test_system_status_sanity_check_ok(self):
+ def test_system_status_sanity_check_ok(self) -> None:
"""
GIVEN:
- The sanity check is successful
self.assertEqual(response.data["tasks"]["sanity_check_status"], "OK")
self.assertIsNone(response.data["tasks"]["sanity_check_error"])
- def test_system_status_sanity_check_warning(self):
+ def test_system_status_sanity_check_warning(self) -> None:
"""
GIVEN:
- No sanity check task is found
"WARNING",
)
- def test_system_status_sanity_check_error(self):
+ def test_system_status_sanity_check_error(self) -> None:
"""
GIVEN:
- The sanity check failed
)
self.assertIsNotNone(response.data["tasks"]["sanity_check_error"])
- def test_system_status_ai_disabled(self):
+ def test_system_status_ai_disabled(self) -> None:
"""
GIVEN:
- The AI feature is disabled
self.assertEqual(response.data["tasks"]["llmindex_status"], "DISABLED")
self.assertIsNone(response.data["tasks"]["llmindex_error"])
- def test_system_status_ai_enabled(self):
+ def test_system_status_ai_enabled(self) -> None:
"""
GIVEN:
- The AI index feature is enabled, but no tasks are found
self.assertEqual(response.data["tasks"]["llmindex_status"], "OK")
self.assertIsNone(response.data["tasks"]["llmindex_error"])
- def test_system_status_ai_error(self):
+ def test_system_status_ai_error(self) -> None:
"""
GIVEN:
- The AI index feature is enabled and a task is found with an error
class TestTasks(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/tasks/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
- def test_get_tasks(self):
+ def test_get_tasks(self) -> None:
"""
GIVEN:
- Attempted celery tasks
self.assertEqual(returned_task2["status"], celery.states.PENDING)
self.assertEqual(returned_task2["task_file_name"], task2.task_file_name)
- def test_get_single_task_status(self):
+ def test_get_single_task_status(self) -> None:
"""
GIVEN:
- Query parameter for a valid task ID
self.assertEqual(returned_task1["task_id"], task1.task_id)
- def test_get_single_task_status_not_valid(self):
+ def test_get_single_task_status_not_valid(self) -> None:
"""
GIVEN:
- Query parameter for a non-existent task ID
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 0)
- def test_acknowledge_tasks(self):
+ def test_acknowledge_tasks(self) -> None:
"""
GIVEN:
- Attempted celery tasks
response = self.client.get(self.ENDPOINT + "?acknowledged=false")
self.assertEqual(len(response.data), 0)
- def test_acknowledge_tasks_requires_change_permission(self):
+ def test_acknowledge_tasks_requires_change_permission(self) -> None:
"""
GIVEN:
- A regular user initially without change permissions
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
- def test_tasks_owner_aware(self):
+ def test_tasks_owner_aware(self) -> None:
"""
GIVEN:
- Existing PaperlessTasks with owner and with no owner
self.assertEqual(acknowledge_response.status_code, status.HTTP_200_OK)
self.assertEqual(acknowledge_response.data, {"result": 2})
- def test_task_result_no_error(self):
+ def test_task_result_no_error(self) -> None:
"""
GIVEN:
- A celery task completed without error
self.assertEqual(returned_data["result"], "Success. New document id 1 created")
self.assertEqual(returned_data["related_document"], "1")
- def test_task_result_with_error(self):
+ def test_task_result_with_error(self) -> None:
"""
GIVEN:
- A celery task completed with an exception
"test.pdf: Unexpected error during ingestion.",
)
- def test_task_name_webui(self):
+ def test_task_name_webui(self) -> None:
"""
GIVEN:
- Attempted celery task
self.assertEqual(returned_data["task_file_name"], "test.pdf")
- def test_task_name_consume_folder(self):
+ def test_task_name_consume_folder(self) -> None:
"""
GIVEN:
- Attempted celery task
self.assertEqual(returned_data["task_file_name"], "anothertest.pdf")
- def test_task_result_duplicate_warning_includes_count(self):
+ def test_task_result_duplicate_warning_includes_count(self) -> None:
"""
GIVEN:
- A celery task succeeds, but a duplicate exists
self.assertEqual(returned_data["related_document"], str(created_doc.pk))
- def test_run_train_classifier_task(self):
+ def test_run_train_classifier_task(self) -> None:
"""
GIVEN:
- A superuser
mock_train_classifier.assert_called_once_with(scheduled=False)
@mock.patch("documents.tasks.sanity_check")
- def test_run_task_requires_superuser(self, mock_check_sanity):
+ def test_run_task_requires_superuser(self, mock_check_sanity) -> None:
"""
GIVEN:
- A regular user
class TestTrashAPI(APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_user(username="temp_admin")
self.client.force_authenticate(user=self.user)
cache.clear()
- def test_api_trash(self):
+ def test_api_trash(self) -> None:
"""
GIVEN:
- Existing document
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(Document.global_objects.count(), 0)
- def test_trash_api_empty_all(self):
+ def test_trash_api_empty_all(self) -> None:
"""
GIVEN:
- Existing documents in trash
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(Document.global_objects.count(), 0)
- def test_api_trash_show_owned_only(self):
+ def test_api_trash_show_owned_only(self) -> None:
"""
GIVEN:
- Existing documents in trash
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(resp.data["count"], 3)
- def test_api_trash_insufficient_permissions(self):
+ def test_api_trash_insufficient_permissions(self) -> None:
"""
GIVEN:
- Existing document with owner = user2 in trash
self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(Document.global_objects.count(), 1)
- def test_api_trash_invalid_params(self):
+ def test_api_trash_invalid_params(self) -> None:
"""
GIVEN:
- Existing documents
class TestApiUiSettings(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/ui_settings/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.test_user = User.objects.create_superuser(username="test")
self.test_user.first_name = "Test"
self.test_user.save()
self.client.force_authenticate(user=self.test_user)
- def test_api_get_ui_settings(self):
+ def test_api_get_ui_settings(self) -> None:
response = self.client.get(self.ENDPOINT, format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.maxDiff = None
},
)
- def test_api_set_ui_settings(self):
+ def test_api_set_ui_settings(self) -> None:
settings = {
"settings": {
"dark_mode": {
settings["settings"],
)
- def test_api_set_ui_settings_insufficient_global_permissions(self):
+ def test_api_set_ui_settings_insufficient_global_permissions(self) -> None:
not_superuser = User.objects.create_user(username="test_not_superuser")
self.client.force_authenticate(user=not_superuser)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
- def test_api_set_ui_settings_sufficient_global_permissions(self):
+ def test_api_set_ui_settings_sufficient_global_permissions(self) -> None:
not_superuser = User.objects.create_user(username="test_not_superuser")
not_superuser.user_permissions.add(
*Permission.objects.filter(codename__contains="uisettings"),
self.assertEqual(response.status_code, status.HTTP_200_OK)
- def test_settings_must_be_dict(self):
+ def test_settings_must_be_dict(self) -> None:
"""
GIVEN:
- API request to update ui_settings with settings not being a dict
OUTLOOK_OAUTH_CLIENT_SECRET="jkl012",
OUTLOOK_OAUTH_ENABLED=True,
)
- def test_settings_includes_oauth_urls_if_enabled(self):
+ def test_settings_includes_oauth_urls_if_enabled(self) -> None:
response = self.client.get(self.ENDPOINT, format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIsNotNone(
self.workflow.actions.add(self.action)
self.workflow.save()
- def test_api_get_workflow(self):
+ def test_api_get_workflow(self) -> None:
"""
GIVEN:
- API request to get all workflows
self.action.assign_correspondent.pk,
)
- def test_api_create_workflow(self):
+ def test_api_create_workflow(self) -> None:
"""
GIVEN:
- API request to create a workflow, trigger and action separately
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Workflow.objects.count(), 2)
- def test_api_create_workflow_nested(self):
+ def test_api_create_workflow_nested(self) -> None:
"""
GIVEN:
- API request to create a workflow with nested trigger and action
json.dumps(["AND", [[self.cf1.id, "exact", "value"]]]),
)
- def test_api_create_invalid_workflow_trigger(self):
+ def test_api_create_invalid_workflow_trigger(self) -> None:
"""
GIVEN:
- API request to create a workflow trigger
self.assertEqual(WorkflowTrigger.objects.count(), 1)
- def test_api_create_invalid_assign_title(self):
+ def test_api_create_invalid_assign_title(self) -> None:
"""
GIVEN:
- API request to create a workflow
self.assertEqual(Workflow.objects.count(), 1)
- def test_api_create_workflow_trigger_action_empty_fields(self):
+ def test_api_create_workflow_trigger_action_empty_fields(self) -> None:
"""
GIVEN:
- API request to create a workflow trigger and action
self.assertEqual(trigger2.filter_path, "*/test/*")
self.assertIsNone(trigger2.filter_filename)
- def test_api_update_workflow_nested_triggers_actions(self):
+ def test_api_update_workflow_nested_triggers_actions(self) -> None:
"""
GIVEN:
- Existing workflow with trigger and action
)
self.assertEqual(workflow.actions.first().assign_title, "Action New Title")
- def test_api_update_workflow_no_trigger_actions(self):
+ def test_api_update_workflow_no_trigger_actions(self) -> None:
"""
GIVEN:
- Existing workflow
self.assertEqual(workflow.triggers.count(), 1)
self.assertEqual(workflow.actions.count(), 0)
- def test_api_auto_remove_orphaned_triggers_actions(self):
+ def test_api_auto_remove_orphaned_triggers_actions(self) -> None:
"""
GIVEN:
- Existing trigger and action
self.assertEqual(WorkflowAction.objects.all().count(), 1)
self.assertNotEqual(workflow.actions.first().id, self.action.id)
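# A minimal sketch of the sort of URL check a webhook action validation
# performs (scheme allow-list; the exact rules in paperless-ngx may differ):
from urllib.parse import urlparse


def is_valid_webhook_url(url: str) -> bool:
    parsed = urlparse(url)
    return parsed.scheme in ("http", "https") and bool(parsed.netloc)


assert is_valid_webhook_url("https://example.com/hook")
assert not is_valid_webhook_url("file:///etc/passwd")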
- def test_email_action_validation(self):
+ def test_email_action_validation(self) -> None:
"""
GIVEN:
- API request to create a workflow with an email action
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
- def test_webhook_action_validation(self):
+ def test_webhook_action_validation(self) -> None:
"""
GIVEN:
- API request to create a workflow with a webhook action
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
- def test_webhook_action_url_validation(self):
+ def test_webhook_action_url_validation(self) -> None:
"""
GIVEN:
- API request to create a workflow with a webhook action
)
self.assertEqual(response.status_code, expected_resp_code)
- def test_patch_trigger_cannot_change_id(self):
+ def test_patch_trigger_cannot_change_id(self) -> None:
"""
GIVEN:
- An existing workflow trigger
GetReaderPluginMixin,
TestCase,
):
- def test_scan_file_for_separating_barcodes(self):
+ def test_scan_file_for_separating_barcodes(self) -> None:
"""
GIVEN:
- PDF containing barcodes
@override_settings(
CONSUMER_BARCODE_TIFF_SUPPORT=True,
)
- def test_scan_tiff_for_separating_barcodes(self):
+ def test_scan_tiff_for_separating_barcodes(self) -> None:
"""
GIVEN:
- TIFF image containing barcodes
@override_settings(
CONSUMER_BARCODE_TIFF_SUPPORT=True,
)
- def test_scan_tiff_with_alpha_for_separating_barcodes(self):
+ def test_scan_tiff_with_alpha_for_separating_barcodes(self) -> None:
"""
GIVEN:
- TIFF image containing barcodes
self.assertDictEqual(separator_page_numbers, {1: False})
- def test_scan_file_for_separating_barcodes_none_present(self):
+ def test_scan_file_for_separating_barcodes_none_present(self) -> None:
"""
GIVEN:
- File with no barcodes
self.assertEqual(reader.pdf_file, test_file)
self.assertDictEqual(separator_page_numbers, {})
- def test_scan_file_for_separating_barcodes_middle_page(self):
+ def test_scan_file_for_separating_barcodes_middle_page(self) -> None:
"""
GIVEN:
- PDF file containing a separator on page 1 (zero indexed)
self.assertEqual(reader.pdf_file, test_file)
self.assertDictEqual(separator_page_numbers, {1: False})
- def test_scan_file_for_separating_barcodes_multiple_pages(self):
+ def test_scan_file_for_separating_barcodes_multiple_pages(self) -> None:
"""
GIVEN:
- PDF file containing a separator on pages 2 and 5 (zero indexed)
self.assertEqual(reader.pdf_file, test_file)
self.assertDictEqual(separator_page_numbers, {2: False, 5: False})
- def test_scan_file_for_separating_barcodes_hard_to_detect(self):
+ def test_scan_file_for_separating_barcodes_hard_to_detect(self) -> None:
"""
GIVEN:
- PDF file containing a separator on page 1 (zero indexed)
self.assertEqual(reader.pdf_file, test_file)
self.assertDictEqual(separator_page_numbers, {1: False})
- def test_scan_file_for_separating_barcodes_unreadable(self):
+ def test_scan_file_for_separating_barcodes_unreadable(self) -> None:
"""
GIVEN:
- PDF file containing a separator on page 1 (zero indexed)
self.assertEqual(reader.pdf_file, test_file)
self.assertDictEqual(separator_page_numbers, {})
- def test_scan_file_for_separating_barcodes_fax_decode(self):
+ def test_scan_file_for_separating_barcodes_fax_decode(self) -> None:
"""
GIVEN:
- A PDF containing an image encoded as CCITT Group 4 encoding
self.assertEqual(reader.pdf_file, test_file)
self.assertDictEqual(separator_page_numbers, {1: False})
- def test_scan_file_for_separating_qr_barcodes(self):
+ def test_scan_file_for_separating_qr_barcodes(self) -> None:
"""
GIVEN:
- PDF file containing a separator on page 0 (zero indexed)
self.assertDictEqual(separator_page_numbers, {0: False})
@override_settings(CONSUMER_BARCODE_STRING="CUSTOM BARCODE")
- def test_scan_file_for_separating_custom_barcodes(self):
+ def test_scan_file_for_separating_custom_barcodes(self) -> None:
"""
GIVEN:
- PDF file containing a separator on page 0 (zero indexed)
self.assertDictEqual(separator_page_numbers, {0: False})
@override_settings(CONSUMER_BARCODE_STRING="CUSTOM BARCODE")
- def test_scan_file_for_separating_custom_qr_barcodes(self):
+ def test_scan_file_for_separating_custom_qr_barcodes(self) -> None:
"""
GIVEN:
- PDF file containing a separator on page 0 (zero indexed)
self.assertDictEqual(separator_page_numbers, {0: False})
@override_settings(CONSUMER_BARCODE_STRING="CUSTOM BARCODE")
- def test_scan_file_for_separating_custom_128_barcodes(self):
+ def test_scan_file_for_separating_custom_128_barcodes(self) -> None:
"""
GIVEN:
- PDF file containing a separator on page 0 (zero indexed)
self.assertEqual(reader.pdf_file, test_file)
self.assertDictEqual(separator_page_numbers, {0: False})
- def test_scan_file_for_separating_wrong_qr_barcodes(self):
+ def test_scan_file_for_separating_wrong_qr_barcodes(self) -> None:
"""
GIVEN:
- PDF file containing a separator on page 0 (zero indexed)
self.assertDictEqual(separator_page_numbers, {})
@override_settings(CONSUMER_BARCODE_STRING="ADAR-NEXTDOC")
- def test_scan_file_qr_barcodes_was_problem(self):
+ def test_scan_file_qr_barcodes_was_problem(self) -> None:
"""
GIVEN:
- Input PDF with certain QR codes that aren't detected at current size
self.assertGreater(len(reader.barcodes), 0)
self.assertDictEqual(separator_page_numbers, {1: False})
- def test_scan_file_for_separating_barcodes_password(self):
+ def test_scan_file_for_separating_barcodes_password(self) -> None:
"""
GIVEN:
- Password protected PDF
self.assertEqual(reader.pdf_file, test_file)
self.assertDictEqual(separator_page_numbers, {})
- def test_separate_pages(self):
+ def test_separate_pages(self) -> None:
"""
GIVEN:
- Input PDF 2 pages after separation
self.assertEqual(reader.pdf_file, test_file)
self.assertEqual(len(documents), 2)
- def test_separate_pages_double_code(self):
+ def test_separate_pages_double_code(self) -> None:
"""
GIVEN:
- Input PDF with two patch code pages in a row
self.assertEqual(len(documents), 2)
@override_settings(CONSUMER_ENABLE_BARCODES=True)
- def test_separate_pages_no_list(self):
+ def test_separate_pages_no_list(self) -> None:
"""
GIVEN:
- Input file to separate
CONSUMER_ENABLE_BARCODES=True,
CONSUMER_BARCODE_TIFF_SUPPORT=True,
)
- def test_consume_barcode_unsupported_jpg_file(self):
+ def test_consume_barcode_unsupported_jpg_file(self) -> None:
"""
GIVEN:
- JPEG image as input
CONSUMER_ENABLE_BARCODES=True,
CONSUMER_ENABLE_ASN_BARCODE=True,
)
- def test_separate_pages_by_asn_barcodes_and_patcht(self):
+ def test_separate_pages_by_asn_barcodes_and_patcht(self) -> None:
"""
GIVEN:
- Input PDF with a patch code on page 3 and ASN barcodes on pages 1,5,6,9,11
CONSUMER_ENABLE_BARCODES=True,
CONSUMER_ENABLE_ASN_BARCODE=True,
)
- def test_separate_pages_by_asn_barcodes(self):
+ def test_separate_pages_by_asn_barcodes(self) -> None:
"""
GIVEN:
- Input PDF with ASN barcodes on pages 1,3,4,7,9
CONSUMER_ENABLE_ASN_BARCODE=True,
CONSUMER_BARCODE_RETAIN_SPLIT_PAGES=True,
)
- def test_separate_pages_by_asn_barcodes_and_patcht_retain_pages(self):
+ def test_separate_pages_by_asn_barcodes_and_patcht_retain_pages(self) -> None:
"""
GIVEN:
- Input PDF with a patch code on page 3 and ASN barcodes on pages 1,5,6,9,11
},
)
- def test_barcode_config(self):
+ def test_barcode_config(self) -> None:
"""
GIVEN:
- Barcode app config is set (settings are not)
TestCase,
):
@override_settings(CONSUMER_ENABLE_BARCODES=True)
- def test_consume_barcode_file(self):
+ def test_consume_barcode_file(self) -> None:
"""
GIVEN:
- Incoming file with at least 1 barcode producing 2 documents
reader.cleanup()
@override_settings(CONSUMER_ASN_BARCODE_PREFIX="CUSTOM-PREFIX-")
- def test_scan_file_for_asn_custom_prefix(self):
+ def test_scan_file_for_asn_custom_prefix(self) -> None:
"""
GIVEN:
- PDF containing an ASN barcode with custom prefix
self.assertEqual(reader.pdf_file, test_file)
self.assertEqual(asn, 123)
- def test_scan_file_for_asn_barcode(self):
+ def test_scan_file_for_asn_barcode(self) -> None:
"""
GIVEN:
- PDF containing an ASN barcode
self.assertEqual(reader.pdf_file, test_file)
self.assertEqual(asn, 123)
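# A minimal sketch of ASN extraction from a barcode value, mirroring the
# asn == 123 assertions above (assumes the configured prefix is followed by
# a purely numeric suffix):
import re


def asn_from_barcode(value: str, prefix: str = "ASN") -> int | None:
    match = re.fullmatch(re.escape(prefix) + r"(\d+)", value)
    return int(match.group(1)) if match else None


assert asn_from_barcode("ASN00123") == 123
assert asn_from_barcode("CUSTOM-PREFIX-00123", prefix="CUSTOM-PREFIX-") == 123
assert asn_from_barcode("no-asn-here") is None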
- def test_scan_file_for_asn_not_found(self):
+ def test_scan_file_for_asn_not_found(self) -> None:
"""
GIVEN:
- PDF without an ASN barcode
self.assertEqual(reader.pdf_file, test_file)
self.assertEqual(asn, None)
- def test_scan_file_for_asn_barcode_invalid(self):
+ def test_scan_file_for_asn_barcode_invalid(self) -> None:
"""
GIVEN:
- PDF containing an ASN barcode
self.assertEqual(asn, None)
@override_settings(CONSUMER_ENABLE_ASN_BARCODE=True)
- def test_consume_barcode_file_asn_assignment(self):
+ def test_consume_barcode_file_asn_assignment(self) -> None:
"""
GIVEN:
- PDF containing an ASN barcode
self.assertEqual(document.archive_serial_number, 123)
@override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
- def test_scan_file_for_qrcode_without_upscale(self):
+ def test_scan_file_for_qrcode_without_upscale(self) -> None:
"""
GIVEN:
- A printed and scanned PDF document with a rather small QR code
@override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
@override_settings(CONSUMER_BARCODE_DPI=600)
@override_settings(CONSUMER_BARCODE_UPSCALE=1.5)
- def test_scan_file_for_qrcode_with_upscale(self):
+ def test_scan_file_for_qrcode_with_upscale(self) -> None:
"""
GIVEN:
- A printed and scanned PDF document with a rather small QR code
CONSUMER_ENABLE_TAG_BARCODE=True,
CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
)
- def test_barcode_without_tag_match(self):
+ def test_barcode_without_tag_match(self) -> None:
"""
GIVEN:
- Barcode that does not match any TAG mapping pattern
)
@override_settings(CONSUMER_ENABLE_TAG_BARCODE=True)
- def test_scan_file_without_matching_barcodes(self):
+ def test_scan_file_without_matching_barcodes(self) -> None:
"""
GIVEN:
- PDF containing tag barcodes but none with matching prefix (default "TAG:")
CONSUMER_ENABLE_TAG_BARCODE=False,
CONSUMER_TAG_BARCODE_MAPPING={"CUSTOM-PREFIX-(.*)": "\\g<1>"},
)
- def test_scan_file_with_matching_barcode_but_function_disabled(self):
+ def test_scan_file_with_matching_barcode_but_function_disabled(self) -> None:
"""
GIVEN:
- PDF containing a tag barcode with matching custom prefix
CONSUMER_ENABLE_TAG_BARCODE=True,
CONSUMER_TAG_BARCODE_MAPPING={"CUSTOM-PREFIX-(.*)": "\\g<1>"},
)
- def test_scan_file_for_tag_custom_prefix(self):
+ def test_scan_file_for_tag_custom_prefix(self) -> None:
"""
GIVEN:
- PDF containing a tag barcode with custom prefix
CONSUMER_ENABLE_TAG_BARCODE=True,
CONSUMER_TAG_BARCODE_MAPPING={"ASN(.*)": "\\g<1>"},
)
- def test_scan_file_for_many_custom_tags(self):
+ def test_scan_file_for_many_custom_tags(self) -> None:
"""
GIVEN:
- PDF containing multiple tag barcodes with custom prefix
CONSUMER_ENABLE_TAG_BARCODE=True,
CONSUMER_TAG_BARCODE_MAPPING={"CUSTOM-PREFIX-(.*)": "\\g<3>"},
)
- def test_scan_file_for_tag_raises_value_error(self):
+ def test_scan_file_for_tag_raises_value_error(self) -> None:
"""
GIVEN:
- Any error occurs during tag barcode processing
CONSUMER_TAG_BARCODE_SPLIT=True,
CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
)
- def test_split_on_tag_barcodes(self):
+ def test_split_on_tag_barcodes(self) -> None:
"""
GIVEN:
- PDF containing barcodes with TAG: prefix
CONSUMER_TAG_BARCODE_SPLIT=False,
CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
)
- def test_no_split_when_tag_split_disabled(self):
+ def test_no_split_when_tag_split_disabled(self) -> None:
"""
GIVEN:
- PDF containing TAG barcodes (TAG:invoice, TAG:receipt)
CELERY_TASK_ALWAYS_EAGER=True,
OCR_MODE="skip",
)
- def test_consume_barcode_file_tag_split_and_assignment(self):
+ def test_consume_barcode_file_tag_split_and_assignment(self) -> None:
"""
GIVEN:
- PDF containing TAG barcodes on pages 2 and 4 (TAG:invoice, TAG:receipt)
CONSUMER_TAG_BARCODE_SPLIT=True,
CONSUMER_TAG_BARCODE_MAPPING={"ASN(.*)": "ASN_\\g<1>", "TAG:(.*)": "\\g<1>"},
)
- def test_split_by_mixed_asn_tag_backwards_compat(self):
+ def test_split_by_mixed_asn_tag_backwards_compat(self) -> None:
"""
GIVEN:
- PDF with mixed ASN and TAG barcodes
CONSUMER_TAG_BARCODE_SPLIT=True,
CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
)
- def test_split_by_tag_multiple_per_page(self):
+ def test_split_by_tag_multiple_per_page(self) -> None:
"""
GIVEN:
- PDF with multiple TAG barcodes on same page
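# A minimal sketch of applying a CONSUMER_TAG_BARCODE_MAPPING entry to a
# barcode value (assumption: entries behave as regex pattern -> re.sub
# replacement, which the "\g<1>" group references above suggest):
import re

TAG_MAPPING = {"TAG:(.*)": "\\g<1>"}


def tag_from_barcode(value: str) -> str | None:
    for pattern, replacement in TAG_MAPPING.items():
        if re.match(pattern, value):
            return re.sub(pattern, replacement, value)
    return None


assert tag_from_barcode("TAG:invoice") == "invoice"
assert tag_from_barcode("UNRELATED") is None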
class TestBulkEdit(DirectoriesMixin, TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.owner = User.objects.create(username="test_owner")
self.doc4.tags.add(self.t1, self.t2)
self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{checksum}")
- def test_set_correspondent(self):
+ def test_set_correspondent(self) -> None:
self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 1)
bulk_edit.set_correspondent(
[self.doc1.id, self.doc2.id, self.doc3.id],
_, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
- def test_unset_correspondent(self):
+ def test_unset_correspondent(self) -> None:
self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 1)
bulk_edit.set_correspondent([self.doc1.id, self.doc2.id, self.doc3.id], None)
self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 0)
_, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
- def test_set_document_type(self):
+ def test_set_document_type(self) -> None:
self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 1)
bulk_edit.set_document_type(
[self.doc1.id, self.doc2.id, self.doc3.id],
_, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
- def test_unset_document_type(self):
+ def test_unset_document_type(self) -> None:
self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 1)
bulk_edit.set_document_type([self.doc1.id, self.doc2.id, self.doc3.id], None)
self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 0)
_, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
- def test_set_document_storage_path(self):
+ def test_set_document_storage_path(self) -> None:
"""
GIVEN:
- 5 documents without defined storage path
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])
- def test_unset_document_storage_path(self):
+ def test_unset_document_storage_path(self) -> None:
"""
GIVEN:
- 4 documents without defined storage path
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])
- def test_add_tag(self):
+ def test_add_tag(self) -> None:
self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 2)
bulk_edit.add_tag(
[self.doc1.id, self.doc2.id, self.doc3.id, self.doc4.id],
_, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc3.id])
- def test_remove_tag(self):
+ def test_remove_tag(self) -> None:
self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 2)
bulk_edit.remove_tag([self.doc1.id, self.doc3.id, self.doc4.id], self.t1.id)
self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 1)
_, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc4.id])
- def test_modify_tags(self):
+ def test_modify_tags(self) -> None:
tag_unrelated = Tag.objects.create(name="unrelated")
self.doc2.tags.add(tag_unrelated)
self.doc3.tags.add(tag_unrelated)
# TODO: doc3 should not be affected, but the query for that is rather complicated
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
- def test_modify_custom_fields(self):
+ def test_modify_custom_fields(self) -> None:
"""
GIVEN:
- 2 documents with custom fields
_, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
- def test_modify_custom_fields_with_values(self):
+ def test_modify_custom_fields_with_values(self) -> None:
"""
GIVEN:
- 2 documents with custom fields
self.doc2.custom_fields.filter(field=cf3).first().value,
)
- def test_modify_custom_fields_doclink_self_link(self):
+ def test_modify_custom_fields_doclink_self_link(self) -> None:
"""
GIVEN:
- 2 existing documents
[self.doc1.id],
)
- def test_delete(self):
+ def test_delete(self) -> None:
self.assertEqual(Document.objects.count(), 5)
bulk_edit.delete([self.doc1.id, self.doc2.id])
self.assertEqual(Document.objects.count(), 3)
)
@mock.patch("documents.tasks.bulk_update_documents.delay")
- def test_set_permissions(self, m):
+ def test_set_permissions(self, m) -> None:
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
assign_perm("view_document", self.group1, self.doc1)
self.assertEqual(groups_with_perms.count(), 1)
@mock.patch("documents.tasks.bulk_update_documents.delay")
- def test_set_permissions_merge(self, m):
+ def test_set_permissions_merge(self, m) -> None:
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
self.doc1.owner = self.user1
self.assertEqual(groups_with_perms.count(), 2)
@mock.patch("documents.models.Document.delete")
- def test_delete_documents_old_uuid_field(self, m):
+ def test_delete_documents_old_uuid_field(self, m) -> None:
m.side_effect = Exception("Data too long for column 'transaction_id' at row 1")
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
bulk_edit.delete(doc_ids)
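# A minimal sketch of the pikepdf page-merge these PDF-action tests drive
# (assumes pikepdf is installed; paths are illustrative):
import pikepdf


def merge_pdfs(paths: list[str], out_path: str) -> None:
    merged = pikepdf.Pdf.new()
    for path in paths:
        with pikepdf.open(path) as src:
            merged.pages.extend(src.pages)  # copy every page in order
    merged.save(out_path)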
class TestPDFActions(DirectoriesMixin, TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
sample1 = self.dirs.scratch_dir / "sample.pdf"
shutil.copy(
self.img_doc.save()
@mock.patch("documents.tasks.consume_file.s")
- def test_merge(self, mock_consume_file):
+ def test_merge(self, mock_consume_file) -> None:
"""
GIVEN:
- Existing documents
)
@mock.patch("documents.tasks.consume_file.s")
- def test_merge_with_archive_fallback(self, mock_consume_file):
+ def test_merge_with_archive_fallback(self, mock_consume_file) -> None:
"""
GIVEN:
- Existing documents
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("pikepdf.open")
- def test_merge_with_errors(self, mock_open_pdf, mock_consume_file):
+ def test_merge_with_errors(self, mock_open_pdf, mock_consume_file) -> None:
"""
GIVEN:
- Existing documents
mock_consume_file.assert_not_called()
@mock.patch("documents.tasks.consume_file.s")
- def test_split(self, mock_consume_file):
+ def test_split(self, mock_consume_file) -> None:
"""
GIVEN:
- Existing documents
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("pikepdf.Pdf.save")
- def test_split_with_errors(self, mock_save_pdf, mock_consume_file):
+ def test_split_with_errors(self, mock_save_pdf, mock_consume_file) -> None:
"""
GIVEN:
- Existing documents
@mock.patch("documents.tasks.bulk_update_documents.si")
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
@mock.patch("celery.chord.delay")
- def test_rotate(self, mock_chord, mock_update_document, mock_update_documents):
+ def test_rotate(
+ self,
+ mock_chord,
+ mock_update_document,
+ mock_update_documents,
+ ) -> None:
"""
GIVEN:
- Existing documents
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
@mock.patch("pikepdf.Pdf.save")
- def test_delete_pages(self, mock_pdf_save, mock_update_archive_file):
+ def test_delete_pages(self, mock_pdf_save, mock_update_archive_file) -> None:
"""
GIVEN:
- Existing documents
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
@mock.patch("pikepdf.Pdf.save")
- def test_delete_pages_with_error(self, mock_pdf_save, mock_update_archive_file):
+ def test_delete_pages_with_error(
+ self,
+ mock_pdf_save,
+ mock_update_archive_file,
+ ) -> None:
"""
GIVEN:
- Existing documents
@mock.patch("documents.bulk_edit.group")
@mock.patch("documents.tasks.consume_file.s")
- def test_edit_pdf_basic_operations(self, mock_consume_file, mock_group):
+ def test_edit_pdf_basic_operations(self, mock_consume_file, mock_group) -> None:
"""
GIVEN:
- Existing document
@mock.patch("documents.bulk_edit.group")
@mock.patch("documents.tasks.consume_file.s")
- def test_edit_pdf_with_user_override(self, mock_consume_file, mock_group):
+ def test_edit_pdf_with_user_override(self, mock_consume_file, mock_group) -> None:
"""
GIVEN:
- Existing document
@mock.patch("documents.bulk_edit.chord")
@mock.patch("documents.tasks.consume_file.s")
- def test_edit_pdf_with_delete_original(self, mock_consume_file, mock_chord):
+ def test_edit_pdf_with_delete_original(self, mock_consume_file, mock_chord) -> None:
"""
GIVEN:
- Existing document
mock_chord.assert_called_once()
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
- def test_edit_pdf_with_update_document(self, mock_update_document):
+ def test_edit_pdf_with_update_document(self, mock_update_document) -> None:
"""
GIVEN:
- A single existing PDF document
@mock.patch("documents.bulk_edit.group")
@mock.patch("documents.tasks.consume_file.s")
- def test_edit_pdf_without_metadata(self, mock_consume_file, mock_group):
+ def test_edit_pdf_without_metadata(self, mock_consume_file, mock_group) -> None:
"""
GIVEN:
- Existing document
@mock.patch("documents.bulk_edit.group")
@mock.patch("documents.tasks.consume_file.s")
- def test_edit_pdf_open_failure(self, mock_consume_file, mock_group):
+ def test_edit_pdf_open_failure(self, mock_consume_file, mock_group) -> None:
"""
GIVEN:
- Existing document
from documents.caching import StoredLRUCache
-def test_lru_cache_entries():
+def test_lru_cache_entries() -> None:
CACHE_TTL = 1
# LRU cache with a capacity of 2 elements
cache = StoredLRUCache("test_lru_cache_key", 2, backend_ttl=CACHE_TTL)
assert cache.get(1) == 1
-def test_stored_lru_cache_key_ttl(mocker):
+def test_stored_lru_cache_key_ttl(mocker) -> None:
mock_backend = mocker.Mock()
cache = StoredLRUCache("test_key", backend=mock_backend, backend_ttl=321)
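# A generic illustration of the two-element LRU eviction these cache tests
# exercise (standalone; not the StoredLRUCache API itself):
from collections import OrderedDict


class TinyLRU:
    def __init__(self, capacity: int) -> None:
        self.capacity = capacity
        self._data: OrderedDict[int, int] = OrderedDict()

    def set(self, key: int, value: int) -> None:
        self._data[key] = value
        self._data.move_to_end(key)  # mark as most recently used
        if len(self._data) > self.capacity:
            self._data.popitem(last=False)  # drop least recently used

    def get(self, key: int) -> int | None:
        if key not in self._data:
            return None
        self._data.move_to_end(key)
        return self._data[key]


cache = TinyLRU(2)
cache.set(1, 1)
cache.set(2, 2)
cache.get(1)  # touch key 1 so key 2 becomes the eviction candidate
cache.set(3, 3)  # evicts key 2
assert cache.get(2) is None
assert cache.get(1) == 1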
class TestDocumentChecks(TestCase):
- def test_parser_check(self):
+ def test_parser_check(self) -> None:
self.assertEqual(parser_check(None), [])
with mock.patch("documents.checks.document_consumer_declaration.send") as m:
],
)
- def test_filename_format_check(self):
+ def test_filename_format_check(self) -> None:
self.assertEqual(filename_format_check(None), [])
with override_settings(FILENAME_FORMAT="{created}/{title}"):
class TestClassifier(DirectoriesMixin, TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.classifier = DocumentClassifier()
self.classifier.preprocess_content = mock.MagicMock(
side_effect=dummy_preprocess,
)
- def generate_test_data(self):
+ def generate_test_data(self) -> None:
self.c1 = Correspondent.objects.create(
name="c1",
matching_algorithm=Correspondent.MATCH_AUTO,
self.doc2.tags.add(self.t3)
self.doc_inbox.tags.add(self.t2)
- def generate_train_and_save(self):
+ def generate_train_and_save(self) -> None:
"""
Generates the training data, trains and saves the updated pickle
file. This ensures the test is using the same scikit learn version
self.classifier.train()
self.classifier.save()
- def test_no_training_data(self):
+ def test_no_training_data(self) -> None:
"""
GIVEN:
- No documents exist to train
with self.assertRaisesMessage(ValueError, "No training data available."):
self.classifier.train()
- def test_no_non_inbox_tags(self):
+ def test_no_non_inbox_tags(self) -> None:
"""
GIVEN:
- No documents without an inbox tag exist
with self.assertRaisesMessage(ValueError, "No training data available."):
self.classifier.train()
- def testEmpty(self):
+ def testEmpty(self) -> None:
"""
GIVEN:
- A document exists
self.assertIsNone(self.classifier.predict_document_type(""))
self.assertIsNone(self.classifier.predict_correspondent(""))
- def testTrain(self):
+ def testTrain(self) -> None:
"""
GIVEN:
- Test data
[self.t1.pk, self.t3.pk],
)
- def testPredict(self):
+ def testPredict(self) -> None:
"""
GIVEN:
- Classifier trained against test data
self.assertEqual(mock_preprocess_content.call_count, 2)
self.assertEqual(mock_transform.call_count, 2)
- def test_no_retrain_if_no_change(self):
+ def test_no_retrain_if_no_change(self) -> None:
"""
GIVEN:
- Classifier trained with current data
self.assertTrue(self.classifier.train())
self.assertFalse(self.classifier.train())
- def test_retrain_if_change(self):
+ def test_retrain_if_change(self) -> None:
"""
GIVEN:
- Classifier trained with current data
self.assertTrue(self.classifier.train())
- def test_retrain_if_auto_match_set_changed(self):
+ def test_retrain_if_auto_match_set_changed(self) -> None:
"""
GIVEN:
- Classifier trained with current data
self.assertTrue(self.classifier.train())
- def testVersionIncreased(self):
+ def testVersionIncreased(self) -> None:
"""
GIVEN:
- Existing classifier model saved at a version
# assure that we can load the classifier after saving it.
classifier2.load()
- def testSaveClassifier(self):
+ def testSaveClassifier(self) -> None:
self.generate_train_and_save()
new_classifier = DocumentClassifier()
self.assertFalse(new_classifier.train())
- def test_load_and_classify(self):
+ def test_load_and_classify(self) -> None:
self.generate_train_and_save()
new_classifier = DocumentClassifier()
self.assertCountEqual(new_classifier.predict_tags(self.doc2.content), [45, 12])
@mock.patch("documents.classifier.pickle.load")
- def test_load_corrupt_file(self, patched_pickle_load: mock.MagicMock):
+ def test_load_corrupt_file(self, patched_pickle_load: mock.MagicMock) -> None:
"""
GIVEN:
- Corrupted classifier pickle file
self.assertIsNone(load_classifier())
patched_pickle_load.assert_called()
- def test_load_new_scikit_learn_version(self):
+ def test_load_new_scikit_learn_version(self) -> None:
"""
GIVEN:
- classifier pickle file created with a different scikit-learn version
# Need to rethink how to pass the load through to a file with a single
# old model?
- def test_one_correspondent_predict(self):
+ def test_one_correspondent_predict(self) -> None:
c1 = Correspondent.objects.create(
name="c1",
matching_algorithm=Correspondent.MATCH_AUTO,
self.classifier.train()
self.assertEqual(self.classifier.predict_correspondent(doc1.content), c1.pk)
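# A minimal sketch of the vectorise-then-classify idea behind these
# correspondent predictions (scikit-learn; texts and labels illustrative):
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.linear_model import LogisticRegression

texts = ["invoice from acme", "reminder invoice acme", "letter from bob"]
labels = [1, 1, 2]  # e.g. correspondent primary keys

vectorizer = CountVectorizer()
model = LogisticRegression().fit(vectorizer.fit_transform(texts), labels)
prediction = model.predict(vectorizer.transform(["another invoice from acme"]))
print(prediction)  # expected: [1]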
- def test_one_correspondent_predict_manydocs(self):
+ def test_one_correspondent_predict_manydocs(self) -> None:
c1 = Correspondent.objects.create(
name="c1",
matching_algorithm=Correspondent.MATCH_AUTO,
self.assertEqual(self.classifier.predict_correspondent(doc1.content), c1.pk)
self.assertIsNone(self.classifier.predict_correspondent(doc2.content))
- def test_one_type_predict(self):
+ def test_one_type_predict(self) -> None:
dt = DocumentType.objects.create(
name="dt",
matching_algorithm=DocumentType.MATCH_AUTO,
self.classifier.train()
self.assertEqual(self.classifier.predict_document_type(doc1.content), dt.pk)
- def test_one_type_predict_manydocs(self):
+ def test_one_type_predict_manydocs(self) -> None:
dt = DocumentType.objects.create(
name="dt",
matching_algorithm=DocumentType.MATCH_AUTO,
self.assertEqual(self.classifier.predict_document_type(doc1.content), dt.pk)
self.assertIsNone(self.classifier.predict_document_type(doc2.content))
- def test_one_path_predict(self):
+ def test_one_path_predict(self) -> None:
sp = StoragePath.objects.create(
name="sp",
matching_algorithm=StoragePath.MATCH_AUTO,
self.classifier.train()
self.assertEqual(self.classifier.predict_storage_path(doc1.content), sp.pk)
- def test_one_path_predict_manydocs(self):
+ def test_one_path_predict_manydocs(self) -> None:
sp = StoragePath.objects.create(
name="sp",
matching_algorithm=StoragePath.MATCH_AUTO,
self.assertEqual(self.classifier.predict_storage_path(doc1.content), sp.pk)
self.assertIsNone(self.classifier.predict_storage_path(doc2.content))
- def test_one_tag_predict(self):
+ def test_one_tag_predict(self) -> None:
t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
doc1 = Document.objects.create(
self.classifier.train()
self.assertListEqual(self.classifier.predict_tags(doc1.content), [t1.pk])
- def test_one_tag_predict_unassigned(self):
+ def test_one_tag_predict_unassigned(self) -> None:
Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
doc1 = Document.objects.create(
self.classifier.train()
self.assertListEqual(self.classifier.predict_tags(doc1.content), [])
- def test_two_tags_predict_singledoc(self):
+ def test_two_tags_predict_singledoc(self) -> None:
t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
t2 = Tag.objects.create(name="t2", matching_algorithm=Tag.MATCH_AUTO, pk=121)
self.classifier.train()
self.assertListEqual(self.classifier.predict_tags(doc4.content), [t1.pk, t2.pk])
- def test_two_tags_predict(self):
+ def test_two_tags_predict(self) -> None:
t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
t2 = Tag.objects.create(name="t2", matching_algorithm=Tag.MATCH_AUTO, pk=121)
self.assertListEqual(self.classifier.predict_tags(doc3.content), [])
self.assertListEqual(self.classifier.predict_tags(doc4.content), [t1.pk, t2.pk])
- def test_one_tag_predict_multi(self):
+ def test_one_tag_predict_multi(self) -> None:
t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
doc1 = Document.objects.create(
self.assertListEqual(self.classifier.predict_tags(doc1.content), [t1.pk])
self.assertListEqual(self.classifier.predict_tags(doc2.content), [t1.pk])
- def test_one_tag_predict_multi_2(self):
+ def test_one_tag_predict_multi_2(self) -> None:
t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
doc1 = Document.objects.create(
self.assertListEqual(self.classifier.predict_tags(doc1.content), [t1.pk])
self.assertListEqual(self.classifier.predict_tags(doc2.content), [])
- def test_load_classifier_not_exists(self):
+ def test_load_classifier_not_exists(self) -> None:
self.assertFalse(Path(settings.MODEL_FILE).exists())
self.assertIsNone(load_classifier())
@mock.patch("documents.classifier.DocumentClassifier.load")
- def test_load_classifier(self, load):
+ def test_load_classifier(self, load) -> None:
Path(settings.MODEL_FILE).touch()
self.assertIsNotNone(load_classifier())
load.assert_called_once()
@pytest.mark.skip(
reason="Disabled caching due to high memory usage - need to investigate.",
)
- def test_load_classifier_cached(self):
+ def test_load_classifier_cached(self) -> None:
classifier = load_classifier()
self.assertIsNotNone(classifier)
load.assert_not_called()
@mock.patch("documents.classifier.DocumentClassifier.load")
- def test_load_classifier_incompatible_version(self, load):
+ def test_load_classifier_incompatible_version(self, load) -> None:
Path(settings.MODEL_FILE).touch()
self.assertTrue(Path(settings.MODEL_FILE).exists())
self.assertFalse(Path(settings.MODEL_FILE).exists())
@mock.patch("documents.classifier.DocumentClassifier.load")
- def test_load_classifier_os_error(self, load):
+ def test_load_classifier_os_error(self, load) -> None:
Path(settings.MODEL_FILE).touch()
self.assertTrue(Path(settings.MODEL_FILE).exists())
self.assertIsNone(load_classifier())
self.assertTrue(Path(settings.MODEL_FILE).exists())
- def test_load_old_classifier_version(self):
+ def test_load_old_classifier_version(self) -> None:
shutil.copy(
Path(__file__).parent / "data" / "v1.17.4.model.pickle",
self.dirs.scratch_dir,
self.assertIsNone(classifier)
@mock.patch("documents.classifier.DocumentClassifier.load")
- def test_load_classifier_raise_exception(self, mock_load):
+ def test_load_classifier_raise_exception(self, mock_load) -> None:
Path(settings.MODEL_FILE).touch()
mock_load.side_effect = IncompatibleClassifierVersionError("Dummy Error")
with self.assertRaises(IncompatibleClassifierVersionError):
load_classifier(raise_exception=True)
-def test_preprocess_content():
+def test_preprocess_content() -> None:
"""
GIVEN:
- Advanced text processing is enabled (default)
assert result == expected_preprocess_content
-def test_preprocess_content_nltk_disabled():
+def test_preprocess_content_nltk_disabled() -> None:
"""
GIVEN:
- Advanced text processing is disabled
assert result == expected_preprocess_content
-def test_preprocess_content_nltk_load_fail(mocker):
+def test_preprocess_content_nltk_load_fail(mocker) -> None:
"""
GIVEN:
- NLTK stop words fail to load
class _BaseTestParser(DocumentParser):
- def get_settings(self):
+ def get_settings(self) -> None:
"""
This parser does not implement additional settings yet
"""
class DummyParser(_BaseTestParser):
- def __init__(self, logging_group, scratch_dir, archive_path):
+ def __init__(self, logging_group, scratch_dir, archive_path) -> None:
super().__init__(logging_group, None)
_, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)
self.archive_path = archive_path
def get_thumbnail(self, document_path, mime_type, file_name=None):
return self.fake_thumb
- def parse(self, document_path, mime_type, file_name=None):
+ def parse(self, document_path, mime_type, file_name=None) -> None:
self.text = "The Text"
def get_thumbnail(self, document_path, mime_type, file_name=None):
return self.fake_thumb
- def __init__(self, logging_group, progress_callback=None):
+ def __init__(self, logging_group, progress_callback=None) -> None:
super().__init__(logging_group, progress_callback)
_, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=self.tempdir)
- def parse(self, document_path, mime_type, file_name=None):
+ def parse(self, document_path, mime_type, file_name=None) -> None:
self.text = "The text"
self.archive_path = Path(self.tempdir / "archive.pdf")
shutil.copy(document_path, self.archive_path)
class FaultyParser(_BaseTestParser):
- def __init__(self, logging_group, scratch_dir):
+ def __init__(self, logging_group, scratch_dir) -> None:
super().__init__(logging_group)
_, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)
class FaultyGenericExceptionParser(_BaseTestParser):
- def __init__(self, logging_group, scratch_dir):
+ def __init__(self, logging_group, scratch_dir) -> None:
super().__init__(logging_group)
_, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)
first_progress_max=100,
last_progress=100,
last_progress_max=100,
- ):
+ ) -> None:
self.assertGreaterEqual(len(self.status.payloads), 2)
payload = self.status.payloads[0]
):
return FaultyGenericExceptionParser(logging_group, self.dirs.scratch_dir)
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
patcher = mock.patch("documents.parsers.document_consumer_declaration.send")
return dst
@override_settings(FILENAME_FORMAT=None, TIME_ZONE="America/Chicago")
- def testNormalOperation(self):
+ def testNormalOperation(self) -> None:
filename = self.get_test_file()
# Get the local time, as an aware datetime
self.assertEqual(document.created.day, rough_create_date_local.day)
@override_settings(FILENAME_FORMAT=None)
- def testDeleteMacFiles(self):
+ def testDeleteMacFiles(self) -> None:
# https://github.com/jonaswinkler/paperless-ng/discussions/1037
filename = self.get_test_file()
self.assertIsNotFile(shadow_file)
self.assertIsNotFile(filename)
- def testOverrideFilename(self):
+ def testOverrideFilename(self) -> None:
filename = self.get_test_file()
override_filename = "Statement for November.pdf"
self._assert_first_last_send_progress()
- def testOverrideTitle(self):
+ def testOverrideTitle(self) -> None:
with self.get_consumer(
self.get_test_file(),
DocumentMetadataOverrides(title="Override Title"),
self.assertEqual(document.title, "Override Title")
self._assert_first_last_send_progress()
- def testOverrideCorrespondent(self):
+ def testOverrideCorrespondent(self) -> None:
c = Correspondent.objects.create(name="test")
with self.get_consumer(
self.assertEqual(document.correspondent.id, c.id)
self._assert_first_last_send_progress()
- def testOverrideDocumentType(self):
+ def testOverrideDocumentType(self) -> None:
dt = DocumentType.objects.create(name="test")
with self.get_consumer(
self.assertEqual(document.document_type.id, dt.id)
self._assert_first_last_send_progress()
- def testOverrideStoragePath(self):
+ def testOverrideStoragePath(self) -> None:
sp = StoragePath.objects.create(name="test")
with self.get_consumer(
self.assertEqual(document.storage_path.id, sp.id)
self._assert_first_last_send_progress()
- def testOverrideTags(self):
+ def testOverrideTags(self) -> None:
t1 = Tag.objects.create(name="t1")
t2 = Tag.objects.create(name="t2")
t3 = Tag.objects.create(name="t3")
self.assertIn(t3, document.tags.all())
self._assert_first_last_send_progress()
- def testOverrideCustomFields(self):
+ def testOverrideCustomFields(self) -> None:
cf1 = CustomField.objects.create(name="Custom Field 1", data_type="string")
cf2 = CustomField.objects.create(
name="Custom Field 2",
)
self._assert_first_last_send_progress()
- def testOverrideAsn(self):
+ def testOverrideAsn(self) -> None:
with self.get_consumer(
self.get_test_file(),
DocumentMetadataOverrides(asn=123),
self.assertEqual(document.archive_serial_number, 123)
self._assert_first_last_send_progress()
- def testMetadataOverridesSkipAsnPropagation(self):
+ def testMetadataOverridesSkipAsnPropagation(self) -> None:
overrides = DocumentMetadataOverrides()
incoming = DocumentMetadataOverrides(skip_asn=True)
self.assertTrue(overrides.skip_asn)
- def testOverrideTitlePlaceholders(self):
+ def testOverrideTitlePlaceholders(self) -> None:
c = Correspondent.objects.create(name="Correspondent Name")
dt = DocumentType.objects.create(name="DocType Name")
self.assertEqual(document.title, f"{c.name}{dt.name} {now.strftime('%m-%y')}")
self._assert_first_last_send_progress()
- def testOverrideOwner(self):
+ def testOverrideOwner(self) -> None:
testuser = User.objects.create(username="testuser")
with self.get_consumer(
self.assertEqual(document.owner, testuser)
self._assert_first_last_send_progress()
- def testOverridePermissions(self):
+ def testOverridePermissions(self) -> None:
testuser = User.objects.create(username="testuser")
testgroup = Group.objects.create(name="testgroup")
self.assertTrue(group_checker.has_perm("view_document", document))
self._assert_first_last_send_progress()
- def testNotAFile(self):
+ def testNotAFile(self) -> None:
with self.assertRaisesMessage(ConsumerError, "File not found"):
with self.get_consumer(Path("non-existing-file")) as consumer:
consumer.run()
self._assert_first_last_send_progress(last_status="FAILED")
- def testDuplicates1(self):
+ def testDuplicates1(self) -> None:
with self.get_consumer(self.get_test_file()) as consumer:
consumer.run()
self.assertEqual(Document.objects.count(), 2)
self._assert_first_last_send_progress()
- def testDuplicates2(self):
+ def testDuplicates2(self) -> None:
with self.get_consumer(self.get_test_file()) as consumer:
consumer.run()
self.assertEqual(Document.objects.count(), 2)
self._assert_first_last_send_progress()
- def testDuplicates3(self):
+ def testDuplicates3(self) -> None:
with self.get_consumer(self.get_test_archive_file()) as consumer:
consumer.run()
with self.get_consumer(self.get_test_file()) as consumer:
consumer.run()
- def testDuplicateInTrash(self):
+ def testDuplicateInTrash(self) -> None:
with self.get_consumer(self.get_test_file()) as consumer:
consumer.run()
self.assertEqual(Document.objects.count(), 1)
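# A minimal sketch of checksum-based duplicate detection, the behaviour the
# duplicate tests above exercise (hashlib only; the lookup against existing
# documents is elided):
import hashlib
from pathlib import Path


def file_checksum(path: Path) -> str:
    return hashlib.md5(path.read_bytes()).hexdigest()


def is_duplicate(path: Path, known_checksums: set[str]) -> bool:
    return file_checksum(path) in known_checksums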
- def testAsnExists(self):
+ def testAsnExists(self) -> None:
with self.get_consumer(
self.get_test_file(),
DocumentMetadataOverrides(asn=123),
) as consumer:
consumer.run()
- def testAsnExistsInTrash(self):
+ def testAsnExistsInTrash(self) -> None:
with self.get_consumer(
self.get_test_file(),
DocumentMetadataOverrides(asn=123),
consumer.run()
@mock.patch("documents.parsers.document_consumer_declaration.send")
- def testNoParsers(self, m):
+ def testNoParsers(self, m) -> None:
m.return_value = []
with self.assertRaisesMessage(
self._assert_first_last_send_progress(last_status="FAILED")
@mock.patch("documents.parsers.document_consumer_declaration.send")
- def testFaultyParser(self, m):
+ def testFaultyParser(self, m) -> None:
m.return_value = [
(
None,
self._assert_first_last_send_progress(last_status="FAILED")
@mock.patch("documents.parsers.document_consumer_declaration.send")
- def testGenericParserException(self, m):
+ def testGenericParserException(self, m) -> None:
m.return_value = [
(
None,
self._assert_first_last_send_progress(last_status="FAILED")
@mock.patch("documents.consumer.ConsumerPlugin._write")
- def testPostSaveError(self, m):
+ def testPostSaveError(self, m) -> None:
filename = self.get_test_file()
m.side_effect = OSError("NO.")
self.assertEqual(Document.objects.all().count(), 0)
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
- def testFilenameHandling(self):
+ def testFilenameHandling(self) -> None:
with self.get_consumer(
self.get_test_file(),
DocumentMetadataOverrides(title="new docs"),
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
@mock.patch("documents.signals.handlers.generate_unique_filename")
- def testFilenameHandlingUnstableFormat(self, m):
+ def testFilenameHandlingUnstableFormat(self, m) -> None:
filenames = ["this", "that", "now this", "i cannot decide"]
def get_filename():
self._assert_first_last_send_progress()
@mock.patch("documents.consumer.load_classifier")
- def testClassifyDocument(self, m):
+ def testClassifyDocument(self, m) -> None:
correspondent = Correspondent.objects.create(
name="test",
matching_algorithm=Correspondent.MATCH_AUTO,
self._assert_first_last_send_progress()
@override_settings(CONSUMER_DELETE_DUPLICATES=True)
- def test_delete_duplicate(self):
+ def test_delete_duplicate(self) -> None:
dst = self.get_test_file()
self.assertIsFile(dst)
self._assert_first_last_send_progress(last_status=ProgressStatusOptions.FAILED)
@override_settings(CONSUMER_DELETE_DUPLICATES=True)
- def test_delete_duplicate_in_trash(self):
+ def test_delete_duplicate_in_trash(self) -> None:
dst = self.get_test_file()
with self.get_consumer(dst) as consumer:
consumer.run()
self.assertEqual(Document.objects.count(), 0)
@override_settings(CONSUMER_DELETE_DUPLICATES=False)
- def test_no_delete_duplicate(self):
+ def test_no_delete_duplicate(self) -> None:
dst = self.get_test_file()
self.assertIsFile(dst)
@override_settings(FILENAME_FORMAT="{title}")
@mock.patch("documents.parsers.document_consumer_declaration.send")
- def test_similar_filenames(self, m):
+ def test_similar_filenames(self, m) -> None:
shutil.copy(
Path(__file__).parent / "samples" / "simple.pdf",
settings.CONSUMPTION_DIR / "simple.pdf",
sanity_check()
@mock.patch("documents.consumer.run_subprocess")
- def test_try_to_clean_invalid_pdf(self, m):
+ def test_try_to_clean_invalid_pdf(self, m) -> None:
shutil.copy(
Path(__file__).parent / "samples" / "invalid_pdf.pdf",
settings.CONSUMPTION_DIR / "invalid_pdf.pdf",
mock_consumer_declaration_send: mock.Mock,
mock_mail_parser_parse: mock.Mock,
mock_mailrule_get: mock.Mock,
- ):
+ ) -> None:
"""
GIVEN:
- A mail document from a mail rule
@mock.patch("documents.consumer.magic.from_file", fake_magic_from_file)
class TestConsumerCreatedDate(DirectoriesMixin, GetConsumerMixin, TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
- def test_consume_date_from_content(self):
+ def test_consume_date_from_content(self) -> None:
"""
GIVEN:
- File content with date in DMY (default) format
)
@override_settings(FILENAME_DATE_ORDER="YMD")
- def test_consume_date_from_filename(self):
+ def test_consume_date_from_filename(self) -> None:
"""
GIVEN:
- File content with date in DMY (default) format
datetime.date(2022, 2, 1),
)
- def test_consume_date_filename_date_use_content(self):
+ def test_consume_date_filename_date_use_content(self) -> None:
"""
GIVEN:
- File content with date in DMY (default) format
@override_settings(
IGNORE_DATES=(datetime.date(2010, 12, 13), datetime.date(2011, 11, 12)),
)
- def test_consume_date_use_content_with_ignore(self):
+ def test_consume_date_use_content_with_ignore(self) -> None:
"""
GIVEN:
- File content with dates in DMY (default) format
@mock.patch("documents.consumer.run_subprocess")
@override_settings(PRE_CONSUME_SCRIPT=None)
- def test_no_pre_consume_script(self, m):
+ def test_no_pre_consume_script(self, m) -> None:
with self.get_consumer(self.test_file) as c:
c.run()
m.assert_not_called()
@mock.patch("documents.consumer.run_subprocess")
@override_settings(PRE_CONSUME_SCRIPT="does-not-exist")
- def test_pre_consume_script_not_found(self, m):
+ def test_pre_consume_script_not_found(self, m) -> None:
with self.get_consumer(self.test_file) as c:
self.assertRaises(ConsumerError, c.run)
m.assert_not_called()
@mock.patch("documents.consumer.run_subprocess")
- def test_pre_consume_script(self, m):
+ def test_pre_consume_script(self, m) -> None:
with tempfile.NamedTemporaryFile() as script:
with override_settings(PRE_CONSUME_SCRIPT=script.name):
with self.get_consumer(self.test_file) as c:
}
self.assertDictEqual(environment, {**environment, **subset})
- def test_script_with_output(self):
+ def test_script_with_output(self) -> None:
"""
GIVEN:
- A script which outputs to stdout and stderr
cm.output,
)
- def test_script_exit_non_zero(self):
+ def test_script_exit_non_zero(self) -> None:
"""
GIVEN:
- A script which exits with a non-zero exit code
@mock.patch("documents.consumer.run_subprocess")
@override_settings(POST_CONSUME_SCRIPT=None)
- def test_no_post_consume_script(self, m):
+ def test_no_post_consume_script(self, m) -> None:
doc = Document.objects.create(title="Test", mime_type="application/pdf")
tag1 = Tag.objects.create(name="a")
tag2 = Tag.objects.create(name="b")
m.assert_not_called()
@override_settings(POST_CONSUME_SCRIPT="does-not-exist")
- def test_post_consume_script_not_found(self):
+ def test_post_consume_script_not_found(self) -> None:
doc = Document.objects.create(title="Test", mime_type="application/pdf")
with self.get_consumer(self.test_file) as consumer:
consumer.run_post_consume_script(doc)
@mock.patch("documents.consumer.run_subprocess")
- def test_post_consume_script_simple(self, m):
+ def test_post_consume_script_simple(self, m) -> None:
with tempfile.NamedTemporaryFile() as script:
with override_settings(POST_CONSUME_SCRIPT=script.name):
doc = Document.objects.create(title="Test", mime_type="application/pdf")
m.assert_called_once()
@mock.patch("documents.consumer.run_subprocess")
- def test_post_consume_script_with_correspondent_and_type(self, m):
+ def test_post_consume_script_with_correspondent_and_type(self, m) -> None:
with tempfile.NamedTemporaryFile() as script:
with override_settings(POST_CONSUME_SCRIPT=script.name):
c = Correspondent.objects.create(name="my_bank")
self.assertDictEqual(environment, {**environment, **subset})
- def test_script_exit_non_zero(self):
+ def test_script_exit_non_zero(self) -> None:
"""
GIVEN:
- A script which exits with a non-zero exit code
@pytest.mark.django_db()
class TestDate:
- def test_date_format_1(self):
+ def test_date_format_1(self) -> None:
text = "lorem ipsum 130218 lorem ipsum"
assert parse_date("", text) is None
- def test_date_format_2(self):
+ def test_date_format_2(self) -> None:
text = "lorem ipsum 2018 lorem ipsum"
assert parse_date("", text) is None
- def test_date_format_3(self):
+ def test_date_format_3(self) -> None:
text = "lorem ipsum 20180213 lorem ipsum"
assert parse_date("", text) is None
- def test_date_format_4(self, settings_timezone: ZoneInfo):
+ def test_date_format_4(self, settings_timezone: ZoneInfo) -> None:
text = "lorem ipsum 13.02.2018 lorem ipsum"
date = parse_date("", text)
assert date == datetime.datetime(2018, 2, 13, 0, 0, tzinfo=settings_timezone)
- def test_date_format_5(self, settings_timezone: ZoneInfo):
+ def test_date_format_5(self, settings_timezone: ZoneInfo) -> None:
text = "lorem ipsum 130218, 2018, 20180213 and lorem 13.02.2018 lorem ipsum"
date = parse_date("", text)
assert date == datetime.datetime(2018, 2, 13, 0, 0, tzinfo=settings_timezone)
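# A minimal sketch of the behaviour pinned above, assuming the suite's Django
# settings are loaded and parse_date is the same helper from documents.parsers:
# the default date order is DMY, so only the unambiguous day-first form yields
# a date.
from documents.parsers import parse_date

assert parse_date("", "lorem 130218 lorem") is None        # too ambiguous
assert parse_date("", "lorem 13.02.2018 lorem").day == 13  # day-first wins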
- def test_date_format_6(self):
+ def test_date_format_6(self) -> None:
text = (
"lorem ipsum\n"
"Wohnort\n"
self,
settings: SettingsWrapper,
settings_timezone: ZoneInfo,
- ):
+ ) -> None:
settings.DATE_PARSER_LANGUAGES = ["de"]
text = "lorem ipsum\nMärz 2019\nlorem ipsum"
date = parse_date("", text)
self,
settings: SettingsWrapper,
settings_timezone: ZoneInfo,
- ):
+ ) -> None:
settings.DATE_PARSER_LANGUAGES = ["de"]
text = (
"lorem ipsum\n"
self,
settings: SettingsWrapper,
settings_timezone: ZoneInfo,
- ):
+ ) -> None:
settings.DATE_PARSER_LANGUAGES = ["de"]
text = "lorem ipsum\n27. Nullmonth 2020\nMärz 2020\nlorem ipsum"
assert parse_date("", text) == datetime.datetime(
tzinfo=settings_timezone,
)
- def test_date_format_10(self, settings_timezone: ZoneInfo):
+ def test_date_format_10(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 22-MAR-2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_11(self, settings_timezone: ZoneInfo):
+ def test_date_format_11(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 22 MAR 2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_12(self, settings_timezone: ZoneInfo):
+ def test_date_format_12(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 22/MAR/2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_13(self, settings_timezone: ZoneInfo):
+ def test_date_format_13(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 22.MAR.2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_14(self, settings_timezone: ZoneInfo):
+ def test_date_format_14(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 22.MAR 2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_15(self):
+ def test_date_format_15(self) -> None:
text = "Customer Number Currency 22.MAR.22 Credit Card 1934829304"
assert parse_date("", text) is None
- def test_date_format_16(self):
+ def test_date_format_16(self) -> None:
text = "Customer Number Currency 22.MAR,22 Credit Card 1934829304"
assert parse_date("", text) is None
- def test_date_format_17(self):
+ def test_date_format_17(self) -> None:
text = "Customer Number Currency 22,MAR,2022 Credit Card 1934829304"
assert parse_date("", text) is None
- def test_date_format_18(self):
+ def test_date_format_18(self) -> None:
text = "Customer Number Currency 22 MAR,2022 Credit Card 1934829304"
assert parse_date("", text) is None
- def test_date_format_19(self, settings_timezone: ZoneInfo):
+ def test_date_format_19(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 21st MAR 2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_20(self, settings_timezone: ZoneInfo):
+ def test_date_format_20(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 22nd March 2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_21(self, settings_timezone: ZoneInfo):
+ def test_date_format_21(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 2nd MAR 2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_22(self, settings_timezone: ZoneInfo):
+ def test_date_format_22(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 23rd MAR 2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_23(self, settings_timezone: ZoneInfo):
+ def test_date_format_23(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 24th MAR 2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_24(self, settings_timezone: ZoneInfo):
+ def test_date_format_24(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 21-MAR-2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_25(self, settings_timezone: ZoneInfo):
+ def test_date_format_25(self, settings_timezone: ZoneInfo) -> None:
text = "Customer Number Currency 25TH MAR 2022 Credit Card 1934829304"
assert parse_date("", text) == datetime.datetime(
2022,
tzinfo=settings_timezone,
)
- def test_date_format_26(self, settings_timezone: ZoneInfo):
+ def test_date_format_26(self, settings_timezone: ZoneInfo) -> None:
text = "CHASE 0 September 25, 2019 JPMorgan Chase Bank, NA. P0 Box 182051"
assert parse_date("", text) == datetime.datetime(
2019,
tzinfo=settings_timezone,
)
- def test_crazy_date_past(self):
+ def test_crazy_date_past(self) -> None:
assert parse_date("", "01-07-0590 00:00:00") is None
- def test_crazy_date_future(self):
+ def test_crazy_date_future(self) -> None:
assert parse_date("", "01-07-2350 00:00:00") is None
- def test_crazy_date_with_spaces(self):
+ def test_crazy_date_with_spaces(self) -> None:
assert parse_date("", "20 408000l 2475") is None
def test_utf_month_names(
self,
settings: SettingsWrapper,
settings_timezone: ZoneInfo,
- ):
+ ) -> None:
settings.DATE_PARSER_LANGUAGES = ["fr", "de", "hr", "cs", "pl", "tr"]
assert parse_date("", "13 décembre 2023") == datetime.datetime(
2023,
tzinfo=settings_timezone,
)
- def test_multiple_dates(self, settings_timezone: ZoneInfo):
+ def test_multiple_dates(self, settings_timezone: ZoneInfo) -> None:
text = """This text has multiple dates.
For example 02.02.2018, 22 July 2022 and December 2021.
But not 24-12-9999 because it's in the future..."""
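# A pure-python reduction of the selection rule the text above relies on
# (plausibility bounds are assumptions): scan candidates in order, drop
# implausible years such as 9999, return the first survivor.
import datetime

def first_plausible(candidates: list[datetime.date]) -> datetime.date | None:
    for candidate in candidates:
        if 1900 <= candidate.year <= datetime.date.today().year + 5:
            return candidate
    return None

assert first_plausible(
    [datetime.date(9999, 12, 24), datetime.date(2018, 2, 2)],
) == datetime.date(2018, 2, 2)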
self,
settings: SettingsWrapper,
settings_timezone: ZoneInfo,
- ):
+ ) -> None:
"""
GIVEN:
- Date parsing from the filename is enabled
self,
settings: SettingsWrapper,
settings_timezone: ZoneInfo,
- ):
+ ) -> None:
"""
GIVEN:
- Date parsing from the filename is enabled
"No date in here",
) == datetime.datetime(2021, 1, 10, 0, 0, tzinfo=settings_timezone)
- def test_filename_date_parse_invalid(self, settings: SettingsWrapper):
+ def test_filename_date_parse_invalid(self, settings: SettingsWrapper) -> None:
"""
GIVEN:
- Date parsing from the filename is enabled
self,
settings: SettingsWrapper,
settings_timezone: ZoneInfo,
- ):
+ ) -> None:
"""
GIVEN:
- Date parsing from the filename is enabled
self,
settings: SettingsWrapper,
settings_timezone: ZoneInfo,
- ):
+ ) -> None:
"""
GIVEN:
- Ignore dates have been set
self,
settings: SettingsWrapper,
settings_timezone: ZoneInfo,
- ):
+ ) -> None:
"""
GIVEN:
- Ignore dates have been set
class TestDelayedQuery(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
# all tests run without permission criteria, so has_no_owner query will always
# be appended.
),
)
- def test_get_permission_criteria(self):
+ def test_get_permission_criteria(self) -> None:
# tests contains tuples of user instances and the expected filter
tests = (
(None, [query.Term("has_owner", text=False)]),
shutil.rmtree(self.thumb_dir)
self.overrides.disable()
- def test_file_deletion(self):
+ def test_file_deletion(self) -> None:
document = Document.objects.create(
correspondent=Correspondent.objects.create(name="Test0"),
title="Title",
empty_trash([document.pk])
self.assertEqual(mock_unlink.call_count, 2)
- def test_document_soft_delete(self):
+ def test_document_soft_delete(self) -> None:
document = Document.objects.create(
correspondent=Correspondent.objects.create(name="Test0"),
title="Title",
empty_trash([document.pk])
self.assertEqual(mock_unlink.call_count, 2)
- def test_file_name(self):
+ def test_file_name(self) -> None:
doc = Document(
mime_type="application/pdf",
title="test",
)
self.assertEqual(doc.get_public_filename(), "2020-12-25 test.pdf")
- def test_file_name_jpg(self):
+ def test_file_name_jpg(self) -> None:
doc = Document(
mime_type="image/jpeg",
title="test",
)
self.assertEqual(doc.get_public_filename(), "2020-12-25 test.jpg")
- def test_file_name_unknown(self):
+ def test_file_name_unknown(self) -> None:
doc = Document(
mime_type="application/zip",
title="test",
)
self.assertEqual(doc.get_public_filename(), "2020-12-25 test.zip")
- def test_file_name_invalid_type(self):
+ def test_file_name_invalid_type(self) -> None:
doc = Document(
mime_type="image/jpegasd",
title="test",
self.assertEqual(doc.get_public_filename(), "2020-12-25 test")
-def test_suggestion_content():
+def test_suggestion_content() -> None:
"""
Check that the document content used for suggestions is cropped only if it exceeds the length limit.
"""
class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
SAMPLE_DIR = Path(__file__).parent / "samples"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.dirs.double_sided_dir = self.dirs.consumption_dir / "double-sided"
self.dirs.double_sided_dir.mkdir()
self.assertIsNotFile(dst)
return msg
- def create_staging_file(self, src="double-sided-odd.pdf", datetime=None):
+ def create_staging_file(self, src="double-sided-odd.pdf", datetime=None) -> None:
shutil.copy(self.SAMPLE_DIR / src, self.staging_file)
if datetime is None:
datetime = dt.datetime.now()
os.utime(str(self.staging_file), (datetime.timestamp(),) * 2)
- def test_odd_numbered_moved_to_staging(self):
+ def test_odd_numbered_moved_to_staging(self) -> None:
"""
GIVEN:
- No staging file exists
)
self.assertIn("Received odd numbered pages", msg)
- def test_collation(self):
+ def test_collation(self) -> None:
"""
GIVEN:
- A staging file not older than TIMEOUT_MINUTES with odd pages exists
r"This is page 4.*This is page 5",
)
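# A pure-python reduction of the collation exercised above (page numbers
# assumed from the regex): odd pages are staged first, even pages arrive
# scanned back-to-front, and the two stacks are interleaved.
def collate(odd_pages: list[int], even_pages_reversed: list[int]) -> list[int]:
    even = list(reversed(even_pages_reversed))
    out: list[int] = []
    for i, page in enumerate(odd_pages):
        out.append(page)
        if i < len(even):
            out.append(even[i])
    return out

assert collate([1, 3, 5], [4, 2]) == [1, 2, 3, 4, 5]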
- def test_staging_file_expiration(self):
+ def test_staging_file_expiration(self) -> None:
"""
GIVEN:
- A staging file older than TIMEOUT_MINUTES exists
self.assertIsFile(self.staging_file)
self.assertIn("Received odd numbered pages", msg)
- def test_less_odd_pages_then_even_fails(self):
+ def test_less_odd_pages_then_even_fails(self) -> None:
"""
GIVEN:
- A valid staging file
self.assertIsNotFile(self.staging_file)
@override_settings(CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT=True)
- def test_tiff_upload_enabled(self):
+ def test_tiff_upload_enabled(self) -> None:
"""
GIVEN:
- CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT is true
Pdf.open(self.staging_file)
@override_settings(CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT=False)
- def test_tiff_upload_disabled(self):
+ def test_tiff_upload_disabled(self) -> None:
"""
GIVEN:
- CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT is false
)
@override_settings(CONSUMER_COLLATE_DOUBLE_SIDED_SUBDIR_NAME="quux")
- def test_different_upload_dir_name(self):
+ def test_different_upload_dir_name(self) -> None:
"""
GIVEN:
- No staging file exists
self.consume_file("double-sided-odd.pdf", Path("..") / "quux" / "foo.pdf")
self.assertIsFile(self.staging_file)
- def test_only_double_sided_dir_is_handled(self):
+ def test_only_double_sided_dir_is_handled(self) -> None:
"""
GIVEN:
- No staging file exists
self.assertIsNotFile(self.staging_file)
self.assertRegex(msg, r"Success. New document id \d+ created")
- def test_subdirectory_upload(self):
+ def test_subdirectory_upload(self) -> None:
"""
GIVEN:
- A staging file exists
)
@override_settings(CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED=False)
- def test_disabled_double_sided_dir_upload(self):
+ def test_disabled_double_sided_dir_upload(self) -> None:
"""
GIVEN:
- CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED is false
class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@override_settings(FILENAME_FORMAT="")
- def test_generate_source_filename(self):
+ def test_generate_source_filename(self) -> None:
document = Document()
document.mime_type = "application/pdf"
document.save()
self.assertEqual(generate_filename(document), Path(f"{document.pk:07d}.pdf"))
@override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
- def test_file_renaming(self):
+ def test_file_renaming(self) -> None:
document = Document()
document.mime_type = "application/pdf"
document.save()
)
@override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
- def test_file_renaming_missing_permissions(self):
+ def test_file_renaming_missing_permissions(self) -> None:
document = Document()
document.mime_type = "application/pdf"
(settings.ORIGINALS_DIR / "none").chmod(0o777)
@override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
- def test_file_renaming_database_error(self):
+ def test_file_renaming_database_error(self) -> None:
Document.objects.create(
mime_type="application/pdf",
checksum="AAAAA",
self.assertEqual(document.filename, "none/none.pdf")
@override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
- def test_document_delete(self):
+ def test_document_delete(self) -> None:
document = Document()
document.mime_type = "application/pdf"
FILENAME_FORMAT="{correspondent}/{correspondent}",
EMPTY_TRASH_DIR=Path(tempfile.mkdtemp()),
)
- def test_document_delete_trash_dir(self):
+ def test_document_delete_trash_dir(self) -> None:
document = Document()
document.mime_type = "application/pdf"
self.assertIsFile(Path(settings.EMPTY_TRASH_DIR) / "none_01.pdf")
@override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
- def test_document_delete_nofile(self):
+ def test_document_delete_nofile(self) -> None:
document = Document()
document.mime_type = "application/pdf"
empty_trash([document.pk])
@override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
- def test_directory_not_empty(self):
+ def test_directory_not_empty(self) -> None:
document = Document()
document.mime_type = "application/pdf"
self.assertIsFile(important_file)
@override_settings(FILENAME_FORMAT="{document_type} - {title}")
- def test_document_type(self):
+ def test_document_type(self) -> None:
dt = DocumentType.objects.create(name="my_doc_type")
d = Document.objects.create(title="the_doc", mime_type="application/pdf")
self.assertEqual(generate_filename(d), Path("my_doc_type - the_doc.pdf"))
@override_settings(FILENAME_FORMAT="{asn} - {title}")
- def test_asn(self):
+ def test_asn(self) -> None:
d1 = Document.objects.create(
title="the_doc",
mime_type="application/pdf",
self.assertEqual(generate_filename(d2), Path("none - the_doc.pdf"))
@override_settings(FILENAME_FORMAT="{title} {tag_list}")
- def test_tag_list(self):
+ def test_tag_list(self) -> None:
doc = Document.objects.create(title="doc1", mime_type="application/pdf")
doc.tags.create(name="tag2")
doc.tags.create(name="tag1")
self.assertEqual(generate_filename(doc), Path("doc2.pdf"))
@override_settings(FILENAME_FORMAT="//etc/something/{title}")
- def test_filename_relative(self):
+ def test_filename_relative(self) -> None:
doc = Document.objects.create(title="doc1", mime_type="application/pdf")
doc.filename = generate_filename(doc)
doc.save()
@override_settings(
FILENAME_FORMAT="{created_year}-{created_month}-{created_day}",
)
- def test_created_year_month_day(self):
+ def test_created_year_month_day(self) -> None:
d1 = timezone.make_aware(datetime.datetime(2020, 3, 6, 1, 1, 1))
doc1 = Document.objects.create(
title="doc1",
@override_settings(
FILENAME_FORMAT="{added_year}-{added_month}-{added_day}",
)
- def test_added_year_month_day(self):
+ def test_added_year_month_day(self) -> None:
d1 = timezone.make_aware(datetime.datetime(232, 1, 9, 1, 1, 1))
doc1 = Document.objects.create(
title="doc1",
@override_settings(
FILENAME_FORMAT="{correspondent}/{correspondent}/{correspondent}",
)
- def test_nested_directory_cleanup(self):
+ def test_nested_directory_cleanup(self) -> None:
document = Document()
document.mime_type = "application/pdf"
self.assertIsDir(settings.ORIGINALS_DIR)
@override_settings(FILENAME_FORMAT="{doc_pk}")
- def test_format_doc_pk(self):
+ def test_format_doc_pk(self) -> None:
document = Document()
document.pk = 1
document.mime_type = "application/pdf"
self.assertEqual(generate_filename(document), Path("0013579.pdf"))
@override_settings(FILENAME_FORMAT=None)
- def test_format_none(self):
+ def test_format_none(self) -> None:
document = Document()
document.pk = 1
document.mime_type = "application/pdf"
self.assertEqual(generate_filename(document), Path("0000001.pdf"))
- def test_try_delete_empty_directories(self):
+ def test_try_delete_empty_directories(self) -> None:
# Create our working directory
tmp: Path = settings.ORIGINALS_DIR / "test_delete_empty"
tmp.mkdir(exist_ok=True, parents=True)
self.assertIsNotDir(tmp / "notempty" / "empty")
@override_settings(FILENAME_FORMAT="{% if x is None %}/{title]")
- def test_invalid_format(self):
+ def test_invalid_format(self) -> None:
document = Document()
document.pk = 1
document.mime_type = "application/pdf"
self.assertEqual(generate_filename(document), Path("0000001.pdf"))
@override_settings(FILENAME_FORMAT="{created__year}")
- def test_invalid_format_key(self):
+ def test_invalid_format_key(self) -> None:
document = Document()
document.pk = 1
document.mime_type = "application/pdf"
self.assertEqual(generate_filename(document), Path("0000001.pdf"))
@override_settings(FILENAME_FORMAT="{title}")
- def test_duplicates(self):
+ def test_duplicates(self) -> None:
document = Document.objects.create(
mime_type="application/pdf",
title="qwe",
@override_settings(FILENAME_FORMAT="{title}")
@mock.patch("documents.signals.handlers.Document.objects.filter")
@mock.patch("documents.signals.handlers.shutil.move")
- def test_no_move_only_save(self, mock_move, mock_filter):
+ def test_no_move_only_save(self, mock_move, mock_filter) -> None:
"""
GIVEN:
- A document with a filename
CELERY_TASK_ALWAYS_EAGER=True,
)
@mock.patch("documents.signals.handlers.update_filename_and_move_files")
- def test_select_cf_updated(self, m):
+ def test_select_cf_updated(self, m) -> None:
"""
GIVEN:
- A document with a select type custom field
class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@override_settings(FILENAME_FORMAT=None)
- def test_create_no_format(self):
+ def test_create_no_format(self) -> None:
original = settings.ORIGINALS_DIR / "0000001.pdf"
archive = settings.ARCHIVE_DIR / "0000001.pdf"
Path(original).touch()
self.assertIsFile(doc.archive_path)
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
- def test_create_with_format(self):
+ def test_create_with_format(self) -> None:
original = settings.ORIGINALS_DIR / "0000001.pdf"
archive = settings.ARCHIVE_DIR / "0000001.pdf"
Path(original).touch()
)
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
- def test_move_archive_gone(self):
+ def test_move_archive_gone(self) -> None:
original = settings.ORIGINALS_DIR / "0000001.pdf"
archive = settings.ARCHIVE_DIR / "0000001.pdf"
Path(original).touch()
self.assertIsNotFile(doc.archive_path)
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
- def test_move_archive_exists(self):
+ def test_move_archive_exists(self) -> None:
original = settings.ORIGINALS_DIR / "0000001.pdf"
archive = settings.ARCHIVE_DIR / "0000001.pdf"
existing_archive_file = settings.ARCHIVE_DIR / "none" / "my_doc.pdf"
self.assertEqual(doc.archive_filename, "none/my_doc_01.pdf")
@override_settings(FILENAME_FORMAT="{title}")
- def test_move_original_only(self):
+ def test_move_original_only(self) -> None:
original = settings.ORIGINALS_DIR / "document_01.pdf"
archive = settings.ARCHIVE_DIR / "document.pdf"
Path(original).touch()
self.assertIsFile(doc.archive_path)
@override_settings(FILENAME_FORMAT="{title}")
- def test_move_archive_only(self):
+ def test_move_archive_only(self) -> None:
original = settings.ORIGINALS_DIR / "document.pdf"
archive = settings.ARCHIVE_DIR / "document_01.pdf"
Path(original).touch()
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
@mock.patch("documents.signals.handlers.shutil.move")
- def test_move_archive_error(self, m):
- def fake_rename(src, dst):
+ def test_move_archive_error(self, m) -> None:
+ def fake_rename(src, dst) -> None:
if "archive" in str(src):
raise OSError
else:
self.assertIsFile(doc.archive_path)
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
- def test_move_file_gone(self):
+ def test_move_file_gone(self) -> None:
original = settings.ORIGINALS_DIR / "0000001.pdf"
archive = settings.ARCHIVE_DIR / "0000001.pdf"
# Path(original).touch()
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
@mock.patch("documents.signals.handlers.shutil.move")
- def test_move_file_error(self, m):
- def fake_rename(src, dst):
+ def test_move_file_error(self, m) -> None:
+ def fake_rename(src, dst) -> None:
if "original" in str(src):
raise OSError
else:
self.assertIsFile(doc.archive_path)
@override_settings(FILENAME_FORMAT="")
- def test_archive_deleted(self):
+ def test_archive_deleted(self) -> None:
original = settings.ORIGINALS_DIR / "0000001.pdf"
archive = settings.ARCHIVE_DIR / "0000001.pdf"
Path(original).touch()
self.assertIsNotFile(doc.archive_path)
@override_settings(FILENAME_FORMAT="{title}")
- def test_archive_deleted2(self):
+ def test_archive_deleted2(self) -> None:
original = settings.ORIGINALS_DIR / "document.webp"
original2 = settings.ORIGINALS_DIR / "0000001.pdf"
archive = settings.ARCHIVE_DIR / "0000001.pdf"
self.assertIsNotFile(doc2.source_path)
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
- def test_database_error(self):
+ def test_database_error(self) -> None:
original = settings.ORIGINALS_DIR / "0000001.pdf"
archive = settings.ARCHIVE_DIR / "0000001.pdf"
Path(original).touch()
class TestFilenameGeneration(DirectoriesMixin, TestCase):
@override_settings(FILENAME_FORMAT="{title}")
- def test_invalid_characters(self):
+ def test_invalid_characters(self) -> None:
doc = Document.objects.create(
title="This. is the title.",
mime_type="application/pdf",
self.assertEqual(generate_filename(doc), Path("my-invalid-..-title-yay.pdf"))
@override_settings(FILENAME_FORMAT="{created}")
- def test_date(self):
+ def test_date(self) -> None:
doc = Document.objects.create(
title="does not matter",
created=datetime.date(2020, 5, 21),
)
self.assertEqual(generate_filename(doc), Path("2020-05-21.pdf"))
- def test_dynamic_path(self):
+ def test_dynamic_path(self) -> None:
"""
GIVEN:
- A document with a defined storage path
)
self.assertEqual(generate_filename(doc), Path("TestFolder/2020-06-25.pdf"))
- def test_dynamic_path_with_none(self):
+ def test_dynamic_path_with_none(self) -> None:
"""
GIVEN:
- A document with a defined storage path
@override_settings(
FILENAME_FORMAT_REMOVE_NONE=True,
)
- def test_dynamic_path_remove_none(self):
+ def test_dynamic_path_remove_none(self) -> None:
"""
GIVEN:
- A document with a defined storage path
sp.save()
self.assertEqual(generate_filename(doc), Path("2020/does not matter.pdf"))
- def test_multiple_doc_paths(self):
+ def test_multiple_doc_paths(self) -> None:
"""
GIVEN:
- Two documents, each with different storage paths
@override_settings(
FILENAME_FORMAT=None,
)
- def test_no_path_fallback(self):
+ def test_no_path_fallback(self) -> None:
"""
GIVEN:
- Two documents, one with defined storage path, the other not
"{{ correspondent or 'missing' }}/{{ title }}"
),
)
- def test_placeholder_matches_none_variants_and_false(self):
+ def test_placeholder_matches_none_variants_and_false(self) -> None:
"""
GIVEN:
- Templates that compare against 'none', '-none-' and rely on truthiness
@override_settings(
FILENAME_FORMAT="{created_year_short}/{created_month_name_short}/{created_month_name}/{title}",
)
- def test_short_names_created(self):
+ def test_short_names_created(self) -> None:
doc = Document.objects.create(
title="The Title",
created=datetime.date(1989, 12, 2),
@override_settings(
FILENAME_FORMAT="{added_year_short}/{added_month_name}/{added_month_name_short}/{title}",
)
- def test_short_names_added(self):
+ def test_short_names_added(self) -> None:
doc = Document.objects.create(
title="The Title",
added=timezone.make_aware(datetime.datetime(1984, 8, 21, 7, 36, 51, 153)),
@override_settings(
FILENAME_FORMAT="{owner_username}/{title}",
)
- def test_document_owner_string(self):
+ def test_document_owner_string(self) -> None:
"""
GIVEN:
- Document with an owner
@override_settings(
FILENAME_FORMAT="{original_name}",
)
- def test_document_original_filename(self):
+ def test_document_original_filename(self) -> None:
"""
GIVEN:
- Document with an original filename
FILENAME_FORMAT="XX{correspondent}/{title}",
FILENAME_FORMAT_REMOVE_NONE=True,
)
- def test_remove_none_not_dir(self):
+ def test_remove_none_not_dir(self) -> None:
"""
GIVEN:
- A document with a filename format that includes correspondent as part of directory name
document.filename = generate_filename(document)
self.assertEqual(document.filename, Path("XX/doc1.pdf"))
- def test_complex_template_strings(self):
+ def test_complex_template_strings(self) -> None:
"""
GIVEN:
- Storage paths with complex conditionals and logic
@override_settings(
FILENAME_FORMAT="{{creation_date}}/{{ title_name_str }}",
)
- def test_template_with_undefined_var(self):
+ def test_template_with_undefined_var(self) -> None:
"""
GIVEN:
- Filename format with one or more undefined variables
@override_settings(
FILENAME_FORMAT="{{created}}/{{ document.save() }}",
)
- def test_template_with_security(self):
+ def test_template_with_security(self) -> None:
"""
GIVEN:
- Filename format which attempts a restricted operation (calling a model method)
"WARNING:paperless.templating:Template attempted restricted operation: <bound method Model.save of <Document: 2020-06-25 Does Matter>> is not safely callable",
)
- def test_template_with_custom_fields(self):
+ def test_template_with_custom_fields(self) -> None:
"""
GIVEN:
- Filename format which accesses custom field data
Path("invoices/0.pdf"),
)
- def test_datetime_filter(self):
+ def test_datetime_filter(self) -> None:
"""
GIVEN:
- Filename format with datetime filter
Path("2024-10-01/Some Title.pdf"),
)
- def test_slugify_filter(self):
+ def test_slugify_filter(self) -> None:
"""
GIVEN:
- Filename format with slugify filter
return super().setUp()
@override_settings(FILENAME_FORMAT=None)
- def test_custom_field_not_in_template_skips_filename_work(self):
+ def test_custom_field_not_in_template_skips_filename_work(self) -> None:
storage_path = StoragePath.objects.create(path="{{created}}/{{ title }}")
self.doc.storage_path = storage_path
self.doc.save()
self.assertEqual(m.call_count, 0)
@override_settings(FILENAME_FORMAT=None)
- def test_custom_field_in_template_triggers_filename_update(self):
+ def test_custom_field_in_template_triggers_filename_update(self) -> None:
storage_path = StoragePath.objects.create(
path="{{ custom_fields|get_cf_value('flavor') }}/{{ title }}",
)
),
],
)
- def test_localize_date_path_building(self, filename_format, expected_filename):
+ def test_localize_date_path_building(
+ self,
+ filename_format,
+ expected_filename,
+ ) -> None:
document = DocumentFactory.create(
title="My Document",
mime_type="application/pdf",
assert f"Unsupported type {type(invalid_value)}" in str(excinfo.value)
- def test_localize_date_raises_error_for_invalid_locale(self):
+ def test_localize_date_raises_error_for_invalid_locale(self) -> None:
with pytest.raises(ValueError) as excinfo:
localize_date(self.TEST_DATE, "medium", "invalid_locale_code")
class TestAutoComplete(DirectoriesMixin, TestCase):
- def test_auto_complete(self):
+ def test_auto_complete(self) -> None:
doc1 = Document.objects.create(
title="doc1",
checksum="A",
self.assertListEqual(index.autocomplete(ix, "tes", limit=1), [b"test2"])
self.assertListEqual(index.autocomplete(ix, "tes", limit=0), [])
- def test_archive_serial_number_ranging(self):
+ def test_archive_serial_number_ranging(self) -> None:
"""
GIVEN:
- Document with an archive serial number above the schema's allowed size
expected_str = "ERROR:paperless.index:Not indexing Archive Serial Number 4294967296 of document 1"
self.assertIn(expected_str, error_str)
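# The boundary exercised above appears to be the unsigned 32-bit limit of the
# index schema's numeric field: 2**32 is exactly the first ASN that no longer
# fits, matching the logged 4294967296.
assert 2**32 == 4_294_967_296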
- def test_archive_serial_number_is_none(self):
+ def test_archive_serial_number_is_none(self) -> None:
"""
GIVEN:
- Document with no archive serial number
self.assertIsNone(kwargs["asn"])
@override_settings(TIME_ZONE="Pacific/Auckland")
- def test_added_today_respects_local_timezone_boundary(self):
+ def test_added_today_respects_local_timezone_boundary(self) -> None:
tz = get_current_timezone()
fixed_now = datetime(2025, 7, 20, 15, 0, 0, tzinfo=tz)
self.assertIn(fragment, result)
return result
- def test_range_keywords(self):
+ def test_range_keywords(self) -> None:
"""
Test various different range keywords
"""
with self.subTest(query=query):
self._assert_rewrite_contains(query, now_dt, *fragments)
- def test_additional_fields(self):
+ def test_additional_fields(self) -> None:
fixed_now = datetime(2025, 7, 20, 15, 30, 45, tzinfo=timezone.utc)
# created
self._assert_rewrite_contains("created:today", fixed_now, "created:[20250720")
# modified
self._assert_rewrite_contains("modified:today", fixed_now, "modified:[20250720")
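# A hypothetical reduction of the rewrite asserted above: relative keywords
# become absolute range prefixes computed from "now" in the active timezone.
from datetime import datetime, timezone

def rewrite_today(field: str, now: datetime) -> str:
    return f"{field}:[{now:%Y%m%d}"  # the real rewrite also closes the range

fixed = datetime(2025, 7, 20, 15, 30, 45, tzinfo=timezone.utc)
assert rewrite_today("created", fixed) == "created:[20250720"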
- def test_basic_syntax_variants(self):
+ def test_basic_syntax_variants(self) -> None:
"""
Test that quoting, casing, and multi-clause queries are parsed.
"""
self.assertIn("added:[20250720", result)
self.assertIn("created:[20250719", result)
- def test_no_match(self):
+ def test_no_match(self) -> None:
"""
Test that queries without keywords are unchanged.
"""
self.assertEqual(query, result)
@override_settings(TIME_ZONE="Pacific/Auckland")
- def test_timezone_awareness(self):
+ def test_timezone_awareness(self) -> None:
"""
Test timezone conversion.
"""
class TestIndexResilience(DirectoriesMixin, SimpleTestCase):
- def _assert_recreate_called(self, mock_create_in):
+ def _assert_recreate_called(self, mock_create_in) -> None:
mock_create_in.assert_called_once()
path_arg, schema_arg = mock_create_in.call_args.args
self.assertEqual(path_arg, settings.INDEX_DIR)
self.assertEqual(schema_arg.__class__.__name__, "Schema")
- def test_transient_missing_segment_does_not_force_recreate(self):
+ def test_transient_missing_segment_does_not_force_recreate(self) -> None:
"""
GIVEN:
- Index directory exists
mock_create_in.assert_not_called()
self.assertEqual(file_marker.read_text(), "keep")
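# A sketch of the retry-then-recreate policy these tests pin down (function
# names hypothetical; the real logic wraps opening the whoosh index):
def open_with_retries(open_fn, recreate_fn, retries: int = 3):
    for _ in range(retries):
        try:
            return open_fn()
        except OSError:
            continue  # transient, e.g. a segment file briefly missing
    return recreate_fn()  # retries exhausted; non-transient errors skip here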
- def test_transient_errors_exhaust_retries_and_recreate(self):
+ def test_transient_errors_exhaust_retries_and_recreate(self) -> None:
"""
GIVEN:
- Index directory exists
cm.output[0],
)
- def test_non_transient_error_recreates_index(self):
+ def test_non_transient_error_recreates_index(self) -> None:
"""
GIVEN:
- Index directory exists
mime_type="application/pdf",
)
- def test_archiver(self):
+ def test_archiver(self) -> None:
doc = self.make_models()
shutil.copy(sample_file, Path(self.dirs.originals_dir) / f"{doc.id:07}.pdf")
call_command("document_archiver", "--processes", "1")
- def test_handle_document(self):
+ def test_handle_document(self) -> None:
doc = self.make_models()
shutil.copy(sample_file, Path(self.dirs.originals_dir) / f"{doc.id:07}.pdf")
self.assertTrue(filecmp.cmp(sample_file, doc.source_path))
self.assertEqual(doc.archive_filename, "none/A.pdf")
- def test_unknown_mime_type(self):
+ def test_unknown_mime_type(self) -> None:
doc = self.make_models()
doc.mime_type = "sdgfh"
doc.save()
self.assertIsFile(doc.source_path)
@override_settings(FILENAME_FORMAT="{title}")
- def test_naming_priorities(self):
+ def test_naming_priorities(self) -> None:
doc1 = Document.objects.create(
checksum="A",
title="document",
class TestMakeIndex(TestCase):
@mock.patch("documents.management.commands.document_index.index_reindex")
- def test_reindex(self, m):
+ def test_reindex(self, m) -> None:
call_command("document_index", "reindex")
m.assert_called_once()
@mock.patch("documents.management.commands.document_index.index_optimize")
- def test_optimize(self, m):
+ def test_optimize(self, m) -> None:
call_command("document_index", "optimize")
m.assert_called_once()
class TestRenamer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@override_settings(FILENAME_FORMAT="")
- def test_rename(self):
+ def test_rename(self) -> None:
doc = Document.objects.create(title="test", mime_type="image/jpeg")
doc.filename = generate_filename(doc)
doc.archive_filename = generate_filename(doc, archive_filename=True)
@mock.patch(
"documents.management.commands.document_create_classifier.train_classifier",
)
- def test_create_classifier(self, m):
+ def test_create_classifier(self, m) -> None:
call_command("document_create_classifier")
m.assert_called_once()
class TestSanityChecker(DirectoriesMixin, TestCase):
- def test_no_issues(self):
+ def test_no_issues(self) -> None:
with self.assertLogs() as capture:
call_command("document_sanity_checker")
self.assertEqual(len(capture.output), 1)
self.assertIn("Sanity checker detected no issues.", capture.output[0])
- def test_errors(self):
+ def test_errors(self) -> None:
doc = Document.objects.create(
title="test",
content="test",
class TestConvertMariaDBUUID(TestCase):
@mock.patch("django.db.connection.schema_editor")
- def test_convert(self, m):
+ def test_convert(self, m) -> None:
m.alter_field.return_value = None
stdout = StringIO()
class TestPruneAuditLogs(TestCase):
- def test_prune_audit_logs(self):
+ def test_prune_audit_logs(self) -> None:
LogEntry.objects.create(
content_type=ContentType.objects.get_for_model(Document),
object_id=1,
return manifest
- def test_exporter(self, *, use_filename_format=False):
+ def test_exporter(self, *, use_filename_format=False) -> None:
shutil.rmtree(Path(self.dirs.media_dir) / "documents")
shutil.copytree(
Path(__file__).parent / "samples" / "documents",
# everything is alright after the test
self.assertEqual(len(messages), 0)
- def test_exporter_with_filename_format(self):
+ def test_exporter_with_filename_format(self) -> None:
shutil.rmtree(Path(self.dirs.media_dir) / "documents")
shutil.copytree(
Path(__file__).parent / "samples" / "documents",
):
self.test_exporter(use_filename_format=True)
- def test_update_export_changed_time(self):
+ def test_update_export_changed_time(self) -> None:
shutil.rmtree(Path(self.dirs.media_dir) / "documents")
shutil.copytree(
Path(__file__).parent / "samples" / "documents",
st_mtime_4 = (self.target / "manifest.json").stat().st_mtime
self.assertEqual(st_mtime_3, st_mtime_4)
- def test_update_export_changed_checksum(self):
+ def test_update_export_changed_checksum(self) -> None:
shutil.rmtree(Path(self.dirs.media_dir) / "documents")
shutil.copytree(
Path(__file__).parent / "samples" / "documents",
self.assertIsFile(self.target / "manifest.json")
- def test_update_export_deleted_document(self):
+ def test_update_export_deleted_document(self) -> None:
shutil.rmtree(Path(self.dirs.media_dir) / "documents")
shutil.copytree(
Path(__file__).parent / "samples" / "documents",
self.assertEqual(len(manifest), 6)
@override_settings(FILENAME_FORMAT="{title}/{correspondent}")
- def test_update_export_changed_location(self):
+ def test_update_export_changed_location(self) -> None:
shutil.rmtree(Path(self.dirs.media_dir) / "documents")
shutil.copytree(
Path(__file__).parent / "samples" / "documents",
self.target / "wow2" / "none_01.pdf",
)
- def test_export_missing_files(self):
+ def test_export_missing_files(self) -> None:
target = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, target)
Document.objects.create(
self.assertRaises(FileNotFoundError, call_command, "document_exporter", target)
@override_settings(PASSPHRASE="test")
- def test_export_zipped(self):
+ def test_export_zipped(self) -> None:
"""
GIVEN:
- Request to export documents to zipfile
self.assertIn("metadata.json", zip.namelist())
@override_settings(PASSPHRASE="test")
- def test_export_zipped_format(self):
+ def test_export_zipped_format(self) -> None:
"""
GIVEN:
- Request to export documents to zipfile
self.assertIn("metadata.json", zip.namelist())
@override_settings(PASSPHRASE="test")
- def test_export_zipped_with_delete(self):
+ def test_export_zipped_with_delete(self) -> None:
"""
GIVEN:
- Request to export documents to zipfile
self.assertIn("manifest.json", zip.namelist())
self.assertIn("metadata.json", zip.namelist())
- def test_export_target_not_exists(self):
+ def test_export_target_not_exists(self) -> None:
"""
GIVEN:
- Request to export documents to directory that doesn't exist
self.assertEqual("That path doesn't exist", str(e.exception))
- def test_export_target_exists_but_is_file(self):
+ def test_export_target_exists_but_is_file(self) -> None:
"""
GIVEN:
- Request to export documents to file instead of directory
self.assertEqual("That path isn't a directory", str(e.exception))
- def test_export_target_not_writable(self):
+ def test_export_target_not_writable(self) -> None:
"""
GIVEN:
- Request to export documents to directory that's not writeable
str(e.exception),
)
- def test_no_archive(self):
+ def test_no_archive(self) -> None:
"""
GIVEN:
- Request to export documents to directory
call_command("document_importer", "--no-progress-bar", self.target)
self.assertEqual(Document.objects.count(), 4)
- def test_no_thumbnail(self):
+ def test_no_thumbnail(self) -> None:
"""
GIVEN:
- Request to export documents to directory
call_command("document_importer", "--no-progress-bar", self.target)
self.assertEqual(Document.objects.count(), 4)
- def test_split_manifest(self):
+ def test_split_manifest(self) -> None:
"""
GIVEN:
- Request to export documents to directory
self.assertEqual(Document.objects.count(), 4)
self.assertEqual(CustomFieldInstance.objects.count(), 1)
- def test_folder_prefix(self):
+ def test_folder_prefix(self) -> None:
"""
GIVEN:
- Request to export documents to directory
call_command("document_importer", "--no-progress-bar", self.target)
self.assertEqual(Document.objects.count(), 4)
- def test_import_db_transaction_failed(self):
+ def test_import_db_transaction_failed(self) -> None:
"""
GIVEN:
- Import from manifest started
self.assertEqual(ContentType.objects.count(), num_content_type_objects)
self.assertEqual(Permission.objects.count(), num_permission_objects + 1)
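# The all-or-nothing behaviour asserted above is the standard Django pattern;
# a sketch, not the command's exact code:
from django.db import transaction

def import_records(records) -> None:
    with transaction.atomic():  # any exception rolls the whole batch back
        for record in records:
            record.save()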
- def test_exporter_with_auditlog_disabled(self):
+ def test_exporter_with_auditlog_disabled(self) -> None:
shutil.rmtree(Path(self.dirs.media_dir) / "documents")
shutil.copytree(
Path(__file__).parent / "samples" / "documents",
for obj in manifest:
self.assertNotEqual(obj["model"], "auditlog.logentry")
- def test_export_data_only(self):
+ def test_export_data_only(self) -> None:
"""
GIVEN:
- Request to export documents with data only
shutil.rmtree(self.target, ignore_errors=True)
return super().tearDown()
- def test_export_passphrase(self):
+ def test_export_passphrase(self) -> None:
"""
GIVEN:
- A mail account exists
social_token = SocialToken.objects.first()
self.assertIsNotNone(social_token)
- def test_import_crypt_no_passphrase(self):
+ def test_import_crypt_no_passphrase(self) -> None:
"""
GIVEN:
- A mail account exists
"No passphrase was given, but this export contains encrypted fields",
)
- def test_export_warn_plaintext(self):
+ def test_export_warn_plaintext(self) -> None:
"""
GIVEN:
- A mail account exists
)
return stdout.getvalue(), stderr.getvalue()
- def test_invalid_ratio_lower_limit(self):
+ def test_invalid_ratio_lower_limit(self) -> None:
"""
GIVEN:
- Invalid ratio below lower limit
self.call_command("--ratio", "-1")
self.assertIn("The ratio must be between 0 and 100", str(e.exception))
- def test_invalid_ratio_upper_limit(self):
+ def test_invalid_ratio_upper_limit(self) -> None:
"""
GIVEN:
- Invalid ratio above upper limit
self.call_command("--ratio", "101")
self.assertIn("The ratio must be between 0 and 100", str(e.exception))
- def test_invalid_process_count(self):
+ def test_invalid_process_count(self) -> None:
"""
GIVEN:
- Invalid process count less than 1
self.call_command("--processes", "0")
self.assertIn("There must be at least 1 process", str(e.exception))
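# A reduction of the argument checks above (messages copied from the
# assertions; the real checks live in the management command):
def validate(ratio: float, processes: int) -> None:
    if not 0 <= ratio <= 100:
        raise ValueError("The ratio must be between 0 and 100")
    if processes < 1:
        raise ValueError("There must be at least 1 process")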
- def test_no_matches(self):
+ def test_no_matches(self) -> None:
"""
GIVEN:
- 2 documents exist
stdout, _ = self.call_command()
self.assertIn("No matches found", stdout)
- def test_with_matches(self):
+ def test_with_matches(self) -> None:
"""
GIVEN:
- 2 documents exist
stdout, _ = self.call_command("--processes", "1")
self.assertRegex(stdout, self.MSG_REGEX)
- def test_with_3_matches(self):
+ def test_with_3_matches(self) -> None:
"""
GIVEN:
- 3 documents exist
for line in lines:
self.assertRegex(line, self.MSG_REGEX)
- def test_document_deletion(self):
+ def test_document_deletion(self) -> None:
"""
GIVEN:
- 3 documents exist
self.assertIsNotNone(Document.objects.get(pk=1))
self.assertIsNotNone(Document.objects.get(pk=2))
- def test_empty_content(self):
+ def test_empty_content(self) -> None:
"""
GIVEN:
- 2 documents exist, content is empty (pw-protected)
SampleDirMixin,
TestCase,
):
- def test_check_manifest_exists(self):
+ def test_check_manifest_exists(self) -> None:
"""
GIVEN:
- Source directory exists
str(e.exception),
)
- def test_check_manifest_malformed(self):
+ def test_check_manifest_malformed(self) -> None:
"""
GIVEN:
- Source directory exists
str(e.exception),
)
- def test_check_manifest_file_not_found(self):
+ def test_check_manifest_file_not_found(self) -> None:
"""
GIVEN:
- Source directory exists
)
self.assertIn('The manifest file refers to "noexist.pdf"', str(e.exception))
- def test_import_permission_error(self):
+ def test_import_permission_error(self) -> None:
"""
GIVEN:
- Original file which cannot be read from
cmd.check_manifest_validity()
self.assertIn("Failed to read from archive file", str(cm.exception))
- def test_import_source_not_existing(self):
+ def test_import_source_not_existing(self) -> None:
"""
GIVEN:
- Source given doesn't exist
call_command("document_importer", Path("/tmp/notapath"))
self.assertIn("That path doesn't exist", str(cm.exception))
- def test_import_source_not_readable(self):
+ def test_import_source_not_readable(self) -> None:
"""
GIVEN:
- Source given isn't readable
str(cm.exception),
)
- def test_import_source_does_not_exist(self):
+ def test_import_source_does_not_exist(self) -> None:
"""
GIVEN:
- Source directory does not exist
call_command("document_importer", "--no-progress-bar", str(path))
self.assertIn("That path doesn't exist", str(e.exception))
- def test_import_files_exist(self):
+ def test_import_files_exist(self) -> None:
"""
GIVEN:
- Source directory does exist
str(stdout.read()),
)
- def test_import_with_user_exists(self):
+ def test_import_with_user_exists(self) -> None:
"""
GIVEN:
- Source directory does exist
stdout.read(),
)
- def test_import_with_documents_exists(self):
+ def test_import_with_documents_exists(self) -> None:
"""
GIVEN:
- Source directory does exist
str(stdout.read()),
)
- def test_import_no_metadata_or_version_file(self):
+ def test_import_no_metadata_or_version_file(self) -> None:
"""
GIVEN:
- A source directory with a manifest file only
self.assertIn("No version.json or metadata.json file located", stdout_str)
- def test_import_version_file(self):
+ def test_import_version_file(self) -> None:
"""
GIVEN:
- A source directory with a manifest file and version file
self.assertIn("Version mismatch:", stdout_str)
self.assertIn("importing 2.8.1", stdout_str)
- def test_import_zipped_export(self):
+ def test_import_zipped_export(self) -> None:
"""
GIVEN:
- A zip file with correct content (manifest.json and version.json inside)
class TestRetagger(DirectoriesMixin, TestCase):
- def make_models(self):
+ def make_models(self) -> None:
self.sp1 = StoragePath.objects.create(
name="dummy a",
path="{created_data}/{title}",
super().setUp()
self.make_models()
- def test_add_tags(self):
+ def test_add_tags(self) -> None:
call_command("document_retagger", "--tags")
d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
self.assertEqual(d_first.tags.first(), self.tag_first)
self.assertEqual(d_second.tags.first(), self.tag_second)
- def test_add_type(self):
+ def test_add_type(self) -> None:
call_command("document_retagger", "--document_type")
d_first, d_second, _, _ = self.get_updated_docs()
self.assertEqual(d_first.document_type, self.doctype_first)
self.assertEqual(d_second.document_type, self.doctype_second)
- def test_add_correspondent(self):
+ def test_add_correspondent(self) -> None:
call_command("document_retagger", "--correspondent")
d_first, d_second, _, _ = self.get_updated_docs()
self.assertEqual(d_first.correspondent, self.correspondent_first)
self.assertEqual(d_second.correspondent, self.correspondent_second)
- def test_overwrite_preserve_inbox(self):
+ def test_overwrite_preserve_inbox(self) -> None:
self.d1.tags.add(self.tag_second)
call_command("document_retagger", "--tags", "--overwrite")
)
self.assertEqual(d_auto.tags.count(), 0)
- def test_add_tags_suggest(self):
+ def test_add_tags_suggest(self) -> None:
call_command("document_retagger", "--tags", "--suggest")
d_first, d_second, _, d_auto = self.get_updated_docs()
self.assertEqual(d_second.tags.count(), 0)
self.assertEqual(d_auto.tags.count(), 1)
- def test_add_type_suggest(self):
+ def test_add_type_suggest(self) -> None:
call_command("document_retagger", "--document_type", "--suggest")
d_first, d_second, _, _ = self.get_updated_docs()
self.assertIsNone(d_first.document_type)
self.assertIsNone(d_second.document_type)
- def test_add_correspondent_suggest(self):
+ def test_add_correspondent_suggest(self) -> None:
call_command("document_retagger", "--correspondent", "--suggest")
d_first, d_second, _, _ = self.get_updated_docs()
self.assertIsNone(d_first.correspondent)
self.assertIsNone(d_second.correspondent)
- def test_add_tags_suggest_url(self):
+ def test_add_tags_suggest_url(self) -> None:
call_command(
"document_retagger",
"--tags",
self.assertEqual(d_second.tags.count(), 0)
self.assertEqual(d_auto.tags.count(), 1)
- def test_add_type_suggest_url(self):
+ def test_add_type_suggest_url(self) -> None:
call_command(
"document_retagger",
"--document_type",
self.assertIsNone(d_first.document_type)
self.assertIsNone(d_second.document_type)
- def test_add_correspondent_suggest_url(self):
+ def test_add_correspondent_suggest_url(self) -> None:
call_command(
"document_retagger",
"--correspondent",
self.assertIsNone(d_first.correspondent)
self.assertIsNone(d_second.correspondent)
- def test_add_storage_path(self):
+ def test_add_storage_path(self) -> None:
"""
GIVEN:
- 2 storage paths with documents which match them
self.assertIsNone(d_second.storage_path)
self.assertEqual(d_unrelated.storage_path, self.sp3)
- def test_overwrite_storage_path(self):
+ def test_overwrite_storage_path(self) -> None:
"""
GIVEN:
- 2 storage paths with documents which match them
self.assertIsNone(d_second.storage_path)
self.assertEqual(d_unrelated.storage_path, self.sp2)
- def test_id_range_parameter(self):
+ def test_id_range_parameter(self) -> None:
commandOutput = ""
Document.objects.create(
checksum="E",
)
return out.getvalue()
- def test_no_user(self):
+ def test_no_user(self) -> None:
"""
GIVEN:
- Environment does not contain admin user info
"Please check if PAPERLESS_ADMIN_PASSWORD has been set in the environment\n",
)
- def test_create(self):
+ def test_create(self) -> None:
"""
GIVEN:
- Environment does contain admin user password
self.assertEqual(user.email, "root@localhost")
self.assertEqual(out, 'Created superuser "admin" with provided password.\n')
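# A sketch of the env-driven bootstrap these tests cover (messages and the
# PAPERLESS_ADMIN_* variable names are taken from the surrounding tests; the
# exact control flow is an assumption):
import os
from django.contrib.auth.models import User

def ensure_superuser() -> str:
    password = os.environ.get("PAPERLESS_ADMIN_PASSWORD")
    if not password:
        return "Please check if PAPERLESS_ADMIN_PASSWORD has been set in the environment"
    username = os.environ.get("PAPERLESS_ADMIN_USER", "admin")
    if User.objects.filter(username=username).exists():
        return f"Did not create superuser, a user {username} already exists"
    if User.objects.filter(is_superuser=True).exists():
        return "Did not create superuser, the DB already contains superusers"
    mail = os.environ.get("PAPERLESS_ADMIN_MAIL", "root@localhost")
    User.objects.create_superuser(username, mail, password)
    return f'Created superuser "{username}" with provided password.'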
- def test_some_superuser_exists(self):
+ def test_some_superuser_exists(self) -> None:
"""
GIVEN:
- A super user already exists
"Did not create superuser, the DB already contains superusers\n",
)
- def test_admin_superuser_exists(self):
+ def test_admin_superuser_exists(self) -> None:
"""
GIVEN:
- A super user already exists
self.assertTrue(user.check_password("password"))
self.assertEqual(out, "Did not create superuser, a user admin already exists\n")
- def test_admin_user_exists(self):
+ def test_admin_user_exists(self) -> None:
"""
GIVEN:
- A user already exists with the username admin
self.assertFalse(user.is_superuser)
self.assertEqual(out, "Did not create superuser, a user admin already exists\n")
- def test_no_password(self):
+ def test_no_password(self) -> None:
"""
GIVEN:
- No environment data is set
"Please check if PAPERLESS_ADMIN_PASSWORD has been set in the environment\n",
)
- def test_user_email(self):
+ def test_user_email(self) -> None:
"""
GIVEN:
- Environment does contain admin user password
self.assertEqual(user.username, "admin")
self.assertEqual(out, 'Created superuser "admin" with provided password.\n')
- def test_user_username(self):
+ def test_user_username(self) -> None:
"""
GIVEN:
- Environment does contain admin user password
class TestMakeThumbnails(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
- def make_models(self):
+ def make_models(self) -> None:
self.d1 = Document.objects.create(
checksum="A",
title="A",
super().setUp()
self.make_models()
- def test_process_document(self):
+ def test_process_document(self) -> None:
self.assertIsNotFile(self.d1.thumbnail_path)
_process_document(self.d1.id)
self.assertIsFile(self.d1.thumbnail_path)
- def test_process_document_password_protected(self):
+ def test_process_document_password_protected(self) -> None:
self.assertIsFile(get_default_thumbnail())
self.assertIsNotFile(self.d3.thumbnail_path)
_process_document(self.d3.id)
self.assertIsFile(self.d3.thumbnail_path)
@mock.patch("documents.management.commands.document_thumbnails.shutil.move")
- def test_process_document_invalid_mime_type(self, m: mock.Mock):
+ def test_process_document_invalid_mime_type(self, m: mock.Mock) -> None:
self.d1.mime_type = "asdasdasd"
self.d1.save()
# Not called during processing of document
m.assert_not_called()
- def test_command(self):
+ def test_command(self) -> None:
self.assertIsNotFile(self.d1.thumbnail_path)
self.assertIsNotFile(self.d2.thumbnail_path)
call_command("document_thumbnails", "--processes", "1")
self.assertIsFile(self.d1.thumbnail_path)
self.assertIsFile(self.d2.thumbnail_path)
- def test_command_documentid(self):
+ def test_command_documentid(self) -> None:
self.assertIsNotFile(self.d1.thumbnail_path)
self.assertIsNotFile(self.d2.thumbnail_path)
call_command("document_thumbnails", "--processes", "1", "-d", f"{self.d1.id}")
no_match: Iterable[str],
*,
case_sensitive: bool = False,
- ):
+ ) -> None:
for klass in (Tag, Correspondent, DocumentType):
instance = klass.objects.create(
name=str(randint(10000, 99999)),
class TestMatching(_TestMatchingBase):
- def test_match_none(self):
+ def test_match_none(self) -> None:
self._test_matching(
"",
"MATCH_NONE",
),
)
- def test_match_all(self):
+ def test_match_all(self) -> None:
self._test_matching(
"alpha charlie gamma",
"MATCH_ALL",
),
)
- def test_match_any(self):
+ def test_match_any(self) -> None:
self._test_matching(
"alpha charlie gamma",
"MATCH_ANY",
("the lazy fox jumped over the brown dogs",),
)
- def test_match_literal(self):
+ def test_match_literal(self) -> None:
self._test_matching(
"alpha charlie gamma",
"MATCH_LITERAL",
),
)
- def test_match_regex(self):
+ def test_match_regex(self) -> None:
self._test_matching(
r"alpha\w+gamma",
"MATCH_REGEX",
),
)
- def test_tach_invalid_regex(self):
+ def test_match_invalid_regex(self) -> None:
self._test_matching("[", "MATCH_REGEX", [], ["Don't match this"])
- def test_match_regex_timeout_returns_false(self):
+ def test_match_regex_timeout_returns_false(self) -> None:
tag = Tag.objects.create(
name="slow",
match=r"(a+)+$",
f"Expected timeout log, got {cm.output}",
)
- def test_match_fuzzy(self):
+ def test_match_fuzzy(self) -> None:
self._test_matching(
"Springfield, Miss.",
"MATCH_FUZZY",
class TestCaseSensitiveMatching(_TestMatchingBase):
- def test_match_all(self):
+ def test_match_all(self) -> None:
self._test_matching(
"alpha charlie gamma",
"MATCH_ALL",
case_sensitive=True,
)
- def test_match_any(self):
+ def test_match_any(self) -> None:
self._test_matching(
"alpha charlie gamma",
"MATCH_ANY",
case_sensitive=True,
)
- def test_match_literal(self):
+ def test_match_literal(self) -> None:
self._test_matching(
"alpha charlie gamma",
"MATCH_LITERAL",
case_sensitive=True,
)
- def test_match_regex(self):
+ def test_match_regex(self) -> None:
self._test_matching(
r"alpha\w+gamma",
"MATCH_REGEX",
doing what we expect wrt tag & correspondent matching.
"""
- def setUp(self):
+ def setUp(self) -> None:
TestCase.setUp(self)
User.objects.create_user(username="test_consumer", password="12345")
self.doc_contains = Document.objects.create(
def tearDown(self) -> None:
shutil.rmtree(self.index_dir, ignore_errors=True)
- def test_tag_applied_any(self):
+ def test_tag_applied_any(self) -> None:
t1 = Tag.objects.create(
name="test",
match="keyword",
)
self.assertTrue(list(self.doc_contains.tags.all()) == [t1])
- def test_tag_not_applied(self):
+ def test_tag_not_applied(self) -> None:
Tag.objects.create(
name="test",
match="no-match",
)
self.assertTrue(list(self.doc_contains.tags.all()) == [])
- def test_correspondent_applied(self):
+ def test_correspondent_applied(self) -> None:
correspondent = Correspondent.objects.create(
name="test",
match="keyword",
)
self.assertTrue(self.doc_contains.correspondent == correspondent)
- def test_correspondent_not_applied(self):
+ def test_correspondent_not_applied(self) -> None:
Tag.objects.create(
name="test",
match="no-match",
migrate_from = "0007_document_content_length"
migrate_to = "0008_sharelinkbundle"
- def setUpBeforeMigration(self, apps):
+ def setUpBeforeMigration(self, apps) -> None:
User = apps.get_model("auth", "User")
Group = apps.get_model("auth", "Group")
self.Permission = apps.get_model("auth", "Permission")
self.user.user_permissions.add(add_document.id)
self.group.permissions.add(add_document.id)
- def test_share_link_permissions_granted_to_add_document_holders(self):
+ def test_share_link_permissions_granted_to_add_document_holders(self) -> None:
share_perms = self.Permission.objects.filter(
codename__contains="sharelinkbundle",
)
migrate_from = "0008_sharelinkbundle"
migrate_to = "0007_document_content_length"
- def setUpBeforeMigration(self, apps):
+ def setUpBeforeMigration(self, apps) -> None:
User = apps.get_model("auth", "User")
Group = apps.get_model("auth", "Group")
self.Permission = apps.get_model("auth", "Permission")
self.user.user_permissions.add(add_document.id, *self.share_perm_ids)
self.group.permissions.add(add_document.id, *self.share_perm_ids)
- def test_share_link_permissions_revoked_on_reverse(self):
+ def test_share_link_permissions_revoked_on_reverse(self) -> None:
self.assertFalse(
self.user.user_permissions.filter(pk__in=self.share_perm_ids).exists(),
)
class CorrespondentTestCase(TestCase):
- def test___str__(self):
+ def test___str__(self) -> None:
for s in ("test", "oχi", "test with fun_charÅc'\"terß"):
correspondent = CorrespondentFactory.create(name=s)
self.assertEqual(str(correspondent), s)
class DocumentTestCase(TestCase):
- def test_correspondent_deletion_does_not_cascade(self):
+ def test_correspondent_deletion_does_not_cascade(self) -> None:
self.assertEqual(Correspondent.objects.all().count(), 0)
correspondent = CorrespondentFactory.create()
self.assertEqual(Correspondent.objects.all().count(), 1)
class TestParserDiscovery(TestCase):
@mock.patch("documents.parsers.document_consumer_declaration.send")
- def test_get_parser_class_1_parser(self, m, *args):
+ def test_get_parser_class_1_parser(self, m, *args) -> None:
"""
GIVEN:
- Parser declared for a given mimetype
self.assertEqual(get_parser_class_for_mime_type("application/pdf"), DummyParser)
@mock.patch("documents.parsers.document_consumer_declaration.send")
- def test_get_parser_class_n_parsers(self, m, *args):
+ def test_get_parser_class_n_parsers(self, m, *args) -> None:
"""
GIVEN:
- Two parsers declared for a given mimetype
)
@mock.patch("documents.parsers.document_consumer_declaration.send")
- def test_get_parser_class_0_parsers(self, m, *args):
+ def test_get_parser_class_0_parsers(self, m, *args) -> None:
"""
GIVEN:
- No parsers are declared
self.assertIsNone(get_parser_class_for_mime_type("application/pdf"))
@mock.patch("documents.parsers.document_consumer_declaration.send")
- def test_get_parser_class_no_valid_parser(self, m, *args):
+ def test_get_parser_class_no_valid_parser(self, m, *args) -> None:
"""
GIVEN:
- No parser declared for a given mimetype
class TestParserAvailability(TestCase):
- def test_tesseract_parser(self):
+ def test_tesseract_parser(self) -> None:
"""
GIVEN:
- Various mime types
RasterisedDocumentParser,
)
- def test_text_parser(self):
+ def test_text_parser(self) -> None:
"""
GIVEN:
- Various mime types of a text form
TextDocumentParser,
)
- def test_tika_parser(self):
+ def test_tika_parser(self) -> None:
"""
GIVEN:
- Various mime types of an office document form
TikaDocumentParser,
)
- def test_no_parser_for_mime(self):
+ def test_no_parser_for_mime(self) -> None:
self.assertIsNone(get_parser_class_for_mime_type("text/sdgsdf"))
- def test_default_extension(self):
+ def test_default_extension(self) -> None:
# Test no parser declared still returns an extension
self.assertEqual(get_default_file_extension("application/zip"), ".zip")
# Test invalid mimetype returns no extension
self.assertEqual(get_default_file_extension("aasdasd/dgfgf"), "")
- def test_file_extension_support(self):
+ def test_file_extension_support(self) -> None:
self.assertTrue(is_file_ext_supported(".pdf"))
self.assertFalse(is_file_ext_supported(".hsdfh"))
self.assertFalse(is_file_ext_supported(""))
archive_filename="0000001.pdf",
)
- def assertSanityError(self, doc: Document, messageRegex):
+ def assertSanityError(self, doc: Document, messageRegex) -> None:
messages = check_sanity()
self.assertTrue(messages.has_error)
with self.assertLogs() as capture:
)
self.assertRegex(capture.records[1].message, messageRegex)
- def test_no_issues(self):
+ def test_no_issues(self) -> None:
self.make_test_data()
messages = check_sanity()
self.assertFalse(messages.has_error)
"Sanity checker detected no issues.",
)
- def test_no_docs(self):
+ def test_no_docs(self) -> None:
self.assertEqual(len(check_sanity()), 0)
- def test_success(self):
+ def test_success(self) -> None:
self.make_test_data()
self.assertEqual(len(check_sanity()), 0)
- def test_no_thumbnail(self):
+ def test_no_thumbnail(self) -> None:
doc = self.make_test_data()
Path(doc.thumbnail_path).unlink()
self.assertSanityError(doc, "Thumbnail of document does not exist")
- def test_thumbnail_no_access(self):
+ def test_thumbnail_no_access(self) -> None:
doc = self.make_test_data()
Path(doc.thumbnail_path).chmod(0o000)
self.assertSanityError(doc, "Cannot read thumbnail file of document")
Path(doc.thumbnail_path).chmod(0o777)
- def test_no_original(self):
+ def test_no_original(self) -> None:
doc = self.make_test_data()
Path(doc.source_path).unlink()
self.assertSanityError(doc, "Original of document does not exist.")
- def test_original_no_access(self):
+ def test_original_no_access(self) -> None:
doc = self.make_test_data()
Path(doc.source_path).chmod(0o000)
self.assertSanityError(doc, "Cannot read original file of document")
Path(doc.source_path).chmod(0o777)
- def test_original_checksum_mismatch(self):
+ def test_original_checksum_mismatch(self) -> None:
doc = self.make_test_data()
doc.checksum = "WOW"
doc.save()
self.assertSanityError(doc, "Checksum mismatch. Stored: WOW, actual: ")
- def test_no_archive(self):
+ def test_no_archive(self) -> None:
doc = self.make_test_data()
Path(doc.archive_path).unlink()
self.assertSanityError(doc, "Archived version of document does not exist.")
- def test_archive_no_access(self):
+ def test_archive_no_access(self) -> None:
doc = self.make_test_data()
Path(doc.archive_path).chmod(0o000)
self.assertSanityError(doc, "Cannot read archive file of document")
Path(doc.archive_path).chmod(0o777)
- def test_archive_checksum_mismatch(self):
+ def test_archive_checksum_mismatch(self) -> None:
doc = self.make_test_data()
doc.archive_checksum = "WOW"
doc.save()
self.assertSanityError(doc, "Checksum mismatch of archived document")
- def test_empty_content(self):
+ def test_empty_content(self) -> None:
doc = self.make_test_data()
doc.content = ""
doc.save()
"Document contains no OCR data",
)
- def test_orphaned_file(self):
+ def test_orphaned_file(self) -> None:
self.make_test_data()
Path(self.dirs.originals_dir, "orphaned").touch()
messages = check_sanity()
@override_settings(
APP_LOGO="logo/logo.png",
)
- def test_ignore_logo(self):
+ def test_ignore_logo(self) -> None:
self.make_test_data()
logo_dir = Path(self.dirs.media_dir, "logo")
logo_dir.mkdir(parents=True, exist_ok=True)
messages = check_sanity()
self.assertFalse(messages.has_warning)
- def test_ignore_ignorable_files(self):
+ def test_ignore_ignorable_files(self) -> None:
self.make_test_data()
Path(self.dirs.media_dir, ".DS_Store").touch()
Path(self.dirs.media_dir, "desktop.ini").touch()
messages = check_sanity()
self.assertFalse(messages.has_warning)
- def test_archive_filename_no_checksum(self):
+ def test_archive_filename_no_checksum(self) -> None:
doc = self.make_test_data()
doc.archive_checksum = None
doc.save()
self.assertSanityError(doc, "has an archive file, but its checksum is missing.")
- def test_archive_checksum_no_filename(self):
+ def test_archive_checksum_no_filename(self) -> None:
doc = self.make_test_data()
doc.archive_filename = None
doc.save()
class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/share_link_bundles/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="bundle_admin")
self.client.force_authenticate(self.user)
self.document = DocumentFactory.create()
@mock.patch("documents.views.build_share_link_bundle.delay")
- def test_create_bundle_triggers_build_job(self, delay_mock):
+ def test_create_bundle_triggers_build_job(self, delay_mock) -> None:
payload = {
"document_ids": [self.document.pk],
"file_version": ShareLink.FileVersion.ARCHIVE,
self.assertEqual(bundle.status, ShareLinkBundle.Status.PENDING)
delay_mock.assert_called_once_with(bundle.pk)
- def test_create_bundle_rejects_missing_documents(self):
+ def test_create_bundle_rejects_missing_documents(self) -> None:
payload = {
"document_ids": [9999],
"file_version": ShareLink.FileVersion.ARCHIVE,
self.assertIn("document_ids", response.data)
@mock.patch("documents.views.has_perms_owner_aware", return_value=False)
- def test_create_bundle_rejects_insufficient_permissions(self, perms_mock):
+ def test_create_bundle_rejects_insufficient_permissions(self, perms_mock) -> None:
payload = {
"document_ids": [self.document.pk],
"file_version": ShareLink.FileVersion.ARCHIVE,
perms_mock.assert_called()
@mock.patch("documents.views.build_share_link_bundle.delay")
- def test_rebuild_bundle_resets_state(self, delay_mock):
+ def test_rebuild_bundle_resets_state(self, delay_mock) -> None:
bundle = ShareLinkBundle.objects.create(
slug="rebuild-slug",
file_version=ShareLink.FileVersion.ARCHIVE,
self.assertEqual(bundle.file_path, "")
delay_mock.assert_called_once_with(bundle.pk)
- def test_rebuild_bundle_rejects_processing_status(self):
+ def test_rebuild_bundle_rejects_processing_status(self) -> None:
bundle = ShareLinkBundle.objects.create(
slug="processing-slug",
file_version=ShareLink.FileVersion.ARCHIVE,
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("detail", response.data)
- def test_create_bundle_rejects_duplicate_documents(self):
+ def test_create_bundle_rejects_duplicate_documents(self) -> None:
payload = {
"document_ids": [self.document.pk, self.document.pk],
"file_version": ShareLink.FileVersion.ARCHIVE,
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("document_ids", response.data)
- def test_download_ready_bundle_streams_file(self):
+ def test_download_ready_bundle_streams_file(self) -> None:
bundle_file = Path(self.dirs.media_dir) / "bundles" / "ready.zip"
bundle_file.parent.mkdir(parents=True, exist_ok=True)
bundle_file.write_bytes(b"binary-zip-content")
self.assertEqual(content, b"binary-zip-content")
self.assertIn("attachment;", response["Content-Disposition"])
- def test_download_pending_bundle_returns_202(self):
+ def test_download_pending_bundle_returns_202(self) -> None:
bundle = ShareLinkBundle.objects.create(
slug="pendingslug",
file_version=ShareLink.FileVersion.ARCHIVE,
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
- def test_download_failed_bundle_returns_503(self):
+ def test_download_failed_bundle_returns_503(self) -> None:
bundle = ShareLinkBundle.objects.create(
slug="failedslug",
file_version=ShareLink.FileVersion.ARCHIVE,
self.assertEqual(response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE)
- def test_expired_share_link_redirects(self):
+ def test_expired_share_link_redirects(self) -> None:
share_link = ShareLink.objects.create(
slug="expiredlink",
document=self.document,
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertIn("sharelink_expired=1", response["Location"])
- def test_unknown_share_link_redirects(self):
+ def test_unknown_share_link_redirects(self) -> None:
self.client.logout()
response = self.client.get("/share/unknownsharelink/")
class ShareLinkBundleTaskTests(DirectoriesMixin, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.document = DocumentFactory.create()
- def test_cleanup_expired_share_link_bundles(self):
+ def test_cleanup_expired_share_link_bundles(self) -> None:
expired_path = Path(self.dirs.media_dir) / "expired.zip"
expired_path.parent.mkdir(parents=True, exist_ok=True)
expired_path.write_bytes(b"expired")
self.assertFalse(expired_path.exists())
self.assertTrue(active_path.exists())
- def test_cleanup_expired_share_link_bundles_logs_on_failure(self):
+ def test_cleanup_expired_share_link_bundles_logs_on_failure(self) -> None:
expired_bundle = ShareLinkBundle.objects.create(
slug="expired-bundle",
file_version=ShareLink.FileVersion.ARCHIVE,
class ShareLinkBundleBuildTaskTests(DirectoriesMixin, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.document = DocumentFactory.create(
mime_type="application/pdf",
path.write_bytes(content)
return path
- def test_build_share_link_bundle_creates_zip_and_sets_metadata(self):
+ def test_build_share_link_bundle_creates_zip_and_sets_metadata(self) -> None:
self._write_document_file(archive=False, content=b"source")
archive_path = self._write_document_file(archive=True, content=b"archive")
bundle = ShareLinkBundle.objects.create(
self.assertEqual(len(names), 1)
self.assertEqual(zipf.read(names[0]), archive_path.read_bytes())
- def test_build_share_link_bundle_overwrites_existing_file(self):
+ def test_build_share_link_bundle_overwrites_existing_file(self) -> None:
self._write_document_file(archive=False, content=b"source")
bundle = ShareLinkBundle.objects.create(
slug="overwrite",
self.assertTrue(final_path.exists())
self.assertNotEqual(final_path.read_bytes(), b"old")
- def test_build_share_link_bundle_failure_marks_failed(self):
+ def test_build_share_link_bundle_failure_marks_failed(self) -> None:
self._write_document_file(archive=False, content=b"source")
bundle = ShareLinkBundle.objects.create(
slug="fail-bundle",
for path in scratch_zips:
path.unlink(missing_ok=True)
- def test_build_share_link_bundle_missing_bundle_noop(self):
+ def test_build_share_link_bundle_missing_bundle_noop(self) -> None:
# Should not raise when bundle does not exist
build_share_link_bundle(99999)
class ShareLinkBundleFilterSetTests(DirectoriesMixin, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.document = DocumentFactory.create()
self.document.checksum = "doc1checksum"
)
self.bundle_two.documents.set([self.other_document])
- def test_filter_documents_returns_all_for_empty_value(self):
+ def test_filter_documents_returns_all_for_empty_value(self) -> None:
filterset = ShareLinkBundleFilterSet(
data={"documents": ""},
queryset=ShareLinkBundle.objects.all(),
self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])
- def test_filter_documents_handles_invalid_input(self):
+ def test_filter_documents_handles_invalid_input(self) -> None:
filterset = ShareLinkBundleFilterSet(
data={"documents": "invalid"},
queryset=ShareLinkBundle.objects.all(),
self.assertFalse(filterset.qs.exists())
- def test_filter_documents_filters_by_multiple_ids(self):
+ def test_filter_documents_filters_by_multiple_ids(self) -> None:
filterset = ShareLinkBundleFilterSet(
data={"documents": f"{self.document.pk},{self.other_document.pk}"},
queryset=ShareLinkBundle.objects.all(),
self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])
- def test_filter_documents_returns_queryset_for_empty_ids(self):
+ def test_filter_documents_returns_queryset_for_empty_ids(self) -> None:
filterset = ShareLinkBundleFilterSet(
data={"documents": ","},
queryset=ShareLinkBundle.objects.all(),
class ShareLinkBundleModelTests(DirectoriesMixin, APITestCase):
- def test_absolute_file_path_handles_relative_and_absolute(self):
+ def test_absolute_file_path_handles_relative_and_absolute(self) -> None:
relative_path = Path("relative.zip")
bundle = ShareLinkBundle.objects.create(
slug="relative-bundle",
self.assertEqual(bundle.absolute_file_path.resolve(), absolute_path.resolve())
- def test_str_returns_translated_slug(self):
+ def test_str_returns_translated_slug(self) -> None:
bundle = ShareLinkBundle.objects.create(
slug="string-slug",
file_version=ShareLink.FileVersion.ORIGINAL,
self.assertIn("string-slug", str(bundle))
- def test_remove_file_deletes_existing_file(self):
+ def test_remove_file_deletes_existing_file(self) -> None:
bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "remove.zip"
bundle_path.parent.mkdir(parents=True, exist_ok=True)
bundle_path.write_bytes(b"remove-me")
self.assertFalse(bundle_path.exists())
- def test_remove_file_handles_oserror(self):
+ def test_remove_file_handles_oserror(self) -> None:
bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "remove-error.zip"
bundle_path.parent.mkdir(parents=True, exist_ok=True)
bundle_path.write_bytes(b"remove-me")
self.assertTrue(bundle_path.exists())
- def test_delete_calls_remove_file(self):
+ def test_delete_calls_remove_file(self) -> None:
bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "delete.zip"
bundle_path.parent.mkdir(parents=True, exist_ok=True)
bundle_path.write_bytes(b"remove-me")
class ShareLinkBundleSerializerTests(DirectoriesMixin, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.document = DocumentFactory.create()
- def test_validate_document_ids_rejects_duplicates(self):
+ def test_validate_document_ids_rejects_duplicates(self) -> None:
serializer = ShareLinkBundleSerializer(
data={
"document_ids": [self.document.pk, self.document.pk],
self.assertFalse(serializer.is_valid())
self.assertIn("document_ids", serializer.errors)
- def test_create_assigns_documents_and_expiration(self):
+ def test_create_assigns_documents_and_expiration(self) -> None:
serializer = ShareLinkBundleSerializer(
data={
"document_ids": [self.document.pk],
delta=timedelta(seconds=10),
)
- def test_create_raises_when_missing_documents(self):
+ def test_create_raises_when_missing_documents(self) -> None:
serializer = ShareLinkBundleSerializer(
data={
"document_ids": [self.document.pk, 9999],
class TestTagHierarchy(APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.user = User.objects.create_superuser(username="admin")
self.client.force_authenticate(user=self.user)
mime_type="application/pdf",
)
- def test_document_api_add_child_adds_parent(self):
+ def test_document_api_add_child_adds_parent(self) -> None:
self.client.patch(
f"/api/documents/{self.document.pk}/",
{"tags": [self.child.pk]},
tags = set(self.document.tags.values_list("pk", flat=True))
assert tags == {self.parent.pk, self.child.pk}
- def test_document_api_remove_parent_removes_children(self):
+ def test_document_api_remove_parent_removes_children(self) -> None:
self.document.add_nested_tags([self.parent, self.child])
self.client.patch(
f"/api/documents/{self.document.pk}/",
self.document.refresh_from_db()
assert self.document.tags.count() == 0
- def test_document_api_remove_parent_removes_child(self):
+ def test_document_api_remove_parent_removes_child(self) -> None:
self.document.add_nested_tags([self.child])
self.client.patch(
f"/api/documents/{self.document.pk}/",
self.document.refresh_from_db()
assert self.document.tags.count() == 0
- def test_bulk_edit_respects_hierarchy(self):
+ def test_bulk_edit_respects_hierarchy(self) -> None:
bulk_edit.add_tag([self.document.pk], self.child.pk)
self.document.refresh_from_db()
tags = set(self.document.tags.values_list("pk", flat=True))
self.document.refresh_from_db()
assert self.document.tags.count() == 0
- def test_workflow_actions(self):
+ def test_workflow_actions(self) -> None:
workflow = Workflow.objects.create(name="wf", order=0)
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
self.document.refresh_from_db()
assert self.document.tags.count() == 0
- def test_tag_view_parent_update_adds_parent_to_docs(self):
+ def test_tag_view_parent_update_adds_parent_to_docs(self) -> None:
orphan = Tag.objects.create(name="Orphan")
self.document.tags.add(orphan)
tags = set(self.document.tags.values_list("pk", flat=True))
assert tags == {self.parent.pk, orphan.pk}
- def test_child_document_count_included_when_parent_paginated(self):
+ def test_child_document_count_included_when_parent_paginated(self) -> None:
self.document.tags.add(self.child)
response = self.client.get(
assert child_entry["id"] == self.child.pk
assert child_entry["document_count"] == 1
- def test_tag_serializer_populates_document_filter_context(self):
+ def test_tag_serializer_populates_document_filter_context(self) -> None:
context = {}
serializer = TagSerializer(self.parent, context=context)
assert serializer.data # triggers serialization
assert "document_count_filter" in context
- def test_cannot_set_parent_to_self(self):
+ def test_cannot_set_parent_to_self(self) -> None:
tag = Tag.objects.create(name="Selfie")
resp = self.client.patch(
f"/api/tags/{tag.pk}/",
assert resp.status_code == 400
assert "Cannot set itself as parent" in str(resp.data["parent"])
- def test_cannot_set_parent_to_descendant(self):
+ def test_cannot_set_parent_to_descendant(self) -> None:
a = Tag.objects.create(name="A")
b = Tag.objects.create(name="B", tn_parent=a)
c = Tag.objects.create(name="C", tn_parent=b)
assert resp.status_code == 400
assert "Cannot set parent to a descendant" in str(resp.data["parent"])
- def test_max_depth_on_create(self):
+ def test_max_depth_on_create(self) -> None:
a = Tag.objects.create(name="A1")
b = Tag.objects.create(name="B1", tn_parent=a)
c = Tag.objects.create(name="C1", tn_parent=b)
assert "parent" in resp_fail.data
assert "Maximum nesting depth exceeded" in str(resp_fail.data["parent"])
- def test_max_depth_on_move_subtree(self):
+ def test_max_depth_on_move_subtree(self) -> None:
a = Tag.objects.create(name="A2")
b = Tag.objects.create(name="B2", tn_parent=a)
c = Tag.objects.create(name="C2", tn_parent=b)
x.refresh_from_db()
assert x.parent_pk == c.id
- def test_is_root_filter_returns_only_root_tags(self):
+ def test_is_root_filter_returns_only_root_tags(self) -> None:
other_root = Tag.objects.create(name="Other parent")
response = self.client.get(
@mock.patch("documents.consumer.magic.from_file", fake_magic_from_file)
class TestTaskSignalHandler(DirectoriesMixin, TestCase):
- def util_call_before_task_publish_handler(self, headers_to_use, body_to_use):
+ def util_call_before_task_publish_handler(
+ self,
+ headers_to_use,
+ body_to_use,
+ ) -> None:
"""
Simple utility to call the before_task_publish handler and ensure it created a single task
instance
self.assertEqual(PaperlessTask.objects.all().count(), 1)
- def test_before_task_publish_handler_consume(self):
+ def test_before_task_publish_handler_consume(self) -> None:
"""
GIVEN:
- A celery task is started via the consume folder
self.assertEqual(1, task.owner_id)
self.assertEqual(celery.states.PENDING, task.status)
- def test_task_prerun_handler(self):
+ def test_task_prerun_handler(self) -> None:
"""
GIVEN:
- A celery task is started via the consume folder
self.assertEqual(celery.states.STARTED, task.status)
- def test_task_postrun_handler(self):
+ def test_task_postrun_handler(self) -> None:
"""
GIVEN:
- A celery task is started via the consume folder
self.assertEqual(celery.states.SUCCESS, task.status)
- def test_task_failure_handler(self):
+ def test_task_failure_handler(self) -> None:
"""
GIVEN:
- A celery task is started via the consume folder
class TestIndexReindex(DirectoriesMixin, TestCase):
- def test_index_reindex(self):
+ def test_index_reindex(self) -> None:
Document.objects.create(
title="test",
content="my document",
tasks.index_reindex()
- def test_index_optimize(self):
+ def test_index_optimize(self) -> None:
Document.objects.create(
title="test",
content="my document",
class TestClassifier(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@mock.patch("documents.tasks.load_classifier")
- def test_train_classifier_no_auto_matching(self, load_classifier):
+ def test_train_classifier_no_auto_matching(self, load_classifier) -> None:
tasks.train_classifier()
load_classifier.assert_not_called()
@mock.patch("documents.tasks.load_classifier")
- def test_train_classifier_with_auto_tag(self, load_classifier):
+ def test_train_classifier_with_auto_tag(self, load_classifier) -> None:
load_classifier.return_value = None
Tag.objects.create(matching_algorithm=Tag.MATCH_AUTO, name="test")
tasks.train_classifier()
self.assertIsNotFile(settings.MODEL_FILE)
@mock.patch("documents.tasks.load_classifier")
- def test_train_classifier_with_auto_type(self, load_classifier):
+ def test_train_classifier_with_auto_type(self, load_classifier) -> None:
load_classifier.return_value = None
DocumentType.objects.create(matching_algorithm=Tag.MATCH_AUTO, name="test")
tasks.train_classifier()
self.assertIsNotFile(settings.MODEL_FILE)
@mock.patch("documents.tasks.load_classifier")
- def test_train_classifier_with_auto_correspondent(self, load_classifier):
+ def test_train_classifier_with_auto_correspondent(self, load_classifier) -> None:
load_classifier.return_value = None
Correspondent.objects.create(matching_algorithm=Tag.MATCH_AUTO, name="test")
tasks.train_classifier()
load_classifier.assert_called_once()
self.assertIsNotFile(settings.MODEL_FILE)
- def test_train_classifier(self):
+ def test_train_classifier(self) -> None:
c = Correspondent.objects.create(matching_algorithm=Tag.MATCH_AUTO, name="test")
doc = Document.objects.create(correspondent=c, content="test", title="test")
self.assertIsNotFile(settings.MODEL_FILE)
class TestSanityCheck(DirectoriesMixin, TestCase):
@mock.patch("documents.tasks.sanity_checker.check_sanity")
- def test_sanity_check_success(self, m):
+ def test_sanity_check_success(self, m) -> None:
m.return_value = SanityCheckMessages()
self.assertEqual(tasks.sanity_check(), "No issues detected.")
m.assert_called_once()
@mock.patch("documents.tasks.sanity_checker.check_sanity")
- def test_sanity_check_error(self, m):
+ def test_sanity_check_error(self, m) -> None:
messages = SanityCheckMessages()
messages.error(None, "Some error")
m.return_value = messages
m.assert_called_once()
@mock.patch("documents.tasks.sanity_checker.check_sanity")
- def test_sanity_check_error_no_raise(self, m):
+ def test_sanity_check_error_no_raise(self, m) -> None:
messages = SanityCheckMessages()
messages.error(None, "Some error")
m.return_value = messages
m.assert_called_once()
@mock.patch("documents.tasks.sanity_checker.check_sanity")
- def test_sanity_check_warning(self, m):
+ def test_sanity_check_warning(self, m) -> None:
messages = SanityCheckMessages()
messages.warning(None, "Some warning")
m.return_value = messages
m.assert_called_once()
@mock.patch("documents.tasks.sanity_checker.check_sanity")
- def test_sanity_check_info(self, m):
+ def test_sanity_check_info(self, m) -> None:
messages = SanityCheckMessages()
messages.info(None, "Some info")
m.return_value = messages
class TestBulkUpdate(DirectoriesMixin, TestCase):
- def test_bulk_update_documents(self):
+ def test_bulk_update_documents(self) -> None:
doc1 = Document.objects.create(
title="test",
content="my document",
- Document is only deleted if it has been in trash for more than the delay (default 30 days)
"""
- def test_empty_trash(self):
+ def test_empty_trash(self) -> None:
doc = Document.objects.create(
title="test",
content="my document",
class TestUpdateContent(DirectoriesMixin, TestCase):
- def test_update_content_maybe_archive_file(self):
+ def test_update_content_maybe_archive_file(self) -> None:
"""
GIVEN:
- Existing document with archive file
self.assertNotEqual(Document.objects.get(pk=doc.pk).content, "test")
self.assertNotEqual(Document.objects.get(pk=doc.pk).archive_checksum, "wow")
- def test_update_content_maybe_archive_file_no_archive(self):
+ def test_update_content_maybe_archive_file_no_archive(self) -> None:
"""
GIVEN:
- Existing document without archive file
AI_ENABLED=True,
LLM_EMBEDDING_BACKEND="huggingface",
)
- def test_ai_index_success(self):
+ def test_ai_index_success(self) -> None:
"""
GIVEN:
- Document exists, AI is enabled, llm index backend is set
AI_ENABLED=True,
LLM_EMBEDDING_BACKEND="huggingface",
)
- def test_ai_index_failure(self):
+ def test_ai_index_failure(self) -> None:
"""
GIVEN:
- Document exists, AI is enabled, llm index backend is set
self.assertEqual(task.status, states.FAILURE)
self.assertIn("LLM index update failed.", task.result)
- def test_update_document_in_llm_index(self):
+ def test_update_document_in_llm_index(self) -> None:
"""
GIVEN:
- Nothing
tasks.update_document_in_llm_index(doc)
llm_index_add_or_update_document.assert_called_once_with(doc)
- def test_remove_document_from_llm_index(self):
+ def test_remove_document_from_llm_index(self) -> None:
"""
GIVEN:
- Nothing
self.user = User.objects.create_user("testuser")
super().setUp()
- def test_login_redirect(self):
+ def test_login_redirect(self) -> None:
response = self.client.get("/")
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response.url, "/accounts/login/?next=/")
- def test_index(self):
+ def test_index(self) -> None:
self.client.force_login(self.user)
for language_given, language_actual in [
("", "en-US"),
)
@override_settings(BASE_URL="/paperless/")
- def test_index_app_logo_with_base_url(self):
+ def test_index_app_logo_with_base_url(self) -> None:
"""
GIVEN:
- Existing config with app_logo specified
f"/paperless{config.app_logo}",
)
- def test_share_link_views(self):
+ def test_share_link_views(self) -> None:
"""
GIVEN:
- Share link created
self.assertEqual(response.request["PATH_INFO"], "/accounts/login/")
self.assertContains(response, b"Share link has expired")
- def test_list_with_full_permissions(self):
+ def test_list_with_full_permissions(self) -> None:
"""
GIVEN:
- Tags with different permissions
else:
assert False, f"Unexpected tag found: {tag['name']}"
- def test_list_no_n_plus_1_queries(self):
+ def test_list_no_n_plus_1_queries(self) -> None:
"""
GIVEN:
- Tags with different permissions
class TestAISuggestions(DirectoriesMixin, TestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.user = User.objects.create_superuser(username="testuser")
self.document = Document.objects.create(
title="Test Document",
AI_ENABLED=True,
LLM_BACKEND="mock_backend",
)
- def test_suggestions_with_cached_llm(self, mock_refresh_cache, mock_get_cache):
+ def test_suggestions_with_cached_llm(
+ self,
+ mock_refresh_cache,
+ mock_get_cache,
+ ) -> None:
mock_get_cache.return_value = MagicMock(suggestions={"tags": ["tag1", "tag2"]})
self.client.force_login(user=self.user)
def test_suggestions_with_ai_enabled(
self,
mock_get_ai_classification,
- ):
+ ) -> None:
mock_get_ai_classification.return_value = {
"title": "AI Title",
"tags": ["tag1", "tag2"],
},
)
- def test_invalidate_suggestions_cache(self):
+ def test_invalidate_suggestions_cache(self) -> None:
self.client.force_login(user=self.user)
suggestions = {
"title": "AI Title",
class TestAIChatStreamingView(DirectoriesMixin, TestCase):
ENDPOINT = "/api/documents/chat/"
- def setUp(self):
+ def setUp(self) -> None:
self.user = User.objects.create_user(username="testuser", password="pass")
self.client.force_login(user=self.user)
self.document = Document.objects.create(
super().setUp()
@override_settings(AI_ENABLED=False)
- def test_post_ai_disabled(self):
+ def test_post_ai_disabled(self) -> None:
response = self.client.post(
self.ENDPOINT,
data='{"q": "question"}',
@patch("documents.views.stream_chat_with_documents")
@patch("documents.views.get_objects_for_user_owner_aware")
@override_settings(AI_ENABLED=True)
- def test_post_no_document_id(self, mock_get_objects, mock_stream_chat):
+ def test_post_no_document_id(self, mock_get_objects, mock_stream_chat) -> None:
mock_get_objects.return_value = [self.document]
mock_stream_chat.return_value = iter([b"data"])
response = self.client.post(
@patch("documents.views.stream_chat_with_documents")
@override_settings(AI_ENABLED=True)
- def test_post_with_document_id(self, mock_stream_chat):
+ def test_post_with_document_id(self, mock_stream_chat) -> None:
mock_stream_chat.return_value = iter([b"data"])
response = self.client.post(
self.ENDPOINT,
self.assertEqual(response["Content-Type"], "text/event-stream")
@override_settings(AI_ENABLED=True)
- def test_post_with_invalid_document_id(self):
+ def test_post_with_invalid_document_id(self) -> None:
response = self.client.post(
self.ENDPOINT,
data='{"q": "question", "document_id": 999999}',
@patch("documents.views.has_perms_owner_aware")
@override_settings(AI_ENABLED=True)
- def test_post_with_document_id_no_permission(self, mock_has_perms):
+ def test_post_with_document_id_no_permission(self, mock_has_perms) -> None:
mock_has_perms.return_value = False
response = self.client.post(
self.ENDPOINT,
return super().setUp()
- def test_workflow_match(self):
+ def test_workflow_match(self) -> None:
"""
GIVEN:
- Existing workflow
expected_str = f"Document matched {trigger} from {w}"
self.assertIn(expected_str, info)
- def test_workflow_match_mailrule(self):
+ def test_workflow_match_mailrule(self) -> None:
"""
GIVEN:
- Existing workflow
expected_str = f"Document matched {trigger} from {w}"
self.assertIn(expected_str, info)
- def test_workflow_match_multiple(self):
+ def test_workflow_match_multiple(self) -> None:
"""
GIVEN:
- Multiple existing workflows
expected_str = f"Document matched {trigger2} from {w2}"
self.assertIn(expected_str, cm.output[1])
- def test_workflow_fnmatch_path(self):
+ def test_workflow_fnmatch_path(self) -> None:
"""
GIVEN:
- Existing workflow
expected_str = f"Document matched {trigger} from {w}"
self.assertIn(expected_str, cm.output[0])
- def test_workflow_no_match_filename(self):
+ def test_workflow_no_match_filename(self) -> None:
"""
GIVEN:
- Existing workflow
expected_str = f"Document filename {test_file.name} does not match"
self.assertIn(expected_str, cm.output[1])
- def test_workflow_no_match_path(self):
+ def test_workflow_no_match_path(self) -> None:
"""
GIVEN:
- Existing workflow
expected_str = f"Document path {test_file} does not match"
self.assertIn(expected_str, cm.output[1])
- def test_workflow_no_match_mail_rule(self):
+ def test_workflow_no_match_mail_rule(self) -> None:
"""
GIVEN:
- Existing workflow
expected_str = "Document mail rule 99 !="
self.assertIn(expected_str, cm.output[1])
- def test_workflow_no_match_source(self):
+ def test_workflow_no_match_source(self) -> None:
"""
GIVEN:
- Existing workflow
expected_str = f"Document source {DocumentSource.ApiUpload.name} not in ['{DocumentSource.ConsumeFolder.name}', '{DocumentSource.MailFetch.name}']"
self.assertIn(expected_str, cm.output[1])
- def test_document_added_no_match_trigger_type(self):
+ def test_document_added_no_match_trigger_type(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
)
expected_str = f"No matching triggers with type {WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED} found"
self.assertIn(expected_str, cm.output[1])
- def test_workflow_repeat_custom_fields(self):
+ def test_workflow_repeat_custom_fields(self) -> None:
"""
GIVEN:
- Existing workflows which assign the same custom field
expected_str = f"Document matched {trigger} from {w}"
self.assertIn(expected_str, cm.output[0])
- def test_document_added_workflow(self):
+ def test_document_added_workflow(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_filename="*sample*",
self.assertEqual(doc.correspondent, self.c2)
self.assertEqual(doc.title, f"Doc created in {created.year}")
- def test_document_added_no_match_filename(self):
+ def test_document_added_no_match_filename(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_filename="*foobar*",
expected_str = f"Document filename {doc.original_filename} does not match"
self.assertIn(expected_str, cm.output[1])
- def test_document_added_match_content_matching(self):
+ def test_document_added_match_content_matching(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
matching_algorithm=MatchingModel.MATCH_LITERAL,
expected_str = f"Document matched {trigger} from {w}"
self.assertIn(expected_str, cm.output[1])
- def test_document_added_no_match_content_matching(self):
+ def test_document_added_no_match_content_matching(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
matching_algorithm=MatchingModel.MATCH_LITERAL,
expected_str = f"Document content matching settings for algorithm '{trigger.matching_algorithm}' did not match"
self.assertIn(expected_str, cm.output[1])
- def test_document_added_no_match_tags(self):
+ def test_document_added_no_match_tags(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
expected_str = f"Document tags {list(doc.tags.all())} do not include {list(trigger.filter_has_tags.all())}"
self.assertIn(expected_str, cm.output[1])
- def test_document_added_no_match_all_tags(self):
+ def test_document_added_no_match_all_tags(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
)
self.assertIn(expected_str, cm.output[1])
- def test_document_added_excluded_tags(self):
+ def test_document_added_excluded_tags(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
)
self.assertIn(expected_str, cm.output[1])
- def test_document_added_excluded_correspondent(self):
+ def test_document_added_excluded_correspondent(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
)
self.assertIn(expected_str, cm.output[1])
- def test_document_added_excluded_document_types(self):
+ def test_document_added_excluded_document_types(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
)
self.assertIn(expected_str, cm.output[1])
- def test_document_added_excluded_storage_paths(self):
+ def test_document_added_excluded_storage_paths(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
)
self.assertIn(expected_str, cm.output[1])
- def test_document_added_any_filters(self):
+ def test_document_added_any_filters(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
self.assertFalse(matched)
self.assertIn("storage path", reason)
- def test_document_added_custom_field_query_no_match(self):
+ def test_document_added_custom_field_query_no_match(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_custom_field_query=json.dumps(
cm.output[1],
)
- def test_document_added_custom_field_query_match(self):
+ def test_document_added_custom_field_query_match(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_custom_field_query=json.dumps(
self.assertTrue(matched)
self.assertIsNone(reason)
- def test_prefilter_documents_custom_field_query(self):
+ def test_prefilter_documents_custom_field_query(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_custom_field_query=json.dumps(
self.assertIn(doc1, filtered)
self.assertNotIn(doc2, filtered)
- def test_prefilter_documents_any_filters(self):
+ def test_prefilter_documents_any_filters(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
self.assertIn(allowed_document, filtered)
self.assertNotIn(blocked_document, filtered)
- def test_consumption_trigger_requires_filter_configuration(self):
+ def test_consumption_trigger_requires_filter_configuration(self) -> None:
serializer = WorkflowTriggerSerializer(
data={
"type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
[str(error) for error in errors],
)
- def test_workflow_trigger_serializer_clears_empty_custom_field_query(self):
+ def test_workflow_trigger_serializer_clears_empty_custom_field_query(self) -> None:
serializer = WorkflowTriggerSerializer(
data={
"type": WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
self.assertTrue(serializer.is_valid(), serializer.errors)
self.assertIsNone(serializer.validated_data.get("filter_custom_field_query"))
- def test_existing_document_invalid_custom_field_query_configuration(self):
+ def test_existing_document_invalid_custom_field_query_configuration(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_custom_field_query="{ not json",
self.assertFalse(matched)
self.assertEqual(reason, "Invalid custom field query configuration")
- def test_prefilter_documents_returns_none_for_invalid_custom_field_query(self):
+ def test_prefilter_documents_returns_none_for_invalid_custom_field_query(
+ self,
+ ) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_custom_field_query="{ not json",
self.assertEqual(list(filtered), [])
- def test_prefilter_documents_applies_all_filters(self):
+ def test_prefilter_documents_applies_all_filters(self) -> None:
other_document_type = DocumentType.objects.create(name="Other Type")
other_storage_path = StoragePath.objects.create(
name="Blocked path",
self.assertIn(allowed_document, filtered)
self.assertNotIn(blocked_document, filtered)
- def test_document_added_no_match_doctype(self):
+ def test_document_added_no_match_doctype(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_has_document_type=self.dt,
expected_str = f"Document doc type {doc.document_type} does not match {trigger.filter_has_document_type}"
self.assertIn(expected_str, cm.output[1])
- def test_document_added_no_match_correspondent(self):
+ def test_document_added_no_match_correspondent(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_has_correspondent=self.c,
expected_str = f"Document correspondent {doc.correspondent} does not match {trigger.filter_has_correspondent}"
self.assertIn(expected_str, cm.output[1])
- def test_document_added_no_match_storage_path(self):
+ def test_document_added_no_match_storage_path(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_has_storage_path=self.sp,
expected_str = f"Document storage path {doc.storage_path} does not match {trigger.filter_has_storage_path}"
self.assertIn(expected_str, cm.output[1])
- def test_document_added_invalid_title_placeholders(self):
+ def test_document_added_invalid_title_placeholders(self) -> None:
"""
GIVEN:
- Existing workflow with added trigger type
self.assertEqual(doc.title, "Doc {created_year]")
- def test_document_updated_workflow(self):
+ def test_document_updated_workflow(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
filter_has_document_type=self.dt,
self.assertEqual(doc.custom_fields.all().count(), 1)
- def test_document_consumption_workflow_month_placeholder_addded(self):
+ def test_document_consumption_workflow_month_placeholder_added(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
sources=f"{DocumentSource.ApiUpload}",
r"Doc added in \w{3,}",
) # Match any month name (3+ word characters)
- def test_document_updated_workflow_existing_custom_field(self):
+ def test_document_updated_workflow_existing_custom_field(self) -> None:
"""
GIVEN:
- Existing workflow with UPDATED trigger and action that assigns a custom field with a value
doc.refresh_from_db()
self.assertEqual(doc.custom_fields.get(field=self.cf1).value, "new value")
- def test_document_updated_workflow_merge_permissions(self):
+ def test_document_updated_workflow_merge_permissions(self) -> None:
"""
GIVEN:
- Existing workflow with UPDATED trigger and action that sets permissions
# group2 should have been added
self.assertIn(self.group2, group_perms)
- def test_workflow_scheduled_trigger_created(self):
+ def test_workflow_scheduled_trigger_created(self) -> None:
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against the created field and action that assigns owner
doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)
- def test_workflow_scheduled_trigger_added(self):
+ def test_workflow_scheduled_trigger_added(self) -> None:
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against the added field and action that assigns owner
self.assertEqual(doc.owner, self.user2)
@mock.patch("documents.models.Document.objects.filter", autospec=True)
- def test_workflow_scheduled_trigger_modified(self, mock_filter):
+ def test_workflow_scheduled_trigger_modified(self, mock_filter) -> None:
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against the modified field and action that assigns owner
doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)
- def test_workflow_scheduled_trigger_custom_field(self):
+ def test_workflow_scheduled_trigger_custom_field(self) -> None:
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against a custom field and action that assigns owner
doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)
- def test_workflow_scheduled_already_run(self):
+ def test_workflow_scheduled_already_run(self) -> None:
"""
GIVEN:
- Existing workflow with SCHEDULED trigger
doc.refresh_from_db()
self.assertIsNone(doc.owner)
- def test_workflow_scheduled_trigger_too_early(self):
+ def test_workflow_scheduled_trigger_too_early(self) -> None:
"""
GIVEN:
- Existing workflow with SCHEDULED trigger and recurring interval of 7 days
doc.refresh_from_db()
self.assertIsNone(doc.owner)
- def test_workflow_scheduled_recurring_respects_latest_run(self):
+ def test_workflow_scheduled_recurring_respects_latest_run(self) -> None:
"""
GIVEN:
- Scheduled workflow marked as recurring with a 1-day interval
2,
)
- def test_workflow_scheduled_trigger_negative_offset_customfield(self):
+ def test_workflow_scheduled_trigger_negative_offset_customfield(self) -> None:
"""
GIVEN:
- Workflow with offset -7 (i.e., 7 days *before* the date)
doc2.refresh_from_db()
self.assertIsNone(doc2.owner)
- def test_workflow_scheduled_trigger_negative_offset_created(self):
+ def test_workflow_scheduled_trigger_negative_offset_created(self) -> None:
"""
GIVEN:
- Existing workflow with SCHEDULED trigger and negative offset of -7 days (so 7 days before the date)
doc2.refresh_from_db()
self.assertIsNone(doc2.owner) # has not triggered yet
- def test_offset_positive_means_after(self):
+ def test_offset_positive_means_after(self) -> None:
"""
GIVEN:
- Document created 30 days ago
doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)
- def test_workflow_scheduled_filters_queryset(self):
+ def test_workflow_scheduled_filters_queryset(self) -> None:
"""
GIVEN:
- Existing workflow with scheduled trigger
)
self.assertEqual(filtered_docs.count(), 5)
- def test_workflow_enabled_disabled(self):
+ def test_workflow_enabled_disabled(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_filename="*sample*",
self.assertEqual(doc.title, "Title assign owner")
self.assertEqual(doc.owner, self.user2)
- def test_new_trigger_type_raises_exception(self):
+ def test_new_trigger_type_raises_exception(self) -> None:
trigger = WorkflowTrigger.objects.create(
type=99,
)
)
self.assertRaises(Exception, document_matches_workflow, doc, w, 99)
- def test_removal_action_document_updated_workflow(self):
+ def test_removal_action_document_updated_workflow(self) -> None:
"""
GIVEN:
- Workflow with removal action
group_perms: QuerySet = get_groups_with_perms(doc)
self.assertNotIn(self.group1, group_perms)
- def test_removal_action_document_updated_removeall(self):
+ def test_removal_action_document_updated_removeall(self) -> None:
"""
GIVEN:
- Workflow with removal action with remove all fields set
group_perms: QuerySet = get_groups_with_perms(doc)
self.assertNotIn(self.group1, group_perms)
- def test_removal_action_document_consumed(self):
+ def test_removal_action_document_consumed(self) -> None:
"""
GIVEN:
- Workflow with assignment and removal actions
expected_str = f"Document matched {trigger} from {w}"
self.assertIn(expected_str, info)
- def test_removal_action_document_consumed_remove_all(self):
+ def test_removal_action_document_consumed_remove_all(self) -> None:
"""
GIVEN:
- Workflow with assignment and removal actions with remove all fields set
expected_str = f"Document matched {trigger} from {w}"
self.assertIn(expected_str, info)
- def test_workflow_with_tag_actions_doesnt_overwrite_other_actions(self):
+ def test_workflow_with_tag_actions_doesnt_overwrite_other_actions(self) -> None:
"""
GIVEN:
- Document updated workflow filtered by has tag with two actions, first adds owner, second removes a tag
)
@mock.patch("httpx.post")
@mock.patch("django.core.mail.message.EmailMessage.send")
- def test_workflow_email_action(self, mock_email_send, mock_post):
+ def test_workflow_email_action(self, mock_email_send, mock_post) -> None:
"""
GIVEN:
- Document updated workflow with email action
PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("django.core.mail.message.EmailMessage.send")
- def test_workflow_email_include_file(self, mock_email_send):
+ def test_workflow_email_include_file(self, mock_email_send) -> None:
"""
GIVEN:
- Document updated workflow with email action
PAPERLESS_URL="http://localhost:8000",
EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
)
- def test_workflow_email_attachment_uses_storage_filename(self):
+ def test_workflow_email_attachment_uses_storage_filename(self) -> None:
"""
GIVEN:
- Document updated workflow with include document action
@override_settings(
EMAIL_ENABLED=False,
)
- def test_workflow_email_action_no_email_setup(self):
+ def test_workflow_email_action_no_email_setup(self) -> None:
"""
GIVEN:
- Document updated workflow with email action
PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("django.core.mail.message.EmailMessage.send")
- def test_workflow_email_action_fail(self, mock_email_send):
+ def test_workflow_email_action_fail(self, mock_email_send) -> None:
"""
GIVEN:
- Document updated workflow with email action
)
@mock.patch("httpx.post")
@mock.patch("django.core.mail.message.EmailMessage.send")
- def test_workflow_email_consumption_started(self, mock_email_send, mock_post):
+ def test_workflow_email_consumption_started(
+ self,
+ mock_email_send,
+ mock_post,
+ ) -> None:
"""
GIVEN:
- Workflow with email action and consumption trigger
BASE_URL="/paperless/",
)
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
- def test_workflow_webhook_action_body(self, mock_post):
+ def test_workflow_webhook_action_body(self, mock_post) -> None:
"""
GIVEN:
- Document updated workflow with webhook action which uses body
PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
- def test_workflow_webhook_action_w_files(self, mock_post):
+ def test_workflow_webhook_action_w_files(self, mock_post) -> None:
"""
GIVEN:
- Document updated workflow with webhook action which includes document
@override_settings(
PAPERLESS_URL="http://localhost:8000",
)
- def test_workflow_webhook_action_fail(self):
+ def test_workflow_webhook_action_fail(self) -> None:
"""
GIVEN:
- Document updated workflow with webhook action
expected_str = "Error occurred sending webhook"
self.assertIn(expected_str, cm.output[0])
- def test_workflow_webhook_action_url_invalid_params_headers(self):
+ def test_workflow_webhook_action_url_invalid_params_headers(self) -> None:
"""
GIVEN:
- Document updated workflow with webhook action
self.assertIn(expected_str, cm.output[1])
@mock.patch("httpx.Client.post")
- def test_workflow_webhook_send_webhook_task(self, mock_post):
+ def test_workflow_webhook_send_webhook_task(self, mock_post) -> None:
mock_post.return_value = mock.Mock(
status_code=200,
json=mock.Mock(return_value={"status": "ok"}),
)
@mock.patch("httpx.Client.post")
- def test_workflow_webhook_send_webhook_retry(self, mock_http):
+ def test_workflow_webhook_send_webhook_retry(self, mock_http) -> None:
mock_http.return_value.raise_for_status = mock.Mock(
side_effect=HTTPStatusError(
"Error",
self.assertIn(expected_str, cm.output[0])
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
- def test_workflow_webhook_action_consumption(self, mock_post):
+ def test_workflow_webhook_action_consumption(self, mock_post) -> None:
"""
GIVEN:
- Workflow with webhook action and consumption trigger
def test_send_webhook_data_or_json(
self,
httpx_mock: HTTPXMock,
- ):
+ ) -> None:
"""
GIVEN:
- Nothing
class TestWebhookSecurity:
- def test_blocks_invalid_scheme_or_hostname(self, httpx_mock: HTTPXMock):
+ def test_blocks_invalid_scheme_or_hostname(self, httpx_mock: HTTPXMock) -> None:
"""
GIVEN:
- Invalid URL schemes or hostnames
)
@override_settings(WEBHOOKS_ALLOWED_PORTS=[80, 443])
- def test_blocks_disallowed_port(self, httpx_mock: HTTPXMock):
+ def test_blocks_disallowed_port(self, httpx_mock: HTTPXMock) -> None:
"""
GIVEN:
- URL with a disallowed port
assert httpx_mock.get_request() is None
@override_settings(WEBHOOKS_ALLOW_INTERNAL_REQUESTS=False)
- def test_blocks_private_loopback_linklocal(self, httpx_mock: HTTPXMock, resolve_to):
+ def test_blocks_private_loopback_linklocal(
+ self,
+ httpx_mock: HTTPXMock,
+ resolve_to,
+ ) -> None:
"""
GIVEN:
- URL with a private, loopback, or link-local IP address
as_json=False,
)
- def test_allows_public_ip_and_sends(self, httpx_mock: HTTPXMock, resolve_to):
+ def test_allows_public_ip_and_sends(
+ self,
+ httpx_mock: HTTPXMock,
+ resolve_to,
+ ) -> None:
"""
GIVEN:
- URL with a public IP address
assert req.url.host == "52.207.186.75"
assert req.headers["host"] == "paperless-ngx.com"
- def test_follow_redirects_disabled(self, httpx_mock: HTTPXMock, resolve_to):
+ def test_follow_redirects_disabled(self, httpx_mock: HTTPXMock, resolve_to) -> None:
"""
GIVEN:
- A URL that redirects
assert len(httpx_mock.get_requests()) == 1
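# [Editor's illustrative sketch] The surrounding TestWebhookSecurity tests
# exercise pre-send validation of webhook target URLs (scheme, port, and the
# resolved IP range). The helper below is an assumption sketched for
# illustration, not the project's actual implementation:
import ipaddress
import socket
from urllib.parse import urlsplit

def validate_webhook_url(url: str, allowed_ports=(80, 443)) -> None:
    parts = urlsplit(url)
    if parts.scheme not in ("http", "https") or not parts.hostname:
        raise ValueError(f"Scheme or hostname not allowed: {url!r}")
    port = parts.port or (443 if parts.scheme == "https" else 80)
    if port not in allowed_ports:
        raise ValueError(f"Port not allowed: {port}")
    # Resolve the host and reject private, loopback, and link-local targets.
    resolved = ipaddress.ip_address(socket.gethostbyname(parts.hostname))
    if resolved.is_private or resolved.is_loopback or resolved.is_link_local:
        raise ValueError(f"Internal address not allowed: {resolved}")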
- def test_strips_user_supplied_host_header(self, httpx_mock: HTTPXMock, resolve_to):
+ def test_strips_user_supplied_host_header(
+ self,
+ httpx_mock: HTTPXMock,
+ resolve_to,
+ ) -> None:
"""
GIVEN:
- A URL with a user-supplied Host header
return dirs
-def remove_dirs(dirs):
+def remove_dirs(dirs) -> None:
shutil.rmtree(dirs.media_dir, ignore_errors=True)
shutil.rmtree(dirs.data_dir, ignore_errors=True)
shutil.rmtree(dirs.scratch_dir, ignore_errors=True)
Utilities for checking various state information of the file system
"""
- def assertIsFile(self, path: PathLike | str):
+ def assertIsFile(self, path: PathLike | str) -> None:
self.assertTrue(Path(path).resolve().is_file(), f"File does not exist: {path}")
- def assertIsNotFile(self, path: PathLike | str):
+ def assertIsNotFile(self, path: PathLike | str) -> None:
self.assertFalse(Path(path).resolve().is_file(), f"File does exist: {path}")
- def assertIsDir(self, path: PathLike | str):
+ def assertIsDir(self, path: PathLike | str) -> None:
self.assertTrue(Path(path).resolve().is_dir(), f"Dir does not exist: {path}")
- def assertIsNotDir(self, path: PathLike | str):
+ def assertIsNotDir(self, path: PathLike | str) -> None:
self.assertFalse(Path(path).resolve().is_dir(), f"Dir does exist: {path}")
def assertFilesEqual(
self,
path1: PathLike | str,
path2: PathLike | str,
- ):
+ ) -> None:
path1 = Path(path1)
path2 = Path(path2)
import hashlib
self.assertEqual(hash1, hash2, "File SHA256 mismatch")
- def assertFileCountInDir(self, path: PathLike | str, count: int):
+ def assertFileCountInDir(self, path: PathLike | str, count: int) -> None:
path = Path(path).resolve()
self.assertTrue(path.is_dir(), f"Path {path} is not a directory")
files = [x for x in path.iterdir() if x.is_file()]
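# [Editor's note] A minimal usage sketch for the mixin's helpers above; the
# test class and paths are hypothetical:
#
#     class TestExport(FileSystemAssertsMixin, TestCase):
#         def test_export_output(self) -> None:
#             self.assertIsDir("/tmp/export")
#             self.assertFileCountInDir("/tmp/export", 2)
#             self.assertFilesEqual("/tmp/export/a.pdf", "/tmp/export/b.pdf")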
migrate_to = None
auto_migrate = True
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
assert self.migrate_from and self.migrate_to, (
if self.auto_migrate:
self.performMigration()
- def performMigration(self):
+ def performMigration(self) -> None:
# Run the migration to test
executor = MigrationExecutor(connection)
executor.loader.build_graph() # reload.
self.apps = executor.loader.project_state(self.migrate_to).apps
- def setUpBeforeMigration(self, apps):
+ def setUpBeforeMigration(self, apps) -> None:
pass
- def tearDown(self):
+ def tearDown(self) -> None:
"""
Ensure the database schema is restored to the latest migration after
each migration test, so subsequent tests run against HEAD.
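# [Editor's illustrative sketch] How a concrete test uses this migration
# harness; the base class name (TestMigrations), app label, and migration
# names are assumptions for illustration:
class TestAddFieldMigration(TestMigrations):
    migrate_from = "0001_initial"
    migrate_to = "0002_add_field"

    def setUpBeforeMigration(self, apps) -> None:
        # Runs against the `migrate_from` schema; create fixture rows here
        # via the historical model, not the current one.
        Document = apps.get_model("documents", "Document")
        self.doc_id = Document.objects.create(title="before migration").id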
self.open()
return self
- def __exit__(self, exc_type, exc_val, exc_tb):
+ def __exit__(self, exc_type, exc_val, exc_tb) -> None:
self.close()
def open(self) -> None:
),
)
class UnifiedSearchViewSet(DocumentViewSet):
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.searcher = None
.prefetch_related("filter_rules")
)
- def perform_create(self, serializer):
+ def perform_create(self, serializer) -> None:
serializer.save(owner=self.request.user)
verbose_name = _("Paperless")
- def ready(self):
+ def ready(self) -> None:
from django.contrib.auth.signals import user_login_failed
user_login_failed.connect(handle_failed_login)
class AutoLoginMiddleware(MiddlewareMixin):
- def process_request(self, request: HttpRequest):
+ def process_request(self, request: HttpRequest) -> None:
# Don't use auto-login with token requests
if request.path.startswith("/api/token/") and request.method == "POST":
return None
)
raise AcceptConnection
- def disconnect(self, close_code):
+ def disconnect(self, close_code) -> None:
async_to_sync(self.channel_layer.group_discard)(
"status_updates",
self.channel_name,
)
- def status_update(self, event):
+ def status_update(self, event) -> None:
if not self._authenticated():
self.close()
else:
if self._can_view(event["data"]):
self.send(json.dumps(event))
- def documents_deleted(self, event):
+ def documents_deleted(self, event) -> None:
if not self._authenticated():
self.close()
else:
return PREFIX + get_table_cache_key(db_alias, table)
-def invalidate_db_cache():
+def invalidate_db_cache() -> None:
return cachalot_invalidate(cache_alias="read-cache")
class TestCustomAccountAdapter(TestCase):
- def test_is_open_for_signup(self):
+ def test_is_open_for_signup(self) -> None:
adapter = get_adapter()
# With no accounts, signups should be allowed
settings.ACCOUNT_ALLOW_SIGNUPS = False
self.assertFalse(adapter.is_open_for_signup(None))
- def test_is_safe_url(self):
+ def test_is_safe_url(self) -> None:
request = HttpRequest()
request.get_host = mock.Mock(return_value="example.com")
with context.request_context(request):
self.assertFalse(adapter.is_safe_url(url))
@mock.patch("allauth.core.internal.ratelimit.consume", return_value=True)
- def test_pre_authenticate(self, mock_consume):
+ def test_pre_authenticate(self, mock_consume) -> None:
adapter = get_adapter()
request = HttpRequest()
request.get_host = mock.Mock(return_value="example.com")
with self.assertRaises(ValidationError):
adapter.pre_authenticate(request)
- def test_get_reset_password_from_key_url(self):
+ def test_get_reset_password_from_key_url(self) -> None:
request = HttpRequest()
request.get_host = mock.Mock(return_value="foo.org")
with context.request_context(request):
)
@override_settings(ACCOUNT_DEFAULT_GROUPS=["group1", "group2"])
- def test_save_user_adds_groups(self):
+ def test_save_user_adds_groups(self) -> None:
Group.objects.create(name="group1")
user = User.objects.create_user("testuser")
adapter = get_adapter()
self.assertTrue(user.groups.filter(name="group1").exists())
self.assertFalse(user.groups.filter(name="group2").exists())
- def test_fresh_install_save_creates_superuser(self):
+ def test_fresh_install_save_creates_superuser(self) -> None:
adapter = get_adapter()
form = mock.Mock(
cleaned_data={
class TestCustomSocialAccountAdapter(TestCase):
- def test_is_open_for_signup(self):
+ def test_is_open_for_signup(self) -> None:
adapter = get_social_adapter()
# Test when SOCIALACCOUNT_ALLOW_SIGNUPS is True
settings.SOCIALACCOUNT_ALLOW_SIGNUPS = False
self.assertFalse(adapter.is_open_for_signup(None, None))
- def test_get_connect_redirect_url(self):
+ def test_get_connect_redirect_url(self) -> None:
adapter = get_social_adapter()
request = None
socialaccount = None
)
@override_settings(SOCIAL_ACCOUNT_DEFAULT_GROUPS=["group1", "group2"])
- def test_save_user_adds_groups(self):
+ def test_save_user_adds_groups(self) -> None:
Group.objects.create(name="group1")
adapter = get_social_adapter()
request = HttpRequest()
self.assertTrue(user.groups.filter(name="group1").exists())
self.assertFalse(user.groups.filter(name="group2").exists())
- def test_error_logged_on_authentication_error(self):
+ def test_error_logged_on_authentication_error(self) -> None:
adapter = get_social_adapter()
request = HttpRequest()
with self.assertLogs("paperless.auth", level="INFO") as log_cm:
class TestDrfTokenStrategy(TestCase):
- def test_create_access_token_creates_new_token(self):
+ def test_create_access_token_creates_new_token(self) -> None:
"""
GIVEN:
- A user with no existing DRF token
token = Token.objects.get(user=user)
self.assertEqual(token_key, token.key)
- def test_create_access_token_returns_existing_token(self):
+ def test_create_access_token_returns_existing_token(self) -> None:
"""
GIVEN:
- A user with an existing DRF token
# Verify only one token exists (no duplicate created)
self.assertEqual(Token.objects.filter(user=user).count(), 1)
- def test_create_access_token_returns_none_for_unauthenticated_user(self):
+ def test_create_access_token_returns_none_for_unauthenticated_user(self) -> None:
"""
GIVEN:
- An unauthenticated request
class TestChecks(DirectoriesMixin, TestCase):
- def test_binaries(self):
+ def test_binaries(self) -> None:
self.assertEqual(binaries_check(None), [])
@override_settings(CONVERT_BINARY="uuuhh")
- def test_binaries_fail(self):
+ def test_binaries_fail(self) -> None:
self.assertEqual(len(binaries_check(None)), 1)
- def test_paths_check(self):
+ def test_paths_check(self) -> None:
self.assertEqual(paths_check(None), [])
@override_settings(
DATA_DIR=Path("whatever"),
CONSUMPTION_DIR=Path("idontcare"),
)
- def test_paths_check_dont_exist(self):
+ def test_paths_check_dont_exist(self) -> None:
msgs = paths_check(None)
self.assertEqual(len(msgs), 3, str(msgs))
for msg in msgs:
self.assertTrue(msg.msg.endswith("is set but doesn't exist."))
- def test_paths_check_no_access(self):
+ def test_paths_check_no_access(self) -> None:
Path(self.dirs.data_dir).chmod(0o000)
Path(self.dirs.media_dir).chmod(0o000)
Path(self.dirs.consumption_dir).chmod(0o000)
self.assertTrue(msg.msg.endswith("is not writeable"))
@override_settings(DEBUG=False)
- def test_debug_disabled(self):
+ def test_debug_disabled(self) -> None:
self.assertEqual(debug_mode_check(None), [])
@override_settings(DEBUG=True)
- def test_debug_enabled(self):
+ def test_debug_enabled(self) -> None:
self.assertEqual(len(debug_mode_check(None)), 1)
class TestSettingsChecksAgainstDefaults(DirectoriesMixin, TestCase):
- def test_all_valid(self):
+ def test_all_valid(self) -> None:
"""
GIVEN:
- Default settings
class TestOcrSettingsChecks(DirectoriesMixin, TestCase):
@override_settings(OCR_OUTPUT_TYPE="notapdf")
- def test_invalid_output_type(self):
+ def test_invalid_output_type(self) -> None:
"""
GIVEN:
- Default settings
self.assertIn('OCR output type "notapdf"', msg.msg)
@override_settings(OCR_MODE="makeitso")
- def test_invalid_ocr_type(self):
+ def test_invalid_ocr_type(self) -> None:
"""
GIVEN:
- Default settings
self.assertIn('OCR output mode "makeitso"', msg.msg)
@override_settings(OCR_MODE="skip_noarchive")
- def test_deprecated_ocr_type(self):
+ def test_deprecated_ocr_type(self) -> None:
"""
GIVEN:
- Default settings
self.assertIn("deprecated", msg.msg)
@override_settings(OCR_SKIP_ARCHIVE_FILE="invalid")
- def test_invalid_ocr_skip_archive_file(self):
+ def test_invalid_ocr_skip_archive_file(self) -> None:
"""
GIVEN:
- Default settings
self.assertIn('OCR_SKIP_ARCHIVE_FILE setting "invalid"', msg.msg)
@override_settings(OCR_CLEAN="cleanme")
- def test_invalid_ocr_clean(self):
+ def test_invalid_ocr_clean(self) -> None:
"""
GIVEN:
- Default settings
class TestTimezoneSettingsChecks(DirectoriesMixin, TestCase):
@override_settings(TIME_ZONE="TheMoon\\MyCrater")
- def test_invalid_timezone(self):
+ def test_invalid_timezone(self) -> None:
"""
GIVEN:
- Default settings
class TestBarcodeSettingsChecks(DirectoriesMixin, TestCase):
@override_settings(CONSUMER_BARCODE_SCANNER="Invalid")
- def test_barcode_scanner_invalid(self):
+ def test_barcode_scanner_invalid(self) -> None:
msgs = settings_values_check(None)
self.assertEqual(len(msgs), 1)
self.assertIn('Invalid Barcode Scanner "Invalid"', msg.msg)
@override_settings(CONSUMER_BARCODE_SCANNER="")
- def test_barcode_scanner_empty(self):
+ def test_barcode_scanner_empty(self) -> None:
msgs = settings_values_check(None)
self.assertEqual(len(msgs), 1)
self.assertIn('Invalid Barcode Scanner ""', msg.msg)
@override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
- def test_barcode_scanner_valid(self):
+ def test_barcode_scanner_valid(self) -> None:
msgs = settings_values_check(None)
self.assertEqual(len(msgs), 0)
class TestEmailCertSettingsChecks(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@override_settings(EMAIL_CERTIFICATE_FILE=Path("/tmp/not_actually_here.pem"))
- def test_not_valid_file(self):
+ def test_not_valid_file(self) -> None:
"""
GIVEN:
- Default settings
class TestAuditLogChecks(TestCase):
- def test_was_enabled_once(self):
+ def test_was_enabled_once(self) -> None:
"""
GIVEN:
- Audit log is not enabled
from paperless.settings import _parse_caches
-def test_all_redis_caches_have_same_custom_prefix(monkeypatch):
+def test_all_redis_caches_have_same_custom_prefix(monkeypatch) -> None:
"""
Check that when setting a custom Redis prefix,
it is set for both the Django default cache and the read cache.
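# [Editor's illustrative sketch] Roughly what this test verifies; the env
# var name and cache-dict layout are assumptions for illustration:
def _check_prefixes(monkeypatch) -> None:
    monkeypatch.setenv("PAPERLESS_REDIS_PREFIX", "myprefix")  # assumed name
    caches = _parse_caches()
    assert caches["default"]["KEY_PREFIX"] == caches["read-cache"]["KEY_PREFIX"]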
class TestDbCacheSettings:
- def test_cachalot_default_settings(self):
+ def test_cachalot_default_settings(self) -> None:
# Cachalot must be installed even if disabled,
# so the cache can be invalidated anytime
assert "cachalot" not in settings.INSTALLED_APPS
"PAPERLESS_READ_CACHE_TTL": "7200",
},
)
- def test_cachalot_custom_settings(self):
+ def test_cachalot_custom_settings(self) -> None:
settings = _parse_cachalot_settings()
assert settings["CACHALOT_ENABLED"]
self,
env_var_ttl: int,
expected_cachalot_timeout: int,
- ):
+ ) -> None:
with patch.dict(os.environ, {"PAPERLESS_READ_CACHE_TTL": f"{env_var_ttl}"}):
cachalot_timeout = _parse_cachalot_settings()["CACHALOT_TIMEOUT"]
assert cachalot_timeout == expected_cachalot_timeout
CACHALOT_TIMEOUT=1,
)
@pytest.mark.django_db(transaction=True)
-def test_cache_hit_when_enabled():
+def test_cache_hit_when_enabled() -> None:
cachalot_settings.reload()
assert cachalot_settings.CACHALOT_ENABLED
@pytest.mark.django_db(transaction=True)
-def test_cache_is_disabled_by_default():
+def test_cache_is_disabled_by_default() -> None:
cachalot_settings.reload()
# Invalidate the cache just in case
invalidate_db_cache()
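# [Editor's illustrative sketch] Flipping the read cache on via environment
# and re-parsing, as the tests above do; the enable-flag name is an assumed
# placeholder, while PAPERLESS_READ_CACHE_TTL appears in the tests:
with patch.dict(
    os.environ,
    {
        "PAPERLESS_ENABLE_READ_CACHE": "true",  # assumed flag name
        "PAPERLESS_READ_CACHE_TTL": "3600",
    },
):
    parsed = _parse_cachalot_settings()
    assert parsed["CACHALOT_ENABLED"]
    assert parsed["CACHALOT_TIMEOUT"] == 3600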
class TestRemoteUser(DirectoriesMixin, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(
username="temp_admin",
)
- def test_remote_user(self):
+ def test_remote_user(self) -> None:
"""
GIVEN:
- Configured user
self.assertEqual(response.status_code, status.HTTP_200_OK)
- def test_remote_user_api(self):
+ def test_remote_user_api(self) -> None:
"""
GIVEN:
- Configured user
],
},
)
- def test_remote_user_api_disabled(self):
+ def test_remote_user_api_disabled(self) -> None:
"""
GIVEN:
- Configured user
[status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN],
)
- def test_remote_user_header_setting(self):
+ def test_remote_user_header_setting(self) -> None:
"""
GIVEN:
- Remote user header name is set
Tests the parsing of the PAPERLESS_IGNORE_DATES setting value
"""
- def _parse_checker(self, test_cases):
+ def _parse_checker(self, test_cases) -> None:
"""
Helper function to check ignore date parsing
expected_date_set,
)
- def test_no_ignore_dates_set(self):
+ def test_no_ignore_dates_set(self) -> None:
"""
GIVEN:
- No ignore dates are set
"""
self.assertSetEqual(_parse_ignore_dates(""), set())
- def test_single_ignore_dates_set(self):
+ def test_single_ignore_dates_set(self) -> None:
"""
GIVEN:
- Ignore dates are set per certain inputs
class TestThreadCalculation(TestCase):
- def test_workers_threads(self):
+ def test_workers_threads(self) -> None:
"""
GIVEN:
- Certain CPU counts
class TestRedisSocketConversion(TestCase):
- def test_redis_socket_parsing(self):
+ def test_redis_socket_parsing(self) -> None:
"""
GIVEN:
- Various Redis connection URI formats
LLM_INDEX_EXPIRE_TIME = 23.0 * 60.0 * 60.0
CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME = 23.0 * 60.0 * 60.0
- def test_schedule_configuration_default(self):
+ def test_schedule_configuration_default(self) -> None:
"""
GIVEN:
- No configured task schedules
schedule,
)
- def test_schedule_configuration_changed(self):
+ def test_schedule_configuration_changed(self) -> None:
"""
GIVEN:
- Email task is configured non-default
schedule,
)
- def test_schedule_configuration_disabled(self):
+ def test_schedule_configuration_disabled(self) -> None:
"""
GIVEN:
- Search index task is disabled
schedule,
)
- def test_schedule_configuration_disabled_all(self):
+ def test_schedule_configuration_disabled_all(self) -> None:
"""
GIVEN:
- All tasks are disabled
class TestDBSettings(TestCase):
- def test_db_timeout_with_sqlite(self):
+ def test_db_timeout_with_sqlite(self) -> None:
"""
GIVEN:
- PAPERLESS_DB_TIMEOUT is set
databases["default"]["OPTIONS"],
)
- def test_db_timeout_with_not_sqlite(self):
+ def test_db_timeout_with_not_sqlite(self) -> None:
"""
GIVEN:
- PAPERLESS_DB_TIMEOUT is set but db is not sqlite
class TestPaperlessURLSettings(TestCase):
- def test_paperless_url(self):
+ def test_paperless_url(self) -> None:
"""
GIVEN:
- PAPERLESS_URL is set
class TestPathSettings(TestCase):
- def test_default_paths(self):
+ def test_default_paths(self) -> None:
"""
GIVEN:
- PAPERLESS_FORCE_SCRIPT_NAME is not set
) # LOGOUT_REDIRECT_URL
@mock.patch("os.environ", {"PAPERLESS_FORCE_SCRIPT_NAME": "/paperless"})
- def test_subpath(self):
+ def test_subpath(self) -> None:
"""
GIVEN:
- PAPERLESS_FORCE_SCRIPT_NAME is set
"PAPERLESS_LOGOUT_REDIRECT_URL": "/foobar/",
},
)
- def test_subpath_with_explicit_logout_url(self):
+ def test_subpath_with_explicit_logout_url(self) -> None:
"""
GIVEN:
- PAPERLESS_FORCE_SCRIPT_NAME is set and so is PAPERLESS_LOGOUT_REDIRECT_URL
("en+zh-Hans+zh-Hant", ["en", "zh-Hans", "zh-Hant", "zh"]),
],
)
-def test_parser_date_parser_languages(languages, expected):
+def test_parser_date_parser_languages(languages, expected) -> None:
assert sorted(_parse_dateparser_languages(languages)) == sorted(expected)
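# [Editor's illustrative sketch] The expansion the parametrized case above
# expects: languages split on "+", with script/region variants also
# contributing their base code. This is an assumption about how
# _parse_dateparser_languages behaves, not its actual source:
def _expand_languages(value: str) -> list[str]:
    result: set[str] = set()
    for lang in value.split("+"):
        result.add(lang)
        if "-" in lang:
            result.add(lang.split("-")[0])  # e.g. "zh-Hans" also yields "zh"
    return sorted(result)

assert _expand_languages("en+zh-Hans+zh-Hant") == ["en", "zh", "zh-Hans", "zh-Hant"]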
class TestFailedLoginLogging(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.creds = {
"username": "john lennon",
}
- def test_unauthenticated(self):
+ def test_unauthenticated(self) -> None:
"""
GIVEN:
- Request with no authentication provided
],
)
- def test_none(self):
+ def test_none(self) -> None:
"""
GIVEN:
- Request with no IP possible
],
)
- def test_public(self):
+ def test_public(self) -> None:
"""
GIVEN:
- Request with publicly routable IP
],
)
- def test_private(self):
+ def test_private(self) -> None:
"""
GIVEN:
- Request with private range IP
class TestSyncSocialLoginGroups(TestCase):
@override_settings(SOCIAL_ACCOUNT_SYNC_GROUPS=True)
- def test_sync_enabled(self):
+ def test_sync_enabled(self) -> None:
"""
GIVEN:
- Enabled group syncing, a user, and a social login
self.assertEqual(list(user.groups.all()), [group])
@override_settings(SOCIAL_ACCOUNT_SYNC_GROUPS=False)
- def test_sync_disabled(self):
+ def test_sync_disabled(self) -> None:
"""
GIVEN:
- Disabled group syncing, a user, and a social login
self.assertEqual(list(user.groups.all()), [])
@override_settings(SOCIAL_ACCOUNT_SYNC_GROUPS=True)
- def test_no_groups(self):
+ def test_no_groups(self) -> None:
"""
GIVEN:
- Enabled group syncing, a user, and a social login with no groups
self.assertEqual(list(user.groups.all()), [])
@override_settings(SOCIAL_ACCOUNT_SYNC_GROUPS=True)
- def test_userinfo_groups(self):
+ def test_userinfo_groups(self) -> None:
"""
GIVEN:
- Enabled group syncing, and `groups` nested under `userinfo`
self.assertEqual(list(user.groups.all()), [group])
@override_settings(SOCIAL_ACCOUNT_SYNC_GROUPS=True)
- def test_id_token_groups_fallback(self):
+ def test_id_token_groups_fallback(self) -> None:
"""
GIVEN:
- Enabled group syncing, and `groups` only under `id_token`
from ui_settings
"""
- def test_user_group_deletion_cleanup(self):
+ def test_user_group_deletion_cleanup(self) -> None:
"""
GIVEN:
- Existing user
self.assertEqual(permissions.get("default_view_groups"), [])
self.assertEqual(permissions.get("default_change_groups"), [])
- def test_user_group_deletion_error_handling(self):
+ def test_user_group_deletion_error_handling(self) -> None:
"""
GIVEN:
- Existing user and group
@override_settings(CHANNEL_LAYERS=TEST_CHANNEL_LAYERS)
class TestWebSockets(TestCase):
- async def test_no_auth(self):
+ async def test_no_auth(self) -> None:
communicator = WebsocketCommunicator(application, "/ws/status/")
connected, _ = await communicator.connect()
self.assertFalse(connected)
@mock.patch("paperless.consumers.StatusConsumer.close")
@mock.patch("paperless.consumers.StatusConsumer._authenticated")
- async def test_close_on_no_auth(self, _authenticated, mock_close):
+ async def test_close_on_no_auth(self, _authenticated, mock_close) -> None:
_authenticated.return_value = True
communicator = WebsocketCommunicator(application, "/ws/status/")
mock_close.assert_called_once()
@mock.patch("paperless.consumers.StatusConsumer._authenticated")
- async def test_auth(self, _authenticated):
+ async def test_auth(self, _authenticated) -> None:
_authenticated.return_value = True
communicator = WebsocketCommunicator(application, "/ws/status/")
await communicator.disconnect()
@mock.patch("paperless.consumers.StatusConsumer._authenticated")
- async def test_receive_status_update(self, _authenticated):
+ async def test_receive_status_update(self, _authenticated) -> None:
_authenticated.return_value = True
communicator = WebsocketCommunicator(application, "/ws/status/")
await communicator.disconnect()
- async def test_status_update_check_perms(self):
+ async def test_status_update_check_perms(self) -> None:
communicator = WebsocketCommunicator(application, "/ws/status/")
communicator.scope["user"] = mock.Mock()
await communicator.disconnect()
@mock.patch("paperless.consumers.StatusConsumer._authenticated")
- async def test_receive_documents_deleted(self, _authenticated):
+ async def test_receive_documents_deleted(self, _authenticated) -> None:
_authenticated.return_value = True
communicator = WebsocketCommunicator(application, "/ws/status/")
await communicator.disconnect()
@mock.patch("channels.layers.InMemoryChannelLayer.group_send")
- def test_manager_send_progress(self, mock_group_send):
+ def test_manager_send_progress(self, mock_group_send) -> None:
with ProgressManager(task_id="test") as manager:
manager.send_progress(
ProgressStatusOptions.STARTED,
)
@mock.patch("channels.layers.InMemoryChannelLayer.group_send")
- def test_manager_send_documents_deleted(self, mock_group_send):
+ def test_manager_send_documents_deleted(self, mock_group_send) -> None:
with DocumentsStatusManager() as manager:
manager.send_documents_deleted([1, 2, 3])
A client for interacting with an LLM backend.
"""
- def __init__(self):
+ def __init__(self) -> None:
self.settings = AIConfig()
self.llm = self.get_llm()
@pytest.mark.django_db
-def test_build_document_node(real_document):
+def test_build_document_node(real_document) -> None:
nodes = indexing.build_document_node(real_document)
assert len(nodes) > 0
assert nodes[0].metadata["document_id"] == str(real_document.id)
temp_llm_index_dir,
real_document,
mock_embed_model,
-):
+) -> None:
with patch("documents.models.Document.objects.all") as mock_all:
mock_queryset = MagicMock()
mock_queryset.exists.return_value = True
temp_llm_index_dir,
real_document,
mock_embed_model,
-):
+) -> None:
# Pre-create a meta.json with incorrect data
(temp_llm_index_dir / "meta.json").write_text(
json.dumps({"embedding_model": "old", "dim": 1}),
temp_llm_index_dir,
real_document,
mock_embed_model,
-):
+) -> None:
doc2 = Document.objects.create(
title="Test Document 2",
content="This is some test content 2.",
def test_get_or_create_storage_context_raises_exception(
temp_llm_index_dir,
mock_embed_model,
-):
+) -> None:
with pytest.raises(Exception):
indexing.get_or_create_storage_context(rebuild=False)
temp_llm_index_dir,
real_document,
mock_embed_model,
-):
+) -> None:
with (
patch(
"paperless_ai.indexing.load_index_from_storage",
def test_load_or_build_index_raises_exception_when_no_nodes(
temp_llm_index_dir,
mock_embed_model,
-):
+) -> None:
with (
patch(
"paperless_ai.indexing.load_index_from_storage",
def test_load_or_build_index_succeeds_when_nodes_given(
temp_llm_index_dir,
mock_embed_model,
-):
+) -> None:
with (
patch(
"paperless_ai.indexing.load_index_from_storage",
temp_llm_index_dir,
real_document,
mock_embed_model,
-):
+) -> None:
indexing.update_llm_index(rebuild=True)
indexing.llm_index_add_or_update_document(real_document)
temp_llm_index_dir,
real_document,
mock_embed_model,
-):
+) -> None:
indexing.update_llm_index(rebuild=True)
index = indexing.load_or_build_index()
assert len(index.docstore.docs) == 1
def test_update_llm_index_no_documents(
temp_llm_index_dir,
mock_embed_model,
-):
+) -> None:
with patch("documents.models.Document.objects.all") as mock_all:
mock_queryset = MagicMock()
mock_queryset.exists.return_value = False
@pytest.mark.django_db
-def test_queue_llm_index_update_if_needed_enqueues_when_idle_or_skips_recent():
+def test_queue_llm_index_update_if_needed_enqueues_when_idle_or_skips_recent() -> None:
# No existing tasks
with patch("documents.tasks.llmindex_index") as mock_task:
result = indexing.queue_llm_index_update_if_needed(
def test_query_similar_documents(
temp_llm_index_dir,
real_document,
-):
+) -> None:
with (
patch("paperless_ai.indexing.get_or_create_storage_context") as mock_storage,
patch("paperless_ai.indexing.load_or_build_index") as mock_load_or_build_index,
def test_query_similar_documents_triggers_update_when_index_missing(
temp_llm_index_dir,
real_document,
-):
+) -> None:
with (
patch(
"paperless_ai.indexing.vector_store_file_exists",
return doc
-def test_stream_chat_with_one_document_full_content(mock_document):
+def test_stream_chat_with_one_document_full_content(mock_document) -> None:
with (
patch("paperless_ai.chat.AIClient") as mock_client_cls,
patch("paperless_ai.chat.load_or_build_index") as mock_load_index,
assert output == ["chunk1", "chunk2"]
-def test_stream_chat_with_multiple_documents_retrieval(patch_embed_nodes):
+def test_stream_chat_with_multiple_documents_retrieval(patch_embed_nodes) -> None:
with (
patch("paperless_ai.chat.AIClient") as mock_client_cls,
patch("paperless_ai.chat.load_or_build_index") as mock_load_index,
assert output == ["chunk1", "chunk2"]
-def test_stream_chat_no_matching_nodes():
+def test_stream_chat_no_matching_nodes() -> None:
with (
patch("paperless_ai.chat.AIClient") as mock_client_cls,
patch("paperless_ai.chat.load_or_build_index") as mock_load_index,
class TestAIMatching(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
# Create test data for Tag
self.tag1 = Tag.objects.create(name="Test Tag 1")
self.tag2 = Tag.objects.create(name="Test Tag 2")
self.storage_path2 = StoragePath.objects.create(name="Test Storage Path 2")
@patch("paperless_ai.matching.get_objects_for_user_owner_aware")
- def test_match_tags_by_name(self, mock_get_objects):
+ def test_match_tags_by_name(self, mock_get_objects) -> None:
mock_get_objects.return_value = Tag.objects.all()
names = ["Test Tag 1", "Nonexistent Tag"]
result = match_tags_by_name(names, user=None)
self.assertEqual(result[0].name, "Test Tag 1")
@patch("paperless_ai.matching.get_objects_for_user_owner_aware")
- def test_match_correspondents_by_name(self, mock_get_objects):
+ def test_match_correspondents_by_name(self, mock_get_objects) -> None:
mock_get_objects.return_value = Correspondent.objects.all()
names = ["Test Correspondent 1", "Nonexistent Correspondent"]
result = match_correspondents_by_name(names, user=None)
self.assertEqual(result[0].name, "Test Correspondent 1")
@patch("paperless_ai.matching.get_objects_for_user_owner_aware")
- def test_match_document_types_by_name(self, mock_get_objects):
+ def test_match_document_types_by_name(self, mock_get_objects) -> None:
mock_get_objects.return_value = DocumentType.objects.all()
names = ["Test Document Type 1", "Nonexistent Document Type"]
result = match_document_types_by_name(names, user=None)
self.assertEqual(result[0].name, "Test Document Type 1")
@patch("paperless_ai.matching.get_objects_for_user_owner_aware")
- def test_match_storage_paths_by_name(self, mock_get_objects):
+ def test_match_storage_paths_by_name(self, mock_get_objects) -> None:
mock_get_objects.return_value = StoragePath.objects.all()
names = ["Test Storage Path 1", "Nonexistent Storage Path"]
result = match_storage_paths_by_name(names, user=None)
self.assertEqual(len(result), 1)
self.assertEqual(result[0].name, "Test Storage Path 1")
- def test_extract_unmatched_names(self):
+ def test_extract_unmatched_names(self) -> None:
llm_names = ["Test Tag 1", "Nonexistent Tag"]
matched_objects = [self.tag1]
unmatched_names = extract_unmatched_names(llm_names, matched_objects)
self.assertEqual(unmatched_names, ["Nonexistent Tag"])
@patch("paperless_ai.matching.get_objects_for_user_owner_aware")
- def test_match_tags_by_name_with_empty_names(self, mock_get_objects):
+ def test_match_tags_by_name_with_empty_names(self, mock_get_objects) -> None:
mock_get_objects.return_value = Tag.objects.all()
names = [None, "", " "]
result = match_tags_by_name(names, user=None)
self.assertEqual(result, [])
@patch("paperless_ai.matching.get_objects_for_user_owner_aware")
- def test_match_tags_with_fuzzy_matching(self, mock_get_objects):
+ def test_match_tags_with_fuzzy_matching(self, mock_get_objects) -> None:
mock_get_objects.return_value = Tag.objects.all()
names = ["Test Taag 1", "Teest Tag 2"]
result = match_tags_by_name(names, user=None)
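# [Editor's illustrative sketch] The kind of fuzzy pass this test relies on;
# difflib here is an assumption, not necessarily what paperless_ai uses:
import difflib

def fuzzy_match(name: str, candidates: list[str], cutoff: float = 0.8) -> str | None:
    matches = difflib.get_close_matches(name, candidates, n=1, cutoff=cutoff)
    return matches[0] if matches else None

assert fuzzy_match("Test Taag 1", ["Test Tag 1", "Test Tag 2"]) == "Test Tag 1"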
verbose_name = _("Paperless mail")
- def ready(self):
+ def ready(self) -> None:
from documents.signals import document_consumer_declaration
if settings.TIKA_ENABLED:
A mail action that deletes mails after processing.
"""
- def post_consume(self, M: MailBox, message_uid: str, parameter: str):
+ def post_consume(self, M: MailBox, message_uid: str, parameter: str) -> None:
M.delete(message_uid)
def get_criteria(self):
return {"seen": False}
- def post_consume(self, M: MailBox, message_uid: str, parameter: str):
+ def post_consume(self, M: MailBox, message_uid: str, parameter: str) -> None:
M.flag(message_uid, [MailMessageFlags.SEEN], value=True)
A mail action that moves mails to a different folder after processing.
"""
- def post_consume(self, M, message_uid, parameter):
+ def post_consume(self, M, message_uid, parameter) -> None:
M.move(message_uid, parameter)
def get_criteria(self):
return {"flagged": False}
- def post_consume(self, M: MailBox, message_uid: str, parameter: str):
+ def post_consume(self, M: MailBox, message_uid: str, parameter: str) -> None:
M.flag(message_uid, [MailMessageFlags.FLAGGED], value=True)
A mail action that tags mails after processing.
"""
- def __init__(self, parameter: str, *, supports_gmail_labels: bool):
+ def __init__(self, parameter: str, *, supports_gmail_labels: bool) -> None:
# The custom tag should look like "apple:<color>"
if "apple:" in parameter.lower():
_, self.color = parameter.split(":")
else: # pragma: no cover
raise ValueError("This should never happen.")
- def post_consume(self, M: MailBox, message_uid: str, parameter: str):
+ def post_consume(self, M: MailBox, message_uid: str, parameter: str) -> None:
if self.supports_gmail_labels:
M.client.uid("STORE", message_uid, "+X-GM-LABELS", self.keyword)
raise MailError("No keyword specified.")
-def mailbox_login(mailbox: MailBox, account: MailAccount):
+def mailbox_login(mailbox: MailBox, account: MailAccount) -> None:
logger = logging.getLogger("paperless_mail")
try:
message_uid: str,
message_subject: str,
message_date: datetime.datetime,
-):
+) -> None:
"""
This shared task applies the mail action of a particular mail rule to the
given mail. Creates a ProcessedMail object, so that the mail won't be
message_uid: str,
message_subject: str,
message_date: datetime.datetime,
-):
+) -> None:
"""
A shared task that is called whenever something goes wrong during
consumption of a file. See queue_consumption_tasks.
consume_tasks: list[Signature],
rule: MailRule,
message: MailMessage,
-):
+) -> None:
"""
Queue a list of consumption tasks (Signatures for the consume_file shared
task) with celery.
self.renew_logging_group()
self._init_preprocessors()
- def _init_preprocessors(self):
+ def _init_preprocessors(self) -> None:
self._message_preprocessors: list[MailMessagePreprocessor] = []
for preprocessor_type in self._message_preprocessor_types:
self._init_preprocessor(preprocessor_type)
- def _init_preprocessor(self, preprocessor_type):
+ def _init_preprocessor(self, preprocessor_type) -> None:
if preprocessor_type.able_to_run():
try:
self._message_preprocessors.append(preprocessor_type())
mime_type: str,
file_name=None,
mailrule_id: int | None = None,
- ):
+ ) -> None:
"""
Parses the given .eml into formatted text, based on the decoded email.
html_pdf.write_bytes(response.content)
return html_pdf
- def get_settings(self):
+ def get_settings(self) -> None:
"""
This parser does not implement additional settings yet
"""
NAME = "MailMessageDecryptor"
- def __init__(self):
+ def __init__(self) -> None:
super().__init__()
self.renew_logging_group()
self._gpg = GPG(gnupghome=settings.EMAIL_GNUPG_HOME)
class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/mail_accounts/"
- def setUp(self):
+ def setUp(self) -> None:
self.bogus_mailbox = BogusMailBox()
patcher = mock.patch("paperless_mail.mail.MailBox")
self.user.save()
self.client.force_authenticate(user=self.user)
- def test_get_mail_accounts(self):
+ def test_get_mail_accounts(self) -> None:
"""
GIVEN:
- Configured mail accounts
self.assertEqual(returned_account1["imap_security"], account1.imap_security)
self.assertEqual(returned_account1["character_set"], account1.character_set)
- def test_create_mail_account(self):
+ def test_create_mail_account(self) -> None:
"""
WHEN:
- API request is made to add a mail account
self.assertEqual(returned_account1.imap_security, account1["imap_security"])
self.assertEqual(returned_account1.character_set, account1["character_set"])
- def test_delete_mail_account(self):
+ def test_delete_mail_account(self) -> None:
"""
GIVEN:
- Existing mail account
self.assertEqual(len(MailAccount.objects.all()), 0)
- def test_update_mail_account(self):
+ def test_update_mail_account(self) -> None:
"""
GIVEN:
- Existing mail accounts
self.assertEqual(returned_account2.name, "Updated Name 2")
self.assertEqual(returned_account2.password, "123xyz")
- def test_mail_account_test_fail(self):
+ def test_mail_account_test_fail(self) -> None:
"""
GIVEN:
- Erroneous mail account details
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
- def test_mail_account_test_success(self):
+ def test_mail_account_test_success(self) -> None:
"""
GIVEN:
- Working mail account details
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["success"], True)
- def test_mail_account_test_existing(self):
+ def test_mail_account_test_existing(self) -> None:
"""
GIVEN:
- Testing server details for an existing account with obfuscated password (***)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["success"], True)
- def test_get_mail_accounts_owner_aware(self):
+ def test_get_mail_accounts_owner_aware(self) -> None:
"""
GIVEN:
- Configured accounts with different users
class TestAPIMailRules(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/mail_rules/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_user(username="temp_admin")
self.user.save()
self.client.force_authenticate(user=self.user)
- def test_get_mail_rules(self):
+ def test_get_mail_rules(self) -> None:
"""
GIVEN:
- Configured mail accounts and rules
self.assertEqual(returned_rule1["order"], rule1.order)
self.assertEqual(returned_rule1["attachment_type"], rule1.attachment_type)
- def test_create_mail_rule(self):
+ def test_create_mail_rule(self) -> None:
"""
GIVEN:
- Configured mail account exists
rule1["assign_owner_from_rule"],
)
- def test_delete_mail_rule(self):
+ def test_delete_mail_rule(self) -> None:
"""
GIVEN:
- Existing mail rule
self.assertEqual(len(MailRule.objects.all()), 0)
- def test_update_mail_rule(self):
+ def test_update_mail_rule(self) -> None:
"""
GIVEN:
- Existing mail rule
self.assertEqual(returned_rule1.name, "Updated Name 1")
self.assertEqual(returned_rule1.action, MailRule.MailAction.DELETE)
- def test_get_mail_rules_owner_aware(self):
+ def test_get_mail_rules_owner_aware(self) -> None:
"""
GIVEN:
- Configured rules with different users
self.assertEqual(response.data["results"][1]["name"], rule2.name)
self.assertEqual(response.data["results"][2]["name"], rule4.name)
- def test_mailrule_maxage_validation(self):
+ def test_mailrule_maxage_validation(self) -> None:
"""
GIVEN:
- An existing mail account
class TestAPIProcessedMails(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/processed_mail/"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_user(username="temp_admin")
self.user.save()
self.client.force_authenticate(user=self.user)
- def test_get_processed_mails_owner_aware(self):
+ def test_get_processed_mails_owner_aware(self) -> None:
"""
GIVEN:
- Configured processed mails with different users
returned_ids = {r["id"] for r in response.data["results"]}
self.assertSetEqual(returned_ids, {pm1.id, pm2.id, pm4.id})
- def test_get_processed_mails_filter_by_rule(self):
+ def test_get_processed_mails_filter_by_rule(self) -> None:
"""
GIVEN:
- Processed mails belonging to two different rules
returned_ids = {r["id"] for r in response.data["results"]}
self.assertSetEqual(returned_ids, {pm1.id, pm2.id})
- def test_bulk_delete_processed_mails(self):
+ def test_bulk_delete_processed_mails(self) -> None:
"""
GIVEN:
- Processed mails belonging to two different rules and different users
class BogusFolderManager:
current_folder = "INBOX"
- def set(self, new_folder):
+ def set(self, new_folder) -> None:
if new_folder not in ["INBOX", "spam"]:
raise MailboxFolderSelectError(None, "uhm")
self.current_folder = new_folder
class BogusClient:
- def __init__(self, messages):
+ def __init__(self, messages) -> None:
self.messages: list[MailMessage] = messages
self.capabilities: list[str] = []
def __enter__(self):
return self
- def __exit__(self, exc_type, exc_val, exc_tb):
+ def __exit__(self, exc_type, exc_val, exc_tb) -> None:
pass
- def authenticate(self, mechanism, authobject):
+ def authenticate(self, mechanism, authobject) -> None:
# authobject must be a callable object
auth_bytes = authobject(None)
if auth_bytes != b"\x00admin\x00w57\xc3\xa4\xc3\xb6\xc3\xbcw4b6huwb6nhu":
raise MailboxLoginError("BAD", "OK")
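# [Editor's note] The expected bytes above follow the SASL PLAIN layout:
# <authzid> NUL <username> NUL <password>, with the password UTF-8 encoded.
# A minimal authobject producing that initial response:
def plain_authobject(username: str, password: str):
    def auth(_challenge) -> bytes:
        return b"\x00" + username.encode("ascii") + b"\x00" + password.encode("utf-8")
    return auth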
- def uid(self, command, *args):
+ def uid(self, command, *args) -> None:
if command == "STORE":
for message in self.messages:
if message.uid == args[0]:
# A dummy access token
ACCESS_TOKEN = "ea7e075cd3acf2c54c48e600398d5d5a"
- def __init__(self):
+ def __init__(self) -> None:
self.messages: list[MailMessage] = []
self.messages_spam: list[MailMessage] = []
self.folder = BogusFolderManager()
def __enter__(self):
return self
- def __exit__(self, exc_type, exc_val, exc_tb):
+ def __exit__(self, exc_type, exc_val, exc_tb) -> None:
pass
- def updateClient(self):
+ def updateClient(self) -> None:
self.client = BogusClient(self.messages)
- def login(self, username, password):
+ def login(self, username, password) -> None:
# This will raise a UnicodeEncodeError if the password is not ASCII-only
password.encode("ascii")
# Otherwise, check for correct values
if username != self.USERNAME or password != self.ASCII_PASSWORD:
raise MailboxLoginError("BAD", "OK")
- def login_utf8(self, username, password):
+ def login_utf8(self, username, password) -> None:
# Expected to only be called with the UTF-8 password
if username != self.USERNAME or password != self.UTF_PASSWORD:
raise MailboxLoginError("BAD", "OK")
- def xoauth2(self, username: str, access_token: str):
+ def xoauth2(self, username: str, access_token: str) -> None:
if username != self.USERNAME or access_token != self.ACCESS_TOKEN:
raise MailboxLoginError("BAD", "OK")
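# [Editor's note] For reference, a real client's XOAUTH2 initial response is
# conventionally "user=<user>\x01auth=Bearer <token>\x01\x01"; the bogus
# xoauth2() above simply compares the raw username and token directly:
def xoauth2_string(username: str, access_token: str) -> str:
    return f"user={username}\x01auth=Bearer {access_token}\x01\x01"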
return list(msg)
- def delete(self, uid_list):
+ def delete(self, uid_list) -> None:
self.messages = list(filter(lambda m: m.uid not in uid_list, self.messages))
- def flag(self, uid_list, flag_set, value):
+ def flag(self, uid_list, flag_set, value) -> None:
for message in self.messages:
if message.uid in uid_list:
for flag in flag_set:
if hasattr(message, "flags"):
del message.flags
- def move(self, uid_list, folder):
+ def move(self, uid_list, folder) -> None:
if folder == "spam":
self.messages_spam += list(
filter(lambda m: m.uid in uid_list, self.messages),
class MessageBuilder:
- def __init__(self):
+ def __init__(self) -> None:
self._used_uids = set()
def create_message(
return imap_msg
-def reset_bogus_mailbox(bogus_mailbox: BogusMailBox, message_builder: MessageBuilder):
+def reset_bogus_mailbox(
+ bogus_mailbox: BogusMailBox,
+ message_builder: MessageBuilder,
+) -> None:
bogus_mailbox.messages = []
bogus_mailbox.messages_spam = []
bogus_mailbox.messages.append(
class MailMocker(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.bogus_mailbox = BogusMailBox()
self.messageBuilder = MessageBuilder()
def assert_queue_consumption_tasks_call_args(
self,
expected_call_args: list[list[dict[str, str]]],
- ):
+ ) -> None:
"""
Verifies that queue_consumption_tasks has been called with the expected arguments.
else:
self.fail("No match for expected arg")
- def apply_mail_actions(self):
+ def apply_mail_actions(self) -> None:
"""
Applies pending actions to mails by inspecting calls to the queue_consumption_tasks method.
"""
apply_mail_action([], rule.pk, message.uid, message.subject, message.date)
-def assert_eventually_equals(getter_fn, expected_value, timeout=1.0, interval=0.05):
+def assert_eventually_equals(
+ getter_fn,
+ expected_value,
+ timeout=1.0,
+ interval=0.05,
+) -> None:
"""
Repeatedly calls `getter_fn()` until the result equals `expected_value`,
or times out after `timeout` seconds.
deadline = time.time() + timeout
while time.time() < deadline:
if getter_fn() == expected_value:
- return
+ return None
time.sleep(interval)
actual = getter_fn()
raise AssertionError(f"Expected {expected_value}, but got {actual}")
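# [Editor's note] Usage sketch for the polling helper above, mirroring the
# calls made later in these tests:
#
#     assert_eventually_equals(
#         lambda: len(self.mailMocker.bogus_mailbox.messages),
#         1,
#     )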
FileSystemAssertsMixin,
TestCase,
):
- def setUp(self):
+ def setUp(self) -> None:
self.mailMocker = MailMocker()
self.mailMocker.setUp()
self.mail_account_handler = MailAccountHandler()
super().setUp()
- def test_get_correspondent(self):
+ def test_get_correspondent(self) -> None:
message = namedtuple("MailMessage", [])
message.from_ = "someone@somewhere.com"
message.from_values = EmailAddress(
c = handler._get_correspondent(message, rule)
self.assertEqual(c, someone_else)
- def test_get_title(self):
+ def test_get_title(self) -> None:
message = namedtuple("MailMessage", [])
message.subject = "the message title"
att = namedtuple("Attachment", [])
)
self.assertEqual(handler._get_title(message, att, rule), None)
- def test_handle_message(self):
+ def test_handle_message(self) -> None:
message = self.mailMocker.messageBuilder.create_message(
subject="the message title",
from_="Myself",
],
)
- def test_handle_empty_message(self):
+ def test_handle_empty_message(self) -> None:
message = namedtuple("MailMessage", [])
message.attachments = []
self.mailMocker._queue_consumption_tasks_mock.assert_not_called()
self.assertEqual(result, 0)
- def test_handle_unknown_mime_type(self):
+ def test_handle_unknown_mime_type(self) -> None:
message = self.mailMocker.messageBuilder.create_message(
attachments=[
_AttachmentDef(filename="f1.pdf"),
],
)
- def test_handle_disposition(self):
+ def test_handle_disposition(self) -> None:
message = self.mailMocker.messageBuilder.create_message(
attachments=[
_AttachmentDef(
],
)
- def test_handle_inline_files(self):
+ def test_handle_inline_files(self) -> None:
message = self.mailMocker.messageBuilder.create_message(
attachments=[
_AttachmentDef(
],
)
- def test_filename_filter(self):
+ def test_filename_filter(self) -> None:
"""
GIVEN:
- Email with multiple similar named attachments
)
@pytest.mark.flaky(reruns=4)
- def test_filename_filter_inline_no_consumption(self):
+ def test_filename_filter_inline_no_consumption(self) -> None:
"""
GIVEN:
- Rule that processes all attachments but filters by filename
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 1)
- def test_handle_mail_account_mark_read(self):
+ def test_handle_mail_account_mark_read(self) -> None:
account = MailAccount.objects.create(
name="test",
imap_server="",
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
@pytest.mark.flaky(reruns=4)
- def test_handle_mail_account_delete(self):
+ def test_handle_mail_account_delete(self) -> None:
account = MailAccount.objects.create(
name="test",
imap_server="",
assert_eventually_equals(lambda: len(self.mailMocker.bogus_mailbox.messages), 1)
- def test_handle_mail_account_delete_no_filters(self):
+ def test_handle_mail_account_delete_no_filters(self) -> None:
account = MailAccount.objects.create(
name="test",
imap_server="",
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 0)
@pytest.mark.flaky(reruns=4)
- def test_handle_mail_account_flag(self):
+ def test_handle_mail_account_flag(self) -> None:
account = MailAccount.objects.create(
name="test",
imap_server="",
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
@pytest.mark.flaky(reruns=4)
- def test_handle_mail_account_move(self):
+ def test_handle_mail_account_move(self) -> None:
account = MailAccount.objects.create(
name="test",
imap_server="",
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 2)
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 1)
- def test_handle_mail_account_move_no_filters(self):
+ def test_handle_mail_account_move_no_filters(self) -> None:
account = MailAccount.objects.create(
name="test",
imap_server="",
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 0)
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 3)
- def test_handle_mail_account_tag(self):
+ def test_handle_mail_account_tag(self) -> None:
account = MailAccount.objects.create(
name="test",
imap_server="",
0,
)
- def test_handle_mail_account_tag_gmail(self):
+ def test_handle_mail_account_tag_gmail(self) -> None:
self.mailMocker.bogus_mailbox._host = "imap.gmail.com"
self.mailMocker.bogus_mailbox.client.capabilities = ["X-GM-EXT-1"]
)
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
- def test_tag_mail_action_applemail_wrong_input(self):
+ def test_tag_mail_action_applemail_wrong_input(self) -> None:
self.assertRaises(
MailError,
TagMailAction,
supports_gmail_labels=False,
)
- def test_handle_mail_account_tag_applemail(self):
+ def test_handle_mail_account_tag_applemail(self) -> None:
# all mails will be FLAGGED afterwards
account = MailAccount.objects.create(
)
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
- def test_error_login(self):
+ def test_error_login(self) -> None:
"""
GIVEN:
- Account configured with incorrect password
self.mail_account_handler.handle_mail_account(account)
@pytest.mark.flaky(reruns=4)
- def test_error_skip_account(self):
+ def test_error_skip_account(self) -> None:
_ = MailAccount.objects.create(
name="test",
imap_server="",
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 1)
@pytest.mark.flaky(reruns=4)
- def test_error_skip_rule(self):
+ def test_error_skip_rule(self) -> None:
account = MailAccount.objects.create(
name="test2",
imap_server="",
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 2)
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 1)
- def test_error_folder_set(self):
+ def test_error_folder_set(self) -> None:
"""
GIVEN:
- Mail rule with non-existent folder
self.mailMocker.bogus_mailbox.folder.list.assert_called_once()
self.mailMocker._queue_consumption_tasks_mock.assert_not_called()
- def test_error_folder_set_error_listing(self):
+ def test_error_folder_set_error_listing(self) -> None:
"""
GIVEN:
- Mail rule with non-existent folder
@pytest.mark.flaky(reruns=4)
@mock.patch("paperless_mail.mail.MailAccountHandler._get_correspondent")
- def test_error_skip_mail(self, m):
+ def test_error_skip_mail(self, m) -> None:
def get_correspondent_fake(message, rule):
if message.from_ == "amazon@amazon.de":
raise ValueError("Does not compute.")
else:
"amazon@amazon.de",
)
- def test_error_create_correspondent(self):
+ def test_error_create_correspondent(self) -> None:
account = MailAccount.objects.create(
name="test2",
imap_server="",
)
@pytest.mark.flaky(reruns=4)
- def test_filters(self):
+ def test_filters(self) -> None:
account = MailAccount.objects.create(
name="test3",
imap_server="",
expected_mail_count,
)
- def test_auth_plain_fallback(self):
+ def test_auth_plain_fallback(self) -> None:
"""
GIVEN:
- Mail account with password containing non-ASCII characters
)
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
- def test_auth_plain_fallback_fails_still(self):
+ def test_auth_plain_fallback_fails_still(self) -> None:
"""
GIVEN:
- Mail account with password containing non-ASCII characters
account,
)
- def test_auth_with_valid_token(self):
+ def test_auth_with_valid_token(self) -> None:
"""
GIVEN:
- Mail account configured with access token
)
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3)
- def test_disabled_rule(self):
+ def test_disabled_rule(self) -> None:
"""
GIVEN:
- Mail rule is disabled
class TestPostConsumeAction(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.account = MailAccount.objects.create(
name="test",
imap_server="imap.test.com",
@mock.patch(
"paperless_mail.management.commands.mail_fetcher.tasks.process_mail_accounts",
)
- def test_mail_fetcher(self, m):
+ def test_mail_fetcher(self, m) -> None:
call_command("mail_fetcher")
m.assert_called_once()
class TestTasks(TestCase):
@mock.patch("paperless_mail.tasks.MailAccountHandler.handle_mail_account")
- def test_all_accounts(self, m):
+ def test_all_accounts(self, m) -> None:
m.side_effect = lambda account: 6
MailAccount.objects.create(
self.assertIn("No new", result)
@mock.patch("paperless_mail.tasks.MailAccountHandler.handle_mail_account")
- def test_accounts_no_enabled_rules(self, m):
+ def test_accounts_no_enabled_rules(self, m) -> None:
m.side_effect = lambda account: 6
MailAccount.objects.create(
self.assertEqual(m.call_count, 0)
@mock.patch("paperless_mail.tasks.MailAccountHandler.handle_mail_account")
- def test_process_with_account_ids(self, m):
+ def test_process_with_account_ids(self, m) -> None:
m.side_effect = lambda account: 6
account_a = MailAccount.objects.create(
class TestMailAccountTestView(APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.mailMocker = MailMocker()
self.mailMocker.setUp()
self.user = User.objects.create_user(
self.client.force_authenticate(user=self.user)
self.url = "/api/mail_accounts/test/"
- def test_mail_account_test_view_success(self):
+ def test_mail_account_test_view_success(self) -> None:
data = {
"imap_server": "imap.example.com",
"imap_port": 993,
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, {"success": True})
- def test_mail_account_test_view_mail_error(self):
+ def test_mail_account_test_view_mail_error(self) -> None:
data = {
"imap_server": "imap.example.com",
"imap_port": 993,
class TestMailAccountProcess(APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.mailMocker = MailMocker()
self.mailMocker.setUp()
self.user = User.objects.create_superuser(
self.url = f"/api/mail_accounts/{self.account.pk}/process/"
@mock.patch("paperless_mail.tasks.process_mail_accounts.delay")
- def test_mail_account_process_view(self, m):
+ def test_mail_account_process_view(self, m) -> None:
response = self.client.post(self.url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
class TestMailRuleAPI(APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.user = User.objects.create_superuser(
username="testuser",
password="testpassword",
)
self.url = "/api/mail_rules/"
- def test_create_mail_rule(self):
+ def test_create_mail_rule(self) -> None:
"""
GIVEN:
- Valid data for creating a mail rule
rule = MailRule.objects.first()
self.assertEqual(rule.name, "Test Rule")
- def test_mail_rule_action_parameter_required_for_tag_or_move(self):
+ def test_mail_rule_action_parameter_required_for_tag_or_move(self) -> None:
"""
GIVEN:
- Valid data for creating a mail rule without action_parameter
settings.OUTLOOK_OAUTH_CLIENT_SECRET = "test_outlook_client_secret"
super().setUp()
- def test_generate_paths(self):
+ def test_generate_paths(self) -> None:
"""
GIVEN:
- Mocked settings for OAuth callback and base URLs
)
@mock.patch("httpx_oauth.oauth2.BaseOAuth2.get_access_token")
- def test_oauth_callback_view_fails(self, mock_get_access_token):
+ def test_oauth_callback_view_fails(self, mock_get_access_token) -> None:
"""
GIVEN:
- Mocked settings for Gmail and Outlook OAuth client IDs and secrets
self.assertIn("Error getting access token: test_error", cm.output[0])
- def test_oauth_callback_view_insufficient_permissions(self):
+ def test_oauth_callback_view_insufficient_permissions(self) -> None:
"""
GIVEN:
- Mocked settings for Gmail and Outlook OAuth client IDs and secrets
MailAccount.objects.filter(imap_server="outlook.office365.com").exists(),
)
- def test_oauth_callback_view_no_code(self):
+ def test_oauth_callback_view_no_code(self) -> None:
"""
GIVEN:
- Mocked settings for Gmail and Outlook OAuth client IDs and secrets
MailAccount.objects.filter(imap_server="outlook.office365.com").exists(),
)
- def test_oauth_callback_view_invalid_state(self):
+ def test_oauth_callback_view_invalid_state(self) -> None:
"""
GIVEN:
- Mocked settings for Gmail and Outlook OAuth client IDs and secrets
self,
mail_parser: MailDocumentParser,
sample_dir: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Fresh parser
self,
mail_parser: MailDocumentParser,
broken_email_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Fresh parser
self,
mail_parser: MailDocumentParser,
simple_txt_email_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Fresh parser
self,
caplog: pytest.LogCaptureFixture,
mail_parser: MailDocumentParser,
- ):
+ ) -> None:
"""
GIVEN:
- Fresh start
self,
mail_parser: MailDocumentParser,
simple_txt_email_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Fresh start
mocker: MockerFixture,
mail_parser: MailDocumentParser,
simple_txt_email_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- An E-Mail was parsed
self,
httpx_mock: HTTPXMock,
mail_parser: MailDocumentParser,
- ):
+ ) -> None:
"""
GIVEN:
- Fresh start
parsed = mail_parser.tika_parse("None")
assert parsed == ""
- def test_tika_parse(self, httpx_mock: HTTPXMock, mail_parser: MailDocumentParser):
+ def test_tika_parse(
+ self,
+ httpx_mock: HTTPXMock,
+ mail_parser: MailDocumentParser,
+ ) -> None:
"""
GIVEN:
- Fresh start
self,
httpx_mock: HTTPXMock,
mail_parser: MailDocumentParser,
- ):
+ ) -> None:
"""
GIVEN:
- Fresh start
self,
settings: SettingsWrapper,
mail_parser: MailDocumentParser,
- ):
+ ) -> None:
"""
GIVEN:
- Fresh start
mocker: MockerFixture,
mail_parser: MailDocumentParser,
simple_txt_email_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Fresh start
httpx_mock: HTTPXMock,
mail_parser: MailDocumentParser,
html_email_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Fresh start
httpx_mock: HTTPXMock,
mail_parser: MailDocumentParser,
simple_txt_email_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Fresh start
mail_parser: MailDocumentParser,
simple_txt_email_file: Path,
simple_txt_email_pdf_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Simple text email with no HTML content
mail_parser: MailDocumentParser,
html_email_file: Path,
html_email_pdf_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- email with HTML content
mail_parser: MailDocumentParser,
html_email_file: Path,
html_email_pdf_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- email with HTML content
mail_parser: MailDocumentParser,
html_email_file: Path,
html_email_pdf_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- email with HTML content
mail_parser: MailDocumentParser,
html_email_file: Path,
html_email_html_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Email message with HTML content
httpx_mock: HTTPXMock,
mail_parser: MailDocumentParser,
html_email_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Email message with HTML content
mail_parser: MailDocumentParser,
html_email_file: Path,
html_email_pdf_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Email message
class MessageEncryptor:
- def __init__(self):
+ def __init__(self) -> None:
self.gpg_home = tempfile.mkdtemp()
self.gpg = gnupg.GPG(gnupghome=self.gpg_home)
self._testUser = "testuser@example.com"
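For readers unfamiliar with python-gnupg, the helper above boils down to a keyring-in-a-tempdir round trip. A minimal sketch, assuming a throwaway key protected by an illustrative passphrase (the real MessageEncryptor generates its own key for testuser@example.com and may differ in detail):

import shutil
import tempfile

import gnupg

# Isolated GPG home so nothing touches the user's real keyring.
gpg_home = tempfile.mkdtemp()
gpg = gnupg.GPG(gnupghome=gpg_home)

# Generate a disposable key; parameters are illustrative.
key = gpg.gen_key(
    gpg.gen_key_input(name_email="testuser@example.com", passphrase="secret"),
)

# Encrypt for that key, then decrypt with the passphrase.
encrypted = gpg.encrypt("hello", [key.fingerprint])
decrypted = gpg.decrypt(str(encrypted), passphrase="secret")
assert decrypted.ok and str(decrypted) == "hello"

shutil.rmtree(gpg_home, ignore_errors=True)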
class TestMailMessageGpgDecryptor(TestMail):
@classmethod
- def setUpClass(cls):
+ def setUpClass(cls) -> None:
"""Create GPG encryptor once for all tests in this class."""
super().setUpClass()
cls.messageEncryptor = MessageEncryptor()
@classmethod
- def tearDownClass(cls):
+ def tearDownClass(cls) -> None:
"""Clean up GPG resources after all tests complete."""
if hasattr(cls, "messageEncryptor"):
cls.messageEncryptor.cleanup()
super().tearDownClass()
- def setUp(self):
+ def setUp(self) -> None:
with override_settings(
EMAIL_GNUPG_HOME=self.messageEncryptor.gpg_home,
EMAIL_ENABLE_GPG_DECRYPTOR=True,
):
super().setUp()
- def test_preprocessor_is_able_to_run(self):
+ def test_preprocessor_is_able_to_run(self) -> None:
with override_settings(
EMAIL_GNUPG_HOME=self.messageEncryptor.gpg_home,
EMAIL_ENABLE_GPG_DECRYPTOR=True,
):
self.assertTrue(MailMessageDecryptor.able_to_run())
- def test_preprocessor_is_able_to_run2(self):
+ def test_preprocessor_is_able_to_run2(self) -> None:
with override_settings(
EMAIL_GNUPG_HOME=None,
EMAIL_ENABLE_GPG_DECRYPTOR=True,
):
self.assertTrue(MailMessageDecryptor.able_to_run())
- def test_is_not_able_to_run_disabled(self):
+ def test_is_not_able_to_run_disabled(self) -> None:
with override_settings(
EMAIL_ENABLE_GPG_DECRYPTOR=False,
):
self.assertFalse(MailMessageDecryptor.able_to_run())
- def test_is_not_able_to_run_bogus_path(self):
+ def test_is_not_able_to_run_bogus_path(self) -> None:
with override_settings(
EMAIL_ENABLE_GPG_DECRYPTOR=True,
EMAIL_GNUPG_HOME="_)@# notapath &%#$",
):
self.assertFalse(MailMessageDecryptor.able_to_run())
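The four able_to_run cases above pin down a small gate. A hedged reconstruction of that logic (the real method lives in paperless_mail and may differ in detail):

from pathlib import Path

from django.conf import settings


def able_to_run_sketch() -> bool:
    # Disabled outright when the feature flag is off.
    if not settings.EMAIL_ENABLE_GPG_DECRYPTOR:
        return False
    home = settings.EMAIL_GNUPG_HOME
    # None falls back to gnupg's default home; anything else
    # must be an existing directory.
    return home is None or Path(home).is_dir()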
- def test_fails_at_initialization(self):
+ def test_fails_at_initialization(self) -> None:
with (
mock.patch("gnupg.GPG.__init__") as mock_run,
override_settings(
handler = MailAccountHandler()
self.assertEqual(len(handler._message_preprocessors), 0)
- def test_decrypt_fails(self):
+ def test_decrypt_fails(self) -> None:
encrypted_message, _ = self.create_encrypted_unencrypted_message_pair()
# This test creates its own empty GPG home to test decryption failure
empty_gpg_home = tempfile.mkdtemp()
pass
shutil.rmtree(empty_gpg_home, ignore_errors=True)
- def test_decrypt_encrypted_mail(self):
+ def test_decrypt_encrypted_mail(self) -> None:
"""
Creates a mail with attachments, then encrypts it with a new key.
Verifies that the encrypted message can be decrypted with the attachments intact.
encrypted_message = self.messageEncryptor.encrypt(message)
return encrypted_message, message
- def test_handle_encrypted_message(self):
+ def test_handle_encrypted_message(self) -> None:
message = self.mailMocker.messageBuilder.create_message(
subject="the message title",
from_="Myself",
class PaperlessRemoteParserConfig(AppConfig):
name = "paperless_remote"
- def ready(self):
+ def ready(self) -> None:
from documents.signals import document_consumer_declaration
document_consumer_declaration.connect(remote_consumer_declaration)
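The receiver connected here follows the consumer-declaration convention used by the other parser apps; a sketch of its likely shape (the weight and mime types are illustrative assumptions, not confirmed by this diff):

from paperless_remote.parsers import RemoteDocumentParser  # assumed module path


def remote_consumer_declaration_sketch(sender, **kwargs):
    # Advertise the parser to the consumer plugin system.
    return {
        "parser": RemoteDocumentParser,
        "weight": 5,  # illustrative priority
        "mime_types": {"application/pdf": ".pdf"},
    }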
class TestChecks(TestCase):
@override_settings(REMOTE_OCR_ENGINE=None)
- def test_no_engine(self):
+ def test_no_engine(self) -> None:
msgs = check_remote_parser_configured(None)
self.assertEqual(len(msgs), 0)
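A hedged sketch of the check these tests exercise, written in the shape of a standard Django system check (the message text is illustrative):

from django.conf import settings
from django.core.checks import Error


def check_remote_parser_configured_sketch(app_configs, **kwargs):
    # With the Azure engine selected, both an API key and an
    # endpoint must be configured; otherwise report one error.
    if settings.REMOTE_OCR_ENGINE == "azureai" and not (
        settings.REMOTE_OCR_API_KEY and settings.REMOTE_OCR_ENDPOINT
    ):
        return [
            Error("Azure AI remote OCR requires both an API key and an endpoint."),
        ]
    return []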
@override_settings(REMOTE_OCR_ENGINE="azureai")
@override_settings(REMOTE_OCR_API_KEY="somekey")
@override_settings(REMOTE_OCR_ENDPOINT=None)
- def test_azure_no_endpoint(self):
+ def test_azure_no_endpoint(self) -> None:
msgs = check_remote_parser_configured(None)
self.assertEqual(len(msgs), 1)
self.assertTrue(
class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
SAMPLE_FILES = Path(__file__).resolve().parent / "samples"
- def assertContainsStrings(self, content: str, strings: list[str]):
+ def assertContainsStrings(self, content: str, strings: list[str]) -> None:
# Asserts that all strings appear in content, in the given order.
indices = []
for s in strings:
@mock.patch("paperless_tesseract.parsers.run_subprocess")
@mock.patch("azure.ai.documentintelligence.DocumentIntelligenceClient")
- def test_get_text_with_azure(self, mock_client_cls, mock_subprocess):
+ def test_get_text_with_azure(self, mock_client_cls, mock_subprocess) -> None:
# Arrange mock Azure client
mock_client = mock.Mock()
mock_client_cls.return_value = mock_client
]
# Simulate pdftotext by writing dummy text to sidecar file
- def fake_run(cmd, *args, **kwargs):
+ def fake_run(cmd, *args, **kwargs) -> None:
with Path(cmd[-1]).open("w", encoding="utf-8") as f:
f.write("This is a test document.")
)
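Wiring the fake into the patched helper is a single assignment; hypothetical, reusing the mock names from the test above:

# Route every run_subprocess call through the stand-in, so the
# sidecar file is populated without invoking the real pdftotext.
mock_subprocess.side_effect = fake_run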
@mock.patch("azure.ai.documentintelligence.DocumentIntelligenceClient")
- def test_get_text_with_azure_error_logged_and_returns_none(self, mock_client_cls):
+ def test_get_text_with_azure_error_logged_and_returns_none(
+ self,
+ mock_client_cls,
+ ) -> None:
mock_client = mock.Mock()
mock_client.begin_analyze_document.side_effect = RuntimeError("fail")
mock_client_cls.return_value = mock_client
REMOTE_OCR_API_KEY="key",
REMOTE_OCR_ENDPOINT="https://endpoint.cognitiveservices.azure.com",
)
- def test_supported_mime_types_valid_config(self):
+ def test_supported_mime_types_valid_config(self) -> None:
parser = RemoteDocumentParser(uuid.uuid4())
expected_types = {
"application/pdf": ".pdf",
}
self.assertEqual(parser.supported_mime_types(), expected_types)
- def test_supported_mime_types_invalid_config(self):
+ def test_supported_mime_types_invalid_config(self) -> None:
parser = get_parser(uuid.uuid4())
self.assertEqual(parser.supported_mime_types(), {})
REMOTE_OCR_API_KEY=None,
REMOTE_OCR_ENDPOINT=None,
)
- def test_parse_with_invalid_config(self):
+ def test_parse_with_invalid_config(self) -> None:
parser = get_parser(uuid.uuid4())
parser.parse(self.SAMPLE_FILES / "simple-digital.pdf", "application/pdf")
self.assertEqual(parser.text, "")
class PaperlessTesseractConfig(AppConfig):
name = "paperless_tesseract"
- def ready(self):
+ def ready(self) -> None:
from documents.signals import document_consumer_declaration
document_consumer_declaration.connect(tesseract_consumer_declaration)
return ocrmypdf_args
- def parse(self, document_path: Path, mime_type, file_name=None):
+ def parse(self, document_path: Path, mime_type, file_name=None) -> None:
# This forces tesseract to use one core per page.
os.environ["OMP_THREAD_LIMIT"] = "1"
VALID_TEXT_LENGTH = 50
class TestChecks(TestCase):
- def test_default_language(self):
+ def test_default_language(self) -> None:
check_default_language_available(None)
@override_settings(OCR_LANGUAGE="")
- def test_no_language(self):
+ def test_no_language(self) -> None:
msgs = check_default_language_available(None)
self.assertEqual(len(msgs), 1)
self.assertTrue(
@override_settings(OCR_LANGUAGE="ita")
@mock.patch("paperless_tesseract.checks.get_tesseract_langs")
- def test_invalid_language(self, m):
+ def test_invalid_language(self, m) -> None:
m.return_value = ["deu", "eng"]
msgs = check_default_language_available(None)
self.assertEqual(len(msgs), 1)
@override_settings(OCR_LANGUAGE="chi_sim")
@mock.patch("paperless_tesseract.checks.get_tesseract_langs")
- def test_multi_part_language(self, m):
+ def test_multi_part_language(self, m) -> None:
"""
GIVEN:
- An OCR language which is multi-part (i.e. chi_sim)
@override_settings(OCR_LANGUAGE="chi-sim")
@mock.patch("paperless_tesseract.checks.get_tesseract_langs")
- def test_multi_part_language_bad_format(self, m):
+ def test_multi_part_language_bad_format(self, m) -> None:
"""
GIVEN:
- An OCR language which is multi-part but incorrectly formatted (i.e. chi-sim)
class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
SAMPLE_FILES = Path(__file__).resolve().parent / "samples"
- def assertContainsStrings(self, content, strings):
+ def assertContainsStrings(self, content, strings) -> None:
# Asserts that all strings appear in content, in the given order.
indices = []
for s in strings:
self.fail(f"'{s}' is not in '{content}'")
self.assertListEqual(indices, sorted(indices))
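The same ordering check can also be expressed as a stand-alone predicate; a sketch equivalent in spirit to the helper above (it requires non-overlapping forward matches rather than comparing first-occurrence indices):

def contains_strings_in_order(content: str, strings: list[str]) -> bool:
    # Walk the content left to right, requiring each string to
    # appear at or after the end of the previous match.
    pos = 0
    for s in strings:
        found = content.find(s, pos)
        if found == -1:
            return False
        pos = found + len(s)
    return True


assert contains_strings_in_order("abc def", ["abc", "def"])
assert not contains_strings_in_order("def abc", ["abc", "def"])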
- def test_post_process_text(self):
+ def test_post_process_text(self) -> None:
text_cases = [
("simple string", "simple string"),
("simple newline\n testing string", "simple newline\ntesting string"),
f"strip_exceess_whitespace({source}) != '{result}', but '{actual_result}'",
)
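From the pairs in text_cases, the transformation can be reconstructed; a minimal sketch, assuming the intent is to collapse horizontal whitespace and trim the padding around each line (the real post_process_text may handle more cases):

import re


def post_process_text_sketch(text: str) -> str:
    # Collapse runs of spaces/tabs, then strip each line while
    # preserving the line breaks themselves.
    collapsed = re.sub(r"[ \t]+", " ", text)
    return "\n".join(line.strip() for line in collapsed.splitlines())


assert post_process_text_sketch("simple string") == "simple string"
assert post_process_text_sketch("simple newline\n testing string") == (
    "simple newline\ntesting string"
)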
- def test_get_text_from_pdf(self):
+ def test_get_text_from_pdf(self) -> None:
parser = RasterisedDocumentParser(uuid.uuid4())
text = parser.extract_text(
None,
self.assertContainsStrings(text.strip(), ["This is a test document."])
- def test_get_page_count(self):
+ def test_get_page_count(self) -> None:
"""
GIVEN:
- PDF file with a single page
)
self.assertEqual(page_count, 6)
- def test_get_page_count_password_protected(self):
+ def test_get_page_count_password_protected(self) -> None:
"""
GIVEN:
- Password protected PDF file
self.assertEqual(page_count, None)
self.assertIn("Unable to determine PDF page count", cm.output[0])
- def test_thumbnail(self):
+ def test_thumbnail(self) -> None:
parser = RasterisedDocumentParser(uuid.uuid4())
thumb = parser.get_thumbnail(
str(self.SAMPLE_FILES / "simple-digital.pdf"),
self.assertIsFile(thumb)
@mock.patch("documents.parsers.run_convert")
- def test_thumbnail_fallback(self, m):
- def call_convert(input_file, output_file, **kwargs):
+ def test_thumbnail_fallback(self, m) -> None:
+ def call_convert(input_file, output_file, **kwargs) -> None:
if ".pdf" in str(input_file):
raise ParseError("Does not compute.")
else:
)
self.assertIsFile(thumb)
- def test_thumbnail_encrypted(self):
+ def test_thumbnail_encrypted(self) -> None:
parser = RasterisedDocumentParser(uuid.uuid4())
thumb = parser.get_thumbnail(
str(self.SAMPLE_FILES / "encrypted.pdf"),
)
self.assertIsFile(thumb)
- def test_get_dpi(self):
+ def test_get_dpi(self) -> None:
parser = RasterisedDocumentParser(None)
dpi = parser.get_dpi(str(self.SAMPLE_FILES / "simple-no-dpi.png"))
dpi = parser.get_dpi(str(self.SAMPLE_FILES / "simple.png"))
self.assertEqual(dpi, 72)
- def test_simple_digital(self):
+ def test_simple_digital(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
self.assertContainsStrings(parser.get_text(), ["This is a test document."])
- def test_with_form(self):
+ def test_with_form(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
)
@override_settings(OCR_MODE="redo")
- def test_with_form_error(self):
+ def test_with_form_error(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
)
@override_settings(OCR_MODE="skip")
- def test_signed(self):
+ def test_signed(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(str(self.SAMPLE_FILES / "signed.pdf"), "application/pdf")
)
@override_settings(OCR_MODE="skip")
- def test_encrypted(self):
+ def test_encrypted(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
self.assertEqual(parser.get_text(), "")
@override_settings(OCR_MODE="redo")
- def test_with_form_error_notext(self):
+ def test_with_form_error_notext(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
str(self.SAMPLE_FILES / "with-form.pdf"),
)
@override_settings(OCR_MODE="force")
- def test_with_form_force(self):
+ def test_with_form_force(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
["Please enter your name in here:", "This is a PDF document with a form."],
)
- def test_image_simple(self):
+ def test_image_simple(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(str(self.SAMPLE_FILES / "simple.png"), "image/png")
self.assertContainsStrings(parser.get_text(), ["This is a test document."])
- def test_image_simple_alpha(self):
+ def test_image_simple_alpha(self) -> None:
parser = RasterisedDocumentParser(None)
with tempfile.TemporaryDirectory() as tempdir:
self.assertContainsStrings(parser.get_text(), ["This is a test document."])
- def test_image_calc_a4_dpi(self):
+ def test_image_calc_a4_dpi(self) -> None:
parser = RasterisedDocumentParser(None)
dpi = parser.calculate_a4_dpi(
self.assertEqual(dpi, 62)
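The expected value follows from simple geometry: DPI is the pixel width divided by A4's physical width. A worked sketch, assuming the sample image is about 512 pixels wide (its real width is not shown in this excerpt):

A4_WIDTH_INCHES = 210 / 25.4  # 210 mm, roughly 8.27 in
image_width_px = 512          # assumed width of the sample
dpi = round(image_width_px / A4_WIDTH_INCHES)
print(dpi)  # 62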
@mock.patch("paperless_tesseract.parsers.RasterisedDocumentParser.calculate_a4_dpi")
- def test_image_dpi_fail(self, m):
+ def test_image_dpi_fail(self, m) -> None:
m.return_value = None
parser = RasterisedDocumentParser(None)
- def f():
+ def f() -> None:
parser.parse(
str(self.SAMPLE_FILES / "simple-no-dpi.png"),
"image/png",
self.assertRaises(ParseError, f)
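The local f wrapper works, but the same assertion reads more directly with assertRaises as a context manager:

with self.assertRaises(ParseError):
    parser.parse(
        str(self.SAMPLE_FILES / "simple-no-dpi.png"),
        "image/png",
    )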
@override_settings(OCR_IMAGE_DPI=72, MAX_IMAGE_PIXELS=0)
- def test_image_no_dpi_default(self):
+ def test_image_no_dpi_default(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(str(self.SAMPLE_FILES / "simple-no-dpi.png"), "image/png")
["this is a test document."],
)
- def test_multi_page(self):
+ def test_multi_page(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
str(self.SAMPLE_FILES / "multi-page-digital.pdf"),
)
@override_settings(OCR_PAGES=2, OCR_MODE="skip")
- def test_multi_page_pages_skip(self):
+ def test_multi_page_pages_skip(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
str(self.SAMPLE_FILES / "multi-page-digital.pdf"),
)
@override_settings(OCR_PAGES=2, OCR_MODE="redo")
- def test_multi_page_pages_redo(self):
+ def test_multi_page_pages_redo(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
str(self.SAMPLE_FILES / "multi-page-digital.pdf"),
)
@override_settings(OCR_PAGES=2, OCR_MODE="force")
- def test_multi_page_pages_force(self):
+ def test_multi_page_pages_force(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
str(self.SAMPLE_FILES / "multi-page-digital.pdf"),
)
@override_settings(OCR_MODE="skip")
- def test_multi_page_analog_pages_skip(self):
+ def test_multi_page_analog_pages_skip(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
str(self.SAMPLE_FILES / "multi-page-images.pdf"),
)
@override_settings(OCR_PAGES=2, OCR_MODE="redo")
- def test_multi_page_analog_pages_redo(self):
+ def test_multi_page_analog_pages_redo(self) -> None:
"""
GIVEN:
- File with text contained in images but no text layer
self.assertNotIn("page 3", parser.get_text().lower())
@override_settings(OCR_PAGES=1, OCR_MODE="force")
- def test_multi_page_analog_pages_force(self):
+ def test_multi_page_analog_pages_force(self) -> None:
"""
GIVEN:
- File with text contained in images but no text layer
self.assertNotIn("page 3", parser.get_text().lower())
@override_settings(OCR_MODE="skip_noarchive")
- def test_skip_noarchive_withtext(self):
+ def test_skip_noarchive_withtext(self) -> None:
"""
GIVEN:
- File with existing text layer
)
@override_settings(OCR_MODE="skip_noarchive")
- def test_skip_noarchive_notext(self):
+ def test_skip_noarchive_notext(self) -> None:
"""
GIVEN:
- File with text contained in images but no text layer
self.assertIsNotNone(parser.archive_path)
@override_settings(OCR_SKIP_ARCHIVE_FILE="never")
- def test_skip_archive_never_withtext(self):
+ def test_skip_archive_never_withtext(self) -> None:
"""
GIVEN:
- File with existing text layer
)
@override_settings(OCR_SKIP_ARCHIVE_FILE="never")
- def test_skip_archive_never_withimages(self):
+ def test_skip_archive_never_withimages(self) -> None:
"""
GIVEN:
- File with text contained in images but no text layer
)
@override_settings(OCR_SKIP_ARCHIVE_FILE="with_text")
- def test_skip_archive_withtext_withtext(self):
+ def test_skip_archive_withtext_withtext(self) -> None:
"""
GIVEN:
- File with existing text layer
)
@override_settings(OCR_SKIP_ARCHIVE_FILE="with_text")
- def test_skip_archive_withtext_withimages(self):
+ def test_skip_archive_withtext_withimages(self) -> None:
"""
GIVEN:
- File with text contained in images but no text layer
)
@override_settings(OCR_SKIP_ARCHIVE_FILE="always")
- def test_skip_archive_always_withtext(self):
+ def test_skip_archive_always_withtext(self) -> None:
"""
GIVEN:
- File with existing text layer
)
@override_settings(OCR_SKIP_ARCHIVE_FILE="always")
- def test_skip_archive_always_withimages(self):
+ def test_skip_archive_always_withimages(self) -> None:
"""
GIVEN:
- File with text contained in images but no text layer
)
@override_settings(OCR_MODE="skip")
- def test_multi_page_mixed(self):
+ def test_multi_page_mixed(self) -> None:
"""
GIVEN:
- File with some text contained in images and some in the text layer
self.assertIn("[OCR skipped on page(s) 4-6]", sidecar)
@override_settings(OCR_MODE="redo")
- def test_single_page_mixed(self):
+ def test_single_page_mixed(self) -> None:
"""
GIVEN:
- File with some text contained in images and some in the text layer
)
@override_settings(OCR_MODE="skip_noarchive")
- def test_multi_page_mixed_no_archive(self):
+ def test_multi_page_mixed_no_archive(self) -> None:
"""
GIVEN:
- File with some text contained in images and some in the text layer
)
@override_settings(OCR_MODE="skip", OCR_ROTATE_PAGES=True)
- def test_rotate(self):
+ def test_rotate(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(str(self.SAMPLE_FILES / "rotated.pdf"), "application/pdf")
self.assertContainsStrings(
],
)
- def test_multi_page_tiff(self):
+ def test_multi_page_tiff(self) -> None:
"""
GIVEN:
- Multi-page TIFF image
["page 1", "page 2", "page 3"],
)
- def test_multi_page_tiff_alpha(self):
+ def test_multi_page_tiff_alpha(self) -> None:
"""
GIVEN:
- Multi-page TIFF image
["page 1", "page 2", "page 3"],
)
- def test_multi_page_tiff_alpha_srgb(self):
+ def test_multi_page_tiff_alpha_srgb(self) -> None:
"""
GIVEN:
- Multi-page TIFF image
["page 1", "page 2", "page 3"],
)
- def test_ocrmypdf_parameters(self):
+ def test_ocrmypdf_parameters(self) -> None:
parser = RasterisedDocumentParser(None)
params = parser.construct_ocrmypdf_parameters(
input_file="input.pdf",
params = parser.construct_ocrmypdf_parameters("", "", "", "")
self.assertNotIn("max_image_mpixels", params)
- def test_rtl_language_detection(self):
+ def test_rtl_language_detection(self) -> None:
"""
GIVEN:
- File with text in an RTL language
self.assertIn("ةﯾﻠﺧﺎدﻻ ةرازو", parser.get_text())
@mock.patch("ocrmypdf.ocr")
- def test_gs_rendering_error(self, m):
+ def test_gs_rendering_error(self, m) -> None:
m.side_effect = SubprocessOutputError("Ghostscript PDF/A rendering failed")
parser = RasterisedDocumentParser(None)
class TestParserFileTypes(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
SAMPLE_FILES = Path(__file__).parent / "samples"
- def test_bmp(self):
+ def test_bmp(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(str(self.SAMPLE_FILES / "simple.bmp"), "image/bmp")
self.assertIsFile(parser.archive_path)
self.assertIn("this is a test document", parser.get_text().lower())
- def test_jpg(self):
+ def test_jpg(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(str(self.SAMPLE_FILES / "simple.jpg"), "image/jpeg")
self.assertIsFile(parser.archive_path)
self.assertIn("this is a test document", parser.get_text().lower())
- def test_heic(self):
+ def test_heic(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(str(self.SAMPLE_FILES / "simple.heic"), "image/heic")
self.assertIsFile(parser.archive_path)
self.assertIn("pizza", parser.get_text().lower())
@override_settings(OCR_IMAGE_DPI=200)
- def test_gif(self):
+ def test_gif(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(str(self.SAMPLE_FILES / "simple.gif"), "image/gif")
self.assertIsFile(parser.archive_path)
self.assertIn("this is a test document", parser.get_text().lower())
- def test_tiff(self):
+ def test_tiff(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(str(self.SAMPLE_FILES / "simple.tif"), "image/tiff")
self.assertIsFile(parser.archive_path)
self.assertIn("this is a test document", parser.get_text().lower())
@override_settings(OCR_IMAGE_DPI=72)
- def test_webp(self):
+ def test_webp(self) -> None:
parser = RasterisedDocumentParser(None)
parser.parse(
str(self.SAMPLE_FILES / "document.webp"),
safe_fallback=False,
)
- def test_db_settings_ocr_pages(self):
+ def test_db_settings_ocr_pages(self) -> None:
"""
GIVEN:
- Django settings defines a different value for OCR_PAGES than
params = self.get_params()
self.assertEqual(params["pages"], "1-5")
- def test_db_settings_ocr_language(self):
+ def test_db_settings_ocr_language(self) -> None:
"""
GIVEN:
- Django settings defines a different value for OCR_LANGUAGE than
params = self.get_params()
self.assertEqual(params["language"], "fra+ita")
- def test_db_settings_ocr_output_type(self):
+ def test_db_settings_ocr_output_type(self) -> None:
"""
GIVEN:
- Django settings defines a different value for OCR_OUTPUT_TYPE than
params = self.get_params()
self.assertEqual(params["output_type"], "pdfa")
- def test_db_settings_ocr_mode(self):
+ def test_db_settings_ocr_mode(self) -> None:
"""
GIVEN:
- Django settings defines a different value for OCR_MODE than
self.assertNotIn("redo_ocr", params)
self.assertNotIn("force_ocr", params)
- def test_db_settings_ocr_clean(self):
+ def test_db_settings_ocr_clean(self) -> None:
"""
GIVEN:
- Django settings defines a different value for OCR_CLEAN than
self.assertTrue(params["clean_final"])
self.assertNotIn("clean", params)
- def test_db_settings_ocr_deskew(self):
+ def test_db_settings_ocr_deskew(self) -> None:
"""
GIVEN:
- Django settings defines a different value for OCR_DESKEW than
params = self.get_params()
self.assertTrue(params["deskew"])
- def test_db_settings_ocr_rotate(self):
+ def test_db_settings_ocr_rotate(self) -> None:
"""
GIVEN:
- Django settings defines a different value for OCR_ROTATE_PAGES
self.assertTrue(params["rotate_pages"])
self.assertAlmostEqual(params["rotate_pages_threshold"], 15.0)
- def test_db_settings_ocr_max_pixels(self):
+ def test_db_settings_ocr_max_pixels(self) -> None:
"""
GIVEN:
- Django settings defines a different value for OCR_MAX_IMAGE_PIXELS than
params = self.get_params()
self.assertAlmostEqual(params["max_image_mpixels"], 1.0)
- def test_db_settings_ocr_color_convert(self):
+ def test_db_settings_ocr_color_convert(self) -> None:
"""
GIVEN:
- Django settings defines a different value for OCR_COLOR_CONVERSION_STRATEGY than
"UseDeviceIndependentColor",
)
- def test_ocr_user_args(self):
+ def test_ocr_user_args(self) -> None:
"""
GIVEN:
- Django settings defines different value for OCR_USER_ARGS than
class PaperlessTextConfig(AppConfig):
name = "paperless_text"
- def ready(self):
+ def ready(self) -> None:
from documents.signals import document_consumer_declaration
document_consumer_declaration.connect(text_consumer_declaration)
return out_path
- def parse(self, document_path, mime_type, file_name=None):
+ def parse(self, document_path, mime_type, file_name=None) -> None:
self.text = self.read_file_handle_unicode_errors(document_path)
- def get_settings(self):
+ def get_settings(self) -> None:
"""
This parser does not implement additional settings yet
"""
class TestTextParser:
- def test_thumbnail(self, text_parser: TextDocumentParser, sample_txt_file: Path):
+ def test_thumbnail(
+ self,
+ text_parser: TextDocumentParser,
+ sample_txt_file: Path,
+ ) -> None:
# just make sure that it does not crash
f = text_parser.get_thumbnail(sample_txt_file, "text/plain")
assert f.exists()
assert f.is_file()
- def test_parse(self, text_parser: TextDocumentParser, sample_txt_file: Path):
+ def test_parse(
+ self,
+ text_parser: TextDocumentParser,
+ sample_txt_file: Path,
+ ) -> None:
text_parser.parse(sample_txt_file, "text/plain")
assert text_parser.get_text() == "This is a test file.\n"
self,
text_parser: TextDocumentParser,
malformed_txt_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Text file which contains invalid UTF bytes
assert text_parser.get_text() == "Pantothens�ure\n"
assert text_parser.get_archive_path() is None
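The replacement character in the expected text pins down the decoding strategy; a hedged sketch of read_file_handle_unicode_errors consistent with that assertion:

from pathlib import Path


def read_file_handle_unicode_errors_sketch(path: Path) -> str:
    # Invalid byte sequences become U+FFFD instead of raising
    # UnicodeDecodeError, matching "Pantothens�ure" above.
    return path.read_text(encoding="utf-8", errors="replace")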
- def test_thumbnail_large_file(self, text_parser: TextDocumentParser):
+ def test_thumbnail_large_file(self, text_parser: TextDocumentParser) -> None:
"""
GIVEN:
- A very large text file (>50MB)
class PaperlessTikaConfig(AppConfig):
name = "paperless_tika"
- def ready(self):
+ def ready(self) -> None:
from documents.signals import document_consumer_declaration
if settings.TIKA_ENABLED:
)
return []
- def parse(self, document_path: Path, mime_type: str, file_name=None):
+ def parse(self, document_path: Path, mime_type: str, file_name=None) -> None:
self.log.info(f"Sending {document_path} to Tika server")
try:
self,
tika_parser: TikaDocumentParser,
sample_odt_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- An input ODT format document
self,
tika_parser: TikaDocumentParser,
sample_docx_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- An input DOCX format document
self,
tika_parser: TikaDocumentParser,
sample_doc_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- An input DOC format document
self,
tika_parser: TikaDocumentParser,
sample_broken_odt: Path,
- ):
+ ) -> None:
"""
GIVEN:
- An input ODT format document
settings: SettingsWrapper,
tika_parser: TikaDocumentParser,
sample_odt_file: Path,
- ):
+ ) -> None:
settings.TIME_ZONE = "America/Chicago"
# Pretend parse response
httpx_mock.add_response(
httpx_mock: HTTPXMock,
tika_parser: TikaDocumentParser,
sample_odt_file: Path,
- ):
+ ) -> None:
httpx_mock.add_response(
json={
"Content-Type": "application/vnd.oasis.opendocument.text",
httpx_mock: HTTPXMock,
tika_parser: TikaDocumentParser,
sample_odt_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Document needs to be converted to PDF
settings: SettingsWrapper,
tika_parser: TikaDocumentParser,
sample_odt_file: Path,
- ):
+ ) -> None:
"""
GIVEN:
- Document needs to be converted to PDF