from documents import bulk_edit
from documents import index
-from documents.ai.llm_classifier import get_ai_document_classification
-from documents.ai.matching import extract_unmatched_names
-from documents.ai.matching import match_correspondents_by_name
-from documents.ai.matching import match_document_types_by_name
-from documents.ai.matching import match_storage_paths_by_name
-from documents.ai.matching import match_tags_by_name
from documents.bulk_download import ArchiveOnlyStrategy
from documents.bulk_download import OriginalAndArchiveStrategy
from documents.bulk_download import OriginalsOnlyStrategy
from documents.templating.filepath import validate_filepath_template_and_render
from documents.utils import get_boolean
from paperless import version
+from paperless.ai.ai_classifier import get_ai_document_classification
+from paperless.ai.matching import extract_unmatched_names
+from paperless.ai.matching import match_correspondents_by_name
+from paperless.ai.matching import match_document_types_by_name
+from paperless.ai.matching import match_storage_paths_by_name
+from paperless.ai.matching import match_tags_by_name
from paperless.celery import app as celery_app
from paperless.config import GeneralConfig
from paperless.db import GnuPG
from documents.models import Document
from paperless.ai.client import run_llm_query
# Module-level logger, keyed to the module's dotted path so log routing/filters
# can target it; the name is updated to match the module's rename
# (llm_classifier -> ai_classifier).
-logger = logging.getLogger("paperless.ai.llm_classifier")
+logger = logging.getLogger("paperless.ai.ai_classifier")
def get_ai_document_classification(document: Document) -> dict:
    """
    Ask the LLM backend for classification suggestions for *document*.

    Returns the parsed suggestion dict (schema enforced by
    parse_ai_classification_response), or an empty dict when the query or
    parsing fails — this is deliberately best-effort and never raises.

    NOTE(review): `prompt` is constructed from *document* in lines omitted
    from this hunk — confirm against the full file.
    """
    try:
        result = run_llm_query(prompt)
        suggestions = parse_ai_classification_response(result)
        return suggestions or {}
    except Exception:
        # Fix: the original call was
        #   logger.exception("Error during LLM classification: %s", exc_info=True)
        # i.e. a "%s" placeholder with no argument (log-record formatting
        # error) plus a redundant exc_info kwarg — .exception() already
        # records the traceback.
        logger.exception("Error during LLM classification")
        return {}
# Renamed alongside the module move (llm_classifier -> ai_classifier).
-def parse_llm_classification_response(text: str) -> dict:
+def parse_ai_classification_response(text: str) -> dict:
"""
Parses LLM output and ensures it conforms to expected schema.
"""
# NOTE(review): the opening `try:` / json.loads call and the start of the
# returned dict are omitted from this hunk; this brace closes that dict.
# Confirm the full body against the complete file before merging.
}
except json.JSONDecodeError:
# fallback: try to extract JSON manually?
# NOTE(review): exc_info=True is redundant here — logger.exception()
# already records the traceback. Also consider truncating `text` before
# logging if responses can be large.
+ logger.exception(
+ "Failed to parse LLM classification response: %s",
+ text,
+ exc_info=True,
+ )
return {}