/**
 * Supported LLM embedding backends.
 *
 * Values are the string identifiers sent to / received from the backend
 * settings API. The former 'local' backend was replaced by 'huggingface'
 * (per the diff hunk this block resolves); only these two remain valid.
 */
export const LLMEmbeddingBackendConfig = {
  OPENAI: 'openai',
  HUGGINGFACE: 'huggingface',
};
export const LLMBackendConfig = {
elif self.settings.llm_backend == "openai":
return OpenAI(
model=self.settings.llm_model or "gpt-3.5-turbo",
- api_key=self.settings.openai_api_key,
+ api_key=self.settings.llm_api_key,
)
else:
raise ValueError(f"Unsupported LLM backend: {self.settings.llm_backend}")
data["barcode_tag_mapping"] = None
if "language" in data and data["language"] == "":
data["language"] = None
+ if "llm_api_key" in data and data["llm_api_key"] is not None:
+ if data["llm_api_key"] == "":
+ data["llm_api_key"] = None
+ elif len(data["llm_api_key"].replace("*", "")) == 0:
+ del data["llm_api_key"]
return super().run_validation(data)
def update(self, instance, validated_data):
def test_get_llm_openai(mock_ai_config, mock_openai_llm):
mock_ai_config.llm_backend = "openai"
mock_ai_config.llm_model = "test_model"
- mock_ai_config.openai_api_key = "test_api_key"
+ mock_ai_config.llm_api_key = "test_api_key"
client = AIClient()