feat: 添加LLM配置持久化功能 [AC-AISVC-50]

- LLM配置保存到 config/llm_config.json 文件
- 服务重启后自动加载已保存的配置
- 与嵌入模型配置保持一致的持久化机制
This commit is contained in:
MerCry 2026-02-26 19:30:26 +08:00
parent 15016d3448
commit f81d18a517
1 changed file with 39 additions and 4 deletions

View File

@@ -5,8 +5,10 @@ LLM Provider Factory and Configuration Management.
Design pattern: Factory pattern for pluggable LLM providers. Design pattern: Factory pattern for pluggable LLM providers.
""" """
import json
import logging import logging
from dataclasses import dataclass, field from dataclasses import dataclass, field
from pathlib import Path
from typing import Any from typing import Any
from app.services.llm.base import LLMClient, LLMConfig from app.services.llm.base import LLMClient, LLMConfig
@@ -14,6 +16,8 @@ from app.services.llm.openai_client import OpenAIClient
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
LLM_CONFIG_FILE = Path("config/llm_config.json")
@dataclass @dataclass
class LLMProviderInfo: class LLMProviderInfo:
@@ -257,7 +261,7 @@ class LLMProviderFactory:
class LLMConfigManager: class LLMConfigManager:
""" """
Manager for LLM configuration. Manager for LLM configuration.
[AC-ASA-16, AC-ASA-17, AC-ASA-18] Configuration management with hot-reload. [AC-ASA-16, AC-ASA-17, AC-ASA-18] Configuration management with hot-reload and persistence.
""" """
def __init__(self): def __init__(self):
@@ -275,11 +279,40 @@ class LLMConfigManager:
} }
self._client: LLMClient | None = None self._client: LLMClient | None = None
self._load_from_file()
def _load_from_file(self) -> None:
"""Load configuration from file if exists."""
try:
if LLM_CONFIG_FILE.exists():
with open(LLM_CONFIG_FILE, 'r', encoding='utf-8') as f:
saved = json.load(f)
self._current_provider = saved.get("provider", self._current_provider)
saved_config = saved.get("config", {})
if saved_config:
self._current_config.update(saved_config)
logger.info(f"[AC-ASA-16] Loaded LLM config from file: provider={self._current_provider}")
except Exception as e:
logger.warning(f"[AC-ASA-16] Failed to load LLM config from file: {e}")
def _save_to_file(self) -> None:
"""Save configuration to file."""
try:
LLM_CONFIG_FILE.parent.mkdir(parents=True, exist_ok=True)
with open(LLM_CONFIG_FILE, 'w', encoding='utf-8') as f:
json.dump({
"provider": self._current_provider,
"config": self._current_config,
}, f, indent=2, ensure_ascii=False)
logger.info(f"[AC-ASA-16] Saved LLM config to file: provider={self._current_provider}")
except Exception as e:
logger.error(f"[AC-ASA-16] Failed to save LLM config to file: {e}")
def get_current_config(self) -> dict[str, Any]: def get_current_config(self) -> dict[str, Any]:
"""Get current LLM configuration.""" """Get current LLM configuration."""
return { return {
"provider": self._current_provider, "provider": self._current_provider,
"config": self._current_config, "config": self._current_config.copy(),
} }
async def update_config( async def update_config(
@ -289,7 +322,7 @@ class LLMConfigManager:
) -> bool: ) -> bool:
""" """
Update LLM configuration. Update LLM configuration.
[AC-ASA-16] Hot-reload configuration. [AC-ASA-16] Hot-reload configuration with persistence.
Args: Args:
provider: Provider name provider: Provider name
@ -311,6 +344,8 @@ class LLMConfigManager:
self._current_provider = provider self._current_provider = provider
self._current_config = validated_config self._current_config = validated_config
self._save_to_file()
logger.info(f"[AC-ASA-16] LLM config updated: provider={provider}") logger.info(f"[AC-ASA-16] LLM config updated: provider={provider}")
return True return True
@ -365,7 +400,7 @@ class LLMConfigManager:
test_provider = provider or self._current_provider test_provider = provider or self._current_provider
test_config = config if config else self._current_config test_config = config if config else self._current_config
logger.info(f"[AC-ASA-17] Test connection: provider={test_provider}, config={test_config}") logger.info(f"[AC-ASA-17] Test connection: provider={test_provider}, model={test_config.get('model')}")
if test_provider not in LLM_PROVIDERS: if test_provider not in LLM_PROVIDERS:
return { return {