fix: 修复编排器使用错误的LLM配置问题 [AC-AISVC-50]

- 移除编排器中硬编码的LLMConfig创建
- 让LLM客户端使用自己的默认配置(从LLMConfigManager获取)
- 修复流式生成方法同样的问题
This commit is contained in:
MerCry 2026-02-26 19:58:55 +08:00
parent f81d18a517
commit a9d1079294
1 changed file with 2 additions and 9 deletions

View File

@@ -119,13 +119,7 @@ class OrchestratorService:
max_evidence_tokens=getattr(settings, "rag_max_evidence_tokens", 2000),
enable_rag=True,
)
self._llm_config = LLMConfig(
model=getattr(settings, "llm_model", "gpt-4o-mini"),
max_tokens=getattr(settings, "llm_max_tokens", 2048),
temperature=getattr(settings, "llm_temperature", 0.7),
timeout_seconds=getattr(settings, "llm_timeout_seconds", 30),
max_retries=getattr(settings, "llm_max_retries", 3),
)
self._llm_config: LLMConfig | None = None
async def generate(
self,
@@ -345,7 +339,6 @@ class OrchestratorService:
try:
ctx.llm_response = await self._llm_client.generate(
messages=messages,
config=self._llm_config,
)
ctx.diagnostics["llm_mode"] = "live"
ctx.diagnostics["llm_model"] = ctx.llm_response.model
@@ -627,7 +620,7 @@ class OrchestratorService:
"""
messages = self._build_llm_messages(ctx)
async for chunk in self._llm_client.stream_generate(messages, self._llm_config):
async for chunk in self._llm_client.stream_generate(messages):
if not state_machine.can_send_message():
break