diff --git a/ai-service-admin/src/api/llm.ts b/ai-service-admin/src/api/llm.ts
index b474dcc..9f66c08 100644
--- a/ai-service-admin/src/api/llm.ts
+++ b/ai-service-admin/src/api/llm.ts
@@ -6,7 +6,9 @@ import type {
   LLMTestResult,
   LLMTestRequest,
   LLMProvidersResponse,
-  LLMConfigUpdateResponse
+  LLMUsageTypesResponse,
+  LLMConfigUpdateResponse,
+  LLMAllConfigs
 } from '@/types/llm'
 
 export function getLLMProviders(): Promise<LLMProvidersResponse> {
@@ -16,10 +18,22 @@ export function getLLMProviders(): Promise<LLMProvidersResponse> {
   })
 }
 
-export function getLLMConfig(): Promise<LLMConfig> {
+export function getLLMUsageTypes(): Promise<LLMUsageTypesResponse> {
+  return request({
+    url: '/admin/llm/usage-types',
+    method: 'get'
+  })
+}
+
+export function getLLMConfig(usageType?: string): Promise<LLMConfig | LLMAllConfigs> {
+  const params: Record<string, string> = {}
+  if (usageType) {
+    params.usage_type = usageType
+  }
   return request({
     url: '/admin/llm/config',
-    method: 'get'
+    method: 'get',
+    params
   })
 }
 
@@ -46,5 +60,7 @@ export type {
   LLMTestResult,
   LLMTestRequest,
   LLMProvidersResponse,
-  LLMConfigUpdateResponse
+  LLMUsageTypesResponse,
+  LLMConfigUpdateResponse,
+  LLMAllConfigs
 }
diff --git a/ai-service-admin/src/stores/llm.ts b/ai-service-admin/src/stores/llm.ts
index 656ab29..0ea2d82 100644
--- a/ai-service-admin/src/stores/llm.ts
+++ b/ai-service-admin/src/stores/llm.ts
@@ -2,26 +2,40 @@ import { defineStore } from 'pinia'
 import { ref, computed } from 'vue'
 import {
   getLLMProviders,
+  getLLMUsageTypes,
   getLLMConfig,
   updateLLMConfig,
   testLLM,
   type LLMProviderInfo,
   type LLMConfig,
   type LLMConfigUpdate,
-  type LLMTestResult
+  type LLMTestResult,
+  type LLMUsageType,
+  type LLMAllConfigs
 } from '@/api/llm'
 
 export const useLLMStore = defineStore('llm', () => {
   const providers = ref<LLMProviderInfo[]>([])
-  const currentConfig = ref<LLMConfig>({
-    provider: '',
-    config: {}
+  const usageTypes = ref<LLMUsageType[]>([])
+  const allConfigs = ref<LLMAllConfigs>({
+    chat: { provider: '', config: {} },
+    kb_processing: { provider: '', config: {} }
   })
+  const currentUsageType = ref('chat')
   const loading = ref(false)
   const providersLoading = ref(false)
   const testResult = ref<LLMTestResult | null>(null)
   const testLoading = ref(false)
 
+  const currentConfig = computed(() => {
+    const config = allConfigs.value[currentUsageType.value as keyof LLMAllConfigs]
+    return {
+      provider: config?.provider || '',
+      config: config?.config || {},
+      usage_type: currentUsageType.value
+    }
+  })
+
   const currentProvider = computed(() => {
     return providers.value.find(p => p.name === currentConfig.value.provider)
   })
@@ -43,16 +57,29 @@ export const useLLMStore = defineStore('llm', () => {
     }
   }
 
+  const loadUsageTypes = async () => {
+    try {
+      const res: any = await getLLMUsageTypes()
+      usageTypes.value = res?.usage_types || res?.data?.usage_types || []
+    } catch (error) {
+      console.error('Failed to load LLM usage types:', error)
+      throw error
+    }
+  }
+
   const loadConfig = async () => {
     loading.value = true
     try {
       const res: any = await getLLMConfig()
-      const config = res?.data || res
-      if (config) {
-        currentConfig.value = {
-          provider: config.provider || '',
-          config: config.config || {},
-          updated_at: config.updated_at
+      const configs = res?.data || res
+      if (configs) {
+        if (configs.chat && configs.kb_processing) {
+          allConfigs.value = configs
+        } else {
+          allConfigs.value = {
+            chat: { provider: configs.provider || '', config: configs.config || {} },
+            kb_processing: { provider: configs.provider || '', config: configs.config || {} }
+          }
         }
       }
     } catch (error) {
@@ -68,7 +95,8 @@
     try {
       const updateData: LLMConfigUpdate = {
         provider: currentConfig.value.provider,
-        config: currentConfig.value.config
+        config: currentConfig.value.config,
+        usage_type: currentUsageType.value
       }
       await updateLLMConfig(updateData)
     } catch (error) {
@@ -86,7 +114,8 @@
       const result = await testLLM({
         test_prompt: testPrompt,
         provider: currentConfig.value.provider,
-        config: currentConfig.value.config
+        config: currentConfig.value.config,
+        usage_type: currentUsageType.value
       })
       testResult.value = result
       return result
@@ -103,7 +132,8 @@
   }
 
   const setProvider = (providerName: string) => {
-    currentConfig.value.provider = providerName
+    const usageTypeKey = currentUsageType.value as keyof LLMAllConfigs
+    allConfigs.value[usageTypeKey].provider = providerName
     const provider = providers.value.find(p => p.name === providerName)
     if (provider?.config_schema?.properties) {
      const newConfig: Record<string, any> = {}
@@ -127,14 +157,19 @@
         }
       }
     })
-      currentConfig.value.config = newConfig
+      allConfigs.value[usageTypeKey].config = newConfig
     } else {
-      currentConfig.value.config = {}
+      allConfigs.value[usageTypeKey].config = {}
     }
   }
 
   const updateConfigValue = (key: string, value: any) => {
-    currentConfig.value.config[key] = value
+    const usageTypeKey = currentUsageType.value as keyof LLMAllConfigs
+    allConfigs.value[usageTypeKey].config[key] = value
+  }
+
+  const setCurrentUsageType = (usageType: string) => {
+    currentUsageType.value = usageType
   }
 
   const clearTestResult = () => {
@@ -143,6 +178,9 @@
 
   return {
     providers,
+    usageTypes,
+    allConfigs,
+    currentUsageType,
     currentConfig,
     loading,
     providersLoading,
@@ -151,11 +189,13 @@
     currentProvider,
     configSchema,
     loadProviders,
+    loadUsageTypes,
     loadConfig,
     saveCurrentConfig,
     runTest,
     setProvider,
     updateConfigValue,
+    setCurrentUsageType,
     clearTestResult
   }
 })
diff --git a/ai-service-admin/src/types/llm.ts b/ai-service-admin/src/types/llm.ts
index e64bec2..22ae5ac 100644
--- a/ai-service-admin/src/types/llm.ts
+++ b/ai-service-admin/src/types/llm.ts
@@ -5,15 +5,32 @@ export interface LLMProviderInfo {
   config_schema: Record<string, any>
 }
 
+export interface LLMUsageType {
+  name: string
+  display_name: string
+  description: string
+}
+
 export interface LLMConfig {
   provider: string
   config: Record<string, any>
   updated_at?: string
 }
 
+export interface LLMConfigByUsage {
+  provider: string
+  config: Record<string, any>
+}
+
+export interface LLMAllConfigs {
+  chat: LLMConfigByUsage
+  kb_processing: LLMConfigByUsage
+}
+
 export interface LLMConfigUpdate {
   provider: string
   config?: Record<string, any>
+  usage_type?: string
 }
 
 export interface LLMTestResult {
@@ -31,12 +48,17 @@
   test_prompt?: string
   provider?: string
   config?: Record<string, any>
+  usage_type?: string
 }
 
 export interface LLMProvidersResponse {
   providers: LLMProviderInfo[]
 }
 
+export interface LLMUsageTypesResponse {
+  usage_types: LLMUsageType[]
+}
+
 export interface LLMConfigUpdateResponse {
   success: boolean
   message: string
diff --git a/ai-service-admin/src/views/admin/llm/index.vue b/ai-service-admin/src/views/admin/llm/index.vue
index 3d5ff66..fd91d3f 100644
--- a/ai-service-admin/src/views/admin/llm/index.vue
+++ b/ai-service-admin/src/views/admin/llm/index.vue
@@ -4,13 +4,7 @@

LLM 模型配置

-

配置和管理系统使用的大语言模型,支持多种提供者切换。配置修改后需保存才能生效。

-
-
-
- - 上次更新: {{ formatDate(currentConfig.updated_at) }} -
+

配置和管理系统使用的大语言模型,支持多种提供者切换。可以为不同用途配置不同的模型。

@@ -30,6 +24,26 @@
+
+ + + + + {{ ut.display_name }} + + + +
+ + +