ai-robot-core/ai-service-admin/src/views/rag-lab/index.vue

546 lines
14 KiB
Vue
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

<template>
<!-- RAG Lab page: left column = experiment inputs, right column = tabbed results. -->
<div class="rag-lab-page">
<div class="page-header">
<h1 class="page-title">RAG 实验室</h1>
<p class="page-desc">测试检索增强生成效果查看检索结果和 AI 响应</p>
</div>
<el-row :gutter="24">
<!-- Left: debug input form (query, KB scope, LLM override, retrieval params). -->
<el-col :xs="24" :sm="24" :md="10" :lg="10">
<el-card shadow="hover" class="input-card">
<template #header>
<div class="card-header">
<div class="header-left">
<div class="icon-wrapper">
<el-icon><Edit /></el-icon>
</div>
<span class="header-title">调试输入</span>
</div>
</div>
</template>
<el-form label-position="top">
<el-form-item label="查询 Query">
<el-input
v-model="query"
type="textarea"
:rows="4"
placeholder="输入测试问题..."
/>
</el-form-item>
<!-- Multi-select over knowledge bases loaded by fetchKnowledgeBases(). -->
<el-form-item label="知识库范围">
<el-select
v-model="kbIds"
multiple
placeholder="请选择知识库"
style="width: 100%"
:loading="kbLoading"
:teleported="true"
:popper-options="{ modifiers: [{ name: 'flip', enabled: true }, { name: 'preventOverflow', enabled: true }] }"
>
<el-option
v-for="kb in knowledgeBases"
:key="kb.id"
:label="`${kb.name} (${kb.documentCount}个文档)`"
:value="kb.id"
/>
</el-select>
</el-form-item>
<!-- Optional LLM provider override; empty selection uses the server default. -->
<el-form-item label="LLM 模型">
<LLMSelector
v-model="llmProvider"
:providers="llmProviders"
:loading="llmLoading"
:current-provider="currentLLMProvider"
placeholder="使用默认配置"
clearable
@change="handleLLMChange"
/>
</el-form-item>
<!-- Retrieval / generation parameters bound to the Pinia-backed refs. -->
<el-form-item label="参数配置">
<div class="param-item">
<span class="label">Top-K</span>
<el-input-number v-model="topK" :min="1" :max="10" />
</div>
<div class="param-item">
<span class="label">Score Threshold</span>
<el-slider
v-model="scoreThreshold"
:min="0"
:max="1"
:step="0.1"
show-input
/>
</div>
<div class="param-item">
<span class="label">生成 AI 回复</span>
<el-switch v-model="generateResponse" />
</div>
<!-- Stream toggle is only meaningful when generation is enabled. -->
<div class="param-item" v-if="generateResponse">
<span class="label">流式输出</span>
<el-switch v-model="streamOutput" />
</div>
</el-form-item>
<el-button
type="primary"
block
@click="handleRun"
:loading="loading || streaming"
>
{{ streaming ? '生成中...' : '运行实验' }}
</el-button>
<!-- Stop button appears only while an SSE stream is in flight. -->
<el-button
v-if="streaming"
type="danger"
block
@click="handleStopStream"
style="margin-top: 10px;"
>
停止生成
</el-button>
</el-form>
</el-card>
</el-col>
<!-- Right: result tabs — retrieved chunks, final prompt, AI answer, diagnostics. -->
<el-col :xs="24" :sm="24" :md="14" :lg="14">
<el-tabs v-model="activeTab" type="border-card" class="result-tabs">
<el-tab-pane label="召回片段" name="retrieval">
<div v-if="retrievalResults.length === 0" class="placeholder-text">
暂无实验数据
</div>
<div v-else class="result-list">
<el-card
v-for="(item, index) in retrievalResults"
:key="index"
class="result-card"
shadow="never"
>
<div class="result-header">
<el-tag size="small" type="primary">Score: {{ item.score.toFixed(4) }}</el-tag>
<span class="source">来源: {{ item.source }}</span>
</div>
<div class="result-content">{{ item.content }}</div>
</el-card>
</div>
</el-tab-pane>
<el-tab-pane label="最终 Prompt" name="prompt">
<div v-if="!finalPrompt" class="placeholder-text">
等待实验运行...
</div>
<div v-else class="prompt-view">
<pre><code>{{ finalPrompt }}</code></pre>
</div>
</el-tab-pane>
<!-- Streaming and non-streaming renderers share this pane; chosen by streamOutput. -->
<el-tab-pane label="AI 回复" name="ai-response" v-if="generateResponse">
<StreamOutput
v-if="streamOutput"
:content="streamContent"
:is-streaming="streaming"
:error="streamError"
/>
<AIResponseViewer
v-else
:response="aiResponse"
/>
</el-tab-pane>
<el-tab-pane label="诊断信息" name="diagnostics">
<div v-if="!diagnostics" class="placeholder-text">
等待实验运行...
</div>
<div v-else class="diagnostics-view">
<pre><code>{{ JSON.stringify(diagnostics, null, 2) }}</code></pre>
</div>
</el-tab-pane>
</el-tabs>
</el-col>
</el-row>
</div>
</template>
<script setup lang="ts">
import { ref, onMounted } from 'vue'
import { ElMessage } from 'element-plus'
import { Edit } from '@element-plus/icons-vue'
import { runRagExperiment, createSSEConnection, type AIResponse, type RetrievalResult } from '@/api/rag'
import { getLLMProviders, getLLMConfig, type LLMProviderInfo } from '@/api/llm'
import { listKnowledgeBases } from '@/api/kb'
import { useRagLabStore } from '@/stores/ragLab'
import { storeToRefs } from 'pinia'
import AIResponseViewer from '@/components/rag/AIResponseViewer.vue'
import StreamOutput from '@/components/rag/StreamOutput.vue'
import LLMSelector from '@/components/rag/LLMSelector.vue'
// Shape of a knowledge base option as consumed by the KB selector in the
// template (kb.id / kb.name / kb.documentCount). Assumed to match the
// payload of listKnowledgeBases() — TODO confirm against the /kb API.
interface KnowledgeBase {
id: string
name: string
documentCount: number
}
const ragLabStore = useRagLabStore()
// Experiment inputs live in the Pinia store (so they persist across route
// changes); storeToRefs keeps each destructured field reactive.
const {
query,
kbIds,
llmProvider,
topK,
scoreThreshold,
generateResponse,
streamOutput
} = storeToRefs(ragLabStore)
// Busy flags for the three async data paths.
const loading = ref(false) // non-streaming experiment request in flight
const kbLoading = ref(false) // knowledge-base list fetch
const llmLoading = ref(false) // LLM provider/config fetch
const streaming = ref(false) // SSE stream currently open
const activeTab = ref('retrieval') // which result tab is shown
// Selector option data.
const knowledgeBases = ref<KnowledgeBase[]>([])
const llmProviders = ref<LLMProviderInfo[]>([])
const currentLLMProvider = ref('') // server-side default provider name
// Experiment outputs (cleared by clearResults() before each run).
const retrievalResults = ref<RetrievalResult[]>([])
const finalPrompt = ref('')
const aiResponse = ref<AIResponse | null>(null)
const diagnostics = ref<any>(null)
// Streaming-mode accumulators.
const streamContent = ref('')
const streamError = ref<string | null>(null)
const totalLatencyMs = ref(0)
// Abort callback returned by createSSEConnection; null when no stream is open.
let abortStream: (() => void) | null = null
/**
 * Load the knowledge-base options for the "知识库范围" selector.
 * Best-effort: a failure is logged and leaves the current list untouched.
 */
const fetchKnowledgeBases = async () => {
  kbLoading.value = true
  try {
    const response: any = await listKnowledgeBases()
    knowledgeBases.value = response.data || []
  } catch (err) {
    console.error('Failed to fetch knowledge bases:', err)
  } finally {
    // Always drop the selector's loading spinner.
    kbLoading.value = false
  }
}
/**
 * Fetch the available LLM providers and the currently configured default
 * in parallel. Failures are logged; the selector simply stays empty.
 */
const fetchLLMProviders = async () => {
  llmLoading.value = true
  try {
    const results: [any, any] = await Promise.all([
      getLLMProviders(),
      getLLMConfig()
    ])
    const providersRes = results[0]
    const configRes = results[1]
    llmProviders.value = providersRes?.providers || []
    currentLLMProvider.value = configRes?.provider || ''
  } catch (err) {
    console.error('Failed to fetch LLM providers:', err)
  } finally {
    llmLoading.value = false
  }
}
/** Sync the store's provider name when the LLMSelector changes (cleared → ''). */
const handleLLMChange = (provider: LLMProviderInfo | undefined) => {
  llmProvider.value = provider?.name ?? ''
}
/**
 * Entry point for the "运行实验" button: validates the query, wipes the
 * previous results, then dispatches to the streaming or the blocking runner.
 */
const handleRun = async () => {
  // Guard: an empty / whitespace-only query is rejected up front.
  if (!query.value.trim()) {
    ElMessage.warning('请输入查询 Query')
    return
  }
  clearResults()
  // Streaming only applies when an AI response is actually requested.
  const useStream = generateResponse.value && streamOutput.value
  if (useStream) {
    await runStreamExperiment()
    return
  }
  await runNormalExperiment()
}
/**
 * Run the experiment as a single blocking request and populate all result
 * panes. Accepts both snake_case and camelCase response fields since the
 * backend shape varies — TODO confirm which one the API actually emits.
 */
const runNormalExperiment = async () => {
  loading.value = true
  try {
    const payload = {
      query: query.value,
      kb_ids: kbIds.value,
      top_k: topK.value,
      score_threshold: scoreThreshold.value,
      llm_provider: llmProvider.value || undefined, // empty string → server default
      generate_response: generateResponse.value
    }
    const res: any = await runRagExperiment(payload)
    retrievalResults.value = res.retrieval_results || res.retrievalResults || []
    finalPrompt.value = res.final_prompt || res.finalPrompt || ''
    aiResponse.value = res.ai_response || res.aiResponse || null
    diagnostics.value = res.diagnostics || null
    totalLatencyMs.value = res.total_latency_ms || res.totalLatencyMs || 0
    // Jump straight to the most relevant tab for this run.
    activeTab.value = generateResponse.value ? 'ai-response' : 'retrieval'
    ElMessage.success('实验运行成功')
  } catch (err: any) {
    console.error(err)
    ElMessage.error(err?.message || '实验运行失败')
  } finally {
    loading.value = false
  }
}
/**
 * Run the experiment in streaming mode over SSE.
 *
 * Each SSE payload is expected to be a JSON envelope discriminated by
 * `type`: 'content' (incremental answer text), 'retrieval' (chunk list),
 * 'prompt' (final prompt), 'complete' (usage/latency summary) or 'error'.
 * Non-JSON payloads are appended verbatim to the answer text.
 *
 * FIX: the abort handle is now cleared in every terminal path (complete,
 * server error event, transport error, close). Previously `abortStream`
 * kept pointing at the finished connection, so a later handleStopStream()
 * would "abort" an already-closed stream.
 */
const runStreamExperiment = async () => {
  streaming.value = true
  streamContent.value = ''
  streamError.value = null
  activeTab.value = 'ai-response'
  abortStream = createSSEConnection(
    '/admin/rag/experiments/stream',
    {
      query: query.value,
      kb_ids: kbIds.value,
      top_k: topK.value,
      score_threshold: scoreThreshold.value,
      llm_provider: llmProvider.value || undefined, // empty string → server default
      generate_response: true
    },
    // onMessage: dispatch on the envelope's `type` tag.
    (data: string) => {
      try {
        const parsed = JSON.parse(data)
        if (parsed.type === 'content') {
          streamContent.value += parsed.content || ''
        } else if (parsed.type === 'retrieval') {
          retrievalResults.value = parsed.results || []
        } else if (parsed.type === 'prompt') {
          finalPrompt.value = parsed.prompt || ''
        } else if (parsed.type === 'complete') {
          // Freeze the accumulated text plus usage stats for the viewer.
          aiResponse.value = {
            content: streamContent.value,
            prompt_tokens: parsed.prompt_tokens,
            completion_tokens: parsed.completion_tokens,
            total_tokens: parsed.total_tokens,
            latency_ms: parsed.latency_ms,
            model: parsed.model
          }
          totalLatencyMs.value = parsed.total_latency_ms || 0
          streaming.value = false
          abortStream = null // stream finished on its own — drop stale handle
          ElMessage.success('生成完成')
        } else if (parsed.type === 'error') {
          streamError.value = parsed.message || '流式输出错误'
          streaming.value = false
          abortStream = null // terminal server-side error
          ElMessage.error(streamError.value || '未知错误')
        }
      } catch {
        // Not JSON — treat the chunk as raw streamed text.
        streamContent.value += data
      }
    },
    // onError: transport-level failure (connection dropped, HTTP error, …).
    (error: Error) => {
      streaming.value = false
      abortStream = null
      streamError.value = error.message
      ElMessage.error(error.message)
    },
    // onClose: connection ended without an explicit complete/error event.
    () => {
      streaming.value = false
      abortStream = null
    }
  )
}
/** User pressed "停止生成": abort the SSE connection (if any) and reset state. */
const handleStopStream = () => {
  abortStream?.()
  abortStream = null
  streaming.value = false
  ElMessage.info('已停止生成')
}
/** Wipe every output pane and streaming accumulator before a fresh run. */
const clearResults = () => {
  // Streaming state first, then the static result panes.
  streamContent.value = ''
  streamError.value = null
  totalLatencyMs.value = 0
  aiResponse.value = null
  diagnostics.value = null
  finalPrompt.value = ''
  retrievalResults.value = []
}
// Populate both selectors once on mount. The two fetches are fire-and-forget
// (effectively parallel) and each handles its own errors internally.
onMounted(() => {
fetchKnowledgeBases()
fetchLLMProviders()
})
</script>
<style scoped>
/* ---- Page shell ---- */
.rag-lab-page {
padding: 24px;
min-height: calc(100vh - 60px);
}
.page-header {
margin-bottom: 24px;
}
.page-title {
margin: 0 0 8px 0;
font-size: 24px;
font-weight: 700;
color: var(--text-primary);
letter-spacing: -0.5px;
}
.page-desc {
margin: 0;
font-size: 14px;
color: var(--text-secondary);
line-height: 1.6;
}
/* ---- Left input card (entrance animation) ---- */
.input-card {
animation: fadeInUp 0.5s ease-out;
}
@keyframes fadeInUp {
from {
opacity: 0;
transform: translateY(20px);
}
to {
opacity: 1;
transform: translateY(0);
}
}
/* ---- Card header (icon + title) ---- */
.card-header {
display: flex;
justify-content: space-between;
align-items: center;
padding: 0;
}
.header-left {
display: flex;
align-items: center;
gap: 12px;
}
.icon-wrapper {
width: 36px;
height: 36px;
display: flex;
align-items: center;
justify-content: center;
background-color: var(--primary-lighter);
border-radius: 10px;
color: var(--primary-color);
font-size: 18px;
}
.header-title {
font-size: 15px;
font-weight: 600;
color: var(--text-primary);
}
/* ---- Parameter rows (label + control) ---- */
.param-item {
display: flex;
align-items: center;
margin-bottom: 16px;
gap: 16px;
}
.param-item .label {
width: 140px;
font-size: 13px;
font-weight: 500;
color: var(--text-secondary);
flex-shrink: 0;
}
/* :deep() pierces scoped styles into the Element Plus slider. */
.param-item :deep(.el-slider) {
flex: 1;
}
/* ---- Right result tabs ---- */
.result-tabs {
animation: fadeInUp 0.6s ease-out;
}
.result-tabs :deep(.el-tabs__header) {
border-radius: 12px 12px 0 0;
}
.placeholder-text {
color: var(--text-tertiary);
text-align: center;
padding: 60px 20px;
font-size: 14px;
}
/* Scrollable list of retrieved chunks. */
.result-list {
max-height: 600px;
overflow-y: auto;
padding-right: 8px;
}
.result-card {
margin-bottom: 16px;
border: 1px solid var(--border-color);
}
.result-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 12px;
}
.source {
font-size: 12px;
color: var(--text-tertiary);
}
.result-content {
font-size: 14px;
line-height: 1.7;
color: var(--text-primary);
}
/* ---- Prompt / diagnostics code views (monospace, wrapped) ---- */
.prompt-view,
.diagnostics-view {
background-color: var(--bg-tertiary);
padding: 16px;
border-radius: 10px;
max-height: 600px;
overflow-y: auto;
}
.prompt-view pre,
.diagnostics-view pre {
margin: 0;
white-space: pre-wrap;
word-wrap: break-word;
font-family: var(--font-mono);
font-size: 13px;
line-height: 1.6;
color: var(--text-primary);
}
/* ---- Mobile: stack parameter rows and tighten spacing ---- */
@media (max-width: 768px) {
.rag-lab-page {
padding: 16px;
}
.page-title {
font-size: 20px;
}
.param-item {
flex-direction: column;
align-items: flex-start;
gap: 8px;
}
.param-item .label {
width: 100%;
}
}
</style>