[AC-AISVC-50] 合入第一个稳定版本 #2

Merged
MerCry merged 32 commits from feature/prompt-unification-and-logging into main 2026-02-26 13:03:31 +00:00
7 changed files with 259 additions and 0 deletions
Showing only changes of commit adc78a6b9b - Show all commits

17
.env.example Normal file
View File

@ -0,0 +1,17 @@
# AI Service Environment Variables
# Copy this file to .env and modify as needed.
# These are consumed by docker-compose.yaml via ${VAR:-default} substitution.

# LLM Configuration (OpenAI-compatible API)
AI_SERVICE_LLM_PROVIDER=openai
AI_SERVICE_LLM_API_KEY=your-api-key-here
AI_SERVICE_LLM_BASE_URL=https://api.openai.com/v1
AI_SERVICE_LLM_MODEL=gpt-4o-mini

# If using DeepSeek: the provider stays "openai" because DeepSeek exposes an
# OpenAI-compatible endpoint — only the key, base URL and model change.
# AI_SERVICE_LLM_PROVIDER=openai
# AI_SERVICE_LLM_API_KEY=your-deepseek-api-key
# AI_SERVICE_LLM_BASE_URL=https://api.deepseek.com/v1
# AI_SERVICE_LLM_MODEL=deepseek-chat

# Ollama Configuration (optional; default points at the compose service name)
# AI_SERVICE_OLLAMA_BASE_URL=http://ollama:11434

View File

@ -0,0 +1,19 @@
# Keep the build context small and free of secrets / local state.

# Dependencies and build output (rebuilt inside the image)
node_modules
dist

# Environment files — never bake credentials into the image
.env
.env.local
.env.*.local

# Logs
*.log

# Editor / IDE state
.idea/
.vscode/
*.swp
*.swo

# VCS metadata
.git
.gitignore

# Docs, except the README
*.md
!README.md

View File

@ -0,0 +1,22 @@
# syntax=docker/dockerfile:1
# AI Service Admin Frontend Dockerfile
#
# Stage 1: build the static bundle with the full Node toolchain.
FROM node:20-alpine AS builder
WORKDIR /app
# Copy lockfiles first so the npm install layer stays cached until
# dependencies actually change.
COPY package*.json ./
RUN npm ci
COPY . .
RUN npm run build

# Stage 2: serve the built assets from a minimal nginx image.
# Pinned minor version instead of the floating `alpine` tag so rebuilds
# are reproducible.
FROM nginx:1.27-alpine
COPY --from=builder /app/dist /usr/share/nginx/html
COPY nginx.conf /etc/nginx/conf.d/default.conf
EXPOSE 80
# nginx performs a graceful shutdown on SIGQUIT; the default SIGTERM from
# `docker stop` would terminate it abruptly.
STOPSIGNAL SIGQUIT
CMD ["nginx", "-g", "daemon off;"]

View File

@ -0,0 +1,28 @@
# Vhost for the admin SPA: serves the static bundle and proxies /api/ to the backend.
server {
listen 80;
server_name localhost;
# Static assets copied in by the frontend Dockerfile.
root /usr/share/nginx/html;
index index.html;
# SPA fallback: unknown paths resolve to index.html so client-side routing works.
location / {
try_files $uri $uri/ /index.html;
}
# Proxy API calls to the backend container (compose service name `ai-service`).
# The trailing slash on proxy_pass strips the /api/ prefix before forwarding.
location /api/ {
proxy_pass http://ai-service:8080/;
proxy_http_version 1.1;
# WebSocket upgrade headers. NOTE(review): Connection is forced to 'upgrade'
# for every request, not just actual upgrades — the canonical conditional form
# is a `map $http_upgrade` at http level; confirm plain requests are unaffected.
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
# Preserve original host and client address for backend logging/auth.
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_cache_bypass $http_upgrade;
# Generous read timeout to accommodate long-running LLM responses.
proxy_read_timeout 300s;
proxy_connect_timeout 75s;
}
# Compress text responses larger than 1 KB.
gzip on;
gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
gzip_min_length 1000;
}

53
ai-service/.dockerignore Normal file
View File

@ -0,0 +1,53 @@
# Keep the build context small and free of secrets / local state.

# Python bytecode and native build artifacts
__pycache__
*.py[cod]
*$py.class
*.so
.Python

# Packaging / build output
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Test, coverage and type-checker caches
.pytest_cache
.coverage
htmlcov/
.tox/
.hypothesis/
.mypy_cache/
.ruff_cache/

# Environment files — never bake credentials into the image
.env
.env.local
.env.*.local

# Logs and translation templates
*.log
*.pot
*.pyc

# Editor / IDE state
.idea/
.vscode/
*.swp
*.swo

# Dev-only directories and docs (README kept)
tests/
scripts/
*.md
!README.md

# VCS / CI metadata
.git
.gitignore
.gitea

# Ad-hoc local debugging script
check_qdrant.py

32
ai-service/Dockerfile Normal file
View File

@ -0,0 +1,32 @@
# syntax=docker/dockerfile:1
# AI Service Backend Dockerfile
#
# Stage 1: resolve and install Python dependencies with uv.
FROM python:3.11-slim AS builder
WORKDIR /app
RUN pip install --no-cache-dir uv
# Only the project metadata is copied, so this layer stays cached until the
# dependency set changes. NOTE(review): `uv pip install .` builds the project
# itself from pyproject.toml alone — confirm the package metadata allows a
# build without the source tree, or install only the dependency set here.
COPY pyproject.toml .
RUN uv pip install --system --no-cache-dir .

# Stage 2: minimal runtime image without build tooling.
FROM python:3.11-slim
WORKDIR /app
# Unprivileged runtime user; port 8080 is above 1024, so no extra caps needed.
RUN groupadd -r appgroup && useradd -r -g appgroup appuser
COPY --from=builder /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages
COPY --from=builder /usr/local/bin /usr/local/bin
# Set ownership at copy time: a follow-up `RUN chown -R` would duplicate the
# whole app tree in an extra layer.
COPY --chown=appuser:appgroup app ./app
USER appuser
EXPOSE 8080
# PYTHONUNBUFFERED: stream logs immediately to docker logs.
# PYTHONDONTWRITEBYTECODE: no .pyc files in a read-mostly image.
ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8080"]

88
docker-compose.yaml Normal file
View File

@ -0,0 +1,88 @@
# Compose stack for the AI service: backend API, admin frontend, Postgres, Qdrant.
# NOTE(review): the `version` key is obsolete and ignored by Compose v2; kept
# only for compatibility with legacy docker-compose 1.x installs.
version: '3.8'

services:
  ai-service:
    build:
      context: ./ai-service
      dockerfile: Dockerfile
    container_name: ai-service
    restart: unless-stopped
    ports:
      - "8080:8080"
    environment:
      - AI_SERVICE_DEBUG=false
      - AI_SERVICE_LOG_LEVEL=INFO
      - AI_SERVICE_DATABASE_URL=postgresql+asyncpg://postgres:postgres@postgres:5432/ai_service
      - AI_SERVICE_QDRANT_URL=http://qdrant:6333
      # LLM settings come from the host environment / .env file, with defaults
      # matching .env.example.
      - AI_SERVICE_LLM_PROVIDER=${AI_SERVICE_LLM_PROVIDER:-openai}
      - AI_SERVICE_LLM_API_KEY=${AI_SERVICE_LLM_API_KEY:-}
      - AI_SERVICE_LLM_BASE_URL=${AI_SERVICE_LLM_BASE_URL:-https://api.openai.com/v1}
      - AI_SERVICE_LLM_MODEL=${AI_SERVICE_LLM_MODEL:-gpt-4o-mini}
      - AI_SERVICE_OLLAMA_BASE_URL=${AI_SERVICE_OLLAMA_BASE_URL:-http://ollama:11434}
    depends_on:
      postgres:
        condition: service_healthy
      qdrant:
        condition: service_started
    networks:
      - ai-network
    healthcheck:
      # The python:3.11-slim runtime image ships no curl/wget, so a curl-based
      # probe would always fail. Probe /health with the interpreter's urllib
      # instead (urlopen raises on non-2xx, failing the check).
      test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8080/health', timeout=5)"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

  ai-service-admin:
    build:
      context: ./ai-service-admin
      dockerfile: Dockerfile
    container_name: ai-service-admin
    restart: unless-stopped
    ports:
      - "3000:80"
    depends_on:
      - ai-service
    networks:
      - ai-network

  postgres:
    image: postgres:15-alpine
    container_name: ai-postgres
    restart: unless-stopped
    environment:
      # NOTE(review): dev-only default credentials; override for any shared
      # deployment (DATABASE_URL above must then be updated to match).
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=postgres
      - POSTGRES_DB=ai_service
    volumes:
      - postgres_data:/var/lib/postgresql/data
      # One-time schema bootstrap, run only on first container start.
      - ./ai-service/scripts/init_db.sql:/docker-entrypoint-initdb.d/init_db.sql:ro
    ports:
      - "5432:5432"
    networks:
      - ai-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres -d ai_service"]
      interval: 10s
      timeout: 5s
      retries: 5

  qdrant:
    # TODO(review): pin to a specific qdrant release — `latest` makes pulls
    # non-reproducible and can break on upstream changes.
    image: qdrant/qdrant:latest
    container_name: ai-qdrant
    restart: unless-stopped
    ports:
      - "6333:6333"
      - "6334:6334"
    volumes:
      - qdrant_data:/qdrant/storage
    networks:
      - ai-network

networks:
  ai-network:
    driver: bridge

volumes:
  postgres_data:
  qdrant_data: