diff --git a/backend/.env.example b/backend/.env.example
new file mode 100644
index 0000000..ef4c292
--- /dev/null
+++ b/backend/.env.example
@@ -0,0 +1,9 @@
+PORT=3000
+MONGODB_URI=mongodb://localhost:27017/multi_llm_chat
+OPENAI_API_KEY=your_openai_key_here
+# Optional: custom OpenAI base URL
+OPENAI_BASE_URL=https://api.openai.com
+
+# DeepSeek-compatible (same protocol as OpenAI)
+DEEPSEEK_API_KEY=your_deepseek_key_here
+DEEPSEEK_BASE_URL=https://api.deepseek.com
diff --git a/backend/.gitignore b/backend/.gitignore
new file mode 100644
index 0000000..9c97bbd
--- /dev/null
+++ b/backend/.gitignore
@@ -0,0 +1,3 @@
+node_modules
+dist
+.env
diff --git a/backend/README.md b/backend/README.md
new file mode 100644
index 0000000..85fc1a3
--- /dev/null
+++ b/backend/README.md
@@ -0,0 +1,31 @@
+# Backend Service
+
+Node.js + Express + TypeScript + Mongoose + OpenAI example.
+
+## Environment variables
+Copy `.env.example` to `.env` and fill in:
+
+```
+PORT=3000
+MONGODB_URI=mongodb://localhost:27017/multi_llm_chat
+OPENAI_API_KEY=sk-...
+# Optional OpenAI base URL (proxy): OPENAI_BASE_URL=https://api.openai.com
+DEEPSEEK_API_KEY=sk-deepseek...
+DEEPSEEK_BASE_URL=https://api.deepseek.com
+```
+
+## Running
+```bash
+npm install
+npm run dev
+```
+
+## API
+- POST /api/chat { modelId, messages } => { text }
+- POST /api/chat/stream (SSE) emits `data:` lines until `data: [DONE]`
+- POST /api/conversations save a conversation
+- GET /api/conversations/:id fetch a conversation
+- GET /api/health health check
+
+## Notes
+OpenAI and DeepSeek are both wired up (DeepSeek is selected automatically when the model name contains "deepseek" and uses the OpenAI-compatible /v1/chat/completions protocol). Additional provider logic can be added in `llmService.ts`.
diff --git a/backend/package.json b/backend/package.json
new file mode 100644
index 0000000..58f4c27
--- /dev/null
+++ b/backend/package.json
@@ -0,0 +1,30 @@
+{
+  "name": "multi-llm-chat-backend",
+  "private": true,
+  "version": "0.1.0",
+  "type": "module",
+  "scripts": {
+    "dev": "tsx watch src/index.ts",
+    "start": "node dist/index.js",
+    "build": "tsc -p tsconfig.json"
+  },
+  "dependencies": {
+    "axios": "^1.7.2",
+    "bcryptjs": "^2.4.3",
+    "cors": "^2.8.5",
+    "dotenv": "^16.4.5",
+    "express": "^4.19.2",
+    "jsonwebtoken": "^9.0.2",
+    "mongoose": "^8.5.3",
+    "openai": "^4.56.0"
+  },
+  "devDependencies": {
+    "@types/bcryptjs": "^2.4.6",
+    "@types/cors": "^2.8.17",
+    "@types/express": "^4.17.21",
+    "@types/jsonwebtoken": "^9.0.10",
+    "@types/node": "^20.14.10",
+    "tsx": "^4.15.7",
+    "typescript": "^5.5.4"
+  }
+}
diff --git a/backend/src/index.ts b/backend/src/index.ts
new file mode 100644
index 0000000..9cb7ad8
--- /dev/null
+++ b/backend/src/index.ts
@@ -0,0 +1,30 @@
+import 'dotenv/config';
+import express from 'express';
+import cors from 'cors';
+import mongoose from 'mongoose';
+import chatRoutes from './routes/chat.js';
+import authRoutes from './routes/auth.js';
+
+const app = express();
+app.use(cors());
+app.use(express.json({ limit: '1mb' }));
+
+app.use('/api/auth', authRoutes);
+app.use('/api', chatRoutes);
+
+app.get('/api/health', (_req, res) => res.json({ ok: true, time: new Date().toISOString() }));
+
+const PORT = process.env.PORT || 3000;
+
+async function start() {
+  try {
+    const mongo = process.env.MONGODB_URI || 'mongodb://localhost:27017/multi_llm_chat';
+    console.log('Connecting MongoDB ->', mongo);
+    await mongoose.connect(mongo);
+    app.listen(PORT, () => console.log('Server listening on ' + PORT));
+  } catch (e) {
+    console.error('Failed to start server', e);
+    process.exit(1);
+  }
+}
+start();
diff --git a/backend/src/middleware/auth.ts b/backend/src/middleware/auth.ts
new file mode 100644
index 0000000..85294c1
--- /dev/null
+++ b/backend/src/middleware/auth.ts
@@ -0,0 +1,29
@@ +import { Request, Response, NextFunction } from 'express'; +import jwt from 'jsonwebtoken'; + +export interface AuthRequest extends Request { userId?: string; } + +export function auth(required = true) { + return (req: AuthRequest, res: Response, next: NextFunction) => { + const header = req.headers.authorization; + if (!header) { + if (required) return res.status(401).json({ error: 'NO_AUTH' }); + return next(); + } + const token = header.replace(/^Bearer\s+/i, ''); + try { + const secret = process.env.JWT_SECRET || 'dev_secret'; + const payload = jwt.verify(token, secret) as { uid: string }; + req.userId = payload.uid; + next(); + } catch (e) { + if (required) return res.status(401).json({ error: 'BAD_TOKEN' }); + next(); + } + }; +} + +export function signToken(uid: string) { + const secret = process.env.JWT_SECRET || 'dev_secret'; + return jwt.sign({ uid }, secret, { expiresIn: '7d' }); +} diff --git a/backend/src/models/Conversation.ts b/backend/src/models/Conversation.ts new file mode 100644 index 0000000..f29314b --- /dev/null +++ b/backend/src/models/Conversation.ts @@ -0,0 +1,31 @@ +import { Schema, model, Document } from 'mongoose'; + +export interface IMessage { + role: 'user' | 'assistant' | 'system'; + content: string; + createdAt?: Date; +} + +export interface IConversation extends Document { + title?: string; + userId?: string; + modelId: string; + messages: IMessage[]; + createdAt: Date; + updatedAt: Date; +} + +const MessageSchema = new Schema({ + role: { type: String, required: true }, + content: { type: String, required: true }, + createdAt: { type: Date, default: Date.now } +}, { _id: false }); + +const ConversationSchema = new Schema({ + title: String, + userId: { type: String, index: true }, + modelId: { type: String, required: true }, + messages: { type: [MessageSchema], default: [] }, +}, { timestamps: true }); + +export const Conversation = model('Conversation', ConversationSchema); diff --git a/backend/src/models/User.ts b/backend/src/models/User.ts new file mode 100644 index 0000000..518a7b5 --- /dev/null +++ b/backend/src/models/User.ts @@ -0,0 +1,15 @@ +import { Schema, model, Document } from 'mongoose'; + +export interface IUser extends Document { + email: string; + passwordHash: string; + createdAt: Date; + updatedAt: Date; +} + +const UserSchema = new Schema({ + email: { type: String, required: true, unique: true, index: true }, + passwordHash: { type: String, required: true } +}, { timestamps: true }); + +export const User = model('User', UserSchema); diff --git a/backend/src/routes/auth.ts b/backend/src/routes/auth.ts new file mode 100644 index 0000000..9b6e159 --- /dev/null +++ b/backend/src/routes/auth.ts @@ -0,0 +1,28 @@ +import { Router } from 'express'; +import bcrypt from 'bcryptjs'; +import { User } from '../models/User.js'; +import { signToken } from '../middleware/auth.js'; + +const router = Router(); + +router.post('/register', async (req, res) => { + const { email, password } = req.body || {}; + if (!email || !password) return res.status(400).json({ error: 'MISSING_FIELDS' }); + const exist = await User.findOne({ email }); + if (exist) return res.status(409).json({ error: 'EMAIL_EXISTS' }); + const passwordHash = await bcrypt.hash(password, 10); + const user = await User.create({ email, passwordHash }); + return res.json({ token: signToken(user.id), user: { id: user.id, email: user.email } }); +}); + +router.post('/login', async (req, res) => { + const { email, password } = req.body || {}; + if (!email || !password) return 
res.status(400).json({ error: 'MISSING_FIELDS' }); + const user = await User.findOne({ email }); + if (!user) return res.status(401).json({ error: 'INVALID_CREDENTIALS' }); + const ok = await bcrypt.compare(password, user.passwordHash); + if (!ok) return res.status(401).json({ error: 'INVALID_CREDENTIALS' }); + return res.json({ token: signToken(user.id), user: { id: user.id, email: user.email } }); +}); + +export default router; diff --git a/backend/src/routes/chat.ts b/backend/src/routes/chat.ts new file mode 100644 index 0000000..7274e29 --- /dev/null +++ b/backend/src/routes/chat.ts @@ -0,0 +1,100 @@ +import { Router, Request, Response } from 'express'; +import { Conversation } from '../models/Conversation.js'; +import { streamChat, oneShot, StreamChunk } from '../services/llmService.js'; +import { auth, AuthRequest } from '../middleware/auth.js'; + +const router = Router(); + +router.post('/chat', auth(false), async (req: AuthRequest, res: Response) => { + const { modelId, messages, conversationId, save } = req.body || {}; + try { + const text = await oneShot(modelId, messages); + let convId = conversationId; + if (save && req.userId) { + if (convId) { + await Conversation.findByIdAndUpdate(convId, { $push: { messages: { role: 'assistant', content: text } } }); + } else { + const cleaned = (messages||[]).filter((m:any)=>m && typeof m.content==='string' && m.content.trim().length>0); + const title = (cleaned?.[0]?.content || text.slice(0,30)) || '新对话'; + const conv = await Conversation.create({ userId: req.userId, modelId, title, messages: [...cleaned, { role: 'assistant', content: text }] }); + convId = conv.id; + } + } + res.json({ text, conversationId: convId }); + } catch (e:any) { + console.error(e); + res.status(500).json({ error: e.message }); + } +}); + +router.post('/chat/stream', auth(false), async (req: AuthRequest, res: Response) => { + const { modelId, messages, conversationId, save } = req.body || {}; + res.setHeader('Content-Type', 'text/event-stream'); + res.setHeader('Cache-Control', 'no-cache'); + res.setHeader('Connection', 'keep-alive'); + res.flushHeaders?.(); + console.log('[stream] start model=%s messages=%d conv=%s', modelId, (messages||[]).length, conversationId || 'new'); + try { + let buffer = ''; + for await (const chunk of streamChat(modelId, messages) as AsyncGenerator) { + if (chunk.done) { + if (save && req.userId) { + if (buffer.trim()) { // 只在有内容时保存 + if (conversationId) { + await Conversation.findByIdAndUpdate(conversationId, { $push: { messages: { role: 'assistant', content: buffer } } }); + } else { + const cleaned = (messages||[]).filter((m:any)=>m && typeof m.content==='string' && m.content.trim().length>0); + const title = (cleaned?.[0]?.content || buffer.slice(0,30)) || '新对话'; + const conv = await Conversation.create({ userId: req.userId, modelId, title, messages: [...cleaned, { role: 'assistant', content: buffer }] }); + res.write('data: {"conversationId":"'+conv.id+'"}\n\n'); + } + } + } + res.write('data: [DONE]\n\n'); + } else { + if (chunk.kind === 'answer') buffer += chunk.text; + const payload = chunk.kind ? 
JSON.stringify({ kind: chunk.kind, text: chunk.text }) : chunk.text; + res.write('data: ' + payload.replace(/\n/g, '\\n') + '\n\n'); + (res as any).flush?.(); + } + } + } catch (e:any) { + console.error(e); + res.write('data: [ERROR] ' + (e.message || 'unknown') + '\n\n'); + } finally { + console.log('[stream] end model=%s', modelId); + res.end(); + } +}); + +router.post('/conversations', auth(), async (req: AuthRequest, res: Response) => { + try { + const { modelId, messages, title } = req.body; + const conv = await Conversation.create({ userId: req.userId, modelId, messages, title }); + res.json(conv); + } catch (e:any) { + res.status(500).json({ error: e.message }); + } +}); + +router.get('/conversations/:id', auth(), async (req: AuthRequest, res: Response) => { + const conv = await Conversation.findOne({ _id: req.params.id, userId: req.userId }); + if (!conv) return res.status(404).json({ error: 'Not found' }); + res.json(conv); +}); + +router.get('/conversations', auth(), async (req: AuthRequest, res: Response) => { + const list = await Conversation.find({ userId: req.userId }).sort({ updatedAt: -1 }).limit(50); + res.json(list); +}); + +router.delete('/conversations/:id', auth(), async (req: AuthRequest, res: Response) => { + try { + await Conversation.deleteOne({ _id: req.params.id, userId: req.userId }); + res.json({ ok: true }); + } catch (e:any) { + res.status(500).json({ error: e.message }); + } +}); + +export default router; diff --git a/backend/src/services/llmService.ts b/backend/src/services/llmService.ts new file mode 100644 index 0000000..d0ed02e --- /dev/null +++ b/backend/src/services/llmService.ts @@ -0,0 +1,75 @@ +import OpenAI from 'openai'; + +export interface ProviderConfig { + name: 'openai' | 'deepseek'; + apiKey: string; + baseURL: string; +} + +function detectProvider(model: string): ProviderConfig { + if (/deepseek/i.test(model)) { + const apiKey = process.env.DEEPSEEK_API_KEY || ''; + if (!apiKey) throw new Error('Missing DEEPSEEK_API_KEY'); + return { + name: 'deepseek', + apiKey, + baseURL: process.env.DEEPSEEK_BASE_URL || 'https://api.deepseek.com' + }; + } + const apiKey = process.env.OPENAI_API_KEY || ''; + if (!apiKey) throw new Error('Missing OPENAI_API_KEY'); + return { + name: 'openai', + apiKey, + baseURL: process.env.OPENAI_BASE_URL || process.env.OPENAI_API_BASE || 'https://api.openai.com/v1' + }; +} + +function buildClient(cfg: ProviderConfig) { + // 允许用户传入已经包含 /v1 的 baseURL,避免重复 /v1/v1 + let base = cfg.baseURL.replace(/\/$/, ''); + if (!/\/v1$/i.test(base)) base += '/v1'; + return new OpenAI({ apiKey: cfg.apiKey, baseURL: base }); +} + +export interface StreamChunk { text: string; done?: boolean; kind?: 'reasoning' | 'answer'; } + +export async function *streamChat(model: string, messages: { role: 'user'|'assistant'|'system'; content: string }[]): AsyncGenerator { + messages = ensureSystem(messages) as { role: 'user'|'assistant'|'system'; content: string }[]; + const cfg = detectProvider(model); + const client = buildClient(cfg); + const stream = await client.chat.completions.create({ + model, + messages, + stream: true, + temperature: 0.7, + }); + for await (const part of stream) { + const choice = part.choices?.[0]; + // Deepseek reasoning model: reasoning_content or similar field (SDK may expose in delta.extra) + const reasoning: any = (choice as any)?.delta?.reasoning_content || (choice as any)?.delta?.reasoning; + if (reasoning) yield { text: reasoning, kind: 'reasoning' }; + const delta = choice?.delta?.content; + if (delta) yield { text: 
delta, kind: 'answer' } as StreamChunk;
+  }
+  yield { text: '', done: true } as StreamChunk;
+}
+
+export async function oneShot(model: string, messages: { role: 'user'|'assistant'|'system'; content: string }[]) {
+  messages = ensureSystem(messages) as { role: 'user'|'assistant'|'system'; content: string }[];
+  const cfg = detectProvider(model);
+  const client = buildClient(cfg);
+  const completion = await client.chat.completions.create({ model, messages, stream: false });
+  const first = completion.choices[0];
+  const reasoning: string | undefined = (first as any)?.message?.reasoning_content || (first as any)?.message?.reasoning;
+  const content = first?.message?.content || '';
+  return reasoning ? reasoning + '\n' + content : content;
+}
+
+const DEFAULT_SYSTEM = process.env.SYSTEM_PROMPT || 'You are a helpful assistant.';
+function ensureSystem(msgs: { role: any; content: string }[]) {
+  if (!msgs || !Array.isArray(msgs)) return [{ role: 'system', content: DEFAULT_SYSTEM }];
+  // Prepend a default system prompt when the caller did not supply one
+  const hasSystem = msgs.some(m => m.role === 'system');
+  return hasSystem ? msgs : [{ role: 'system', content: DEFAULT_SYSTEM }, ...msgs];
+}
\ No newline at end of file
diff --git a/backend/src/services/openaiService.ts b/backend/src/services/openaiService.ts
new file mode 100644
index 0000000..cdec808
--- /dev/null
+++ b/backend/src/services/openaiService.ts
@@ -0,0 +1,30 @@
+import OpenAI from 'openai';
+
+const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
+
+export interface ChatCompletionChunk {
+  text: string;
+  done?: boolean;
+}
+
+export async function *streamChat(model: string, messages: { role: 'user'|'assistant'|'system'; content: string }[]) {
+  // Use the newer responses API if the SDK version supports it; otherwise fall back to chat.completions
+  const stream = await client.chat.completions.create({
+    model,
+    messages,
+    stream: true,
+    temperature: 0.7,
+  });
+  for await (const part of stream) {
+    const delta = part.choices?.[0]?.delta?.content;
+    if (delta) {
+      yield { text: delta } as ChatCompletionChunk;
+    }
+  }
+  yield { text: '', done: true };
+}
+
+export async function oneShot(model: string, messages: { role: 'user'|'assistant'|'system'; content: string }[]) {
+  const completion = await client.chat.completions.create({ model, messages });
+  return completion.choices[0]?.message?.content || '';
+}
diff --git a/backend/tsconfig.json b/backend/tsconfig.json
new file mode 100644
index 0000000..0354d16
--- /dev/null
+++ b/backend/tsconfig.json
@@ -0,0 +1,16 @@
+{
+  "compilerOptions": {
+    "target": "ES2020",
+    "module": "ESNext",
+    "moduleResolution": "Node",
+    "esModuleInterop": true,
+    "forceConsistentCasingInFileNames": true,
+    "strict": true,
+    "skipLibCheck": true,
+    "outDir": "dist",
+    "types": ["node"],
+    "resolveJsonModule": true
+  },
+  "include": ["src"],
+  "exclude": ["node_modules", "dist"]
+}
diff --git a/frontend/.env.example b/frontend/.env.example
new file mode 100644
index 0000000..d52418f
--- /dev/null
+++ b/frontend/.env.example
@@ -0,0 +1,2 @@
+VITE_API_BASE=/api
+# Custom backend address: VITE_API_BASE=http://localhost:3000/api
diff --git a/frontend/.eslintrc.cjs b/frontend/.eslintrc.cjs
new file mode 100644
index 0000000..9c5728f
--- /dev/null
+++ b/frontend/.eslintrc.cjs
@@ -0,0 +1,21 @@
+/* eslint-env node */
+module.exports = {
+  root: true,
+  env: { browser: true, es2022: true, node: true },
+  extends: [
+    'eslint:recommended',
+    'plugin:vue/vue3-recommended',
+    'plugin:@typescript-eslint/recommended'
+  ],
+  parser: 'vue-eslint-parser',
+  parserOptions: {
+    parser: '@typescript-eslint/parser',
+    ecmaVersion: 'latest',
+    sourceType: 'module'
+  },
+  rules: {
+    'vue/multi-word-component-names': 0,
+    '@typescript-eslint/no-explicit-any': 0,
+    '@typescript-eslint/explicit-module-boundary-types': 0
+  }
+};
diff --git a/frontend/README.md b/frontend/README.md
new file mode 100644
index 0000000..9e92932
--- /dev/null
+++ b/frontend/README.md
@@ -0,0 +1,33 @@
+# Multi-model AI Chat Frontend
+
+A starter skeleton for a chat app with switchable LLMs, built on Vite + Vue 3 + TypeScript + Pinia + Vue Router.
+
+## Feature overview
+- Left sidebar: route navigation + model dropdown switcher
+- Chat page:
+  - Basic message list
+  - Streaming output (read via SSE/fetch)
+  - Shift+Enter for a newline, Enter to send
+- Settings page: simple per-provider API key / endpoint inputs (stored locally)
+- About page: project description
+- Light/dark theme toggle (sidebar button; CSS variables switched via data-theme)
+
+## Assumed backend API
+- POST `/api/chat/stream` (SSE or chunked text; lines start with `data: <content>`, stream ends with `[DONE]`)
+- POST `/api/chat` returns `{ text: string }`
+
+## Development
+```bash
+npm install
+npm run dev
+```
+
+## TODO / possible extensions
+- Conversation list & history storage
+- Markdown + code highlighting + copy button
+- Dynamically fetched model configuration
+- Error handling, retries, request cancellation (AbortController)
+- Tool calls / image / file message types
+- RAG retrieval side panel
+- User login and authentication
+
diff --git a/frontend/index.html b/frontend/index.html
new file mode 100644
index 0000000..694eac8
--- /dev/null
+++ b/frontend/index.html
@@ -0,0 +1,14 @@
+ + + + + + AI 多模型聊天 + + + +
+ + + diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..258ecea --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,29 @@ +{ + "name": "multi-llm-chat-frontend", + "private": true, + "version": "0.1.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview", + "lint": "eslint . --ext .ts,.tsx,.vue --fix", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "axios": "^1.7.2", + "pinia": "^2.1.7", + "vue": "^3.4.29", + "vue-router": "^4.3.0" + }, + "devDependencies": { + "@types/node": "^24.3.0", + "@typescript-eslint/eslint-plugin": "^7.18.0", + "@typescript-eslint/parser": "^7.18.0", + "@vitejs/plugin-vue": "^5.0.4", + "eslint": "^8.57.0", + "eslint-plugin-vue": "^9.27.0", + "typescript": "^5.5.4", + "vite": "^5.4.0" + } +} diff --git a/frontend/public/favicon.svg b/frontend/public/favicon.svg new file mode 100644 index 0000000..d201e0c --- /dev/null +++ b/frontend/public/favicon.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/frontend/src/App.vue b/frontend/src/App.vue new file mode 100644 index 0000000..b4ff461 --- /dev/null +++ b/frontend/src/App.vue @@ -0,0 +1,113 @@ + + + + + diff --git a/frontend/src/components/ConversationList.vue b/frontend/src/components/ConversationList.vue new file mode 100644 index 0000000..5116354 --- /dev/null +++ b/frontend/src/components/ConversationList.vue @@ -0,0 +1,50 @@ + + + diff --git a/frontend/src/main.ts b/frontend/src/main.ts new file mode 100644 index 0000000..6dd3240 --- /dev/null +++ b/frontend/src/main.ts @@ -0,0 +1,14 @@ +import { createApp } from 'vue'; +import { createPinia } from 'pinia'; +import App from './App.vue'; +import router from './router'; +import './styles/global.css'; +import { useThemeStore } from './stores/themeStore'; + +const app = createApp(App); +app.use(createPinia()); +app.use(router); +app.mount('#app'); +// 初始化主题 +const themeStore = useThemeStore(); +themeStore.applyTheme(); diff --git a/frontend/src/router.ts b/frontend/src/router.ts new file mode 100644 index 0000000..021212d --- /dev/null +++ b/frontend/src/router.ts @@ -0,0 +1,20 @@ +import { createRouter, createWebHistory, RouteRecordRaw } from 'vue-router'; +import ChatView from './views/ChatView.vue'; +import SettingsView from './views/SettingsView.vue'; +import AboutView from './views/AboutView.vue'; +import LoginView from './views/LoginView.vue'; + +const routes: RouteRecordRaw[] = [ + { path: '/', redirect: '/chat' }, + { path: '/chat', component: ChatView }, + { path: '/login', component: LoginView }, + { path: '/settings', component: SettingsView }, + { path: '/about', component: AboutView }, +]; + +const router = createRouter({ + history: createWebHistory(), + routes, +}); + +export default router; diff --git a/frontend/src/services/chatService.ts b/frontend/src/services/chatService.ts new file mode 100644 index 0000000..20dafa0 --- /dev/null +++ b/frontend/src/services/chatService.ts @@ -0,0 +1,55 @@ +import axios from 'axios'; + +interface SendMessage { role: 'user' | 'assistant'; content: string; } +interface ChatRequest { modelId: string; messages: SendMessage[]; conversationId?: string | null; save?: boolean; } + +export interface StreamPayload { kind?: 'reasoning' | 'answer'; text: string; conversationId?: string; } + +// 这里演示一个 SSE / 流式的消费方式,后端需提供 /api/chat/stream 接口 +export async function *chatWithModel(req: ChatRequest, token?: string): AsyncGenerator { + // 先尝试使用 fetch EventSource-like + const resp = await fetch('/api/chat/stream', { + method: 
'POST',
+    headers: { 'Content-Type': 'application/json', ...(token ? { Authorization: 'Bearer ' + token } : {}) },
+    body: JSON.stringify(req),
+  });
+  if (!resp.ok || !resp.body) {
+    throw new Error('Network error ' + resp.status);
+  }
+  const reader = resp.body.getReader();
+  const decoder = new TextDecoder();
+  let done = false; let carry = '';
+  while (!done) {
+    const chunk = await reader.read();
+    done = chunk.done || false;
+    if (chunk.value) {
+      const text = carry + decoder.decode(chunk.value, { stream: !done });
+      // Assume the backend sends SSE frames of the form "data: ...\n\n"; keep any trailing partial frame for the next read
+      const segments = text.split(/\n\n/); carry = done ? '' : (segments.pop() || '');
+      for (const p of segments.filter(Boolean)) {
+        const m = p.match(/^data: (.*)$/m);
+        if (m) {
+          const payload = m[1];
+          if (payload === '[DONE]') return;
+          if (payload.startsWith('{') && payload.endsWith('}')) {
+            try {
+              const obj = JSON.parse(payload);
+              yield obj;
+            } catch { /* swallow */ }
+          } else if (payload.startsWith('[ERROR]')) {
+            throw new Error(payload.slice(7).trim());
+          } else {
+            yield { text: payload };
+          }
+        } else {
+          yield { text: p };
+        }
+      }
+    }
+  }
+}
+
+export async function sendOnce(req: ChatRequest, token?: string): Promise<{ text: string; conversationId?: string; }> {
+  const { data } = await axios.post('/api/chat', req, { headers: token ? { Authorization: 'Bearer ' + token } : {} });
+  return data;
+}
diff --git a/frontend/src/shims-vue.d.ts b/frontend/src/shims-vue.d.ts
new file mode 100644
index 0000000..ac1ded7
--- /dev/null
+++ b/frontend/src/shims-vue.d.ts
@@ -0,0 +1,5 @@
+declare module '*.vue' {
+  import { DefineComponent } from 'vue'
+  const component: DefineComponent<{}, {}, any>
+  export default component
+}
diff --git a/frontend/src/stores/authStore.ts b/frontend/src/stores/authStore.ts
new file mode 100644
index 0000000..af3293b
--- /dev/null
+++ b/frontend/src/stores/authStore.ts
@@ -0,0 +1,50 @@
+import { defineStore } from 'pinia';
+import axios from 'axios';
+
+interface UserInfo { id: string; email: string; }
+
+interface AuthState {
+  token: string | null;
+  user: UserInfo | null;
+  loading: boolean;
+  error: string | null;
+}
+
+export const useAuthStore = defineStore('auth', {
+  state: (): AuthState => ({
+    token: localStorage.getItem('token'),
+    user: null,
+    loading: false,
+    error: null,
+  }),
+  getters: {
+    isAuthed: (s) => !!s.token,
+  },
+  actions: {
+    async login(email: string, password: string) {
+      this.loading = true; this.error = null;
+      try {
+        const { data } = await axios.post('/api/auth/login', { email, password });
+        this.token = data.token; localStorage.setItem('token', data.token);
+        this.user = data.user ? { id: data.user.id, email: data.user.email } : { id: 'me', email };
+      } catch (e:any) {
+        this.error = e.response?.data?.error || e.message;
+        throw e;
+      } finally {
+        this.loading = false;
+      }
+    },
+    async register(email: string, password: string) {
+      this.loading = true; this.error = null;
+      try {
+        const { data } = await axios.post('/api/auth/register', { email, password });
+        this.token = data.token; localStorage.setItem('token', data.token);
+        this.user = data.user ?
{ id: data.user.id, email: data.user.email } : { id: 'me', email }; + } catch (e:any) { + this.error = e.response?.data?.error || e.message; + throw e; + } finally { this.loading = false; } + }, + logout() { this.token = null; this.user = null; localStorage.removeItem('token'); } + } +}); diff --git a/frontend/src/stores/conversationStore.ts b/frontend/src/stores/conversationStore.ts new file mode 100644 index 0000000..81f0c98 --- /dev/null +++ b/frontend/src/stores/conversationStore.ts @@ -0,0 +1,66 @@ +import { defineStore } from 'pinia'; +import axios from 'axios'; +import { useAuthStore } from './authStore'; + +export interface ConversationSummary { _id: string; title: string; modelId: string; updatedAt: string; } +export interface ConversationMessage { role: 'user' | 'assistant' | 'system'; content: string; transient?: boolean } + +interface ConversationState { + list: ConversationSummary[]; + loading: boolean; + error: string | null; + currentId: string | null; + messages: ConversationMessage[]; +} + +export const useConversationStore = defineStore('conversation', { + state: (): ConversationState => ({ list: [], loading: false, error: null, currentId: null, messages: [] }), + actions: { + async fetchList() { + const auth = useAuthStore(); + if (!auth.token) { this.list = []; return; } + this.loading = true; this.error = null; + try { + const { data } = await axios.get('/api/conversations', { headers: { Authorization: 'Bearer ' + auth.token } }); + this.list = data; + } catch (e:any) { this.error = e.response?.data?.error || e.message; } + finally { this.loading = false; } + }, + async loadConversation(id: string) { + const auth = useAuthStore(); + if (!auth.token) return; + this.loading = true; this.error = null; + try { + const { data } = await axios.get('/api/conversations/' + id, { headers: { Authorization: 'Bearer ' + auth.token } }); + this.currentId = id; + this.messages = data.messages || []; + } catch (e:any) { this.error = e.response?.data?.error || e.message; } + finally { this.loading = false; } + }, + resetCurrent() { this.currentId = null; this.messages = []; }, + createNewConversation(notify?: string) { + this.currentId = null; + this.messages = []; + if (notify) { + this.messages.push({ role: 'system', content: notify, transient: true }); + } + }, + pushMessage(m: ConversationMessage) { this.messages.push(m); }, + patchLastAssistant(content: string) { + for (let i = this.messages.length - 1; i >=0; i--) { + const msg = this.messages[i]; + if (msg.role === 'assistant') { msg.content = content; return; } + } + } + , + async deleteConversation(id: string) { + const auth = useAuthStore(); + if (!auth.token) return; + try { + await axios.delete('/api/conversations/' + id, { headers: { Authorization: 'Bearer ' + auth.token } }); + this.list = this.list.filter(l => l._id !== id); + if (this.currentId === id) this.resetCurrent(); + } catch (e:any) { this.error = e.response?.data?.error || e.message; } + } + } +}); diff --git a/frontend/src/stores/modelStore.ts b/frontend/src/stores/modelStore.ts new file mode 100644 index 0000000..abfbc87 --- /dev/null +++ b/frontend/src/stores/modelStore.ts @@ -0,0 +1,44 @@ +import { defineStore } from 'pinia'; + +export interface ModelInfo { + id: string; + label: string; + provider: 'openai' | 'azureOpenAI' | 'anthropic' | 'google' | 'ollama' | 'unknown'; + model: string; // provider 实际模型名 + maxTokens?: number; +} + +interface State { + currentModel: string; + supportedModels: ModelInfo[]; +} + +export const useModelStore = 
defineStore('modelStore', { + state: (): State => ({ + // 从 localStorage 读取上次选择的模型,避免 SSR/Node 环境报错时使用短路 + currentModel: typeof window !== 'undefined' && localStorage.getItem('currentModel') ? String(localStorage.getItem('currentModel')) : 'gpt-4o-mini', + supportedModels: [ + { id: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'openai', model: 'gpt-4o-mini' }, + { id: 'gpt-4o', label: 'GPT-4o', provider: 'openai', model: 'gpt-4o' }, + { id: 'claude-3-5', label: 'Claude 3.5', provider: 'anthropic', model: 'claude-3-5-sonnet-latest' }, + { id: 'gemini-flash', label: 'Gemini Flash', provider: 'google', model: 'gemini-1.5-flash' }, + { id: 'ollama-llama3', label: 'Llama3 (本地)', provider: 'ollama', model: 'llama3' }, + { id: 'deepseek-chat', label: 'DeepSeek Chat', provider: 'openai', model: 'deepseek-chat' } + ], + }), + actions: { + setCurrentModel(id: string) { + (this as any).currentModel = id; + try { localStorage.setItem('currentModel', id); } catch (e) { /* ignore */ } + }, + addModel(m: ModelInfo) { + const self = this as any as State; + if (!self.supportedModels.find((x: ModelInfo) => x.id === m.id)) { + self.supportedModels.push(m); + } + }, + }, + getters: { + currentModelInfo: (s: State) => s.supportedModels.find((m: ModelInfo) => m.id === s.currentModel), + } +}); diff --git a/frontend/src/stores/themeStore.ts b/frontend/src/stores/themeStore.ts new file mode 100644 index 0000000..c1d0161 --- /dev/null +++ b/frontend/src/stores/themeStore.ts @@ -0,0 +1,21 @@ +import { defineStore } from 'pinia'; + +export type Theme = 'dark' | 'light'; +const THEME_KEY = 'appTheme'; + +export const useThemeStore = defineStore('themeStore', { + state: () => ({ + theme: (localStorage.getItem(THEME_KEY) as Theme) || 'dark' + }), + actions: { + applyTheme() { + document.documentElement.setAttribute('data-theme', this.theme); + }, + setTheme(t: Theme) { + this.theme = t; + localStorage.setItem(THEME_KEY, t); + this.applyTheme(); + }, + toggle() { this.setTheme(this.theme === 'dark' ? 
'light' : 'dark'); } + } +}); \ No newline at end of file diff --git a/frontend/src/styles/global.css b/frontend/src/styles/global.css new file mode 100644 index 0000000..9386399 --- /dev/null +++ b/frontend/src/styles/global.css @@ -0,0 +1,37 @@ +:root { + --bg-color:#141419; + --text-color:#e5e7eb; + --sidebar-bg:#1e1e24; + --border-color:#333; + --accent:#3b82f6; + --accent-hover:#2563eb; + --bubble-user-bg:#3a3f58; + --bubble-ai-bg:#2d2f39; + --bubble-text-color:#f2f2f7; + --input-bg:#262730; + --input-border:#333; + --button-bg:var(--accent); + --button-disabled-bg:#4b5563; + --shadow-color:rgba(0,0,0,.3); +} + +[data-theme='light'] { + --bg-color:#f5f7fa; + --text-color:#1f2933; + --sidebar-bg:#ffffff; + --border-color:#e2e8f0; + --bubble-user-bg:#2563eb10; + --bubble-ai-bg:#f1f5f9; + --bubble-text-color:#1f2933; + --input-bg:#ffffff; + --input-border:#cbd5e1; + --button-bg:#2563eb; + --button-disabled-bg:#94a3b8; + --shadow-color:rgba(0,0,0,.08); +} + +html,body { margin:0; padding:0; height:100%; } +body { background:var(--bg-color); color:var(--text-color); font-family: system-ui,-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,'Noto Sans',sans-serif; overflow:hidden; } +* { box-sizing:border-box; } +::-webkit-scrollbar { width:8px; } +::-webkit-scrollbar-thumb { background:var(--border-color); border-radius:4px; } diff --git a/frontend/src/views/AboutView.vue b/frontend/src/views/AboutView.vue new file mode 100644 index 0000000..88ce89f --- /dev/null +++ b/frontend/src/views/AboutView.vue @@ -0,0 +1,11 @@ + + + diff --git a/frontend/src/views/ChatView.vue b/frontend/src/views/ChatView.vue new file mode 100644 index 0000000..3d5c303 --- /dev/null +++ b/frontend/src/views/ChatView.vue @@ -0,0 +1,175 @@ + + + + + diff --git a/frontend/src/views/LoginView.vue b/frontend/src/views/LoginView.vue new file mode 100644 index 0000000..c2897e0 --- /dev/null +++ b/frontend/src/views/LoginView.vue @@ -0,0 +1,41 @@ + + + diff --git a/frontend/src/views/SettingsView.vue b/frontend/src/views/SettingsView.vue new file mode 100644 index 0000000..a189efa --- /dev/null +++ b/frontend/src/views/SettingsView.vue @@ -0,0 +1,35 @@ + + + diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..142c4a2 --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,21 @@ +{ + "compilerOptions": { + "target": "ES2020", + "useDefineForClassFields": true, + "module": "ESNext", + "moduleResolution": "Node", + "strict": true, + "jsx": "preserve", + "resolveJsonModule": true, + "isolatedModules": true, + "esModuleInterop": true, + "lib": ["ES2020", "DOM", "DOM.Iterable"], + "types": ["node"], + "baseUrl": "./", + "paths": { + "@/*": ["src/*"] + } + }, + "include": ["src"], + "references": [] +} diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts new file mode 100644 index 0000000..3953889 --- /dev/null +++ b/frontend/vite.config.ts @@ -0,0 +1,21 @@ +import { defineConfig } from 'vite'; +import vue from '@vitejs/plugin-vue'; +import path from 'node:path'; + +export default defineConfig({ + plugins: [vue()], + resolve: { + alias: { + '@': path.resolve(__dirname, 'src'), + }, + }, + server: { + port: 5173, + proxy: { + '/api': { + target: 'http://localhost:3000', + changeOrigin: true + } + }, + }, +});
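
For reference, the streaming protocol listed in the backend README (`data:` frames until `data: [DONE]`, with answer/reasoning chunks JSON-encoded as `{ kind, text }`) can be exercised with a short standalone script. This is only a sketch against the routes in this diff; the host, port, model id, and prompt are placeholder assumptions.

```ts
// Sketch of a standalone Node 18+ client for POST /api/chat/stream (placeholder host/model/prompt).
async function main() {
  const resp = await fetch('http://localhost:3000/api/chat/stream', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      modelId: 'gpt-4o-mini',
      messages: [{ role: 'user', content: 'Say hello in one sentence.' }],
    }),
  });
  if (!resp.ok || !resp.body) throw new Error('Request failed: ' + resp.status);

  const reader = resp.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    // Frames look like "data: <payload>\n\n"; "[DONE]" terminates the stream.
    const frames = buffer.split('\n\n');
    buffer = frames.pop() || '';
    for (const frame of frames) {
      const payload = frame.replace(/^data: /, '');
      if (payload === '[DONE]') return;
      // Answer/reasoning chunks arrive as JSON ({ kind, text }); other frames are plain text.
      process.stdout.write(payload.startsWith('{') ? (JSON.parse(payload).text ?? '') : payload);
    }
  }
}

main().catch((err) => { console.error(err); process.exit(1); });
```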
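The backend README also notes that further provider logic can be added in `llmService.ts`, and the frontend's `modelStore` already lists an Ollama model that the backend does not yet route. Below is a minimal sketch of what such a branch might look like, reusing the existing OpenAI-compatible client path; the `OLLAMA_API_KEY`/`OLLAMA_BASE_URL` variables, the default `http://localhost:11434` endpoint, and the model-name match are assumptions, not something this diff defines.

```ts
// Hypothetical addition inside backend/src/services/llmService.ts (sketch, not part of the diff above).
// Ollama exposes an OpenAI-compatible endpoint; buildClient() already appends /v1 to the base URL.
function detectProviderWithOllama(model: string): ProviderConfig {
  if (/llama|ollama/i.test(model)) {
    return {
      name: 'openai',                                   // reuse the OpenAI-protocol code path
      apiKey: process.env.OLLAMA_API_KEY || 'ollama',   // Ollama ignores the key, but the SDK requires one
      baseURL: process.env.OLLAMA_BASE_URL || 'http://localhost:11434'
    };
  }
  return detectProvider(model);                         // fall back to the existing OpenAI/DeepSeek logic
}
```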