system commit

This commit is contained in:
2025-08-22 14:22:43 +08:00
parent bf6f910db1
commit a1537e3f9f
36 changed files with 1311 additions and 0 deletions

9
backend/.env.example Normal file
View File

@@ -0,0 +1,9 @@
PORT=3000
MONGODB_URI=mongodb://localhost:27017/multi_llm_chat
OPENAI_API_KEY=your_openai_key_here
# 可选:自定义 OpenAI Base
OPENAI_BASE_URL=https://api.openai.com
# DeepSeek 兼容 (同 OpenAI 协议)
DEEPSEEK_API_KEY=your_deepseek_key_here
DEEPSEEK_BASE_URL=https://api.deepseek.com

3
backend/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
node_modules
dist
.env

31
backend/README.md Normal file
View File

@@ -0,0 +1,31 @@
# 后端服务
Node.js + Express + TypeScript + Mongoose + OpenAI 示例。
## 环境变量
复制 `.env.example` 为 `.env` 并填写:
```
PORT=3000
MONGODB_URI=mongodb://localhost:27017/multi_llm_chat
OPENAI_API_KEY=sk-...
# 可选 OpenAI base (代理): OPENAI_BASE_URL=https://api.openai.com
DEEPSEEK_API_KEY=sk-deepseek...
DEEPSEEK_BASE_URL=https://api.deepseek.com
```
## 运行
```bash
npm install
npm run dev
```
## 接口
- POST /api/chat { modelId, messages } => { text }
- POST /api/chat/stream (SSE) data: <chunk> 直到 data: [DONE]
- POST /api/conversations 保存对话
- GET /api/conversations/:id 读取对话
- GET /api/health 健康检查
## 说明
已接入 OpenAI 与 DeepSeek;DeepSeek 通过 model 名包含 "deepseek" 自动匹配,使用 OpenAI 协议 /v1/chat/completions。可继续在 `llmService.ts` 中扩展更多 provider 逻辑。

30
backend/package.json Normal file
View File

@@ -0,0 +1,30 @@
{
"name": "multi-llm-chat-backend",
"private": true,
"version": "0.1.0",
"type": "module",
"scripts": {
"dev": "tsx watch src/index.ts",
"start": "node dist/index.js",
"build": "tsc -p tsconfig.json"
},
"dependencies": {
"axios": "^1.7.2",
"bcryptjs": "^2.4.3",
"cors": "^2.8.5",
"dotenv": "^16.4.5",
"express": "^4.19.2",
"jsonwebtoken": "^9.0.2",
"mongoose": "^8.5.3",
"openai": "^4.56.0"
},
"devDependencies": {
"@types/bcryptjs": "^2.4.6",
"@types/cors": "^2.8.17",
"@types/express": "^4.17.21",
"@types/jsonwebtoken": "^9.0.10",
"@types/node": "^20.14.10",
"tsx": "^4.15.7",
"typescript": "^5.5.4"
}
}

30
backend/src/index.ts Normal file
View File

@@ -0,0 +1,30 @@
import 'dotenv/config';
import express from 'express';
import cors from 'cors';
import mongoose from 'mongoose';
import chatRoutes from './routes/chat.js';
import authRoutes from './routes/auth.js';
const app = express();
app.use(cors());
app.use(express.json({ limit: '1mb' }));
app.use('/api/auth', authRoutes);
app.use('/api', chatRoutes);
app.get('/api/health', (_req, res) => res.json({ ok: true, time: new Date().toISOString() }));
const PORT = process.env.PORT || 3000;
async function start() {
try {
const mongo = process.env.MONGODB_URI || 'mongodb://localhost:27017/multi_llm_chat';
console.log('Connecting MongoDB ->', mongo);
await mongoose.connect(mongo);
app.listen(PORT, () => console.log('Server listening on ' + PORT));
} catch (e) {
console.error('Failed to start server', e);
process.exit(1);
}
}
start();

View File

@@ -0,0 +1,29 @@
import { Request, Response, NextFunction } from 'express';
import jwt from 'jsonwebtoken';
export interface AuthRequest extends Request { userId?: string; }
export function auth(required = true) {
return (req: AuthRequest, res: Response, next: NextFunction) => {
const header = req.headers.authorization;
if (!header) {
if (required) return res.status(401).json({ error: 'NO_AUTH' });
return next();
}
const token = header.replace(/^Bearer\s+/i, '');
try {
const secret = process.env.JWT_SECRET || 'dev_secret';
const payload = jwt.verify(token, secret) as { uid: string };
req.userId = payload.uid;
next();
} catch (e) {
if (required) return res.status(401).json({ error: 'BAD_TOKEN' });
next();
}
};
}
export function signToken(uid: string) {
const secret = process.env.JWT_SECRET || 'dev_secret';
return jwt.sign({ uid }, secret, { expiresIn: '7d' });
}

View File

@@ -0,0 +1,31 @@
import { Schema, model, Document } from 'mongoose';
/** One chat turn embedded inside a conversation document. */
export interface IMessage {
  role: 'user' | 'assistant' | 'system';
  content: string;
  createdAt?: Date;
}

/** A saved chat: owner, target model, title, and the ordered message list. */
export interface IConversation extends Document {
  title?: string;
  userId?: string;
  modelId: string;
  messages: IMessage[];
  createdAt: Date;
  updatedAt: Date;
}

// Sub-document schema; _id disabled because messages are addressed by array position.
const MessageSchema = new Schema<IMessage>({
  // enum keeps runtime validation in sync with the IMessage TS union —
  // previously any string was accepted even though the type says otherwise.
  role: { type: String, required: true, enum: ['user', 'assistant', 'system'] },
  content: { type: String, required: true },
  createdAt: { type: Date, default: Date.now }
}, { _id: false });

const ConversationSchema = new Schema<IConversation>({
  title: String,
  // Indexed: conversations are always listed per user.
  userId: { type: String, index: true },
  modelId: { type: String, required: true },
  messages: { type: [MessageSchema], default: [] },
}, { timestamps: true }); // maintains createdAt/updatedAt automatically

export const Conversation = model<IConversation>('Conversation', ConversationSchema);

View File

@@ -0,0 +1,15 @@
import { Schema, model, Document } from 'mongoose';
/** Account record: unique email plus a bcrypt password hash. */
export interface IUser extends Document {
  email: string;
  passwordHash: string;
  createdAt: Date;
  updatedAt: Date;
}

const UserSchema = new Schema<IUser>({
  // `unique: true` already creates a unique index; the previous extra
  // `index: true` defined the same index twice and triggers Mongoose's
  // duplicate-schema-index warning, so it is omitted.
  email: { type: String, required: true, unique: true },
  passwordHash: { type: String, required: true }
}, { timestamps: true });

export const User = model<IUser>('User', UserSchema);

View File

@@ -0,0 +1,28 @@
import { Router } from 'express';
import bcrypt from 'bcryptjs';
import { User } from '../models/User.js';
import { signToken } from '../middleware/auth.js';
const router = Router();

/**
 * POST /api/auth/register — create an account and return a signed JWT.
 * Body: { email, password }. 400 on missing fields, 409 if the email is taken.
 */
router.post('/register', async (req, res) => {
  try {
    const { email, password } = req.body || {};
    if (!email || !password) return res.status(400).json({ error: 'MISSING_FIELDS' });
    const exist = await User.findOne({ email });
    if (exist) return res.status(409).json({ error: 'EMAIL_EXISTS' });
    const passwordHash = await bcrypt.hash(password, 10);
    const user = await User.create({ email, passwordHash });
    return res.json({ token: signToken(user.id), user: { id: user.id, email: user.email } });
  } catch (e: any) {
    // The findOne/create pair races with concurrent registrations; the unique
    // index on email makes the loser fail with Mongo duplicate-key error 11000.
    // Map it to 409 instead of letting the rejection escape the async handler
    // (Express 4 does not catch async errors — the request would hang).
    if (e && e.code === 11000) return res.status(409).json({ error: 'EMAIL_EXISTS' });
    console.error(e);
    return res.status(500).json({ error: 'INTERNAL_ERROR' });
  }
});

/**
 * POST /api/auth/login — verify credentials and return a signed JWT.
 * Unknown email and wrong password return the same 401 body so the
 * endpoint does not reveal which accounts exist.
 */
router.post('/login', async (req, res) => {
  try {
    const { email, password } = req.body || {};
    if (!email || !password) return res.status(400).json({ error: 'MISSING_FIELDS' });
    const user = await User.findOne({ email });
    if (!user) return res.status(401).json({ error: 'INVALID_CREDENTIALS' });
    const ok = await bcrypt.compare(password, user.passwordHash);
    if (!ok) return res.status(401).json({ error: 'INVALID_CREDENTIALS' });
    return res.json({ token: signToken(user.id), user: { id: user.id, email: user.email } });
  } catch (e: any) {
    console.error(e);
    return res.status(500).json({ error: 'INTERNAL_ERROR' });
  }
});

export default router;

100
backend/src/routes/chat.ts Normal file
View File

@@ -0,0 +1,100 @@
import { Router, Request, Response } from 'express';
import { Conversation } from '../models/Conversation.js';
import { streamChat, oneShot, StreamChunk } from '../services/llmService.js';
import { auth, AuthRequest } from '../middleware/auth.js';
const router = Router();
router.post('/chat', auth(false), async (req: AuthRequest, res: Response) => {
const { modelId, messages, conversationId, save } = req.body || {};
try {
const text = await oneShot(modelId, messages);
let convId = conversationId;
if (save && req.userId) {
if (convId) {
await Conversation.findByIdAndUpdate(convId, { $push: { messages: { role: 'assistant', content: text } } });
} else {
const cleaned = (messages||[]).filter((m:any)=>m && typeof m.content==='string' && m.content.trim().length>0);
const title = (cleaned?.[0]?.content || text.slice(0,30)) || '新对话';
const conv = await Conversation.create({ userId: req.userId, modelId, title, messages: [...cleaned, { role: 'assistant', content: text }] });
convId = conv.id;
}
}
res.json({ text, conversationId: convId });
} catch (e:any) {
console.error(e);
res.status(500).json({ error: e.message });
}
});
/**
 * POST /api/chat/stream — Server-Sent Events streaming completion.
 * Emits `data:` frames containing either raw text or, for tagged chunks,
 * JSON of { kind: 'reasoning' | 'answer', text }. Terminates with
 * `data: [DONE]`; errors are reported inline as `data: [ERROR] ...`.
 */
router.post('/chat/stream', auth(false), async (req: AuthRequest, res: Response) => {
  const { modelId, messages, conversationId, save } = req.body || {};
  // Standard SSE headers; flushHeaders pushes them immediately so the client
  // can open the event stream before the first chunk arrives.
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.flushHeaders?.();
  console.log('[stream] start model=%s messages=%d conv=%s', modelId, (messages||[]).length, conversationId || 'new');
  try {
    // Accumulates only 'answer' chunks, so reasoning text is never persisted.
    let buffer = '';
    for await (const chunk of streamChat(modelId, messages) as AsyncGenerator<StreamChunk>) {
      if (chunk.done) {
        if (save && req.userId) {
          if (buffer.trim()) { // only persist when the model actually produced content
            if (conversationId) {
              await Conversation.findByIdAndUpdate(conversationId, { $push: { messages: { role: 'assistant', content: buffer } } });
            } else {
              // New conversation: keep only non-empty string messages, derive a title.
              const cleaned = (messages||[]).filter((m:any)=>m && typeof m.content==='string' && m.content.trim().length>0);
              const title = (cleaned?.[0]?.content || buffer.slice(0,30)) || '新对话';
              const conv = await Conversation.create({ userId: req.userId, modelId, title, messages: [...cleaned, { role: 'assistant', content: buffer }] });
              // Tell the client the id of the conversation that was just created.
              res.write('data: {"conversationId":"'+conv.id+'"}\n\n');
            }
          }
        }
        res.write('data: [DONE]\n\n');
      } else {
        if (chunk.kind === 'answer') buffer += chunk.text;
        // Tagged chunks go out as JSON, untagged ones as raw text. Newlines are
        // escaped because a literal newline would terminate the SSE data frame.
        // NOTE(review): bare '\r' in chunk text is not escaped — confirm upstream
        // chunks never contain carriage returns.
        const payload = chunk.kind ? JSON.stringify({ kind: chunk.kind, text: chunk.text }) : chunk.text;
        res.write('data: ' + payload.replace(/\n/g, '\\n') + '\n\n');
        (res as any).flush?.(); // flush if a compression middleware buffers writes
      }
    }
  } catch (e:any) {
    console.error(e);
    res.write('data: [ERROR] ' + (e.message || 'unknown') + '\n\n');
  } finally {
    console.log('[stream] end model=%s', modelId);
    res.end();
  }
});
router.post('/conversations', auth(), async (req: AuthRequest, res: Response) => {
try {
const { modelId, messages, title } = req.body;
const conv = await Conversation.create({ userId: req.userId, modelId, messages, title });
res.json(conv);
} catch (e:any) {
res.status(500).json({ error: e.message });
}
});
/**
 * GET /api/conversations/:id — fetch one conversation owned by the caller.
 * A malformed id makes findOne reject with a CastError; previously that
 * rejection escaped the async handler (Express 4 does not forward async
 * errors), leaving the request to hang. Treat CastError as 404.
 */
router.get('/conversations/:id', auth(), async (req: AuthRequest, res: Response) => {
  try {
    const conv = await Conversation.findOne({ _id: req.params.id, userId: req.userId });
    if (!conv) return res.status(404).json({ error: 'Not found' });
    res.json(conv);
  } catch (e: any) {
    if (e?.name === 'CastError') return res.status(404).json({ error: 'Not found' });
    res.status(500).json({ error: e.message });
  }
});
router.get('/conversations', auth(), async (req: AuthRequest, res: Response) => {
const list = await Conversation.find({ userId: req.userId }).sort({ updatedAt: -1 }).limit(50);
res.json(list);
});
router.delete('/conversations/:id', auth(), async (req: AuthRequest, res: Response) => {
try {
await Conversation.deleteOne({ _id: req.params.id, userId: req.userId });
res.json({ ok: true });
} catch (e:any) {
res.status(500).json({ error: e.message });
}
});
export default router;

View File

@@ -0,0 +1,75 @@
import OpenAI from 'openai';
/** Resolved credentials and endpoint for one upstream LLM provider. */
export interface ProviderConfig {
  // Both providers speak the OpenAI chat-completions wire protocol.
  name: 'openai' | 'deepseek';
  apiKey: string;
  baseURL: string;
}
function detectProvider(model: string): ProviderConfig {
if (/deepseek/i.test(model)) {
const apiKey = process.env.DEEPSEEK_API_KEY || '';
if (!apiKey) throw new Error('Missing DEEPSEEK_API_KEY');
return {
name: 'deepseek',
apiKey,
baseURL: process.env.DEEPSEEK_BASE_URL || 'https://api.deepseek.com'
};
}
const apiKey = process.env.OPENAI_API_KEY || '';
if (!apiKey) throw new Error('Missing OPENAI_API_KEY');
return {
name: 'openai',
apiKey,
baseURL: process.env.OPENAI_BASE_URL || process.env.OPENAI_API_BASE || 'https://api.openai.com/v1'
};
}
function buildClient(cfg: ProviderConfig) {
// 允许用户传入已经包含 /v1 的 baseURL避免重复 /v1/v1
let base = cfg.baseURL.replace(/\/$/, '');
if (!/\/v1$/i.test(base)) base += '/v1';
return new OpenAI({ apiKey: cfg.apiKey, baseURL: base });
}
/** One streamed increment; `kind` separates reasoning from answer text, `done` marks the end. */
export interface StreamChunk { text: string; done?: boolean; kind?: 'reasoning' | 'answer'; }

/**
 * Streams a chat completion for the provider matched from `model`.
 * Yields reasoning deltas (when the provider exposes them) tagged
 * 'reasoning', content deltas tagged 'answer', then a final
 * { text: '', done: true } sentinel. A system prompt is injected first
 * if the caller did not supply one.
 */
export async function *streamChat(model: string, messages: { role: 'user'|'assistant'|'system'; content: string }[]): AsyncGenerator<StreamChunk> {
  messages = ensureSystem(messages) as { role: 'user'|'assistant'|'system'; content: string }[];
  const cfg = detectProvider(model);
  const client = buildClient(cfg);
  const stream = await client.chat.completions.create({
    model,
    messages,
    stream: true,
    temperature: 0.7,
  });
  for await (const part of stream) {
    const choice = part.choices?.[0];
    // DeepSeek reasoning models put chain-of-thought in delta.reasoning_content
    // (or delta.reasoning); the SDK types don't declare it, hence the casts.
    const reasoning: any = (choice as any)?.delta?.reasoning_content || (choice as any)?.delta?.reasoning;
    if (reasoning) yield { text: reasoning, kind: 'reasoning' };
    const delta = choice?.delta?.content;
    if (delta) yield { text: delta, kind: 'answer' } as StreamChunk;
  }
  // Sentinel chunk lets consumers run their end-of-stream logic.
  yield { text: '', done: true } as StreamChunk;
}
export async function oneShot(model: string, messages: { role: 'user'|'assistant'|'system'; content: string }[]) {
messages = ensureSystem(messages) as { role: 'user'|'assistant'|'system'; content: string }[];
const cfg = detectProvider(model);
const client = buildClient(cfg);
const completion = await client.chat.completions.create({ model, messages, stream: false });
const first = completion.choices[0];
const reasoning: string | undefined = (first as any)?.message?.reasoning_content || (first as any)?.message?.reasoning;
const content = first?.message?.content || '';
return reasoning ? reasoning + '\n' + content : content;
}
// Fallback system prompt; overridable via the SYSTEM_PROMPT env var.
const DEFAULT_SYSTEM = process.env.SYSTEM_PROMPT || 'You are a helpful assistant.';

/**
 * Guarantees the outgoing message list starts with a system prompt.
 * Non-array input yields just the default system message; a list that
 * already contains a system message anywhere is returned untouched.
 * (Removed a stray debug console.log left over from development.)
 */
function ensureSystem(msgs: { role: any; content: string }[]) {
  if (!msgs || !Array.isArray(msgs)) return [{ role: 'system', content: DEFAULT_SYSTEM }];
  const hasSystem = msgs.some(m => m.role === 'system');
  return hasSystem ? msgs : [{ role: 'system', content: DEFAULT_SYSTEM }, ...msgs];
}

View File

@@ -0,0 +1,30 @@
import OpenAI from 'openai';
// NOTE(review): this module appears superseded by llmService.ts (which adds
// provider routing and reasoning support) — confirm whether it is still referenced.
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

/** One streamed increment of a completion; `done` marks the terminal chunk. */
export interface ChatCompletionChunk {
  text: string;
  done?: boolean;
}

/**
 * Streams a chat completion, yielding content deltas as they arrive and a
 * final { text: '', done: true } sentinel.
 */
export async function *streamChat(model: string, messages: { role: 'user'|'assistant'|'system'; content: string }[]) {
  // Use the newer responses API when the SDK version supports it; otherwise
  // fall back to chat.completions.
  const stream = await client.chat.completions.create({
    model,
    messages,
    stream: true,
    temperature: 0.7,
  });
  for await (const part of stream) {
    const delta = part.choices?.[0]?.delta?.content;
    if (delta) {
      yield { text: delta } as ChatCompletionChunk;
    }
  }
  yield { text: '', done: true };
}

/** Non-streaming single completion; returns the first choice's content or ''. */
export async function oneShot(model: string, messages: { role: 'user'|'assistant'|'system'; content: string }[]) {
  const completion = await client.chat.completions.create({ model, messages });
  return completion.choices[0]?.message?.content || '';
}

16
backend/tsconfig.json Normal file
View File

@@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "ESNext",
"moduleResolution": "Node",
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"skipLibCheck": true,
"outDir": "dist",
"types": ["node"],
"resolveJsonModule": true
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}