chat/backend/src/services/openaiService.ts

import OpenAI from 'openai';

// Shared OpenAI client; the API key is read from the environment.
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// One streamed chunk: `text` carries the delta, `done` marks end of stream.
export interface ChatCompletionChunk {
  text: string;
  done?: boolean;
}

type ChatMessage = { role: 'user' | 'assistant' | 'system'; content: string };
export async function* streamChat(
  model: string,
  messages: ChatMessage[],
): AsyncGenerator<ChatCompletionChunk> {
  // Use the newer responses API if the installed SDK version supports it;
  // otherwise fall back to chat.completions.
  const stream = await client.chat.completions.create({
    model,
    messages,
    stream: true,
    temperature: 0.7,
  });
  for await (const part of stream) {
    const delta = part.choices?.[0]?.delta?.content;
    if (delta) {
      yield { text: delta } as ChatCompletionChunk;
    }
  }
  // Signal to the caller that the stream has finished.
  yield { text: '', done: true };
}
export async function oneShot(model: string, messages: ChatMessage[]) {
  // Non-streaming variant: return the complete assistant reply as a single string.
  const completion = await client.chat.completions.create({ model, messages });
  return completion.choices[0]?.message?.content || '';
}