import OpenAI from 'openai';

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

export interface ChatCompletionChunk {
  text: string;
  done?: boolean;
}

// Streams assistant output token-by-token as ChatCompletionChunk objects.
export async function* streamChat(
  model: string,
  messages: { role: 'user' | 'assistant' | 'system'; content: string }[],
) {
  // Use the newer responses API if the installed SDK version supports it;
  // this implementation falls back to chat.completions with streaming enabled.
  const stream = await client.chat.completions.create({
    model,
    messages,
    stream: true,
    temperature: 0.7,
  });

  for await (const part of stream) {
    const delta = part.choices?.[0]?.delta?.content;
    if (delta) {
      yield { text: delta } as ChatCompletionChunk;
    }
  }

  // Signal the end of the stream to consumers.
  yield { text: '', done: true };
}

// Non-streaming variant: returns the full assistant reply as a single string.
export async function oneShot(
  model: string,
  messages: { role: 'user' | 'assistant' | 'system'; content: string }[],
) {
  const completion = await client.chat.completions.create({ model, messages });
  return completion.choices[0]?.message?.content || '';
}
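
// Example consumer (a minimal sketch: the model name 'gpt-4o-mini' and the
// stdout-based printing are illustrative assumptions, not part of this module):
//
// async function demo() {
//   for await (const chunk of streamChat('gpt-4o-mini', [
//     { role: 'user', content: 'Say hello in one sentence.' },
//   ])) {
//     if (!chunk.done) process.stdout.write(chunk.text);
//   }
// }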