feat: add OpenAI LLM provider
This commit is contained in:
packages/llm/src/providers/openai.ts — new file, 28 lines
@@ -0,0 +1,28 @@
|
||||
import OpenAI from "openai";
|
||||
import type { LLMMessage, LLMOptions } from "@codeboard/shared";
|
||||
import type { LLMProvider } from "./base.js";
|
||||
|
||||
export class OpenAIProvider implements LLMProvider {
|
||||
name = "openai";
|
||||
private client: OpenAI;
|
||||
private defaultModel: string;
|
||||
|
||||
constructor(apiKey: string, model?: string, baseUrl?: string) {
|
||||
this.client = new OpenAI({
|
||||
apiKey,
|
||||
baseURL: baseUrl,
|
||||
});
|
||||
this.defaultModel = model ?? "gpt-4o";
|
||||
}
|
||||
|
||||
async chat(messages: LLMMessage[], options?: LLMOptions): Promise<string> {
|
||||
const response = await this.client.chat.completions.create({
|
||||
model: options?.model ?? this.defaultModel,
|
||||
messages: messages.map((m) => ({ role: m.role, content: m.content })),
|
||||
temperature: options?.temperature ?? 0.3,
|
||||
max_tokens: options?.maxTokens ?? 4096,
|
||||
});
|
||||
|
||||
return response.choices[0]?.message?.content ?? "";
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user