From a025b5cfd32189699bc19b8b9287623c2219b216 Mon Sep 17 00:00:00 2001
From: Matt Rubens
Date: Tue, 14 Jan 2025 15:20:26 -0500
Subject: [PATCH] Add the o1 model (#1246)

---
 src/api/providers/openai-native.ts | 1 +
 src/shared/api.ts                  | 8 ++++++++
 2 files changed, 9 insertions(+)

diff --git a/src/api/providers/openai-native.ts b/src/api/providers/openai-native.ts
index d11481add4..f91a90dbc5 100644
--- a/src/api/providers/openai-native.ts
+++ b/src/api/providers/openai-native.ts
@@ -24,6 +24,7 @@ export class OpenAiNativeHandler implements ApiHandler {
 
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		switch (this.getModel().id) {
+			case "o1":
 			case "o1-preview":
 			case "o1-mini": {
 				// o1 doesnt support streaming, non-1 temp, or system prompt
diff --git a/src/shared/api.ts b/src/shared/api.ts
index d87d13d272..52d974f725 100644
--- a/src/shared/api.ts
+++ b/src/shared/api.ts
@@ -311,6 +311,14 @@ export type OpenAiNativeModelId = keyof typeof openAiNativeModels
 export const openAiNativeDefaultModelId: OpenAiNativeModelId = "gpt-4o"
 export const openAiNativeModels = {
 	// don't support tool use yet
+	o1: {
+		maxTokens: 100_000,
+		contextWindow: 200_000,
+		supportsImages: true,
+		supportsPromptCache: false,
+		inputPrice: 15,
+		outputPrice: 60,
+	},
 	"o1-preview": {
 		maxTokens: 32_768,
 		contextWindow: 128_000,
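
The patch only adds the "o1" case label and the model metadata; the body of the switch case (noted in the existing comment as handling no streaming, no non-1 temperature, and no system prompt) is not shown. The following is a minimal sketch, not the handler's actual implementation, of how such a non-streaming o1 request could look with the official OpenAI Node SDK (v4). The function name `completeWithO1` and the `client` parameter are illustrative assumptions.

// Minimal sketch (assumption, not from this patch): a single non-streaming
// chat.completions.create call that respects the o1 constraints noted in the
// code comment above.
import OpenAI from "openai"

async function completeWithO1(client: OpenAI, systemPrompt: string, userText: string): Promise<string> {
	const response = await client.chat.completions.create({
		model: "o1",
		// o1 rejects the "system" role, so the system prompt is folded into a user turn.
		messages: [
			{ role: "user", content: systemPrompt },
			{ role: "user", content: userText },
		],
		// No `temperature` and no `stream: true`: o1 only accepts the default
		// temperature of 1 and does not support streamed responses.
	})
	return response.choices[0]?.message?.content ?? ""
}

// Example usage:
// const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })
// console.log(await completeWithO1(client, "You are a helpful assistant.", "Hello"))

A handler like OpenAiNativeHandler would then wrap this single response into its streaming interface (ApiStream) by yielding the full text as one chunk; that wrapping is outside the scope of this patch.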