
Commit 25cd7cc
chore: Prettier fix formatting
saoudrizwan committed Jan 6, 2025
1 parent 9de7253 commit 25cd7cc
Showing 106 changed files with 1,629 additions and 5,283 deletions.
2 changes: 1 addition & 1 deletion .prettierrc.json
@@ -1,7 +1,7 @@
 {
 	"tabWidth": 4,
 	"useTabs": true,
-	"printWidth": 120,
+	"printWidth": 130,
 	"semi": false,
 	"bracketSameLine": true
 }
6 changes: 1 addition & 5 deletions .vscode/extensions.json
@@ -1,9 +1,5 @@
 {
 	// See http://go.microsoft.com/fwlink/?LinkId=827846
 	// for the documentation about the extensions.json format
-	"recommendations": [
-		"dbaeumer.vscode-eslint",
-		"connor4312.esbuild-problem-matchers",
-		"ms-vscode.extension-test-runner"
-	]
+	"recommendations": ["dbaeumer.vscode-eslint", "connor4312.esbuild-problem-matchers", "ms-vscode.extension-test-runner"]
 }
6 changes: 1 addition & 5 deletions .vscode/tasks.json
@@ -5,11 +5,7 @@
"tasks": [
{
"label": "watch",
"dependsOn": [
"npm: build:webview",
"npm: watch:tsc",
"npm: watch:esbuild"
],
"dependsOn": ["npm: build:webview", "npm: watch:tsc", "npm: watch:esbuild"],
"presentation": {
"reveal": "never"
},
27 changes: 5 additions & 22 deletions esbuild.js
@@ -18,9 +18,7 @@ const esbuildProblemMatcherPlugin = {
 		build.onEnd((result) => {
 			result.errors.forEach(({ text, location }) => {
 				console.error(`✘ [ERROR] ${text}`)
-				console.error(
-					`    ${location.file}:${location.line}:${location.column}:`,
-				)
+				console.error(`    ${location.file}:${location.line}:${location.column}:`)
 			})
 			console.log("[watch] build finished")
 		})
@@ -32,26 +30,14 @@ const copyWasmFiles = {
 	setup(build) {
 		build.onEnd(() => {
 			// tree sitter
-			const sourceDir = path.join(
-				__dirname,
-				"node_modules",
-				"web-tree-sitter",
-			)
+			const sourceDir = path.join(__dirname, "node_modules", "web-tree-sitter")
 			const targetDir = path.join(__dirname, "dist")

 			// Copy tree-sitter.wasm
-			fs.copyFileSync(
-				path.join(sourceDir, "tree-sitter.wasm"),
-				path.join(targetDir, "tree-sitter.wasm"),
-			)
+			fs.copyFileSync(path.join(sourceDir, "tree-sitter.wasm"), path.join(targetDir, "tree-sitter.wasm"))

 			// Copy language-specific WASM files
-			const languageWasmDir = path.join(
-				__dirname,
-				"node_modules",
-				"tree-sitter-wasms",
-				"out",
-			)
+			const languageWasmDir = path.join(__dirname, "node_modules", "tree-sitter-wasms", "out")
 			const languages = [
 				"typescript",
 				"tsx",
@@ -70,10 +56,7 @@ const copyWasmFiles = {

 			languages.forEach((lang) => {
 				const filename = `tree-sitter-${lang}.wasm`
-				fs.copyFileSync(
-					path.join(languageWasmDir, filename),
-					path.join(targetDir, filename),
-				)
+				fs.copyFileSync(path.join(languageWasmDir, filename), path.join(targetDir, filename))
 			})
 		})
 	},
5 changes: 1 addition & 4 deletions src/api/index.ts
@@ -13,10 +13,7 @@ import { ApiStream } from "./transform/stream"
 import { DeepSeekHandler } from "./providers/deepseek"

 export interface ApiHandler {
-	createMessage(
-		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[],
-	): ApiStream
+	createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream
 	getModel(): { id: string; info: ModelInfo }
 }

58 changes: 18 additions & 40 deletions src/api/providers/anthropic.ts
@@ -1,12 +1,6 @@
 import { Anthropic } from "@anthropic-ai/sdk"
 import { Stream as AnthropicStream } from "@anthropic-ai/sdk/streaming"
-import {
-	anthropicDefaultModelId,
-	AnthropicModelId,
-	anthropicModels,
-	ApiHandlerOptions,
-	ModelInfo,
-} from "../../shared/api"
+import { anthropicDefaultModelId, AnthropicModelId, anthropicModels, ApiHandlerOptions, ModelInfo } from "../../shared/api"
 import { ApiHandler } from "../index"
 import { ApiStream } from "../transform/stream"

@@ -22,10 +16,7 @@ export class AnthropicHandler implements ApiHandler {
 		})
 	}

-	async *createMessage(
-		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[],
-	): ApiStream {
+	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		let stream: AnthropicStream<Anthropic.Beta.PromptCaching.Messages.RawPromptCachingBetaMessageStreamEvent>
 		const modelId = this.getModel().id
 		switch (modelId) {
@@ -38,14 +29,11 @@ export class AnthropicHandler implements ApiHandler {
 			The latest message will be the new user message, one before will be the assistant message from a previous request, and the user message before that will be a previously cached user message. So we need to mark the latest user message as ephemeral to cache it for the next request, and mark the second to last user message as ephemeral to let the server know the last message to retrieve from the cache for the current request.
 			*/
 			const userMsgIndices = messages.reduce(
-				(acc, msg, index) =>
-					msg.role === "user" ? [...acc, index] : acc,
+				(acc, msg, index) => (msg.role === "user" ? [...acc, index] : acc),
 				[] as number[],
 			)
-			const lastUserMsgIndex =
-				userMsgIndices[userMsgIndices.length - 1] ?? -1
-			const secondLastMsgUserIndex =
-				userMsgIndices[userMsgIndices.length - 2] ?? -1
+			const lastUserMsgIndex = userMsgIndices[userMsgIndices.length - 1] ?? -1
+			const secondLastMsgUserIndex = userMsgIndices[userMsgIndices.length - 2] ?? -1
 			stream = await this.client.beta.promptCaching.messages.create(
 				{
 					model: modelId,
@@ -59,10 +47,7 @@ export class AnthropicHandler implements ApiHandler {
 						},
 					], // setting cache breakpoint for system prompt so new tasks can reuse it
 					messages: messages.map((message, index) => {
-						if (
-							index === lastUserMsgIndex ||
-							index === secondLastMsgUserIndex
-						) {
+						if (index === lastUserMsgIndex || index === secondLastMsgUserIndex) {
 							return {
 								...message,
 								content:
@@ -76,19 +61,15 @@ export class AnthropicHandler implements ApiHandler {
 											},
 										},
 									]
-								: message.content.map(
-										(content, contentIndex) =>
-											contentIndex ===
-											message.content.length -
-												1
-												? {
-														...content,
-														cache_control:
-															{
-																type: "ephemeral",
-															},
-													}
-												: content,
+								: message.content.map((content, contentIndex) =>
+										contentIndex === message.content.length - 1
+											? {
+													...content,
+													cache_control: {
+														type: "ephemeral",
+													},
+												}
+											: content,
 									),
 							}
 						}
@@ -110,8 +91,7 @@ export class AnthropicHandler implements ApiHandler {
case "claude-3-haiku-20240307":
return {
headers: {
"anthropic-beta":
"prompt-caching-2024-07-31",
"anthropic-beta": "prompt-caching-2024-07-31",
},
}
default:
@@ -145,10 +125,8 @@ export class AnthropicHandler implements ApiHandler {
type: "usage",
inputTokens: usage.input_tokens || 0,
outputTokens: usage.output_tokens || 0,
cacheWriteTokens:
usage.cache_creation_input_tokens || undefined,
cacheReadTokens:
usage.cache_read_input_tokens || undefined,
cacheWriteTokens: usage.cache_creation_input_tokens || undefined,
cacheReadTokens: usage.cache_read_input_tokens || undefined,
}
break
case "message_delta":
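
Note: the cache-breakpoint logic reformatted above is easier to read outside the diff. Here is a minimal TypeScript sketch of the same idea, marking the last two user messages with an ephemeral cache_control breakpoint. The helper name markCacheBreakpoints and the simplified structural types are illustrative only and not part of this commit; the committed code uses the Anthropic SDK's prompt-caching beta types.

// Minimal structural types for this sketch; the committed code uses the
// Anthropic SDK's prompt-caching beta types instead.
type CacheControl = { type: "ephemeral" }
type ContentBlock = { type: string; text?: string; cache_control?: CacheControl }
type MessageParam = { role: "user" | "assistant"; content: string | ContentBlock[] }

// Hypothetical helper: mark the last two user messages with an ephemeral
// cache breakpoint. The last one caches the new prefix for the next request;
// the second-to-last tells the server where the previously cached prefix ends.
function markCacheBreakpoints(messages: MessageParam[]): MessageParam[] {
	// Collect the indices of all user messages.
	const userMsgIndices = messages.reduce(
		(acc, msg, index) => (msg.role === "user" ? [...acc, index] : acc),
		[] as number[],
	)
	const lastUserMsgIndex = userMsgIndices[userMsgIndices.length - 1] ?? -1
	const secondLastMsgUserIndex = userMsgIndices[userMsgIndices.length - 2] ?? -1

	return messages.map((message, index) => {
		if (index !== lastUserMsgIndex && index !== secondLastMsgUserIndex) {
			return message
		}
		// String content becomes a single text block carrying the breakpoint;
		// array content gets the breakpoint on its final block.
		const content: ContentBlock[] =
			typeof message.content === "string"
				? [{ type: "text", text: message.content, cache_control: { type: "ephemeral" } }]
				: message.content.map((block, i) =>
						i === message.content.length - 1 ? { ...block, cache_control: { type: "ephemeral" } } : block,
					)
		return { ...message, content }
	})
}
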
25 changes: 5 additions & 20 deletions src/api/providers/bedrock.ts
@@ -1,13 +1,7 @@
 import AnthropicBedrock from "@anthropic-ai/bedrock-sdk"
 import { Anthropic } from "@anthropic-ai/sdk"
 import { ApiHandler } from "../"
-import {
-	ApiHandlerOptions,
-	bedrockDefaultModelId,
-	BedrockModelId,
-	bedrockModels,
-	ModelInfo,
-} from "../../shared/api"
+import { ApiHandlerOptions, bedrockDefaultModelId, BedrockModelId, bedrockModels, ModelInfo } from "../../shared/api"
 import { ApiStream } from "../transform/stream"

 // https://docs.anthropic.com/en/api/claude-on-amazon-bedrock
@@ -20,26 +14,17 @@ export class AwsBedrockHandler implements ApiHandler {
 		this.client = new AnthropicBedrock({
 			// Authenticate by either providing the keys below or use the default AWS credential providers, such as
 			// using ~/.aws/credentials or the "AWS_SECRET_ACCESS_KEY" and "AWS_ACCESS_KEY_ID" environment variables.
-			...(this.options.awsAccessKey
-				? { awsAccessKey: this.options.awsAccessKey }
-				: {}),
-			...(this.options.awsSecretKey
-				? { awsSecretKey: this.options.awsSecretKey }
-				: {}),
-			...(this.options.awsSessionToken
-				? { awsSessionToken: this.options.awsSessionToken }
-				: {}),
+			...(this.options.awsAccessKey ? { awsAccessKey: this.options.awsAccessKey } : {}),
+			...(this.options.awsSecretKey ? { awsSecretKey: this.options.awsSecretKey } : {}),
+			...(this.options.awsSessionToken ? { awsSessionToken: this.options.awsSessionToken } : {}),

 			// awsRegion changes the aws region to which the request is made. By default, we read AWS_REGION,
 			// and if that's not present, we default to us-east-1. Note that we do not read ~/.aws/config for the region.
 			awsRegion: this.options.awsRegion,
 		})
 	}

-	async *createMessage(
-		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[],
-	): ApiStream {
+	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		// cross region inference requires prefixing the model id with the region
 		let modelId: string
 		if (this.options.awsUseCrossRegionInference) {
18 changes: 3 additions & 15 deletions src/api/providers/deepseek.ts
@@ -1,13 +1,7 @@
 import { Anthropic } from "@anthropic-ai/sdk"
 import OpenAI from "openai"
 import { ApiHandler } from "../"
-import {
-	ApiHandlerOptions,
-	DeepSeekModelId,
-	ModelInfo,
-	deepSeekDefaultModelId,
-	deepSeekModels,
-} from "../../shared/api"
+import { ApiHandlerOptions, DeepSeekModelId, ModelInfo, deepSeekDefaultModelId, deepSeekModels } from "../../shared/api"
 import { convertToOpenAiMessages } from "../transform/openai-format"
 import { ApiStream } from "../transform/stream"

@@ -23,18 +17,12 @@ export class DeepSeekHandler implements ApiHandler {
 		})
 	}

-	async *createMessage(
-		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[],
-	): ApiStream {
+	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		const stream = await this.client.chat.completions.create({
 			model: this.getModel().id,
 			max_completion_tokens: this.getModel().info.maxTokens,
 			temperature: 0,
-			messages: [
-				{ role: "system", content: systemPrompt },
-				...convertToOpenAiMessages(messages),
-			],
+			messages: [{ role: "system", content: systemPrompt }, ...convertToOpenAiMessages(messages)],
 			stream: true,
 			stream_options: { include_usage: true },
 		})
13 changes: 2 additions & 11 deletions src/api/providers/gemini.ts
@@ -1,13 +1,7 @@
 import { Anthropic } from "@anthropic-ai/sdk"
 import { GoogleGenerativeAI } from "@google/generative-ai"
 import { ApiHandler } from "../"
-import {
-	ApiHandlerOptions,
-	geminiDefaultModelId,
-	GeminiModelId,
-	geminiModels,
-	ModelInfo,
-} from "../../shared/api"
+import { ApiHandlerOptions, geminiDefaultModelId, GeminiModelId, geminiModels, ModelInfo } from "../../shared/api"
 import { convertAnthropicMessageToGemini } from "../transform/gemini-format"
 import { ApiStream } from "../transform/stream"

@@ -23,10 +17,7 @@ export class GeminiHandler implements ApiHandler {
 		this.client = new GoogleGenerativeAI(options.geminiApiKey)
 	}

-	async *createMessage(
-		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[],
-	): ApiStream {
+	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		const model = this.client.getGenerativeModel({
 			model: this.getModel().id,
 			systemInstruction: systemPrompt,
15 changes: 3 additions & 12 deletions src/api/providers/lmstudio.ts
@@ -1,11 +1,7 @@
 import { Anthropic } from "@anthropic-ai/sdk"
 import OpenAI from "openai"
 import { ApiHandler } from "../"
-import {
-	ApiHandlerOptions,
-	ModelInfo,
-	openAiModelInfoSaneDefaults,
-} from "../../shared/api"
+import { ApiHandlerOptions, ModelInfo, openAiModelInfoSaneDefaults } from "../../shared/api"
 import { convertToOpenAiMessages } from "../transform/openai-format"
 import { ApiStream } from "../transform/stream"

@@ -16,17 +12,12 @@ export class LmStudioHandler implements ApiHandler {
 	constructor(options: ApiHandlerOptions) {
 		this.options = options
 		this.client = new OpenAI({
-			baseURL:
-				(this.options.lmStudioBaseUrl || "http://localhost:1234") +
-				"/v1",
+			baseURL: (this.options.lmStudioBaseUrl || "http://localhost:1234") + "/v1",
 			apiKey: "noop",
 		})
 	}

-	async *createMessage(
-		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[],
-	): ApiStream {
+	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
 			{ role: "system", content: systemPrompt },
 			...convertToOpenAiMessages(messages),
15 changes: 3 additions & 12 deletions src/api/providers/ollama.ts
@@ -1,11 +1,7 @@
 import { Anthropic } from "@anthropic-ai/sdk"
 import OpenAI from "openai"
 import { ApiHandler } from "../"
-import {
-	ApiHandlerOptions,
-	ModelInfo,
-	openAiModelInfoSaneDefaults,
-} from "../../shared/api"
+import { ApiHandlerOptions, ModelInfo, openAiModelInfoSaneDefaults } from "../../shared/api"
 import { convertToOpenAiMessages } from "../transform/openai-format"
 import { ApiStream } from "../transform/stream"

@@ -16,17 +12,12 @@ export class OllamaHandler implements ApiHandler {
 	constructor(options: ApiHandlerOptions) {
 		this.options = options
 		this.client = new OpenAI({
-			baseURL:
-				(this.options.ollamaBaseUrl || "http://localhost:11434") +
-				"/v1",
+			baseURL: (this.options.ollamaBaseUrl || "http://localhost:11434") + "/v1",
 			apiKey: "ollama",
 		})
 	}

-	async *createMessage(
-		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[],
-	): ApiStream {
+	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
 			{ role: "system", content: systemPrompt },
 			...convertToOpenAiMessages(messages),
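
Note: the lmstudio.ts and ollama.ts diffs above share one pattern: each handler wraps the OpenAI SDK around a local OpenAI-compatible server, appending "/v1" to a configurable base URL and passing a placeholder API key that the local server ignores. A minimal sketch of that pattern follows, assuming only the defaults visible in the diffs; makeLocalClient is a hypothetical helper, not part of this commit.

import OpenAI from "openai"

// Sketch of the shared local-provider pattern: point the OpenAI SDK at a
// local OpenAI-compatible server. The server ignores the API key, but the
// SDK requires some value to be set.
function makeLocalClient(baseUrl: string, placeholderKey: string): OpenAI {
	return new OpenAI({
		baseURL: baseUrl + "/v1",
		apiKey: placeholderKey,
	})
}

// Usage mirroring the defaults in lmstudio.ts and ollama.ts:
const lmStudio = makeLocalClient("http://localhost:1234", "noop")
const ollama = makeLocalClient("http://localhost:11434", "ollama")
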