
Commit aa92192

feat: Support Gemini via Vertex AI

Note: I needed to use a forked version of web-auth-library, because the upstream package only ships ESM.

1 parent 3123a1a

8 files changed: +232 -26 lines changed

package.json (+3)

@@ -43,5 +43,8 @@
   "packageManager": "[email protected]",
   "dependencies": {
     "puppeteer": "^19.7.2"
+  },
+  "resolutions": {
+    "web-auth-library": "getappmap/web-auth-library#v1.0.3-cjs"
   }
 }

packages/cli/src/cmds/index/aiEnvVar.ts (+6 -1)

@@ -1,4 +1,9 @@
-export const AI_KEY_ENV_VARS = ['OPENAI_API_KEY', 'AZURE_OPENAI_API_KEY', 'ANTHROPIC_API_KEY'];
+export const AI_KEY_ENV_VARS = [
+  'GOOGLE_WEB_CREDENTIALS',
+  'OPENAI_API_KEY',
+  'AZURE_OPENAI_API_KEY',
+  'ANTHROPIC_API_KEY',
+];
 
 export default function detectAIEnvVar(): string | undefined {
   return Object.keys(process.env).find((key) => AI_KEY_ENV_VARS.includes(key));
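
With GOOGLE_WEB_CREDENTIALS added to the list, detectAIEnvVar() now reports a Vertex AI credential as well. A minimal sketch of the behavior (the credential value is illustrative, and the import path assumes the caller sits next to aiEnvVar.ts):

import detectAIEnvVar, { AI_KEY_ENV_VARS } from './aiEnvVar';

// Illustrative: pretend only a Vertex AI service-account credential is exported.
process.env.GOOGLE_WEB_CREDENTIALS = '{"type":"service_account"}';

console.log(AI_KEY_ENV_VARS.includes('GOOGLE_WEB_CREDENTIALS')); // true
console.log(detectAIEnvVar()); // 'GOOGLE_WEB_CREDENTIALS' (another listed key may win if several are set)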

packages/cli/src/cmds/index/index.ts (+1 -2)

@@ -24,8 +24,7 @@ import LocalNavie from '../../rpc/explain/navie/navie-local';
 import RemoteNavie from '../../rpc/explain/navie/navie-remote';
 import { InteractionEvent } from '@appland/navie/dist/interaction-history';
 import { update } from '../../rpc/file/update';
-
-const AI_KEY_ENV_VARS = ['OPENAI_API_KEY', 'AZURE_OPENAI_API_KEY'];
+import { AI_KEY_ENV_VARS } from './aiEnvVar';
 
 export const command = 'index';
 export const describe =

packages/cli/src/rpc/llmConfiguration.ts (+7 -2)

@@ -47,9 +47,14 @@ function openAIBaseURL(): string | undefined {
   return baseUrl;
 }
 
+const DEFAULT_BASE_URLS = {
+  anthropic: 'https://api.anthropic.com/v1/',
+  'vertex-ai': 'https://googleapis.com',
+  openai: undefined,
+} as const;
+
 export function getLLMConfiguration(): LLMConfiguration {
-  const baseUrl =
-    SELECTED_BACKEND === 'anthropic' ? 'https://api.anthropic.com/v1/' : openAIBaseURL();
+  const baseUrl = (SELECTED_BACKEND && DEFAULT_BASE_URLS[SELECTED_BACKEND]) ?? openAIBaseURL();
 
   return {
     baseUrl,
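
The change above replaces the Anthropic-only ternary with a lookup table, falling back to openAIBaseURL() when the selected backend has no fixed default (only openai in the table). A standalone sketch of the fallback, with openAIBaseURL() stubbed against an environment variable purely for illustration (the real helper is defined earlier in llmConfiguration.ts and may behave differently):

const DEFAULT_BASE_URLS = {
  anthropic: 'https://api.anthropic.com/v1/',
  'vertex-ai': 'https://googleapis.com',
  openai: undefined,
} as const;

type Backend = keyof typeof DEFAULT_BASE_URLS;

// Stand-in for the real helper; assumption for this sketch only.
const openAIBaseURL = (): string | undefined => process.env.OPENAI_BASE_URL;

function baseUrlFor(backend: Backend): string | undefined {
  return DEFAULT_BASE_URLS[backend] ?? openAIBaseURL();
}

// baseUrlFor('vertex-ai') -> 'https://googleapis.com'
// baseUrlFor('anthropic') -> 'https://api.anthropic.com/v1/'
// baseUrlFor('openai')    -> whatever openAIBaseURL() returns, possibly undefined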

packages/navie/package.json (+1)

@@ -41,6 +41,7 @@
   "dependencies": {
     "@langchain/anthropic": "^0.3.1",
     "@langchain/core": "^0.2.27",
+    "@langchain/google-vertexai-web": "^0.1.0",
     "@langchain/openai": "^0.2.7",
     "fast-xml-parser": "^4.4.0",
     "js-yaml": "^4.1.0",
@@ -1,5 +1,6 @@
 import { warn } from 'node:console';
 
+import GoogleVertexAICompletionService from './google-vertexai-completion-service';
 import OpenAICompletionService from './openai-completion-service';
 import AnthropicCompletionService from './anthropic-completion-service';
 import CompletionService from './completion-service';
@@ -10,47 +11,43 @@ interface Options {
   modelName: string;
   temperature: number;
   trajectory: Trajectory;
+  backend?: Backend;
 }
 
-type Backend = 'anthropic' | 'openai';
+const BACKENDS = {
+  anthropic: AnthropicCompletionService,
+  openai: OpenAICompletionService,
+  'vertex-ai': GoogleVertexAICompletionService,
+} as const;
 
-function defaultBackend(): Backend {
-  return 'ANTHROPIC_API_KEY' in process.env ? 'anthropic' : 'openai';
+type Backend = keyof typeof BACKENDS;
+
+function defaultBackend(): Backend | undefined {
+  if ('ANTHROPIC_API_KEY' in process.env) return 'anthropic';
+  if ('GOOGLE_WEB_CREDENTIALS' in process.env) return 'vertex-ai';
+  if ('OPENAI_API_KEY' in process.env) return 'openai';
 }
 
 function environmentBackend(): Backend | undefined {
   switch (process.env.APPMAP_NAVIE_COMPLETION_BACKEND) {
     case 'anthropic':
     case 'openai':
+    case 'vertex-ai':
       return process.env.APPMAP_NAVIE_COMPLETION_BACKEND;
     default:
       return undefined;
   }
 }
 
-export const SELECTED_BACKEND: Backend = environmentBackend() ?? defaultBackend();
+export const SELECTED_BACKEND: Backend = environmentBackend() ?? defaultBackend() ?? 'openai';
 
 export default function createCompletionService({
   modelName,
   temperature,
   trajectory,
+  backend = SELECTED_BACKEND,
 }: Options): CompletionService {
-  const backend = environmentBackend() ?? defaultBackend();
   const messageTokenReducerService = new MessageTokenReducerService();
-  if (backend === 'anthropic') {
-    warn('Using Anthropic AI backend');
-    return new AnthropicCompletionService(
-      modelName,
-      temperature,
-      trajectory,
-      messageTokenReducerService
-    );
-  }
-  warn('Using OpenAI backend');
-  return new OpenAICompletionService(
-    modelName,
-    temperature,
-    trajectory,
-    messageTokenReducerService
-  );
+  warn(`Using completion service ${backend}`);
+  return new BACKENDS[backend](modelName, temperature, trajectory, messageTokenReducerService);
 }
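
This change swaps the if/else chain for a constructor lookup table keyed by backend name, and GOOGLE_WEB_CREDENTIALS now selects vertex-ai by default (an explicit APPMAP_NAVIE_COMPLETION_BACKEND still wins). A self-contained sketch of the same dispatch pattern, using placeholder classes rather than the real completion services:

// Placeholder services, purely illustrative; the real constructors take more arguments.
class OpenAIStub { constructor(public readonly model: string) {} }
class AnthropicStub { constructor(public readonly model: string) {} }
class VertexAIStub { constructor(public readonly model: string) {} }

const BACKENDS = {
  openai: OpenAIStub,
  anthropic: AnthropicStub,
  'vertex-ai': VertexAIStub,
} as const;

type Backend = keyof typeof BACKENDS;

function defaultBackend(): Backend | undefined {
  if ('ANTHROPIC_API_KEY' in process.env) return 'anthropic';
  if ('GOOGLE_WEB_CREDENTIALS' in process.env) return 'vertex-ai';
  if ('OPENAI_API_KEY' in process.env) return 'openai';
  return undefined;
}

function createService(model: string, backend: Backend = defaultBackend() ?? 'openai') {
  // Indexing the constructor table replaces the old if/else chain.
  return new BACKENDS[backend](model);
}

console.log(createService('gemini-1.5-pro-002', 'vertex-ai') instanceof VertexAIStub); // true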
@@ -0,0 +1,136 @@
+import { warn } from 'node:console';
+import { isNativeError } from 'node:util/types';
+
+import { ChatVertexAI, type ChatVertexAIInput } from '@langchain/google-vertexai-web';
+import { zodResponseFormat } from 'openai/helpers/zod';
+import { z } from 'zod';
+
+import Trajectory from '../lib/trajectory';
+import Message from '../message';
+import CompletionService, {
+  CompleteOptions,
+  Completion,
+  CompletionRetries,
+  CompletionRetryDelay,
+  convertToMessage,
+  mergeSystemMessages,
+  Usage,
+} from './completion-service';
+
+export default class GoogleVertexAICompletionService implements CompletionService {
+  constructor(
+    public readonly modelName: string,
+    public readonly temperature: number,
+    private trajectory: Trajectory
+  ) {}
+
+  // Construct a model with non-default options. There doesn't seem to be a way to configure
+  // the model parameters at invocation time like with OpenAI.
+  private buildModel(options?: ChatVertexAIInput): ChatVertexAI {
+    return new ChatVertexAI({
+      model: this.modelName,
+      temperature: this.temperature,
+      streaming: true,
+      ...options,
+    });
+  }
+
+  get miniModelName(): string {
+    const miniModel = process.env.APPMAP_NAVIE_MINI_MODEL;
+    return miniModel ?? 'gemini-1.5-flash-002';
+  }
+
+  // Request a JSON object with a given JSON schema.
+  async json<Schema extends z.ZodType>(
+    messages: Message[],
+    schema: Schema,
+    options?: CompleteOptions
+  ): Promise<z.infer<Schema> | undefined> {
+    const model = this.buildModel({
+      ...options,
+      streaming: false,
+      responseMimeType: 'application/json',
+    });
+    const sentMessages = mergeSystemMessages([
+      ...messages,
+      {
+        role: 'system',
+        content: `Use the following JSON schema for your response:\n\n${JSON.stringify(
+          zodResponseFormat(schema, 'requestedObject').json_schema.schema,
+          null,
+          2
+        )}`,
+      },
+    ]);
+
+    for (const message of sentMessages) this.trajectory.logSentMessage(message);
+
+    const response = await model.invoke(sentMessages.map(convertToMessage));
+
+    this.trajectory.logReceivedMessage({
+      role: 'assistant',
+      content: JSON.stringify(response),
+    });
+
+    const sanitizedContent = response.content.toString().replace(/^`{3,}[^\s]*?$/gm, '');
+    const parsed = JSON.parse(sanitizedContent) as unknown;
+    schema.parse(parsed);
+    return parsed;
+  }
+
+  async *complete(messages: readonly Message[], options?: { temperature?: number }): Completion {
+    const usage = new Usage();
+    const model = this.buildModel(options);
+    const sentMessages: Message[] = mergeSystemMessages(messages);
+    const tokens = new Array<string>();
+    for (const message of sentMessages) this.trajectory.logSentMessage(message);
+
+    const maxAttempts = CompletionRetries;
+    for (let attempt = 0; attempt < maxAttempts; attempt += 1) {
+      try {
+        // eslint-disable-next-line no-await-in-loop
+        const response = await model.stream(sentMessages.map(convertToMessage));
+
+        // eslint-disable-next-line @typescript-eslint/naming-convention, no-await-in-loop
+        for await (const { content, usage_metadata } of response) {
+          yield content.toString();
+          tokens.push(content.toString());
+          if (usage_metadata) {
+            usage.promptTokens += usage_metadata.input_tokens;
+            usage.completionTokens += usage_metadata.output_tokens;
+          }
+        }
+
+        this.trajectory.logReceivedMessage({
+          role: 'assistant',
+          content: tokens.join(''),
+        });
+
+        break;
+      } catch (cause) {
+        if (attempt < maxAttempts - 1 && tokens.length === 0) {
+          const nextAttempt = CompletionRetryDelay * 2 ** attempt;
+          warn(`Received ${JSON.stringify(cause)}, retrying in ${nextAttempt}ms`);
+          await new Promise<void>((resolve) => {
+            setTimeout(resolve, nextAttempt);
+          });
+          continue;
+        }
+        throw new Error(
+          `Failed to complete after ${attempt + 1} attempt(s): ${errorMessage(cause)}`,
+          {
+            cause,
+          }
+        );
+      }
+    }
+
+    warn(usage.toString());
+    return usage;
+  }
+}
+
+function errorMessage(err: unknown): string {
+  if (isNativeError(err)) return err.cause ? errorMessage(err.cause) : err.message;
+  return String(err);
+}
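
A hedged usage sketch (not part of the commit) of the new service's streaming path. It assumes a Trajectory instance is available, that Message objects carry role/content fields as used in the diff above, that the demo file sits next to the new module, and that GOOGLE_WEB_CREDENTIALS holds a service-account credential so ChatVertexAI can authenticate; the model name and temperature are illustrative.

import GoogleVertexAICompletionService from './google-vertexai-completion-service';
import Trajectory from '../lib/trajectory';
import Message from '../message';

async function streamAnswer(trajectory: Trajectory): Promise<void> {
  const service = new GoogleVertexAICompletionService('gemini-1.5-pro-002', 0.2, trajectory);

  const messages: Message[] = [
    { role: 'system', content: 'You are Navie, an AppMap assistant.' },
    { role: 'user', content: 'Explain what this commit changes.' },
  ];

  // complete() is an async generator; tokens arrive as they stream from Vertex AI.
  for await (const token of service.complete(messages)) process.stdout.write(token);
}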

yarn.lock (+60)

@@ -456,6 +456,7 @@ __metadata:
   dependencies:
     "@langchain/anthropic": ^0.3.1
     "@langchain/core": ^0.2.27
+    "@langchain/google-vertexai-web": ^0.1.0
     "@langchain/openai": ^0.2.7
     "@tsconfig/node-lts": ^20.1.3
     "@types/jest": ^29.4.1
@@ -7038,6 +7039,41 @@
   languageName: node
   linkType: hard

+"@langchain/google-common@npm:~0.1.0":
+  version: 0.1.1
+  resolution: "@langchain/google-common@npm:0.1.1"
+  dependencies:
+    uuid: ^10.0.0
+    zod-to-json-schema: ^3.22.4
+  peerDependencies:
+    "@langchain/core": ">=0.2.21 <0.4.0"
+  checksum: e460a08eaf5e6902c3cb7e8deb9edddcdb46c6bc38657ee1050d05ab5f17bf864bf298a9f00cc41e2824f8c072d79c1dca9b84a7ce64ebcf5a5357af14f5b9d9
+  languageName: node
+  linkType: hard
+
+"@langchain/google-vertexai-web@npm:^0.1.0":
+  version: 0.1.0
+  resolution: "@langchain/google-vertexai-web@npm:0.1.0"
+  dependencies:
+    "@langchain/google-webauth": ~0.1.0
+  peerDependencies:
+    "@langchain/core": ">=0.2.21 <0.4.0"
+  checksum: 8c32499e4070ddf28de26e3e4354c60303921e0be84aa68bbcbbeecd5e79e78354fb940708dcfc94efbc67f51893e51039288d78418ec00ec3f64a6cb1e5b20e
+  languageName: node
+  linkType: hard
+
+"@langchain/google-webauth@npm:~0.1.0":
+  version: 0.1.0
+  resolution: "@langchain/google-webauth@npm:0.1.0"
+  dependencies:
+    "@langchain/google-common": ~0.1.0
+    web-auth-library: ^1.0.3
+  peerDependencies:
+    "@langchain/core": ">=0.2.21 <0.4.0"
+  checksum: 90d7c04f95e9950ec5fb39a779352f145efa319d2003564b82a183809ef92d64f8f878999e5cb9c75b1bfda83e38c9b650946c928b1d137dd8bf0bebbaddca74
+  languageName: node
+  linkType: hard
+
 "@langchain/openai@npm:>=0.1.0 <0.3.0, @langchain/openai@npm:^0.2.7":
   version: 0.2.7
   resolution: "@langchain/openai@npm:0.2.7"
@@ -28406,6 +28442,13 @@
   languageName: node
   linkType: hard

+"jose@npm:>= 4.12.0 < 5.0.0":
+  version: 4.15.9
+  resolution: "jose@npm:4.15.9"
+  checksum: 41abe1c99baa3cf8a78ebbf93da8f8e50e417b7a26754c4afa21865d87527b8ac2baf66de2c5f6accc3f7d7158658dae7364043677236ea1d07895b040097f15
+  languageName: node
+  linkType: hard
+
 "joycon@npm:^3.0.1":
   version: 3.1.1
   resolution: "joycon@npm:3.1.1"
@@ -36925,6 +36968,13 @@ [email protected]:
   languageName: node
   linkType: hard

+"rfc4648@npm:^1.5.2":
+  version: 1.5.3
+  resolution: "rfc4648@npm:1.5.3"
+  checksum: 19c81d502582e377125b00fbd7a5cdb0e351f9a1e40182fa9f608b48e1ab852d211b75facb2f4f3fa17f7c6ebc2ef4acca61ae7eb7fbcfa4768f11d2db678116
+  languageName: node
+  linkType: hard
+
 "rfdc@npm:^1.3.0":
   version: 1.3.0
   resolution: "rfdc@npm:1.3.0"
@@ -41838,6 +41888,16 @@ typescript@~4.4.3:
   languageName: node
   linkType: hard

+"web-auth-library@getappmap/web-auth-library#v1.0.3-cjs":
+  version: 1.0.3
+  resolution: "web-auth-library@https://github.com/getappmap/web-auth-library.git#commit=f60401541b00795465224c5da786fd273dbc9459"
+  dependencies:
+    jose: ">= 4.12.0 < 5.0.0"
+    rfc4648: ^1.5.2
+  checksum: 8031779036fdba6f7eba1638e73368a11b7745a26cab2401998878e767e83fcd88c5b0270ddce140fd1f1822e177b09619a856368295a644af256a2d2d721b88
+  languageName: node
+  linkType: hard
+
 "web-streams-polyfill@npm:4.0.0-beta.3":
   version: 4.0.0-beta.3
   resolution: "web-streams-polyfill@npm:4.0.0-beta.3"
