Skip to content

Commit d9a0393

Browse files
committed
feat: added anthropic ai api
1 parent 135a8cb commit d9a0393

File tree

8 files changed

+3253
-73
lines changed

8 files changed

+3253
-73
lines changed

Pipfile

+1-1
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,8 @@ verify_ssl = true
44
name = "pypi"
55

66
[packages]
7-
# viur-core = {file = "../viur-core", editable = true, extras = ["mailjet"]}
87
viur-core = "==3.7.8"
8+
anthropic = "*"
99

1010
[dev-packages]
1111
viur-cli = "~=2.1"

Pipfile.lock

+326-69
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

deploy/ai/__init__.py

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
from .anthropic import query_anthropic

deploy/ai/anthropic.py

+91
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
from viur.core import conf
2+
from viur.core.prototypes import List, Tree
3+
import os, json, anthropic
4+
5+
6+
def query_anthropic(
    user_prompt: str,
    model: str = "claude-3-7-sonnet-20250219",  # Name of the model to use
    max_tokens: int = 1024,
    temperature: float = 1.0,
    modules_to_include: list[str] = None,
    enable_caching: bool = False,
    max_thinking_tokens: int = 0,  # disables thinking if <= 0
    system_prompt: str = "You are a coding-assistant that helps develop python-code for accessing a viur-backend. You only output json-strings containing a single key named \"code\".",
    dry_run: bool = False,
    anthropic_api_key: str = None,
):
    """Build an Anthropic Messages API request for ViUR scriptor code generation.

    Assembles a request consisting of the given system prompt, the bundled
    scriptor documentation, the JSON-serialized structures of the selected
    ViUR modules and finally the user prompt, then (unless ``dry_run``)
    sends it to the Anthropic API.

    :param user_prompt: The user's request, appended as the last content block.
    :param model: Anthropic model name to query.
    :param max_tokens: Maximum number of tokens in the response.
    :param temperature: Sampling temperature passed to the API.
    :param modules_to_include: Names of ViUR modules (List or Tree prototypes)
        whose skeleton structures are added to the prompt; ``None`` adds none.
    :param enable_caching: If True, mark the docs system block with
        ``cache_control: ephemeral`` so Anthropic can cache it.
    :param max_thinking_tokens: Thinking budget; values <= 0 disable thinking.
    :param system_prompt: Base system instruction for the model.
    :param dry_run: If True, only build the request without calling the API.
    :param anthropic_api_key: API key; ``None`` lets the SDK use its default
        resolution (e.g. the ANTHROPIC_API_KEY environment variable).
    :return: Tuple ``(llm_params, message)``; ``message`` is ``None`` on dry runs.
    :raises ValueError: If a selected module is neither a List nor a Tree.
    """
    # The docs file ships with the project; read it with an explicit encoding
    # so decoding does not depend on the platform's locale.
    scriptor_docs_path = os.path.join(
        conf.instance.project_base_path, "ai", "contexts", "scriptor_docs.txt"
    )
    with open(scriptor_docs_path, "r", encoding="utf-8") as scriptor_docs_file:
        scriptor_docs_txt_data = scriptor_docs_file.read()

    llm_params = {
        "model": model,
        "max_tokens": max_tokens,
        "temperature": temperature,
        "system": [{
            "type": "text",
            "text": system_prompt
        }],
        "messages": [{
            "role": "user",
            "content": []
        }]
    }

    # add docs to system prompt (with or without caching)
    scriptor_doc_system_param = {
        "type": "text",
        "text": scriptor_docs_txt_data,
    }
    if enable_caching:
        scriptor_doc_system_param["cache_control"] = {"type": "ephemeral"}
    llm_params["system"].append(scriptor_doc_system_param)

    # thinking configuration
    if max_thinking_tokens > 0:
        llm_params["thinking"] = {
            "type": "enabled",
            "budget_tokens": max_thinking_tokens
        }

    # add module structures
    if modules_to_include is not None:
        structures_from_viur = {}
        for module_name in modules_to_include:
            module = getattr(conf.main_app.vi, module_name, None)
            if not module:
                # Unknown module names are skipped silently (best-effort).
                continue

            if isinstance(module, List):
                if module_name not in structures_from_viur:
                    structures_from_viur[module_name] = module.structure()
            elif isinstance(module, Tree):
                if module_name not in structures_from_viur:
                    structures_from_viur[module_name] = {
                        "node": module.structure(skelType="node"),
                        "leaf": module.structure(skelType="leaf")
                    }
            else:
                raise ValueError(f"""The module should be an instance of "tree" or "list". "{module}" is unsupported.""")

        selected_module_structures = {"module_structures": structures_from_viur}
        selected_module_structures_description = json.dumps(selected_module_structures, indent=2)

        if selected_module_structures["module_structures"]:
            llm_params["messages"][0]["content"].append({
                "type": "text",
                "text": selected_module_structures_description
            })

    # finally append user prompt
    llm_params["messages"][0]["content"].append({
        "type": "text",
        "text": user_prompt
    })

    if dry_run:
        return llm_params, None
    else:
        anthropic_client = anthropic.Anthropic(api_key=anthropic_api_key)
        message = anthropic_client.messages.create(**llm_params)
        return llm_params, message

0 commit comments

Comments
 (0)