How enable tool use for agents using Anthropic Claude models? #5398
-
import boto3
import asyncio
from botocore.config import Config
from autogen_ext.models.semantic_kernel import SKChatCompletionAdapter
from semantic_kernel import Kernel
from semantic_kernel.connectors.ai.bedrock import BedrockChatCompletion, BedrockChatPromptExecutionSettings
from semantic_kernel.memory.null_memory import NullMemory
my_config = Config(
region_name = 'us-east-1',
signature_version = 'v4',
retries = {
'max_attempts': 10,
'mode': 'standard'
}
)
# Create the custom boto3 client
bedrock_runtime_client = boto3.client(service_name='bedrock-runtime', config=my_config)
bedrock_client = boto3.client("bedrock", config=my_config)
# Initialize BedrockChatCompletion with the custom client
sk_client = BedrockChatCompletion(
model_id='anthropic.claude-3-5-sonnet-20240620-v1:0',
runtime_client=bedrock_runtime_client,
client=bedrock_client
)
# Configure execution settings
settings = BedrockChatPromptExecutionSettings(
temperature=0.2,
max_tokens=1000,
)
# Create SKChatCompletionAdapter
model_client = SKChatCompletionAdapter(sk_client, kernel=Kernel(memory=NullMemory()), prompt_settings=settings)
from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.teams import RoundRobinGroupChat
from autogen_agentchat.ui import Console
# Define a tool
async def get_weather(city: str) -> str:
return f"The weather in {city} is 73 degrees and Sunny."
async def main() -> None:
# Define an agent
weather_agent = AssistantAgent(
name="weather_agent",
model_client=model_client,
tools=[get_weather],
)
# Define a team with a single agent and maximum auto-gen turns of 1.
agent_team = RoundRobinGroupChat([weather_agent], max_turns=1)
while True:
# Get user input from the console.
user_input = input("Enter a message (type 'exit' to leave): ")
if user_input.strip().lower() == "exit":
break
# Run the team and stream messages to the console.
stream = agent_team.run_stream(task=user_input)
await Console(stream)
asyncio.run(main()) Output
ValueError: The model does not support function calling. |
Beta Was this translation helpful? Give feedback.
Replies: 2 comments 2 replies
-
@lspinheiro do you know the right way to enable tool calls here? |
Beta Was this translation helpful? Give feedback.
-
@GxWwT , try passing the model info. We currently don't have a good set of default model capabilities for other providers and those need to be set manually. @jackgerrits , I think we could move the _model_info from openai into a shared namespace and start adding other provides in there. The openai model info can be shared with the other clients as they can be used both with semantic kernel and azure ai inference which is why I'm suggesting a shared namespace. from autogen_core.models import ModelInfo
...
# Set other attributes as needed/known
model_info = ModelInfo(vision=False, function_calling=True, json_output=True, family=ModelFamily.UNKNOWN)
sk_client = BedrockChatCompletion(
model_id='anthropic.claude-3-5-sonnet-20240620-v1:0',
runtime_client=bedrock_runtime_client,
client=bedrock_client,
model_info=model_info
) |
Beta Was this translation helpful? Give feedback.
@GxWwT , try passing the model info. We currently don't have a good set of default model capabilities for other providers and those need to be set manually. @jackgerrits , I think we could move the _model_info from openai into a shared namespace and start adding other provides in there. The openai model info can be shared with the other clients as they can be used both with semantic kernel and azure ai inference which is why I'm suggesting a shared namespace.