fix: update human-in-the-loop tutorial with better system message to signal termination condition (#5253)

Resolves #5248
ekzhu authored Jan 29, 2025
1 parent 02e968a commit 7020f2a
Showing 1 changed file with 21 additions and 29 deletions.
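
For reference, the updated cell boils down to the pattern below: a minimal, self-contained sketch assembled from the added lines in this diff. The import paths and the asyncio wrapper are assumptions (the notebook's imports are not shown in these hunks, and the notebook itself runs the same code with top-level await instead of asyncio.run).

    import asyncio

    from autogen_agentchat.agents import AssistantAgent
    from autogen_agentchat.base import Handoff
    from autogen_agentchat.conditions import HandoffTermination, TextMentionTermination
    from autogen_agentchat.teams import RoundRobinGroupChat
    from autogen_agentchat.ui import Console
    from autogen_ext.models.openai import OpenAIChatCompletionClient

    async def main() -> None:
        # Create an OpenAI model client (assumes OPENAI_API_KEY is set in the environment).
        model_client = OpenAIChatCompletionClient(model="gpt-4o")

        # Lazy assistant: hands off to the user when it cannot finish the task,
        # and responds with "TERMINATE" when the task is complete.
        lazy_agent = AssistantAgent(
            "lazy_assistant",
            model_client=model_client,
            handoffs=[Handoff(target="user", message="Transfer to user.")],
            system_message=(
                "If you cannot complete the task, transfer to user. "
                "Otherwise, when finished, respond with 'TERMINATE'."
            ),
        )

        # Stop on either a handoff to the user or the text "TERMINATE".
        handoff_termination = HandoffTermination(target="user")
        text_termination = TextMentionTermination("TERMINATE")

        # Single-agent team with the two termination conditions OR-ed together.
        lazy_agent_team = RoundRobinGroupChat(
            [lazy_agent], termination_condition=handoff_termination | text_termination
        )

        # Run the team and stream messages plus usage stats to the console.
        await Console(
            lazy_agent_team.run_stream(task="What is the weather in New York?"),
            output_stats=True,
        )

    asyncio.run(main())

The point of the new system message is that it maps cleanly onto the two OR-ed stop conditions: hand off to the user when stuck, emit 'TERMINATE' when done.
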
@@ -289,7 +289,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 20,
"metadata": {},
"outputs": [
{
@@ -299,27 +299,27 @@
"---------- user ----------\n",
"What is the weather in New York?\n",
"---------- lazy_assistant ----------\n",
"[FunctionCall(id='call_nSjgvWCUYo5ccacBz7yzrPLN', arguments='{}', name='transfer_to_user')]\n",
"[Prompt tokens: 68, Completion tokens: 12]\n",
"[FunctionCall(id='call_EAcMgrLGHdLw0e7iJGoMgxuu', arguments='{}', name='transfer_to_user')]\n",
"[Prompt tokens: 69, Completion tokens: 12]\n",
"---------- lazy_assistant ----------\n",
"[FunctionExecutionResult(content='Transfer to user.', call_id='call_nSjgvWCUYo5ccacBz7yzrPLN')]\n",
"[FunctionExecutionResult(content='Transfer to user.', call_id='call_EAcMgrLGHdLw0e7iJGoMgxuu')]\n",
"---------- lazy_assistant ----------\n",
"Transfer to user.\n",
"---------- Summary ----------\n",
"Number of messages: 4\n",
"Finish reason: Handoff to user from lazy_assistant detected.\n",
"Total prompt tokens: 68\n",
"Total prompt tokens: 69\n",
"Total completion tokens: 12\n",
"Duration: 0.75 seconds\n"
"Duration: 0.69 seconds\n"
]
},
{
"data": {
"text/plain": [
"TaskResult(messages=[TextMessage(source='user', models_usage=None, content='What is the weather in New York?', type='TextMessage'), ToolCallRequestEvent(source='lazy_assistant', models_usage=RequestUsage(prompt_tokens=68, completion_tokens=12), content=[FunctionCall(id='call_nSjgvWCUYo5ccacBz7yzrPLN', arguments='{}', name='transfer_to_user')], type='ToolCallRequestEvent'), ToolCallExecutionEvent(source='lazy_assistant', models_usage=None, content=[FunctionExecutionResult(content='Transfer to user.', call_id='call_nSjgvWCUYo5ccacBz7yzrPLN')], type='ToolCallExecutionEvent'), HandoffMessage(source='lazy_assistant', models_usage=None, target='user', content='Transfer to user.', type='HandoffMessage')], stop_reason='Handoff to user from lazy_assistant detected.')"
"TaskResult(messages=[TextMessage(source='user', models_usage=None, content='What is the weather in New York?', type='TextMessage'), ToolCallRequestEvent(source='lazy_assistant', models_usage=RequestUsage(prompt_tokens=69, completion_tokens=12), content=[FunctionCall(id='call_EAcMgrLGHdLw0e7iJGoMgxuu', arguments='{}', name='transfer_to_user')], type='ToolCallRequestEvent'), ToolCallExecutionEvent(source='lazy_assistant', models_usage=None, content=[FunctionExecutionResult(content='Transfer to user.', call_id='call_EAcMgrLGHdLw0e7iJGoMgxuu')], type='ToolCallExecutionEvent'), HandoffMessage(source='lazy_assistant', models_usage=None, target='user', content='Transfer to user.', context=[], type='HandoffMessage')], stop_reason='Handoff to user from lazy_assistant detected.')"
]
},
"execution_count": 4,
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
@@ -334,7 +334,7 @@
"\n",
"# Create an OpenAI model client.\n",
"model_client = OpenAIChatCompletionClient(\n",
" model=\"gpt-4o-2024-08-06\",\n",
" model=\"gpt-4o\",\n",
" # api_key=\"sk-...\", # Optional if you have an OPENAI_API_KEY env variable set.\n",
")\n",
"\n",
@@ -343,20 +343,20 @@
" \"lazy_assistant\",\n",
" model_client=model_client,\n",
" handoffs=[Handoff(target=\"user\", message=\"Transfer to user.\")],\n",
" system_message=\"Always transfer to user when you don't know the answer. Respond 'TERMINATE' when task is complete.\",\n",
" system_message=\"If you cannot complete the task, transfer to user. Otherwise, when finished, respond with 'TERMINATE'.\",\n",
")\n",
"\n",
"# Define a termination condition that checks for handoff message targetting helper and text \"TERMINATE\".\n",
"# Define a termination condition that checks for handoff messages.\n",
"handoff_termination = HandoffTermination(target=\"user\")\n",
"# Define a termination condition that checks for a specific text mention.\n",
"text_termination = TextMentionTermination(\"TERMINATE\")\n",
"combined_termination = handoff_termination | text_termination\n",
"\n",
"# Create a single-agent team.\n",
"lazy_agent_team = RoundRobinGroupChat([lazy_agent], termination_condition=combined_termination)\n",
"# Create a single-agent team with the lazy assistant and both termination conditions.\n",
"lazy_agent_team = RoundRobinGroupChat([lazy_agent], termination_condition=handoff_termination | text_termination)\n",
"\n",
"# Run the team and stream to the console.\n",
"task = \"What is the weather in New York?\"\n",
"await Console(lazy_agent_team.run_stream(task=task))"
"await Console(lazy_agent_team.run_stream(task=task), output_stats=True)"
]
},
{
@@ -369,7 +369,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 21,
"metadata": {},
"outputs": [
{
@@ -379,26 +379,18 @@
"---------- user ----------\n",
"The weather in New York is sunny.\n",
"---------- lazy_assistant ----------\n",
"Great to hear that it's sunny in New York! Is there anything else you'd like to know or discuss?\n",
"[Prompt tokens: 109, Completion tokens: 23]\n",
"Great! Enjoy the sunny weather in New York! Is there anything else you'd like to know?\n",
"---------- lazy_assistant ----------\n",
"TERMINATE\n",
"[Prompt tokens: 138, Completion tokens: 5]\n",
"---------- Summary ----------\n",
"Number of messages: 3\n",
"Finish reason: Text 'TERMINATE' mentioned\n",
"Total prompt tokens: 247\n",
"Total completion tokens: 28\n",
"Duration: 1.44 seconds\n"
"TERMINATE\n"
]
},
{
"data": {
"text/plain": [
"TaskResult(messages=[TextMessage(source='user', models_usage=None, content='The weather in New York is sunny.', type='TextMessage'), TextMessage(source='lazy_assistant', models_usage=RequestUsage(prompt_tokens=109, completion_tokens=23), content=\"Great to hear that it's sunny in New York! Is there anything else you'd like to know or discuss?\", type='TextMessage'), TextMessage(source='lazy_assistant', models_usage=RequestUsage(prompt_tokens=138, completion_tokens=5), content='TERMINATE', type='TextMessage')], stop_reason=\"Text 'TERMINATE' mentioned\")"
"TaskResult(messages=[TextMessage(source='user', models_usage=None, content='The weather in New York is sunny.', type='TextMessage'), TextMessage(source='lazy_assistant', models_usage=RequestUsage(prompt_tokens=110, completion_tokens=21), content=\"Great! Enjoy the sunny weather in New York! Is there anything else you'd like to know?\", type='TextMessage'), TextMessage(source='lazy_assistant', models_usage=RequestUsage(prompt_tokens=137, completion_tokens=5), content='TERMINATE', type='TextMessage')], stop_reason=\"Text 'TERMINATE' mentioned\")"
]
},
"execution_count": 5,
"execution_count": 21,
"metadata": {},
"output_type": "execute_result"
}
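
The source of this second cell is collapsed in the diff; judging from the output above, it presumably resumes the same team with the user's reply as the new task, along these lines (a hypothetical reconstruction, relying on the notebook's top-level await):

    # Hypothetical reconstruction of the collapsed cell: feed the user's answer back in
    # as the next task. The team keeps its conversation state between runs, so the
    # assistant can now answer in context and finish with "TERMINATE".
    await Console(lazy_agent_team.run_stream(task="The weather in New York is sunny."))
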
@@ -431,7 +423,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
"version": "3.12.7"
}
},
"nbformat": 4,