
Commit 4bbd542

Authored Nov 26, 2024
Merge branch '0.2' into feature/import-endpoint
2 parents: 604d2da + ebb3e24 · commit: 4bbd542

5 files changed: +19 −5 lines


autogen/agentchat/contrib/web_surfer.py (+12 −2)

@@ -230,7 +230,7 @@ def _page_down() -> str:
         def _find_on_page_ctrl_f(
             search_string: Annotated[
                 str, "The string to search for on the page. This search string supports wildcards like '*'"
-            ]
+            ],
         ) -> str:
             find_result = self.browser.find_on_page(search_string)
             header, content = _browser_state()
@@ -344,7 +344,17 @@ def generate_surfer_reply(

        # Clone the messages to give context
        self._assistant.chat_messages[self._user_proxy] = list()
-       history = messages[0 : len(messages) - 1]
+
+       # If the last message is a tool message, it has to be included in the context;
+       # otherwise OpenAI will raise an exception that not all tool calls are followed by corresponding tool messages.
+       # If the last message is not a tool message, we fall back to the library's default behavior,
+       # which is copying all messages except the last one.
+       # The issue is described more thoroughly in PR https://github.com/microsoft/autogen/pull/4050
+       if messages[-1].get("role", "assistant") == "tool":
+           history = messages[:]
+       else:
+           history = messages[0 : len(messages) - 1]
+
        for message in history:
            self._assistant.chat_messages[self._user_proxy].append(message)
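The new branch amounts to: keep the whole message list when it ends with a tool response, otherwise drop the last message as before. A minimal, hedged sketch of that rule in isolation; select_history and the sample messages below are illustrative, not part of the commit:

# Minimal sketch of the history-selection rule above (illustrative only).
# Assumes OpenAI-style chat messages: dicts with "role" and "content" keys.
from typing import Dict, List


def select_history(messages: List[Dict[str, str]]) -> List[Dict[str, str]]:
    """Keep a trailing tool message so every tool call is followed by its tool response."""
    if messages and messages[-1].get("role", "assistant") == "tool":
        return messages[:]  # include the tool response that answers the pending tool call
    return messages[:-1]  # default behavior: copy all messages except the last one


# The tool response stays in the history handed to the assistant.
msgs = [
    {"role": "assistant", "content": "calling find_on_page"},
    {"role": "tool", "content": "search result"},
]
assert select_history(msgs)[-1]["role"] == "tool"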

autogen/agentchat/groupchat.py (+4)

@@ -1264,6 +1264,10 @@ async def a_run_chat(
                else:
                    # admin agent is not found in the participants
                    raise
+           except NoEligibleSpeaker:
+               # No eligible speaker, terminate the conversation
+               break
+
            if reply is None:
                break
            # The speaker sends the message without requesting a reply
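For context, the added except clause turns a speaker-selection failure into a clean end of the chat loop rather than an unhandled exception. A hedged sketch of that control flow, where run_rounds and select_speaker are hypothetical stand-ins and only NoEligibleSpeaker is taken from the diff:

# Illustrative control-flow sketch; not the actual GroupChat code.
from typing import Callable


class NoEligibleSpeaker(Exception):
    """Raised when speaker selection cannot find a next speaker."""


def run_rounds(select_speaker: Callable[[], str], max_round: int) -> None:
    for _ in range(max_round):
        try:
            speaker = select_speaker()
        except NoEligibleSpeaker:
            # No eligible speaker, terminate the conversation instead of propagating the error
            break
        print(f"{speaker} takes the turn")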

autogen/oai/client.py (+1 −1)

@@ -1069,7 +1069,7 @@ def extract_text_or_completion_object(

    def _throttle_api_calls(self, idx: int) -> None:
        """Rate limit api calls."""
-       if self._rate_limiters[idx]:
+       if idx < len(self._rate_limiters) and self._rate_limiters[idx]:
            limiter = self._rate_limiters[idx]

            assert limiter is not None
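The changed condition relies on Python's short-circuiting "and": the bounds check is evaluated first, so the subscript never runs when no rate limiter was registered for that index. A standalone sketch of the same guard, with illustrative names (RateLimiter and throttle are not from the repository):

# Illustrative guard; mirrors the short-circuit bounds check above.
from typing import List, Optional


class RateLimiter:
    def sleep(self) -> None:
        """Block until the next call is allowed (details omitted)."""


def throttle(rate_limiters: List[Optional[RateLimiter]], idx: int) -> None:
    # Short-circuit evaluation: the subscript only runs when idx is in range,
    # so configs with no registered rate limiter simply skip throttling.
    if idx < len(rate_limiters) and rate_limiters[idx]:
        rate_limiters[idx].sleep()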

autogen/version.py (+1 −1)

@@ -1 +1 @@
-__version__ = "0.2.37"
+__version__ = "0.2.39"

website/docs/Getting-Started.mdx (+1 −1)

@@ -45,7 +45,7 @@ pip install autogen-agentchat~=0.2
import os
from autogen import AssistantAgent, UserProxyAgent

-llm_config = {"model": "gpt-4", "api_key": os.environ["OPENAI_API_KEY"]}
+llm_config = { "config_list": [{ "model": "gpt-4", "api_key": os.environ.get("OPENAI_API_KEY") }] }
assistant = AssistantAgent("assistant", llm_config=llm_config)
user_proxy = UserProxyAgent("user_proxy", code_execution_config=False)
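The quickstart now uses the config_list form of llm_config, which takes a list of model configurations, and reads the API key with os.environ.get so a missing environment variable does not raise a KeyError at import time. A hedged sketch of the updated snippet in use; the commented follow-up call is illustrative and not part of the diff:

import os

from autogen import AssistantAgent, UserProxyAgent

# config_list holds one or more model configurations for the agent's client.
llm_config = {"config_list": [{"model": "gpt-4", "api_key": os.environ.get("OPENAI_API_KEY")}]}

assistant = AssistantAgent("assistant", llm_config=llm_config)
user_proxy = UserProxyAgent("user_proxy", code_execution_config=False)

# Illustrative follow-up (requires a valid API key):
# user_proxy.initiate_chat(assistant, message="Summarize the benefits of the config_list format.")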
