Merge branch 'main' into feauture/juputerCodeExecutor2
ekzhu authored Jan 15, 2025
2 parents 8397d22 + acb9117 commit 0ba8f97
Showing 51 changed files with 3,996 additions and 490 deletions.
1 change: 0 additions & 1 deletion python/packages/autogen-agentchat/pyproject.toml
@@ -16,7 +16,6 @@ classifiers = [
]
dependencies = [
"autogen-core==0.4.1",
"aioconsole>=0.8.1"
]

[tool.ruff]
@@ -14,6 +14,7 @@
)

from autogen_core import CancellationToken, FunctionCall
from autogen_core.memory import Memory
from autogen_core.model_context import (
ChatCompletionContext,
UnboundedChatCompletionContext,
Expand All @@ -35,6 +36,7 @@
AgentEvent,
ChatMessage,
HandoffMessage,
MemoryQueryEvent,
MultiModalMessage,
TextMessage,
ToolCallExecutionEvent,
@@ -120,6 +122,7 @@ class AssistantAgent(BaseChatAgent):
will be returned as the response.
Available variables: `{tool_name}`, `{arguments}`, `{result}`.
For example, `"{tool_name}: {result}"` will create a summary like `"tool_name: result"`.
memory (Sequence[Memory] | None, optional): The memory store to use for the agent. Defaults to `None`.
Raises:
ValueError: If tool names are not unique.
@@ -240,9 +243,20 @@ def __init__(
) = "You are a helpful AI assistant. Solve tasks using your tools. Reply with TERMINATE when the task has been completed.",
reflect_on_tool_use: bool = False,
tool_call_summary_format: str = "{result}",
memory: Sequence[Memory] | None = None,
):
super().__init__(name=name, description=description)
self._model_client = model_client
self._memory = None
if memory is not None:
if isinstance(memory, list):
self._memory = memory
else:
raise TypeError(f"Expected Memory, List[Memory], or None, got {type(memory)}")

self._system_messages: List[
SystemMessage | UserMessage | AssistantMessage | FunctionExecutionResultMessage
] = []
if system_message is None:
self._system_messages = []
else:
@@ -325,6 +339,17 @@ async def on_messages_stream(
# Inner messages.
inner_messages: List[AgentEvent | ChatMessage] = []

# Update the model context with memory content.
if self._memory:
for memory in self._memory:
update_context_result = await memory.update_context(self._model_context)
if update_context_result and len(update_context_result.memories.results) > 0:
memory_query_event_msg = MemoryQueryEvent(
content=update_context_result.memories.results, source=self.name
)
inner_messages.append(memory_query_event_msg)
yield memory_query_event_msg

# Generate an inference result based on the current model context.
llm_messages = self._system_messages + await self._model_context.get_messages()
result = await self._model_client.create(
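Taken together, the changes above let an AssistantAgent accept a list of Memory implementations; before each inference the agent calls update_context on every memory and emits a MemoryQueryEvent for any retrieved entries. A minimal usage sketch, assuming the ListMemory and MemoryContent helpers in autogen_core.memory and the OpenAIChatCompletionClient in autogen_ext (neither is shown in this diff):

import asyncio

from autogen_agentchat.agents import AssistantAgent
from autogen_core.memory import ListMemory, MemoryContent, MemoryMimeType
from autogen_ext.models.openai import OpenAIChatCompletionClient


async def main() -> None:
    # ListMemory is assumed to be the list-backed implementation of the
    # Memory protocol referenced in this change.
    memory = ListMemory()
    await memory.add(MemoryContent(content="The user prefers metric units.", mime_type=MemoryMimeType.TEXT))

    agent = AssistantAgent(
        name="assistant",
        model_client=OpenAIChatCompletionClient(model="gpt-4o"),
        memory=[memory],  # injected into the model context before each inference
    )

    # The message stream includes a MemoryQueryEvent when memory entries are injected.
    result = await agent.run(task="How long is a 10k race in miles?")
    print(result.messages[-1].content)


asyncio.run(main())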
@@ -5,7 +5,6 @@
from inspect import iscoroutinefunction
from typing import Any, AsyncGenerator, Awaitable, Callable, ClassVar, Generator, Optional, Sequence, Union, cast

from aioconsole import ainput # type: ignore
from autogen_core import CancellationToken

from ..base import Response
@@ -17,10 +16,9 @@
InputFuncType = Union[SyncInputFunc, AsyncInputFunc]


# TODO: ainput doesn't seem to play nicely with jupyter.
# No input window appears in this case.
# TODO: check if using to_thread fixes this in jupyter
async def cancellable_input(prompt: str, cancellation_token: Optional[CancellationToken]) -> str:
task: asyncio.Task[str] = asyncio.create_task(ainput(prompt)) # type: ignore
task: asyncio.Task[str] = asyncio.create_task(asyncio.to_thread(input, prompt))
if cancellation_token is not None:
cancellation_token.link_future(task)
return await task
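Here asyncio.to_thread(input, prompt) replaces aioconsole.ainput: the blocking input() builtin runs in a worker thread so the event loop stays responsive, and the wrapping task can still be linked to a CancellationToken. A standalone sketch of the same pattern, using a plain timeout in place of the token (prompt_user is a hypothetical helper, not part of this diff):

import asyncio


async def prompt_user(prompt: str, timeout: float | None = None) -> str:
    # Run the blocking input() builtin in a worker thread so the event loop
    # keeps running while waiting for the user to type.
    task: asyncio.Task[str] = asyncio.create_task(asyncio.to_thread(input, prompt))
    if timeout is not None:
        # Stand-in for CancellationToken.link_future: cancel the awaiting task
        # after a timeout. Note that the worker thread itself keeps waiting on
        # input(); only the awaiting task is released.
        asyncio.get_running_loop().call_later(timeout, task.cancel)
    return await task


async def main() -> None:
    try:
        name = await prompt_user("Your name: ", timeout=30.0)
        print(f"Hello, {name}!")
    except asyncio.CancelledError:
        print("Input was cancelled.")


asyncio.run(main())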
@@ -2,14 +2,16 @@
from ._handoff import Handoff
from ._task import TaskResult, TaskRunner
from ._team import Team
from ._termination import TerminatedException, TerminationCondition
from ._termination import AndTerminationCondition, OrTerminationCondition, TerminatedException, TerminationCondition

__all__ = [
"ChatAgent",
"Response",
"Team",
"TerminatedException",
"TerminationCondition",
"AndTerminationCondition",
"OrTerminationCondition",
"TaskResult",
"TaskRunner",
"Handoff",
@@ -2,13 +2,17 @@
from abc import ABC, abstractmethod
from typing import List, Sequence

from autogen_core import Component, ComponentBase, ComponentModel
from pydantic import BaseModel
from typing_extensions import Self

from ..messages import AgentEvent, ChatMessage, StopMessage


class TerminatedException(BaseException): ...


class TerminationCondition(ABC):
class TerminationCondition(ABC, ComponentBase[BaseModel]):
"""A stateful condition that determines when a conversation should be terminated.
A termination condition is a callable that takes a sequence of ChatMessage objects
@@ -43,6 +47,9 @@ async def main() -> None:
asyncio.run(main())
"""

component_type = "termination"
# component_config_schema = BaseModel # type: ignore

@property
@abstractmethod
def terminated(self) -> bool:
@@ -72,14 +79,22 @@ async def reset(self) -> None:

def __and__(self, other: "TerminationCondition") -> "TerminationCondition":
"""Combine two termination conditions with an AND operation."""
return _AndTerminationCondition(self, other)
return AndTerminationCondition(self, other)

def __or__(self, other: "TerminationCondition") -> "TerminationCondition":
"""Combine two termination conditions with an OR operation."""
return _OrTerminationCondition(self, other)
return OrTerminationCondition(self, other)


class AndTerminationConditionConfig(BaseModel):
conditions: List[ComponentModel]


class AndTerminationCondition(TerminationCondition, Component[AndTerminationConditionConfig]):
component_config_schema = AndTerminationConditionConfig
component_type = "termination"
component_provider_override = "autogen_agentchat.base.AndTerminationCondition"

class _AndTerminationCondition(TerminationCondition):
def __init__(self, *conditions: TerminationCondition) -> None:
self._conditions = conditions
self._stop_messages: List[StopMessage] = []
@@ -111,8 +126,27 @@ async def reset(self) -> None:
await condition.reset()
self._stop_messages.clear()

def _to_config(self) -> AndTerminationConditionConfig:
"""Convert the AND termination condition to a config."""
return AndTerminationConditionConfig(conditions=[condition.dump_component() for condition in self._conditions])

@classmethod
def _from_config(cls, config: AndTerminationConditionConfig) -> Self:
"""Create an AND termination condition from a config."""
conditions = [TerminationCondition.load_component(condition_model) for condition_model in config.conditions]
return cls(*conditions)


class OrTerminationConditionConfig(BaseModel):
conditions: List[ComponentModel]
"""List of termination conditions where any one being satisfied is sufficient."""


class OrTerminationCondition(TerminationCondition, Component[OrTerminationConditionConfig]):
component_config_schema = OrTerminationConditionConfig
component_type = "termination"
component_provider_override = "autogen_agentchat.base.OrTerminationCondition"

class _OrTerminationCondition(TerminationCondition):
def __init__(self, *conditions: TerminationCondition) -> None:
self._conditions = conditions

@@ -133,3 +167,13 @@ async def __call__(self, messages: Sequence[AgentEvent | ChatMessage]) -> StopMe
async def reset(self) -> None:
for condition in self._conditions:
await condition.reset()

def _to_config(self) -> OrTerminationConditionConfig:
"""Convert the OR termination condition to a config."""
return OrTerminationConditionConfig(conditions=[condition.dump_component() for condition in self._conditions])

@classmethod
def _from_config(cls, config: OrTerminationConditionConfig) -> Self:
"""Create an OR termination condition from a config."""
conditions = [TerminationCondition.load_component(condition_model) for condition_model in config.conditions]
return cls(*conditions)
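Because TerminationCondition is now a ComponentBase, composed conditions can be dumped to a ComponentModel and reloaded. A sketch under the assumption that the built-in conditions in autogen_agentchat.conditions (such as MaxMessageTermination and TextMentionTermination) also implement the component interface:

from autogen_agentchat.base import TerminationCondition
from autogen_agentchat.conditions import MaxMessageTermination, TextMentionTermination

# `|` builds an OrTerminationCondition, `&` an AndTerminationCondition.
termination = MaxMessageTermination(max_messages=10) | TextMentionTermination("TERMINATE")

# dump_component() serializes the composition, calling dump_component() on each child condition.
config = termination.dump_component()
print(config.model_dump_json(indent=2))

# load_component() rebuilds the nested conditions from their configs.
restored = TerminationCondition.load_component(config)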