Skip to content

Commit eb343cd

Browse files
committed
Make cache store typed, and improve docs
1 parent 18e5431 commit eb343cd

26 files changed

+349
-89
lines changed

python/packages/autogen-agentchat/tests/test_group_chat.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -34,10 +34,10 @@
3434
from autogen_agentchat.teams._group_chat._swarm_group_chat import SwarmGroupChatManager
3535
from autogen_agentchat.ui import Console
3636
from autogen_core import AgentId, CancellationToken
37-
from autogen_core.models import ReplayChatCompletionClient
3837
from autogen_core.tools import FunctionTool
3938
from autogen_ext.code_executors.local import LocalCommandLineCodeExecutor
4039
from autogen_ext.models.openai import OpenAIChatCompletionClient
40+
from autogen_ext.models.replay import ReplayChatCompletionClient
4141
from openai.resources.chat.completions import AsyncCompletions
4242
from openai.types.chat.chat_completion import ChatCompletion, Choice
4343
from openai.types.chat.chat_completion_chunk import ChatCompletionChunk

python/packages/autogen-agentchat/tests/test_magentic_one_group_chat.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
)
1919
from autogen_agentchat.teams._group_chat._magentic_one._magentic_one_orchestrator import MagenticOneOrchestrator
2020
from autogen_core import AgentId, CancellationToken
21-
from autogen_core.models import ReplayChatCompletionClient
21+
from autogen_ext.models.replay import ReplayChatCompletionClient
2222
from utils import FileLogHandler
2323

2424
logger = logging.getLogger(EVENT_LOGGER_NAME)

python/packages/autogen-core/docs/src/reference/index.md

+4
Original file line numberDiff line numberDiff line change
@@ -48,12 +48,16 @@ python/autogen_ext.agents.video_surfer
4848
python/autogen_ext.agents.video_surfer.tools
4949
python/autogen_ext.auth.azure
5050
python/autogen_ext.teams.magentic_one
51+
python/autogen_ext.models.cache
5152
python/autogen_ext.models.openai
53+
python/autogen_ext.models.replay
5254
python/autogen_ext.tools.langchain
5355
python/autogen_ext.tools.graphrag
5456
python/autogen_ext.tools.code_execution
5557
python/autogen_ext.code_executors.local
5658
python/autogen_ext.code_executors.docker
5759
python/autogen_ext.code_executors.azure
60+
python/autogen_ext.cache_store.diskcache
61+
python/autogen_ext.cache_store.redis
5862
python/autogen_ext.runtimes.grpc
5963
```
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
autogen\_ext.cache_store.diskcache
2+
==================================
3+
4+
5+
.. automodule:: autogen_ext.cache_store.diskcache
6+
:members:
7+
:undoc-members:
8+
:show-inheritance:
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
autogen\_ext.cache_store.redis
2+
==============================
3+
4+
5+
.. automodule:: autogen_ext.cache_store.redis
6+
:members:
7+
:undoc-members:
8+
:show-inheritance:
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
autogen\_ext.models.cache
2+
=========================
3+
4+
5+
.. automodule:: autogen_ext.models.cache
6+
:members:
7+
:undoc-members:
8+
:show-inheritance:
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
autogen\_ext.models.replay
2+
==========================
3+
4+
5+
.. automodule:: autogen_ext.models.replay
6+
:members:
7+
:undoc-members:
8+
:show-inheritance:

python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/tutorial/models.ipynb

+3-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,9 @@
88
"\n",
99
"In many cases, agents need access to LLM model services such as OpenAI, Azure OpenAI, or local models. Since there are many different providers with different APIs, `autogen-core` implements a protocol for [model clients](../../core-user-guide/framework/model-clients.ipynb) and `autogen-ext` implements a set of model clients for popular model services. AgentChat can use these model clients to interact with model services. \n",
1010
"\n",
11-
"**NOTE:** See {py:class}`~autogen_core.models.ChatCompletionCache` for a caching wrapper to use with the following clients."
11+
"```{note}\n",
12+
"See {py:class}`~autogen_ext.models.cache.ChatCompletionCache` for a caching wrapper to use with the following clients.\n",
13+
"```"
1214
]
1315
},
1416
{

python/packages/autogen-core/docs/src/user-guide/core-user-guide/framework/model-clients.ipynb

+39-9
Original file line numberDiff line numberDiff line change
@@ -327,9 +327,20 @@
327327
"source": [
328328
"## Caching Wrapper\n",
329329
"\n",
330-
"`autogen_core` implements a {py:class}`~autogen_core.models.ChatCompletionCache` that can wrap any {py:class}`~autogen_core.models.ChatCompletionClient`. Using this wrapper avoids incurring token usage when querying the underlying client with the same prompt multiple times. \n",
330+
"`autogen_ext` implements {py:class}`~autogen_ext.models.cache.ChatCompletionCache` that can wrap any {py:class}`~autogen_core.models.ChatCompletionClient`. Using this wrapper avoids incurring token usage when querying the underlying client with the same prompt multiple times.\n",
331331
"\n",
332-
"{py:class}`~autogen_core.models.ChatCompletionCache` uses a {py:class}`~autogen_core.CacheStore` protocol to allow duck-typing any storage object that has a pair of `get` & `set` methods (such as `redis.Redis` or `diskcache.Cache`). Here's an example of using `diskcache` for local caching:"
332+
"{py:class}`~autogen_ext.models.cache.ChatCompletionCache` uses a {py:class}`~autogen_core.CacheStore` protocol. We have implemented some useful variants of {py:class}`~autogen_core.CacheStore` including {py:class}`~autogen_ext.cache_store.diskcache.DiskCacheStore` and {py:class}`~autogen_ext.cache_store.redis.RedisStore`.\n",
333+
"\n",
334+
"Here's an example of using `diskcache` for local caching:"
335+
]
336+
},
337+
{
338+
"cell_type": "code",
339+
"execution_count": null,
340+
"metadata": {},
341+
"outputs": [],
342+
"source": [
343+
"# pip install -U \"autogen-ext[openai, diskcache]\""
333344
]
334345
},
335346
{
@@ -346,18 +357,37 @@
346357
}
347358
],
348359
"source": [
349-
"from typing import Any, Dict, Optional\n",
360+
"import asyncio\n",
361+
"import tempfile\n",
350362
"\n",
351-
"from autogen_core.models import ChatCompletionCache\n",
363+
"from autogen_core.models import UserMessage\n",
364+
"from autogen_ext.cache_store.diskcache import DiskCacheStore\n",
365+
"from autogen_ext.models.cache import CHAT_CACHE_VALUE_TYPE, ChatCompletionCache\n",
366+
"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
352367
"from diskcache import Cache\n",
353368
"\n",
354-
"diskcache_client = Cache(\"/tmp/diskcache\")\n",
355369
"\n",
356-
"cached_client = ChatCompletionCache(model_client, diskcache_client)\n",
357-
"response = await cached_client.create(messages=messages)\n",
370+
"async def main() -> None:\n",
371+
" with tempfile.TemporaryDirectory() as tmpdirname:\n",
372+
" # Initialize the original client\n",
373+
" openai_model_client = OpenAIChatCompletionClient(model=\"gpt-4o\")\n",
374+
"\n",
375+
" # Then initialize the CacheStore, in this case with diskcache.Cache.\n",
376+
" # You can also use redis like:\n",
377+
" # from autogen_ext.cache_store.redis import RedisStore\n",
378+
" # import redis\n",
379+
" # redis_instance = redis.Redis()\n",
380+
" # cache_store = RedisStore[CHAT_CACHE_VALUE_TYPE](redis_instance)\n",
381+
" cache_store = DiskCacheStore[CHAT_CACHE_VALUE_TYPE](Cache(tmpdirname))\n",
382+
" cache_client = ChatCompletionCache(openai_model_client, cache_store)\n",
383+
"\n",
384+
" response = await cache_client.create([UserMessage(content=\"Hello, how are you?\", source=\"user\")])\n",
385+
" print(response) # Should print response from OpenAI\n",
386+
" response = await cache_client.create([UserMessage(content=\"Hello, how are you?\", source=\"user\")])\n",
387+
" print(response) # Should print cached response\n",
388+
"\n",
358389
"\n",
359-
"cached_response = await cached_client.create(messages=messages)\n",
360-
"print(cached_response.cached)"
390+
"asyncio.run(main())"
361391
]
362392
},
363393
{

python/packages/autogen-core/src/autogen_core/__init__.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
from ._agent_runtime import AgentRuntime
1111
from ._agent_type import AgentType
1212
from ._base_agent import BaseAgent
13-
from ._cache_store import CacheStore
13+
from ._cache_store import CacheStore, InMemoryStore
1414
from ._cancellation_token import CancellationToken
1515
from ._closure_agent import ClosureAgent, ClosureContext
1616
from ._component_config import (
@@ -87,6 +87,7 @@
8787
"AgentRuntime",
8888
"BaseAgent",
8989
"CacheStore",
90+
"InMemoryStore",
9091
"CancellationToken",
9192
"AgentInstantiationContext",
9293
"TopicId",

python/packages/autogen-core/src/autogen_core/_cache_store.py

+18-6
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,16 @@
1-
from typing import Any, Optional, Protocol
1+
from typing import Any, Dict, Generic, Optional, Protocol, TypeVar, cast
22

3+
T = TypeVar("T")
34

4-
class CacheStore(Protocol):
5+
6+
class CacheStore(Protocol, Generic[T]):
57
"""
68
This protocol defines the basic interface for store/cache operations.
79
8-
Allows duck-typing with any object that implements the get and set methods,
9-
such as redis or diskcache interfaces.
10+
Sub-classes should handle the lifecycle of underlying storage.
1011
"""
1112

12-
def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]:
13+
def get(self, key: str, default: Optional[T] = None) -> Optional[T]:
1314
"""
1415
Retrieve an item from the store.
1516
@@ -23,7 +24,7 @@ def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]:
2324
"""
2425
...
2526

26-
def set(self, key: str, value: Any) -> Optional[Any]:
27+
def set(self, key: str, value: T) -> None:
2728
"""
2829
Set an item in the store.
2930
@@ -32,3 +33,14 @@ def set(self, key: str, value: Any) -> Optional[Any]:
3233
value: The value to be stored in the store.
3334
"""
3435
...
36+
37+
38+
class InMemoryStore(CacheStore[T]):
39+
def __init__(self) -> None:
40+
self.store: Dict[str, Any] = {}
41+
42+
def get(self, key: str, default: Optional[T] = None) -> Optional[T]:
43+
return cast(Optional[T], self.store.get(key, default))
44+
45+
def set(self, key: str, value: T) -> None:
46+
self.store[key] = value

python/packages/autogen-core/src/autogen_core/models/__init__.py

-4
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,4 @@
1-
from ._cache import ChatCompletionCache
21
from ._model_client import ChatCompletionClient, ModelCapabilities, ModelFamily, ModelInfo # type: ignore
3-
from ._replay_chat_completion_client import ReplayChatCompletionClient
42
from ._types import (
53
AssistantMessage,
64
ChatCompletionTokenLogprob,
@@ -17,7 +15,6 @@
1715

1816
__all__ = [
1917
"ModelCapabilities",
20-
"ChatCompletionCache",
2118
"ChatCompletionClient",
2219
"SystemMessage",
2320
"UserMessage",
@@ -32,5 +29,4 @@
3229
"ChatCompletionTokenLogprob",
3330
"ModelFamily",
3431
"ModelInfo",
35-
"ReplayChatCompletionClient",
3632
]

python/packages/autogen-core/tests/test_cache_store.py

+17-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
from unittest.mock import Mock
22

3-
from autogen_core import CacheStore
3+
from autogen_core import CacheStore, InMemoryStore
44

55

66
def test_set_and_get_object_key_value() -> None:
@@ -30,3 +30,19 @@ def test_set_overwrite_existing_key() -> None:
3030
mock_store.get.return_value = new_value
3131
mock_store.set.assert_called_with(key, new_value)
3232
assert mock_store.get(key) == new_value
33+
34+
35+
def test_inmemory_store() -> None:
36+
store = InMemoryStore[int]()
37+
test_key = "test_key"
38+
test_value = 42
39+
store.set(test_key, test_value)
40+
assert store.get(test_key) == test_value
41+
42+
new_value = 2
43+
store.set(test_key, new_value)
44+
assert store.get(test_key) == new_value
45+
46+
key = "non_existent_key"
47+
default_value = 99
48+
assert store.get(key, default_value) == default_value

python/packages/autogen-ext/pyproject.toml

+6
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,12 @@ video-surfer = [
4646
"ffmpeg-python",
4747
"openai-whisper",
4848
]
49+
diskcache = [
50+
"diskcache>=5.6.3"
51+
]
52+
redis = [
53+
"redis>=5.2.1"
54+
]
4955

5056
grpc = [
5157
"grpcio~=1.62.0", # TODO: update this once we have a stable version.

python/packages/autogen-ext/src/autogen_ext/cache_store/__init__.py

Whitespace-only changes.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
from typing import Any, Optional, TypeVar, cast
2+
3+
import diskcache
4+
from autogen_core import CacheStore
5+
6+
T = TypeVar("T")
7+
8+
9+
class DiskCacheStore(CacheStore[T]):
10+
"""
11+
A typed CacheStore implementation that uses diskcache as the underlying storage.
12+
See :class:`~autogen_ext.models.cache.ChatCompletionCache` for an example of usage.
13+
14+
Args:
15+
cache_instance: An instance of diskcache.Cache.
16+
The user is responsible for managing the DiskCache instance's lifetime.
17+
"""
18+
19+
def __init__(self, cache_instance: diskcache.Cache): # type: ignore[no-any-unimported]
20+
self.cache = cache_instance
21+
22+
def get(self, key: str, default: Optional[T] = None) -> Optional[T]:
23+
return cast(Optional[T], self.cache.get(key, default)) # type: ignore[reportUnknownMemberType]
24+
25+
def set(self, key: str, value: T) -> None:
26+
self.cache.set(key, cast(Any, value)) # type: ignore[reportUnknownMemberType]
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
from typing import Any, Optional, TypeVar, cast
2+
3+
import redis
4+
from autogen_core import CacheStore
5+
6+
T = TypeVar("T")
7+
8+
9+
class RedisStore(CacheStore[T]):
10+
"""
11+
A typed CacheStore implementation that uses redis as the underlying storage.
12+
See :class:`~autogen_ext.models.cache.ChatCompletionCache` for an example of usage.
13+
14+
Args:
15+
redis_instance: An instance of `redis.Redis`.
16+
The user is responsible for managing the Redis instance's lifetime.
17+
"""
18+
19+
def __init__(self, redis_instance: redis.Redis):
20+
self.cache = redis_instance
21+
22+
def get(self, key: str, default: Optional[T] = None) -> Optional[T]:
23+
value = cast(Optional[T], self.cache.get(key))
24+
if value is None:
25+
return default
26+
return value
27+
28+
def set(self, key: str, value: T) -> None:
29+
self.cache.set(key, cast(Any, value))
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
from ._chat_completion_cache import CHAT_CACHE_VALUE_TYPE, ChatCompletionCache
2+
3+
__all__ = [
4+
"CHAT_CACHE_VALUE_TYPE",
5+
"ChatCompletionCache",
6+
]

0 commit comments

Comments
 (0)