From 0ac106c22bfd92705cd9084b6f22cdb7e5a1b407 Mon Sep 17 00:00:00 2001
From: aboutphilippe
Date: Tue, 4 Mar 2025 22:34:40 +0100
Subject: [PATCH] for review

---
 agent_chat/src/functions/llm_chat.py | 23 ++++-------------------
 1 file changed, 4 insertions(+), 19 deletions(-)

diff --git a/agent_chat/src/functions/llm_chat.py b/agent_chat/src/functions/llm_chat.py
index e808fed..6e56f3a 100644
--- a/agent_chat/src/functions/llm_chat.py
+++ b/agent_chat/src/functions/llm_chat.py
@@ -8,31 +8,22 @@
 load_dotenv()
 
-
 class Message(BaseModel):
     role: Literal["system", "user", "assistant"]
     content: str
 
-
 class LlmChatInput(BaseModel):
     system_content: str | None = None
     model: str | None = None
     messages: list[Message] | None = None
 
-
-def raise_exception(message: str) -> None:
-    log.error(message)
-    raise NonRetryableError(message)
-
-
 @function.defn()
 async def llm_chat(agent_input: LlmChatInput) -> dict[str, str]:
     try:
         log.info("llm_chat function started", agent_input=agent_input)
 
         if os.environ.get("RESTACK_API_KEY") is None:
-            error_message = "RESTACK_API_KEY is not set"
-            raise_exception(error_message)
+            raise NonRetryableError("RESTACK_API_KEY is not set")
 
         client = OpenAI(
             base_url="https://ai.restack.io", api_key=os.environ.get("RESTACK_API_KEY")
         )
@@ -47,19 +38,13 @@ async def llm_chat(agent_input: LlmChatInput) -> dict[str, str]:
             model=agent_input.model or "gpt-4o-mini",
             messages=agent_input.messages,
         )
-    except Exception as e:
-        error_message = f"LLM chat failed: {e}"
-        raise NonRetryableError(error_message) from e
-    else:
-        log.info(
-            "llm_chat function completed", assistant_raw_response=assistant_raw_response
-        )
 
         assistant_response = {
             "role": assistant_raw_response.choices[0].message.role,
             "content": assistant_raw_response.choices[0].message.content,
         }
 
-        log.info("assistant_response", assistant_response=assistant_response)
-
+    except Exception as e:
+        raise NonRetryableError(f"LLM chat failed: {e}") from e
+    else:
         return assistant_response
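
Reviewer note (not part of the patch): for context, the post-patch shape of llm_chat should read roughly as sketched below. This is assembled from the hunks above; the import block, the unchanged code between the two hunks, and the name of the completion call (client.chat.completions.create) are not shown in this diff and are assumptions here.

# Sketch only: approximate shape of llm_chat.py after this patch is applied.
# The imports below are assumptions; they sit outside the changed hunks and
# are not confirmed by this diff.
import os
from typing import Literal

from dotenv import load_dotenv
from openai import OpenAI
from pydantic import BaseModel
from restack_ai.function import NonRetryableError, function, log

load_dotenv()

class Message(BaseModel):
    role: Literal["system", "user", "assistant"]
    content: str

class LlmChatInput(BaseModel):
    system_content: str | None = None
    model: str | None = None
    messages: list[Message] | None = None

@function.defn()
async def llm_chat(agent_input: LlmChatInput) -> dict[str, str]:
    try:
        log.info("llm_chat function started", agent_input=agent_input)

        if os.environ.get("RESTACK_API_KEY") is None:
            # Raised inside the try block, so it is caught by the except below
            # and re-raised wrapped as "LLM chat failed: ...".
            raise NonRetryableError("RESTACK_API_KEY is not set")

        client = OpenAI(
            base_url="https://ai.restack.io", api_key=os.environ.get("RESTACK_API_KEY")
        )

        # Assumption: the completion call sits in the unchanged region between
        # the two hunks; only its trailing arguments appear in the diff.
        assistant_raw_response = client.chat.completions.create(
            model=agent_input.model or "gpt-4o-mini",
            messages=agent_input.messages,
        )

        assistant_response = {
            "role": assistant_raw_response.choices[0].message.role,
            "content": assistant_raw_response.choices[0].message.content,
        }
    except Exception as e:
        # Any failure above surfaces as a NonRetryableError, which signals a
        # non-retryable failure to the Restack engine.
        raise NonRetryableError(f"LLM chat failed: {e}") from e
    else:
        return assistant_response

Since the assistant_response dict is now built inside the try block, an unexpected response shape (e.g. an empty choices list) is also wrapped as "LLM chat failed: ..." rather than escaping unhandled.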