From cf5a39b92a7dbce7fc1403c6b54958cc7ecaffbb Mon Sep 17 00:00:00 2001
From: Ali Salimli <67149699+elisalimli@users.noreply.github.com>
Date: Wed, 1 May 2024 15:54:56 +0400
Subject: [PATCH] fix: conditional check for tool_calls in llm.py (#986)

---
 libs/superagent/app/agents/llm.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/libs/superagent/app/agents/llm.py b/libs/superagent/app/agents/llm.py
index d1c80cfb3..2776c4be2 100644
--- a/libs/superagent/app/agents/llm.py
+++ b/libs/superagent/app/agents/llm.py
@@ -261,7 +261,7 @@ async def _process_stream_response(self, res: CustomStreamWrapper):
         async for chunk in res:
             new_message = chunk.choices[0].delta
 
-            if new_message.tool_calls:
+            if hasattr(new_message, "tool_calls"):
                 new_tool_calls = self._process_tool_calls(new_message)
                 tool_calls.extend(new_tool_calls)
 
@@ -281,7 +281,7 @@ async def _process_model_response(self, res: ModelResponse):
         new_messages = self.messages
         new_message = res.choices[0].message
 
-        if new_message.tool_calls:
+        if hasattr(new_message, "tool_calls"):
            new_tool_calls = self._process_tool_calls(new_message)
            tool_calls.extend(new_tool_calls)
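
Note: the following is an illustrative sketch, not part of the patch. It shows how the old truthiness check and the new hasattr() check differ on message/delta objects; SimpleNamespace is a hypothetical stand-in for the real delta and response-message objects, which the patch does not define.

# Illustrative sketch (not part of the patch): compares the old truthiness
# check with the new hasattr() check on three kinds of delta objects.
# SimpleNamespace stands in for the streamed delta / response message.
from types import SimpleNamespace

deltas = {
    "attribute missing   ": SimpleNamespace(content="hi"),
    "tool_calls is None  ": SimpleNamespace(content=None, tool_calls=None),
    "tool_calls populated": SimpleNamespace(content=None, tool_calls=[{"id": "call_1"}]),
}

for label, delta in deltas.items():
    try:
        # Old check: evaluates truthiness; raises if the attribute is absent.
        old = bool(delta.tool_calls)
    except AttributeError:
        old = "AttributeError"
    # New check: only asks whether the attribute exists; never raises.
    new = hasattr(delta, "tool_calls")
    print(f"{label}  old check -> {old!s:14}  new check -> {new}")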