fix conditional check for tool_calls in llm.py
elisalimli committed May 1, 2024
1 parent 1d8493a commit f0e75bc
Showing 1 changed file with 5 additions and 2 deletions.
libs/superagent/app/agents/llm.py: 5 additions & 2 deletions
@@ -261,7 +261,10 @@ async def _process_stream_response(self, res: CustomStreamWrapper):
 
         async for chunk in res:
             new_message = chunk.choices[0].delta
-            if new_message.tool_calls:
+            if (
+                hasattr(new_message, "tool_calls")
+                and new_message.tool_calls is not None
+            ):
                 new_tool_calls = self._process_tool_calls(new_message)
                 tool_calls.extend(new_tool_calls)

@@ -281,7 +284,7 @@ async def _process_model_response(self, res: ModelResponse):
         new_messages = self.messages
 
         new_message = res.choices[0].message
-        if new_message.tool_calls:
+        if hasattr(new_message, "tool_calls") and new_message.tool_calls is not None:
             new_tool_calls = self._process_tool_calls(new_message)
             tool_calls.extend(new_tool_calls)

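Why the guard is needed: a streamed delta or a response message can either lack the tool_calls attribute entirely or carry it as None, and the old bare truthiness check (`if new_message.tool_calls:`) raises AttributeError in the first case. The following is a minimal, self-contained sketch of the same pattern; extract_tool_calls and the SimpleNamespace stand-ins are hypothetical illustrations, not part of the Superagent codebase or the litellm API.

# Minimal sketch of the defensive check; SimpleNamespace objects stand in for
# the streamed delta / response message, not the real litellm types.
from types import SimpleNamespace


def extract_tool_calls(message) -> list:
    """Return the message's tool calls, tolerating a missing or None attribute."""
    if hasattr(message, "tool_calls") and message.tool_calls is not None:
        return list(message.tool_calls)
    return []


# Delta without the attribute at all: the old `if message.tool_calls:` check
# would raise AttributeError here; the guarded version returns [].
print(extract_tool_calls(SimpleNamespace(content="hi")))                   # []

# Attribute present but None: also safely skipped.
print(extract_tool_calls(SimpleNamespace(tool_calls=None)))                # []

# Actual tool calls: passed through unchanged.
print(extract_tool_calls(SimpleNamespace(tool_calls=[{"id": "call_1"}])))  # [{'id': 'call_1'}]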
