Skip to content

Commit

Permalink
mypy fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
victordibia committed Feb 10, 2025
1 parent f9037d5 commit 9b9a76d
Showing 1 changed file with 39 additions and 39 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -308,17 +308,17 @@
},
{
"cell_type": "code",
"execution_count": 58,
"execution_count": 28,
"metadata": {},
"outputs": [],
"source": [
"# !pip install google-genai\n",
"import os\n",
"from typing import AsyncGenerator, Sequence\n",
"\n",
"from autogen_agentchat.ui import Console\n",
"from autogen_agentchat.agents import BaseChatAgent\n",
"from autogen_agentchat.base import Response\n",
"from autogen_agentchat.messages import AgentEvent, ChatMessage\n",
"from autogen_agentchat.messages import AgentEvent, ChatMessage, TextMessage\n",
"from autogen_core import CancellationToken\n",
"from autogen_core.model_context import UnboundedChatCompletionContext\n",
"from autogen_core.models import AssistantMessage, RequestUsage, UserMessage\n",
Expand All @@ -333,7 +333,7 @@
" description: str = \"An agent that provides assistance with ability to use tools.\",\n",
" model: str = \"gemini-1.5-flash-002\",\n",
" api_key: str = os.environ[\"GEMINI_API_KEY\"],\n",
" system_message: str = \"You are a helpful assistant that can respond to messages. Reply with TERMINATE when the task has been completed.\",\n",
" system_message: str | None = \"You are a helpful assistant that can respond to messages. Reply with TERMINATE when the task has been completed.\",\n",
" ):\n",
" super().__init__(name=name, description=description)\n",
" self._model_context = UnboundedChatCompletionContext()\n",
Expand All @@ -357,10 +357,10 @@
" # Add messages to the model context\n",
" for msg in messages:\n",
" await self._model_context.add_message(UserMessage(content=msg.content, source=msg.source))\n",
" \n",
"\n",
" # Get conversation history\n",
" history = [msg.source + \": \" + msg.content + \"\\n\" for msg in await self._model_context.get_messages()]\n",
"\n",
" history = [(msg.source if hasattr(msg, \"source\") else \"system\" ) + \": \" + (msg.content if isinstance(msg.content, str) else \"\") + \"\\n\" for msg in await self._model_context.get_messages()] \n",
" # Generate response using Gemini\n",
" response = self._model_client.models.generate_content(\n",
" model=self._model,\n",
Expand Down Expand Up @@ -393,7 +393,7 @@
},
{
"cell_type": "code",
"execution_count": 59,
"execution_count": 29,
"metadata": {},
"outputs": [
{
Expand All @@ -414,7 +414,7 @@
"TaskResult(messages=[TextMessage(source='user', models_usage=None, content='What is the capital of New York?', type='TextMessage'), TextMessage(source='gemini_assistant', models_usage=RequestUsage(prompt_tokens=46, completion_tokens=5), content='Albany\\nTERMINATE\\n', type='TextMessage')], stop_reason=None)"
]
},
"execution_count": 59,
"execution_count": 29,
"metadata": {},
"output_type": "execute_result"
}
Expand All @@ -435,7 +435,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 30,
"metadata": {},
"outputs": [
{
Expand All @@ -445,42 +445,41 @@
"---------- user ----------\n",
"Write a Haiku poem with 4 lines about the fall season.\n",
"---------- primary ----------\n",
"Leaves dance in the breeze, \n",
"Crimson and gold softly fall, \n",
"Whispers of crisp air, \n",
"Nature's quilt, a warm call.\n",
"Leaves of crimson dance, \n",
"Whispers of the crisp cool air, \n",
"Pumpkins grin with light, \n",
"Nature's quilt laid bare.\n",
"---------- gemini_critic ----------\n",
"The haiku is lovely and evocative, but it has four lines instead of three. To be a true haiku, one line needs to be removed. Consider which line contributes least to the overall image and remove that. The imagery is strong, though.\n",
"The haiku only has three lines. The fourth line should be removed. Otherwise, it's a good poem.\n",
"\n",
"---------- primary ----------\n",
"Thank you for the feedback! Here’s a revised version of the haiku with three lines:\n",
"Thank you for your feedback! Here’s the revised haiku with three lines:\n",
"\n",
"Leaves dance in the breeze, \n",
"Crimson and gold softly fall, \n",
"Nature's quilt, a warm call.\n",
"Leaves of crimson dance, \n",
"Whispers of the crisp cool air, \n",
"Pumpkins grin with light.\n",
"---------- gemini_critic ----------\n",
"The revised haiku is much improved! Removing the \"Whispers of crisp air\" line maintains the strong imagery while adhering to the three-line structure. APPROVE\n",
"APPROVE\n",
"\n"
]
},
{
"data": {
"text/plain": [
"TaskResult(messages=[TextMessage(source='user', models_usage=None, content='Write a Haiku poem with 4 lines about the fall season.', type='TextMessage'), TextMessage(source='primary', models_usage=RequestUsage(prompt_tokens=33, completion_tokens=31), content=\"Leaves dance in the breeze, \\nCrimson and gold softly fall, \\nWhispers of crisp air, \\nNature's quilt, a warm call.\", type='TextMessage'), TextMessage(source='gemini_critic', models_usage=RequestUsage(prompt_tokens=88, completion_tokens=53), content='The haiku is lovely and evocative, but it has four lines instead of three. To be a true haiku, one line needs to be removed. Consider which line contributes least to the overall image and remove that. The imagery is strong, though.\\n', type='TextMessage'), TextMessage(source='primary', models_usage=RequestUsage(prompt_tokens=133, completion_tokens=43), content=\"Thank you for the feedback! Here’s a revised version of the haiku with three lines:\\n\\nLeaves dance in the breeze, \\nCrimson and gold softly fall, \\nNature's quilt, a warm call.\", type='TextMessage'), TextMessage(source='gemini_critic', models_usage=RequestUsage(prompt_tokens=203, completion_tokens=35), content='The revised haiku is much improved! Removing the \"Whispers of crisp air\" line maintains the strong imagery while adhering to the three-line structure. APPROVE\\n', type='TextMessage')], stop_reason=\"Text 'APPROVE' mentioned\")"
"TaskResult(messages=[TextMessage(source='user', models_usage=None, content='Write a Haiku poem with 4 lines about the fall season.', type='TextMessage'), TextMessage(source='primary', models_usage=RequestUsage(prompt_tokens=33, completion_tokens=29), content=\"Leaves of crimson dance, \\nWhispers of the crisp cool air, \\nPumpkins grin with light, \\nNature's quilt laid bare.\", type='TextMessage'), TextMessage(source='gemini_critic', models_usage=RequestUsage(prompt_tokens=87, completion_tokens=26), content=\"The haiku only has three lines. The fourth line should be removed. Otherwise, it's a good poem.\\n\", type='TextMessage'), TextMessage(source='primary', models_usage=RequestUsage(prompt_tokens=100, completion_tokens=38), content='Thank you for your feedback! Here’s the revised haiku with three lines:\\n\\nLeaves of crimson dance, \\nWhispers of the crisp cool air, \\nPumpkins grin with light.', type='TextMessage'), TextMessage(source='gemini_critic', models_usage=RequestUsage(prompt_tokens=170, completion_tokens=3), content='APPROVE\\n', type='TextMessage')], stop_reason=\"Text 'APPROVE' mentioned\")"
]
},
"execution_count": 71,
"execution_count": 30,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from autogen_agentchat.agents import AssistantAgent\n",
"from autogen_agentchat.base import TaskResult\n",
"from autogen_agentchat.conditions import TextMentionTermination\n",
"from autogen_agentchat.agents import AssistantAgent \n",
"from autogen_agentchat.conditions import TextMentionTermination, MaxMessageTermination\n",
"from autogen_agentchat.teams import RoundRobinGroupChat\n",
"from autogen_agentchat.ui import Console\n",
"from autogen_core import CancellationToken\n",
"from autogen_agentchat.ui import Console \n",
"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
"\n",
"# Create the primary agent.\n",
"primary_agent = AssistantAgent(\n",
Expand Down Expand Up @@ -540,6 +539,7 @@
"from autogen_agentchat.messages import AgentEvent, ChatMessage\n",
"from autogen_core import CancellationToken, Component\n",
"from pydantic import BaseModel\n",
"from typing_extensions import Self\n",
"\n",
"\n",
"class GeminiAssistantAgentConfig(BaseModel):\n",
Expand All @@ -549,17 +549,17 @@
" system_message: str | None = None\n",
"\n",
"\n",
"class GeminiAssistant(BaseChatAgent, Component[GeminiAssistantAgentConfig]):\n",
"class GeminiAssistantAgent(BaseChatAgent, Component[GeminiAssistantAgentConfig]): # type: ignore[no-redef]\n",
" component_config_schema = GeminiAssistantAgentConfig\n",
" # component_provider_override = \"mypackage.agents.GeminiAssistant\"\n",
" # component_provider_override = \"mypackage.agents.GeminiAssistantAgent\"\n",
"\n",
" def __init__(\n",
" self,\n",
" name: str,\n",
" description: str = \"An agent that provides assistance with ability to use tools.\",\n",
" model: str = \"gemini-1.5-flash-002\",\n",
" api_key: str = os.environ[\"GEMINI_API_KEY\"],\n",
" system_message: str = \"You are a helpful assistant that can respond to messages. Reply with TERMINATE when the task has been completed.\",\n",
" system_message: str | None = \"You are a helpful assistant that can respond to messages. Reply with TERMINATE when the task has been completed.\",\n",
" ):\n",
" super().__init__(name=name, description=description)\n",
" self._model_context = UnboundedChatCompletionContext()\n",
Expand All @@ -583,9 +583,9 @@
" # Add messages to the model context\n",
" for msg in messages:\n",
" await self._model_context.add_message(UserMessage(content=msg.content, source=msg.source))\n",
"\n",
" \n",
" # Get conversation history\n",
" history = [msg.source + \": \" + msg.content + \"\\n\" for msg in await self._model_context.get_messages()]\n",
" history = [(msg.source if hasattr(msg, \"source\") else \"system\" ) + \": \" + (msg.content if isinstance(msg.content, str) else \"\") + \"\\n\" for msg in await self._model_context.get_messages()]\n",
"\n",
" # Generate response using Gemini\n",
" response = self._model_client.models.generate_content(\n",
Expand Down Expand Up @@ -617,7 +617,7 @@
" await self._model_context.clear()\n",
"\n",
" @classmethod\n",
" def _from_config(cls, config: GeminiAssistantAgentConfig) -> \"GeminiAssistant\":\n",
" def _from_config(cls, config: GeminiAssistantAgentConfig) -> Self:\n",
" return cls(\n",
" name=config.name, description=config.description, model=config.model, system_message=config.system_message\n",
" )\n",
Expand All @@ -637,41 +637,41 @@
"source": [
"Now that we have the required methods implemented, we can dump the custom agent to a JSON format and then load it back from that JSON configuration.\n",
" \n",
" > Note: You should set the `component_provider_override` class variable to the full path of the module containing the custom agent class e.g., (mypackage.agents.GeminiAssistantAgent). This is used by `load_component` method to determine how to instantiate the class."
" > Note: You should set the `component_provider_override` class variable to the full path of the module containing the custom agent class, e.g., `mypackage.agents.GeminiAssistantAgent`. This is used by the `load_component` method to determine how to instantiate the class."
]
},
{
"cell_type": "code",
"execution_count": 67,
"execution_count": 36,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{\n",
" \"provider\": \"__main__.GeminiAssistant\",\n",
" \"provider\": \"__main__.GeminiAssistantAgent\",\n",
" \"component_type\": \"agent\",\n",
" \"version\": 1,\n",
" \"component_version\": 1,\n",
" \"description\": null,\n",
" \"label\": \"GeminiAssistant\",\n",
" \"label\": \"GeminiAssistantAgent\",\n",
" \"config\": {\n",
" \"name\": \"gemini_assistant\",\n",
" \"description\": \"An agent that provides assistance with ability to use tools.\",\n",
" \"model\": \"gemini-1.5-flash-002\",\n",
" \"system_message\": \"You are a helpful assistant that can respond to messages. Reply with TERMINATE when the task has been completed.\"\n",
" }\n",
"}\n",
"<__main__.GeminiAssistant object at 0x14ab6d890>\n"
"<__main__.GeminiAssistantAgent object at 0x11a9a4f10>\n"
]
}
],
"source": [
"gemini_assistant = GeminiAssistant(\"gemini_assistant\")\n",
"gemini_assistant = GeminiAssistantAgent(\"gemini_assistant\")\n",
"config = gemini_assistant.dump_component()\n",
"print(config.model_dump_json(indent=2))\n",
"loaded_agent = GeminiAssistant.load_component(config)\n",
"loaded_agent = GeminiAssistantAgent.load_component(config)\n",
"print(loaded_agent)"
]
}
Expand Down

0 comments on commit 9b9a76d

Please sign in to comment.