Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
59 changes: 28 additions & 31 deletions examples/agents-sdk-python/example.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import asyncio
from pathlib import Path
import shutil
from pathlib import Path

from openai import AsyncOpenAI
from agents import (
Expand All @@ -21,19 +21,29 @@ async def prompt_user(question: str) -> str:
return await loop.run_in_executor(None, input, question)


@function_tool
async def get_weather(location: str) -> str:
    """Report a placeholder weather observation for *location*.

    Stub tool for demonstration purposes only — it performs no lookup
    and always claims sunshine.
    """
    report = f"The weather in {location} is sunny."
    return report


async def main():
# Set up OpenAI client for local server (e.g., Ollama)
# Check if npx is installed
if not shutil.which("npx"):
raise RuntimeError(
"❌ 'npx' is not installed. Please install it with `npm install -g npx`."
)

# OpenAI client (e.g., for Ollama or local LLM)
openai_client = AsyncOpenAI(
api_key="local",
base_url="http://localhost:11434/v1",
)

# Get current working directory
# Set up ModelContextProtocol (MCP) server using npx
samples_dir = str(Path.cwd())

# Create MCP server for filesystem operations
mcp_server = MCPServerStdio(
name="Filesystem MCP Server, via npx",
name="Filesystem MCP Server (via npx)",
params={
"command": "npx",
"args": [
Expand All @@ -52,51 +62,38 @@ async def main():
set_default_openai_client(openai_client)
set_default_openai_api("chat_completions")

# Define weather tool
@function_tool
async def get_weather(location: str) -> str:
return f"The weather in {location} is sunny."

# Create agent
# Create the agent
agent = Agent(
name="My Agent",
instructions="You are a helpful assistant.",
tools=[get_weather],
model="gpt-oss:20b-test",
model="gpt-oss:20b-test", # Ensure this model is available in your Ollama instance
mcp_servers=[mcp_server],
)

# Get user input
user_input = await prompt_user("> ")

# Run agent with streaming
# Run agent with streamed output
result = Runner.run_streamed(agent, user_input)

# Process streaming results
# Stream processing
async for event in result.stream_events():
if event.type == "raw_response_event":
continue
elif event.type == "agent_updated_stream_event":
print(f"Agent updated: {event.new_agent.name}")
print(f"[Agent updated]: {event.new_agent.name}")
elif event.type == "run_item_stream_event":
if event.item.type == "tool_call_item":
item = event.item
if item.type == "tool_call_item":
print("-- Tool was called")
elif event.item.type == "tool_call_output_item":
print(f"-- Tool output: {event.item.output}")
elif event.item.type == "message_output_item":
print(
f"-- Message output:\n {ItemHelpers.text_message_output(event.item)}"
)
else:
pass
elif item.type == "tool_call_output_item":
print(f"-- Tool output: {item.output}")
elif item.type == "message_output_item":
print(f"-- Message output:\n{ItemHelpers.text_message_output(item)}")

print("=== Run complete ===")
print(" Run complete.")


if __name__ == "__main__":
    # NOTE(review): this span contained stale diff residue — a duplicate
    # `shutil.which("npx")` preflight check that the same change set moves
    # into main() (see the RuntimeError raised there). The guard's only job
    # is to start the asyncio event loop for the async entry point.
    asyncio.run(main())