Skip to content

Commit f263986

Browse files
committed
Use AWSBedrockLLMService instead of OpenAILLMService in the aws-agentcore-websocket example (and improve the prompt)
1 parent aab5661 commit f263986

File tree

3 files changed: +16 additions, −7 deletions

deployment/aws-agentcore-websocket/README.md

Lines changed: 4 additions & 1 deletion
@@ -49,6 +49,9 @@ You can also choose to specify more granular permissions; see [Amazon Bedrock Ag
 
 Add your API keys:
 
+- `AWS_ACCESS_KEY_ID`: Your AWS access key ID for the Amazon Bedrock LLM used by the agent
+- `AWS_SECRET_ACCESS_KEY`: Your AWS secret access key for the Amazon Bedrock LLM used by the agent
+- `AWS_REGION`: The AWS region for the Amazon Bedrock LLM used by the agent
 - `OPENAI_API_KEY`: Your OpenAI API key
 - `DEEPGRAM_API_KEY`: Your Deepgram API key
 - `CARTESIA_API_KEY`: Your Cartesia API key
@@ -60,7 +63,7 @@ You can also choose to specify more granular permissions; see [Amazon Bedrock Ag
 cp env.example .env
 ```
 
-Add your AWS credentials and configuration:
+Add your AWS credentials and configuration, for generating the signed WebSocket URL in the `/start` endpoint:
 
 - `AWS_ACCESS_KEY_ID`
 - `AWS_SECRET_ACCESS_KEY`

deployment/aws-agentcore-websocket/agent/agent.py

Lines changed: 9 additions & 5 deletions
@@ -21,13 +21,12 @@
 from pipecat.pipeline.task import PipelineParams, PipelineTask
 from pipecat.processors.aggregators.llm_context import LLMContext
 from pipecat.processors.aggregators.llm_response_universal import LLMContextAggregatorPair
-from pipecat.runner.types import DailyRunnerArguments, RunnerArguments
-from pipecat.runner.utils import create_transport
+from pipecat.runner.types import RunnerArguments
 from pipecat.serializers.protobuf import ProtobufFrameSerializer
+from pipecat.services.aws.llm import AWSBedrockLLMService
 from pipecat.services.cartesia.tts import CartesiaTTSService
 from pipecat.services.deepgram.stt import DeepgramSTTService
 from pipecat.services.llm_service import FunctionCallParams
-from pipecat.services.openai.llm import OpenAILLMService
 from pipecat.transports.base_transport import BaseTransport, TransportParams
 from pipecat.transports.daily.transport import DailyParams
 from pipecat.transports.websocket.fastapi import FastAPIWebsocketParams, FastAPIWebsocketTransport
@@ -74,7 +73,11 @@ async def run_bot(transport: BaseTransport, runner_args: RunnerArguments):
         voice_id="71a7ad14-091c-4e8e-a314-022ece01c121",  # British Reading Lady
     )
 
-    llm = OpenAILLMService(api_key=os.getenv("OPENAI_API_KEY"))
+    # Automatically uses AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and AWS_REGION env vars.
+    llm = AWSBedrockLLMService(
+        model="us.amazon.nova-2-lite-v1:0",
+        params=AWSBedrockLLMService.InputParams(temperature=0.8),
+    )
 
     # You can also register a function_name of None to get all functions
     # sent to the same callback with an additional function_name parameter.
@@ -117,8 +120,9 @@ async def on_function_calls_started(service, function_calls):
     messages = [
         {
             "role": "system",
-            "content": "You are a helpful LLM in a WebRTC call. Your goal is to demonstrate your capabilities in a succinct way. Your output will be spoken aloud, so avoid special characters that can't easily be spoken, such as emojis or bullet points. Respond to what the user said in a creative and helpful way.",
+            "content": "You are a helpful LLM in a voice call. Your goal is to demonstrate your capabilities in a succinct way. Your output will be spoken aloud, so avoid special characters that can't easily be spoken, such as emojis or bullet points. Respond to what the user said in a creative and helpful way.",
         },
+        {"role": "user", "content": "Say hello and briefly introduce yourself."},
     ]
 
     context = LLMContext(messages, tools)
deployment/aws-agentcore-websocket/env.example (filename omitted in the rendered page; inferred from the diff content and the README's `cp env.example .env` instruction)

Lines changed: 3 additions & 1 deletion

@@ -1,3 +1,5 @@
-OPENAI_API_KEY=...
+AWS_ACCESS_KEY_ID=...
+AWS_SECRET_ACCESS_KEY=...
+AWS_REGION=...
 DEEPGRAM_API_KEY=...
 CARTESIA_API_KEY=...

0 commit comments

Comments (0)