From d6ac911d24db99df2dc133e287526a70090ecd20 Mon Sep 17 00:00:00 2001 From: Vincent Koc Date: Fri, 3 Oct 2025 08:28:06 -0700 Subject: [PATCH 1/5] Create env.example --- open-telemetry/opik/env.example | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 open-telemetry/opik/env.example diff --git a/open-telemetry/opik/env.example b/open-telemetry/opik/env.example new file mode 100644 index 0000000..27d5696 --- /dev/null +++ b/open-telemetry/opik/env.example @@ -0,0 +1,19 @@ +DEEPGRAM_API_KEY=your_deepgram_key +CARTESIA_API_KEY=your_cartesia_key +OPENAI_API_KEY=your_openai_key + +# Opik Configuration +# Set to any value to enable tracing +ENABLE_TRACING=true + +# OTLP endpoint (defaults to Opik Cloud if not set) +# For Opik Cloud: https://www.comet.com/opik/api/v1/private/otel/v1/traces +# For self-hosted: http://<your-opik-host>/api/v1/private/otel/v1/traces +OTEL_EXPORTER_OTLP_ENDPOINT=https://www.comet.com/opik/api/v1/private/otel/v1/traces + +# Opik headers (get your API key from https://www.comet.com/opik) +# Format: Authorization=<your-api-key>,Comet-Workspace=<your-workspace>,projectName=<your-project> +OTEL_EXPORTER_OTLP_HEADERS=Authorization=<your-api-key>,Comet-Workspace=default,projectName=<your-project> + +# Set to any value to enable console output for debugging +# OTEL_CONSOLE_EXPORT=true \ No newline at end of file From 867b94fd07316a491c359350c83a20626acae849 Mon Sep 17 00:00:00 2001 From: Vincent Koc Date: Fri, 3 Oct 2025 08:28:14 -0700 Subject: [PATCH 2/5] Create bot.py --- open-telemetry/opik/bot.py | 180 +++++++++++++++++++++++++++++++++++++ 1 file changed, 180 insertions(+) create mode 100644 open-telemetry/opik/bot.py diff --git a/open-telemetry/opik/bot.py b/open-telemetry/opik/bot.py new file mode 100644 index 0000000..7be5451 --- /dev/null +++ b/open-telemetry/opik/bot.py @@ -0,0 +1,180 @@ +# +# Copyright (c) 2024–2025, Daily +# +# SPDX-License-Identifier: BSD 2-Clause License +# + +import os + +from dotenv import load_dotenv +from loguru import logger +from 
opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter +from pipecat.adapters.schemas.function_schema import FunctionSchema +from pipecat.adapters.schemas.tools_schema import ToolsSchema +from pipecat.audio.vad.silero import SileroVADAnalyzer +from pipecat.frames.frames import LLMRunFrame, TTSSpeakFrame +from pipecat.pipeline.pipeline import Pipeline +from pipecat.pipeline.runner import PipelineRunner +from pipecat.pipeline.task import PipelineParams, PipelineTask +from pipecat.processors.aggregators.openai_llm_context import OpenAILLMContext +from pipecat.runner.types import RunnerArguments +from pipecat.runner.utils import create_transport +from pipecat.services.cartesia.tts import CartesiaTTSService +from pipecat.services.deepgram.stt import DeepgramSTTService +from pipecat.services.llm_service import FunctionCallParams +from pipecat.services.openai.llm import OpenAILLMService +from pipecat.transports.base_transport import BaseTransport, TransportParams +from pipecat.transports.daily.transport import DailyParams +from pipecat.transports.websocket.fastapi import FastAPIWebsocketParams +from pipecat.utils.tracing.setup import setup_tracing + +load_dotenv(override=True) + +IS_TRACING_ENABLED = bool(os.getenv("ENABLE_TRACING")) + +# Initialize tracing if enabled +if IS_TRACING_ENABLED: + # Create the exporter for Opik (HTTP/JSON only) + # Opik supports HTTP transport for traces only (no logs or metrics) + # Headers are configured via OTEL_EXPORTER_OTLP_HEADERS environment variable + otlp_exporter = OTLPSpanExporter( + endpoint=os.getenv( + "OTEL_EXPORTER_OTLP_ENDPOINT", + "https://www.comet.com/opik/api/v1/private/otel/v1/traces" + ), + ) + + # Set up tracing with the exporter + setup_tracing( + service_name="pipecat-demo", + exporter=otlp_exporter, + console_export=bool(os.getenv("OTEL_CONSOLE_EXPORT")), + ) + logger.info("OpenTelemetry tracing initialized for Opik") + + +async def fetch_weather_from_api(params: FunctionCallParams): + await 
params.result_callback({"conditions": "nice", "temperature": "75"}) + + +# We store functions so objects (e.g. SileroVADAnalyzer) don't get +# instantiated. The function will be called when the desired transport gets +# selected. +transport_params = { + "daily": lambda: DailyParams( + audio_in_enabled=True, + audio_out_enabled=True, + vad_analyzer=SileroVADAnalyzer(), + ), + "twilio": lambda: FastAPIWebsocketParams( + audio_in_enabled=True, + audio_out_enabled=True, + vad_analyzer=SileroVADAnalyzer(), + ), + "webrtc": lambda: TransportParams( + audio_in_enabled=True, + audio_out_enabled=True, + vad_analyzer=SileroVADAnalyzer(), + ), +} + + +async def run_bot(transport: BaseTransport): + logger.info(f"Starting bot") + + stt = DeepgramSTTService(api_key=os.getenv("DEEPGRAM_API_KEY")) + + tts = CartesiaTTSService( + api_key=os.getenv("CARTESIA_API_KEY"), + voice_id="71a7ad14-091c-4e8e-a314-022ece01c121", # British Reading Lady + ) + + llm = OpenAILLMService( + api_key=os.getenv("OPENAI_API_KEY"), params=OpenAILLMService.InputParams(temperature=0.5) + ) + + # You can also register a function_name of None to get all functions + # sent to the same callback with an additional function_name parameter. + llm.register_function("get_current_weather", fetch_weather_from_api) + + @llm.event_handler("on_function_calls_started") + async def on_function_calls_started(service, function_calls): + await tts.queue_frame(TTSSpeakFrame("Let me check on that.")) + + weather_function = FunctionSchema( + name="get_current_weather", + description="Get the current weather", + properties={ + "location": { + "type": "string", + "description": "The city and state, e.g. San Francisco, CA", + }, + "format": { + "type": "string", + "enum": ["celsius", "fahrenheit"], + "description": "The temperature unit to use. 
Infer this from the user's location.", + }, + }, + required=["location", "format"], + ) + tools = ToolsSchema(standard_tools=[weather_function]) + + messages = [ + { + "role": "system", + "content": "You are a helpful LLM in a WebRTC call. Your goal is to demonstrate your capabilities in a succinct way. Your output will be converted to audio so don't include special characters in your answers. Respond to what the user said in a creative and helpful way.", + }, + ] + + context = OpenAILLMContext(messages, tools) + context_aggregator = llm.create_context_aggregator(context) + + pipeline = Pipeline( + [ + transport.input(), + stt, + context_aggregator.user(), + llm, + tts, + transport.output(), + context_aggregator.assistant(), + ] + ) + + task = PipelineTask( + pipeline, + params=PipelineParams( + enable_metrics=True, + enable_usage_metrics=True, + ), + enable_tracing=IS_TRACING_ENABLED, + # Optionally, add a conversation ID to track the conversation + # conversation_id="8df26cc1-6db0-4a7a-9930-1e037c8f1fa2", + ) + + @transport.event_handler("on_client_connected") + async def on_client_connected(transport, client): + logger.info(f"Client connected") + # Kick off the conversation. 
+ await task.queue_frames([LLMRunFrame()]) + + @transport.event_handler("on_client_disconnected") + async def on_client_disconnected(transport, client): + logger.info(f"Client disconnected") + await task.cancel() + + runner = PipelineRunner(handle_sigint=False) + + await runner.run(task) + + +async def bot(runner_args: RunnerArguments): + """Main bot entry point compatible with Pipecat Cloud.""" + transport = await create_transport(runner_args, transport_params) + await run_bot(transport) + + +if __name__ == "__main__": + from pipecat.runner.run import main + + main() From 7706083b553c66460f3db622a4fc1f7d8a93faba Mon Sep 17 00:00:00 2001 From: Vincent Koc Date: Fri, 3 Oct 2025 08:28:22 -0700 Subject: [PATCH 3/5] Create README.md --- open-telemetry/opik/README.md | 105 ++++++++++++++++++++++++++++++++++ 1 file changed, 105 insertions(+) create mode 100644 open-telemetry/opik/README.md diff --git a/open-telemetry/opik/README.md b/open-telemetry/opik/README.md new file mode 100644 index 0000000..e35c331 --- /dev/null +++ b/open-telemetry/opik/README.md @@ -0,0 +1,105 @@ +# Opik Tracing for Pipecat + +This demo showcases OpenTelemetry tracing integration for Pipecat services using Opik, allowing you to visualize and analyze LLM traces, service calls, performance metrics, and dependencies. + +> **Note**: Opik supports HTTP/JSON OpenTelemetry traces only (no logs or metrics). + +## Setup Instructions + +### 1. Get Your Opik API Key + +Sign up or log in at [https://www.comet.com/opik](https://www.comet.com/opik) to get your API key and workspace name. + +### 2. 
Environment Configuration + +Create a `.env` file with your API keys and Opik configuration: + +``` +# Enable tracing +ENABLE_TRACING=true + +# OTLP endpoint (defaults to Opik Cloud if not set) +OTEL_EXPORTER_OTLP_ENDPOINT=https://www.comet.com/opik/api/v1/private/otel/v1/traces + +# Opik headers - Configure your API key, workspace, and project name +OTEL_EXPORTER_OTLP_HEADERS=Authorization=your_opik_api_key,Comet-Workspace=your_workspace_name,projectName=your_project_name + +# Optional: Enable console output for debugging +# OTEL_CONSOLE_EXPORT=true + +# Service API keys +DEEPGRAM_API_KEY=your_key_here +CARTESIA_API_KEY=your_key_here +OPENAI_API_KEY=your_key_here +``` + +For self-hosted Opik installations, update the endpoint: +``` +OTEL_EXPORTER_OTLP_ENDPOINT=http://<your-opik-host>/api/v1/private/otel/v1/traces +``` + +### 3. Install Dependencies + +```bash +pip install -r requirements.txt +``` + +> **Important**: Use the HTTP exporter (`opentelemetry-exporter-otlp-proto-http`), not the GRPC exporter. Opik only supports HTTP transport. + +### 4. Run the Demo + +```bash +python bot.py +``` + +### 5. View Traces in Opik + +Open your browser to [https://www.comet.com/opik](https://www.comet.com/opik) and navigate to your project to view traces and analyze your LLM interactions. 
+ +## Opik-Specific Configuration + +In the `bot.py` file, note the HTTP exporter configuration: + +```python +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter + +# Create the exporter for Opik (HTTP/JSON only) +# Headers are configured via OTEL_EXPORTER_OTLP_HEADERS environment variable +otlp_exporter = OTLPSpanExporter( + endpoint=os.getenv( + "OTEL_EXPORTER_OTLP_ENDPOINT", + "https://www.comet.com/opik/api/v1/private/otel/v1/traces" + ), +) + +# Set up tracing with the exporter +setup_tracing( + service_name="pipecat-demo", + exporter=otlp_exporter, + console_export=bool(os.getenv("OTEL_CONSOLE_EXPORT")), +) +``` + +The OpenTelemetry SDK automatically reads headers from the `OTEL_EXPORTER_OTLP_HEADERS` environment variable. + +## Key Features + +- **HTTP/JSON Transport**: Opik uses HTTP transport for OpenTelemetry traces +- **LLM-Focused**: Optimized for tracking and analyzing LLM interactions +- **Required Headers**: + - `Authorization`: Your Opik API key + - `projectName`: Your project name in Opik + - `Comet-Workspace`: Your workspace name (required for Comet-hosted installations) + +## Troubleshooting + +- **No Traces in Opik**: Verify your API key, workspace name, and project name are correct +- **Authorization Errors**: Ensure the `Authorization` and `Comet-Workspace` values in `OTEL_EXPORTER_OTLP_HEADERS` are set correctly +- **Connection Errors**: Check your network connectivity and endpoint URL +- **Exporter Issues**: Try the Console exporter (`OTEL_CONSOLE_EXPORT=true`) to verify tracing works locally + +## References + +- [Opik Documentation](https://www.comet.com/docs/opik) +- [Opik OpenTelemetry Integration Guide](https://www.comet.com/docs/opik/integrations/opentelemetry) +- [OpenTelemetry Documentation](https://opentelemetry.io/docs/) From 218b437532c4c16c93cd5eac385b8cdd4a2b5fea Mon Sep 17 00:00:00 2001 From: Vincent Koc Date: Fri, 3 Oct 2025 08:28:25 -0700 Subject: [PATCH 4/5] Create requirements.txt --- open-telemetry/opik/requirements.txt | 7 
+++++++ 1 file changed, 7 insertions(+) create mode 100644 open-telemetry/opik/requirements.txt diff --git a/open-telemetry/opik/requirements.txt b/open-telemetry/opik/requirements.txt new file mode 100644 index 0000000..3f9b3af --- /dev/null +++ b/open-telemetry/opik/requirements.txt @@ -0,0 +1,7 @@ +fastapi +uvicorn +python-dotenv +pipecat-ai[daily,webrtc,silero,cartesia,deepgram,openai,tracing]>=0.0.82 +pipecat-ai-small-webrtc-prebuilt +opentelemetry-exporter-otlp-proto-http +opik \ No newline at end of file From 59cb54166cf4f5a999a25d0ba6ac543ef441dc47 Mon Sep 17 00:00:00 2001 From: Vincent Koc Date: Fri, 3 Oct 2025 08:30:23 -0700 Subject: [PATCH 5/5] Update README.md --- open-telemetry/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/open-telemetry/README.md b/open-telemetry/README.md index 1d3871e..eeb2a37 100644 --- a/open-telemetry/README.md +++ b/open-telemetry/README.md @@ -35,6 +35,7 @@ This organization helps you track conversation-to-conversation and turn-to-turn | ------------------------------- | ------------------------------------------------------------------------- | | [Jaeger Tracing](./jaeger/) | Tracing with Jaeger, an open-source end-to-end distributed tracing system | | [Langfuse Tracing](./langfuse/) | Tracing with Langfuse, a specialized platform for LLM observability | +| [Opik Tracing](./opik/) | Tracing with Opik, an open-source tracing and evaluation platform | ## Common Requirements