Skip to content

Commit fba5102

Browse files
authored
feat: client.responses.create() and client.responses.retrieve() (#227)
client-side support for the in-progress API surface: llamastack/llama-stack#1989
1 parent ee0e65e commit fba5102

File tree

8 files changed

+1023
-0
lines changed

8 files changed

+1023
-0
lines changed

src/llama_stack_client/_client.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@
3333
datasets,
3434
inference,
3535
providers,
36+
responses,
3637
telemetry,
3738
vector_io,
3839
benchmarks,
@@ -71,6 +72,7 @@ class LlamaStackClient(SyncAPIClient):
7172
toolgroups: toolgroups.ToolgroupsResource
7273
tools: tools.ToolsResource
7374
tool_runtime: tool_runtime.ToolRuntimeResource
75+
responses: responses.ResponsesResource
7476
agents: agents.AgentsResource
7577
datasets: datasets.DatasetsResource
7678
eval: eval.EvalResource
@@ -153,6 +155,7 @@ def __init__(
153155
self.toolgroups = toolgroups.ToolgroupsResource(self)
154156
self.tools = tools.ToolsResource(self)
155157
self.tool_runtime = tool_runtime.ToolRuntimeResource(self)
158+
self.responses = responses.ResponsesResource(self)
156159
self.agents = agents.AgentsResource(self)
157160
self.datasets = datasets.DatasetsResource(self)
158161
self.eval = eval.EvalResource(self)
@@ -287,6 +290,7 @@ class AsyncLlamaStackClient(AsyncAPIClient):
287290
toolgroups: toolgroups.AsyncToolgroupsResource
288291
tools: tools.AsyncToolsResource
289292
tool_runtime: tool_runtime.AsyncToolRuntimeResource
293+
responses: responses.AsyncResponsesResource
290294
agents: agents.AsyncAgentsResource
291295
datasets: datasets.AsyncDatasetsResource
292296
eval: eval.AsyncEvalResource
@@ -369,6 +373,7 @@ def __init__(
369373
self.toolgroups = toolgroups.AsyncToolgroupsResource(self)
370374
self.tools = tools.AsyncToolsResource(self)
371375
self.tool_runtime = tool_runtime.AsyncToolRuntimeResource(self)
376+
self.responses = responses.AsyncResponsesResource(self)
372377
self.agents = agents.AsyncAgentsResource(self)
373378
self.datasets = datasets.AsyncDatasetsResource(self)
374379
self.eval = eval.AsyncEvalResource(self)
@@ -504,6 +509,7 @@ def __init__(self, client: LlamaStackClient) -> None:
504509
self.toolgroups = toolgroups.ToolgroupsResourceWithRawResponse(client.toolgroups)
505510
self.tools = tools.ToolsResourceWithRawResponse(client.tools)
506511
self.tool_runtime = tool_runtime.ToolRuntimeResourceWithRawResponse(client.tool_runtime)
512+
self.responses = responses.ResponsesResourceWithRawResponse(client.responses)
507513
self.agents = agents.AgentsResourceWithRawResponse(client.agents)
508514
self.datasets = datasets.DatasetsResourceWithRawResponse(client.datasets)
509515
self.eval = eval.EvalResourceWithRawResponse(client.eval)
@@ -533,6 +539,7 @@ def __init__(self, client: AsyncLlamaStackClient) -> None:
533539
self.toolgroups = toolgroups.AsyncToolgroupsResourceWithRawResponse(client.toolgroups)
534540
self.tools = tools.AsyncToolsResourceWithRawResponse(client.tools)
535541
self.tool_runtime = tool_runtime.AsyncToolRuntimeResourceWithRawResponse(client.tool_runtime)
542+
self.responses = responses.AsyncResponsesResourceWithRawResponse(client.responses)
536543
self.agents = agents.AsyncAgentsResourceWithRawResponse(client.agents)
537544
self.datasets = datasets.AsyncDatasetsResourceWithRawResponse(client.datasets)
538545
self.eval = eval.AsyncEvalResourceWithRawResponse(client.eval)
@@ -564,6 +571,7 @@ def __init__(self, client: LlamaStackClient) -> None:
564571
self.toolgroups = toolgroups.ToolgroupsResourceWithStreamingResponse(client.toolgroups)
565572
self.tools = tools.ToolsResourceWithStreamingResponse(client.tools)
566573
self.tool_runtime = tool_runtime.ToolRuntimeResourceWithStreamingResponse(client.tool_runtime)
574+
self.responses = responses.ResponsesResourceWithStreamingResponse(client.responses)
567575
self.agents = agents.AgentsResourceWithStreamingResponse(client.agents)
568576
self.datasets = datasets.DatasetsResourceWithStreamingResponse(client.datasets)
569577
self.eval = eval.EvalResourceWithStreamingResponse(client.eval)
@@ -595,6 +603,7 @@ def __init__(self, client: AsyncLlamaStackClient) -> None:
595603
self.toolgroups = toolgroups.AsyncToolgroupsResourceWithStreamingResponse(client.toolgroups)
596604
self.tools = tools.AsyncToolsResourceWithStreamingResponse(client.tools)
597605
self.tool_runtime = tool_runtime.AsyncToolRuntimeResourceWithStreamingResponse(client.tool_runtime)
606+
self.responses = responses.AsyncResponsesResourceWithStreamingResponse(client.responses)
598607
self.agents = agents.AsyncAgentsResourceWithStreamingResponse(client.agents)
599608
self.datasets = datasets.AsyncDatasetsResourceWithStreamingResponse(client.datasets)
600609
self.eval = eval.AsyncEvalResourceWithStreamingResponse(client.eval)

src/llama_stack_client/resources/__init__.py

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -104,6 +104,14 @@
104104
ProvidersResourceWithStreamingResponse,
105105
AsyncProvidersResourceWithStreamingResponse,
106106
)
107+
from .responses import (
108+
ResponsesResource,
109+
AsyncResponsesResource,
110+
ResponsesResourceWithRawResponse,
111+
AsyncResponsesResourceWithRawResponse,
112+
ResponsesResourceWithStreamingResponse,
113+
AsyncResponsesResourceWithStreamingResponse,
114+
)
107115
from .telemetry import (
108116
TelemetryResource,
109117
AsyncTelemetryResource,
@@ -204,6 +212,12 @@
204212
"AsyncToolRuntimeResourceWithRawResponse",
205213
"ToolRuntimeResourceWithStreamingResponse",
206214
"AsyncToolRuntimeResourceWithStreamingResponse",
215+
"ResponsesResource",
216+
"AsyncResponsesResource",
217+
"ResponsesResourceWithRawResponse",
218+
"AsyncResponsesResourceWithRawResponse",
219+
"ResponsesResourceWithStreamingResponse",
220+
"AsyncResponsesResourceWithStreamingResponse",
207221
"AgentsResource",
208222
"AsyncAgentsResource",
209223
"AgentsResourceWithRawResponse",

0 commit comments

Comments (0)