
Commit b0704e9

sabrenner authored and github-actions[bot] committed
fix(langchain): do not read from streamed chunks when there are none for chat model streams (#14985)
## Description

Fixes an issue where an `IndexError` would be raised when trying to access a chunk that didn't exist because a streamed chat model operation did not produce any chunks, usually due to a timeout error.

## Testing

Verified with the reproduction in the linked issue.

## Risks

None.

## Additional Notes

Closes #14688

(cherry picked from commit 99b19b7)
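To make the failure mode concrete, here is a minimal, self-contained sketch (the `streamed_chunks` list below is illustrative, not ddtrace's internal state) of why unconditionally indexing the collected chunks breaks when a timeout prevents any chunk from arriving:

```python
# Illustrative only: an empty list stands in for a stream that timed out before
# producing any chunks.
streamed_chunks = []

try:
    # Pre-fix behavior: the first collected chunk was accessed unconditionally.
    joined_chunks = streamed_chunks[0]
except IndexError as e:
    # This IndexError would surface from the tracing callback and shadow the
    # original timeout error the user actually cares about.
    print(f"IndexError from an empty stream: {e}")
```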
1 parent b4cd7cc commit b0704e9

File tree

6 files changed: +364 -3 lines changed


ddtrace/contrib/internal/langchain/patch.py

Lines changed: 6 additions & 3 deletions
@@ -320,10 +320,13 @@ def _on_span_started(span: Span):
         integration.record_instance(instance, span)
 
     def _on_span_finished(span: Span, streamed_chunks):
-        joined_chunks = streamed_chunks[0]
-        for chunk in streamed_chunks[1:]:
-            joined_chunks += chunk  # base message types support __add__ for concatenation
         kwargs["_dd.identifying_params"] = instance._identifying_params
+        if len(streamed_chunks):
+            joined_chunks = streamed_chunks[0]
+            for chunk in streamed_chunks[1:]:
+                joined_chunks += chunk  # base message types support __add__ for concatenation
+        else:
+            joined_chunks = []
         integration.llmobs_set_tags(span, args=args, kwargs=kwargs, response=joined_chunks, operation="chat")
 
     return shared_stream(
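For readers outside the ddtrace codebase, the guarded join above can be sketched as a standalone helper. `join_chunks` is a hypothetical name, not a ddtrace function; the only assumption is that chunk objects support `__add__`, as the langchain message types referenced in the diff comment do:

```python
from typing import Any, List


def join_chunks(streamed_chunks: List[Any]):
    """Concatenate streamed chunks, tolerating an empty stream (hypothetical helper)."""
    if len(streamed_chunks):
        joined = streamed_chunks[0]
        for chunk in streamed_chunks[1:]:
            joined += chunk  # chunk types are assumed to support __add__ for concatenation
        return joined
    # No chunks were received (e.g. the request timed out before the first chunk):
    # fall back to an empty result instead of raising IndexError.
    return []


print(join_chunks([]))             # []      -- previously this case raised IndexError
print(join_chunks(["Hel", "lo"]))  # 'Hello' -- plain strings also support __add__
```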
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+---
+fixes:
+  - |
+    langchain: Fixes an issue where streamed responses that end before the first chunk is received would result in an ``IndexError``.

tests/contrib/langchain/test_langchain.py

Lines changed: 14 additions & 0 deletions
@@ -539,3 +539,17 @@ def circumference_tool(radius: float) -> float:
     )
 
     calculator.invoke("2", config={"unserializable": object()})
+
+
+@pytest.mark.snapshot(ignores=["meta.error.stack", "meta.error.message"])
+def test_streamed_chat_model_with_no_output(langchain_openai, openai_url):
+    from openai import APITimeoutError
+
+    chat_model = langchain_openai.ChatOpenAI(base_url=openai_url, timeout=0.0001)
+
+    result = chat_model.stream("Hello, my name is")
+    try:
+        next(result)
+    except Exception as e:
+        if not isinstance(e, APITimeoutError):
+            assert False, f"Expected APITimeoutError, got {e}"
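As a side note on the test's structure, the same assertion could also be expressed with pytest's exception context manager. This is only a sketch, not part of the change, and it assumes the `langchain_openai` and `openai_url` fixtures behave as in the test above:

```python
import pytest
from openai import APITimeoutError


def test_streamed_chat_model_timeout_sketch(langchain_openai, openai_url):
    # Hypothetical variant: with a near-zero timeout, consuming the stream should
    # raise APITimeoutError, and the traced wrapper should finish the span cleanly
    # even though zero chunks were collected.
    chat_model = langchain_openai.ChatOpenAI(base_url=openai_url, timeout=0.0001)
    with pytest.raises(APITimeoutError):
        next(chat_model.stream("Hello, my name is"))
```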
Lines changed: 154 additions & 0 deletions
@@ -0,0 +1,154 @@
interactions:
- request:
    body: '{"messages":[{"content":"Hello, my name is","role":"user"}],"model":"gpt-3.5-turbo","n":1,"stream":true,"temperature":0.7}'
    headers:
      ? !!python/object/apply:multidict._multidict.istr
      - Accept
      : - application/json
      ? !!python/object/apply:multidict._multidict.istr
      - Accept-Encoding
      : - gzip, deflate
      ? !!python/object/apply:multidict._multidict.istr
      - Connection
      : - keep-alive
      Content-Length:
      - '122'
      ? !!python/object/apply:multidict._multidict.istr
      - Content-Type
      : - application/json
      ? !!python/object/apply:multidict._multidict.istr
      - User-Agent
      : - OpenAI/Python 1.109.1
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Arch
      : - arm64
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Async
      : - 'false'
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Lang
      : - python
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-OS
      : - MacOS
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Package-Version
      : - 1.109.1
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Runtime
      : - CPython
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Runtime-Version
      : - 3.10.13
      ? !!python/object/apply:multidict._multidict.istr
      - x-stainless-read-timeout
      : - '0.0001'
      ? !!python/object/apply:multidict._multidict.istr
      - x-stainless-retry-count
      : - '0'
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: 'data: {"id":"chatcmpl-CTDgKS9E25YxCQOhLhNmhfOPiznNk","object":"chat.completion.chunk","created":1761080140,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"obfuscation":"dDINu"}


        data: {"id":"chatcmpl-CTDgKS9E25YxCQOhLhNmhfOPiznNk","object":"chat.completion.chunk","created":1761080140,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Assistant"},"logprobs":null,"finish_reason":null}],"obfuscation":"1yL6DImc2cky67"}


        data: {"id":"chatcmpl-CTDgKS9E25YxCQOhLhNmhfOPiznNk","object":"chat.completion.chunk","created":1761080140,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"obfuscation":"kyn0dy"}


        data: {"id":"chatcmpl-CTDgKS9E25YxCQOhLhNmhfOPiznNk","object":"chat.completion.chunk","created":1761080140,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" How"},"logprobs":null,"finish_reason":null}],"obfuscation":"zF8"}


        data: {"id":"chatcmpl-CTDgKS9E25YxCQOhLhNmhfOPiznNk","object":"chat.completion.chunk","created":1761080140,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}],"obfuscation":"ZOA"}


        data: {"id":"chatcmpl-CTDgKS9E25YxCQOhLhNmhfOPiznNk","object":"chat.completion.chunk","created":1761080140,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}],"obfuscation":"yPnQN"}


        data: {"id":"chatcmpl-CTDgKS9E25YxCQOhLhNmhfOPiznNk","object":"chat.completion.chunk","created":1761080140,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" assist"},"logprobs":null,"finish_reason":null}],"obfuscation":""}


        data: {"id":"chatcmpl-CTDgKS9E25YxCQOhLhNmhfOPiznNk","object":"chat.completion.chunk","created":1761080140,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}],"obfuscation":"nG8"}


        data: {"id":"chatcmpl-CTDgKS9E25YxCQOhLhNmhfOPiznNk","object":"chat.completion.chunk","created":1761080140,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" today"},"logprobs":null,"finish_reason":null}],"obfuscation":"a"}


        data: {"id":"chatcmpl-CTDgKS9E25YxCQOhLhNmhfOPiznNk","object":"chat.completion.chunk","created":1761080140,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}],"obfuscation":"KxSued"}


        data: {"id":"chatcmpl-CTDgKS9E25YxCQOhLhNmhfOPiznNk","object":"chat.completion.chunk","created":1761080140,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"obfuscation":"Q"}


        data: [DONE]


        '
    headers:
      CF-RAY:
      - 9923a83a5cd9081d-IAD
      Connection:
      - keep-alive
      Content-Type:
      - text/event-stream; charset=utf-8
      Date:
      - Tue, 21 Oct 2025 20:55:40 GMT
      Server:
      - cloudflare
      Set-Cookie:
      - __cf_bm=o7TNYeatXbBOlFxeMlhl.8fe7kXVRNTm_dL98zIje8M-1761080140-1.0.1.1-FRpu._KCnEk.aGZG5YQ75Od_Ucq8okx9WLNY3JjdbbK3P7mwxS21FIvTtyY6GlllpujKEYLWkFHG6VIIw4zZmwH0yDL04t_gvEzyjWwc1qc; path=/; expires=Tue, 21-Oct-25 21:25:40 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
      - _cfuvid=bZiyEgtQ9lNlCOG2DaVgkJLkXs33574MrsoVFl0iIf4-1761080140438-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
      Strict-Transport-Security:
      - max-age=31536000; includeSubDomains; preload
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      cf-cache-status:
      - DYNAMIC
      openai-organization:
      - datadog-staging
      openai-processing-ms:
      - '151'
      openai-project:
      - proj_gt6TQZPRbZfoY2J9AQlEJMpd
      openai-version:
      - '2020-10-01'
      x-envoy-upstream-service-time:
      - '174'
      x-openai-proxy-wasm:
      - v0.1
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '50000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '49999993'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 0s
      x-request-id:
      - req_c9c685c9da2e4684b4613cefd4af98e8
    status:
      code: 200
      message: OK
version: 1
Lines changed: 154 additions & 0 deletions
@@ -0,0 +1,154 @@
interactions:
- request:
    body: '{"messages":[{"content":"Hello, my name is","role":"user"}],"model":"gpt-3.5-turbo","stream":true}'
    headers:
      ? !!python/object/apply:multidict._multidict.istr
      - Accept
      : - application/json
      ? !!python/object/apply:multidict._multidict.istr
      - Accept-Encoding
      : - gzip, deflate, zstd
      ? !!python/object/apply:multidict._multidict.istr
      - Connection
      : - keep-alive
      Content-Length:
      - '98'
      ? !!python/object/apply:multidict._multidict.istr
      - Content-Type
      : - application/json
      ? !!python/object/apply:multidict._multidict.istr
      - User-Agent
      : - OpenAI/Python 1.109.1
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Arch
      : - arm64
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Async
      : - 'false'
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Lang
      : - python
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-OS
      : - MacOS
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Package-Version
      : - 1.109.1
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Runtime
      : - CPython
      ? !!python/object/apply:multidict._multidict.istr
      - X-Stainless-Runtime-Version
      : - 3.10.13
      ? !!python/object/apply:multidict._multidict.istr
      - x-stainless-read-timeout
      : - '0.0001'
      ? !!python/object/apply:multidict._multidict.istr
      - x-stainless-retry-count
      : - '0'
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: 'data: {"id":"chatcmpl-CTDgOzn7iu7WvRbPx6d7DKv4jisZ1","object":"chat.completion.chunk","created":1761080144,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"obfuscation":"GHVuj"}


        data: {"id":"chatcmpl-CTDgOzn7iu7WvRbPx6d7DKv4jisZ1","object":"chat.completion.chunk","created":1761080144,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Assistant"},"logprobs":null,"finish_reason":null}],"obfuscation":"x2EMmcLlO3ecwG"}


        data: {"id":"chatcmpl-CTDgOzn7iu7WvRbPx6d7DKv4jisZ1","object":"chat.completion.chunk","created":1761080144,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"obfuscation":"TQ802v"}


        data: {"id":"chatcmpl-CTDgOzn7iu7WvRbPx6d7DKv4jisZ1","object":"chat.completion.chunk","created":1761080144,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" How"},"logprobs":null,"finish_reason":null}],"obfuscation":"XKe"}


        data: {"id":"chatcmpl-CTDgOzn7iu7WvRbPx6d7DKv4jisZ1","object":"chat.completion.chunk","created":1761080144,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}],"obfuscation":"kcL"}


        data: {"id":"chatcmpl-CTDgOzn7iu7WvRbPx6d7DKv4jisZ1","object":"chat.completion.chunk","created":1761080144,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}],"obfuscation":"iYfTx"}


        data: {"id":"chatcmpl-CTDgOzn7iu7WvRbPx6d7DKv4jisZ1","object":"chat.completion.chunk","created":1761080144,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" assist"},"logprobs":null,"finish_reason":null}],"obfuscation":""}


        data: {"id":"chatcmpl-CTDgOzn7iu7WvRbPx6d7DKv4jisZ1","object":"chat.completion.chunk","created":1761080144,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}],"obfuscation":"3qp"}


        data: {"id":"chatcmpl-CTDgOzn7iu7WvRbPx6d7DKv4jisZ1","object":"chat.completion.chunk","created":1761080144,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" today"},"logprobs":null,"finish_reason":null}],"obfuscation":"Q"}


        data: {"id":"chatcmpl-CTDgOzn7iu7WvRbPx6d7DKv4jisZ1","object":"chat.completion.chunk","created":1761080144,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}],"obfuscation":"rxGZdS"}


        data: {"id":"chatcmpl-CTDgOzn7iu7WvRbPx6d7DKv4jisZ1","object":"chat.completion.chunk","created":1761080144,"model":"gpt-3.5-turbo-0125","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"obfuscation":"q"}


        data: [DONE]


        '
    headers:
      CF-RAY:
      - 9923a8557c58310c-IAD
      Connection:
      - keep-alive
      Content-Type:
      - text/event-stream; charset=utf-8
      Date:
      - Tue, 21 Oct 2025 20:55:44 GMT
      Server:
      - cloudflare
      Set-Cookie:
      - __cf_bm=BNW6cRDCKHu3RmqDR.YwTVZBl0oaF1KmyP7rFJOVJh8-1761080144-1.0.1.1-Em.1pj_MusLdRZAzfxs.tpO51tHRIUprKfWcjhgW3dWoSXLap2PB6YRiem.DU.MG8NVyMIQkwW6W_JUxa_NnHFenF.ejDJXGieRs0VvP6cs; path=/; expires=Tue, 21-Oct-25 21:25:44 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
      - _cfuvid=ue2PaSkW.MPXEVwGniK0_bY41_Ri3BTQdhJbM_FzhWo-1761080144466-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
      Strict-Transport-Security:
      - max-age=31536000; includeSubDomains; preload
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      cf-cache-status:
      - DYNAMIC
      openai-organization:
      - datadog-staging
      openai-processing-ms:
      - '134'
      openai-project:
      - proj_gt6TQZPRbZfoY2J9AQlEJMpd
      openai-version:
      - '2020-10-01'
      x-envoy-upstream-service-time:
      - '151'
      x-openai-proxy-wasm:
      - v0.1
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '50000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '49999993'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 0s
      x-request-id:
      - req_396d9ab42a15454bbc29de5853e2bad7
    status:
      code: 200
      message: OK
version: 1
