
Commit a19ef1c

Fix Anthropic response parsing (#266)
* Fix Anthropic response parsing
* Update CHANGELOG.md
1 parent 398779f commit a19ef1c

3 files changed: +19 −8 lines


CHANGELOG.md

Lines changed: 1 addition & 0 deletions
@@ -12,6 +12,7 @@
 
 ### Fixed
 - Removed the `uuid` package from dependencies (not needed with Python 3).
+- Fixed a bug in the `AnthropicLLM` class preventing it from being used in `GraphRAG` pipeline.
 
 ## 1.4.2
 
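For context, the entry above refers to passing an `AnthropicLLM` instance into the `GraphRAG` pipeline as its LLM. A minimal usage sketch is below; the import path mirrors the module changed in this commit, and the model name and parameters are illustrative only, not values mandated by the library.

```python
# Illustrative only: exercises the fixed response parsing end to end.
from neo4j_graphrag.llm.anthropic_llm import AnthropicLLM

llm = AnthropicLLM(
    "claude-3-opus-20240229",  # example model name (also used in the tests below)
    model_params={"temperature": 0.3, "max_tokens": 1000},
)
response = llm.invoke("Tell me about Neo4j.")
print(response.content)  # with this fix, a plain string rather than a list of content blocks
```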

src/neo4j_graphrag/llm/anthropic_llm.py

Lines changed: 12 additions & 2 deletions
@@ -112,7 +112,12 @@ def invoke(
                 messages=messages,
                 **self.model_params,
             )
-            return LLMResponse(content=response.content)
+            response_content = response.content
+            if response_content and len(response_content) > 0:
+                text = response_content[0].text
+            else:
+                raise LLMGenerationError("LLM returned empty response.")
+            return LLMResponse(content=text)
         except self.anthropic.APIError as e:
             raise LLMGenerationError(e)
 
@@ -140,6 +145,11 @@ async def ainvoke(
                 messages=messages,
                 **self.model_params,
             )
-            return LLMResponse(content=response.content)
+            response_content = response.content
+            if response_content and len(response_content) > 0:
+                text = response_content[0].text
+            else:
+                raise LLMGenerationError("LLM returned empty response.")
+            return LLMResponse(content=text)
         except self.anthropic.APIError as e:
             raise LLMGenerationError(e)
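The change reflects the shape of the Anthropic Messages API response: `message.content` is a list of content blocks, each carrying a `text` attribute, rather than a plain string. A standalone sketch of that structure, using the `anthropic` SDK directly rather than this library, is:

```python
# Sketch of the raw SDK response shape that motivated this fix; not library code.
import anthropic

client = anthropic.Anthropic()  # expects ANTHROPIC_API_KEY in the environment

message = client.messages.create(
    model="claude-3-opus-20240229",  # example model
    max_tokens=1024,
    messages=[{"role": "user", "content": "Say hello"}],
)

# message.content is a list of content blocks, so the old
# `LLMResponse(content=response.content)` stored a list instead of a string.
# The patched code pulls the text out of the first block and treats an
# empty list as an error:
if message.content and len(message.content) > 0:
    text = message.content[0].text
else:
    raise RuntimeError("LLM returned empty response.")  # the library raises LLMGenerationError here

print(text)
```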

tests/unit/llm/test_anthropic_llm.py

Lines changed: 6 additions & 6 deletions
@@ -42,7 +42,7 @@ def test_anthropic_llm_missing_dependency(mock_import: Mock) -> None:
 
 def test_anthropic_invoke_happy_path(mock_anthropic: Mock) -> None:
     mock_anthropic.Anthropic.return_value.messages.create.return_value = MagicMock(
-        content="generated text"
+        content=[MagicMock(text="generated text")]
     )
     model_params = {"temperature": 0.3}
     llm = AnthropicLLM("claude-3-opus-20240229", model_params=model_params)
@@ -59,7 +59,7 @@ def test_anthropic_invoke_happy_path(mock_anthropic: Mock) -> None:
 
 def test_anthropic_invoke_with_message_history_happy_path(mock_anthropic: Mock) -> None:
     mock_anthropic.Anthropic.return_value.messages.create.return_value = MagicMock(
-        content="generated text"
+        content=[MagicMock(text="generated text")]
     )
     model_params = {"temperature": 0.3}
     llm = AnthropicLLM(
@@ -87,7 +87,7 @@ def test_anthropic_invoke_with_system_instruction(
     mock_anthropic: Mock,
 ) -> None:
     mock_anthropic.Anthropic.return_value.messages.create.return_value = MagicMock(
-        content="generated text"
+        content=[MagicMock(text="generated text")]
     )
     model_params = {"temperature": 0.3}
     system_instruction = "You are a helpful assistant."
@@ -115,7 +115,7 @@ def test_anthropic_invoke_with_message_history_and_system_instruction(
     mock_anthropic: Mock,
 ) -> None:
     mock_anthropic.Anthropic.return_value.messages.create.return_value = MagicMock(
-        content="generated text"
+        content=[MagicMock(text="generated text")]
     )
     model_params = {"temperature": 0.3}
     system_instruction = "You are a helpful assistant."
@@ -147,7 +147,7 @@ def test_anthropic_invoke_with_message_history_validation_error(
     mock_anthropic: Mock,
 ) -> None:
     mock_anthropic.Anthropic.return_value.messages.create.return_value = MagicMock(
-        content="generated text"
+        content=[MagicMock(text="generated text")]
    )
     model_params = {"temperature": 0.3}
     system_instruction = "You are a helpful assistant."
@@ -170,7 +170,7 @@ def test_anthropic_invoke_with_message_history_validation_error(
 @pytest.mark.asyncio
 async def test_anthropic_ainvoke_happy_path(mock_anthropic: Mock) -> None:
     mock_response = AsyncMock()
-    mock_response.content = "Return text"
+    mock_response.content = [MagicMock(text="Return text")]
     mock_model = mock_anthropic.AsyncAnthropic.return_value
     mock_model.messages.create = AsyncMock(return_value=mock_response)
     model_params = {"temperature": 0.3}
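The updated tests only cover the happy path; below is a sketch of a test for the new empty-response branch. It assumes the same `mock_anthropic` fixture used by the tests above and that `LLMGenerationError` is importable from `neo4j_graphrag.exceptions`; adjust the imports if the project lays them out differently.

```python
# Sketch only, not part of this commit.
from unittest.mock import MagicMock, Mock

import pytest

from neo4j_graphrag.exceptions import LLMGenerationError
from neo4j_graphrag.llm.anthropic_llm import AnthropicLLM


def test_anthropic_invoke_empty_content(mock_anthropic: Mock) -> None:
    # With the fix, an empty content list raises instead of returning a bogus response.
    mock_anthropic.Anthropic.return_value.messages.create.return_value = MagicMock(
        content=[]
    )
    llm = AnthropicLLM("claude-3-opus-20240229")
    with pytest.raises(LLMGenerationError):
        llm.invoke("question")
```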
