@@ -41,86 +41,17 @@ def test_cohere_llm_happy_path(mock_cohere: Mock) -> None:
     chat_response_mock = MagicMock()
     chat_response_mock.message.content = [MagicMock(text="cohere response text")]
     mock_cohere.ClientV2.return_value.chat.return_value = chat_response_mock
+    mock_cohere.UserChatMessageV2.return_value = {"role": "user", "content": "test"}
     llm = CohereLLM(model_name="something")
     res = llm.invoke("my text")
     assert isinstance(res, LLMResponse)
     assert res.content == "cohere response text"
-
-
-def test_cohere_llm_invoke_with_message_history_happy_path(mock_cohere: Mock) -> None:
-    chat_response_mock = MagicMock()
-    chat_response_mock.message.content = [MagicMock(text="cohere response text")]
-    mock_cohere_client_chat = mock_cohere.ClientV2.return_value.chat
-    mock_cohere_client_chat.return_value = chat_response_mock
-
-    system_instruction = "You are a helpful assistant."
-    llm = CohereLLM(model_name="something")
-    message_history = [
-        {"role": "user", "content": "When does the sun come up in the summer?"},
-        {"role": "assistant", "content": "Usually around 6am."},
-    ]
-    question = "What about next season?"
-
-    res = llm.invoke(question, message_history, system_instruction=system_instruction)  # type: ignore
-    assert isinstance(res, LLMResponse)
-    assert res.content == "cohere response text"
-    messages = [{"role": "system", "content": system_instruction}]
-    messages.extend(message_history)
-    messages.append({"role": "user", "content": question})
-    mock_cohere_client_chat.assert_called_once_with(
-        messages=messages,
+    mock_cohere.ClientV2.return_value.chat.assert_called_once_with(
+        messages=[{"role": "user", "content": "test"}],
         model="something",
     )


-def test_cohere_llm_invoke_with_message_history_and_system_instruction(
-    mock_cohere: Mock,
-) -> None:
-    chat_response_mock = MagicMock()
-    chat_response_mock.message.content = [MagicMock(text="cohere response text")]
-    mock_cohere_client_chat = mock_cohere.ClientV2.return_value.chat
-    mock_cohere_client_chat.return_value = chat_response_mock
-
-    system_instruction = "You are a helpful assistant."
-    llm = CohereLLM(model_name="gpt")
-    message_history = [
-        {"role": "user", "content": "When does the sun come up in the summer?"},
-        {"role": "assistant", "content": "Usually around 6am."},
-    ]
-    question = "What about next season?"
-
-    res = llm.invoke(question, message_history, system_instruction=system_instruction)  # type: ignore
-    assert isinstance(res, LLMResponse)
-    assert res.content == "cohere response text"
-    messages = [{"role": "system", "content": system_instruction}]
-    messages.extend(message_history)
-    messages.append({"role": "user", "content": question})
-    mock_cohere_client_chat.assert_called_once_with(
-        messages=messages,
-        model="gpt",
-    )
-
-
-def test_cohere_llm_invoke_with_message_history_validation_error(
-    mock_cohere: Mock,
-) -> None:
-    chat_response_mock = MagicMock()
-    chat_response_mock.message.content = [MagicMock(text="cohere response text")]
-    mock_cohere.ClientV2.return_value.chat.return_value = chat_response_mock
-
-    system_instruction = "You are a helpful assistant."
-    llm = CohereLLM(model_name="something", system_instruction=system_instruction)
-    message_history = [
-        {"role": "robot", "content": "When does the sun come up in the summer?"},
-        {"role": "assistant", "content": "Usually around 6am."},
-    ]
-    question = "What about next season?"
-
-    with pytest.raises(LLMGenerationError) as exc_info:
-        llm.invoke(question, message_history)  # type: ignore
-    assert "Input should be 'user', 'assistant' or 'system" in str(exc_info.value)
-
-
 @pytest.mark.asyncio
 async def test_cohere_llm_happy_path_async(mock_cohere: Mock) -> None:
     chat_response_mock = MagicMock(
@@ -139,16 +70,14 @@ async def test_cohere_llm_happy_path_async(mock_cohere: Mock) -> None:
 def test_cohere_llm_failed(mock_cohere: Mock) -> None:
     mock_cohere.ClientV2.return_value.chat.side_effect = cohere.core.ApiError
     llm = CohereLLM(model_name="something")
-    with pytest.raises(LLMGenerationError) as excinfo:
+    with pytest.raises(LLMGenerationError, match="ApiError"):
         llm.invoke("my text")
-    assert "ApiError" in str(excinfo)


 @pytest.mark.asyncio
 async def test_cohere_llm_failed_async(mock_cohere: Mock) -> None:
     mock_cohere.AsyncClientV2.return_value.chat.side_effect = cohere.core.ApiError
     llm = CohereLLM(model_name="something")

-    with pytest.raises(LLMGenerationError) as excinfo:
+    with pytest.raises(LLMGenerationError, match="ApiError"):
         await llm.ainvoke("my text")
-    assert "ApiError" in str(excinfo)