Skip to content

Commit de791bc

Browse files
authored
fix(deepseek): inject model_provider in response_metadata (#33544)
& slight tests refactor
1 parent 69c6e7d commit de791bc

File tree

3 files changed

+111
-13
lines changed

3 files changed

+111
-13
lines changed

libs/partners/deepseek/langchain_deepseek/chat_models.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -263,6 +263,11 @@ def _create_chat_result(
263263
if not isinstance(response, openai.BaseModel):
264264
return rtn
265265

266+
for generation in rtn.generations:
267+
if generation.message.response_metadata is None:
268+
generation.message.response_metadata = {}
269+
generation.message.response_metadata["model_provider"] = "deepseek"
270+
266271
choices = getattr(response, "choices", None)
267272
if choices and hasattr(choices[0].message, "reasoning_content"):
268273
rtn.generations[0].message.additional_kwargs["reasoning_content"] = choices[
@@ -294,6 +299,10 @@ def _convert_chunk_to_generation_chunk(
294299
if (choices := chunk.get("choices")) and generation_chunk:
295300
top = choices[0]
296301
if isinstance(generation_chunk.message, AIMessageChunk):
302+
generation_chunk.message.response_metadata = {
303+
**generation_chunk.message.response_metadata,
304+
"model_provider": "deepseek",
305+
}
297306
if (
298307
reasoning_content := top.get("delta", {}).get("reasoning_content")
299308
) is not None:

libs/partners/deepseek/tests/integration_tests/test_chat_models.py

Lines changed: 19 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,8 @@
1010

1111
from langchain_deepseek.chat_models import ChatDeepSeek
1212

13+
MODEL_NAME = "deepseek-chat"
14+
1315

1416
class TestChatDeepSeek(ChatModelIntegrationTests):
1517
"""Test `ChatDeepSeek` chat model."""
@@ -23,7 +25,7 @@ def chat_model_class(self) -> type[ChatDeepSeek]:
2325
def chat_model_params(self) -> dict:
2426
"""Parameters to create chat model instance for testing."""
2527
return {
26-
"model": "deepseek-chat",
28+
"model": MODEL_NAME,
2729
"temperature": 0,
2830
}
2931

@@ -49,6 +51,14 @@ def test_reasoning_content() -> None:
4951
response = chat_model.invoke("What is 3^3?")
5052
assert response.content
5153
assert response.additional_kwargs["reasoning_content"]
54+
55+
content_blocks = response.content_blocks
56+
assert content_blocks is not None
57+
assert len(content_blocks) > 0
58+
reasoning_blocks = [
59+
block for block in content_blocks if block.get("type") == "reasoning"
60+
]
61+
assert len(reasoning_blocks) > 0
5262
5363

5464

@@ -61,3 +71,11 @@ def test_reasoning_content_streaming() -> None:
6171
full = chunk if full is None else full + chunk
6272
assert isinstance(full, AIMessageChunk)
6373
assert full.additional_kwargs["reasoning_content"]
74+
75+
content_blocks = full.content_blocks
76+
assert content_blocks is not None
77+
assert len(content_blocks) > 0
78+
reasoning_blocks = [
79+
block for block in content_blocks if block.get("type") == "reasoning"
80+
]
81+
assert len(reasoning_blocks) > 0

libs/partners/deepseek/tests/unit_tests/test_chat_models.py

Lines changed: 83 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@
1313

1414
from langchain_deepseek.chat_models import ChatDeepSeek
1515

16+
MODEL_NAME = "deepseek-chat"
17+
1618

1719
class MockOpenAIResponse(BaseModel):
1820
"""Mock OpenAI response model."""
@@ -35,7 +37,7 @@ def model_dump( # type: ignore[override]
3537
context: dict[str, Any] | None = None,
3638
serialize_as_any: bool = False,
3739
) -> dict[str, Any]:
38-
"""Convert to dictionary, ensuring reasoning_content is included."""
40+
"""Convert to dictionary, ensuring `reasoning_content` is included."""
3941
choices_list = []
4042
for choice in self.choices:
4143
if isinstance(choice.message, ChatCompletionMessage):
@@ -61,7 +63,7 @@ def model_dump( # type: ignore[override]
6163

6264

6365
class TestChatDeepSeekUnit(ChatModelUnitTests):
64-
"""Unit tests for `ChatDeepSeek` chat model."""
66+
"""Standard unit tests for `ChatDeepSeek` chat model."""
6567

6668
@property
6769
def chat_model_class(self) -> type[ChatDeepSeek]:
@@ -77,7 +79,7 @@ def init_from_env_params(self) -> tuple[dict, dict, dict]:
7779
"DEEPSEEK_API_BASE": "api_base",
7880
},
7981
{
80-
"model": "deepseek-chat",
82+
"model": MODEL_NAME,
8183
},
8284
{
8385
"api_key": "api_key",
@@ -89,7 +91,7 @@ def init_from_env_params(self) -> tuple[dict, dict, dict]:
8991
def chat_model_params(self) -> dict:
9092
"""Parameters to create chat model instance for testing."""
9193
return {
92-
"model": "deepseek-chat",
94+
"model": MODEL_NAME,
9395
"api_key": "api_key",
9496
}
9597

@@ -103,7 +105,7 @@ class TestChatDeepSeekCustomUnit:
103105

104106
def test_create_chat_result_with_reasoning_content(self) -> None:
105107
"""Test that reasoning_content is properly extracted from response."""
106-
chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key"))
108+
chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key"))
107109
mock_message = MagicMock()
108110
mock_message.content = "Main content"
109111
mock_message.reasoning_content = "This is the reasoning content"
@@ -120,8 +122,8 @@ def test_create_chat_result_with_reasoning_content(self) -> None:
120122
)
121123

122124
def test_create_chat_result_with_model_extra_reasoning(self) -> None:
123-
"""Test that reasoning is properly extracted from model_extra."""
124-
chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key"))
125+
"""Test that reasoning is properly extracted from `model_extra`."""
126+
chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key"))
125127
mock_message = MagicMock(spec=ChatCompletionMessage)
126128
mock_message.content = "Main content"
127129
mock_message.role = "assistant"
@@ -143,7 +145,7 @@ def test_create_chat_result_with_model_extra_reasoning(self) -> None:
143145

144146
def test_convert_chunk_with_reasoning_content(self) -> None:
145147
"""Test that reasoning_content is properly extracted from streaming chunk."""
146-
chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key"))
148+
chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key"))
147149
chunk: dict[str, Any] = {
148150
"choices": [
149151
{
@@ -170,7 +172,7 @@ def test_convert_chunk_with_reasoning_content(self) -> None:
170172

171173
def test_convert_chunk_with_reasoning(self) -> None:
172174
"""Test that reasoning is properly extracted from streaming chunk."""
173-
chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key"))
175+
chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key"))
174176
chunk: dict[str, Any] = {
175177
"choices": [
176178
{
@@ -197,7 +199,7 @@ def test_convert_chunk_with_reasoning(self) -> None:
197199

198200
def test_convert_chunk_without_reasoning(self) -> None:
199201
"""Test that chunk without reasoning fields works correctly."""
200-
chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key"))
202+
chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key"))
201203
chunk: dict[str, Any] = {"choices": [{"delta": {"content": "Main content"}}]}
202204

203205
chunk_result = chat_model._convert_chunk_to_generation_chunk(
@@ -212,7 +214,7 @@ def test_convert_chunk_without_reasoning(self) -> None:
212214

213215
def test_convert_chunk_with_empty_delta(self) -> None:
214216
"""Test that chunk with empty delta works correctly."""
215-
chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key"))
217+
chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key"))
216218
chunk: dict[str, Any] = {"choices": [{"delta": {}}]}
217219

218220
chunk_result = chat_model._convert_chunk_to_generation_chunk(
@@ -227,7 +229,7 @@ def test_convert_chunk_with_empty_delta(self) -> None:
227229

228230
def test_get_request_payload(self) -> None:
229231
"""Test that tool message content is converted from list to string."""
230-
chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key"))
232+
chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key"))
231233

232234
tool_message = ToolMessage(content=[], tool_call_id="test_id")
233235
payload = chat_model._get_request_payload([tool_message])
@@ -240,3 +242,72 @@ def test_get_request_payload(self) -> None:
240242
tool_message = ToolMessage(content="test string", tool_call_id="test_id")
241243
payload = chat_model._get_request_payload([tool_message])
242244
assert payload["messages"][0]["content"] == "test string"
245+
246+
def test_create_chat_result_with_model_provider(self) -> None:
    """Verify a non-streaming result carries `model_provider` in `response_metadata`."""
    model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key"))

    # Build a single-choice mock completion.
    message_mock = MagicMock()
    message_mock.content = "Main content"
    message_mock.role = "assistant"
    response = MockOpenAIResponse(
        choices=[MagicMock(message=message_mock)], error=None
    )

    chat_result = model._create_chat_result(response)
    metadata = chat_result.generations[0].message.response_metadata
    assert metadata.get("model_provider") == "deepseek"
262+
263+
def test_convert_chunk_with_model_provider(self) -> None:
    """Verify streaming chunks carry `model_provider` in `response_metadata`."""
    model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key"))
    raw_chunk: dict[str, Any] = {
        "choices": [{"delta": {"content": "Main content"}}],
    }

    converted = model._convert_chunk_to_generation_chunk(
        raw_chunk,
        AIMessageChunk,
        None,
    )
    # Guard explicitly so a None result raises with a clear message.
    if converted is None:
        msg = "Expected chunk_result not to be None"
        raise AssertionError(msg)
    assert converted.message.response_metadata.get("model_provider") == "deepseek"
287+
288+
def test_create_chat_result_with_model_provider_multiple_generations(
    self,
) -> None:
    """Verify every generation gets `model_provider` when the response has multiple choices."""
    model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key"))

    # One mock assistant message per expected generation.
    contents = ["First response", "Second response"]
    message_mocks = []
    for text in contents:
        message = MagicMock()
        message.content = text
        message.role = "assistant"
        message_mocks.append(message)

    response = MockOpenAIResponse(
        choices=[MagicMock(message=m) for m in message_mocks],
        error=None,
    )

    chat_result = model._create_chat_result(response)
    assert len(chat_result.generations) == len(contents)
    for generation in chat_result.generations:
        metadata = generation.message.response_metadata
        assert metadata.get("model_provider") == "deepseek"

0 commit comments

Comments
 (0)