feat: return reasoning_content with tool_calls for DeepSeek models (#2543)

* feat: return reasoning_content with tool_calls for DeepSeek models

* fix: correct reasoning_content handling for DeepSeek tool_calls

* test: cover DeepSeek reasoning content round trip

---------

Co-authored-by: Jason <farion1231@gmail.com>
This commit is contained in:
bling-yshs
2026-05-07 23:03:07 +08:00
committed by GitHub
parent 1d44b1ba41
commit f5fbcd0493
3 changed files with 104 additions and 8 deletions
+39 -4
View File
@@ -82,9 +82,9 @@ pub fn claude_api_format_needs_transform(api_format: &str) -> bool {
)
}
fn is_moonshot_or_kimi_identifier(value: &str) -> bool {
fn is_reasoning_content_compatible_identifier(value: &str) -> bool {
let value = value.to_ascii_lowercase();
value.contains("moonshot") || value.contains("kimi")
value.contains("moonshot") || value.contains("kimi") || value.contains("deepseek")
}
fn should_preserve_reasoning_content_for_openai_chat(
@@ -94,7 +94,7 @@ fn should_preserve_reasoning_content_for_openai_chat(
if body
.get("model")
.and_then(|m| m.as_str())
.is_some_and(is_moonshot_or_kimi_identifier)
.is_some_and(is_reasoning_content_compatible_identifier)
{
return true;
}
@@ -113,7 +113,7 @@ fn should_preserve_reasoning_content_for_openai_chat(
base_urls
.into_iter()
.flatten()
.any(is_moonshot_or_kimi_identifier)
.any(is_reasoning_content_compatible_identifier)
}
pub fn transform_claude_request_for_api_format(
@@ -1673,4 +1673,39 @@ mod tests {
assert_eq!(msg["reasoning_content"], "I should call the tool.");
assert!(msg.get("tool_calls").is_some());
}
#[test]
fn test_transform_openai_chat_preserves_reasoning_content_for_deepseek_provider() {
    // A DeepSeek-backed openai_chat provider: both the model name and the
    // base URL identify it as reasoning_content-compatible.
    let deepseek_provider = create_provider_with_meta(
        json!({
            "env": {
                "ANTHROPIC_BASE_URL": "https://api.deepseek.com/v1",
                "ANTHROPIC_API_KEY": "test-key"
            }
        }),
        ProviderMeta {
            api_format: Some("openai_chat".to_string()),
            ..Default::default()
        },
    );
    // Assistant turn carrying a thinking block alongside a tool_use block.
    let request = json!({
        "model": "deepseek-v4-flash",
        "max_tokens": 64,
        "messages": [{
            "role": "assistant",
            "content": [
                {"type": "thinking", "thinking": "I should call the tool."},
                {"type": "tool_use", "id": "call_123", "name": "get_weather", "input": {"location": "Tokyo"}}
            ]
        }]
    });
    let result =
        transform_claude_request_for_api_format(request, &deepseek_provider, "openai_chat", None, None)
            .unwrap();
    // The thinking text must survive as reasoning_content next to the tool calls.
    let message = &result["messages"][0];
    assert!(message.get("tool_calls").is_some());
    assert_eq!(message["reasoning_content"], "I should call the tool.");
}
}
+3 -2
View File
@@ -33,8 +33,9 @@ struct StreamChoice {
struct Delta {
#[serde(default)]
content: Option<String>,
#[serde(default)]
reasoning: Option<String>, // OpenRouter 的推理内容
// OpenRouter/Kimi/其它 使用 reasoning;DeepSeek 使用 reasoning_content
#[serde(default, alias = "reasoning_content")]
reasoning: Option<String>,
#[serde(default)]
tool_calls: Option<Vec<DeltaToolCall>>,
}
+62 -2
View File
@@ -118,7 +118,7 @@ pub fn anthropic_to_openai(body: Value) -> Result<Value, ProxyError> {
/// Anthropic 请求 → OpenAI Chat Completions 请求
///
/// `preserve_reasoning_content` 仅用于明确需要 Moonshot/Kimi
/// `preserve_reasoning_content` 仅用于明确需要 Moonshot/Kimi/DeepSeek
/// `reasoning_content` 兼容字段的 provider。默认转换保持通用 OpenAI-compatible
/// 请求体,避免向严格后端发送未知字段。
pub fn anthropic_to_openai_with_reasoning_content(
@@ -332,7 +332,7 @@ fn convert_message_to_openai(
if let Some(blocks) = content.as_array() {
let mut content_parts = Vec::new();
let mut tool_calls = Vec::new();
// reasoning_parts: 仅在兼容 Moonshot/Kimi thinking tool-call 路径时
// reasoning_parts: 仅在兼容 Moonshot/Kimi/DeepSeek thinking tool-call 路径时
// 生成 reasoning_content,通用 OpenAI-compatible 路径不发送该非标准字段。
let mut reasoning_parts = Vec::new();
@@ -493,6 +493,13 @@ pub fn openai_to_anthropic(body: Value) -> Result<Value, ProxyError> {
let mut content = Vec::new();
let mut has_tool_use = false;
// DeepSeek provider 会把思考内容放在 message.reasoning_content。
if let Some(reasoning_content) = message.get("reasoning_content").and_then(|r| r.as_str()) {
if !reasoning_content.is_empty() {
content.push(json!({"type": "thinking", "thinking": reasoning_content}));
}
}
// 文本/拒绝内容
if let Some(msg_content) = message.get("content") {
if let Some(text) = msg_content.as_str() {
@@ -1048,6 +1055,59 @@ mod tests {
assert_eq!(result["stop_reason"], "tool_use");
}
#[test]
fn test_deepseek_reasoning_content_round_trips_for_tool_calls() {
    // Upstream DeepSeek response: reasoning_content + text + a tool call.
    let openai_response = json!({
        "id": "chatcmpl-deepseek",
        "object": "chat.completion",
        "created": 1234567890,
        "model": "deepseek-v4-flash",
        "choices": [{
            "index": 0,
            "message": {
                "role": "assistant",
                "reasoning_content": "Need the current date before calling weather.",
                "content": "Let me check the date first.",
                "tool_calls": [{
                    "id": "call_date",
                    "type": "function",
                    "function": {"name": "get_date", "arguments": "{}"}
                }]
            },
            "finish_reason": "tool_calls"
        }],
        "usage": {"prompt_tokens": 10, "completion_tokens": 5, "total_tokens": 15}
    });

    // OpenAI → Anthropic: reasoning_content becomes the leading thinking block,
    // followed by the text block and the tool_use block.
    let converted = openai_to_anthropic(openai_response).unwrap();
    let blocks = &converted["content"];
    assert_eq!(blocks[0]["type"], "thinking");
    assert_eq!(
        blocks[0]["thinking"],
        "Need the current date before calling weather."
    );
    assert_eq!(blocks[1]["type"], "text");
    assert_eq!(blocks[2]["type"], "tool_use");
    assert_eq!(blocks[2]["id"], "call_date");

    // Replay the converted content back towards OpenAI with
    // reasoning_content preservation enabled.
    let replay_request = json!({
        "model": "deepseek-v4-flash",
        "max_tokens": 1024,
        "messages": [{
            "role": "assistant",
            "content": converted["content"].clone()
        }]
    });
    let round_tripped =
        anthropic_to_openai_with_reasoning_content(replay_request, true).unwrap();

    // Anthropic → OpenAI: thinking returns as reasoning_content and the
    // tool call survives intact.
    let assistant_msg = &round_tripped["messages"][0];
    assert_eq!(
        assistant_msg["reasoning_content"],
        "Need the current date before calling weather."
    );
    assert_eq!(assistant_msg["tool_calls"][0]["id"], "call_date");
    assert_eq!(assistant_msg["tool_calls"][0]["function"]["name"], "get_date");
}
#[test]
fn test_model_passthrough() {
// 格式转换层只做结构转换,模型映射由上游 proxy::model_mapper 处理