mirror of
https://github.com/farion1231/cc-switch.git
synced 2026-05-07 11:47:28 +08:00
Normalize fragmented system prompts for strict chat backends
Some OpenAI-compatible chat providers reject requests when Claude-side
system fragments arrive as multiple system messages. Normalize the
converted OpenAI chat payload so system content becomes a single
leading system message while leaving the rest of the message stream
unchanged.

Constraint: Nvidia/Qwen-style chat completions require a single leading system prompt
Rejected: Reorder system messages only | still leaves fragmented system prompts for strict backends
Confidence: high
Scope-risk: narrow
Reversibility: clean
Directive: Keep OpenAI chat system prompts normalized unless a provider explicitly requires fragmented system messages
Tested: cargo test proxy::providers::transform --manifest-path src-tauri/Cargo.toml
Not-tested: Full end-to-end proxy capture against Nvidia upstream in this session
Related: #1881
This commit is contained in:
@@ -113,6 +113,7 @@ pub fn anthropic_to_openai(body: Value, cache_key: Option<&str>) -> Result<Value
|
||||
}
|
||||
}
|
||||
|
||||
normalize_openai_system_messages(&mut messages);
|
||||
result["messages"] = json!(messages);
|
||||
|
||||
// 转换参数 — o-series 模型需要 max_completion_tokens
|
||||
@@ -182,6 +183,57 @@ pub fn anthropic_to_openai(body: Value, cache_key: Option<&str>) -> Result<Value
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
fn normalize_openai_system_messages(messages: &mut Vec<Value>) {
|
||||
let system_count = messages
|
||||
.iter()
|
||||
.filter(|message| message.get("role").and_then(|value| value.as_str()) == Some("system"))
|
||||
.count();
|
||||
|
||||
if system_count == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
if system_count == 1 {
|
||||
if let Some(index) = messages.iter().position(|message| {
|
||||
message.get("role").and_then(|value| value.as_str()) == Some("system")
|
||||
}) {
|
||||
if index > 0 {
|
||||
let message = messages.remove(index);
|
||||
messages.insert(0, message);
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
let mut parts = Vec::new();
|
||||
messages.retain(|message| {
|
||||
if message.get("role").and_then(|value| value.as_str()) != Some("system") {
|
||||
return true;
|
||||
}
|
||||
|
||||
match message.get("content") {
|
||||
Some(Value::String(text)) if !text.is_empty() => parts.push(text.clone()),
|
||||
Some(Value::Array(content_parts)) => {
|
||||
let text = content_parts
|
||||
.iter()
|
||||
.filter_map(|part| part.get("text").and_then(|value| value.as_str()))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
if !text.is_empty() {
|
||||
parts.push(text);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
false
|
||||
});
|
||||
|
||||
if !parts.is_empty() {
|
||||
messages.insert(0, json!({"role": "system", "content": parts.join("\n")}));
|
||||
}
|
||||
}
|
||||
|
||||
/// 转换单条消息到 OpenAI 格式(可能产生多条消息)
|
||||
fn convert_message_to_openai(
|
||||
role: &str,
|
||||
@@ -560,6 +612,31 @@ mod tests {
|
||||
assert_eq!(result["tools"][0]["function"]["name"], "get_weather");
|
||||
}
|
||||
|
||||
#[test]
fn test_anthropic_to_openai_normalizes_fragmented_system_messages() {
    // System content arrives in three pieces: two top-level `system`
    // fragments plus an in-stream system message.
    let request = json!({
        "model": "claude-3-sonnet",
        "max_tokens": 1024,
        "system": [
            {"type": "text", "text": "You are Claude Code."},
            {"type": "text", "text": "Be concise."}
        ],
        "messages": [
            {"role": "system", "content": "Follow repo conventions."},
            {"role": "user", "content": "Hello"}
        ]
    });

    let converted = anthropic_to_openai(request, None).unwrap();
    let messages = converted["messages"].as_array().unwrap();

    // All fragments collapse into one leading system message; the user
    // message survives unchanged behind it.
    assert_eq!(messages.len(), 2);
    assert_eq!(messages[0]["role"], "system");
    assert_eq!(
        messages[0]["content"],
        "You are Claude Code.\nBe concise.\nFollow repo conventions."
    );
    assert_eq!(messages[1]["role"], "user");
}
|
||||
|
||||
#[test]
|
||||
fn test_anthropic_to_openai_tool_use() {
|
||||
let input = json!({
|
||||
|
||||
Reference in New Issue
Block a user