Mirror of https://github.com/farion1231/cc-switch.git, last synced 2026-05-03 09:31:48 +08:00.
Compare commits
2 Commits
codex/issu
...
codex/issu
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e0c17ed867 | ||
|
|
1399cbbf06 |
@@ -113,6 +113,7 @@ pub fn anthropic_to_openai(body: Value, cache_key: Option<&str>) -> Result<Value
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
normalize_openai_system_messages(&mut messages);
|
||||||
result["messages"] = json!(messages);
|
result["messages"] = json!(messages);
|
||||||
|
|
||||||
// 转换参数 — o-series 模型需要 max_completion_tokens
|
// 转换参数 — o-series 模型需要 max_completion_tokens
|
||||||
@@ -182,6 +183,73 @@ pub fn anthropic_to_openai(body: Value, cache_key: Option<&str>) -> Result<Value
|
|||||||
Ok(result)
|
Ok(result)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn normalize_openai_system_messages(messages: &mut Vec<Value>) {
|
||||||
|
let system_count = messages
|
||||||
|
.iter()
|
||||||
|
.filter(|message| message.get("role").and_then(|value| value.as_str()) == Some("system"))
|
||||||
|
.count();
|
||||||
|
|
||||||
|
if system_count == 0 {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if system_count == 1 {
|
||||||
|
if let Some(index) = messages.iter().position(|message| {
|
||||||
|
message.get("role").and_then(|value| value.as_str()) == Some("system")
|
||||||
|
}) {
|
||||||
|
if index > 0 {
|
||||||
|
let message = messages.remove(index);
|
||||||
|
messages.insert(0, message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut parts = Vec::new();
|
||||||
|
let mut inherited_cache_control: Option<Value> = None;
|
||||||
|
let mut cache_control_conflict = false;
|
||||||
|
messages.retain(|message| {
|
||||||
|
if message.get("role").and_then(|value| value.as_str()) != Some("system") {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
match message.get("content") {
|
||||||
|
Some(Value::String(text)) if !text.is_empty() => parts.push(text.clone()),
|
||||||
|
Some(Value::Array(content_parts)) => {
|
||||||
|
let text = content_parts
|
||||||
|
.iter()
|
||||||
|
.filter_map(|part| part.get("text").and_then(|value| value.as_str()))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join("\n");
|
||||||
|
if !text.is_empty() {
|
||||||
|
parts.push(text);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(cache_control) = message.get("cache_control") {
|
||||||
|
match &inherited_cache_control {
|
||||||
|
None => inherited_cache_control = Some(cache_control.clone()),
|
||||||
|
Some(existing) if existing == cache_control => {}
|
||||||
|
Some(_) => cache_control_conflict = true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
false
|
||||||
|
});
|
||||||
|
|
||||||
|
if !parts.is_empty() {
|
||||||
|
let mut merged = json!({"role": "system", "content": parts.join("\n")});
|
||||||
|
if !cache_control_conflict {
|
||||||
|
if let Some(cache_control) = inherited_cache_control {
|
||||||
|
merged["cache_control"] = cache_control;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
messages.insert(0, merged);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// 转换单条消息到 OpenAI 格式(可能产生多条消息)
|
/// 转换单条消息到 OpenAI 格式(可能产生多条消息)
|
||||||
fn convert_message_to_openai(
|
fn convert_message_to_openai(
|
||||||
role: &str,
|
role: &str,
|
||||||
@@ -560,6 +628,50 @@ mod tests {
|
|||||||
assert_eq!(result["tools"][0]["function"]["name"], "get_weather");
|
assert_eq!(result["tools"][0]["function"]["name"], "get_weather");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Fragmented system content (a `system` array plus an inline system message)
// must be merged into a single leading system message, preserving order and
// the shared ephemeral cache_control.
#[test]
fn test_anthropic_to_openai_normalizes_fragmented_system_messages() {
    let input = json!({
        "model": "claude-3-sonnet",
        "max_tokens": 1024,
        "system": [
            {"type": "text", "text": "You are Claude Code.", "cache_control": {"type": "ephemeral"}},
            {"type": "text", "text": "Be concise."}
        ],
        "messages": [
            {"role": "system", "content": "Follow repo conventions."},
            {"role": "user", "content": "Hello"}
        ]
    });

    let result = anthropic_to_openai(input, None).unwrap();
    // One merged system message + the user message.
    assert_eq!(result["messages"].as_array().unwrap().len(), 2);
    assert_eq!(result["messages"][0]["role"], "system");
    assert_eq!(
        result["messages"][0]["content"],
        "You are Claude Code.\nBe concise.\nFollow repo conventions."
    );
    // The single non-conflicting cache_control is inherited by the merge.
    assert_eq!(result["messages"][0]["cache_control"]["type"], "ephemeral");
    assert_eq!(result["messages"][1]["role"], "user");
}
|
||||||
|
|
||||||
|
// When merged system fragments carry *different* cache_control values, the
// merged message must drop cache_control entirely instead of picking one.
#[test]
fn test_anthropic_to_openai_drops_conflicting_system_cache_control_when_merging() {
    let input = json!({
        "model": "claude-3-sonnet",
        "max_tokens": 1024,
        "system": [
            {"type": "text", "text": "You are Claude Code.", "cache_control": {"type": "ephemeral"}},
            {"type": "text", "text": "Be concise.", "cache_control": {"type": "ephemeral", "ttl": "5m"}}
        ],
        "messages": [{"role": "user", "content": "Hello"}]
    });

    let result = anthropic_to_openai(input, None).unwrap();
    assert_eq!(result["messages"][0]["role"], "system");
    assert_eq!(result["messages"][0]["content"], "You are Claude Code.\nBe concise.");
    // Conflicting cache_control values are discarded, not merged or guessed.
    assert!(result["messages"][0].get("cache_control").is_none());
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_anthropic_to_openai_tool_use() {
|
fn test_anthropic_to_openai_tool_use() {
|
||||||
let input = json!({
|
let input = json!({
|
||||||
|
|||||||
Reference in New Issue
Block a user