Compare commits

...

7 Commits

Author SHA1 Message Date
YoVinchen
e01ef2f51a Merge branch 'main' into feat/format-transform-refactor 2026-01-26 01:43:57 +08:00
YoVinchen
dc8a70b14e feat(i18n): add format transform translations
Add translations for format transform configuration UI in Chinese,
English, and Japanese.
2026-01-25 19:59:07 +08:00
YoVinchen
4ee45d2bc3 feat(ui): add format transform configuration in provider form
- Add FormatTransformConfig type definition
- Add format transform section in ProviderAdvancedConfig component
- Support source/target format selection (Anthropic/OpenAI)
- Add transform streaming toggle option
2026-01-25 19:58:28 +08:00
YoVinchen
be246f8596 fix(proxy): prevent duplicate endpoint in Codex adapter URL building
Fix URL construction when base_url already contains the endpoint path
(e.g., base_url="https://api.example.com/v1/chat/completions" with
endpoint="/v1/chat/completions" should not result in double path).
2026-01-25 19:57:50 +08:00
YoVinchen
6046c166cc feat(proxy): integrate format transform module in forwarder
- Use TransformConfig.from_provider() to get transform settings
- Add early validation for missing transformers
- Support transform_streaming flag to force non-streaming mode
- Transform endpoint path based on source/target format
2026-01-25 19:50:29 +08:00
YoVinchen
fe4a968eef feat(provider): add FormatTransformConfig for API format conversion
Add FormatTransformConfig struct to support API format transformation
between different providers (e.g., Anthropic ↔ OpenAI). This enables
providers like OpenRouter that use OpenAI-compatible interfaces.
2026-01-25 19:46:22 +08:00
YoVinchen
55301abc00 refactor(proxy): extract format transform into standalone module
- Create new `proxy/transform/` module with clean architecture:
  - `traits.rs`: FormatTransformer trait definition
  - `format.rs`: ApiFormat enum (Anthropic, OpenAI, Gemini)
  - `registry.rs`: TransformerRegistry with global instance
  - `config.rs`: TransformConfig from Provider settings
  - `anthropic_openai/`: Bidirectional Anthropic ↔ OpenAI converters

- Move streaming transform from providers/ to transform/
- Remove legacy transform code from providers/adapter.rs and claude.rs
- Update handlers.rs to use new transform module

This refactor improves maintainability and makes it easier to add
new format converters in the future.
2026-01-25 19:37:30 +08:00
24 changed files with 1507 additions and 802 deletions

View File

@@ -191,6 +191,23 @@ pub struct ProviderProxyConfig {
pub proxy_password: Option<String>,
}
/// Format-transform configuration (for providers such as OpenRouter that
/// require API-format conversion between vendors).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct FormatTransformConfig {
    /// Whether format transformation is enabled.
    #[serde(default)]
    pub enabled: bool,
    /// Source format: "anthropic", "openai", or "gemini".
    #[serde(rename = "sourceFormat", skip_serializing_if = "Option::is_none")]
    pub source_format: Option<String>,
    /// Target format: "anthropic", "openai", or "gemini".
    #[serde(rename = "targetFormat", skip_serializing_if = "Option::is_none")]
    pub target_format: Option<String>,
    /// Whether to transform streaming responses (treated as true when absent).
    #[serde(rename = "transformStreaming", skip_serializing_if = "Option::is_none")]
    pub transform_streaming: Option<bool>,
}
/// 供应商元数据
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ProviderMeta {
@@ -227,6 +244,9 @@ pub struct ProviderMeta {
/// 供应商单独的代理配置
#[serde(rename = "proxyConfig", skip_serializing_if = "Option::is_none")]
pub proxy_config: Option<ProviderProxyConfig>,
/// 格式转换配置(用于 OpenRouter 等需要 API 格式转换的供应商)
#[serde(rename = "formatTransform", skip_serializing_if = "Option::is_none")]
pub format_transform: Option<FormatTransformConfig>,
}
impl ProviderManager {

View File

@@ -9,6 +9,7 @@ use super::{
provider_router::ProviderRouter,
providers::{get_adapter, ProviderAdapter, ProviderType},
thinking_rectifier::{rectify_anthropic_request, should_rectify_thinking_signature},
transform::{get_transformer, TransformConfig},
types::{ProxyStatus, RectifierConfig},
ProxyError,
};
@@ -558,26 +559,63 @@ impl RequestForwarder {
// 使用适配器提取 base_url
let base_url = adapter.extract_base_url(provider)?;
// 检查是否需要格式转换
let needs_transform = adapter.needs_transform(provider);
// 获取格式转换配置
let transform_config = TransformConfig::from_provider(provider);
let needs_transform = transform_config.needs_transform();
let effective_endpoint =
if needs_transform && adapter.name() == "Claude" && endpoint == "/v1/messages" {
"/v1/chat/completions"
} else {
endpoint
};
// 如果需要转换但找不到转换器,直接返回错误(避免静默透传后在响应阶段失败)
let transformer = if needs_transform {
let t = get_transformer(
transform_config.source_format,
transform_config.target_format,
);
if t.is_none() {
log::error!(
"[Forwarder] 格式转换已启用但找不到转换器: {:?} → {:?}",
transform_config.source_format,
transform_config.target_format
);
return Err(ProxyError::TransformError(format!(
"No transformer registered for {:?}{:?}. Please disable format transform or use supported formats (Anthropic ↔ OpenAI).",
transform_config.source_format,
transform_config.target_format
)));
}
t
} else {
None
};
// 确定有效端点
let effective_endpoint = if let Some(ref t) = transformer {
t.transform_endpoint(endpoint)
} else {
endpoint.to_string()
};
// 使用适配器构建 URL
let url = adapter.build_url(&base_url, effective_endpoint);
let url = adapter.build_url(&base_url, &effective_endpoint);
// 应用模型映射(独立于格式转换)
let (mapped_body, _original_model, _mapped_model) =
let (mut mapped_body, _original_model, _mapped_model) =
super::model_mapper::apply_model_mapping(body.clone(), provider);
// 如果启用格式转换但禁用流式转换,强制将 stream 设为 false
// 避免上游返回 SSE 流但我们无法转换的情况
if needs_transform && !transform_config.transform_streaming {
if let Some(stream_val) = mapped_body.get("stream") {
if stream_val.as_bool() == Some(true) {
log::info!("[Forwarder] transform_streaming=false强制将 stream 设为 false");
if let Some(obj) = mapped_body.as_object_mut() {
obj.insert("stream".to_string(), serde_json::Value::Bool(false));
}
}
}
}
// 转换请求体(如果需要)
let request_body = if needs_transform {
adapter.transform_request(mapped_body, provider)?
let request_body = if let Some(ref t) = transformer {
t.transform_request(mapped_body)?
} else {
mapped_body
};

View File

@@ -13,9 +13,9 @@ use super::{
CLAUDE_PARSER_CONFIG, CODEX_PARSER_CONFIG, GEMINI_PARSER_CONFIG, OPENAI_PARSER_CONFIG,
},
handler_context::RequestContext,
providers::{get_adapter, streaming::create_anthropic_sse_stream, transform},
response_processor::{create_logged_passthrough_stream, process_response, SseUsageCollector},
server::ProxyState,
transform::{get_transformer, TransformConfig},
types::*,
usage::parser::TokenUsage,
ProxyError,
@@ -94,13 +94,20 @@ pub async fn handle_messages(
ctx.provider = result.provider;
let response = result.response;
// 检查是否需要格式转换(OpenRouter 等中转服务
let adapter = get_adapter(&AppType::Claude);
let needs_transform = adapter.needs_transform(&ctx.provider);
// 检查是否需要格式转换(通过 Provider 配置
let transform_config = TransformConfig::from_provider(&ctx.provider);
// Claude 特有:格式转换处理
if needs_transform {
return handle_claude_transform(response, &ctx, &state, &body, is_stream).await;
if transform_config.needs_transform() {
return handle_claude_transform(
response,
&ctx,
&state,
&body,
is_stream,
&transform_config,
)
.await;
}
// 通用响应处理(透传模式)
@@ -116,13 +123,26 @@ async fn handle_claude_transform(
state: &ProxyState,
_original_body: &Value,
is_stream: bool,
transform_config: &TransformConfig,
) -> Result<axum::response::Response, ProxyError> {
let status = response.status();
if is_stream {
// 获取响应转换器OpenAI → Anthropic
let response_transformer = get_transformer(
transform_config.target_format,
transform_config.source_format,
)
.ok_or_else(|| {
ProxyError::TransformError(format!(
"No transformer for {:?}{:?}",
transform_config.target_format, transform_config.source_format
))
})?;
if is_stream && transform_config.transform_streaming {
// 流式响应转换 (OpenAI SSE → Anthropic SSE)
let stream = response.bytes_stream();
let sse_stream = create_anthropic_sse_stream(stream);
let sse_stream = response_transformer.transform_stream(Box::pin(stream));
// 创建使用量收集器
let usage_collector = {
@@ -202,10 +222,12 @@ async fn handle_claude_transform(
ProxyError::TransformError(format!("Failed to parse OpenAI response: {e}"))
})?;
let anthropic_response = transform::openai_to_anthropic(openai_response).map_err(|e| {
log::error!("[Claude] 转换响应失败: {e}");
e
})?;
let anthropic_response = response_transformer
.transform_response(openai_response)
.map_err(|e| {
log::error!("[Claude] 转换响应失败: {e}");
e
})?;
// 记录使用量
if let Some(usage) = TokenUsage::from_claude_response(&anthropic_response) {

View File

@@ -22,6 +22,7 @@ pub mod response_processor;
pub(crate) mod server;
pub mod session;
pub mod thinking_rectifier;
pub mod transform;
pub(crate) mod types;
pub mod usage;

View File

@@ -6,7 +6,6 @@ use super::auth::AuthInfo;
use crate::provider::Provider;
use crate::proxy::error::ProxyError;
use reqwest::RequestBuilder;
use serde_json::Value;
/// 供应商适配器 Trait
///
@@ -83,49 +82,4 @@ pub trait ProviderAdapter: Send + Sync {
/// # Returns
/// 添加了认证头的 RequestBuilder
fn add_auth_headers(&self, request: RequestBuilder, auth: &AuthInfo) -> RequestBuilder;
/// 是否需要格式转换
///
/// 默认返回 `false`(透传模式)。
/// 仅当供应商需要格式转换时(如 Claude + OpenRouter 旧 OpenAI 兼容接口)才返回 `true`。
///
/// # Arguments
/// * `provider` - Provider 配置
fn needs_transform(&self, _provider: &Provider) -> bool {
false
}
/// 转换请求体
///
/// 将请求体从一种格式转换为另一种格式(如 Anthropic → OpenAI
/// 默认实现直接返回原始请求体(透传)。
///
/// # Arguments
/// * `body` - 原始请求体
/// * `provider` - Provider 配置(用于获取模型映射等)
///
/// # Returns
/// * `Ok(Value)` - 转换后的请求体
/// * `Err(ProxyError)` - 转换失败
fn transform_request(&self, body: Value, _provider: &Provider) -> Result<Value, ProxyError> {
Ok(body)
}
/// 转换响应体
///
/// 将响应体从一种格式转换为另一种格式(如 OpenAI → Anthropic
/// 默认实现直接返回原始响应体(透传)。
///
/// # Arguments
/// * `body` - 原始响应体
///
/// # Returns
/// * `Ok(Value)` - 转换后的响应体
/// * `Err(ProxyError)` - 转换失败
///
/// Note: 响应转换将在 handler 层集成,目前预留接口
#[allow(dead_code)]
fn transform_response(&self, body: Value) -> Result<Value, ProxyError> {
Ok(body)
}
}

View File

@@ -48,25 +48,6 @@ impl ClaudeAdapter {
false
}
/// 检测 OpenRouter 是否启用兼容模式
fn is_openrouter_compat_enabled(&self, provider: &Provider) -> bool {
if !self.is_openrouter(provider) {
return false;
}
let raw = provider.settings_config.get("openrouter_compat_mode");
match raw {
Some(serde_json::Value::Bool(enabled)) => *enabled,
Some(serde_json::Value::Number(num)) => num.as_i64().unwrap_or(0) != 0,
Some(serde_json::Value::String(value)) => {
let normalized = value.trim().to_lowercase();
normalized == "true" || normalized == "1"
}
// OpenRouter now supports Claude Code compatible API, default to passthrough
_ => false,
}
}
/// 检测是否为仅 Bearer 认证模式
fn is_bearer_only_mode(&self, provider: &Provider) -> bool {
// 检查 settings_config 中的 auth_mode
@@ -252,27 +233,6 @@ impl ProviderAdapter for ClaudeAdapter {
_ => request,
}
}
fn needs_transform(&self, _provider: &Provider) -> bool {
// NOTE:
// OpenRouter 已推出 Claude Code 兼容接口(可直接处理 `/v1/messages`),默认不再启用
// Anthropic ↔ OpenAI 的格式转换。
//
// 如果未来需要回退到旧的 OpenAI Chat Completions 方案,可恢复下面这行:
self.is_openrouter_compat_enabled(_provider)
}
fn transform_request(
&self,
body: serde_json::Value,
provider: &Provider,
) -> Result<serde_json::Value, ProxyError> {
super::transform::anthropic_to_openai(body, provider)
}
fn transform_response(&self, body: serde_json::Value) -> Result<serde_json::Value, ProxyError> {
super::transform::openai_to_anthropic(body)
}
}
#[cfg(test)]
@@ -454,41 +414,4 @@ mod tests {
let url = adapter.build_url("https://api.anthropic.com", "/v1/messages?foo=bar");
assert_eq!(url, "https://api.anthropic.com/v1/messages?foo=bar");
}
#[test]
fn test_needs_transform() {
let adapter = ClaudeAdapter::new();
let anthropic_provider = create_provider(json!({
"env": {
"ANTHROPIC_BASE_URL": "https://api.anthropic.com"
}
}));
assert!(!adapter.needs_transform(&anthropic_provider));
// OpenRouter provider without explicit setting now defaults to passthrough (no transform)
let openrouter_provider = create_provider(json!({
"env": {
"ANTHROPIC_BASE_URL": "https://openrouter.ai/api"
}
}));
assert!(!adapter.needs_transform(&openrouter_provider));
// OpenRouter provider with explicit compat mode enabled should transform
let openrouter_enabled = create_provider(json!({
"env": {
"ANTHROPIC_BASE_URL": "https://openrouter.ai/api"
},
"openrouter_compat_mode": true
}));
assert!(adapter.needs_transform(&openrouter_enabled));
let openrouter_disabled = create_provider(json!({
"env": {
"ANTHROPIC_BASE_URL": "https://openrouter.ai/api"
},
"openrouter_compat_mode": false
}));
assert!(!adapter.needs_transform(&openrouter_disabled));
}
}

View File

@@ -141,6 +141,20 @@ impl ProviderAdapter for CodexAdapter {
let base_trimmed = base_url.trim_end_matches('/');
let endpoint_trimmed = endpoint.trim_start_matches('/');
// 检查 base_url 是否已包含 endpoint 的核心路径
// 例如base_url = "https://api.example.com/v1/chat/completions"
// endpoint = "/v1/chat/completions"
// 此时不应再拼接,直接返回 base_url
let endpoint_core = endpoint_trimmed
.trim_start_matches("v1/")
.trim_start_matches("v1");
let endpoint_core = endpoint_core.trim_start_matches('/');
// 如果 base_url 已经以 endpoint 核心路径结尾,直接返回 base_url
if !endpoint_core.is_empty() && base_trimmed.ends_with(endpoint_core) {
return base_trimmed.to_string();
}
let mut url = format!("{base_trimmed}/{endpoint_trimmed}");
// 去除重复的 /v1/v1
@@ -231,6 +245,33 @@ mod tests {
assert_eq!(url, "https://www.packyapi.com/v1/responses");
}
#[test]
fn test_build_url_base_already_has_chat_completions() {
    let adapter = CodexAdapter::new();
    // base_url already contains chat/completions — it must not be appended again.
    let url = adapter.build_url(
        "https://api.example.com/v1/chat/completions",
        "/v1/chat/completions",
    );
    assert_eq!(url, "https://api.example.com/v1/chat/completions");
}
#[test]
fn test_build_url_base_already_has_responses() {
    let adapter = CodexAdapter::new();
    // base_url already contains responses — it must not be appended again.
    let url = adapter.build_url("https://api.example.com/v1/responses", "/v1/responses");
    assert_eq!(url, "https://api.example.com/v1/responses");
}
#[test]
fn test_build_url_base_without_endpoint() {
    // When base_url does not already contain the endpoint, the two parts
    // should be joined normally (with the duplicate /v1 collapsed).
    let adapter = CodexAdapter::new();
    let built = adapter.build_url("https://api.example.com/v1", "/v1/chat/completions");
    assert_eq!(built, "https://api.example.com/v1/chat/completions");
}
// 官方客户端检测测试
#[test]
fn test_is_official_client_vscode() {

View File

@@ -17,8 +17,6 @@ mod claude;
mod codex;
mod gemini;
pub mod models;
pub mod streaming;
pub mod transform;
use crate::app_config::AppType;
use crate::provider::Provider;

View File

@@ -1,640 +0,0 @@
//! 格式转换模块
//!
//! 实现 Anthropic ↔ OpenAI 格式转换,用于 OpenRouter 支持
//! 参考: anthropic-proxy-rs
use crate::provider::Provider;
use crate::proxy::error::ProxyError;
use serde_json::{json, Value};
/// 从 Provider 配置中获取模型映射
fn get_model_from_provider(model: &str, provider: &Provider, body: &Value) -> String {
let env = provider.settings_config.get("env");
let model_lower = model.to_lowercase();
// 检测 thinking 参数
let has_thinking = body
.get("thinking")
.and_then(|v| v.as_object())
.and_then(|o| o.get("type"))
.and_then(|t| t.as_str())
== Some("enabled");
if let Some(env) = env {
// 如果启用 thinking优先使用推理模型
if has_thinking {
if let Some(m) = env
.get("ANTHROPIC_REASONING_MODEL")
.and_then(|v| v.as_str())
{
log::debug!("[Transform] 使用推理模型: {m}");
return m.to_string();
}
}
// 根据模型类型选择配置模型
if model_lower.contains("haiku") {
if let Some(m) = env
.get("ANTHROPIC_DEFAULT_HAIKU_MODEL")
.and_then(|v| v.as_str())
{
return m.to_string();
}
}
if model_lower.contains("opus") {
if let Some(m) = env
.get("ANTHROPIC_DEFAULT_OPUS_MODEL")
.and_then(|v| v.as_str())
{
return m.to_string();
}
}
if model_lower.contains("sonnet") {
if let Some(m) = env
.get("ANTHROPIC_DEFAULT_SONNET_MODEL")
.and_then(|v| v.as_str())
{
return m.to_string();
}
}
// 默认使用 ANTHROPIC_MODEL
if let Some(m) = env.get("ANTHROPIC_MODEL").and_then(|v| v.as_str()) {
return m.to_string();
}
}
model.to_string()
}
/// Anthropic 请求 → OpenAI 请求
pub fn anthropic_to_openai(body: Value, provider: &Provider) -> Result<Value, ProxyError> {
let mut result = json!({});
// 模型映射:使用 Provider 配置中的模型(支持 thinking 参数)
if let Some(model) = body.get("model").and_then(|m| m.as_str()) {
let mapped_model = get_model_from_provider(model, provider, &body);
result["model"] = json!(mapped_model);
}
let mut messages = Vec::new();
// 处理 system prompt
if let Some(system) = body.get("system") {
if let Some(text) = system.as_str() {
// 单个字符串
messages.push(json!({"role": "system", "content": text}));
} else if let Some(arr) = system.as_array() {
// 多个 system message
for msg in arr {
if let Some(text) = msg.get("text").and_then(|t| t.as_str()) {
messages.push(json!({"role": "system", "content": text}));
}
}
}
}
// 转换 messages
if let Some(msgs) = body.get("messages").and_then(|m| m.as_array()) {
for msg in msgs {
let role = msg.get("role").and_then(|r| r.as_str()).unwrap_or("user");
let content = msg.get("content");
let converted = convert_message_to_openai(role, content)?;
messages.extend(converted);
}
}
result["messages"] = json!(messages);
// 转换参数
if let Some(v) = body.get("max_tokens") {
result["max_tokens"] = v.clone();
}
if let Some(v) = body.get("temperature") {
result["temperature"] = v.clone();
}
if let Some(v) = body.get("top_p") {
result["top_p"] = v.clone();
}
if let Some(v) = body.get("stop_sequences") {
result["stop"] = v.clone();
}
if let Some(v) = body.get("stream") {
result["stream"] = v.clone();
}
// 转换 tools (过滤 BatchTool)
if let Some(tools) = body.get("tools").and_then(|t| t.as_array()) {
let openai_tools: Vec<Value> = tools
.iter()
.filter(|t| t.get("type").and_then(|v| v.as_str()) != Some("BatchTool"))
.map(|t| {
json!({
"type": "function",
"function": {
"name": t.get("name").and_then(|n| n.as_str()).unwrap_or(""),
"description": t.get("description"),
"parameters": clean_schema(t.get("input_schema").cloned().unwrap_or(json!({})))
}
})
})
.collect();
if !openai_tools.is_empty() {
result["tools"] = json!(openai_tools);
}
}
if let Some(v) = body.get("tool_choice") {
result["tool_choice"] = v.clone();
}
Ok(result)
}
/// 转换单条消息到 OpenAI 格式(可能产生多条消息)
fn convert_message_to_openai(
role: &str,
content: Option<&Value>,
) -> Result<Vec<Value>, ProxyError> {
let mut result = Vec::new();
let content = match content {
Some(c) => c,
None => {
result.push(json!({"role": role, "content": null}));
return Ok(result);
}
};
// 字符串内容
if let Some(text) = content.as_str() {
result.push(json!({"role": role, "content": text}));
return Ok(result);
}
// 数组内容(多模态/工具调用)
if let Some(blocks) = content.as_array() {
let mut content_parts = Vec::new();
let mut tool_calls = Vec::new();
for block in blocks {
let block_type = block.get("type").and_then(|t| t.as_str()).unwrap_or("");
match block_type {
"text" => {
if let Some(text) = block.get("text").and_then(|t| t.as_str()) {
content_parts.push(json!({"type": "text", "text": text}));
}
}
"image" => {
if let Some(source) = block.get("source") {
let media_type = source
.get("media_type")
.and_then(|m| m.as_str())
.unwrap_or("image/png");
let data = source.get("data").and_then(|d| d.as_str()).unwrap_or("");
content_parts.push(json!({
"type": "image_url",
"image_url": {"url": format!("data:{};base64,{}", media_type, data)}
}));
}
}
"tool_use" => {
let id = block.get("id").and_then(|i| i.as_str()).unwrap_or("");
let name = block.get("name").and_then(|n| n.as_str()).unwrap_or("");
let input = block.get("input").cloned().unwrap_or(json!({}));
tool_calls.push(json!({
"id": id,
"type": "function",
"function": {
"name": name,
"arguments": serde_json::to_string(&input).unwrap_or_default()
}
}));
}
"tool_result" => {
// tool_result 变成单独的 tool role 消息
let tool_use_id = block
.get("tool_use_id")
.and_then(|i| i.as_str())
.unwrap_or("");
let content_val = block.get("content");
let content_str = match content_val {
Some(Value::String(s)) => s.clone(),
Some(v) => serde_json::to_string(v).unwrap_or_default(),
None => String::new(),
};
result.push(json!({
"role": "tool",
"tool_call_id": tool_use_id,
"content": content_str
}));
}
"thinking" => {
// 跳过 thinking blocks
}
_ => {}
}
}
// 添加带内容和/或工具调用的消息
if !content_parts.is_empty() || !tool_calls.is_empty() {
let mut msg = json!({"role": role});
// 内容处理
if content_parts.is_empty() {
msg["content"] = Value::Null;
} else if content_parts.len() == 1 {
if let Some(text) = content_parts[0].get("text") {
msg["content"] = text.clone();
} else {
msg["content"] = json!(content_parts);
}
} else {
msg["content"] = json!(content_parts);
}
// 工具调用
if !tool_calls.is_empty() {
msg["tool_calls"] = json!(tool_calls);
}
result.push(msg);
}
return Ok(result);
}
// 其他情况直接透传
result.push(json!({"role": role, "content": content}));
Ok(result)
}
/// 清理 JSON schema移除不支持的 format
fn clean_schema(mut schema: Value) -> Value {
if let Some(obj) = schema.as_object_mut() {
// 移除 "format": "uri"
if obj.get("format").and_then(|v| v.as_str()) == Some("uri") {
obj.remove("format");
}
// 递归清理嵌套 schema
if let Some(properties) = obj.get_mut("properties").and_then(|v| v.as_object_mut()) {
for (_, value) in properties.iter_mut() {
*value = clean_schema(value.clone());
}
}
if let Some(items) = obj.get_mut("items") {
*items = clean_schema(items.clone());
}
}
schema
}
/// OpenAI 响应 → Anthropic 响应
pub fn openai_to_anthropic(body: Value) -> Result<Value, ProxyError> {
let choices = body
.get("choices")
.and_then(|c| c.as_array())
.ok_or_else(|| ProxyError::TransformError("No choices in response".to_string()))?;
let choice = choices
.first()
.ok_or_else(|| ProxyError::TransformError("Empty choices array".to_string()))?;
let message = choice
.get("message")
.ok_or_else(|| ProxyError::TransformError("No message in choice".to_string()))?;
let mut content = Vec::new();
// 文本内容
if let Some(text) = message.get("content").and_then(|c| c.as_str()) {
if !text.is_empty() {
content.push(json!({"type": "text", "text": text}));
}
}
// 工具调用
if let Some(tool_calls) = message.get("tool_calls").and_then(|t| t.as_array()) {
for tc in tool_calls {
let id = tc.get("id").and_then(|i| i.as_str()).unwrap_or("");
let empty_obj = json!({});
let func = tc.get("function").unwrap_or(&empty_obj);
let name = func.get("name").and_then(|n| n.as_str()).unwrap_or("");
let args_str = func
.get("arguments")
.and_then(|a| a.as_str())
.unwrap_or("{}");
let input: Value = serde_json::from_str(args_str).unwrap_or(json!({}));
content.push(json!({
"type": "tool_use",
"id": id,
"name": name,
"input": input
}));
}
}
// 映射 finish_reason → stop_reason
let stop_reason = choice
.get("finish_reason")
.and_then(|r| r.as_str())
.map(|r| match r {
"stop" => "end_turn",
"length" => "max_tokens",
"tool_calls" => "tool_use",
other => other,
});
// usage
let usage = body.get("usage").cloned().unwrap_or(json!({}));
let input_tokens = usage
.get("prompt_tokens")
.and_then(|v| v.as_u64())
.unwrap_or(0) as u32;
let output_tokens = usage
.get("completion_tokens")
.and_then(|v| v.as_u64())
.unwrap_or(0) as u32;
let result = json!({
"id": body.get("id").and_then(|i| i.as_str()).unwrap_or(""),
"type": "message",
"role": "assistant",
"content": content,
"model": body.get("model").and_then(|m| m.as_str()).unwrap_or(""),
"stop_reason": stop_reason,
"stop_sequence": null,
"usage": {
"input_tokens": input_tokens,
"output_tokens": output_tokens
}
});
Ok(result)
}
#[cfg(test)]
mod tests {
use super::*;
fn create_provider(env_config: Value) -> Provider {
Provider {
id: "test".to_string(),
name: "Test Provider".to_string(),
settings_config: json!({"env": env_config}),
website_url: None,
category: None,
created_at: None,
sort_index: None,
notes: None,
meta: None,
icon: None,
icon_color: None,
in_failover_queue: false,
}
}
fn create_openrouter_provider() -> Provider {
create_provider(json!({
"ANTHROPIC_BASE_URL": "https://openrouter.ai/api",
"ANTHROPIC_MODEL": "anthropic/claude-sonnet-4.5",
"ANTHROPIC_DEFAULT_HAIKU_MODEL": "anthropic/claude-haiku-4.5",
"ANTHROPIC_DEFAULT_SONNET_MODEL": "anthropic/claude-sonnet-4.5",
"ANTHROPIC_DEFAULT_OPUS_MODEL": "anthropic/claude-opus-4.5"
}))
}
#[test]
fn test_anthropic_to_openai_simple() {
let provider = create_openrouter_provider();
let input = json!({
"model": "claude-3-opus",
"max_tokens": 1024,
"messages": [{"role": "user", "content": "Hello"}]
});
let result = anthropic_to_openai(input, &provider).unwrap();
// opus 模型映射到配置的 ANTHROPIC_DEFAULT_OPUS_MODEL
assert_eq!(result["model"], "anthropic/claude-opus-4.5");
assert_eq!(result["max_tokens"], 1024);
assert_eq!(result["messages"][0]["role"], "user");
assert_eq!(result["messages"][0]["content"], "Hello");
}
#[test]
fn test_anthropic_to_openai_with_system() {
let provider = create_openrouter_provider();
let input = json!({
"model": "claude-3-sonnet",
"max_tokens": 1024,
"system": "You are a helpful assistant.",
"messages": [{"role": "user", "content": "Hello"}]
});
let result = anthropic_to_openai(input, &provider).unwrap();
assert_eq!(result["messages"][0]["role"], "system");
assert_eq!(
result["messages"][0]["content"],
"You are a helpful assistant."
);
assert_eq!(result["messages"][1]["role"], "user");
}
#[test]
fn test_anthropic_to_openai_with_tools() {
let provider = create_openrouter_provider();
let input = json!({
"model": "claude-3-opus",
"max_tokens": 1024,
"messages": [{"role": "user", "content": "What's the weather?"}],
"tools": [{
"name": "get_weather",
"description": "Get weather info",
"input_schema": {"type": "object", "properties": {"location": {"type": "string"}}}
}]
});
let result = anthropic_to_openai(input, &provider).unwrap();
assert_eq!(result["tools"][0]["type"], "function");
assert_eq!(result["tools"][0]["function"]["name"], "get_weather");
}
#[test]
fn test_anthropic_to_openai_tool_use() {
let provider = create_openrouter_provider();
let input = json!({
"model": "claude-3-opus",
"max_tokens": 1024,
"messages": [{
"role": "assistant",
"content": [
{"type": "text", "text": "Let me check"},
{"type": "tool_use", "id": "call_123", "name": "get_weather", "input": {"location": "Tokyo"}}
]
}]
});
let result = anthropic_to_openai(input, &provider).unwrap();
let msg = &result["messages"][0];
assert_eq!(msg["role"], "assistant");
assert!(msg.get("tool_calls").is_some());
assert_eq!(msg["tool_calls"][0]["id"], "call_123");
}
#[test]
fn test_anthropic_to_openai_tool_result() {
let provider = create_openrouter_provider();
let input = json!({
"model": "claude-3-opus",
"max_tokens": 1024,
"messages": [{
"role": "user",
"content": [
{"type": "tool_result", "tool_use_id": "call_123", "content": "Sunny, 25°C"}
]
}]
});
let result = anthropic_to_openai(input, &provider).unwrap();
let msg = &result["messages"][0];
assert_eq!(msg["role"], "tool");
assert_eq!(msg["tool_call_id"], "call_123");
assert_eq!(msg["content"], "Sunny, 25°C");
}
#[test]
fn test_openai_to_anthropic_simple() {
let input = json!({
"id": "chatcmpl-123",
"object": "chat.completion",
"created": 1234567890,
"model": "gpt-4",
"choices": [{
"index": 0,
"message": {"role": "assistant", "content": "Hello!"},
"finish_reason": "stop"
}],
"usage": {"prompt_tokens": 10, "completion_tokens": 5, "total_tokens": 15}
});
let result = openai_to_anthropic(input).unwrap();
assert_eq!(result["id"], "chatcmpl-123");
assert_eq!(result["type"], "message");
assert_eq!(result["content"][0]["type"], "text");
assert_eq!(result["content"][0]["text"], "Hello!");
assert_eq!(result["stop_reason"], "end_turn");
assert_eq!(result["usage"]["input_tokens"], 10);
assert_eq!(result["usage"]["output_tokens"], 5);
}
#[test]
fn test_openai_to_anthropic_with_tool_calls() {
let input = json!({
"id": "chatcmpl-123",
"object": "chat.completion",
"created": 1234567890,
"model": "gpt-4",
"choices": [{
"index": 0,
"message": {
"role": "assistant",
"content": null,
"tool_calls": [{
"id": "call_123",
"type": "function",
"function": {"name": "get_weather", "arguments": "{\"location\": \"Tokyo\"}"}
}]
},
"finish_reason": "tool_calls"
}],
"usage": {"prompt_tokens": 10, "completion_tokens": 5, "total_tokens": 15}
});
let result = openai_to_anthropic(input).unwrap();
assert_eq!(result["content"][0]["type"], "tool_use");
assert_eq!(result["content"][0]["id"], "call_123");
assert_eq!(result["content"][0]["name"], "get_weather");
assert_eq!(result["content"][0]["input"]["location"], "Tokyo");
assert_eq!(result["stop_reason"], "tool_use");
}
#[test]
fn test_model_mapping_from_provider() {
let provider = create_openrouter_provider();
let body = json!({"model": "test"});
// sonnet 模型
assert_eq!(
get_model_from_provider("claude-sonnet-4-5-20250929", &provider, &body),
"anthropic/claude-sonnet-4.5"
);
// haiku 模型
assert_eq!(
get_model_from_provider("claude-haiku-4-5-20250929", &provider, &body),
"anthropic/claude-haiku-4.5"
);
// opus 模型
assert_eq!(
get_model_from_provider("claude-opus-4-5", &provider, &body),
"anthropic/claude-opus-4.5"
);
}
#[test]
fn test_anthropic_to_openai_model_mapping() {
let provider = create_openrouter_provider();
let input = json!({
"model": "claude-sonnet-4-5-20250929",
"max_tokens": 1024,
"messages": [{"role": "user", "content": "Hello"}]
});
let result = anthropic_to_openai(input, &provider).unwrap();
assert_eq!(result["model"], "anthropic/claude-sonnet-4.5");
}
#[test]
fn test_thinking_parameter_detection() {
let mut provider = create_openrouter_provider();
// 添加推理模型配置
if let Some(env) = provider.settings_config.get_mut("env") {
env["ANTHROPIC_REASONING_MODEL"] = json!("anthropic/claude-sonnet-4.5:extended");
}
let input = json!({
"model": "claude-sonnet-4-5",
"max_tokens": 1024,
"thinking": {"type": "enabled"},
"messages": [{"role": "user", "content": "Solve this problem"}]
});
let result = anthropic_to_openai(input, &provider).unwrap();
// 应该使用推理模型
assert_eq!(result["model"], "anthropic/claude-sonnet-4.5:extended");
}
#[test]
fn test_thinking_parameter_disabled() {
let mut provider = create_openrouter_provider();
if let Some(env) = provider.settings_config.get_mut("env") {
env["ANTHROPIC_REASONING_MODEL"] = json!("anthropic/claude-sonnet-4.5:extended");
}
let input = json!({
"model": "claude-sonnet-4-5",
"max_tokens": 1024,
"thinking": {"type": "disabled"},
"messages": [{"role": "user", "content": "Hello"}]
});
let result = anthropic_to_openai(input, &provider).unwrap();
// 应该使用普通模型
assert_eq!(result["model"], "anthropic/claude-sonnet-4.5");
}
}

View File

@@ -0,0 +1,10 @@
//! Anthropic ↔ OpenAI format-conversion module
//!
//! Provides bidirectional conversion between the Anthropic Messages API
//! and the OpenAI Chat Completions API.

mod request;
mod response;
pub mod streaming;

pub use request::AnthropicToOpenAITransformer;
pub use response::OpenAIToAnthropicTransformer;

View File

@@ -0,0 +1,397 @@
//! Anthropic → OpenAI 请求转换器
//!
//! 将 Anthropic Messages API 请求转换为 OpenAI Chat Completions API 格式
use crate::proxy::error::ProxyError;
use crate::proxy::transform::{format::ApiFormat, traits::FormatTransformer};
use bytes::Bytes;
use futures::stream::Stream;
use serde_json::{json, Value};
use std::pin::Pin;
/// Anthropic → OpenAI request transformer (stateless unit struct).
pub struct AnthropicToOpenAITransformer;

impl AnthropicToOpenAITransformer {
    /// Creates a new transformer instance.
    pub fn new() -> Self {
        Self
    }
}

impl Default for AnthropicToOpenAITransformer {
    fn default() -> Self {
        Self::new()
    }
}
impl FormatTransformer for AnthropicToOpenAITransformer {
    fn name(&self) -> &'static str {
        "Anthropic→OpenAI"
    }

    fn source_format(&self) -> ApiFormat {
        ApiFormat::Anthropic
    }

    fn target_format(&self) -> ApiFormat {
        ApiFormat::OpenAI
    }

    fn transform_request(&self, body: Value) -> Result<Value, ProxyError> {
        anthropic_to_openai(body)
    }

    fn transform_response(&self, body: Value) -> Result<Value, ProxyError> {
        // This is the request-direction transformer; responses pass through unchanged.
        Ok(body)
    }

    fn transform_stream(
        &self,
        _stream: Pin<Box<dyn Stream<Item = Result<Bytes, reqwest::Error>> + Send>>,
    ) -> Pin<Box<dyn Stream<Item = Result<Bytes, std::io::Error>> + Send>> {
        // The request-direction transformer does not handle streams.
        // NOTE(review): returning an empty stream would silently drop data if a
        // caller ever invoked this by mistake — confirm callers only use the
        // response-direction transformer for streaming.
        Box::pin(futures::stream::empty())
    }

    fn transform_endpoint(&self, endpoint: &str) -> String {
        // Map Anthropic's /v1/messages onto OpenAI's /v1/chat/completions;
        // any other endpoint is passed through unchanged.
        if endpoint == "/v1/messages" {
            "/v1/chat/completions".to_string()
        } else {
            endpoint.to_string()
        }
    }
}
/// Converts an Anthropic Messages API request body into an OpenAI
/// Chat Completions request body.
///
/// Copies over model, messages (system prompt flattened into system-role
/// messages), sampling parameters, and tools; unknown fields are dropped.
fn anthropic_to_openai(body: Value) -> Result<Value, ProxyError> {
    let mut result = json!({});

    // Model is passed through unchanged (model mapping is handled
    // independently by the model_mapper module).
    if let Some(model) = body.get("model") {
        result["model"] = model.clone();
    }

    let mut messages = Vec::new();

    // Handle the system prompt.
    if let Some(system) = body.get("system") {
        if let Some(text) = system.as_str() {
            // Single string form.
            messages.push(json!({"role": "system", "content": text}));
        } else if let Some(arr) = system.as_array() {
            // Array form: one system message per text block.
            for msg in arr {
                if let Some(text) = msg.get("text").and_then(|t| t.as_str()) {
                    messages.push(json!({"role": "system", "content": text}));
                }
            }
        }
    }

    // Convert conversation messages (a single Anthropic message may expand
    // into several OpenAI messages, e.g. tool_result blocks).
    if let Some(msgs) = body.get("messages").and_then(|m| m.as_array()) {
        for msg in msgs {
            let role = msg.get("role").and_then(|r| r.as_str()).unwrap_or("user");
            let content = msg.get("content");
            let converted = convert_message_to_openai(role, content)?;
            messages.extend(converted);
        }
    }
    result["messages"] = json!(messages);

    // Copy sampling / control parameters (stop_sequences is renamed to stop).
    if let Some(v) = body.get("max_tokens") {
        result["max_tokens"] = v.clone();
    }
    if let Some(v) = body.get("temperature") {
        result["temperature"] = v.clone();
    }
    if let Some(v) = body.get("top_p") {
        result["top_p"] = v.clone();
    }
    if let Some(v) = body.get("stop_sequences") {
        result["stop"] = v.clone();
    }
    if let Some(v) = body.get("stream") {
        result["stream"] = v.clone();
    }

    // Convert tools to OpenAI function-tool format (BatchTool is filtered out;
    // input_schema is cleaned of unsupported "format" values).
    if let Some(tools) = body.get("tools").and_then(|t| t.as_array()) {
        let openai_tools: Vec<Value> = tools
            .iter()
            .filter(|t| t.get("type").and_then(|v| v.as_str()) != Some("BatchTool"))
            .map(|t| {
                json!({
                    "type": "function",
                    "function": {
                        "name": t.get("name").and_then(|n| n.as_str()).unwrap_or(""),
                        "description": t.get("description"),
                        "parameters": clean_schema(t.get("input_schema").cloned().unwrap_or(json!({})))
                    }
                })
            })
            .collect();
        if !openai_tools.is_empty() {
            result["tools"] = json!(openai_tools);
        }
    }
    if let Some(v) = body.get("tool_choice") {
        result["tool_choice"] = v.clone();
    }

    Ok(result)
}
/// Converts a single Anthropic message into OpenAI format.
///
/// One Anthropic message can expand into several OpenAI messages: each
/// `tool_result` block is emitted as its own `role: "tool"` message
/// (pushed before the message built from the remaining blocks), since
/// OpenAI models tool results as separate messages.
fn convert_message_to_openai(
    role: &str,
    content: Option<&Value>,
) -> Result<Vec<Value>, ProxyError> {
    let mut result = Vec::new();
    let content = match content {
        Some(c) => c,
        None => {
            // No content at all → single message with null content.
            result.push(json!({"role": role, "content": null}));
            return Ok(result);
        }
    };
    // Plain string content maps one-to-one.
    if let Some(text) = content.as_str() {
        result.push(json!({"role": role, "content": text}));
        return Ok(result);
    }
    // Array content (multimodal blocks / tool interactions).
    if let Some(blocks) = content.as_array() {
        let mut content_parts = Vec::new();
        let mut tool_calls = Vec::new();
        for block in blocks {
            let block_type = block.get("type").and_then(|t| t.as_str()).unwrap_or("");
            match block_type {
                "text" => {
                    if let Some(text) = block.get("text").and_then(|t| t.as_str()) {
                        content_parts.push(json!({"type": "text", "text": text}));
                    }
                }
                "image" => {
                    // Base64 image source → OpenAI data-URL image part.
                    if let Some(source) = block.get("source") {
                        let media_type = source
                            .get("media_type")
                            .and_then(|m| m.as_str())
                            .unwrap_or("image/png");
                        let data = source.get("data").and_then(|d| d.as_str()).unwrap_or("");
                        content_parts.push(json!({
                            "type": "image_url",
                            "image_url": {"url": format!("data:{};base64,{}", media_type, data)}
                        }));
                    }
                }
                "tool_use" => {
                    // Collected into the message's tool_calls array below;
                    // `input` is serialized to a JSON string as OpenAI expects.
                    let id = block.get("id").and_then(|i| i.as_str()).unwrap_or("");
                    let name = block.get("name").and_then(|n| n.as_str()).unwrap_or("");
                    let input = block.get("input").cloned().unwrap_or(json!({}));
                    tool_calls.push(json!({
                        "id": id,
                        "type": "function",
                        "function": {
                            "name": name,
                            "arguments": serde_json::to_string(&input).unwrap_or_default()
                        }
                    }));
                }
                "tool_result" => {
                    // tool_result becomes a standalone `tool`-role message.
                    let tool_use_id = block
                        .get("tool_use_id")
                        .and_then(|i| i.as_str())
                        .unwrap_or("");
                    let content_val = block.get("content");
                    // Non-string result payloads are serialized to JSON text.
                    let content_str = match content_val {
                        Some(Value::String(s)) => s.clone(),
                        Some(v) => serde_json::to_string(v).unwrap_or_default(),
                        None => String::new(),
                    };
                    result.push(json!({
                        "role": "tool",
                        "tool_call_id": tool_use_id,
                        "content": content_str
                    }));
                }
                "thinking" => {
                    // Thinking blocks have no OpenAI equivalent; skipped.
                }
                _ => {}
            }
        }
        // Emit the message carrying the content parts and/or tool calls.
        if !content_parts.is_empty() || !tool_calls.is_empty() {
            let mut msg = json!({"role": role});
            // A single text part collapses to a plain string; multiple parts
            // (or a single non-text part) stay as a parts array.
            if content_parts.is_empty() {
                msg["content"] = Value::Null;
            } else if content_parts.len() == 1 {
                if let Some(text) = content_parts[0].get("text") {
                    msg["content"] = text.clone();
                } else {
                    msg["content"] = json!(content_parts);
                }
            } else {
                msg["content"] = json!(content_parts);
            }
            if !tool_calls.is_empty() {
                msg["tool_calls"] = json!(tool_calls);
            }
            result.push(msg);
        }
        return Ok(result);
    }
    // Any other content shape is passed through unchanged.
    result.push(json!({"role": role, "content": content}));
    Ok(result)
}
/// Cleans a JSON schema for OpenAI compatibility by stripping the
/// unsupported `"format": "uri"` annotation, recursing through
/// `properties` and `items`.
fn clean_schema(mut schema: Value) -> Value {
    clean_schema_in_place(&mut schema);
    schema
}

/// In-place worker for `clean_schema`; avoids cloning every nested
/// sub-schema on each level of recursion.
fn clean_schema_in_place(schema: &mut Value) {
    if let Some(obj) = schema.as_object_mut() {
        // Remove "format": "uri".
        if obj.get("format").and_then(|v| v.as_str()) == Some("uri") {
            obj.remove("format");
        }
        // Recurse into nested object properties.
        if let Some(properties) = obj.get_mut("properties").and_then(|v| v.as_object_mut()) {
            for (_, value) in properties.iter_mut() {
                clean_schema_in_place(value);
            }
        }
        // Recurse into array item schemas.
        if let Some(items) = obj.get_mut("items") {
            clean_schema_in_place(items);
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Basic request: model, max_tokens and a plain string message map 1:1.
    #[test]
    fn test_anthropic_to_openai_simple() {
        let input = json!({
            "model": "claude-3-opus",
            "max_tokens": 1024,
            "messages": [{"role": "user", "content": "Hello"}]
        });
        let result = anthropic_to_openai(input).unwrap();
        assert_eq!(result["model"], "claude-3-opus");
        assert_eq!(result["max_tokens"], 1024);
        assert_eq!(result["messages"][0]["role"], "user");
        assert_eq!(result["messages"][0]["content"], "Hello");
    }
    // The Anthropic top-level `system` string becomes the first OpenAI
    // system message, ahead of the user messages.
    #[test]
    fn test_anthropic_to_openai_with_system() {
        let input = json!({
            "model": "claude-3-sonnet",
            "max_tokens": 1024,
            "system": "You are a helpful assistant.",
            "messages": [{"role": "user", "content": "Hello"}]
        });
        let result = anthropic_to_openai(input).unwrap();
        assert_eq!(result["messages"][0]["role"], "system");
        assert_eq!(
            result["messages"][0]["content"],
            "You are a helpful assistant."
        );
        assert_eq!(result["messages"][1]["role"], "user");
    }
    // Anthropic tool definitions are wrapped in OpenAI's function envelope.
    #[test]
    fn test_anthropic_to_openai_with_tools() {
        let input = json!({
            "model": "claude-3-opus",
            "max_tokens": 1024,
            "messages": [{"role": "user", "content": "What's the weather?"}],
            "tools": [{
                "name": "get_weather",
                "description": "Get weather info",
                "input_schema": {"type": "object", "properties": {"location": {"type": "string"}}}
            }]
        });
        let result = anthropic_to_openai(input).unwrap();
        assert_eq!(result["tools"][0]["type"], "function");
        assert_eq!(result["tools"][0]["function"]["name"], "get_weather");
    }
    // tool_use blocks become entries in the assistant message's tool_calls.
    #[test]
    fn test_anthropic_to_openai_tool_use() {
        let input = json!({
            "model": "claude-3-opus",
            "max_tokens": 1024,
            "messages": [{
                "role": "assistant",
                "content": [
                    {"type": "text", "text": "Let me check"},
                    {"type": "tool_use", "id": "call_123", "name": "get_weather", "input": {"location": "Tokyo"}}
                ]
            }]
        });
        let result = anthropic_to_openai(input).unwrap();
        let msg = &result["messages"][0];
        assert_eq!(msg["role"], "assistant");
        assert!(msg.get("tool_calls").is_some());
        assert_eq!(msg["tool_calls"][0]["id"], "call_123");
    }
    // A tool_result block becomes a standalone tool-role message.
    #[test]
    fn test_anthropic_to_openai_tool_result() {
        let input = json!({
            "model": "claude-3-opus",
            "max_tokens": 1024,
            "messages": [{
                "role": "user",
                "content": [
                    {"type": "tool_result", "tool_use_id": "call_123", "content": "Sunny, 25°C"}
                ]
            }]
        });
        let result = anthropic_to_openai(input).unwrap();
        let msg = &result["messages"][0];
        assert_eq!(msg["role"], "tool");
        assert_eq!(msg["tool_call_id"], "call_123");
        assert_eq!(msg["content"], "Sunny, 25°C");
    }
    // Only /v1/messages is rewritten; any other endpoint passes through.
    #[test]
    fn test_transform_endpoint() {
        let transformer = AnthropicToOpenAITransformer::new();
        assert_eq!(
            transformer.transform_endpoint("/v1/messages"),
            "/v1/chat/completions"
        );
        assert_eq!(transformer.transform_endpoint("/v1/other"), "/v1/other");
    }
}

View File

@@ -0,0 +1,237 @@
//! OpenAI → Anthropic 响应转换器
//!
//! 将 OpenAI Chat Completions API 响应转换为 Anthropic Messages API 格式
use crate::proxy::error::ProxyError;
use crate::proxy::transform::{format::ApiFormat, traits::FormatTransformer};
use bytes::Bytes;
use futures::stream::Stream;
use serde_json::{json, Value};
use std::pin::Pin;
use super::streaming::create_anthropic_sse_stream;
/// OpenAI → Anthropic response transformer (stateless unit struct).
#[derive(Default)]
pub struct OpenAIToAnthropicTransformer;

impl OpenAIToAnthropicTransformer {
    /// Creates a new transformer instance.
    pub fn new() -> Self {
        Self::default()
    }
}
impl FormatTransformer for OpenAIToAnthropicTransformer {
    fn name(&self) -> &'static str {
        "OpenAI→Anthropic"
    }

    fn source_format(&self) -> ApiFormat {
        ApiFormat::OpenAI
    }

    fn target_format(&self) -> ApiFormat {
        ApiFormat::Anthropic
    }

    /// Requests are not this transformer's direction; pass them through.
    fn transform_request(&self, body: Value) -> Result<Value, ProxyError> {
        Ok(body)
    }

    /// Converts an OpenAI chat completion into an Anthropic message.
    fn transform_response(&self, body: Value) -> Result<Value, ProxyError> {
        openai_to_anthropic(body)
    }

    /// Re-emits the upstream OpenAI SSE stream as Anthropic SSE events.
    fn transform_stream(
        &self,
        stream: Pin<Box<dyn Stream<Item = Result<Bytes, reqwest::Error>> + Send>>,
    ) -> Pin<Box<dyn Stream<Item = Result<Bytes, std::io::Error>> + Send>> {
        Box::pin(create_anthropic_sse_stream(stream))
    }
}
/// Converts an OpenAI chat completion response body into an Anthropic
/// Messages API response.
///
/// Only the first choice is used; text content and tool calls become
/// Anthropic content blocks, `finish_reason` is mapped to `stop_reason`,
/// and token usage is carried over.
fn openai_to_anthropic(body: Value) -> Result<Value, ProxyError> {
    let first_choice = body
        .get("choices")
        .and_then(|c| c.as_array())
        .ok_or_else(|| ProxyError::TransformError("No choices in response".to_string()))?
        .first()
        .ok_or_else(|| ProxyError::TransformError("Empty choices array".to_string()))?;
    let message = first_choice
        .get("message")
        .ok_or_else(|| ProxyError::TransformError("No message in choice".to_string()))?;

    let mut content_blocks: Vec<Value> = Vec::new();

    // Text content becomes a single text block (empty strings are dropped).
    match message.get("content").and_then(|c| c.as_str()) {
        Some(text) if !text.is_empty() => {
            content_blocks.push(json!({"type": "text", "text": text}));
        }
        _ => {}
    }

    // Each OpenAI tool call becomes an Anthropic tool_use block.
    if let Some(calls) = message.get("tool_calls").and_then(|t| t.as_array()) {
        for tc in calls {
            let fallback = json!({});
            let func = tc.get("function").unwrap_or(&fallback);
            let args_str = func
                .get("arguments")
                .and_then(|a| a.as_str())
                .unwrap_or("{}");
            // Fail loudly on malformed arguments JSON instead of silently
            // substituting an empty object.
            let input: Value = serde_json::from_str(args_str).map_err(|e| {
                log::error!("[Transform] tool_calls.arguments 解析失败: {e}, 原始内容: {args_str}");
                ProxyError::TransformError(format!(
                    "Failed to parse tool_calls.arguments: {e}, content: {args_str}"
                ))
            })?;
            content_blocks.push(json!({
                "type": "tool_use",
                "id": tc.get("id").and_then(|i| i.as_str()).unwrap_or(""),
                "name": func.get("name").and_then(|n| n.as_str()).unwrap_or(""),
                "input": input
            }));
        }
    }

    // finish_reason → stop_reason (unknown values pass through unchanged).
    let stop_reason = first_choice
        .get("finish_reason")
        .and_then(|r| r.as_str())
        .map(|reason| match reason {
            "stop" => "end_turn",
            "length" => "max_tokens",
            "tool_calls" => "tool_use",
            other => other,
        });

    // Token usage; missing counters default to zero.
    let usage = body.get("usage").cloned().unwrap_or(json!({}));
    let token_count =
        |key: &str| -> u32 { usage.get(key).and_then(|v| v.as_u64()).unwrap_or(0) as u32 };

    Ok(json!({
        "id": body.get("id").and_then(|i| i.as_str()).unwrap_or(""),
        "type": "message",
        "role": "assistant",
        "content": content_blocks,
        "model": body.get("model").and_then(|m| m.as_str()).unwrap_or(""),
        "stop_reason": stop_reason,
        "stop_sequence": null,
        "usage": {
            "input_tokens": token_count("prompt_tokens"),
            "output_tokens": token_count("completion_tokens")
        }
    }))
}
#[cfg(test)]
mod tests {
    use super::*;
    // Plain text completion: id, content, stop_reason and usage all mapped.
    #[test]
    fn test_openai_to_anthropic_simple() {
        let input = json!({
            "id": "chatcmpl-123",
            "object": "chat.completion",
            "created": 1234567890,
            "model": "gpt-4",
            "choices": [{
                "index": 0,
                "message": {"role": "assistant", "content": "Hello!"},
                "finish_reason": "stop"
            }],
            "usage": {"prompt_tokens": 10, "completion_tokens": 5, "total_tokens": 15}
        });
        let result = openai_to_anthropic(input).unwrap();
        assert_eq!(result["id"], "chatcmpl-123");
        assert_eq!(result["type"], "message");
        assert_eq!(result["content"][0]["type"], "text");
        assert_eq!(result["content"][0]["text"], "Hello!");
        assert_eq!(result["stop_reason"], "end_turn");
        assert_eq!(result["usage"]["input_tokens"], 10);
        assert_eq!(result["usage"]["output_tokens"], 5);
    }
    // OpenAI tool_calls become tool_use blocks with parsed JSON input.
    #[test]
    fn test_openai_to_anthropic_with_tool_calls() {
        let input = json!({
            "id": "chatcmpl-123",
            "object": "chat.completion",
            "created": 1234567890,
            "model": "gpt-4",
            "choices": [{
                "index": 0,
                "message": {
                    "role": "assistant",
                    "content": null,
                    "tool_calls": [{
                        "id": "call_123",
                        "type": "function",
                        "function": {"name": "get_weather", "arguments": "{\"location\": \"Tokyo\"}"}
                    }]
                },
                "finish_reason": "tool_calls"
            }],
            "usage": {"prompt_tokens": 10, "completion_tokens": 5, "total_tokens": 15}
        });
        let result = openai_to_anthropic(input).unwrap();
        assert_eq!(result["content"][0]["type"], "tool_use");
        assert_eq!(result["content"][0]["id"], "call_123");
        assert_eq!(result["content"][0]["name"], "get_weather");
        assert_eq!(result["content"][0]["input"]["location"], "Tokyo");
        assert_eq!(result["stop_reason"], "tool_use");
    }
    // Each OpenAI finish_reason maps to its Anthropic stop_reason.
    #[test]
    fn test_stop_reason_mapping() {
        // stop → end_turn
        let input = json!({
            "choices": [{"message": {"content": "Hi"}, "finish_reason": "stop"}],
            "usage": {}
        });
        let result = openai_to_anthropic(input).unwrap();
        assert_eq!(result["stop_reason"], "end_turn");
        // length → max_tokens
        let input = json!({
            "choices": [{"message": {"content": "Hi"}, "finish_reason": "length"}],
            "usage": {}
        });
        let result = openai_to_anthropic(input).unwrap();
        assert_eq!(result["stop_reason"], "max_tokens");
        // tool_calls → tool_use
        let input = json!({
            "choices": [{"message": {"content": null, "tool_calls": []}, "finish_reason": "tool_calls"}],
            "usage": {}
        });
        let result = openai_to_anthropic(input).unwrap();
        assert_eq!(result["stop_reason"], "tool_use");
    }
}

View File

@@ -6,6 +6,7 @@ use bytes::Bytes;
use futures::stream::{Stream, StreamExt};
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::collections::HashMap;
/// OpenAI 流式响应数据结构
#[derive(Debug, Deserialize)]
@@ -73,7 +74,8 @@ pub fn create_anthropic_sse_stream(
let mut content_index = 0;
let mut has_sent_message_start = false;
let mut current_block_type: Option<String> = None;
let mut tool_call_id = None;
// 使用 HashMap 按 index 管理多个工具调用的 ID 和 content_index
let mut tool_calls_map: HashMap<usize, (String, usize)> = HashMap::new();
tokio::pin!(stream);
@@ -94,17 +96,17 @@ pub fn create_anthropic_sse_stream(
for l in line.lines() {
if let Some(data) = l.strip_prefix("data: ") {
if data.trim() == "[DONE]" {
log::debug!("[Claude/OpenRouter] <<< OpenAI SSE: [DONE]");
log::debug!("[Transform] <<< OpenAI SSE: [DONE]");
let event = json!({"type": "message_stop"});
let sse_data = format!("event: message_stop\ndata: {}\n\n",
serde_json::to_string(&event).unwrap_or_default());
log::debug!("[Claude/OpenRouter] >>> Anthropic SSE: message_stop");
log::debug!("[Transform] >>> Anthropic SSE: message_stop");
yield Ok(Bytes::from(sse_data));
continue;
}
if let Ok(chunk) = serde_json::from_str::<OpenAIStreamChunk>(data) {
log::debug!("[Claude/OpenRouter] <<< SSE chunk received");
log::debug!("[Transform] <<< SSE chunk received");
if message_id.is_none() {
message_id = Some(chunk.id.clone());
@@ -210,7 +212,11 @@ pub fn create_anthropic_sse_stream(
// 处理工具调用
if let Some(tool_calls) = &choice.delta.tool_calls {
for tool_call in tool_calls {
let tc_index = tool_call.index;
// 检查是否是新的工具调用(有 id 表示开始新的工具调用)
if let Some(id) = &tool_call.id {
// 关闭当前的 content block如果有
if current_block_type.is_some() {
let event = json!({
"type": "content_block_stop",
@@ -222,30 +228,44 @@ pub fn create_anthropic_sse_stream(
content_index += 1;
}
tool_call_id = Some(id.clone());
// 记录这个工具调用的 ID 和对应的 content_index
tool_calls_map.insert(tc_index, (id.clone(), content_index));
current_block_type = Some("tool_use".to_string());
}
// 获取当前工具调用的信息
let (tool_id, tool_content_index) = tool_calls_map
.get(&tc_index)
.cloned()
.unwrap_or_else(|| {
log::warn!(
"[Transform] 收到未知 index 的工具调用 delta: {tc_index}"
);
(String::new(), content_index)
});
if let Some(function) = &tool_call.function {
// 如果有 name发送 content_block_start
if let Some(name) = &function.name {
let event = json!({
"type": "content_block_start",
"index": content_index,
"index": tool_content_index,
"content_block": {
"type": "tool_use",
"id": tool_call_id.clone().unwrap_or_default(),
"id": tool_id,
"name": name
}
});
let sse_data = format!("event: content_block_start\ndata: {}\n\n",
serde_json::to_string(&event).unwrap_or_default());
yield Ok(Bytes::from(sse_data));
current_block_type = Some("tool_use".to_string());
}
// 如果有 arguments发送 content_block_delta
if let Some(args) = &function.arguments {
let event = json!({
"type": "content_block_delta",
"index": content_index,
"index": tool_content_index,
"delta": {
"type": "input_json_delta",
"partial_json": args

View File

@@ -0,0 +1,247 @@
//! 格式转换配置
//!
//! 从 Provider 配置中提取格式转换设置
use super::format::ApiFormat;
use crate::provider::Provider;
use serde::{Deserialize, Serialize};
/// Format transformation configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TransformConfig {
    /// Whether format transformation is enabled.
    pub enabled: bool,
    /// Source format (the format the client sends).
    pub source_format: ApiFormat,
    /// Target format (the format the upstream service expects).
    pub target_format: ApiFormat,
    /// Whether to transform streaming responses.
    pub transform_streaming: bool,
}
impl Default for TransformConfig {
fn default() -> Self {
Self {
enabled: false,
source_format: ApiFormat::Anthropic,
target_format: ApiFormat::OpenAI,
transform_streaming: true,
}
}
}
impl TransformConfig {
/// 从 Provider 配置中提取转换配置
///
/// 优先级:
/// 1. ProviderMeta.format_transform新配置格式通过前端 UI 设置)
/// 2. settings_config.format_transform兼容旧配置
/// 3. settings_config.openrouter_compat_mode兼容旧配置
///
/// 注意:如果格式解析失败,将禁用转换并记录警告,而不是静默回退到默认值
pub fn from_provider(provider: &Provider) -> Self {
// 1. 优先从 ProviderMeta 读取(前端 UI 设置的配置)
if let Some(meta) = &provider.meta {
if let Some(ft) = &meta.format_transform {
if ft.enabled {
let source_str = ft.source_format.as_deref();
let target_str = ft.target_format.as_deref();
let source_format = source_str.and_then(ApiFormat::from_str);
let target_format = target_str.and_then(ApiFormat::from_str);
// 如果格式解析失败,禁用转换并记录警告
if source_str.is_some() && source_format.is_none() {
log::warn!(
"[TransformConfig] 无法解析 source_format: {source_str:?},禁用格式转换"
);
return Self::default();
}
if target_str.is_some() && target_format.is_none() {
log::warn!(
"[TransformConfig] 无法解析 target_format: {target_str:?},禁用格式转换"
);
return Self::default();
}
let transform_streaming = ft.transform_streaming.unwrap_or(true);
return Self {
enabled: true,
source_format: source_format.unwrap_or(ApiFormat::Anthropic),
target_format: target_format.unwrap_or(ApiFormat::OpenAI),
transform_streaming,
};
}
}
}
let settings = &provider.settings_config;
// 2. 检查是否显式启用格式转换settings_config 中的配置)
let format_transform = settings.get("format_transform").and_then(|v| v.as_object());
if let Some(config) = format_transform {
let enabled = config
.get("enabled")
.and_then(|v| v.as_bool())
.unwrap_or(false);
if enabled {
let source_str = config.get("source_format").and_then(|v| v.as_str());
let target_str = config.get("target_format").and_then(|v| v.as_str());
let source_format = source_str.and_then(ApiFormat::from_str);
let target_format = target_str.and_then(ApiFormat::from_str);
// 如果格式解析失败,禁用转换并记录警告
if source_str.is_some() && source_format.is_none() {
log::warn!(
"[TransformConfig] 无法解析 source_format: {source_str:?},禁用格式转换"
);
return Self::default();
}
if target_str.is_some() && target_format.is_none() {
log::warn!(
"[TransformConfig] 无法解析 target_format: {target_str:?},禁用格式转换"
);
return Self::default();
}
let transform_streaming = config
.get("transform_streaming")
.and_then(|v| v.as_bool())
.unwrap_or(true);
return Self {
enabled,
source_format: source_format.unwrap_or(ApiFormat::Anthropic),
target_format: target_format.unwrap_or(ApiFormat::OpenAI),
transform_streaming,
};
}
}
// 3. 兼容旧配置:检查 openrouter_compat_mode
let legacy_enabled = settings
.get("openrouter_compat_mode")
.and_then(|v| match v {
serde_json::Value::Bool(b) => Some(*b),
serde_json::Value::Number(n) => Some(n.as_i64().unwrap_or(0) != 0),
serde_json::Value::String(s) => {
let normalized = s.trim().to_lowercase();
Some(normalized == "true" || normalized == "1")
}
_ => None,
})
.unwrap_or(false);
if legacy_enabled {
return Self {
enabled: true,
source_format: ApiFormat::Anthropic,
target_format: ApiFormat::OpenAI,
transform_streaming: true,
};
}
Self::default()
}
/// 检查是否需要转换
pub fn needs_transform(&self) -> bool {
self.enabled && self.source_format != self.target_format
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;
    // Builds a minimal Provider carrying only the given settings_config.
    fn create_provider(settings: serde_json::Value) -> Provider {
        Provider {
            id: "test".to_string(),
            name: "Test Provider".to_string(),
            settings_config: settings,
            website_url: None,
            category: None,
            created_at: None,
            sort_index: None,
            notes: None,
            meta: None,
            icon: None,
            icon_color: None,
            in_failover_queue: false,
        }
    }
    // Empty settings → transformation disabled.
    #[test]
    fn test_default_config() {
        let provider = create_provider(json!({}));
        let config = TransformConfig::from_provider(&provider);
        assert!(!config.enabled);
        assert!(!config.needs_transform());
    }
    // settings_config.format_transform is honored in full.
    #[test]
    fn test_new_format_config() {
        let provider = create_provider(json!({
            "format_transform": {
                "enabled": true,
                "source_format": "anthropic",
                "target_format": "openai",
                "transform_streaming": true
            }
        }));
        let config = TransformConfig::from_provider(&provider);
        assert!(config.enabled);
        assert_eq!(config.source_format, ApiFormat::Anthropic);
        assert_eq!(config.target_format, ApiFormat::OpenAI);
        assert!(config.transform_streaming);
        assert!(config.needs_transform());
    }
    // Legacy flag as a boolean enables Anthropic → OpenAI transformation.
    #[test]
    fn test_legacy_openrouter_compat_mode_bool() {
        let provider = create_provider(json!({
            "openrouter_compat_mode": true
        }));
        let config = TransformConfig::from_provider(&provider);
        assert!(config.enabled);
        assert_eq!(config.source_format, ApiFormat::Anthropic);
        assert_eq!(config.target_format, ApiFormat::OpenAI);
    }
    // Legacy flag as the string "true" is also accepted.
    #[test]
    fn test_legacy_openrouter_compat_mode_string() {
        let provider = create_provider(json!({
            "openrouter_compat_mode": "true"
        }));
        let config = TransformConfig::from_provider(&provider);
        assert!(config.enabled);
    }
    // Legacy flag as a non-zero number is also accepted.
    #[test]
    fn test_legacy_openrouter_compat_mode_number() {
        let provider = create_provider(json!({
            "openrouter_compat_mode": 1
        }));
        let config = TransformConfig::from_provider(&provider);
        assert!(config.enabled);
    }
    // Enabled but source == target → no transformation is needed.
    #[test]
    fn test_same_format_no_transform() {
        let provider = create_provider(json!({
            "format_transform": {
                "enabled": true,
                "source_format": "anthropic",
                "target_format": "anthropic"
            }
        }));
        let config = TransformConfig::from_provider(&provider);
        assert!(config.enabled);
        assert!(!config.needs_transform()); // same format on both sides
    }
}

View File

@@ -0,0 +1,67 @@
//! API 格式枚举定义
//!
//! 定义支持的 API 格式类型,用于格式转换配置
use serde::{Deserialize, Serialize};
/// API format enum.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ApiFormat {
    /// Anthropic Messages API
    Anthropic,
    /// OpenAI Chat Completions API
    OpenAI,
    /// Google Gemini API (reserved for future use)
    Gemini,
}
impl ApiFormat {
/// 从字符串解析
pub fn from_str(s: &str) -> Option<Self> {
match s.to_lowercase().as_str() {
"anthropic" | "claude" => Some(Self::Anthropic),
"openai" | "codex" => Some(Self::OpenAI),
"gemini" | "google" => Some(Self::Gemini),
_ => None,
}
}
/// 转换为字符串
pub fn as_str(&self) -> &'static str {
match self {
Self::Anthropic => "anthropic",
Self::OpenAI => "openai",
Self::Gemini => "gemini",
}
}
}
impl std::fmt::Display for ApiFormat {
    /// Formats using the canonical lowercase name from `as_str`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Covers canonical names, product aliases and the unknown-input case.
    #[test]
    fn test_from_str() {
        assert_eq!(ApiFormat::from_str("anthropic"), Some(ApiFormat::Anthropic));
        assert_eq!(ApiFormat::from_str("claude"), Some(ApiFormat::Anthropic));
        assert_eq!(ApiFormat::from_str("openai"), Some(ApiFormat::OpenAI));
        assert_eq!(ApiFormat::from_str("codex"), Some(ApiFormat::OpenAI));
        assert_eq!(ApiFormat::from_str("gemini"), Some(ApiFormat::Gemini));
        assert_eq!(ApiFormat::from_str("google"), Some(ApiFormat::Gemini));
        assert_eq!(ApiFormat::from_str("unknown"), None);
    }
    // Round-trip: each variant stringifies to its canonical name.
    #[test]
    fn test_as_str() {
        assert_eq!(ApiFormat::Anthropic.as_str(), "anthropic");
        assert_eq!(ApiFormat::OpenAI.as_str(), "openai");
        assert_eq!(ApiFormat::Gemini.as_str(), "gemini");
    }
}

View File

@@ -0,0 +1,36 @@
//! Generic format transformation module.
//!
//! Provides bidirectional conversion between API formats, supporting:
//! - Anthropic ↔ OpenAI
//! - Gemini ↔ OpenAI (reserved)
//!
//! ## Usage
//!
//! ```rust,ignore
//! use crate::proxy::transform::{config::TransformConfig, registry::get_transformer};
//!
//! let config = TransformConfig::from_provider(&provider);
//! if config.needs_transform() {
//!     if let Some(transformer) = get_transformer(config.source_format, config.target_format) {
//!         let transformed = transformer.transform_request(body)?;
//!     }
//! }
//! ```
pub mod anthropic_openai;
pub mod config;
pub mod format;
pub mod registry;
pub mod traits;
// Public re-exports
pub use config::TransformConfig;
pub use registry::get_transformer;
// Exported for external modules (e.g. when adding new transformers)
#[allow(unused_imports)]
pub use format::ApiFormat;
#[allow(unused_imports)]
pub use registry::TRANSFORMER_REGISTRY;
#[allow(unused_imports)]
pub use traits::{BidirectionalTransformer, FormatTransformer};

View File

@@ -0,0 +1,95 @@
//! 转换器注册表
//!
//! 管理和获取格式转换器
use super::{format::ApiFormat, traits::FormatTransformer};
use std::collections::HashMap;
use std::sync::{Arc, LazyLock};
/// Registry of format transformers, keyed by (source, target) format pair.
pub struct TransformerRegistry {
    transformers: HashMap<(ApiFormat, ApiFormat), Arc<dyn FormatTransformer>>,
}
impl TransformerRegistry {
/// 创建新的注册表
pub fn new() -> Self {
let mut registry = Self {
transformers: HashMap::new(),
};
registry.register_defaults();
registry
}
/// 注册默认转换器
fn register_defaults(&mut self) {
use super::anthropic_openai::{AnthropicToOpenAITransformer, OpenAIToAnthropicTransformer};
// Anthropic → OpenAI
self.register(Arc::new(AnthropicToOpenAITransformer::new()));
// OpenAI → Anthropic
self.register(Arc::new(OpenAIToAnthropicTransformer::new()));
}
/// 注册转换器
pub fn register(&mut self, transformer: Arc<dyn FormatTransformer>) {
let key = (transformer.source_format(), transformer.target_format());
self.transformers.insert(key, transformer);
}
/// 获取转换器
pub fn get(&self, source: ApiFormat, target: ApiFormat) -> Option<Arc<dyn FormatTransformer>> {
self.transformers.get(&(source, target)).cloned()
}
/// 检查是否支持指定的转换
#[cfg(test)]
pub fn supports(&self, source: ApiFormat, target: ApiFormat) -> bool {
self.transformers.contains_key(&(source, target))
}
}
impl Default for TransformerRegistry {
    /// Equivalent to [`TransformerRegistry::new`]: a registry with the
    /// built-in transformers registered.
    fn default() -> Self {
        Self::new()
    }
}
/// Global transformer registry, built lazily on first access.
pub static TRANSFORMER_REGISTRY: LazyLock<TransformerRegistry> =
    LazyLock::new(TransformerRegistry::new);
/// Convenience accessor over the global registry.
pub fn get_transformer(source: ApiFormat, target: ApiFormat) -> Option<Arc<dyn FormatTransformer>> {
    TRANSFORMER_REGISTRY.get(source, target)
}
#[cfg(test)]
mod tests {
    use super::*;
    // A fresh registry supports both built-in directions and nothing else.
    #[test]
    fn test_registry_has_default_transformers() {
        let registry = TransformerRegistry::new();
        // Anthropic → OpenAI
        assert!(registry.supports(ApiFormat::Anthropic, ApiFormat::OpenAI));
        // OpenAI → Anthropic
        assert!(registry.supports(ApiFormat::OpenAI, ApiFormat::Anthropic));
        // Unsupported conversion pair
        assert!(!registry.supports(ApiFormat::Gemini, ApiFormat::OpenAI));
    }
    // The global accessor returns a transformer with matching formats.
    #[test]
    fn test_get_transformer() {
        let transformer = get_transformer(ApiFormat::Anthropic, ApiFormat::OpenAI);
        assert!(transformer.is_some());
        let t = transformer.unwrap();
        assert_eq!(t.source_format(), ApiFormat::Anthropic);
        assert_eq!(t.target_format(), ApiFormat::OpenAI);
    }
}

View File

@@ -0,0 +1,47 @@
//! 格式转换器 Trait 定义
//!
//! 定义通用的格式转换器接口
use super::format::ApiFormat;
use crate::proxy::error::ProxyError;
use bytes::Bytes;
use futures::stream::Stream;
use serde_json::Value;
use std::pin::Pin;
/// Format transformer trait.
pub trait FormatTransformer: Send + Sync {
    /// Transformer name (used for logging).
    #[allow(dead_code)]
    fn name(&self) -> &'static str;
    /// Source format.
    fn source_format(&self) -> ApiFormat;
    /// Target format.
    fn target_format(&self) -> ApiFormat;
    /// Transforms a request body.
    fn transform_request(&self, body: Value) -> Result<Value, ProxyError>;
    /// Transforms a non-streaming response body.
    fn transform_response(&self, body: Value) -> Result<Value, ProxyError>;
    /// Transforms a streaming response.
    fn transform_stream(
        &self,
        stream: Pin<Box<dyn Stream<Item = Result<Bytes, reqwest::Error>> + Send>>,
    ) -> Pin<Box<dyn Stream<Item = Result<Bytes, std::io::Error>> + Send>>;
    /// Returns the transformed endpoint path (identity by default).
    fn transform_endpoint(&self, endpoint: &str) -> String {
        endpoint.to_string()
    }
}
/// Bidirectional transformer trait (optional to implement).
#[allow(dead_code)]
pub trait BidirectionalTransformer: FormatTransformer {
    /// Returns the reverse-direction transformer.
    fn reverse(&self) -> Box<dyn FormatTransformer>;
}

View File

@@ -8,19 +8,33 @@ import {
Eye,
EyeOff,
X,
ArrowLeftRight,
} from "lucide-react";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import { Switch } from "@/components/ui/switch";
import { Button } from "@/components/ui/button";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { cn } from "@/lib/utils";
import type { ProviderTestConfig, ProviderProxyConfig } from "@/types";
import type {
ProviderTestConfig,
ProviderProxyConfig,
FormatTransformConfig,
} from "@/types";
interface ProviderAdvancedConfigProps {
testConfig: ProviderTestConfig;
proxyConfig: ProviderProxyConfig;
formatTransform?: FormatTransformConfig;
onTestConfigChange: (config: ProviderTestConfig) => void;
onProxyConfigChange: (config: ProviderProxyConfig) => void;
onFormatTransformChange?: (config: FormatTransformConfig) => void;
}
/** 从 ProviderProxyConfig 构建完整 URL */
@@ -71,14 +85,19 @@ function parseProxyUrl(url: string): Partial<ProviderProxyConfig> {
export function ProviderAdvancedConfig({
testConfig,
proxyConfig,
formatTransform,
onTestConfigChange,
onProxyConfigChange,
onFormatTransformChange,
}: ProviderAdvancedConfigProps) {
const { t } = useTranslation();
const [isTestConfigOpen, setIsTestConfigOpen] = useState(testConfig.enabled);
const [isProxyConfigOpen, setIsProxyConfigOpen] = useState(
proxyConfig.enabled,
);
const [isFormatTransformOpen, setIsFormatTransformOpen] = useState(
formatTransform?.enabled ?? false,
);
const [showPassword, setShowPassword] = useState(false);
// 代理 URL 输入状态(仅在初始化时从 proxyConfig 构建)
@@ -97,6 +116,11 @@ export function ProviderAdvancedConfig({
setIsProxyConfigOpen(proxyConfig.enabled);
}, [proxyConfig.enabled]);
// 同步外部 formatTransform.enabled 变化到展开状态
useEffect(() => {
setIsFormatTransformOpen(formatTransform?.enabled ?? false);
}, [formatTransform?.enabled]);
// 仅在外部 proxyConfig 变化且非用户输入时同步(如:重置表单、加载数据)
useEffect(() => {
if (!isUserTyping) {
@@ -450,6 +474,135 @@ export function ProviderAdvancedConfig({
</div>
</div>
</div>
{/* 格式转换配置 */}
{onFormatTransformChange && (
<div className="rounded-lg border border-border/50 bg-muted/20">
<button
type="button"
className="flex w-full items-center justify-between p-4 hover:bg-muted/30 transition-colors"
onClick={() => setIsFormatTransformOpen(!isFormatTransformOpen)}
>
<div className="flex items-center gap-3">
<ArrowLeftRight className="h-4 w-4 text-muted-foreground" />
<span className="font-medium">
{t("providerAdvanced.formatTransform")}
</span>
</div>
<div className="flex items-center gap-3">
<div
className="flex items-center gap-2"
onClick={(e) => e.stopPropagation()}
>
<Label
htmlFor="format-transform-enabled"
className="text-sm text-muted-foreground"
>
{t("providerAdvanced.enableFormatTransform")}
</Label>
<Switch
id="format-transform-enabled"
checked={formatTransform?.enabled ?? false}
onCheckedChange={(checked) => {
onFormatTransformChange({
...(formatTransform ?? { enabled: false }),
enabled: checked,
});
if (checked) setIsFormatTransformOpen(true);
}}
/>
</div>
{isFormatTransformOpen ? (
<ChevronDown className="h-4 w-4 text-muted-foreground" />
) : (
<ChevronRight className="h-4 w-4 text-muted-foreground" />
)}
</div>
</button>
<div
className={cn(
"overflow-hidden transition-all duration-200",
isFormatTransformOpen
? "max-h-[500px] opacity-100"
: "max-h-0 opacity-0",
)}
>
<div className="border-t border-border/50 p-4 space-y-4">
<p className="text-sm text-muted-foreground">
{t("providerAdvanced.formatTransformDesc")}
</p>
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
<div className="space-y-2">
<Label htmlFor="source-format">
{t("providerAdvanced.sourceFormat")}
</Label>
<Select
value={formatTransform?.sourceFormat ?? "anthropic"}
onValueChange={(value) =>
onFormatTransformChange({
...(formatTransform ?? { enabled: false }),
sourceFormat: value as "anthropic" | "openai",
})
}
disabled={!formatTransform?.enabled}
>
<SelectTrigger id="source-format">
<SelectValue />
</SelectTrigger>
<SelectContent>
<SelectItem value="anthropic">
Anthropic (Claude)
</SelectItem>
<SelectItem value="openai">OpenAI</SelectItem>
</SelectContent>
</Select>
</div>
<div className="space-y-2">
<Label htmlFor="target-format">
{t("providerAdvanced.targetFormat")}
</Label>
<Select
value={formatTransform?.targetFormat ?? "openai"}
onValueChange={(value) =>
onFormatTransformChange({
...(formatTransform ?? { enabled: false }),
targetFormat: value as "anthropic" | "openai",
})
}
disabled={!formatTransform?.enabled}
>
<SelectTrigger id="target-format">
<SelectValue />
</SelectTrigger>
<SelectContent>
<SelectItem value="anthropic">
Anthropic (Claude)
</SelectItem>
<SelectItem value="openai">OpenAI</SelectItem>
</SelectContent>
</Select>
</div>
</div>
<div className="flex items-center gap-2">
<Switch
id="transform-streaming"
checked={formatTransform?.transformStreaming ?? true}
onCheckedChange={(checked) =>
onFormatTransformChange({
...(formatTransform ?? { enabled: false }),
transformStreaming: checked,
})
}
disabled={!formatTransform?.enabled}
/>
<Label htmlFor="transform-streaming" className="text-sm">
{t("providerAdvanced.transformStreaming")}
</Label>
</div>
</div>
</div>
</div>
)}
</div>
);
}

View File

@@ -13,6 +13,7 @@ import type {
ProviderMeta,
ProviderTestConfig,
ProviderProxyConfig,
FormatTransformConfig,
} from "@/types";
import {
providerPresets,
@@ -168,6 +169,9 @@ export function ProviderForm({
const [proxyConfig, setProxyConfig] = useState<ProviderProxyConfig>(
() => initialData?.meta?.proxyConfig ?? { enabled: false },
);
const [formatTransform, setFormatTransform] = useState<FormatTransformConfig>(
() => initialData?.meta?.formatTransform ?? { enabled: false },
);
// 使用 category hook
const { category } = useProviderCategory({
@@ -940,6 +944,7 @@ export function ProviderForm({
// 添加高级配置
testConfig: testConfig.enabled ? testConfig : undefined,
proxyConfig: proxyConfig.enabled ? proxyConfig : undefined,
formatTransform: formatTransform.enabled ? formatTransform : undefined,
};
onSubmit(payload);
@@ -1464,8 +1469,10 @@ export function ProviderForm({
<ProviderAdvancedConfig
testConfig={testConfig}
proxyConfig={proxyConfig}
formatTransform={formatTransform}
onTestConfigChange={setTestConfig}
onProxyConfigChange={setProxyConfig}
onFormatTransformChange={setFormatTransform}
/>
{showButtons && (

View File

@@ -480,7 +480,13 @@
"useCustomProxy": "Use separate proxy",
"proxyConfigDesc": "Configure separate network proxy for this provider. Uses system proxy or global settings when disabled.",
"proxyUsername": "Username (optional)",
"proxyPassword": "Password (optional)"
"proxyPassword": "Password (optional)",
"formatTransform": "Format Transform",
"enableFormatTransform": "Enable Transform",
"formatTransformDesc": "Transform requests and responses between different API formats. Useful for providers using OpenAI-compatible interfaces.",
"sourceFormat": "Source Format (Client)",
"targetFormat": "Target Format (Upstream)",
"transformStreaming": "Transform Streaming Responses"
},
"codexConfig": {
"authJson": "auth.json (JSON) *",

View File

@@ -480,7 +480,13 @@
"useCustomProxy": "個別プロキシを使用",
"proxyConfigDesc": "このプロバイダーに個別のネットワークプロキシを設定します。無効の場合はシステムプロキシまたはグローバル設定を使用します。",
"proxyUsername": "ユーザー名(任意)",
"proxyPassword": "パスワード(任意)"
"proxyPassword": "パスワード(任意)",
"formatTransform": "フォーマット変換",
"enableFormatTransform": "変換を有効化",
"formatTransformDesc": "リクエストとレスポンスを異なる API フォーマット間で変換します。OpenAI 互換インターフェースを使用するプロバイダーに適しています。",
"sourceFormat": "ソースフォーマット(クライアント)",
"targetFormat": "ターゲットフォーマット(上流)",
"transformStreaming": "ストリーミングレスポンスを変換"
},
"codexConfig": {
"authJson": "auth.json (JSON) *",

View File

@@ -480,7 +480,13 @@
"useCustomProxy": "使用单独代理",
"proxyConfigDesc": "为此供应商配置单独的网络代理,不启用时使用系统代理或全局设置。",
"proxyUsername": "用户名(可选)",
"proxyPassword": "密码(可选)"
"proxyPassword": "密码(可选)",
"formatTransform": "格式转换",
"enableFormatTransform": "启用转换",
"formatTransformDesc": "将请求和响应在不同 API 格式之间转换。适用于使用 OpenAI 兼容接口的供应商。",
"sourceFormat": "源格式(客户端)",
"targetFormat": "目标格式(上游)",
"transformStreaming": "转换流式响应"
},
"codexConfig": {
"authJson": "auth.json (JSON) *",

View File

@@ -119,6 +119,18 @@ export interface ProviderProxyConfig {
proxyPassword?: string;
}
// Format transform configuration (for providers such as OpenRouter that need API format conversion)
export interface FormatTransformConfig {
  // Whether format transformation is enabled
  enabled: boolean;
  // Source format (client side): anthropic, openai, gemini
  sourceFormat?: "anthropic" | "openai" | "gemini";
  // Target format (upstream side): anthropic, openai, gemini
  targetFormat?: "anthropic" | "openai" | "gemini";
  // Whether to transform streaming responses (treated as true when undefined — the UI falls back to `?? true`; confirm the backend default matches)
  transformStreaming?: boolean;
}
// 供应商元数据(字段名与后端一致,保持 snake_case)
export interface ProviderMeta {
// 自定义端点:以 URL 为键,值为端点信息
@@ -135,6 +147,8 @@ export interface ProviderMeta {
testConfig?: ProviderTestConfig;
// 供应商单独的代理配置
proxyConfig?: ProviderProxyConfig;
// 格式转换配置(用于 OpenRouter 等需要 API 格式转换的供应商)
formatTransform?: FormatTransformConfig;
}
// Skill 同步方式