Compare commits

...

8 Commits

Author SHA1 Message Date
YoVinchen
a520b52d8f Merge branch 'main' into feat/proxy-full-url 2026-03-27 16:20:18 +08:00
YoVinchen
c718dd703b Merge branch 'main' into feat/proxy-full-url 2026-03-22 22:30:07 +08:00
YoVinchen
8c8b265ebf refactor(ui): refine full URL endpoint hint 2026-03-19 22:50:01 +08:00
YoVinchen
8a26a091a9 feat(codex): complete full URL support 2026-03-19 15:27:23 +08:00
YoVinchen
c2120dc8c3 fix(proxy): strip beta query when rewriting Claude endpoints 2026-03-19 01:25:01 +08:00
YoVinchen
69ab4a8a46 refactor(proxy): remove beta query handling 2026-03-18 22:54:49 +08:00
YoVinchen
cf945de997 Merge branch 'main' into feat/proxy-full-url 2026-03-18 22:54:08 +08:00
YoVinchen
eeda9adb03 feat(proxy): add full URL mode and refactor endpoint rewriting
- Add `isFullUrl` provider meta to treat base_url as complete API endpoint
- Remove hardcoded `?beta=true` from Claude adapter, pass through from client
- Refactor forwarder endpoint rewriting with proper query string handling
- Block provider switching when proxy is required but not running
- Add full URL toggle UI in endpoint field with i18n (zh/en/ja)
2026-03-16 14:22:54 +08:00
16 changed files with 454 additions and 103 deletions

View File

@@ -275,7 +275,9 @@ pub struct ProviderMeta {
/// Claude 认证字段名("ANTHROPIC_AUTH_TOKEN" 或 "ANTHROPIC_API_KEY" /// Claude 认证字段名("ANTHROPIC_AUTH_TOKEN" 或 "ANTHROPIC_API_KEY"
#[serde(rename = "apiKeyField", skip_serializing_if = "Option::is_none")] #[serde(rename = "apiKeyField", skip_serializing_if = "Option::is_none")]
pub api_key_field: Option<String>, pub api_key_field: Option<String>,
/// 是否将 base_url 视为完整 API 端点(不拼接 endpoint 路径)
#[serde(rename = "isFullUrl", skip_serializing_if = "Option::is_none")]
pub is_full_url: Option<bool>,
/// Prompt cache key for OpenAI-compatible endpoints. /// Prompt cache key for OpenAI-compatible endpoints.
/// When set, injected into converted requests to improve cache hit rate. /// When set, injected into converted requests to improve cache hit rate.
/// If not set, provider ID is used automatically during format conversion. /// If not set, provider ID is used automatically during format conversion.

View File

@@ -795,6 +795,12 @@ impl RequestForwarder {
// 检查是否需要格式转换 // 检查是否需要格式转换
let needs_transform = adapter.needs_transform(provider); let needs_transform = adapter.needs_transform(provider);
let is_full_url = provider
.meta
.as_ref()
.and_then(|meta| meta.is_full_url)
.unwrap_or(false);
// 确定有效端点 // 确定有效端点
// GitHub Copilot API 使用 /chat/completions无 /v1 前缀) // GitHub Copilot API 使用 /chat/completions无 /v1 前缀)
let is_copilot = provider let is_copilot = provider
@@ -803,26 +809,24 @@ impl RequestForwarder {
.and_then(|m| m.provider_type.as_deref()) .and_then(|m| m.provider_type.as_deref())
== Some("github_copilot") == Some("github_copilot")
|| base_url.contains("githubcopilot.com"); || base_url.contains("githubcopilot.com");
let effective_endpoint = let (effective_endpoint, passthrough_query) =
if needs_transform && adapter.name() == "Claude" && endpoint == "/v1/messages" { if needs_transform && adapter.name() == "Claude" {
if is_copilot { let api_format = super::providers::get_claude_api_format(provider);
// GitHub Copilot uses /chat/completions without /v1 prefix rewrite_claude_transform_endpoint(endpoint, api_format, is_copilot)
"/chat/completions"
} else {
// 根据 api_format 选择目标端点
let api_format = super::providers::get_claude_api_format(provider);
if api_format == "openai_responses" {
"/v1/responses"
} else {
"/v1/chat/completions"
}
}
} else { } else {
endpoint (
endpoint.to_string(),
split_endpoint_and_query(endpoint)
.1
.map(ToString::to_string),
)
}; };
// 使用适配器构建 URL let url = if is_full_url {
let url = adapter.build_url(&base_url, effective_endpoint); append_query_to_full_url(&base_url, passthrough_query.as_deref())
} else {
adapter.build_url(&base_url, &effective_endpoint)
};
// 应用模型映射(独立于格式转换) // 应用模型映射(独立于格式转换)
let (mapped_body, _original_model, _mapped_model) = let (mapped_body, _original_model, _mapped_model) =
@@ -916,7 +920,7 @@ impl RequestForwarder {
// 流式请求保守禁用压缩,避免上游压缩 SSE 在连接中断时触发解压错误。 // 流式请求保守禁用压缩,避免上游压缩 SSE 在连接中断时触发解压错误。
// 非流式请求不显式设置 Accept-Encoding让 reqwest 自动协商压缩并透明解压。 // 非流式请求不显式设置 Accept-Encoding让 reqwest 自动协商压缩并透明解压。
if should_force_identity_encoding(effective_endpoint, &filtered_body, headers) { if should_force_identity_encoding(&effective_endpoint, &filtered_body, headers) {
request = request.header("accept-encoding", "identity"); request = request.header("accept-encoding", "identity");
} }
@@ -1173,6 +1177,76 @@ fn extract_json_error_message(body: &Value) -> Option<String> {
.find_map(|value| value.as_str().map(ToString::to_string)) .find_map(|value| value.as_str().map(ToString::to_string))
} }
/// Split an endpoint into `(path, query)` at the first `?`.
///
/// `query` is `None` when the endpoint has no `?`; an endpoint ending in a
/// bare `?` yields `Some("")`.
fn split_endpoint_and_query(endpoint: &str) -> (&str, Option<&str>) {
    match endpoint.split_once('?') {
        Some((path, query)) => (path, Some(query)),
        None => (endpoint, None),
    }
}
/// Remove the Claude-specific `beta` parameter from a query string.
///
/// Filters out empty pairs and any `beta` / `beta=...` pair, returning the
/// surviving pairs re-joined with `&`. Returns `None` when the input is
/// `None` or when nothing survives filtering, so callers can distinguish
/// "no query to pass through" from "query present".
fn strip_beta_query(query: Option<&str>) -> Option<String> {
    let query = query?;
    let kept: Vec<&str> = query
        .split('&')
        // Strip a valueless `beta` key (e.g. `?beta&x=1`) as well — the
        // original `starts_with("beta=")` check alone let it slip through.
        .filter(|pair| !pair.is_empty() && *pair != "beta" && !pair.starts_with("beta="))
        .collect();
    if kept.is_empty() {
        None
    } else {
        Some(kept.join("&"))
    }
}
/// Whether `path` is one of the Claude messages endpoints the proxy rewrites
/// during format conversion (plain or `/claude`-prefixed variant).
fn is_claude_messages_path(path: &str) -> bool {
    path == "/v1/messages" || path == "/claude/v1/messages"
}
/// Rewrite a Claude `/v1/messages`-style endpoint to the provider's real
/// target path when the request body is being format-converted.
///
/// Returns `(rewritten_endpoint, passthrough_query)`:
/// - Non-messages paths are forwarded untouched, query included.
/// - Messages paths map to `/chat/completions` (Copilot, no `/v1` prefix),
///   `/v1/responses` (`api_format == "openai_responses"`), or
///   `/v1/chat/completions` (default), with any `beta` parameter removed —
///   it is Claude-specific and not accepted by OpenAI-style endpoints.
fn rewrite_claude_transform_endpoint(
    endpoint: &str,
    api_format: &str,
    is_copilot: bool,
) -> (String, Option<String>) {
    let (path, query) = split_endpoint_and_query(endpoint);

    if !is_claude_messages_path(path) {
        // Not a rewrite target: pass through verbatim with the raw query.
        return (endpoint.to_string(), query.map(ToString::to_string));
    }

    // Drop the Claude-only `beta` marker before re-attaching the query.
    let passthrough_query = strip_beta_query(query);

    let target_path = if is_copilot {
        // GitHub Copilot serves chat completions without the /v1 prefix.
        "/chat/completions"
    } else if api_format == "openai_responses" {
        "/v1/responses"
    } else {
        "/v1/chat/completions"
    };

    let rewritten = match passthrough_query.as_deref() {
        Some(query) if !query.is_empty() => format!("{target_path}?{query}"),
        _ => target_path.to_string(),
    };
    (rewritten, passthrough_query)
}
/// Append a passthrough query string to a full-URL-mode base URL.
///
/// Uses `&` when `base_url` already carries a query, `?` otherwise. A `None`
/// or empty query leaves the URL unchanged.
fn append_query_to_full_url(base_url: &str, query: Option<&str>) -> String {
    let Some(query) = query.filter(|q| !q.is_empty()) else {
        return base_url.to_string();
    };
    let separator = if base_url.contains('?') { '&' } else { '?' };
    format!("{base_url}{separator}{query}")
}
fn should_force_identity_encoding( fn should_force_identity_encoding(
endpoint: &str, endpoint: &str,
body: &Value, body: &Value,
@@ -1281,6 +1355,46 @@ mod tests {
assert_eq!(summary, "line1 line2..."); assert_eq!(summary, "line1 line2...");
} }
#[test]
fn rewrite_claude_transform_endpoint_strips_beta_for_chat_completions() {
    // `beta=...` must be dropped while unrelated params survive in both
    // the rewritten path and the passthrough query.
    let (rewritten, passthrough) = rewrite_claude_transform_endpoint(
        "/v1/messages?beta=true&foo=bar",
        "openai_chat",
        false,
    );
    assert_eq!(rewritten, "/v1/chat/completions?foo=bar");
    assert_eq!(passthrough.as_deref(), Some("foo=bar"));
}
#[test]
fn rewrite_claude_transform_endpoint_strips_beta_for_responses() {
    // The `/claude`-prefixed messages path is also a rewrite target.
    let (rewritten, passthrough) = rewrite_claude_transform_endpoint(
        "/claude/v1/messages?beta=true&x-id=1",
        "openai_responses",
        false,
    );
    assert_eq!(rewritten, "/v1/responses?x-id=1");
    assert_eq!(passthrough.as_deref(), Some("x-id=1"));
}
#[test]
fn rewrite_claude_transform_endpoint_uses_copilot_path() {
    // Copilot targets /chat/completions (no /v1 prefix), overriding api_format.
    let (rewritten, passthrough) =
        rewrite_claude_transform_endpoint("/v1/messages?beta=true&x-id=1", "anthropic", true);
    assert_eq!(rewritten, "/chat/completions?x-id=1");
    assert_eq!(passthrough.as_deref(), Some("x-id=1"));
}
#[test]
fn append_query_to_full_url_preserves_existing_query_string() {
    // When base_url already carries a query, the extra pair is joined with '&'.
    let merged = append_query_to_full_url("https://relay.example/api?foo=bar", Some("x-id=1"));
    assert_eq!(merged, "https://relay.example/api?foo=bar&x-id=1");
}
#[test] #[test]
fn force_identity_for_stream_flag_requests() { fn force_identity_for_stream_flag_requests() {
let headers = HeaderMap::new(); let headers = HeaderMap::new();

View File

@@ -61,12 +61,18 @@ pub async fn get_status(State(state): State<ProxyState>) -> Result<Json<ProxySta
/// - 现在 OpenRouter 已推出 Claude Code 兼容接口,默认不再启用该转换(逻辑保留以备回退) /// - 现在 OpenRouter 已推出 Claude Code 兼容接口,默认不再启用该转换(逻辑保留以备回退)
pub async fn handle_messages( pub async fn handle_messages(
State(state): State<ProxyState>, State(state): State<ProxyState>,
uri: axum::http::Uri,
headers: axum::http::HeaderMap, headers: axum::http::HeaderMap,
Json(body): Json<Value>, Json(body): Json<Value>,
) -> Result<axum::response::Response, ProxyError> { ) -> Result<axum::response::Response, ProxyError> {
let mut ctx = let mut ctx =
RequestContext::new(&state, &body, &headers, AppType::Claude, "Claude", "claude").await?; RequestContext::new(&state, &body, &headers, AppType::Claude, "Claude", "claude").await?;
let endpoint = uri
.path_and_query()
.map(|path_and_query| path_and_query.as_str())
.unwrap_or(uri.path());
let is_stream = body let is_stream = body
.get("stream") .get("stream")
.and_then(|s| s.as_bool()) .and_then(|s| s.as_bool())
@@ -77,7 +83,7 @@ pub async fn handle_messages(
let result = match forwarder let result = match forwarder
.forward_with_retry( .forward_with_retry(
&AppType::Claude, &AppType::Claude,
"/v1/messages", endpoint,
body.clone(), body.clone(),
headers, headers,
ctx.get_providers(), ctx.get_providers(),
@@ -280,6 +286,13 @@ async fn handle_claude_transform(
}) })
} }
/// Re-attach the incoming request's query string (if any) to `endpoint`.
fn endpoint_with_query(uri: &axum::http::Uri, endpoint: &str) -> String {
    uri.query()
        .map_or_else(|| endpoint.to_string(), |query| format!("{endpoint}?{query}"))
}
// ============================================================================ // ============================================================================
// Codex API 处理器 // Codex API 处理器
// ============================================================================ // ============================================================================
@@ -287,11 +300,13 @@ async fn handle_claude_transform(
/// 处理 /v1/chat/completions 请求OpenAI Chat Completions API - Codex CLI /// 处理 /v1/chat/completions 请求OpenAI Chat Completions API - Codex CLI
pub async fn handle_chat_completions( pub async fn handle_chat_completions(
State(state): State<ProxyState>, State(state): State<ProxyState>,
uri: axum::http::Uri,
headers: axum::http::HeaderMap, headers: axum::http::HeaderMap,
Json(body): Json<Value>, Json(body): Json<Value>,
) -> Result<axum::response::Response, ProxyError> { ) -> Result<axum::response::Response, ProxyError> {
let mut ctx = let mut ctx =
RequestContext::new(&state, &body, &headers, AppType::Codex, "Codex", "codex").await?; RequestContext::new(&state, &body, &headers, AppType::Codex, "Codex", "codex").await?;
let endpoint = endpoint_with_query(&uri, "/chat/completions");
let is_stream = body let is_stream = body
.get("stream") .get("stream")
@@ -302,7 +317,7 @@ pub async fn handle_chat_completions(
let result = match forwarder let result = match forwarder
.forward_with_retry( .forward_with_retry(
&AppType::Codex, &AppType::Codex,
"/chat/completions", &endpoint,
body, body,
headers, headers,
ctx.get_providers(), ctx.get_providers(),
@@ -328,11 +343,13 @@ pub async fn handle_chat_completions(
/// 处理 /v1/responses 请求OpenAI Responses API - Codex CLI 透传) /// 处理 /v1/responses 请求OpenAI Responses API - Codex CLI 透传)
pub async fn handle_responses( pub async fn handle_responses(
State(state): State<ProxyState>, State(state): State<ProxyState>,
uri: axum::http::Uri,
headers: axum::http::HeaderMap, headers: axum::http::HeaderMap,
Json(body): Json<Value>, Json(body): Json<Value>,
) -> Result<axum::response::Response, ProxyError> { ) -> Result<axum::response::Response, ProxyError> {
let mut ctx = let mut ctx =
RequestContext::new(&state, &body, &headers, AppType::Codex, "Codex", "codex").await?; RequestContext::new(&state, &body, &headers, AppType::Codex, "Codex", "codex").await?;
let endpoint = endpoint_with_query(&uri, "/responses");
let is_stream = body let is_stream = body
.get("stream") .get("stream")
@@ -343,7 +360,7 @@ pub async fn handle_responses(
let result = match forwarder let result = match forwarder
.forward_with_retry( .forward_with_retry(
&AppType::Codex, &AppType::Codex,
"/responses", &endpoint,
body, body,
headers, headers,
ctx.get_providers(), ctx.get_providers(),
@@ -369,11 +386,13 @@ pub async fn handle_responses(
/// 处理 /v1/responses/compact 请求OpenAI Responses Compact API - Codex CLI 透传) /// 处理 /v1/responses/compact 请求OpenAI Responses Compact API - Codex CLI 透传)
pub async fn handle_responses_compact( pub async fn handle_responses_compact(
State(state): State<ProxyState>, State(state): State<ProxyState>,
uri: axum::http::Uri,
headers: axum::http::HeaderMap, headers: axum::http::HeaderMap,
Json(body): Json<Value>, Json(body): Json<Value>,
) -> Result<axum::response::Response, ProxyError> { ) -> Result<axum::response::Response, ProxyError> {
let mut ctx = let mut ctx =
RequestContext::new(&state, &body, &headers, AppType::Codex, "Codex", "codex").await?; RequestContext::new(&state, &body, &headers, AppType::Codex, "Codex", "codex").await?;
let endpoint = endpoint_with_query(&uri, "/responses/compact");
let is_stream = body let is_stream = body
.get("stream") .get("stream")
@@ -384,7 +403,7 @@ pub async fn handle_responses_compact(
let result = match forwarder let result = match forwarder
.forward_with_retry( .forward_with_retry(
&AppType::Codex, &AppType::Codex,
"/responses/compact", &endpoint,
body, body,
headers, headers,
ctx.get_providers(), ctx.get_providers(),

View File

@@ -298,7 +298,7 @@ impl ProviderAdapter for ClaudeAdapter {
// //
// 现在 OpenRouter 已推出 Claude Code 兼容接口,因此默认直接透传 endpoint。 // 现在 OpenRouter 已推出 Claude Code 兼容接口,因此默认直接透传 endpoint。
// 如需回退旧逻辑,可在 forwarder 中根据 needs_transform 改写 endpoint。 // 如需回退旧逻辑,可在 forwarder 中根据 needs_transform 改写 endpoint。
//
let mut base = format!( let mut base = format!(
"{}/{}", "{}/{}",
base_url.trim_end_matches('/'), base_url.trim_end_matches('/'),
@@ -310,24 +310,7 @@ impl ProviderAdapter for ClaudeAdapter {
base = base.replace("/v1/v1", "/v1"); base = base.replace("/v1/v1", "/v1");
} }
// GitHub Copilot 不需要 ?beta=true 参数 base
if base_url.contains("githubcopilot.com") {
return base;
}
// 为 Claude 原生 /v1/messages 端点添加 ?beta=true 参数
// 这是某些上游服务(如 DuckCoding验证请求来源的关键参数
// 注意:不要为 OpenAI Chat Completions (/v1/chat/completions) 添加此参数
// 当 apiFormat="openai_chat" 时,请求会转发到 /v1/chat/completions
// 但该端点是 OpenAI 标准,不支持 ?beta=true 参数
if endpoint.contains("/v1/messages")
&& !endpoint.contains("/v1/chat/completions")
&& !endpoint.contains('?')
{
format!("{base}?beta=true")
} else {
base
}
} }
fn add_auth_headers(&self, request: RequestBuilder, auth: &AuthInfo) -> RequestBuilder { fn add_auth_headers(&self, request: RequestBuilder, auth: &AuthInfo) -> RequestBuilder {
@@ -578,23 +561,20 @@ mod tests {
#[test] #[test]
fn test_build_url_anthropic() { fn test_build_url_anthropic() {
let adapter = ClaudeAdapter::new(); let adapter = ClaudeAdapter::new();
// /v1/messages 端点会自动添加 ?beta=true 参数
let url = adapter.build_url("https://api.anthropic.com", "/v1/messages"); let url = adapter.build_url("https://api.anthropic.com", "/v1/messages");
assert_eq!(url, "https://api.anthropic.com/v1/messages?beta=true"); assert_eq!(url, "https://api.anthropic.com/v1/messages");
} }
#[test] #[test]
fn test_build_url_openrouter() { fn test_build_url_openrouter() {
let adapter = ClaudeAdapter::new(); let adapter = ClaudeAdapter::new();
// /v1/messages 端点会自动添加 ?beta=true 参数
let url = adapter.build_url("https://openrouter.ai/api", "/v1/messages"); let url = adapter.build_url("https://openrouter.ai/api", "/v1/messages");
assert_eq!(url, "https://openrouter.ai/api/v1/messages?beta=true"); assert_eq!(url, "https://openrouter.ai/api/v1/messages");
} }
#[test] #[test]
fn test_build_url_no_beta_for_other_endpoints() { fn test_build_url_no_beta_for_other_endpoints() {
let adapter = ClaudeAdapter::new(); let adapter = ClaudeAdapter::new();
// 非 /v1/messages 端点不添加 ?beta=true
let url = adapter.build_url("https://api.anthropic.com", "/v1/complete"); let url = adapter.build_url("https://api.anthropic.com", "/v1/complete");
assert_eq!(url, "https://api.anthropic.com/v1/complete"); assert_eq!(url, "https://api.anthropic.com/v1/complete");
} }
@@ -602,16 +582,20 @@ mod tests {
#[test] #[test]
fn test_build_url_preserve_existing_query() { fn test_build_url_preserve_existing_query() {
let adapter = ClaudeAdapter::new(); let adapter = ClaudeAdapter::new();
// 已有查询参数时不重复添加
let url = adapter.build_url("https://api.anthropic.com", "/v1/messages?foo=bar"); let url = adapter.build_url("https://api.anthropic.com", "/v1/messages?foo=bar");
assert_eq!(url, "https://api.anthropic.com/v1/messages?foo=bar"); assert_eq!(url, "https://api.anthropic.com/v1/messages?foo=bar");
} }
#[test]
fn test_build_url_no_beta_for_github_copilot() {
let adapter = ClaudeAdapter::new();
let url = adapter.build_url("https://api.githubcopilot.com", "/v1/messages");
assert_eq!(url, "https://api.githubcopilot.com/v1/messages");
}
#[test] #[test]
fn test_build_url_no_beta_for_openai_chat_completions() { fn test_build_url_no_beta_for_openai_chat_completions() {
let adapter = ClaudeAdapter::new(); let adapter = ClaudeAdapter::new();
// OpenAI Chat Completions 端点不添加 ?beta=true
// 这是 Nvidia 等 apiFormat="openai_chat" 供应商使用的端点
let url = adapter.build_url("https://integrate.api.nvidia.com", "/v1/chat/completions"); let url = adapter.build_url("https://integrate.api.nvidia.com", "/v1/chat/completions");
assert_eq!(url, "https://integrate.api.nvidia.com/v1/chat/completions"); assert_eq!(url, "https://integrate.api.nvidia.com/v1/chat/completions");
} }

View File

@@ -14,6 +14,7 @@ use crate::error::AppError;
use crate::provider::Provider; use crate::provider::Provider;
use crate::proxy::providers::transform::anthropic_to_openai; use crate::proxy::providers::transform::anthropic_to_openai;
use crate::proxy::providers::copilot_auth; use crate::proxy::providers::copilot_auth;
use crate::proxy::providers::transform_responses::anthropic_to_responses;
use crate::proxy::providers::{get_adapter, AuthInfo, AuthStrategy}; use crate::proxy::providers::{get_adapter, AuthInfo, AuthStrategy};
/// 健康状态枚举 /// 健康状态枚举
@@ -225,6 +226,7 @@ impl StreamCheckService {
&model_to_test, &model_to_test,
test_prompt, test_prompt,
request_timeout, request_timeout,
provider,
) )
.await .await
} }
@@ -318,37 +320,26 @@ impl StreamCheckService {
}) })
.unwrap_or("anthropic"); .unwrap_or("anthropic");
let is_full_url = provider
.meta
.as_ref()
.and_then(|meta| meta.is_full_url)
.unwrap_or(false);
let is_openai_chat = is_github_copilot || api_format == "openai_chat"; let is_openai_chat = is_github_copilot || api_format == "openai_chat";
let is_openai_responses = !is_github_copilot && api_format == "openai_responses";
let url = Self::resolve_claude_stream_url(base, auth.strategy, api_format, is_full_url);
// URL: // Build from Anthropic-native shape first, then convert for configured targets.
// - GitHub Copilot: /chat/completions (no /v1 prefix)
// - OpenAI-compatible: /v1/chat/completions
// - Anthropic native: /v1/messages?beta=true
let url = if is_github_copilot {
format!("{base}/chat/completions")
} else if is_openai_chat {
if base.ends_with("/v1") {
format!("{base}/chat/completions")
} else {
format!("{base}/v1/chat/completions")
}
} else {
// ?beta=true is required by some relay services to verify request origin
if base.ends_with("/v1") {
format!("{base}/messages?beta=true")
} else {
format!("{base}/v1/messages?beta=true")
}
};
// Build from Anthropic-native shape first, then convert for OpenAI-compatible targets.
let anthropic_body = json!({ let anthropic_body = json!({
"model": model, "model": model,
"max_tokens": 1, "max_tokens": 1,
"messages": [{ "role": "user", "content": test_prompt }], "messages": [{ "role": "user", "content": test_prompt }],
"stream": true "stream": true
}); });
let body = if is_openai_chat { let body = if is_openai_responses {
anthropic_to_responses(anthropic_body, Some(&provider.id))
.map_err(|e| AppError::Message(format!("Failed to build test request: {e}")))?
} else if is_openai_chat {
anthropic_to_openai(anthropic_body, Some(&provider.id)) anthropic_to_openai(anthropic_body, Some(&provider.id))
.map_err(|e| AppError::Message(format!("Failed to build test request: {e}")))? .map_err(|e| AppError::Message(format!("Failed to build test request: {e}")))?
} else { } else {
@@ -369,8 +360,8 @@ impl StreamCheckService {
.header("copilot-integration-id", copilot_auth::COPILOT_INTEGRATION_ID) .header("copilot-integration-id", copilot_auth::COPILOT_INTEGRATION_ID)
.header("x-github-api-version", copilot_auth::COPILOT_API_VERSION) .header("x-github-api-version", copilot_auth::COPILOT_API_VERSION)
.header("openai-intent", "conversation-panel"); .header("openai-intent", "conversation-panel");
} else if is_openai_chat { } else if is_openai_chat || is_openai_responses {
// OpenAI-compatible: Bearer auth + standard headers only // OpenAI-compatible targets: Bearer auth + SSE headers only
request_builder = request_builder request_builder = request_builder
.header("authorization", format!("Bearer {}", auth.api_key)) .header("authorization", format!("Bearer {}", auth.api_key))
.header("content-type", "application/json") .header("content-type", "application/json")
@@ -455,18 +446,14 @@ impl StreamCheckService {
model: &str, model: &str,
test_prompt: &str, test_prompt: &str,
timeout: std::time::Duration, timeout: std::time::Duration,
provider: &Provider,
) -> Result<(u16, String), AppError> { ) -> Result<(u16, String), AppError> {
let base = base_url.trim_end_matches('/'); let is_full_url = provider
// Codex CLI 的 base_url 语义base_url 是 API base可能已包含 /v1 或其他自定义前缀), .meta
// Responses 端点为 `/responses`。 .as_ref()
// .and_then(|meta| meta.is_full_url)
// 兼容:如果 base_url 配成纯 origin如 https://api.openai.com则需要补 `/v1`。 .unwrap_or(false);
// 优先尝试 `{base}/responses`,若 404 再回退 `{base}/v1/responses`。 let urls = Self::resolve_codex_stream_urls(base_url, is_full_url);
let urls = if base.ends_with("/v1") {
vec![format!("{base}/responses")]
} else {
vec![format!("{base}/responses"), format!("{base}/v1/responses")]
};
// 解析模型名和推理等级 (支持 model@level 或 model#level 格式) // 解析模型名和推理等级 (支持 model@level 或 model#level 格式)
let (actual_model, reasoning_effort) = Self::parse_model_with_effort(model); let (actual_model, reasoning_effort) = Self::parse_model_with_effort(model);
@@ -724,28 +711,51 @@ impl StreamCheckService {
} }
} }
#[cfg(test)]
fn resolve_claude_stream_url( fn resolve_claude_stream_url(
base_url: &str, base_url: &str,
auth_strategy: AuthStrategy, auth_strategy: AuthStrategy,
api_format: &str, api_format: &str,
is_full_url: bool,
) -> String { ) -> String {
if is_full_url {
return base_url.to_string();
}
let base = base_url.trim_end_matches('/'); let base = base_url.trim_end_matches('/');
let is_github_copilot = auth_strategy == AuthStrategy::GitHubCopilot; let is_github_copilot = auth_strategy == AuthStrategy::GitHubCopilot;
let is_openai_chat = is_github_copilot || api_format == "openai_chat";
if is_github_copilot { if is_github_copilot {
format!("{base}/chat/completions") format!("{base}/chat/completions")
} else if is_openai_chat { } else if api_format == "openai_responses" {
if base.ends_with("/v1") {
format!("{base}/responses")
} else {
format!("{base}/v1/responses")
}
} else if api_format == "openai_chat" {
if base.ends_with("/v1") { if base.ends_with("/v1") {
format!("{base}/chat/completions") format!("{base}/chat/completions")
} else { } else {
format!("{base}/v1/chat/completions") format!("{base}/v1/chat/completions")
} }
} else if base.ends_with("/v1") { } else if base.ends_with("/v1") {
format!("{base}/messages?beta=true") format!("{base}/messages")
} else { } else {
format!("{base}/v1/messages?beta=true") format!("{base}/v1/messages")
}
}
fn resolve_codex_stream_urls(base_url: &str, is_full_url: bool) -> Vec<String> {
if is_full_url {
return vec![base_url.to_string()];
}
let base = base_url.trim_end_matches('/');
if base.ends_with("/v1") {
vec![format!("{base}/responses")]
} else {
vec![format!("{base}/responses"), format!("{base}/v1/responses")]
} }
} }
} }
@@ -851,12 +861,25 @@ mod tests {
assert_eq!(bearer, AuthStrategy::Bearer); assert_eq!(bearer, AuthStrategy::Bearer);
} }
#[test]
fn test_resolve_claude_stream_url_for_full_url_mode() {
    // Full-URL mode returns base_url verbatim, ignoring api_format hints.
    let resolved = StreamCheckService::resolve_claude_stream_url(
        "https://relay.example/v1/chat/completions",
        AuthStrategy::Bearer,
        "openai_chat",
        true,
    );
    assert_eq!(resolved, "https://relay.example/v1/chat/completions");
}
#[test] #[test]
fn test_resolve_claude_stream_url_for_github_copilot() { fn test_resolve_claude_stream_url_for_github_copilot() {
let url = StreamCheckService::resolve_claude_stream_url( let url = StreamCheckService::resolve_claude_stream_url(
"https://api.githubcopilot.com", "https://api.githubcopilot.com",
AuthStrategy::GitHubCopilot, AuthStrategy::GitHubCopilot,
"anthropic", "anthropic",
false,
); );
assert_eq!(url, "https://api.githubcopilot.com/chat/completions"); assert_eq!(url, "https://api.githubcopilot.com/chat/completions");
@@ -868,19 +891,64 @@ mod tests {
"https://example.com/v1", "https://example.com/v1",
AuthStrategy::Bearer, AuthStrategy::Bearer,
"openai_chat", "openai_chat",
false,
); );
assert_eq!(url, "https://example.com/v1/chat/completions"); assert_eq!(url, "https://example.com/v1/chat/completions");
} }
#[test]
fn test_resolve_claude_stream_url_for_openai_responses() {
    // A /v1 base with openai_responses format maps to {base}/responses.
    let resolved = StreamCheckService::resolve_claude_stream_url(
        "https://example.com/v1",
        AuthStrategy::Bearer,
        "openai_responses",
        false,
    );
    assert_eq!(resolved, "https://example.com/v1/responses");
}
#[test] #[test]
fn test_resolve_claude_stream_url_for_anthropic() { fn test_resolve_claude_stream_url_for_anthropic() {
let url = StreamCheckService::resolve_claude_stream_url( let url = StreamCheckService::resolve_claude_stream_url(
"https://api.anthropic.com", "https://api.anthropic.com",
AuthStrategy::Anthropic, AuthStrategy::Anthropic,
"anthropic", "anthropic",
false,
); );
assert_eq!(url, "https://api.anthropic.com/v1/messages?beta=true"); assert_eq!(url, "https://api.anthropic.com/v1/messages");
}
#[test]
fn test_resolve_codex_stream_urls_for_full_url_mode() {
    // Full-URL mode yields exactly one candidate: the base URL itself.
    let candidates = StreamCheckService::resolve_codex_stream_urls(
        "https://relay.example/custom/responses",
        true,
    );
    assert_eq!(candidates, vec!["https://relay.example/custom/responses"]);
}
#[test]
fn test_resolve_codex_stream_urls_for_v1_base() {
    // A base already ending in /v1 gets no extra fallback candidate.
    let candidates =
        StreamCheckService::resolve_codex_stream_urls("https://api.openai.com/v1", false);
    assert_eq!(candidates, vec!["https://api.openai.com/v1/responses"]);
}
#[test]
fn test_resolve_codex_stream_urls_for_origin_base() {
let urls = StreamCheckService::resolve_codex_stream_urls("https://api.openai.com", false);
assert_eq!(
urls,
vec![
"https://api.openai.com/responses",
"https://api.openai.com/v1/responses",
]
);
} }
} }

View File

@@ -255,7 +255,7 @@ function App() {
deleteProvider, deleteProvider,
saveUsageScript, saveUsageScript,
setAsDefaultModel, setAsDefaultModel,
} = useProviderActions(activeApp); } = useProviderActions(activeApp, isProxyRunning);
const disableOmoMutation = useDisableCurrentOmo(); const disableOmoMutation = useDisableCurrentOmo();
const handleDisableOmo = () => { const handleDisableOmo = () => {

View File

@@ -108,6 +108,10 @@ interface ClaudeFormFieldsProps {
// Auth Field (ANTHROPIC_AUTH_TOKEN or ANTHROPIC_API_KEY) // Auth Field (ANTHROPIC_AUTH_TOKEN or ANTHROPIC_API_KEY)
apiKeyField: ClaudeApiKeyField; apiKeyField: ClaudeApiKeyField;
onApiKeyFieldChange: (field: ClaudeApiKeyField) => void; onApiKeyFieldChange: (field: ClaudeApiKeyField) => void;
// Full URL mode
isFullUrl: boolean;
onFullUrlChange: (value: boolean) => void;
} }
export function ClaudeFormFields({ export function ClaudeFormFields({
@@ -149,6 +153,8 @@ export function ClaudeFormFields({
onApiFormatChange, onApiFormatChange,
apiKeyField, apiKeyField,
onApiKeyFieldChange, onApiKeyFieldChange,
isFullUrl,
onFullUrlChange,
}: ClaudeFormFieldsProps) { }: ClaudeFormFieldsProps) {
const { t } = useTranslation(); const { t } = useTranslation();
const hasAnyAdvancedValue = !!( const hasAnyAdvancedValue = !!(
@@ -379,6 +385,9 @@ export function ClaudeFormFields({
: t("providerForm.apiHint") : t("providerForm.apiHint")
} }
onManageClick={() => onEndpointModalToggle(true)} onManageClick={() => onEndpointModalToggle(true)}
showFullUrlToggle={true}
isFullUrl={isFullUrl}
onFullUrlChange={onFullUrlChange}
/> />
)} )}

View File

@@ -22,6 +22,8 @@ interface CodexFormFieldsProps {
shouldShowSpeedTest: boolean; shouldShowSpeedTest: boolean;
codexBaseUrl: string; codexBaseUrl: string;
onBaseUrlChange: (url: string) => void; onBaseUrlChange: (url: string) => void;
isFullUrl: boolean;
onFullUrlChange: (value: boolean) => void;
isEndpointModalOpen: boolean; isEndpointModalOpen: boolean;
onEndpointModalToggle: (open: boolean) => void; onEndpointModalToggle: (open: boolean) => void;
onCustomEndpointsChange?: (endpoints: string[]) => void; onCustomEndpointsChange?: (endpoints: string[]) => void;
@@ -49,6 +51,8 @@ export function CodexFormFields({
shouldShowSpeedTest, shouldShowSpeedTest,
codexBaseUrl, codexBaseUrl,
onBaseUrlChange, onBaseUrlChange,
isFullUrl,
onFullUrlChange,
isEndpointModalOpen, isEndpointModalOpen,
onEndpointModalToggle, onEndpointModalToggle,
onCustomEndpointsChange, onCustomEndpointsChange,
@@ -93,6 +97,9 @@ export function CodexFormFields({
onChange={onBaseUrlChange} onChange={onBaseUrlChange}
placeholder={t("providerForm.codexApiEndpointPlaceholder")} placeholder={t("providerForm.codexApiEndpointPlaceholder")}
hint={t("providerForm.codexApiHint")} hint={t("providerForm.codexApiHint")}
showFullUrlToggle
isFullUrl={isFullUrl}
onFullUrlChange={onFullUrlChange}
onManageClick={() => onEndpointModalToggle(true)} onManageClick={() => onEndpointModalToggle(true)}
/> />
)} )}

View File

@@ -162,6 +162,11 @@ export function ProviderForm({
const [endpointAutoSelect, setEndpointAutoSelect] = useState<boolean>( const [endpointAutoSelect, setEndpointAutoSelect] = useState<boolean>(
() => initialData?.meta?.endpointAutoSelect ?? true, () => initialData?.meta?.endpointAutoSelect ?? true,
); );
const supportsFullUrl = appId === "claude" || appId === "codex";
const [localIsFullUrl, setLocalIsFullUrl] = useState<boolean>(() => {
if (!supportsFullUrl) return false;
return initialData?.meta?.isFullUrl ?? false;
});
const [testConfig, setTestConfig] = useState<ProviderTestConfig>( const [testConfig, setTestConfig] = useState<ProviderTestConfig>(
() => initialData?.meta?.testConfig ?? { enabled: false }, () => initialData?.meta?.testConfig ?? { enabled: false },
@@ -201,6 +206,9 @@ export function ProviderForm({
setDraftCustomEndpoints([]); setDraftCustomEndpoints([]);
} }
setEndpointAutoSelect(initialData?.meta?.endpointAutoSelect ?? true); setEndpointAutoSelect(initialData?.meta?.endpointAutoSelect ?? true);
setLocalIsFullUrl(
supportsFullUrl ? (initialData?.meta?.isFullUrl ?? false) : false,
);
setTestConfig(initialData?.meta?.testConfig ?? { enabled: false }); setTestConfig(initialData?.meta?.testConfig ?? { enabled: false });
setProxyConfig(initialData?.meta?.proxyConfig ?? { enabled: false }); setProxyConfig(initialData?.meta?.proxyConfig ?? { enabled: false });
setPricingConfig({ setPricingConfig({
@@ -212,7 +220,7 @@ export function ProviderForm({
initialData?.meta?.pricingModelSource, initialData?.meta?.pricingModelSource,
), ),
}); });
}, [appId, initialData]); }, [appId, initialData, supportsFullUrl]);
const defaultValues: ProviderFormData = useMemo( const defaultValues: ProviderFormData = useMemo(
() => ({ () => ({
@@ -941,6 +949,10 @@ export function ProviderForm({
localApiKeyField !== "ANTHROPIC_AUTH_TOKEN" localApiKeyField !== "ANTHROPIC_AUTH_TOKEN"
? localApiKeyField ? localApiKeyField
: undefined, : undefined,
isFullUrl:
supportsFullUrl && category !== "official" && localIsFullUrl
? true
: undefined,
}; };
await onSubmit(payload); await onSubmit(payload);
@@ -1180,6 +1192,7 @@ export function ProviderForm({
} }
setLocalApiKeyField(preset.apiKeyField ?? "ANTHROPIC_AUTH_TOKEN"); setLocalApiKeyField(preset.apiKeyField ?? "ANTHROPIC_AUTH_TOKEN");
setLocalIsFullUrl(false);
form.reset({ form.reset({
name: preset.nameKey ? t(preset.nameKey) : preset.name, name: preset.nameKey ? t(preset.nameKey) : preset.name,
@@ -1402,6 +1415,8 @@ export function ProviderForm({
onApiFormatChange={handleApiFormatChange} onApiFormatChange={handleApiFormatChange}
apiKeyField={localApiKeyField} apiKeyField={localApiKeyField}
onApiKeyFieldChange={handleApiKeyFieldChange} onApiKeyFieldChange={handleApiKeyFieldChange}
isFullUrl={localIsFullUrl}
onFullUrlChange={setLocalIsFullUrl}
/> />
)} )}
@@ -1418,6 +1433,8 @@ export function ProviderForm({
shouldShowSpeedTest={shouldShowSpeedTest} shouldShowSpeedTest={shouldShowSpeedTest}
codexBaseUrl={codexBaseUrl} codexBaseUrl={codexBaseUrl}
onBaseUrlChange={handleCodexBaseUrlChange} onBaseUrlChange={handleCodexBaseUrlChange}
isFullUrl={localIsFullUrl}
onFullUrlChange={setLocalIsFullUrl}
isEndpointModalOpen={isCodexEndpointModalOpen} isEndpointModalOpen={isCodexEndpointModalOpen}
onEndpointModalToggle={setIsCodexEndpointModalOpen} onEndpointModalToggle={setIsCodexEndpointModalOpen}
onCustomEndpointsChange={ onCustomEndpointsChange={

View File

@@ -1,7 +1,8 @@
import { useTranslation } from "react-i18next"; import { useTranslation } from "react-i18next";
import { FormLabel } from "@/components/ui/form"; import { FormLabel } from "@/components/ui/form";
import { Input } from "@/components/ui/input"; import { Input } from "@/components/ui/input";
import { Zap } from "lucide-react"; import { Switch } from "@/components/ui/switch";
import { Link2, Zap } from "lucide-react";
interface EndpointFieldProps { interface EndpointFieldProps {
id: string; id: string;
@@ -13,6 +14,9 @@ interface EndpointFieldProps {
showManageButton?: boolean; showManageButton?: boolean;
onManageClick?: () => void; onManageClick?: () => void;
manageButtonLabel?: string; manageButtonLabel?: string;
showFullUrlToggle?: boolean;
isFullUrl?: boolean;
onFullUrlChange?: (value: boolean) => void;
} }
export function EndpointField({ export function EndpointField({
@@ -25,18 +29,56 @@ export function EndpointField({
showManageButton = true, showManageButton = true,
onManageClick, onManageClick,
manageButtonLabel, manageButtonLabel,
showFullUrlToggle = false,
isFullUrl = false,
onFullUrlChange,
}: EndpointFieldProps) { }: EndpointFieldProps) {
const { t } = useTranslation(); const { t } = useTranslation();
const defaultManageLabel = t("providerForm.manageAndTest", { const defaultManageLabel = t("providerForm.manageAndTest", {
defaultValue: "管理和测速", defaultValue: "管理和测速",
}); });
const effectiveHint =
showFullUrlToggle && isFullUrl
? t("providerForm.fullUrlHint", {
defaultValue:
"💡 请填写完整请求 URL并且必须开启代理后使用代理将直接使用此 URL不拼接路径",
})
: hint;
return ( return (
<div className="space-y-2"> <div className="space-y-2">
<div className="flex items-center justify-between"> <div className="flex flex-wrap items-center justify-between gap-2">
<FormLabel htmlFor={id}>{label}</FormLabel> <div className="flex flex-wrap items-center gap-3">
{showManageButton && onManageClick && ( <FormLabel htmlFor={id}>{label}</FormLabel>
{showFullUrlToggle && onFullUrlChange ? (
<div className="flex items-center gap-2 rounded-full border border-border/70 bg-muted/30 px-2.5 py-1">
<Link2
className={`h-3.5 w-3.5 ${
isFullUrl ? "text-primary" : "text-muted-foreground"
}`}
/>
<span
className={`text-xs font-medium ${
isFullUrl ? "text-foreground" : "text-muted-foreground"
}`}
>
{t("providerForm.fullUrlLabel", {
defaultValue: "完整 URL",
})}
</span>
<Switch
checked={isFullUrl}
onCheckedChange={onFullUrlChange}
aria-label={t("providerForm.fullUrlLabel", {
defaultValue: "完整 URL",
})}
className="h-5 w-9"
/>
</div>
) : null}
</div>
{showManageButton && onManageClick ? (
<button <button
type="button" type="button"
onClick={onManageClick} onClick={onManageClick}
@@ -45,7 +87,7 @@ export function EndpointField({
<Zap className="h-3.5 w-3.5" /> <Zap className="h-3.5 w-3.5" />
{manageButtonLabel || defaultManageLabel} {manageButtonLabel || defaultManageLabel}
</button> </button>
)} ) : null}
</div> </div>
<Input <Input
id={id} id={id}
@@ -55,9 +97,11 @@ export function EndpointField({
placeholder={placeholder} placeholder={placeholder}
autoComplete="off" autoComplete="off"
/> />
{hint ? ( {effectiveHint ? (
<div className="p-3 bg-amber-50 dark:bg-amber-900/20 border border-amber-200 dark:border-amber-700 rounded-lg"> <div className="p-3 bg-amber-50 dark:bg-amber-900/20 border border-amber-200 dark:border-amber-700 rounded-lg">
<p className="text-xs text-amber-600 dark:text-amber-400">{hint}</p> <p className="text-xs text-amber-600 dark:text-amber-400">
{effectiveHint}
</p>
</div> </div>
) : null} ) : null}
</div> </div>

View File

@@ -23,7 +23,7 @@ import { openclawKeys } from "@/hooks/useOpenClaw";
* Hook for managing provider actions (add, update, delete, switch) * Hook for managing provider actions (add, update, delete, switch)
* Extracts business logic from App.tsx * Extracts business logic from App.tsx
*/ */
export function useProviderActions(activeApp: AppId) { export function useProviderActions(activeApp: AppId, isProxyRunning?: boolean) {
const { t } = useTranslation(); const { t } = useTranslation();
const queryClient = useQueryClient(); const queryClient = useQueryClient();
@@ -139,6 +139,26 @@ export function useProviderActions(activeApp: AppId) {
// 切换供应商 // 切换供应商
const switchProvider = useCallback( const switchProvider = useCallback(
async (provider: Provider) => { async (provider: Provider) => {
const requiresProxyForSwitch =
!isProxyRunning &&
provider.category !== "official" &&
((activeApp === "claude" &&
(provider.meta?.isFullUrl ||
provider.meta?.apiFormat === "openai_chat" ||
provider.meta?.apiFormat === "openai_responses")) ||
(activeApp === "codex" && provider.meta?.isFullUrl));
if (
requiresProxyForSwitch
) {
toast.warning(
t("notifications.proxyRequiredForSwitch", {
defaultValue: "此供应商需要代理服务,请先启动代理",
}),
);
return;
}
try { try {
const result = await switchProviderMutation.mutateAsync(provider.id); const result = await switchProviderMutation.mutateAsync(provider.id);
await syncClaudePlugin(provider); await syncClaudePlugin(provider);
@@ -192,7 +212,7 @@ export function useProviderActions(activeApp: AppId) {
// 错误提示由 mutation 处理 // 错误提示由 mutation 处理
} }
}, },
[switchProviderMutation, syncClaudePlugin, activeApp, t], [switchProviderMutation, syncClaudePlugin, activeApp, isProxyRunning, t],
); );
// 删除供应商 // 删除供应商

View File

@@ -176,6 +176,7 @@
"settingsSaveFailed": "Failed to save settings: {{error}}", "settingsSaveFailed": "Failed to save settings: {{error}}",
"openAIChatFormatHint": "This provider uses OpenAI Chat format and requires the proxy service to be enabled", "openAIChatFormatHint": "This provider uses OpenAI Chat format and requires the proxy service to be enabled",
"openAIFormatHint": "This provider uses OpenAI-compatible format and requires the proxy service to be enabled", "openAIFormatHint": "This provider uses OpenAI-compatible format and requires the proxy service to be enabled",
"proxyRequiredForSwitch": "This provider requires the proxy service. Start the proxy first.",
"openLinkFailed": "Failed to open link", "openLinkFailed": "Failed to open link",
"openclawModelsRegistered": "Models have been registered to /model list", "openclawModelsRegistered": "Models have been registered to /model list",
"openclawDefaultModelSet": "Set as default model", "openclawDefaultModelSet": "Set as default model",
@@ -743,6 +744,10 @@
"anthropicReasoningModel": "Reasoning Model (Thinking)", "anthropicReasoningModel": "Reasoning Model (Thinking)",
"apiFormat": "API Format", "apiFormat": "API Format",
"apiFormatHint": "Select the input format for the provider's API", "apiFormatHint": "Select the input format for the provider's API",
"fullUrlLabel": "Full URL",
"fullUrlEnabled": "Full URL Mode",
"fullUrlDisabled": "Mark as Full URL",
"fullUrlHint": "💡 Enter the full request URL. This mode requires the proxy to be enabled, and the proxy will use the URL as-is without appending a path",
"apiFormatAnthropic": "Anthropic Messages (Native)", "apiFormatAnthropic": "Anthropic Messages (Native)",
"apiFormatOpenAIChat": "OpenAI Chat Completions (Requires proxy)", "apiFormatOpenAIChat": "OpenAI Chat Completions (Requires proxy)",
"apiFormatOpenAIResponses": "OpenAI Responses API (Requires proxy)", "apiFormatOpenAIResponses": "OpenAI Responses API (Requires proxy)",

View File

@@ -176,6 +176,7 @@
"settingsSaveFailed": "設定の保存に失敗しました: {{error}}", "settingsSaveFailed": "設定の保存に失敗しました: {{error}}",
"openAIChatFormatHint": "このプロバイダーは OpenAI Chat フォーマットを使用しており、プロキシサービスの有効化が必要です", "openAIChatFormatHint": "このプロバイダーは OpenAI Chat フォーマットを使用しており、プロキシサービスの有効化が必要です",
"openAIFormatHint": "このプロバイダーは OpenAI 互換フォーマットを使用しており、プロキシサービスの有効化が必要です", "openAIFormatHint": "このプロバイダーは OpenAI 互換フォーマットを使用しており、プロキシサービスの有効化が必要です",
"proxyRequiredForSwitch": "このプロバイダーにはプロキシサービスが必要です。先にプロキシを起動してください",
"openLinkFailed": "リンクを開けませんでした", "openLinkFailed": "リンクを開けませんでした",
"openclawModelsRegistered": "モデルが /model リストに登録されました", "openclawModelsRegistered": "モデルが /model リストに登録されました",
"openclawDefaultModelSet": "デフォルトモデルに設定しました", "openclawDefaultModelSet": "デフォルトモデルに設定しました",
@@ -743,6 +744,10 @@
"anthropicReasoningModel": "推論モデルThinking", "anthropicReasoningModel": "推論モデルThinking",
"apiFormat": "API フォーマット", "apiFormat": "API フォーマット",
"apiFormatHint": "プロバイダー API の入力フォーマットを選択", "apiFormatHint": "プロバイダー API の入力フォーマットを選択",
"fullUrlLabel": "フル URL",
"fullUrlEnabled": "フル URL モード",
"fullUrlDisabled": "フル URL として設定",
"fullUrlHint": "💡 完全なリクエスト URL を入力してください。このモードはプロキシを有効にして使用する必要があり、プロキシはこの URL をそのまま使用し、パスを追加しません",
"apiFormatAnthropic": "Anthropic Messagesネイティブ", "apiFormatAnthropic": "Anthropic Messagesネイティブ",
"apiFormatOpenAIChat": "OpenAI Chat Completionsプロキシが必要", "apiFormatOpenAIChat": "OpenAI Chat Completionsプロキシが必要",
"apiFormatOpenAIResponses": "OpenAI Responses APIプロキシが必要", "apiFormatOpenAIResponses": "OpenAI Responses APIプロキシが必要",

View File

@@ -176,6 +176,7 @@
"settingsSaveFailed": "保存设置失败:{{error}}", "settingsSaveFailed": "保存设置失败:{{error}}",
"openAIChatFormatHint": "此供应商使用 OpenAI Chat 格式,需要开启代理服务才能正常使用", "openAIChatFormatHint": "此供应商使用 OpenAI Chat 格式,需要开启代理服务才能正常使用",
"openAIFormatHint": "此供应商使用 OpenAI 兼容格式,需要开启代理服务才能正常使用", "openAIFormatHint": "此供应商使用 OpenAI 兼容格式,需要开启代理服务才能正常使用",
"proxyRequiredForSwitch": "此供应商需要代理服务,请先启动代理",
"openLinkFailed": "链接打开失败", "openLinkFailed": "链接打开失败",
"openclawModelsRegistered": "模型已注册到 /model 列表", "openclawModelsRegistered": "模型已注册到 /model 列表",
"openclawDefaultModelSet": "已设为默认模型", "openclawDefaultModelSet": "已设为默认模型",
@@ -743,6 +744,10 @@
"anthropicReasoningModel": "推理模型 (Thinking)", "anthropicReasoningModel": "推理模型 (Thinking)",
"apiFormat": "API 格式", "apiFormat": "API 格式",
"apiFormatHint": "选择供应商 API 的输入格式", "apiFormatHint": "选择供应商 API 的输入格式",
"fullUrlLabel": "完整 URL",
"fullUrlEnabled": "完整 URL 模式",
"fullUrlDisabled": "标记为完整 URL",
"fullUrlHint": "💡 请填写完整请求 URL并且必须开启代理后使用代理将直接使用此 URL不拼接路径",
"apiFormatAnthropic": "Anthropic Messages (原生)", "apiFormatAnthropic": "Anthropic Messages (原生)",
"apiFormatOpenAIChat": "OpenAI Chat Completions (需开启代理)", "apiFormatOpenAIChat": "OpenAI Chat Completions (需开启代理)",
"apiFormatOpenAIResponses": "OpenAI Responses API (需开启代理)", "apiFormatOpenAIResponses": "OpenAI Responses API (需开启代理)",

View File

@@ -163,6 +163,8 @@ export interface ProviderMeta {
authBinding?: AuthBinding; authBinding?: AuthBinding;
// Claude 认证字段名 // Claude 认证字段名
apiKeyField?: ClaudeApiKeyField; apiKeyField?: ClaudeApiKeyField;
// 是否将 base_url 视为完整 API 端点(代理直接使用此 URL不拼接路径
isFullUrl?: boolean;
// Prompt cache key for OpenAI-compatible endpoints (improves cache hit rate) // Prompt cache key for OpenAI-compatible endpoints (improves cache hit rate)
promptCacheKey?: string; promptCacheKey?: string;
// 供应商类型(用于识别 Copilot 等特殊供应商) // 供应商类型(用于识别 Copilot 等特殊供应商)

View File

@@ -7,11 +7,15 @@ import type { Provider, UsageScript } from "@/types";
const toastSuccessMock = vi.fn(); const toastSuccessMock = vi.fn();
const toastErrorMock = vi.fn(); const toastErrorMock = vi.fn();
const toastInfoMock = vi.fn();
const toastWarningMock = vi.fn();
vi.mock("sonner", () => ({ vi.mock("sonner", () => ({
toast: { toast: {
success: (...args: unknown[]) => toastSuccessMock(...args), success: (...args: unknown[]) => toastSuccessMock(...args),
error: (...args: unknown[]) => toastErrorMock(...args), error: (...args: unknown[]) => toastErrorMock(...args),
info: (...args: unknown[]) => toastInfoMock(...args),
warning: (...args: unknown[]) => toastWarningMock(...args),
}, },
})); }));
@@ -116,6 +120,8 @@ beforeEach(() => {
openclawApiSetDefaultModelMock.mockReset(); openclawApiSetDefaultModelMock.mockReset();
toastSuccessMock.mockReset(); toastSuccessMock.mockReset();
toastErrorMock.mockReset(); toastErrorMock.mockReset();
toastInfoMock.mockReset();
toastWarningMock.mockReset();
addProviderMutation.isPending = false; addProviderMutation.isPending = false;
updateProviderMutation.isPending = false; updateProviderMutation.isPending = false;
@@ -185,6 +191,50 @@ describe("useProviderActions", () => {
expect(settingsApiApplyMock).not.toHaveBeenCalled(); expect(settingsApiApplyMock).not.toHaveBeenCalled();
}); });
it("blocks switching providers that require proxy when proxy is not running", async () => {
const { wrapper } = createWrapper();
const provider = createProvider({
category: "custom",
meta: {
apiFormat: "openai_chat",
},
});
const { result } = renderHook(() => useProviderActions("claude", false), {
wrapper,
});
await act(async () => {
await result.current.switchProvider(provider);
});
expect(switchProviderMutateAsync).not.toHaveBeenCalled();
expect(toastWarningMock).toHaveBeenCalledTimes(1);
expect(settingsApiGetMock).not.toHaveBeenCalled();
});
it("blocks switching Codex full URL providers when proxy is not running", async () => {
const { wrapper } = createWrapper();
const provider = createProvider({
category: "custom",
meta: {
isFullUrl: true,
},
});
const { result } = renderHook(() => useProviderActions("codex", false), {
wrapper,
});
await act(async () => {
await result.current.switchProvider(provider);
});
expect(switchProviderMutateAsync).not.toHaveBeenCalled();
expect(toastWarningMock).toHaveBeenCalledTimes(1);
expect(settingsApiGetMock).not.toHaveBeenCalled();
});
it("should sync plugin config when switching Claude provider with integration enabled", async () => { it("should sync plugin config when switching Claude provider with integration enabled", async () => {
switchProviderMutateAsync.mockResolvedValueOnce(undefined); switchProviderMutateAsync.mockResolvedValueOnce(undefined);
settingsApiGetMock.mockResolvedValueOnce({ settingsApiGetMock.mockResolvedValueOnce({