feat(proxy): add full URL mode and refactor endpoint rewriting (#1561)

* feat(proxy): add full URL mode and refactor endpoint rewriting

- Add `isFullUrl` provider meta to treat base_url as complete API endpoint
- Remove hardcoded `?beta=true` from Claude adapter, pass through from client
- Refactor forwarder endpoint rewriting with proper query string handling
- Block provider switching when proxy is required but not running
- Add full URL toggle UI in endpoint field with i18n (zh/en/ja)

* refactor(proxy): remove beta query handling

* fix(proxy): strip beta query when rewriting Claude endpoints

* feat(codex): complete full URL support

* refactor(ui): refine full URL endpoint hint
This commit is contained in:
Dex Miller
2026-03-28 15:52:02 +08:00
committed by GitHub
parent eaf83f4fbe
commit 8cae7b7b73
16 changed files with 454 additions and 103 deletions
+3 -1
View File
@@ -275,7 +275,9 @@ pub struct ProviderMeta {
/// Claude 认证字段名("ANTHROPIC_AUTH_TOKEN" 或 "ANTHROPIC_API_KEY")
#[serde(rename = "apiKeyField", skip_serializing_if = "Option::is_none")]
pub api_key_field: Option<String>,
/// 是否将 base_url 视为完整 API 端点(不拼接 endpoint 路径)
#[serde(rename = "isFullUrl", skip_serializing_if = "Option::is_none")]
pub is_full_url: Option<bool>,
/// Prompt cache key for OpenAI-compatible endpoints.
/// When set, injected into converted requests to improve cache hit rate.
/// If not set, provider ID is used automatically during format conversion.
+132 -18
View File
@@ -795,6 +795,12 @@ impl RequestForwarder {
// 检查是否需要格式转换
let needs_transform = adapter.needs_transform(provider);
let is_full_url = provider
.meta
.as_ref()
.and_then(|meta| meta.is_full_url)
.unwrap_or(false);
// 确定有效端点
// GitHub Copilot API 使用 /chat/completions(无 /v1 前缀)
let is_copilot = provider
@@ -803,26 +809,24 @@ impl RequestForwarder {
.and_then(|m| m.provider_type.as_deref())
== Some("github_copilot")
|| base_url.contains("githubcopilot.com");
let effective_endpoint =
if needs_transform && adapter.name() == "Claude" && endpoint == "/v1/messages" {
if is_copilot {
// GitHub Copilot uses /chat/completions without /v1 prefix
"/chat/completions"
} else {
// 根据 api_format 选择目标端点
let api_format = super::providers::get_claude_api_format(provider);
if api_format == "openai_responses" {
"/v1/responses"
} else {
"/v1/chat/completions"
}
}
let (effective_endpoint, passthrough_query) =
if needs_transform && adapter.name() == "Claude" {
let api_format = super::providers::get_claude_api_format(provider);
rewrite_claude_transform_endpoint(endpoint, api_format, is_copilot)
} else {
endpoint
(
endpoint.to_string(),
split_endpoint_and_query(endpoint)
.1
.map(ToString::to_string),
)
};
// 使用适配器构建 URL
let url = adapter.build_url(&base_url, effective_endpoint);
let url = if is_full_url {
append_query_to_full_url(&base_url, passthrough_query.as_deref())
} else {
adapter.build_url(&base_url, &effective_endpoint)
};
// 应用模型映射(独立于格式转换)
let (mapped_body, _original_model, _mapped_model) =
@@ -916,7 +920,7 @@ impl RequestForwarder {
// 流式请求保守禁用压缩,避免上游压缩 SSE 在连接中断时触发解压错误。
// 非流式请求不显式设置 Accept-Encoding,让 reqwest 自动协商压缩并透明解压。
if should_force_identity_encoding(effective_endpoint, &filtered_body, headers) {
if should_force_identity_encoding(&effective_endpoint, &filtered_body, headers) {
request = request.header("accept-encoding", "identity");
}
@@ -1173,6 +1177,76 @@ fn extract_json_error_message(body: &Value) -> Option<String> {
.find_map(|value| value.as_str().map(ToString::to_string))
}
/// Split an endpoint into its path and the optional raw query string
/// (text after the first `?`, which may be empty).
fn split_endpoint_and_query(endpoint: &str) -> (&str, Option<&str>) {
    match endpoint.split_once('?') {
        Some((path, query)) => (path, Some(query)),
        None => (endpoint, None),
    }
}
/// Remove every `beta` parameter from a raw query string.
///
/// The `beta` flag is Anthropic-specific and must not be forwarded to
/// OpenAI-style targets. Returns `None` when there was no query or when
/// nothing remains after filtering, otherwise the re-joined query.
///
/// Fix over the previous version: a valueless `beta` pair (e.g. `?beta&x=1`)
/// is now stripped as well — the old filter only matched the `beta=` prefix.
fn strip_beta_query(query: Option<&str>) -> Option<String> {
    let filtered = query?
        .split('&')
        // Drop empty fragments plus both `beta=<value>` and bare `beta` pairs.
        .filter(|pair| !pair.is_empty() && *pair != "beta" && !pair.starts_with("beta="))
        .collect::<Vec<_>>()
        .join("&");
    (!filtered.is_empty()).then_some(filtered)
}
/// Whether `path` is one of the Claude-native messages endpoints that are
/// eligible for transform rewriting.
fn is_claude_messages_path(path: &str) -> bool {
    path == "/v1/messages" || path == "/claude/v1/messages"
}
/// Rewrite a Claude messages endpoint to its OpenAI-compatible target when
/// format conversion is active.
///
/// Returns the rewritten endpoint (path + surviving query) together with the
/// query string to pass through, with any `beta` parameter stripped. Endpoints
/// that are not Claude messages paths are returned untouched along with their
/// original query.
fn rewrite_claude_transform_endpoint(
    endpoint: &str,
    api_format: &str,
    is_copilot: bool,
) -> (String, Option<String>) {
    let (path, query) = split_endpoint_and_query(endpoint);

    // Anything other than a messages endpoint is forwarded verbatim.
    if !is_claude_messages_path(path) {
        return (endpoint.to_string(), query.map(ToString::to_string));
    }

    // `beta=*` is Anthropic-specific; drop it before targeting OpenAI paths.
    let passthrough_query = strip_beta_query(query);

    let target_path = if is_copilot {
        // GitHub Copilot serves chat completions without the /v1 prefix.
        "/chat/completions"
    } else if api_format == "openai_responses" {
        "/v1/responses"
    } else {
        "/v1/chat/completions"
    };

    let rewritten = match passthrough_query.as_deref() {
        Some(q) if !q.is_empty() => format!("{target_path}?{q}"),
        _ => target_path.to_string(),
    };
    (rewritten, passthrough_query)
}
/// Append a passthrough query string onto a full-URL base.
///
/// Uses `&` when the base already carries a query and `?` otherwise; a
/// missing or empty query leaves the base unchanged.
fn append_query_to_full_url(base_url: &str, query: Option<&str>) -> String {
    match query.filter(|q| !q.is_empty()) {
        Some(q) => {
            let separator = if base_url.contains('?') { '&' } else { '?' };
            format!("{base_url}{separator}{q}")
        }
        None => base_url.to_string(),
    }
}
fn should_force_identity_encoding(
endpoint: &str,
body: &Value,
@@ -1281,6 +1355,46 @@ mod tests {
assert_eq!(summary, "line1 line2...");
}
#[test]
fn rewrite_claude_transform_endpoint_strips_beta_for_chat_completions() {
    // beta=* must be dropped; remaining pairs survive on the rewritten path.
    let (rewritten, query) =
        rewrite_claude_transform_endpoint("/v1/messages?beta=true&foo=bar", "openai_chat", false);
    assert_eq!(rewritten, "/v1/chat/completions?foo=bar");
    assert_eq!(query.as_deref(), Some("foo=bar"));
}
#[test]
fn rewrite_claude_transform_endpoint_strips_beta_for_responses() {
    // The /claude prefix variant is also rewritten, targeting /v1/responses.
    let (rewritten, query) = rewrite_claude_transform_endpoint(
        "/claude/v1/messages?beta=true&x-id=1",
        "openai_responses",
        false,
    );
    assert_eq!(rewritten, "/v1/responses?x-id=1");
    assert_eq!(query.as_deref(), Some("x-id=1"));
}
#[test]
fn rewrite_claude_transform_endpoint_uses_copilot_path() {
    // Copilot takes precedence over api_format and omits the /v1 prefix.
    let (rewritten, query) =
        rewrite_claude_transform_endpoint("/v1/messages?beta=true&x-id=1", "anthropic", true);
    assert_eq!(rewritten, "/chat/completions?x-id=1");
    assert_eq!(query.as_deref(), Some("x-id=1"));
}
#[test]
fn append_query_to_full_url_preserves_existing_query_string() {
    // A base that already has a query gets the extra pairs joined with '&'.
    let full_url = append_query_to_full_url("https://relay.example/api?foo=bar", Some("x-id=1"));
    assert_eq!(full_url, "https://relay.example/api?foo=bar&x-id=1");
}
#[test]
fn force_identity_for_stream_flag_requests() {
let headers = HeaderMap::new();
+23 -4
View File
@@ -61,12 +61,18 @@ pub async fn get_status(State(state): State<ProxyState>) -> Result<Json<ProxySta
/// - 现在 OpenRouter 已推出 Claude Code 兼容接口,默认不再启用该转换(逻辑保留以备回退)
pub async fn handle_messages(
State(state): State<ProxyState>,
uri: axum::http::Uri,
headers: axum::http::HeaderMap,
Json(body): Json<Value>,
) -> Result<axum::response::Response, ProxyError> {
let mut ctx =
RequestContext::new(&state, &body, &headers, AppType::Claude, "Claude", "claude").await?;
let endpoint = uri
.path_and_query()
.map(|path_and_query| path_and_query.as_str())
.unwrap_or(uri.path());
let is_stream = body
.get("stream")
.and_then(|s| s.as_bool())
@@ -77,7 +83,7 @@ pub async fn handle_messages(
let result = match forwarder
.forward_with_retry(
&AppType::Claude,
"/v1/messages",
endpoint,
body.clone(),
headers,
ctx.get_providers(),
@@ -280,6 +286,13 @@ async fn handle_claude_transform(
})
}
/// Build the endpoint to forward, carrying over the client's query string
/// from the incoming request URI (if any).
fn endpoint_with_query(uri: &axum::http::Uri, endpoint: &str) -> String {
    if let Some(query) = uri.query() {
        format!("{endpoint}?{query}")
    } else {
        endpoint.to_string()
    }
}
// ============================================================================
// Codex API 处理器
// ============================================================================
@@ -287,11 +300,13 @@ async fn handle_claude_transform(
/// 处理 /v1/chat/completions 请求(OpenAI Chat Completions API - Codex CLI)
pub async fn handle_chat_completions(
State(state): State<ProxyState>,
uri: axum::http::Uri,
headers: axum::http::HeaderMap,
Json(body): Json<Value>,
) -> Result<axum::response::Response, ProxyError> {
let mut ctx =
RequestContext::new(&state, &body, &headers, AppType::Codex, "Codex", "codex").await?;
let endpoint = endpoint_with_query(&uri, "/chat/completions");
let is_stream = body
.get("stream")
@@ -302,7 +317,7 @@ pub async fn handle_chat_completions(
let result = match forwarder
.forward_with_retry(
&AppType::Codex,
"/chat/completions",
&endpoint,
body,
headers,
ctx.get_providers(),
@@ -328,11 +343,13 @@ pub async fn handle_chat_completions(
/// 处理 /v1/responses 请求(OpenAI Responses API - Codex CLI 透传)
pub async fn handle_responses(
State(state): State<ProxyState>,
uri: axum::http::Uri,
headers: axum::http::HeaderMap,
Json(body): Json<Value>,
) -> Result<axum::response::Response, ProxyError> {
let mut ctx =
RequestContext::new(&state, &body, &headers, AppType::Codex, "Codex", "codex").await?;
let endpoint = endpoint_with_query(&uri, "/responses");
let is_stream = body
.get("stream")
@@ -343,7 +360,7 @@ pub async fn handle_responses(
let result = match forwarder
.forward_with_retry(
&AppType::Codex,
"/responses",
&endpoint,
body,
headers,
ctx.get_providers(),
@@ -369,11 +386,13 @@ pub async fn handle_responses(
/// 处理 /v1/responses/compact 请求(OpenAI Responses Compact API - Codex CLI 透传)
pub async fn handle_responses_compact(
State(state): State<ProxyState>,
uri: axum::http::Uri,
headers: axum::http::HeaderMap,
Json(body): Json<Value>,
) -> Result<axum::response::Response, ProxyError> {
let mut ctx =
RequestContext::new(&state, &body, &headers, AppType::Codex, "Codex", "codex").await?;
let endpoint = endpoint_with_query(&uri, "/responses/compact");
let is_stream = body
.get("stream")
@@ -384,7 +403,7 @@ pub async fn handle_responses_compact(
let result = match forwarder
.forward_with_retry(
&AppType::Codex,
"/responses/compact",
&endpoint,
body,
headers,
ctx.get_providers(),
+11 -27
View File
@@ -298,7 +298,7 @@ impl ProviderAdapter for ClaudeAdapter {
//
// 现在 OpenRouter 已推出 Claude Code 兼容接口,因此默认直接透传 endpoint。
// 如需回退旧逻辑,可在 forwarder 中根据 needs_transform 改写 endpoint。
//
let mut base = format!(
"{}/{}",
base_url.trim_end_matches('/'),
@@ -310,24 +310,7 @@ impl ProviderAdapter for ClaudeAdapter {
base = base.replace("/v1/v1", "/v1");
}
// GitHub Copilot 不需要 ?beta=true 参数
if base_url.contains("githubcopilot.com") {
return base;
}
// 为 Claude 原生 /v1/messages 端点添加 ?beta=true 参数
// 这是某些上游服务(如 DuckCoding)验证请求来源的关键参数
// 注意:不要为 OpenAI Chat Completions (/v1/chat/completions) 添加此参数
// 当 apiFormat="openai_chat" 时,请求会转发到 /v1/chat/completions
// 但该端点是 OpenAI 标准,不支持 ?beta=true 参数
if endpoint.contains("/v1/messages")
&& !endpoint.contains("/v1/chat/completions")
&& !endpoint.contains('?')
{
format!("{base}?beta=true")
} else {
base
}
base
}
fn add_auth_headers(&self, request: RequestBuilder, auth: &AuthInfo) -> RequestBuilder {
@@ -578,23 +561,20 @@ mod tests {
#[test]
fn test_build_url_anthropic() {
    let adapter = ClaudeAdapter::new();
    // /v1/messages is passed through as-is; no ?beta=true is appended anymore.
    let url = adapter.build_url("https://api.anthropic.com", "/v1/messages");
    assert_eq!(url, "https://api.anthropic.com/v1/messages");
}
#[test]
fn test_build_url_openrouter() {
    let adapter = ClaudeAdapter::new();
    // Relay bases keep their path prefix; no ?beta=true is appended anymore.
    let url = adapter.build_url("https://openrouter.ai/api", "/v1/messages");
    assert_eq!(url, "https://openrouter.ai/api/v1/messages");
}
#[test]
fn test_build_url_no_beta_for_other_endpoints() {
    // Endpoints other than /v1/messages are simply appended to the base URL.
    let url = ClaudeAdapter::new().build_url("https://api.anthropic.com", "/v1/complete");
    assert_eq!(url, "https://api.anthropic.com/v1/complete");
}
@@ -602,16 +582,20 @@ mod tests {
#[test]
fn test_build_url_preserve_existing_query() {
    // An endpoint that already carries a query string is left untouched.
    let url = ClaudeAdapter::new().build_url("https://api.anthropic.com", "/v1/messages?foo=bar");
    assert_eq!(url, "https://api.anthropic.com/v1/messages?foo=bar");
}
#[test]
fn test_build_url_no_beta_for_github_copilot() {
    // GitHub Copilot hosts never receive extra query parameters.
    let url = ClaudeAdapter::new().build_url("https://api.githubcopilot.com", "/v1/messages");
    assert_eq!(url, "https://api.githubcopilot.com/v1/messages");
}
#[test]
fn test_build_url_no_beta_for_openai_chat_completions() {
    // The OpenAI chat-completions endpoint (used by apiFormat="openai_chat"
    // providers such as Nvidia) is joined without any extra parameters.
    let url = ClaudeAdapter::new().build_url("https://integrate.api.nvidia.com", "/v1/chat/completions");
    assert_eq!(url, "https://integrate.api.nvidia.com/v1/chat/completions");
}
+110 -42
View File
@@ -14,6 +14,7 @@ use crate::error::AppError;
use crate::provider::Provider;
use crate::proxy::providers::transform::anthropic_to_openai;
use crate::proxy::providers::copilot_auth;
use crate::proxy::providers::transform_responses::anthropic_to_responses;
use crate::proxy::providers::{get_adapter, AuthInfo, AuthStrategy};
/// 健康状态枚举
@@ -225,6 +226,7 @@ impl StreamCheckService {
&model_to_test,
test_prompt,
request_timeout,
provider,
)
.await
}
@@ -318,37 +320,26 @@ impl StreamCheckService {
})
.unwrap_or("anthropic");
let is_full_url = provider
.meta
.as_ref()
.and_then(|meta| meta.is_full_url)
.unwrap_or(false);
let is_openai_chat = is_github_copilot || api_format == "openai_chat";
let is_openai_responses = !is_github_copilot && api_format == "openai_responses";
let url = Self::resolve_claude_stream_url(base, auth.strategy, api_format, is_full_url);
// URL:
// - GitHub Copilot: /chat/completions (no /v1 prefix)
// - OpenAI-compatible: /v1/chat/completions
// - Anthropic native: /v1/messages?beta=true
let url = if is_github_copilot {
format!("{base}/chat/completions")
} else if is_openai_chat {
if base.ends_with("/v1") {
format!("{base}/chat/completions")
} else {
format!("{base}/v1/chat/completions")
}
} else {
// ?beta=true is required by some relay services to verify request origin
if base.ends_with("/v1") {
format!("{base}/messages?beta=true")
} else {
format!("{base}/v1/messages?beta=true")
}
};
// Build from Anthropic-native shape first, then convert for OpenAI-compatible targets.
// Build from Anthropic-native shape first, then convert for configured targets.
let anthropic_body = json!({
"model": model,
"max_tokens": 1,
"messages": [{ "role": "user", "content": test_prompt }],
"stream": true
});
let body = if is_openai_chat {
let body = if is_openai_responses {
anthropic_to_responses(anthropic_body, Some(&provider.id))
.map_err(|e| AppError::Message(format!("Failed to build test request: {e}")))?
} else if is_openai_chat {
anthropic_to_openai(anthropic_body, Some(&provider.id))
.map_err(|e| AppError::Message(format!("Failed to build test request: {e}")))?
} else {
@@ -369,8 +360,8 @@ impl StreamCheckService {
.header("copilot-integration-id", copilot_auth::COPILOT_INTEGRATION_ID)
.header("x-github-api-version", copilot_auth::COPILOT_API_VERSION)
.header("openai-intent", "conversation-panel");
} else if is_openai_chat {
// OpenAI-compatible: Bearer auth + standard headers only
} else if is_openai_chat || is_openai_responses {
// OpenAI-compatible targets: Bearer auth + SSE headers only
request_builder = request_builder
.header("authorization", format!("Bearer {}", auth.api_key))
.header("content-type", "application/json")
@@ -455,18 +446,14 @@ impl StreamCheckService {
model: &str,
test_prompt: &str,
timeout: std::time::Duration,
provider: &Provider,
) -> Result<(u16, String), AppError> {
let base = base_url.trim_end_matches('/');
// Codex CLI 的 base_url 语义:base_url 是 API base(可能已包含 /v1 或其他自定义前缀),
// Responses 端点为 `/responses`。
//
// 兼容:如果 base_url 配成纯 origin(如 https://api.openai.com),则需要补 `/v1`。
// 优先尝试 `{base}/responses`,若 404 再回退 `{base}/v1/responses`。
let urls = if base.ends_with("/v1") {
vec![format!("{base}/responses")]
} else {
vec![format!("{base}/responses"), format!("{base}/v1/responses")]
};
let is_full_url = provider
.meta
.as_ref()
.and_then(|meta| meta.is_full_url)
.unwrap_or(false);
let urls = Self::resolve_codex_stream_urls(base_url, is_full_url);
// 解析模型名和推理等级 (支持 model@level 或 model#level 格式)
let (actual_model, reasoning_effort) = Self::parse_model_with_effort(model);
@@ -724,28 +711,51 @@ impl StreamCheckService {
}
}
#[cfg(test)]
fn resolve_claude_stream_url(
base_url: &str,
auth_strategy: AuthStrategy,
api_format: &str,
is_full_url: bool,
) -> String {
if is_full_url {
return base_url.to_string();
}
let base = base_url.trim_end_matches('/');
let is_github_copilot = auth_strategy == AuthStrategy::GitHubCopilot;
let is_openai_chat = is_github_copilot || api_format == "openai_chat";
if is_github_copilot {
format!("{base}/chat/completions")
} else if is_openai_chat {
} else if api_format == "openai_responses" {
if base.ends_with("/v1") {
format!("{base}/responses")
} else {
format!("{base}/v1/responses")
}
} else if api_format == "openai_chat" {
if base.ends_with("/v1") {
format!("{base}/chat/completions")
} else {
format!("{base}/v1/chat/completions")
}
} else if base.ends_with("/v1") {
format!("{base}/messages?beta=true")
format!("{base}/messages")
} else {
format!("{base}/v1/messages?beta=true")
format!("{base}/v1/messages")
}
}
/// Resolve the candidate URLs for the Codex streaming health check.
///
/// Full-URL mode uses `base_url` verbatim as the single candidate. Otherwise
/// `{base}/responses` is tried first; when the base does not already end with
/// `/v1`, `{base}/v1/responses` is added as a fallback candidate.
fn resolve_codex_stream_urls(base_url: &str, is_full_url: bool) -> Vec<String> {
    if is_full_url {
        return vec![base_url.to_string()];
    }
    let base = base_url.trim_end_matches('/');
    let mut candidates = vec![format!("{base}/responses")];
    if !base.ends_with("/v1") {
        // Origin-style base: also try the conventional /v1 prefix.
        candidates.push(format!("{base}/v1/responses"));
    }
    candidates
}
}
@@ -851,12 +861,25 @@ mod tests {
assert_eq!(bearer, AuthStrategy::Bearer);
}
#[test]
fn test_resolve_claude_stream_url_for_full_url_mode() {
    // Full-URL mode returns base_url verbatim regardless of api_format.
    let resolved = StreamCheckService::resolve_claude_stream_url(
        "https://relay.example/v1/chat/completions",
        AuthStrategy::Bearer,
        "openai_chat",
        true,
    );
    assert_eq!(resolved, "https://relay.example/v1/chat/completions");
}
#[test]
fn test_resolve_claude_stream_url_for_github_copilot() {
let url = StreamCheckService::resolve_claude_stream_url(
"https://api.githubcopilot.com",
AuthStrategy::GitHubCopilot,
"anthropic",
false,
);
assert_eq!(url, "https://api.githubcopilot.com/chat/completions");
@@ -868,19 +891,64 @@ mod tests {
"https://example.com/v1",
AuthStrategy::Bearer,
"openai_chat",
false,
);
assert_eq!(url, "https://example.com/v1/chat/completions");
}
#[test]
fn test_resolve_claude_stream_url_for_openai_responses() {
    // A base already ending in /v1 gets /responses appended directly.
    let resolved = StreamCheckService::resolve_claude_stream_url(
        "https://example.com/v1",
        AuthStrategy::Bearer,
        "openai_responses",
        false,
    );
    assert_eq!(resolved, "https://example.com/v1/responses");
}
#[test]
fn test_resolve_claude_stream_url_for_anthropic() {
    // Anthropic-native targets /v1/messages without the legacy ?beta=true.
    let url = StreamCheckService::resolve_claude_stream_url(
        "https://api.anthropic.com",
        AuthStrategy::Anthropic,
        "anthropic",
        false,
    );
    assert_eq!(url, "https://api.anthropic.com/v1/messages");
}
#[test]
fn test_resolve_codex_stream_urls_for_full_url_mode() {
    // Full-URL mode yields exactly one candidate: base_url itself.
    let candidates = StreamCheckService::resolve_codex_stream_urls(
        "https://relay.example/custom/responses",
        true,
    );
    assert_eq!(candidates, vec!["https://relay.example/custom/responses"]);
}
#[test]
fn test_resolve_codex_stream_urls_for_v1_base() {
    // A /v1 base produces a single candidate with no fallback.
    let candidates =
        StreamCheckService::resolve_codex_stream_urls("https://api.openai.com/v1", false);
    assert_eq!(candidates, vec!["https://api.openai.com/v1/responses"]);
}
#[test]
fn test_resolve_codex_stream_urls_for_origin_base() {
    // An origin-style base tries /responses first, then the /v1 fallback.
    let candidates =
        StreamCheckService::resolve_codex_stream_urls("https://api.openai.com", false);
    assert_eq!(
        candidates,
        vec![
            "https://api.openai.com/responses",
            "https://api.openai.com/v1/responses",
        ]
    );
}
}