mirror of
https://github.com/farion1231/cc-switch.git
synced 2026-03-29 23:29:10 +08:00
fix(proxy): wait for server shutdown before exiting app
The previous cleanup logic only sent a shutdown signal but didn't wait for the proxy server to actually stop. This caused a race condition where the app would exit before cleanup completed, leaving Live configs in an inconsistent state.

Changes:
- Add `server_handle` field to `ProxyServer` to track the spawned server task
- Modify `stop()` to wait for server task completion (5 s timeout)
- Add a 100 ms delay before process exit to ensure I/O flush
- Export `ProxyService` and fix test files that were missing the `proxy_service` field
This commit is contained in:
@@ -232,7 +232,7 @@ fn scan_cli_version(tool: &str) -> (Option<String>, Option<String>) {
     // 构建 PATH 环境变量,确保 node 可被找到
    let current_path = std::env::var("PATH").unwrap_or_default();
    let new_path = format!("{}:{}", path.display(), current_path);

    let output = Command::new(&tool_path)
        .arg("--version")
        .env("PATH", &new_path)
@@ -39,8 +39,8 @@ pub use mcp::{
 };
 pub use provider::{Provider, ProviderMeta};
 pub use services::{
-    ConfigService, EndpointLatency, McpService, PromptService, ProviderService, SkillService,
-    SpeedtestService,
+    ConfigService, EndpointLatency, McpService, PromptService, ProviderService, ProxyService,
+    SkillService, SpeedtestService,
 };
 pub use settings::{update_settings, AppSettings};
 pub use store::AppState;
@@ -697,6 +697,10 @@ pub fn run() {
     tauri::async_runtime::spawn(async move {
         cleanup_before_exit(&app_handle).await;
         log::info!("清理完成,退出应用");
+
+        // 短暂等待确保所有 I/O 操作(如数据库写入)刷新到磁盘
+        tokio::time::sleep(std::time::Duration::from_millis(100)).await;
+
         // 使用 std::process::exit 避免再次触发 ExitRequested
         std::process::exit(0);
     });
@@ -11,6 +11,7 @@ use axum::{
 use std::net::SocketAddr;
 use std::sync::Arc;
 use tokio::sync::{oneshot, RwLock};
+use tokio::task::JoinHandle;
 use tower_http::cors::{Any, CorsLayer};

 /// 代理服务器状态(共享)
@@ -29,6 +30,8 @@ pub struct ProxyServer {
     config: ProxyConfig,
     state: ProxyState,
     shutdown_tx: Arc<RwLock<Option<oneshot::Sender<()>>>>,
+    /// 服务器任务句柄,用于等待服务器实际关闭
+    server_handle: Arc<RwLock<Option<JoinHandle<()>>>>,
 }

 impl ProxyServer {
@@ -45,6 +48,7 @@ impl ProxyServer {
             config,
             state,
             shutdown_tx: Arc::new(RwLock::new(None)),
+            server_handle: Arc::new(RwLock::new(None)),
         }
     }
@@ -87,7 +91,7 @@ impl ProxyServer {

         // 启动服务器
         let state = self.state.clone();
-        tokio::spawn(async move {
+        let handle = tokio::spawn(async move {
             axum::serve(listener, app)
                 .with_graceful_shutdown(async {
                     shutdown_rx.await.ok();
@@ -100,6 +104,9 @@ impl ProxyServer {
             *state.start_time.write().await = None;
         });

+        // 保存服务器任务句柄
+        *self.server_handle.write().await = Some(handle);
+
         Ok(ProxyServerInfo {
             address: self.config.listen_address.clone(),
             port: self.config.listen_port,
@@ -108,12 +115,23 @@ impl ProxyServer {
     }

     pub async fn stop(&self) -> Result<(), ProxyError> {
+        // 1. 发送关闭信号
         if let Some(tx) = self.shutdown_tx.write().await.take() {
             let _ = tx.send(());
-            Ok(())
         } else {
-            Err(ProxyError::NotRunning)
+            return Err(ProxyError::NotRunning);
         }
+
+        // 2. 等待服务器任务结束(带 5 秒超时保护)
+        if let Some(handle) = self.server_handle.write().await.take() {
+            match tokio::time::timeout(std::time::Duration::from_secs(5), handle).await {
+                Ok(Ok(())) => log::info!("代理服务器已完全停止"),
+                Ok(Err(e)) => log::warn!("代理服务器任务异常终止: {e}"),
+                Err(_) => log::warn!("代理服务器停止超时(5秒),强制继续"),
+            }
+        }
+
+        Ok(())
     }

     pub async fn get_status(&self) -> ProxyStatus {
@@ -1,6 +1,6 @@
 use std::sync::Arc;

-use cc_switch_lib::{import_provider_from_deeplink, parse_deeplink_url, AppState, Database};
+use cc_switch_lib::{import_provider_from_deeplink, parse_deeplink_url, AppState, Database, ProxyService};

 #[path = "support.rs"]
 mod support;
@@ -16,8 +16,8 @@ fn deeplink_import_claude_provider_persists_to_db() {
     let request = parse_deeplink_url(url).expect("parse deeplink url");

     let db = Arc::new(Database::memory().expect("create memory db"));
-    let state = AppState { db: db.clone() };
+    let proxy_service = ProxyService::new(db.clone());
+    let state = AppState { db: db.clone(), proxy_service };

     let provider_id = import_provider_from_deeplink(&state, request.clone())
         .expect("import provider from deeplink");
@@ -53,8 +53,8 @@ fn deeplink_import_codex_provider_builds_auth_and_config() {
     let request = parse_deeplink_url(url).expect("parse deeplink url");

     let db = Arc::new(Database::memory().expect("create memory db"));
-    let state = AppState { db: db.clone() };
+    let proxy_service = ProxyService::new(db.clone());
+    let state = AppState { db: db.clone(), proxy_service };

     let provider_id = import_provider_from_deeplink(&state, request.clone())
         .expect("import provider from deeplink");
@@ -1,7 +1,7 @@
 use std::path::{Path, PathBuf};
 use std::sync::{Arc, Mutex, OnceLock};

-use cc_switch_lib::{update_settings, AppSettings, AppState, Database, MultiAppConfig};
+use cc_switch_lib::{update_settings, AppSettings, AppState, Database, MultiAppConfig, ProxyService};

 /// 为测试设置隔离的 HOME 目录,避免污染真实用户数据。
 pub fn ensure_test_home() -> &'static Path {
@@ -48,15 +48,17 @@ pub fn test_mutex() -> &'static Mutex<()> {

 /// 创建测试用的 AppState,包含一个空的数据库
 pub fn create_test_state() -> Result<AppState, Box<dyn std::error::Error>> {
-    let db = Database::init()?;
-    Ok(AppState { db: Arc::new(db) })
+    let db = Arc::new(Database::init()?);
+    let proxy_service = ProxyService::new(db.clone());
+    Ok(AppState { db, proxy_service })
 }

 /// 创建测试用的 AppState,并从 MultiAppConfig 迁移数据
 pub fn create_test_state_with_config(
     config: &MultiAppConfig,
 ) -> Result<AppState, Box<dyn std::error::Error>> {
-    let db = Database::init()?;
+    let db = Arc::new(Database::init()?);
     db.migrate_from_json(config)?;
-    Ok(AppState { db: Arc::new(db) })
+    let proxy_service = ProxyService::new(db.clone());
+    Ok(AppState { db, proxy_service })
 }
Reference in New Issue
Block a user