feat(webdav): follow-up 补齐自动同步与大文件防护 (#1043)

* feat(webdav): add robust auto sync with failure feedback

(cherry picked from commit bb6760124a62a964b36902c004e173534910728f)

* fix(webdav): enforce bounded download and extraction size

(cherry picked from commit 7777d6ec2b9bba07c8bbba9b04fe3ea6b15e0e79)

* fix(webdav): only show auto-sync callout for auto-source errors

* refactor(webdav): remove services->commands auto-sync dependency
This commit is contained in:
SaladDay
2026-02-15 20:58:17 +08:00
committed by GitHub
parent 508aa6070c
commit 20f62bf4f8
19 changed files with 777 additions and 104 deletions

View File

@@ -57,7 +57,7 @@ url = "2.5"
auto-launch = "0.5"
once_cell = "1.21.3"
base64 = "0.22"
rusqlite = { version = "0.31", features = ["bundled", "backup"] }
rusqlite = { version = "0.31", features = ["bundled", "backup", "hooks"] }
indexmap = { version = "2", features = ["serde"] }
rust_decimal = "1.33"
uuid = { version = "1.11", features = ["v4"] }

View File

@@ -1,8 +1,6 @@
#![allow(non_snake_case)]
use serde_json::{Value, json};
use std::future::Future;
use std::sync::OnceLock;
use serde_json::{json, Value};
use tauri::State;
use crate::commands::sync_support::{
@@ -13,8 +11,9 @@ use crate::services::webdav_sync as webdav_sync_service;
use crate::settings::{self, WebDavSyncSettings};
use crate::store::AppState;
fn persist_sync_error(settings: &mut WebDavSyncSettings, error: &AppError) {
fn persist_sync_error(settings: &mut WebDavSyncSettings, error: &AppError, source: &str) {
settings.status.last_error = Some(error.to_string());
settings.status.last_error_source = Some(source.to_string());
let _ = settings::update_webdav_sync_status(settings.status.clone());
}
@@ -57,20 +56,16 @@ fn resolve_password_for_request(
incoming
}
#[cfg(test)]
fn webdav_sync_mutex() -> &'static tokio::sync::Mutex<()> {
static LOCK: OnceLock<tokio::sync::Mutex<()>> = OnceLock::new();
LOCK.get_or_init(|| tokio::sync::Mutex::new(()))
webdav_sync_service::sync_mutex()
}
async fn run_with_webdav_lock<T, Fut>(operation: Fut) -> Result<T, AppError>
where
Fut: Future<Output = Result<T, AppError>>,
Fut: std::future::Future<Output = Result<T, AppError>>,
{
let result = {
let _guard = webdav_sync_mutex().lock().await;
operation.await
};
result
webdav_sync_service::run_with_sync_lock(operation).await
}
fn map_sync_result<T, F>(result: Result<T, AppError>, on_error: F) -> Result<T, String>
@@ -112,7 +107,9 @@ pub async fn webdav_sync_upload(state: State<'_, AppState>) -> Result<Value, Str
let mut settings = require_enabled_webdav_settings()?;
let result = run_with_webdav_lock(webdav_sync_service::upload(&db, &mut settings)).await;
map_sync_result(result, |error| persist_sync_error(&mut settings, error))
map_sync_result(result, |error| {
persist_sync_error(&mut settings, error, "manual")
})
}
#[tauri::command]
@@ -120,10 +117,11 @@ pub async fn webdav_sync_download(state: State<'_, AppState>) -> Result<Value, S
let db = state.db.clone();
let db_for_sync = db.clone();
let mut settings = require_enabled_webdav_settings()?;
let _auto_sync_suppression = crate::services::webdav_auto_sync::AutoSyncSuppressionGuard::new();
let sync_result = run_with_webdav_lock(webdav_sync_service::download(&db, &mut settings)).await;
let mut result = map_sync_result(sync_result, |error| {
persist_sync_error(&mut settings, error)
persist_sync_error(&mut settings, error, "manual")
})?;
// Post-download sync is best-effort: snapshot restore has already succeeded.
@@ -179,8 +177,8 @@ mod tests {
use crate::error::AppError;
use crate::settings::{AppSettings, WebDavSyncSettings};
use serial_test::serial;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::time::Duration;
#[tokio::test]
@@ -287,6 +285,7 @@ mod tests {
persist_sync_error(
&mut current,
&crate::error::AppError::Config("boom".to_string()),
"manual",
);
let after = crate::settings::get_webdav_sync_settings().expect("read webdav settings");
@@ -304,6 +303,7 @@ mod tests {
.contains("boom"),
"status error should be updated"
);
assert_eq!(after.status.last_error_source.as_deref(), Some("manual"));
}
#[test]

View File

@@ -37,7 +37,7 @@ pub use dao::OmoGlobalConfig;
use crate::config::get_app_config_dir;
use crate::error::AppError;
use rusqlite::Connection;
use rusqlite::{hooks::Action, Connection};
use serde::Serialize;
use std::sync::Mutex;
@@ -76,6 +76,17 @@ pub struct Database {
pub(crate) conn: Mutex<Connection>,
}
/// Installs a SQLite update hook that forwards row-level changes to the
/// auto-sync worker. Only INSERT/UPDATE/DELETE actions are forwarded; the
/// table name is passed so the worker can filter config tables.
fn register_db_change_hook(conn: &Connection) {
    conn.update_hook(Some(
        |action: Action, _database: &str, table: &str, _row_id: i64| {
            if matches!(
                action,
                Action::SQLITE_INSERT | Action::SQLITE_UPDATE | Action::SQLITE_DELETE
            ) {
                crate::services::webdav_auto_sync::notify_db_changed(table);
            }
        },
    ));
}
impl Database {
/// 初始化数据库连接并创建表
///
@@ -93,6 +104,7 @@ impl Database {
// 启用外键约束
conn.execute("PRAGMA foreign_keys = ON;", [])
.map_err(|e| AppError::Database(e.to_string()))?;
register_db_change_hook(&conn);
let db = Self {
conn: Mutex::new(conn),
@@ -111,6 +123,7 @@ impl Database {
// 启用外键约束
conn.execute("PRAGMA foreign_keys = ON;", [])
.map_err(|e| AppError::Database(e.to_string()))?;
register_db_change_hook(&conn);
let db = Self {
conn: Mutex::new(conn),

View File

@@ -702,6 +702,10 @@ pub fn run() {
}
let _tray = tray_builder.build(app)?;
crate::services::webdav_auto_sync::start_worker(
app_state.db.clone(),
app.handle().clone(),
);
// 将同一个实例注入到全局状态,避免重复创建导致的不一致
app.manage(app_state);

View File

@@ -11,6 +11,7 @@ pub mod speedtest;
pub mod stream_check;
pub mod usage_stats;
pub mod webdav;
pub mod webdav_auto_sync;
pub mod webdav_sync;
pub use config::ConfigService;

View File

@@ -8,6 +8,7 @@ use std::time::Duration;
use crate::error::AppError;
use crate::proxy::http_client;
use futures::StreamExt;
const DEFAULT_TIMEOUT_SECS: u64 = 30;
/// Timeout for large file transfers (PUT/GET of db.sql, skills.zip).
@@ -237,15 +238,7 @@ pub async fn put_bytes(
)
.send()
.await
.map_err(|e| {
webdav_transport_error(
"webdav.put_failed",
"PUT 请求",
"PUT request",
url,
&e,
)
})?;
.map_err(|e| webdav_transport_error("webdav.put_failed", "PUT 请求", "PUT request", url, &e))?;
if resp.status().is_success() {
return Ok(());
@@ -259,6 +252,7 @@ pub async fn put_bytes(
pub async fn get_bytes(
url: &str,
auth: &WebDavAuth,
max_bytes: usize,
) -> Result<Option<(Vec<u8>, Option<String>)>, AppError> {
let client = http_client::get();
let resp = apply_auth(
@@ -269,15 +263,7 @@ pub async fn get_bytes(
)
.send()
.await
.map_err(|e| {
webdav_transport_error(
"webdav.get_failed",
"GET 请求",
"GET request",
url,
&e,
)
})?;
.map_err(|e| webdav_transport_error("webdav.get_failed", "GET 请求", "GET request", url, &e))?;
if resp.status() == StatusCode::NOT_FOUND {
return Ok(None);
@@ -285,22 +271,29 @@ pub async fn get_bytes(
if !resp.status().is_success() {
return Err(webdav_status_error("GET", resp.status(), url));
}
ensure_content_length_within_limit(resp.headers(), max_bytes, url)?;
let etag = resp
.headers()
.get("etag")
.and_then(|v| v.to_str().ok())
.map(|s| s.to_string());
let bytes = resp
.bytes()
.await
.map_err(|e| {
let mut bytes = Vec::new();
let mut stream = resp.bytes_stream();
while let Some(chunk) = stream.next().await {
let chunk = chunk.map_err(|e| {
AppError::localized(
"webdav.response_read_failed",
format!("读取 WebDAV 响应失败: {e}"),
format!("Failed to read WebDAV response: {e}"),
)
})?;
Ok(Some((bytes.to_vec(), etag)))
if bytes.len().saturating_add(chunk.len()) > max_bytes {
return Err(response_too_large_error(url, max_bytes));
}
bytes.extend_from_slice(&chunk);
}
Ok(Some((bytes, etag)))
}
/// HEAD request to retrieve the ETag. Returns `None` on 404.
@@ -315,13 +308,7 @@ pub async fn head_etag(url: &str, auth: &WebDavAuth) -> Result<Option<String>, A
.send()
.await
.map_err(|e| {
webdav_transport_error(
"webdav.head_failed",
"HEAD 请求",
"HEAD request",
url,
&e,
)
webdav_transport_error("webdav.head_failed", "HEAD 请求", "HEAD request", url, &e)
})?;
if resp.status() == StatusCode::NOT_FOUND {
@@ -386,9 +373,7 @@ pub fn webdav_status_error(op: &str, status: StatusCode, url: &str) -> AppError
if matches!(status, StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN) {
if jgy {
zh.push_str(
"。坚果云请使用「第三方应用密码」,并确认地址指向 /dav/ 下的目录。",
);
zh.push_str("。坚果云请使用「第三方应用密码」,并确认地址指向 /dav/ 下的目录。");
en.push_str(
". For Jianguoyun, use an app-specific password and ensure the URL points under /dav/.",
);
@@ -401,9 +386,7 @@ pub fn webdav_status_error(op: &str, status: StatusCode, url: &str) -> AppError
en.push_str(". Common Jianguoyun cause: URL is outside a writable /dav/ directory.");
} else if op == "MKCOL" && status == StatusCode::CONFLICT {
if jgy {
zh.push_str(
"。坚果云不允许自动创建顶层文件夹,请先在网页端手动创建后重试。",
);
zh.push_str("。坚果云不允许自动创建顶层文件夹,请先在网页端手动创建后重试。");
en.push_str(
". Jianguoyun does not allow creating top-level folders automatically; create it manually first.",
);
@@ -446,9 +429,47 @@ fn redact_url(raw: &str) -> String {
}
}
/// Builds the localized "response body exceeds limit" error.
///
/// The URL is redacted before being embedded so credentials/query values
/// never leak into user-facing messages or logs.
fn response_too_large_error(url: &str, max_bytes: usize) -> AppError {
    let max_mb = max_bytes / 1024 / 1024;
    AppError::localized(
        "webdav.response_too_large",
        // Fixed: the zh message previously read "上限({} MB: {}" — the
        // fullwidth paren was never closed and the "): " separator was lost.
        format!(
            "WebDAV 响应体超过上限({} MB): {}",
            max_mb,
            redact_url(url)
        ),
        format!(
            "WebDAV response body exceeds limit ({} MB): {}",
            max_mb,
            redact_url(url)
        ),
    )
}
/// Pre-checks a response's declared `Content-Length` against `max_bytes`.
///
/// A missing, non-ASCII, or unparseable header is tolerated (`Ok`): the
/// caller's streaming read still enforces the limit as bytes arrive. Only a
/// well-formed value that exceeds the limit is rejected up front.
fn ensure_content_length_within_limit(
    headers: &reqwest::header::HeaderMap,
    max_bytes: usize,
    url: &str,
) -> Result<(), AppError> {
    let declared = headers
        .get(reqwest::header::CONTENT_LENGTH)
        .and_then(|value| value.to_str().ok())
        .and_then(|raw| raw.parse::<u64>().ok());
    match declared {
        Some(length) if length > max_bytes as u64 => {
            Err(response_too_large_error(url, max_bytes))
        }
        _ => Ok(()),
    }
}
#[cfg(test)]
mod tests {
use super::*;
use reqwest::header::{HeaderMap, HeaderValue, CONTENT_LENGTH};
#[test]
fn build_remote_url_encodes_path_segments() {
@@ -498,10 +519,34 @@ mod tests {
#[test]
fn redact_url_hides_credentials_and_query_values() {
let redacted = redact_url("https://alice:secret@example.com:8443/dav?token=abc&foo=1");
assert_eq!(
redacted,
"https://example.com:8443/dav?[keys:foo,token]"
);
assert_eq!(redacted, "https://example.com:8443/dav?[keys:foo,token]");
assert!(!redacted.contains("secret"));
}
#[test]
fn ensure_content_length_within_limit_accepts_missing_or_small_values() {
let empty = HeaderMap::new();
assert!(
ensure_content_length_within_limit(&empty, 1024, "https://dav.example.com").is_ok()
);
let mut small = HeaderMap::new();
small.insert(CONTENT_LENGTH, HeaderValue::from_static("1024"));
assert!(
ensure_content_length_within_limit(&small, 1024, "https://dav.example.com").is_ok()
);
}
#[test]
fn ensure_content_length_within_limit_rejects_oversized_values() {
let mut large = HeaderMap::new();
large.insert(CONTENT_LENGTH, HeaderValue::from_static("2048"));
let err = ensure_content_length_within_limit(&large, 1024, "https://dav.example.com")
.expect_err("oversized response should be rejected");
assert!(
err.to_string().contains("too large") || err.to_string().contains("超过"),
"unexpected error: {err}"
);
}
}

View File

@@ -0,0 +1,277 @@
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::sync::OnceLock;
use std::time::{Duration, Instant};
use serde_json::json;
use tauri::{AppHandle, Emitter};
use tokio::sync::mpsc::error::TrySendError;
use tokio::sync::mpsc::{channel, Receiver, Sender};
use crate::error::AppError;
use crate::services::webdav_sync as webdav_sync_service;
use crate::settings::{self, WebDavSyncSettings};
const AUTO_SYNC_DEBOUNCE_MS: u64 = 1000;
pub(crate) const MAX_AUTO_SYNC_WAIT_MS: u64 = 10_000;
static DB_CHANGE_TX: OnceLock<Sender<String>> = OnceLock::new();
/// Depth counter for nested suppression scopes; non-zero means DB change
/// notifications are currently ignored (e.g. while a restore rewrites rows).
static AUTO_SYNC_SUPPRESS_DEPTH: AtomicUsize = AtomicUsize::new(0);

/// RAII scope that suppresses auto-sync triggers while alive. Guards nest:
/// suppression lifts only when every guard has been dropped.
pub(crate) struct AutoSyncSuppressionGuard;

impl AutoSyncSuppressionGuard {
    pub fn new() -> Self {
        AUTO_SYNC_SUPPRESS_DEPTH.fetch_add(1, Ordering::SeqCst);
        Self
    }
}

impl Drop for AutoSyncSuppressionGuard {
    fn drop(&mut self) {
        // Saturating decrement via CAS: never underflows past zero even if
        // drops somehow become unbalanced.
        let mut observed = AUTO_SYNC_SUPPRESS_DEPTH.load(Ordering::SeqCst);
        loop {
            let next = observed.saturating_sub(1);
            match AUTO_SYNC_SUPPRESS_DEPTH.compare_exchange(
                observed,
                next,
                Ordering::SeqCst,
                Ordering::SeqCst,
            ) {
                Ok(_) => break,
                Err(current) => observed = current,
            }
        }
    }
}

/// True while at least one [`AutoSyncSuppressionGuard`] is alive.
pub(crate) fn is_auto_sync_suppressed() -> bool {
    AUTO_SYNC_SUPPRESS_DEPTH.load(Ordering::SeqCst) > 0
}
/// Decides whether a change to `table` should trigger an auto-sync upload.
///
/// Only user-configuration tables are synced; high-churn operational tables
/// (request logs, health checks, …) must not cause uploads. The name is
/// trimmed and lowercased first so hook-reported casing never matters.
pub fn should_trigger_for_table(table: &str) -> bool {
    const SYNCED_TABLES: [&str; 8] = [
        "providers",
        "provider_endpoints",
        "mcp_servers",
        "prompts",
        "skills",
        "skill_repos",
        "settings",
        "proxy_config",
    ];
    let normalized = table.trim().to_ascii_lowercase();
    SYNCED_TABLES.contains(&normalized.as_str())
}
pub(crate) fn enqueue_change_signal(tx: &Sender<String>, table: &str) -> bool {
match tx.try_send(table.to_string()) {
Ok(()) => true,
Err(TrySendError::Full(_)) | Err(TrySendError::Closed(_)) => false,
}
}
/// Computes how long the worker may keep debouncing before it must flush.
///
/// Returns `None` once `MAX_AUTO_SYNC_WAIT_MS` has elapsed since the first
/// queued change (forcing a flush even under continuous events); otherwise
/// returns the debounce interval, capped by the time remaining in the window.
pub(crate) fn auto_sync_wait_duration(started_at: Instant, now: Instant) -> Option<Duration> {
    let max_wait = Duration::from_millis(MAX_AUTO_SYNC_WAIT_MS);
    let elapsed = now.saturating_duration_since(started_at);
    let remaining = max_wait.checked_sub(elapsed)?;
    if remaining.is_zero() {
        return None;
    }
    Some(remaining.min(Duration::from_millis(AUTO_SYNC_DEBOUNCE_MS)))
}
/// True only when WebDAV settings exist, sync is enabled, and the user opted
/// into automatic uploads.
fn should_run_auto_sync(settings: Option<&WebDavSyncSettings>) -> bool {
    settings.map_or(false, |sync| sync.enabled && sync.auto_sync)
}
/// Records an auto-sync failure on the settings status (tagged with source
/// "auto" so the UI can distinguish it from manual-sync errors) and persists
/// it. Persistence is best-effort: a failed status write is ignored.
fn persist_auto_sync_error(settings: &mut WebDavSyncSettings, error: &AppError) {
    let status = &mut settings.status;
    status.last_error = Some(error.to_string());
    status.last_error_source = Some(String::from("auto"));
    let _ = settings::update_webdav_sync_status(settings.status.clone());
}
fn emit_auto_sync_status_updated(app: &AppHandle, status: &str, error: Option<&str>) {
let payload = match error {
Some(message) => json!({
"source": "auto",
"status": status,
"error": message,
}),
None => json!({
"source": "auto",
"status": status,
}),
};
if let Err(err) = app.emit("webdav-sync-status-updated", payload) {
log::debug!("[WebDAV] failed to emit sync status update event: {err}");
}
}
/// Performs one auto-triggered upload under the global sync lock.
///
/// Re-reads the settings on every invocation so a user disabling sync (or
/// auto-sync) between the DB change and the debounced flush is honored.
/// On failure the error is persisted (tagged source "auto") BEFORE the event
/// is emitted, so the frontend's settings refetch sees the stored error.
/// Returns `Ok(())` when auto-sync is not applicable (a silent no-op).
async fn run_auto_sync_upload(
    db: &crate::database::Database,
    app: &AppHandle,
) -> Result<(), AppError> {
    let mut settings = settings::get_webdav_sync_settings();
    if !should_run_auto_sync(settings.as_ref()) {
        return Ok(());
    }
    // Defensive: should_run_auto_sync already implies Some, but avoid unwrap.
    let mut sync_settings = match settings.take() {
        Some(value) => value,
        None => return Ok(()),
    };
    // Serialize with manual sync operations via the shared service-level lock.
    let result = webdav_sync_service::run_with_sync_lock(webdav_sync_service::upload(
        db,
        &mut sync_settings,
    ))
    .await;
    match result {
        Ok(_) => {
            emit_auto_sync_status_updated(app, "success", None);
            Ok(())
        }
        Err(err) => {
            persist_auto_sync_error(&mut sync_settings, &err);
            emit_auto_sync_status_updated(app, "error", Some(&err.to_string()));
            Err(err)
        }
    }
}
/// Entry point invoked from the SQLite update hook on every row change.
///
/// Drops the notification when suppression is active (e.g. during a manual
/// restore) or when the table is not part of the synced configuration set.
/// Before the worker has started (no sender registered) this is a no-op, and
/// a full queue is fine — one pending signal already guarantees a sync.
pub fn notify_db_changed(table: &str) {
    if is_auto_sync_suppressed() || !should_trigger_for_table(table) {
        return;
    }
    if let Some(tx) = DB_CHANGE_TX.get() {
        let _ = enqueue_change_signal(tx, table);
    }
}
/// Spawns the background auto-sync worker. Idempotent: the first caller wins
/// the `OnceLock` and later calls return without spawning anything.
pub fn start_worker(db: Arc<crate::database::Database>, app: tauri::AppHandle) {
    // Buffer size 1 is deliberate — we only need a "dirty" flag, not a log of
    // every change; coalescing happens in the worker's debounce loop.
    let (tx, rx) = channel::<String>(1);
    // `set` fails exactly when a worker is already registered; the freshly
    // created channel is simply dropped in that case.
    if DB_CHANGE_TX.set(tx).is_err() {
        return;
    }
    tauri::async_runtime::spawn(async move {
        run_worker_loop(db, rx, app).await;
    });
}
/// Debouncing consumer loop for DB change signals.
///
/// Each received signal opens a flush window: further signals arriving within
/// `AUTO_SYNC_DEBOUNCE_MS` keep extending the wait (they are merged, not
/// re-queued), while `auto_sync_wait_duration` caps the total wait at
/// `MAX_AUTO_SYNC_WAIT_MS` so continuous writes cannot starve the upload.
/// The loop exits when the sender side is dropped (channel closed).
async fn run_worker_loop(
    db: Arc<crate::database::Database>,
    mut rx: Receiver<String>,
    app: tauri::AppHandle,
) {
    while let Some(first_table) = rx.recv().await {
        let started_at = Instant::now();
        let mut merged_count = 1usize;
        loop {
            // None => max-wait window exhausted: flush now.
            let Some(wait_for) = auto_sync_wait_duration(started_at, Instant::now()) else {
                break;
            };
            let timeout = tokio::time::timeout(wait_for, rx.recv()).await;
            match timeout {
                // Another change landed inside the window: coalesce it.
                Ok(Some(_)) => merged_count += 1,
                // Channel closed: shut the worker down.
                Ok(None) => return,
                // Debounce interval elapsed quietly: flush.
                Err(_) => break,
            }
        }
        log::debug!(
            "[WebDAV][AutoSync] Triggered by table={first_table}, merged_changes={merged_count}"
        );
        // Failures are logged only; the error was already persisted and
        // broadcast to the UI inside run_auto_sync_upload.
        if let Err(err) = run_auto_sync_upload(&db, &app).await {
            log::warn!("[WebDAV][AutoSync] Upload failed: {err}");
        }
    }
}
#[cfg(test)]
mod tests {
    use super::{
        auto_sync_wait_duration, enqueue_change_signal, is_auto_sync_suppressed,
        should_run_auto_sync, should_trigger_for_table, AutoSyncSuppressionGuard,
        MAX_AUTO_SYNC_WAIT_MS,
    };
    use crate::settings::WebDavSyncSettings;
    use std::time::{Duration, Instant};
    use tokio::sync::mpsc::channel;

    // Only configuration tables may trigger uploads; operational tables
    // (request logs, health checks) must be ignored.
    #[test]
    fn should_trigger_sync_for_config_tables_only() {
        assert!(should_trigger_for_table("providers"));
        assert!(should_trigger_for_table("settings"));
        assert!(!should_trigger_for_table("proxy_request_logs"));
        assert!(!should_trigger_for_table("provider_health"));
    }

    // The RAII guard suppresses triggers while alive and restores the
    // previous state on drop.
    #[test]
    fn suppression_guard_enables_and_restores_state() {
        assert!(!is_auto_sync_suppressed());
        {
            let _guard = AutoSyncSuppressionGuard::new();
            assert!(is_auto_sync_suppressed());
        }
        assert!(!is_auto_sync_suppressed());
    }

    // Once the max-wait window has elapsed, the worker must flush even if
    // change events keep arriving (None == "stop debouncing").
    #[test]
    fn max_wait_caps_flush_latency_for_continuous_events() {
        let started = Instant::now();
        let later = started + Duration::from_millis(MAX_AUTO_SYNC_WAIT_MS + 1);
        assert!(auto_sync_wait_duration(started, later).is_none());
    }

    // With a buffer of 1, a second signal is dropped (returns false) rather
    // than blocking — one pending signal is enough to schedule a sync.
    #[tokio::test]
    async fn enqueue_change_signal_drops_when_channel_is_full() {
        let (tx, _rx) = channel::<String>(1);
        assert!(enqueue_change_signal(&tx, "providers"));
        assert!(!enqueue_change_signal(&tx, "providers"));
    }

    // Auto-sync requires settings to exist AND both flags to be on.
    #[test]
    fn should_run_auto_sync_requires_enabled_and_auto_sync_flag() {
        assert!(!should_run_auto_sync(None));
        let disabled = WebDavSyncSettings {
            enabled: false,
            auto_sync: true,
            ..WebDavSyncSettings::default()
        };
        assert!(!should_run_auto_sync(Some(&disabled)));
        let auto_sync_off = WebDavSyncSettings {
            enabled: true,
            auto_sync: false,
            ..WebDavSyncSettings::default()
        };
        assert!(!should_run_auto_sync(Some(&auto_sync_off)));
        let enabled = WebDavSyncSettings {
            enabled: true,
            auto_sync: true,
            ..WebDavSyncSettings::default()
        };
        assert!(should_run_auto_sync(Some(&enabled)));
    }

    // Architectural guard: scans this module's own source for the literal
    // "crate::commands::" path. The needle is assembled with join("::") so
    // the test's own source text cannot match itself.
    #[test]
    fn service_layer_does_not_depend_on_commands_layer() {
        let source = include_str!("webdav_auto_sync.rs");
        let needle = ["crate", "commands", ""].join("::");
        assert!(
            !source.contains(&needle),
            "services layer should not depend on commands layer"
        );
    }
}

View File

@@ -5,7 +5,9 @@
use std::collections::BTreeMap;
use std::fs;
use std::future::Future;
use std::process::Command;
use std::sync::OnceLock;
use chrono::Utc;
use serde::{Deserialize, Serialize};
@@ -33,6 +35,21 @@ const REMOTE_DB_SQL: &str = "db.sql";
const REMOTE_SKILLS_ZIP: &str = "skills.zip";
const REMOTE_MANIFEST: &str = "manifest.json";
const MAX_DEVICE_NAME_LEN: usize = 64;
const MAX_MANIFEST_BYTES: usize = 1024 * 1024;
pub(super) const MAX_SYNC_ARTIFACT_BYTES: u64 = 512 * 1024 * 1024;
/// Process-wide async mutex serializing all WebDAV sync operations
/// (manual upload/download and the auto-sync worker), lazily initialized.
pub fn sync_mutex() -> &'static tokio::sync::Mutex<()> {
    static LOCK: OnceLock<tokio::sync::Mutex<()>> = OnceLock::new();
    LOCK.get_or_init(|| tokio::sync::Mutex::new(()))
}

/// Runs `operation` while holding the global sync lock.
///
/// The guard is held across the await (tokio::sync::Mutex, so this is safe)
/// and released as soon as the future completes, success or failure.
pub async fn run_with_sync_lock<T, Fut>(operation: Fut) -> Result<T, AppError>
where
    Fut: Future<Output = Result<T, AppError>>,
{
    let _guard = sync_mutex().lock().await;
    operation.await
}
fn localized(key: &'static str, zh: impl Into<String>, en: impl Into<String>) -> AppError {
AppError::localized(key, zh, en)
@@ -145,13 +162,15 @@ pub async fn download(
let auth = auth_for(settings);
let manifest_url = remote_file_url(settings, REMOTE_MANIFEST)?;
let (manifest_bytes, etag) = get_bytes(&manifest_url, &auth).await?.ok_or_else(|| {
localized(
"webdav.sync.remote_empty",
"远端没有可下载的同步数据",
"No downloadable sync data found on the remote.",
)
})?;
let (manifest_bytes, etag) = get_bytes(&manifest_url, &auth, MAX_MANIFEST_BYTES)
.await?
.ok_or_else(|| {
localized(
"webdav.sync.remote_empty",
"远端没有可下载的同步数据",
"No downloadable sync data found on the remote.",
)
})?;
let manifest: SyncManifest =
serde_json::from_slice(&manifest_bytes).map_err(|e| AppError::Json {
@@ -181,7 +200,7 @@ pub async fn fetch_remote_info(settings: &WebDavSyncSettings) -> Result<Option<V
let auth = auth_for(settings);
let manifest_url = remote_file_url(settings, REMOTE_MANIFEST)?;
let Some((bytes, _)) = get_bytes(&manifest_url, &auth).await? else {
let Some((bytes, _)) = get_bytes(&manifest_url, &auth, MAX_MANIFEST_BYTES).await? else {
return Ok(None);
};
@@ -214,6 +233,7 @@ fn persist_sync_success(
let status = WebDavSyncStatus {
last_sync_at: Some(Utc::now().timestamp()),
last_error: None,
last_error_source: None,
last_local_manifest_hash: Some(manifest_hash.clone()),
last_remote_manifest_hash: Some(manifest_hash),
last_remote_etag: etag,
@@ -319,14 +339,10 @@ fn sha256_hex(bytes: &[u8]) -> String {
}
fn detect_system_device_name() -> Option<String> {
let env_name = [
"CC_SWITCH_DEVICE_NAME",
"COMPUTERNAME",
"HOSTNAME",
]
.iter()
.filter_map(|key| std::env::var(key).ok())
.find_map(|value| normalize_device_name(&value));
let env_name = ["CC_SWITCH_DEVICE_NAME", "COMPUTERNAME", "HOSTNAME"]
.iter()
.filter_map(|key| std::env::var(key).ok())
.find_map(|value| normalize_device_name(&value));
if env_name.is_some() {
return env_name;
@@ -341,21 +357,26 @@ fn detect_system_device_name() -> Option<String> {
}
fn normalize_device_name(raw: &str) -> Option<String> {
let compact = raw.chars().fold(String::with_capacity(raw.len()), |mut acc, ch| {
if ch.is_whitespace() {
acc.push(' ');
} else if !ch.is_control() {
acc.push(ch);
}
acc
});
let compact = raw
.chars()
.fold(String::with_capacity(raw.len()), |mut acc, ch| {
if ch.is_whitespace() {
acc.push(' ');
} else if !ch.is_control() {
acc.push(ch);
}
acc
});
let normalized = compact.split_whitespace().collect::<Vec<_>>().join(" ");
let trimmed = normalized.trim();
if trimmed.is_empty() {
return None;
}
let limited = trimmed.chars().take(MAX_DEVICE_NAME_LEN).collect::<String>();
let limited = trimmed
.chars()
.take(MAX_DEVICE_NAME_LEN)
.collect::<String>();
if limited.is_empty() {
None
} else {
@@ -405,14 +426,18 @@ async fn download_and_verify(
format!("Manifest missing artifact: {artifact_name}"),
)
})?;
validate_artifact_size_limit(artifact_name, meta.size)?;
let url = remote_file_url(settings, artifact_name)?;
let (bytes, _) = get_bytes(&url, auth).await?.ok_or_else(|| {
localized(
"webdav.sync.remote_missing_artifact",
format!("远端缺少 artifact 文件: {artifact_name}"),
format!("Remote artifact file missing: {artifact_name}"),
)
})?;
let (bytes, _) = get_bytes(&url, auth, MAX_SYNC_ARTIFACT_BYTES as usize)
.await?
.ok_or_else(|| {
localized(
"webdav.sync.remote_missing_artifact",
format!("远端缺少 artifact 文件: {artifact_name}"),
format!("Remote artifact file missing: {artifact_name}"),
)
})?;
// Quick size check before expensive hash
if bytes.len() as u64 != meta.size {
@@ -503,6 +528,21 @@ fn auth_for(settings: &WebDavSyncSettings) -> WebDavAuth {
auth_from_credentials(&settings.username, &settings.password)
}
/// Rejects an artifact whose manifest-declared size exceeds
/// `MAX_SYNC_ARTIFACT_BYTES`, before any bytes are downloaded.
///
/// The boundary value itself (size == limit) is accepted.
fn validate_artifact_size_limit(artifact_name: &str, size: u64) -> Result<(), AppError> {
    if size > MAX_SYNC_ARTIFACT_BYTES {
        let max_mb = MAX_SYNC_ARTIFACT_BYTES / 1024 / 1024;
        return Err(localized(
            "webdav.sync.artifact_too_large",
            // Fixed: the zh message's fullwidth paren was never closed.
            format!("artifact {artifact_name} 超过下载上限({} MB)", max_mb),
            format!(
                "Artifact {artifact_name} exceeds download limit ({} MB)",
                max_mb
            ),
        ));
    }
    Ok(())
}
// ─── Tests ───────────────────────────────────────────────────
#[cfg(test)]
@@ -646,4 +686,19 @@ mod tests {
"manifest should not contain deviceId"
);
}
#[test]
fn validate_artifact_size_limit_rejects_oversized_artifacts() {
let err = validate_artifact_size_limit("skills.zip", MAX_SYNC_ARTIFACT_BYTES + 1)
.expect_err("artifact larger than limit should be rejected");
assert!(
err.to_string().contains("too large") || err.to_string().contains("超过"),
"unexpected error: {err}"
);
}
#[test]
fn validate_artifact_size_limit_accepts_limit_boundary() {
assert!(validate_artifact_size_limit("skills.zip", MAX_SYNC_ARTIFACT_BYTES).is_ok());
}
}

View File

@@ -10,10 +10,8 @@ use zip::DateTime;
use crate::error::AppError;
use crate::services::skill::SkillService;
use super::{io_context_localized, localized, REMOTE_SKILLS_ZIP};
use super::{io_context_localized, localized, MAX_SYNC_ARTIFACT_BYTES, REMOTE_SKILLS_ZIP};
/// Maximum total bytes allowed during zip extraction (512 MB).
const MAX_EXTRACT_BYTES: u64 = 512 * 1024 * 1024;
/// Maximum number of entries allowed in a zip archive.
const MAX_EXTRACT_ENTRIES: usize = 10_000;
@@ -92,8 +90,14 @@ pub(super) fn restore_skills_zip(raw: &[u8]) -> Result<(), AppError> {
if archive.len() > MAX_EXTRACT_ENTRIES {
return Err(localized(
"webdav.sync.skills_zip_too_many_entries",
format!("skills.zip 条目数过多({}),上限 {MAX_EXTRACT_ENTRIES}", archive.len()),
format!("skills.zip has too many entries ({}), limit is {MAX_EXTRACT_ENTRIES}", archive.len()),
format!(
"skills.zip 条目数过多({}),上限 {MAX_EXTRACT_ENTRIES}",
archive.len()
),
format!(
"skills.zip has too many entries ({}), limit is {MAX_EXTRACT_ENTRIES}",
archive.len()
),
));
}
@@ -118,15 +122,13 @@ pub(super) fn restore_skills_zip(raw: &[u8]) -> Result<(), AppError> {
fs::create_dir_all(parent).map_err(|e| AppError::io(parent, e))?;
}
let mut out = fs::File::create(&out_path).map_err(|e| AppError::io(&out_path, e))?;
let written = std::io::copy(&mut entry, &mut out).map_err(|e| AppError::io(&out_path, e))?;
total_bytes += written;
if total_bytes > MAX_EXTRACT_BYTES {
return Err(localized(
"webdav.sync.skills_zip_too_large",
format!("skills.zip 解压后体积超过上限({} MB", MAX_EXTRACT_BYTES / 1024 / 1024),
format!("skills.zip extracted size exceeds limit ({} MB)", MAX_EXTRACT_BYTES / 1024 / 1024),
));
}
let _written = copy_entry_with_total_limit(
&mut entry,
&mut out,
&mut total_bytes,
MAX_SYNC_ARTIFACT_BYTES,
&out_path,
)?;
}
let ssot = SkillService::get_ssot_dir().map_err(|e| {
@@ -327,10 +329,47 @@ fn mark_visited_dir(path: &Path, visited: &mut HashSet<PathBuf>) -> Result<bool,
Ok(visited.insert(canonical))
}
/// Streams one zip entry from `reader` to `writer` in 16 KiB chunks while
/// enforcing a cumulative extraction budget shared across all entries.
///
/// `total_bytes` is the running total for the whole archive and is updated in
/// place. The budget check runs BEFORE each write, so no bytes land on disk
/// once the limit would be exceeded. `out_path` is used only for error
/// context. Returns the number of bytes written for this entry.
fn copy_entry_with_total_limit<R: Read, W: Write>(
    reader: &mut R,
    writer: &mut W,
    total_bytes: &mut u64,
    max_total_bytes: u64,
    out_path: &Path,
) -> Result<u64, AppError> {
    let mut buffer = [0u8; 16 * 1024];
    let mut written = 0u64;
    loop {
        let n = match reader.read(&mut buffer) {
            Ok(0) => break,
            Ok(n) => n,
            // Retry on EINTR, matching the std::io::copy this replaced.
            Err(e) if e.kind() == std::io::ErrorKind::Interrupted => continue,
            Err(e) => return Err(AppError::io(out_path, e)),
        };
        if total_bytes.saturating_add(n as u64) > max_total_bytes {
            let max_mb = max_total_bytes / 1024 / 1024;
            return Err(localized(
                "webdav.sync.skills_zip_too_large",
                // Fixed: the zh message's fullwidth paren was never closed.
                format!("skills.zip 解压后体积超过上限({} MB)", max_mb),
                format!("skills.zip extracted size exceeds limit ({} MB)", max_mb),
            ));
        }
        writer
            .write_all(&buffer[..n])
            .map_err(|e| AppError::io(out_path, e))?;
        *total_bytes += n as u64;
        written += n as u64;
    }
    Ok(written)
}
#[cfg(test)]
mod tests {
use super::mark_visited_dir;
use super::{copy_entry_with_total_limit, mark_visited_dir};
use std::collections::HashSet;
use std::io::Cursor;
use std::path::Path;
use tempfile::tempdir;
#[test]
@@ -343,4 +382,29 @@ mod tests {
assert!(mark_visited_dir(&dir, &mut visited).expect("first visit"));
assert!(!mark_visited_dir(&dir, &mut visited).expect("second visit"));
}
#[test]
fn copy_entry_with_total_limit_rejects_oversized_stream_before_write() {
let mut reader = Cursor::new(vec![1u8; 16]);
let mut writer = Vec::new();
let mut total_bytes = 0u64;
let err = copy_entry_with_total_limit(
&mut reader,
&mut writer,
&mut total_bytes,
8,
Path::new("skills-extracted/file.bin"),
)
.expect_err("stream larger than limit should be rejected");
assert!(
err.to_string().contains("too large") || err.to_string().contains("超过"),
"unexpected error: {err}"
);
assert_eq!(
writer.len(),
0,
"should not write when the first chunk exceeds limit"
);
}
}

View File

@@ -72,6 +72,8 @@ pub struct WebDavSyncStatus {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_error: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_error_source: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_remote_etag: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_local_manifest_hash: Option<String>,
@@ -93,6 +95,8 @@ pub struct WebDavSyncSettings {
#[serde(default)]
pub enabled: bool,
#[serde(default)]
pub auto_sync: bool,
#[serde(default)]
pub base_url: String,
#[serde(default)]
pub username: String,
@@ -110,6 +114,7 @@ impl Default for WebDavSyncSettings {
fn default() -> Self {
Self {
enabled: false,
auto_sync: false,
base_url: String::new(),
username: String::new(),
password: String::new(),

View File

@@ -3,6 +3,7 @@ import { useTranslation } from "react-i18next";
import { motion, AnimatePresence } from "framer-motion";
import { toast } from "sonner";
import { invoke } from "@tauri-apps/api/core";
import { listen } from "@tauri-apps/api/event";
import { useQueryClient } from "@tanstack/react-query";
import {
Plus,
@@ -81,6 +82,12 @@ type View =
| "openclawTools"
| "openclawAgents";
// Payload of the backend `webdav-sync-status-updated` Tauri event.
// All fields are optional because the payload crosses the IPC boundary
// untyped and is cast with `as` on receipt.
interface WebDavSyncStatusUpdatedPayload {
  source?: string; // "auto" when emitted by the auto-sync worker
  status?: string; // "success" or "error" (as emitted by the backend)
  error?: string; // human-readable message, present when status is "error"
}
const DRAG_BAR_HEIGHT = isWindows() || isLinux() ? 0 : 28; // px
const HEADER_HEIGHT = 64; // px
const CONTENT_TOP_OFFSET = DRAG_BAR_HEIGHT + HEADER_HEIGHT;
@@ -292,6 +299,49 @@ function App() {
};
}, [queryClient]);
  // Subscribe to backend auto-sync status events: always refresh the cached
  // settings, and surface a toast only for auto-sync failures (manual sync
  // errors are reported inline by their own UI flow).
  useEffect(() => {
    let unsubscribe: (() => void) | undefined;
    // Guards against the async `listen` resolving after this effect has
    // already been cleaned up (e.g. fast unmount in StrictMode).
    let active = true;

    const setupListener = async () => {
      try {
        const off = await listen(
          "webdav-sync-status-updated",
          async (event) => {
            const payload = (event.payload ?? {}) as WebDavSyncStatusUpdatedPayload;
            // Invalidate regardless of outcome so lastError/lastSync refresh.
            await queryClient.invalidateQueries({ queryKey: ["settings"] });
            if (payload.source !== "auto" || payload.status !== "error") {
              return;
            }
            toast.error(
              t("settings.webdavSync.autoSyncFailedToast", {
                error: payload.error || t("common.unknown"),
              }),
            );
          },
        );
        if (!active) {
          // Effect already torn down — drop the subscription immediately.
          off();
          return;
        }
        unsubscribe = off;
      } catch (error) {
        // Subscription failure is non-fatal; sync status just won't toast.
        console.error(
          "[App] Failed to subscribe webdav-sync-status-updated event",
          error,
        );
      }
    };

    void setupListener();

    return () => {
      active = false;
      unsubscribe?.();
    };
  }, [queryClient, t]);
useEffect(() => {
const checkEnvOnStartup = async () => {
try {

View File

@@ -16,6 +16,7 @@ import { useQueryClient } from "@tanstack/react-query";
import { toast } from "sonner";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Switch } from "@/components/ui/switch";
import {
Select,
SelectContent,
@@ -162,6 +163,7 @@ export function WebdavSyncSection({ config }: WebdavSyncSectionProps) {
password: config?.password ?? "",
remoteRoot: config?.remoteRoot ?? "cc-switch-sync",
profile: config?.profile ?? "default",
autoSync: config?.autoSync ?? false,
}));
// Preset selector — derived from initial URL, updated on user selection
@@ -196,6 +198,7 @@ export function WebdavSyncSection({ config }: WebdavSyncSectionProps) {
password: config.password ?? "",
remoteRoot: config.remoteRoot ?? "cc-switch-sync",
profile: config.profile ?? "default",
autoSync: config.autoSync ?? false,
});
setPasswordTouched(false);
setPresetId(detectPreset(config.baseUrl ?? ""));
@@ -237,6 +240,16 @@ export function WebdavSyncSection({ config }: WebdavSyncSectionProps) {
}
}, [form.baseUrl, presetId]);
  // Toggle handler for the auto-sync switch: updates the form state, marks
  // the form dirty, and clears the transient "just saved" indicator together
  // with its pending reset timer so the save UI reflects the unsaved change.
  const handleAutoSyncChange = useCallback((checked: boolean) => {
    setForm((prev) => ({ ...prev, autoSync: checked }));
    setDirty(true);
    setJustSaved(false);
    if (justSavedTimerRef.current) {
      clearTimeout(justSavedTimerRef.current);
      justSavedTimerRef.current = null;
    }
  }, []);
const buildSettings = useCallback((): WebDavSyncSettings | null => {
const baseUrl = form.baseUrl.trim();
if (!baseUrl) return null;
@@ -247,6 +260,7 @@ export function WebdavSyncSection({ config }: WebdavSyncSectionProps) {
password: form.password,
remoteRoot: form.remoteRoot.trim() || "cc-switch-sync",
profile: form.profile.trim() || "default",
autoSync: form.autoSync,
};
}, [form]);
@@ -433,6 +447,9 @@ export function WebdavSyncSection({ config }: WebdavSyncSectionProps) {
const lastSyncDisplay = lastSyncAt
? new Date(lastSyncAt * 1000).toLocaleString()
: null;
const lastError = config?.status?.lastError?.trim();
const showAutoSyncError =
!!lastError && config?.status?.lastErrorSource === "auto";
// ─── Render ─────────────────────────────────────────────
@@ -559,6 +576,23 @@ export function WebdavSyncSection({ config }: WebdavSyncSectionProps) {
disabled={isLoading}
/>
</div>
<div className="flex items-start gap-4">
<label className="w-40 text-xs font-medium text-foreground shrink-0">
{t("settings.webdavSync.autoSync")}
<span className="block text-[10px] font-normal text-muted-foreground">
{t("settings.webdavSync.autoSyncHint")}
</span>
</label>
<div className="pt-1">
<Switch
checked={form.autoSync}
onCheckedChange={handleAutoSyncChange}
aria-label={t("settings.webdavSync.autoSync")}
disabled={isLoading}
/>
</div>
</div>
</div>
{/* Last sync time */}
@@ -567,6 +601,17 @@ export function WebdavSyncSection({ config }: WebdavSyncSectionProps) {
{t("settings.webdavSync.lastSync", { time: lastSyncDisplay })}
</p>
)}
{showAutoSyncError && (
<div className="rounded-lg border border-red-300/70 bg-red-50/80 px-3 py-2 text-xs text-red-900 dark:border-red-500/50 dark:bg-red-950/30 dark:text-red-200">
<p className="font-medium">
{t("settings.webdavSync.autoSyncLastErrorTitle")}
</p>
<p className="mt-1 break-all whitespace-pre-wrap">{lastError}</p>
<p className="mt-1 text-[11px] text-red-700/90 dark:text-red-300/80">
{t("settings.webdavSync.autoSyncLastErrorHint")}
</p>
</div>
)}
{/* Config buttons + save status */}
<div className="flex flex-wrap items-center gap-3 pt-2">

View File

@@ -283,6 +283,8 @@
"passwordPlaceholder": "App password",
"remoteRoot": "Remote Root Directory",
"profile": "Sync Profile Name",
"autoSync": "Auto Sync",
"autoSyncHint": "When enabled, each database change triggers an automatic WebDAV upload.",
"test": "Test Connection",
"testing": "Testing...",
"testSuccess": "Connection successful",
@@ -294,11 +296,14 @@
"uploading": "Uploading...",
"uploadSuccess": "Uploaded to WebDAV",
"uploadFailed": "Upload failed: {{error}}",
"autoSyncFailedToast": "Auto sync failed: {{error}}",
"download": "Download from Cloud",
"downloading": "Downloading...",
"downloadSuccess": "Downloaded and restored from WebDAV",
"downloadFailed": "Download failed: {{error}}",
"lastSync": "Last sync: {{time}}",
"autoSyncLastErrorTitle": "Last auto sync failed",
"autoSyncLastErrorHint": "Please check network or WebDAV settings. Auto sync will retry on future changes.",
"missingUrl": "Please enter the WebDAV server URL",
"presets": {
"label": "Provider",

View File

@@ -283,6 +283,8 @@
"passwordPlaceholder": "アプリパスワード",
"remoteRoot": "リモートルートディレクトリ",
"profile": "同期プロファイル名",
"autoSync": "自動同期",
"autoSyncHint": "有効にすると、データベース変更のたびに WebDAV へ自動アップロードします。",
"test": "接続テスト",
"testing": "テスト中...",
"testSuccess": "接続成功",
@@ -294,11 +296,14 @@
"uploading": "アップロード中...",
"uploadSuccess": "WebDAV にアップロードしました",
"uploadFailed": "アップロードに失敗しました:{{error}}",
"autoSyncFailedToast": "自動同期に失敗しました:{{error}}",
"download": "クラウドからダウンロード",
"downloading": "ダウンロード中...",
"downloadSuccess": "WebDAV からダウンロード・復元しました",
"downloadFailed": "ダウンロードに失敗しました:{{error}}",
"lastSync": "前回の同期:{{time}}",
"autoSyncLastErrorTitle": "前回の自動同期に失敗しました",
"autoSyncLastErrorHint": "ネットワークまたは WebDAV 設定を確認してください。次回の変更時に自動再試行されます。",
"missingUrl": "WebDAV サーバー URL を入力してください",
"presets": {
"label": "サービス",

View File

@@ -283,6 +283,8 @@
"passwordPlaceholder": "应用密码(坚果云请使用「第三方应用密码」)",
"remoteRoot": "远程根目录",
"profile": "同步配置名",
"autoSync": "自动同步",
"autoSyncHint": "开启后每次数据库变更都会自动上传到 WebDAV。",
"test": "测试连接",
"testing": "测试中...",
"testSuccess": "连接成功",
@@ -294,11 +296,14 @@
"uploading": "上传中...",
"uploadSuccess": "已上传到 WebDAV",
"uploadFailed": "上传失败:{{error}}",
"autoSyncFailedToast": "自动同步失败:{{error}}",
"download": "从云端下载",
"downloading": "下载中...",
"downloadSuccess": "已从 WebDAV 下载并恢复",
"downloadFailed": "下载失败:{{error}}",
"lastSync": "上次同步:{{time}}",
"autoSyncLastErrorTitle": "上次自动同步失败",
"autoSyncLastErrorHint": "请检查网络或 WebDAV 配置,系统会在后续变更时继续自动重试。",
"missingUrl": "请填写 WebDAV 服务器地址",
"presets": {
"label": "服务商",

View File

@@ -33,6 +33,7 @@ export const settingsSchema = z.object({
webdavSync: z
.object({
enabled: z.boolean().optional(),
autoSync: z.boolean().optional(),
baseUrl: z.string().trim().optional().or(z.literal("")),
username: z.string().trim().optional().or(z.literal("")),
password: z.string().optional(),
@@ -42,6 +43,7 @@ export const settingsSchema = z.object({
.object({
lastSyncAt: z.number().nullable().optional(),
lastError: z.string().nullable().optional(),
lastErrorSource: z.string().nullable().optional(),
lastRemoteEtag: z.string().nullable().optional(),
lastLocalManifestHash: z.string().nullable().optional(),
lastRemoteManifestHash: z.string().nullable().optional(),

View File

@@ -167,6 +167,7 @@ export interface VisibleApps {
export interface WebDavSyncStatus {
lastSyncAt?: number | null;
lastError?: string | null;
lastErrorSource?: string | null;
lastRemoteEtag?: string | null;
lastLocalManifestHash?: string | null;
lastRemoteManifestHash?: string | null;
@@ -175,6 +176,7 @@ export interface WebDavSyncStatus {
// WebDAV v2 同步配置
export interface WebDavSyncSettings {
enabled?: boolean;
autoSync?: boolean;
baseUrl?: string;
username?: string;
password?: string;

View File

@@ -34,6 +34,17 @@ vi.mock("@/components/ui/input", () => ({
Input: (props: any) => <input {...props} />,
}));
// Lightweight stand-in for the project's Switch component: renders a plain
// button carrying the ARIA switch role so tests can locate it with
// getByRole("switch", { name }) and toggle it via a click, which invokes
// onCheckedChange with the inverted checked value. `{...props}` is spread
// last so callers' aria-label / disabled attributes win over the defaults.
vi.mock("@/components/ui/switch", () => ({
  Switch: ({ checked, onCheckedChange, ...props }: any) => (
    <button
      role="switch"
      aria-checked={checked}
      onClick={() => onCheckedChange?.(!checked)}
      {...props}
    />
  ),
}));
vi.mock("@/components/ui/select", () => ({
Select: ({ value, onValueChange, children }: any) => (
<select
@@ -82,6 +93,7 @@ const baseConfig: WebDavSyncSettings = {
password: "secret",
remoteRoot: "cc-switch-sync",
profile: "default",
autoSync: false,
status: {},
};
@@ -128,6 +140,49 @@ describe("WebdavSyncSection", () => {
settingsApiMock.webdavSyncDownload.mockResolvedValue({ status: "downloaded" });
});
it("shows auto sync error callout when last auto sync failed", () => {
  // An error recorded by the automatic sync path (source === "auto") must
  // surface the callout with both the title and the raw error message.
  const failedAutoStatus = {
    lastError: "network timeout",
    lastErrorSource: "auto",
  };
  renderSection({ ...baseConfig, status: failedAutoStatus });

  expect(
    screen.getByText("settings.webdavSync.autoSyncLastErrorTitle"),
  ).toBeInTheDocument();
  expect(screen.getByText("network timeout")).toBeInTheDocument();
});
it("does not show auto sync error callout for manual sync errors", () => {
  // Errors produced by a manual upload/download are tagged "manual" and must
  // not trigger the auto-sync-specific callout.
  renderSection({
    ...baseConfig,
    status: { lastError: "manual upload failed", lastErrorSource: "manual" },
  });

  const calloutTitle = screen.queryByText(
    "settings.webdavSync.autoSyncLastErrorTitle",
  );
  expect(calloutTitle).not.toBeInTheDocument();
});
it("does not show auto sync error callout when source is missing", () => {
  // Legacy persisted errors (recorded before lastErrorSource existed) carry
  // no source tag; they must be treated as non-auto and stay hidden even
  // when auto sync is currently enabled.
  renderSection({
    ...baseConfig,
    autoSync: true,
    status: { lastError: "legacy error without source" },
  });

  const calloutTitle = screen.queryByText(
    "settings.webdavSync.autoSyncLastErrorTitle",
  );
  expect(calloutTitle).not.toBeInTheDocument();
});
it("shows validation error when saving without base url", async () => {
renderSection({ ...baseConfig, baseUrl: "" });
@@ -150,6 +205,7 @@ describe("WebdavSyncSection", () => {
baseUrl: "https://dav.example.com/dav/",
username: "alice",
password: "secret",
autoSync: false,
}),
false,
);
@@ -166,6 +222,24 @@ describe("WebdavSyncSection", () => {
);
});
it("saves auto sync as true after toggle", async () => {
  renderSection(baseConfig);

  // Flip the auto-sync switch, then persist the form via the save button.
  const autoSyncToggle = screen.getByRole("switch", {
    name: "settings.webdavSync.autoSync",
  });
  fireEvent.click(autoSyncToggle);
  const saveButton = screen.getByRole("button", {
    name: "settings.webdavSync.save",
  });
  fireEvent.click(saveButton);

  // The saved payload must carry autoSync: true; the second argument (false)
  // mirrors the original call's password-touched flag.
  await waitFor(() => {
    expect(settingsApiMock.webdavSyncSaveSettings).toHaveBeenCalledWith(
      expect.objectContaining({ autoSync: true }),
      false,
    );
  });
});
it("blocks upload when there are unsaved changes", async () => {
renderSection(baseConfig);

View File

@@ -209,4 +209,25 @@ describe("App integration with MSW", () => {
expect(toastErrorMock).not.toHaveBeenCalled();
expect(toastSuccessMock).toHaveBeenCalled();
});
it("shows toast when auto sync fails in background", async () => {
  const { default: App } = await import("@/App");
  renderApp(App);

  // Wait until the initial provider list has loaded so event listeners are
  // registered before we emit anything.
  await waitFor(() =>
    expect(screen.getByTestId("provider-list").textContent).toContain(
      "claude-1",
    ),
  );

  // Simulate the backend broadcasting a failed automatic sync run.
  const failurePayload = {
    source: "auto",
    status: "error",
    error: "network timeout",
  };
  emitTauriEvent("webdav-sync-status-updated", failurePayload);

  // The app should surface the failure to the user via an error toast.
  await waitFor(() => {
    expect(toastErrorMock).toHaveBeenCalled();
  });
});
});