diff --git a/src-tauri/src/claude_binary.rs b/src-tauri/src/claude_binary.rs index 031457f..dd38659 100644 --- a/src-tauri/src/claude_binary.rs +++ b/src-tauri/src/claude_binary.rs @@ -61,7 +61,10 @@ pub fn find_claude_binary(app_handle: &tauri::AppHandle) -> Result Result Vec { // Create command with enhanced PATH for production environments let mut cmd = Command::new(command_name); cmd.arg("claude"); - + // In production (DMG), we need to ensure proper PATH is set let enhanced_path = build_enhanced_path(); - debug!("Using enhanced PATH for {}: {}", command_name, enhanced_path); + debug!( + "Using enhanced PATH for {}: {}", + command_name, enhanced_path + ); cmd.env("PATH", enhanced_path); match cmd.output() { @@ -243,7 +252,10 @@ fn find_which_installations() -> Vec { // Convert /c/path to C:\path let windows_path = path.replace("/c/", "C:\\").replace("/", "\\"); windows_path - } else if path.starts_with("/") && path.len() > 3 && path.chars().nth(2) == Some('/') { + } else if path.starts_with("/") + && path.len() > 3 + && path.chars().nth(2) == Some('/') + { // Convert /X/path to X:\path where X is drive letter let drive = path.chars().nth(1).unwrap(); let rest = &path[3..]; @@ -284,7 +296,10 @@ fn find_which_installations() -> Vec { // Verify the path exists if !PathBuf::from(&final_path).exists() { - warn!("Path from '{}' does not exist: {}", command_name, final_path); + warn!( + "Path from '{}' does not exist: {}", + command_name, final_path + ); continue; } @@ -413,12 +428,13 @@ fn find_standard_installations() -> Vec { let mut path_cmd = Command::new("claude"); path_cmd.arg("--version"); path_cmd.env("PATH", build_enhanced_path()); - + if let Ok(output) = path_cmd.output() { if output.status.success() { debug!("claude is available in PATH"); // Combine stdout and stderr for robust version extraction - let mut combined: Vec = Vec::with_capacity(output.stdout.len() + output.stderr.len() + 1); + let mut combined: Vec = + Vec::with_capacity(output.stdout.len() + output.stderr.len() + 1); combined.extend_from_slice(&output.stdout); if !output.stderr.is_empty() { combined.extend_from_slice(b"\n"); @@ -443,12 +459,13 @@ fn get_claude_version(path: &str) -> Result, String> { // Use the helper function to create command with proper environment let mut cmd = create_command_with_env(path); cmd.arg("--version"); - + match cmd.output() { Ok(output) => { if output.status.success() { // Combine stdout and stderr for robust version extraction - let mut combined: Vec = Vec::with_capacity(output.stdout.len() + output.stderr.len() + 1); + let mut combined: Vec = + Vec::with_capacity(output.stdout.len() + output.stderr.len() + 1); combined.extend_from_slice(&output.stdout); if !output.stderr.is_empty() { combined.extend_from_slice(b"\n"); @@ -481,7 +498,8 @@ fn extract_version_from_output(stdout: &[u8]) -> Option { // - A dot, followed by // - One or more digits // - Optionally followed by pre-release/build metadata - let version_regex = regex::Regex::new(r"(\d+\.\d+\.\d+(?:-[a-zA-Z0-9.-]+)?(?:\+[a-zA-Z0-9.-]+)?)").ok()?; + let version_regex = + regex::Regex::new(r"(\d+\.\d+\.\d+(?:-[a-zA-Z0-9.-]+)?(?:\+[a-zA-Z0-9.-]+)?)").ok()?; if let Some(captures) = version_regex.captures(&output_str) { if let Some(version_match) = captures.get(1) { @@ -616,7 +634,8 @@ pub fn create_command_with_env(program: &str) -> Command { if program.contains("/.nvm/versions/node/") { if let Some(node_bin_dir) = std::path::Path::new(program).parent() { // Ensure the Node.js bin directory is in PATH - let current_path = 
cmd.get_envs() + let current_path = cmd + .get_envs() .find(|(k, _)| k.to_str() == Some("PATH")) .and_then(|(_, v)| v) .and_then(|v| v.to_str()) @@ -638,12 +657,12 @@ pub fn create_command_with_env(program: &str) -> Command { /// This is especially important for DMG/packaged applications where PATH may be limited fn build_enhanced_path() -> String { let mut paths = Vec::new(); - + // Start with current PATH if let Ok(current_path) = std::env::var("PATH") { paths.push(current_path); } - + // Add standard system paths that might be missing in packaged apps let system_paths = vec![ "/usr/local/bin", @@ -652,13 +671,13 @@ fn build_enhanced_path() -> String { "/opt/homebrew/bin", "/opt/homebrew/sbin", ]; - + for path in system_paths { if PathBuf::from(path).exists() { paths.push(path.to_string()); } } - + // Add user-specific paths if let Ok(home) = std::env::var("HOME") { let user_paths = vec![ @@ -671,13 +690,13 @@ fn build_enhanced_path() -> String { format!("{}/.config/yarn/global/node_modules/.bin", home), format!("{}/node_modules/.bin", home), ]; - + for path in user_paths { if PathBuf::from(&path).exists() { paths.push(path); } } - + // Add all NVM node versions let nvm_dir = PathBuf::from(&home).join(".nvm/versions/node"); if nvm_dir.exists() { @@ -693,13 +712,13 @@ fn build_enhanced_path() -> String { } } } - + // Remove duplicates while preserving order let mut seen = std::collections::HashSet::new(); let unique_paths: Vec = paths .into_iter() .filter(|path| seen.insert(path.clone())) .collect(); - + unique_paths.join(":") } diff --git a/src-tauri/src/claude_config.rs b/src-tauri/src/claude_config.rs index c83852e..5e13853 100644 --- a/src-tauri/src/claude_config.rs +++ b/src-tauri/src/claude_config.rs @@ -1,10 +1,10 @@ -use std::fs; -use std::path::PathBuf; -use std::collections::HashMap; +use crate::commands::relay_stations::RelayStation; +use dirs::home_dir; use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; -use dirs::home_dir; -use crate::commands::relay_stations::RelayStation; +use std::collections::HashMap; +use std::fs; +use std::path::PathBuf; /// Claude 配置文件结构 #[derive(Debug, Clone, Serialize, Deserialize)] @@ -39,11 +39,17 @@ pub struct StatusLineConfig { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ClaudeEnv { - #[serde(rename = "ANTHROPIC_AUTH_TOKEN", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "ANTHROPIC_AUTH_TOKEN", + skip_serializing_if = "Option::is_none" + )] pub anthropic_auth_token: Option, #[serde(rename = "ANTHROPIC_BASE_URL", skip_serializing_if = "Option::is_none")] pub anthropic_base_url: Option, - #[serde(rename = "CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC", + skip_serializing_if = "Option::is_none" + )] pub disable_nonessential_traffic: Option, // 使用 flatten 来支持任何其他环境变量 #[serde(flatten)] @@ -84,7 +90,7 @@ pub fn get_config_backup_path() -> Result { /// 读取 Claude 配置文件 pub fn read_claude_config() -> Result { let config_path = get_claude_config_path()?; - + if !config_path.exists() { // 如果配置文件不存在,创建默认配置 return Ok(ClaudeConfig { @@ -96,14 +102,14 @@ pub fn read_claude_config() -> Result { extra_fields: HashMap::new(), }); } - - let content = fs::read_to_string(&config_path) - .map_err(|e| format!("读取配置文件失败: {}", e))?; - + + let content = + fs::read_to_string(&config_path).map_err(|e| format!("读取配置文件失败: {}", e))?; + // 首先尝试解析为 JSON Value,以便处理可能的格式问题 - let mut json_value: Value = 
serde_json::from_str(&content) - .map_err(|e| format!("解析配置文件失败: {}", e))?; - + let mut json_value: Value = + serde_json::from_str(&content).map_err(|e| format!("解析配置文件失败: {}", e))?; + // 如果JSON解析成功,再转换为ClaudeConfig if let Some(obj) = json_value.as_object_mut() { // 确保必要的字段存在 @@ -111,44 +117,40 @@ pub fn read_claude_config() -> Result { obj.insert("env".to_string(), json!({})); } } - - serde_json::from_value(json_value) - .map_err(|e| format!("转换配置结构失败: {}", e)) + + serde_json::from_value(json_value).map_err(|e| format!("转换配置结构失败: {}", e)) } /// 写入 Claude 配置文件 pub fn write_claude_config(config: &ClaudeConfig) -> Result<(), String> { let config_path = get_claude_config_path()?; - + log::info!("尝试写入配置文件到: {:?}", config_path); - + // 确保目录存在 if let Some(parent) = config_path.parent() { log::info!("确保目录存在: {:?}", parent); - fs::create_dir_all(parent) - .map_err(|e| { - let error_msg = format!("创建配置目录失败: {}", e); - log::error!("{}", error_msg); - error_msg - })?; + fs::create_dir_all(parent).map_err(|e| { + let error_msg = format!("创建配置目录失败: {}", e); + log::error!("{}", error_msg); + error_msg + })?; } - - let content = serde_json::to_string_pretty(config) - .map_err(|e| { - let error_msg = format!("序列化配置失败: {}", e); - log::error!("{}", error_msg); - error_msg - })?; - + + let content = serde_json::to_string_pretty(config).map_err(|e| { + let error_msg = format!("序列化配置失败: {}", e); + log::error!("{}", error_msg); + error_msg + })?; + log::info!("准备写入内容:\n{}", content); - - fs::write(&config_path, &content) - .map_err(|e| { - let error_msg = format!("写入配置文件失败: {} (路径: {:?})", e, config_path); - log::error!("{}", error_msg); - error_msg - })?; - + + fs::write(&config_path, &content).map_err(|e| { + let error_msg = format!("写入配置文件失败: {} (路径: {:?})", e, config_path); + log::error!("{}", error_msg); + error_msg + })?; + log::info!("配置文件写入成功: {:?}", config_path); Ok(()) } @@ -157,12 +159,11 @@ pub fn write_claude_config(config: &ClaudeConfig) -> Result<(), String> { pub fn backup_claude_config() -> Result<(), String> { let config_path = get_claude_config_path()?; let backup_path = get_config_backup_path()?; - + if config_path.exists() { - fs::copy(&config_path, &backup_path) - .map_err(|e| format!("备份配置文件失败: {}", e))?; + fs::copy(&config_path, &backup_path).map_err(|e| format!("备份配置文件失败: {}", e))?; } - + Ok(()) } @@ -170,14 +171,13 @@ pub fn backup_claude_config() -> Result<(), String> { pub fn restore_claude_config() -> Result<(), String> { let config_path = get_claude_config_path()?; let backup_path = get_config_backup_path()?; - + if !backup_path.exists() { return Err("备份文件不存在".to_string()); } - - fs::copy(&backup_path, &config_path) - .map_err(|e| format!("恢复配置文件失败: {}", e))?; - + + fs::copy(&backup_path, &config_path).map_err(|e| format!("恢复配置文件失败: {}", e))?; + Ok(()) } @@ -185,20 +185,20 @@ pub fn restore_claude_config() -> Result<(), String> { pub fn apply_relay_station_to_config(station: &RelayStation) -> Result<(), String> { // 先备份当前配置 backup_claude_config()?; - + // 读取当前配置 let mut config = read_claude_config()?; - + // 仅更新这三个关键字段,保留其他所有配置不变: // 1. ANTHROPIC_BASE_URL config.env.anthropic_base_url = Some(station.api_url.clone()); - - // 2. ANTHROPIC_AUTH_TOKEN + + // 2. ANTHROPIC_AUTH_TOKEN config.env.anthropic_auth_token = Some(station.system_token.clone()); - + // 3. 
apiKeyHelper - 设置为 echo 格式 config.api_key_helper = Some(format!("echo '{}'", station.system_token)); - + // 如果是特定适配器,可能需要特殊处理 URL 格式 match station.adapter.as_str() { "packycode" => { @@ -209,10 +209,10 @@ pub fn apply_relay_station_to_config(station: &RelayStation) -> Result<(), Strin } _ => {} } - + // 写入更新后的配置 write_claude_config(&config)?; - + log::info!("已将中转站 {} 的 API 配置(apiKeyHelper, ANTHROPIC_BASE_URL, ANTHROPIC_AUTH_TOKEN)应用到 Claude 配置文件", station.name); Ok(()) } @@ -230,14 +230,14 @@ pub fn clear_relay_station_from_config() -> Result<(), String> { } else { None }; - + // 读取当前配置 let mut config = read_claude_config()?; - + // 清除 API URL 和 Token config.env.anthropic_base_url = None; config.env.anthropic_auth_token = None; - + // 恢复原始的 apiKeyHelper(如果有备份的话) if let Some(backup) = backup_config { config.api_key_helper = backup.api_key_helper; @@ -249,10 +249,10 @@ pub fn clear_relay_station_from_config() -> Result<(), String> { // 如果没有备份,清除 apiKeyHelper config.api_key_helper = None; } - + // 写入更新后的配置 write_claude_config(&config)?; - + log::info!("已清除 Claude 配置文件中的中转站设置"); Ok(()) } @@ -267,4 +267,4 @@ pub fn get_current_api_url() -> Result, String> { pub fn get_current_api_token() -> Result, String> { let config = read_claude_config()?; Ok(config.env.anthropic_auth_token) -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/agents.rs b/src-tauri/src/commands/agents.rs index 9b1fff5..3bdbb47 100644 --- a/src-tauri/src/commands/agents.rs +++ b/src-tauri/src/commands/agents.rs @@ -10,8 +10,8 @@ use std::io::{BufRead, BufReader}; use std::process::Stdio; use std::sync::Mutex; use tauri::{AppHandle, Emitter, Manager, State}; -use tauri_plugin_shell::ShellExt; use tauri_plugin_shell::process::CommandEvent; +use tauri_plugin_shell::ShellExt; use tokio::io::{AsyncBufReadExt, BufReader as TokioBufReader}; use tokio::process::Command; @@ -321,7 +321,6 @@ pub fn init_database(app: &AppHandle) -> SqliteResult { [], )?; - // Create settings table for app-wide settings conn.execute( "CREATE TABLE IF NOT EXISTS app_settings ( @@ -355,11 +354,9 @@ pub fn init_database(app: &AppHandle) -> SqliteResult { )?; // Initialize default model mappings if empty - let count: i64 = conn.query_row( - "SELECT COUNT(*) FROM model_mappings", - [], - |row| row.get(0), - ).unwrap_or(0); + let count: i64 = conn + .query_row("SELECT COUNT(*) FROM model_mappings", [], |row| row.get(0)) + .unwrap_or(0); if count == 0 { conn.execute( @@ -726,47 +723,49 @@ pub async fn execute_agent( // Get the agent from database let agent = get_agent(db.clone(), agent_id).await?; let execution_model = model.unwrap_or(agent.model.clone()); - + // Resolve model alias to actual model name using mappings - let resolved_model = get_model_by_alias(&db, &execution_model) - .unwrap_or_else(|_| { - warn!("Model alias '{}' not found, using as-is", execution_model); - execution_model.clone() - }); - + let resolved_model = get_model_by_alias(&db, &execution_model).unwrap_or_else(|_| { + warn!("Model alias '{}' not found, using as-is", execution_model); + execution_model.clone() + }); + info!("Resolved model: {} -> {}", execution_model, resolved_model); - + // Create .claude/settings.json with agent hooks if it doesn't exist if let Some(hooks_json) = &agent.hooks { let claude_dir = std::path::Path::new(&project_path).join(".claude"); let settings_path = claude_dir.join("settings.json"); - + // Create .claude directory if it doesn't exist if !claude_dir.exists() { std::fs::create_dir_all(&claude_dir) .map_err(|e| format!("Failed to create 
.claude directory: {}", e))?; info!("Created .claude directory at: {:?}", claude_dir); } - + // Check if settings.json already exists if !settings_path.exists() { // Parse the hooks JSON let hooks: serde_json::Value = serde_json::from_str(hooks_json) .map_err(|e| format!("Failed to parse agent hooks: {}", e))?; - + // Create a settings object with just the hooks let settings = serde_json::json!({ "hooks": hooks }); - + // Write the settings file let settings_content = serde_json::to_string_pretty(&settings) .map_err(|e| format!("Failed to serialize settings: {}", e))?; - + std::fs::write(&settings_path, settings_content) .map_err(|e| format!("Failed to write settings.json: {}", e))?; - - info!("Created settings.json with agent hooks at: {:?}", settings_path); + + info!( + "Created settings.json with agent hooks at: {:?}", + settings_path + ); } else { info!("settings.json already exists at: {:?}", settings_path); } @@ -800,7 +799,7 @@ pub async fn execute_agent( "--system-prompt".to_string(), agent.system_prompt.clone(), "--model".to_string(), - resolved_model.clone(), // Use resolved model name + resolved_model.clone(), // Use resolved model name "--output-format".to_string(), "stream-json".to_string(), "--verbose".to_string(), @@ -809,9 +808,34 @@ pub async fn execute_agent( // Execute based on whether we should use sidecar or system binary if should_use_sidecar(&claude_path) { - spawn_agent_sidecar(app, run_id, agent_id, agent.name.clone(), args, project_path, task, resolved_model, db, registry).await + spawn_agent_sidecar( + app, + run_id, + agent_id, + agent.name.clone(), + args, + project_path, + task, + resolved_model, + db, + registry, + ) + .await } else { - spawn_agent_system(app, run_id, agent_id, agent.name.clone(), claude_path, args, project_path, task, resolved_model, db, registry).await + spawn_agent_system( + app, + run_id, + agent_id, + agent.name.clone(), + claude_path, + args, + project_path, + task, + resolved_model, + db, + registry, + ) + .await } } @@ -830,25 +854,21 @@ fn create_agent_sidecar_command( .shell() .sidecar("claude-code") .map_err(|e| format!("Failed to create sidecar command: {}", e))?; - + // Add all arguments sidecar_cmd = sidecar_cmd.args(args); - + // Set working directory sidecar_cmd = sidecar_cmd.current_dir(project_path); - + // Pass through proxy environment variables if they exist (only uppercase) for (key, value) in std::env::vars() { - if key == "HTTP_PROXY" - || key == "HTTPS_PROXY" - || key == "NO_PROXY" - || key == "ALL_PROXY" - { + if key == "HTTP_PROXY" || key == "HTTPS_PROXY" || key == "NO_PROXY" || key == "ALL_PROXY" { debug!("Setting proxy env var for agent sidecar: {}={}", key, value); sidecar_cmd = sidecar_cmd.env(&key, &value); } } - + Ok(sidecar_cmd) } @@ -859,17 +879,17 @@ fn create_agent_system_command( project_path: &str, ) -> Command { let mut cmd = create_command_with_env(claude_path); - + // Add all arguments for arg in args { cmd.arg(arg); } - + cmd.current_dir(project_path) .stdin(Stdio::null()) .stdout(Stdio::piped()) .stderr(Stdio::piped()); - + cmd } @@ -899,7 +919,10 @@ async fn spawn_agent_sidecar( // Get the PID from child let pid = child.pid(); let now = chrono::Utc::now().to_rfc3339(); - info!("✅ Claude sidecar process spawned successfully with PID: {}", pid); + info!( + "✅ Claude sidecar process spawned successfully with PID: {}", + pid + ); // Update the database with PID and status { @@ -983,14 +1006,15 @@ async fn spawn_agent_sidecar( // Extract session ID from JSONL output if let Ok(json) = 
serde_json::from_str::(&line) { - if json.get("type").and_then(|t| t.as_str()) == Some("system") && - json.get("subtype").and_then(|s| s.as_str()) == Some("init") { + if json.get("type").and_then(|t| t.as_str()) == Some("system") + && json.get("subtype").and_then(|s| s.as_str()) == Some("init") + { if let Some(sid) = json.get("session_id").and_then(|s| s.as_str()) { if let Ok(mut current_session_id) = session_id_clone.lock() { if current_session_id.is_empty() { *current_session_id = sid.to_string(); info!("🔑 Extracted session ID: {}", sid); - + // Update database immediately with session ID if let Ok(conn) = Connection::open(&db_path_for_sidecar) { match conn.execute( @@ -1024,8 +1048,11 @@ async fn spawn_agent_sidecar( let _ = app_handle.emit("agent-error", &line); } CommandEvent::Terminated(payload) => { - info!("Claude sidecar process terminated with code: {:?}", payload.code); - + info!( + "Claude sidecar process terminated with code: {:?}", + payload.code + ); + // Get the session ID let extracted_session_id = if let Ok(sid) = session_id.lock() { sid.clone() @@ -1050,7 +1077,10 @@ async fn spawn_agent_sidecar( } } - info!("📖 Finished reading Claude sidecar events. Total lines: {}", line_count); + info!( + "📖 Finished reading Claude sidecar events. Total lines: {}", + line_count + ); }); Ok(run_id) @@ -1162,14 +1192,15 @@ async fn spawn_agent_system( // Extract session ID from JSONL output if let Ok(json) = serde_json::from_str::(&line) { // Claude Code uses "session_id" (underscore), not "sessionId" - if json.get("type").and_then(|t| t.as_str()) == Some("system") && - json.get("subtype").and_then(|s| s.as_str()) == Some("init") { + if json.get("type").and_then(|t| t.as_str()) == Some("system") + && json.get("subtype").and_then(|s| s.as_str()) == Some("init") + { if let Some(sid) = json.get("session_id").and_then(|s| s.as_str()) { if let Ok(mut current_session_id) = session_id_clone.lock() { if current_session_id.is_empty() { *current_session_id = sid.to_string(); info!("🔑 Extracted session ID: {}", sid); - + // Update database immediately with session ID if let Ok(conn) = Connection::open(&db_path_for_stdout) { match conn.execute( @@ -1182,7 +1213,10 @@ async fn spawn_agent_system( } } Err(e) => { - error!("❌ Failed to update session ID immediately: {}", e); + error!( + "❌ Failed to update session ID immediately: {}", + e + ); } } } @@ -1342,7 +1376,10 @@ async fn spawn_agent_system( // Update the run record with session ID and mark as completed - open a new connection if let Ok(conn) = Connection::open(&db_path_for_monitor) { - info!("🔄 Updating database with extracted session ID: {}", extracted_session_id); + info!( + "🔄 Updating database with extracted session ID: {}", + extracted_session_id + ); match conn.execute( "UPDATE agent_runs SET session_id = ?1, status = 'completed', completed_at = CURRENT_TIMESTAMP WHERE id = ?2", params![extracted_session_id, run_id], @@ -1359,7 +1396,10 @@ async fn spawn_agent_system( } } } else { - error!("❌ Failed to open database to update session ID for run {}", run_id); + error!( + "❌ Failed to open database to update session ID for run {}", + run_id + ); } // Cleanup will be handled by the cleanup_finished_processes function @@ -1419,10 +1459,8 @@ pub async fn list_running_sessions( // Cross-check with the process registry to ensure accuracy // Get actually running processes from the registry let registry_processes = registry.0.get_running_agent_processes()?; - let registry_run_ids: std::collections::HashSet = registry_processes - .iter() - 
.map(|p| p.run_id) - .collect(); + let registry_run_ids: std::collections::HashSet = + registry_processes.iter().map(|p| p.run_id).collect(); // Filter out any database entries that aren't actually running in the registry // This handles cases where processes crashed without updating the database @@ -1615,7 +1653,7 @@ pub async fn get_session_output( // Find the correct project directory by searching for the session file let projects_dir = claude_dir.join("projects"); - + // Check if projects directory exists if !projects_dir.exists() { log::error!("Projects directory not found at: {:?}", projects_dir); @@ -1624,15 +1662,18 @@ pub async fn get_session_output( // Search for the session file in all project directories let mut session_file_path = None; - log::info!("Searching for session file {} in all project directories", run.session_id); - + log::info!( + "Searching for session file {} in all project directories", + run.session_id + ); + if let Ok(entries) = std::fs::read_dir(&projects_dir) { for entry in entries.filter_map(Result::ok) { let path = entry.path(); if path.is_dir() { let dir_name = path.file_name().unwrap_or_default().to_string_lossy(); log::debug!("Checking project directory: {}", dir_name); - + let potential_session_file = path.join(format!("{}.jsonl", run.session_id)); if potential_session_file.exists() { log::info!("Found session file at: {:?}", potential_session_file); @@ -1652,7 +1693,11 @@ pub async fn get_session_output( match tokio::fs::read_to_string(&session_path).await { Ok(content) => Ok(content), Err(e) => { - log::error!("Failed to read session file {}: {}", session_path.display(), e); + log::error!( + "Failed to read session file {}: {}", + session_path.display(), + e + ); // Fallback to live output if file read fails let live_output = registry.0.get_live_output(run_id)?; Ok(live_output) @@ -1660,7 +1705,10 @@ pub async fn get_session_output( } } else { // If session file not found, try the old method as fallback - log::warn!("Session file not found for {}, trying legacy method", run.session_id); + log::warn!( + "Session file not found for {}, trying legacy method", + run.session_id + ); match read_session_jsonl(&run.session_id, &run.project_path).await { Ok(content) => Ok(content), Err(_) => { @@ -2166,7 +2214,7 @@ pub async fn load_agent_session_history( .join(".claude"); let projects_dir = claude_dir.join("projects"); - + if !projects_dir.exists() { log::error!("Projects directory not found at: {:?}", projects_dir); return Err("Projects directory not found".to_string()); @@ -2174,15 +2222,18 @@ pub async fn load_agent_session_history( // Search for the session file in all project directories let mut session_file_path = None; - log::info!("Searching for session file {} in all project directories", session_id); - + log::info!( + "Searching for session file {} in all project directories", + session_id + ); + if let Ok(entries) = std::fs::read_dir(&projects_dir) { for entry in entries.filter_map(Result::ok) { let path = entry.path(); if path.is_dir() { let dir_name = path.file_name().unwrap_or_default().to_string_lossy(); log::debug!("Checking project directory: {}", dir_name); - + let potential_session_file = path.join(format!("{}.jsonl", session_id)); if potential_session_file.exists() { log::info!("Found session file at: {:?}", potential_session_file); diff --git a/src-tauri/src/commands/ccr.rs b/src-tauri/src/commands/ccr.rs index a50cf3a..ffec337 100644 --- a/src-tauri/src/commands/ccr.rs +++ b/src-tauri/src/commands/ccr.rs @@ -1,10 +1,10 @@ -use 
serde::{Deserialize, Serialize}; -use std::process::{Command, Stdio}; use log::{debug, error, info}; -use std::net::TcpStream; -use std::time::Duration; use once_cell::sync::Lazy; +use serde::{Deserialize, Serialize}; +use std::net::TcpStream; +use std::process::{Command, Stdio}; use std::sync::Mutex; +use std::time::Duration; // 全局变量存储找到的 CCR 路径 static CCR_PATH: Lazy>> = Lazy::new(|| Mutex::new(None)); @@ -50,10 +50,12 @@ fn get_possible_ccr_paths() -> Vec { let mut paths: Vec = Vec::new(); // PATH 中的候选名(稍后用 PATH 遍历拼接,这里仅保留可直接执行名) paths.extend(candidate_binaries().into_iter().map(|s| s.to_string())); - + // 获取用户主目录 - let home = std::env::var("HOME").or_else(|_| std::env::var("USERPROFILE")).unwrap_or_default(); - + let home = std::env::var("HOME") + .or_else(|_| std::env::var("USERPROFILE")) + .unwrap_or_default(); + #[cfg(target_os = "macos")] { // macOS 特定路径 @@ -71,22 +73,28 @@ fn get_possible_ccr_paths() -> Vec { paths.push(format!("/usr/local/lib/node_modules/.bin/{}", bin)); paths.push(format!("/opt/homebrew/lib/node_modules/.bin/{}", bin)); } - + // 添加常见的 Node.js 版本路径 for version in &["v16", "v18", "v20", "v21", "v22"] { paths.push(format!("{}/.nvm/versions/node/{}.*/bin/ccr", home, version)); } } - + #[cfg(target_os = "windows")] { // Windows 特定路径 - let program_files = std::env::var("ProgramFiles").unwrap_or_else(|_| "C:\\Program Files".to_string()); - let program_files_x86 = std::env::var("ProgramFiles(x86)").unwrap_or_else(|_| "C:\\Program Files (x86)".to_string()); - let appdata = std::env::var("APPDATA").unwrap_or_else(|_| format!("{}\\AppData\\Roaming", home)); - + let program_files = + std::env::var("ProgramFiles").unwrap_or_else(|_| "C:\\Program Files".to_string()); + let program_files_x86 = std::env::var("ProgramFiles(x86)") + .unwrap_or_else(|_| "C:\\Program Files (x86)".to_string()); + let appdata = + std::env::var("APPDATA").unwrap_or_else(|_| format!("{}\\AppData\\Roaming", home)); + for bin in [ - "ccr.exe", "ccr.cmd", "claude-code-router.exe", "claude-code-router.cmd", + "ccr.exe", + "ccr.cmd", + "claude-code-router.exe", + "claude-code-router.cmd", ] { paths.push(bin.to_string()); paths.push(format!("{}\\npm\\{}", appdata, bin)); @@ -95,7 +103,7 @@ fn get_possible_ccr_paths() -> Vec { paths.push(format!("{}\\AppData\\Roaming\\npm\\{}", home, bin)); } } - + #[cfg(target_os = "linux")] { // Linux 特定路径 @@ -107,15 +115,19 @@ fn get_possible_ccr_paths() -> Vec { paths.push(format!("/usr/lib/node_modules/.bin/{}", bin)); } } - + paths } /// 获取扩展的 PATH 环境变量 fn get_extended_path() -> String { let mut extended_path = std::env::var("PATH").unwrap_or_default(); - let separator = if cfg!(target_os = "windows") { ";" } else { ":" }; - + let separator = if cfg!(target_os = "windows") { + ";" + } else { + ":" + }; + // 添加常见的额外路径 let additional_paths = if cfg!(target_os = "macos") { vec![ @@ -129,12 +141,9 @@ fn get_extended_path() -> String { } else if cfg!(target_os = "windows") { vec![] } else { - vec![ - "/usr/local/bin", - "/opt/bin", - ] + vec!["/usr/local/bin", "/opt/bin"] }; - + // 添加用户特定路径 if let Ok(home) = std::env::var("HOME") { let user_paths = if cfg!(target_os = "macos") { @@ -149,7 +158,9 @@ fn get_extended_path() -> String { for entry in entries.flatten() { let p = entry.path().join("bin"); if p.exists() { - if let Some(s) = p.to_str() { list.push(s.to_string()); } + if let Some(s) = p.to_str() { + list.push(s.to_string()); + } } } } @@ -161,7 +172,9 @@ fn get_extended_path() -> String { for entry in entries.flatten() { let p = entry.path().join("bin"); if p.exists() 
{ - if let Some(s) = p.to_str() { list.push(s.to_string()); } + if let Some(s) = p.to_str() { + list.push(s.to_string()); + } } } } @@ -172,16 +185,16 @@ fn get_extended_path() -> String { for entry in entries.flatten() { let p = entry.path().join("installation").join("bin"); if p.exists() { - if let Some(s) = p.to_str() { list.push(s.to_string()); } + if let Some(s) = p.to_str() { + list.push(s.to_string()); + } } } } list } else if cfg!(target_os = "windows") { if let Ok(appdata) = std::env::var("APPDATA") { - vec![ - format!("{}\\npm", appdata), - ] + vec![format!("{}\\npm", appdata)] } else { vec![] } @@ -191,7 +204,7 @@ fn get_extended_path() -> String { format!("{}/.npm-global/bin", home), ] }; - + for path in user_paths { if std::path::Path::new(&path).exists() && !extended_path.contains(&path) { extended_path.push_str(separator); @@ -199,7 +212,7 @@ fn get_extended_path() -> String { } } } - + // 添加系统额外路径 for path in additional_paths { if std::path::Path::new(path).exists() && !extended_path.contains(path) { @@ -207,7 +220,7 @@ fn get_extended_path() -> String { extended_path.push_str(path); } } - + extended_path } @@ -219,34 +232,40 @@ fn find_ccr_via_shell() -> Option { } else { "command -v ccr || which ccr || command -v claude-code-router || which claude-code-router" }; - + let shell = if cfg!(target_os = "windows") { "cmd" } else { "sh" }; - + let shell_args = if cfg!(target_os = "windows") { vec!["/C", shell_cmd] } else { vec!["-c", shell_cmd] }; - + if let Ok(output) = Command::new(shell) .args(&shell_args) .env("PATH", get_extended_path()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) - .output() { + .output() + { if output.status.success() { - let path = String::from_utf8_lossy(&output.stdout).lines().next().unwrap_or("").trim().to_string(); + let path = String::from_utf8_lossy(&output.stdout) + .lines() + .next() + .unwrap_or("") + .trim() + .to_string(); if !path.is_empty() && test_ccr_command(&path) { info!("Found ccr via shell: {}", path); return Some(path); } } } - + // 如果标准方法失败,尝试加载用户的 shell 配置 if !cfg!(target_os = "windows") { let home = std::env::var("HOME").ok()?; @@ -255,18 +274,27 @@ fn find_ccr_via_shell() -> Option { format!("{}/.zshrc", home), format!("{}/.profile", home), ]; - + for config in shell_configs { if std::path::Path::new(&config).exists() { - let cmd = format!("source {} && (command -v ccr || command -v claude-code-router)", config); + let cmd = format!( + "source {} && (command -v ccr || command -v claude-code-router)", + config + ); if let Ok(output) = Command::new("sh") .args(&["-c", &cmd]) .env("PATH", get_extended_path()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) - .output() { + .output() + { if output.status.success() { - let path = String::from_utf8_lossy(&output.stdout).lines().next().unwrap_or("").trim().to_string(); + let path = String::from_utf8_lossy(&output.stdout) + .lines() + .next() + .unwrap_or("") + .trim() + .to_string(); if !path.is_empty() && test_ccr_command(&path) { info!("Found ccr via shell config {}: {}", config, path); return Some(path); @@ -276,7 +304,7 @@ fn find_ccr_via_shell() -> Option { } } } - + None } @@ -288,7 +316,7 @@ fn find_ccr_path() -> Option { return cached.clone(); } } - + // 硬编码检查最常见的路径(针对打包应用的特殊处理) let home = std::env::var("HOME").unwrap_or_default(); let mut hardcoded_paths: Vec = Vec::new(); @@ -296,7 +324,7 @@ fn find_ccr_path() -> Option { hardcoded_paths.push(format!("/usr/local/bin/{}", bin)); hardcoded_paths.push(format!("/opt/homebrew/bin/{}", bin)); } - + // 动态添加 NVM 路径 let nvm_base = 
format!("{}/.nvm/versions/node", home); if std::path::Path::new(&nvm_base).exists() { @@ -313,9 +341,9 @@ fn find_ccr_path() -> Option { } } } - + info!("Checking hardcoded paths: {:?}", hardcoded_paths); - + for path in &hardcoded_paths { if std::path::Path::new(path).exists() { // 对于打包应用,存在即认为可用,不进行执行测试 @@ -326,10 +354,10 @@ fn find_ccr_path() -> Option { return Some(path.to_string()); } } - + // 获取扩展的 PATH let extended_path = get_extended_path(); - + // 首先尝试通过 shell 查找(最可靠) if let Some(path) = find_ccr_via_shell() { if let Ok(mut cached) = CCR_PATH.lock() { @@ -337,7 +365,7 @@ fn find_ccr_path() -> Option { } return Some(path); } - + // 然后尝试使用带有扩展 PATH 的 which/command -v 命令 for name in ["ccr", "claude-code-router"] { if let Ok(output) = Command::new("sh") @@ -346,9 +374,15 @@ fn find_ccr_path() -> Option { .arg(format!("command -v {} || which {}", name, name)) .stdout(Stdio::piped()) .stderr(Stdio::piped()) - .output() { + .output() + { if output.status.success() { - let path = String::from_utf8_lossy(&output.stdout).lines().next().unwrap_or("").trim().to_string(); + let path = String::from_utf8_lossy(&output.stdout) + .lines() + .next() + .unwrap_or("") + .trim() + .to_string(); if !path.is_empty() && test_ccr_command(&path) { info!("Found {} using shell which: {}", name, path); if let Ok(mut cached) = CCR_PATH.lock() { @@ -359,9 +393,13 @@ fn find_ccr_path() -> Option { } } } - + // 然后检查扩展后的 PATH - let separator = if cfg!(target_os = "windows") { ";" } else { ":" }; + let separator = if cfg!(target_os = "windows") { + ";" + } else { + ":" + }; for path_dir in extended_path.split(separator) { for name in candidate_binaries() { let candidate = if cfg!(target_os = "windows") { @@ -378,10 +416,10 @@ fn find_ccr_path() -> Option { } } } - + // 最后尝试预定义的路径列表 let possible_paths = get_possible_ccr_paths(); - + for path in &possible_paths { // 处理通配符路径 (仅限 Unix-like 系统) if path.contains('*') { @@ -408,8 +446,11 @@ fn find_ccr_path() -> Option { return Some(path.clone()); } } - - error!("CCR not found in any location. Original PATH: {:?}", std::env::var("PATH")); + + error!( + "CCR not found in any location. 
Original PATH: {:?}", + std::env::var("PATH") + ); error!("Extended PATH: {}", extended_path); error!("Searched paths: {:?}", possible_paths); None @@ -423,7 +464,7 @@ fn test_ccr_command(path: &str) -> bool { debug!("CCR path does not exist: {}", path); return false; } - + // 如果是符号链接,解析真实路径 let real_path = if path_obj.is_symlink() { match std::fs::read_link(path) { @@ -447,9 +488,12 @@ fn test_ccr_command(path: &str) -> bool { } else { path.to_string() }; - - debug!("Testing CCR command at: {} (real path: {})", path, real_path); - + + debug!( + "Testing CCR command at: {} (real path: {})", + path, real_path + ); + // 如果是 .js 文件,使用 node 来执行 if real_path.ends_with(".js") { let output = Command::new("node") @@ -459,7 +503,7 @@ fn test_ccr_command(path: &str) -> bool { .stdout(Stdio::piped()) .stderr(Stdio::piped()) .output(); - + match output { Ok(result) => { let success = result.status.success(); @@ -511,10 +555,10 @@ pub async fn check_ccr_installation() -> Result { #[tauri::command] pub async fn get_ccr_version() -> Result { let ccr_path = find_ccr_path().ok_or("CCR not found")?; - + // 尝试多个版本命令参数 let version_args = vec!["--version", "-v", "version"]; - + for arg in version_args { let output = if ccr_path.contains("node_modules") || ccr_path.contains(".nvm") { Command::new("sh") @@ -532,7 +576,7 @@ pub async fn get_ccr_version() -> Result { .stderr(Stdio::piped()) .output() }; - + if let Ok(result) = output { if result.status.success() { let version = String::from_utf8_lossy(&result.stdout); @@ -543,7 +587,7 @@ pub async fn get_ccr_version() -> Result { } } } - + Err("Unable to get CCR version".to_string()) } @@ -552,7 +596,7 @@ pub async fn get_ccr_version() -> Result { pub async fn get_ccr_service_status() -> Result { // 首先检查 ccr 二进制是否存在 let has_ccr_binary = check_ccr_installation().await.unwrap_or(false); - + if !has_ccr_binary { info!("CCR binary not found in PATH"); let original_path = std::env::var("PATH").unwrap_or_else(|_| "PATH not found".to_string()); @@ -567,7 +611,9 @@ pub async fn get_ccr_service_status() -> Result { for entry in entries.flatten() { let p = entry.path().join("bin"); if p.exists() { - if let Some(s) = p.to_str() { scan_dirs.push(s.to_string()); } + if let Some(s) = p.to_str() { + scan_dirs.push(s.to_string()); + } } } } @@ -580,7 +626,9 @@ pub async fn get_ccr_service_status() -> Result { for entry in entries.flatten() { let p = entry.path().join("bin"); if p.exists() { - if let Some(s) = p.to_str() { scan_dirs.push(s.to_string()); } + if let Some(s) = p.to_str() { + scan_dirs.push(s.to_string()); + } } } } @@ -590,7 +638,9 @@ pub async fn get_ccr_service_status() -> Result { for entry in entries.flatten() { let p = entry.path().join("installation").join("bin"); if p.exists() { - if let Some(s) = p.to_str() { scan_dirs.push(s.to_string()); } + if let Some(s) = p.to_str() { + scan_dirs.push(s.to_string()); + } } } } @@ -613,7 +663,8 @@ pub async fn get_ccr_service_status() -> Result { .env("PATH", get_extended_path()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) - .output() { + .output() + { Ok(output) => { if output.status.success() { let version = String::from_utf8_lossy(&output.stdout); @@ -623,7 +674,7 @@ pub async fn get_ccr_service_status() -> Result { format!("Direct execution FAILED: {}", stderr.trim()) } } - Err(e) => format!("Direct execution ERROR: {}", e) + Err(e) => format!("Direct execution ERROR: {}", e), } } else { "No candidate binary found in Node manager dirs".to_string() @@ -638,7 +689,9 @@ pub async fn get_ccr_service_status() -> Result 
{ let files: Vec = entries .filter_map(|e| e.ok()) .filter_map(|e| e.file_name().to_str().map(|s| s.to_string())) - .filter(|name| name.contains("ccr") || name.contains("claude-code-router")) + .filter(|name| { + name.contains("ccr") || name.contains("claude-code-router") + }) .collect(); if !files.is_empty() { scan_summary.push(format!("{} -> {:?}", dir, files)); @@ -662,7 +715,7 @@ pub async fn get_ccr_service_status() -> Result { direct_test, scan_summary.join("; ") ); - + return Ok(CcrServiceStatus { is_running: false, port: None, @@ -677,7 +730,7 @@ pub async fn get_ccr_service_status() -> Result { // 获取版本信息 let ccr_version = get_ccr_version().await.ok(); debug!("CCR version: {:?}", ccr_version); - + // 获取 CCR 路径 let ccr_path = find_ccr_path().ok_or("CCR not found")?; @@ -686,23 +739,23 @@ pub async fn get_ccr_service_status() -> Result { // 如果是 Node.js 安装的路径,可能需要使用 node 来执行 let mut c = Command::new("sh"); c.arg("-c") - .arg(format!("{} status", ccr_path)) - .env("PATH", get_extended_path()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()); + .arg(format!("{} status", ccr_path)) + .env("PATH", get_extended_path()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); c } else { let mut c = Command::new(&ccr_path); c.arg("status") - .env("PATH", get_extended_path()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()); + .env("PATH", get_extended_path()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); c }; - + info!("Executing ccr status command at path: {}", ccr_path); let output = cmd.output(); - + let output = match output { Ok(o) => o, Err(e) => { @@ -718,50 +771,56 @@ pub async fn get_ccr_service_status() -> Result { }); } }; - + let status_output = String::from_utf8_lossy(&output.stdout); let stderr_output = String::from_utf8_lossy(&output.stderr); - + info!("CCR status command exit code: {:?}", output.status.code()); info!("CCR status stdout length: {}", status_output.len()); info!("CCR status stdout: {}", status_output); info!("CCR status stderr: {}", stderr_output); - + // 检查状态 - 明确检测运行和停止状态 - let is_running = if status_output.contains("❌") || status_output.contains("Status: Not Running") { - // 明确显示未运行 - false - } else if status_output.contains("✅") || status_output.contains("Status: Running") { - // 明确显示运行中 - true - } else if status_output.contains("Process ID:") && status_output.contains("Port:") { - // 包含进程ID和端口信息,可能在运行 - true - } else { - // 默认认为未运行 - false - }; - + let is_running = + if status_output.contains("❌") || status_output.contains("Status: Not Running") { + // 明确显示未运行 + false + } else if status_output.contains("✅") || status_output.contains("Status: Running") { + // 明确显示运行中 + true + } else if status_output.contains("Process ID:") && status_output.contains("Port:") { + // 包含进程ID和端口信息,可能在运行 + true + } else { + // 默认认为未运行 + false + }; + info!("CCR service running detection - is_running: {}", is_running); - + // 尝试从输出中提取端口、端点和进程ID信息 let mut port = None; let mut endpoint = None; let mut process_id = None; - + if is_running { // 提取端口信息 - 支持多种格式 for line in status_output.lines() { info!("Parsing line for port: {}", line); - + // 检查是否包含端口信息 - if line.contains("Port:") || line.contains("port:") || line.contains("端口:") || line.contains("🌐") { + if line.contains("Port:") + || line.contains("port:") + || line.contains("端口:") + || line.contains("🌐") + { // 查找数字 - let numbers: String = line.chars() + let numbers: String = line + .chars() .skip_while(|c| !c.is_numeric()) .take_while(|c| c.is_numeric()) .collect(); - + if !numbers.is_empty() { if let Ok(port_num) = 
numbers.parse::() { port = Some(port_num); @@ -771,19 +830,24 @@ pub async fn get_ccr_service_status() -> Result { } } } - + // 提取API端点信息 - 支持多种格式 for line in status_output.lines() { info!("Parsing line for endpoint: {}", line); - if line.contains("API Endpoint:") || line.contains("Endpoint:") || - line.contains("http://") || line.contains("https://") || line.contains("📡") { + if line.contains("API Endpoint:") + || line.contains("Endpoint:") + || line.contains("http://") + || line.contains("https://") + || line.contains("📡") + { // 尝试提取URL if let Some(start) = line.find("http") { let url_part = &line[start..]; // 找到URL的结束位置(空格或行尾) let end = url_part.find(char::is_whitespace).unwrap_or(url_part.len()); let url = &url_part[..end]; - if url.contains(":") && (url.contains("localhost") || url.contains("127.0.0.1")) { + if url.contains(":") && (url.contains("localhost") || url.contains("127.0.0.1")) + { endpoint = Some(url.to_string()); info!("Successfully extracted endpoint: {}", url); break; @@ -791,17 +855,22 @@ pub async fn get_ccr_service_status() -> Result { } } } - + // 提取进程ID信息 - 支持多种格式 for line in status_output.lines() { info!("Parsing line for PID: {}", line); - if line.contains("Process ID:") || line.contains("PID:") || line.contains("pid:") || line.contains("🆔") { + if line.contains("Process ID:") + || line.contains("PID:") + || line.contains("pid:") + || line.contains("🆔") + { // 查找数字 - let numbers: String = line.chars() + let numbers: String = line + .chars() .skip_while(|c| !c.is_numeric()) .take_while(|c| c.is_numeric()) .collect(); - + if !numbers.is_empty() { if let Ok(pid_num) = numbers.parse::() { process_id = Some(pid_num); @@ -811,7 +880,7 @@ pub async fn get_ccr_service_status() -> Result { } } } - + // 如果没有找到具体信息,使用默认值 if port.is_none() { port = Some(3456); @@ -828,7 +897,8 @@ pub async fn get_ccr_service_status() -> Result { if !is_running { info!("Status command didn't detect running service, checking port 3456..."); // 尝试连接默认端口 - match TcpStream::connect_timeout(&"127.0.0.1:3456".parse().unwrap(), Duration::from_secs(1)) { + match TcpStream::connect_timeout(&"127.0.0.1:3456".parse().unwrap(), Duration::from_secs(1)) + { Ok(_) => { info!("Port 3456 is open, service appears to be running"); return Ok(CcrServiceStatus { @@ -846,7 +916,7 @@ pub async fn get_ccr_service_status() -> Result { } } } - + Ok(CcrServiceStatus { is_running, port, @@ -892,7 +962,7 @@ pub async fn start_ccr_service() -> Result { // 再次检查状态 let new_status = get_ccr_service_status().await?; - + if new_status.is_running { Ok(CcrServiceInfo { status: new_status, @@ -926,7 +996,7 @@ pub async fn stop_ccr_service() -> Result { // 检查新状态 let new_status = get_ccr_service_status().await?; - + Ok(CcrServiceInfo { status: new_status, message: "CCR service stopped successfully".to_string(), @@ -959,7 +1029,7 @@ pub async fn restart_ccr_service() -> Result { // 检查新状态 let new_status = get_ccr_service_status().await?; - + Ok(CcrServiceInfo { status: new_status, message: "CCR service restarted successfully".to_string(), @@ -998,12 +1068,9 @@ pub async fn open_ccr_ui() -> Result { /// 获取 CCR 配置路径 #[tauri::command] pub async fn get_ccr_config_path() -> Result { - let home_dir = dirs::home_dir() - .ok_or("Could not find home directory")?; - - let config_path = home_dir - .join(".claude-code-router") - .join("config.json"); - + let home_dir = dirs::home_dir().ok_or("Could not find home directory")?; + + let config_path = home_dir.join(".claude-code-router").join("config.json"); + 
Ok(config_path.to_string_lossy().to_string()) } diff --git a/src-tauri/src/commands/claude.rs b/src-tauri/src/commands/claude.rs index b020e6e..4d297cb 100644 --- a/src-tauri/src/commands/claude.rs +++ b/src-tauri/src/commands/claude.rs @@ -10,7 +10,6 @@ use tauri::{AppHandle, Emitter, Manager}; use tokio::process::{Child, Command}; use tokio::sync::Mutex; - /// Global state to track current Claude process pub struct ClaudeProcessState { pub current_process: Arc>>, @@ -267,22 +266,18 @@ fn create_command_with_env(program: &str) -> Command { } /// Creates a system binary command with the given arguments -fn create_system_command( - claude_path: &str, - args: Vec, - project_path: &str, -) -> Command { +fn create_system_command(claude_path: &str, args: Vec, project_path: &str) -> Command { let mut cmd = create_command_with_env(claude_path); - + // Add all arguments for arg in args { cmd.arg(arg); } - + cmd.current_dir(project_path) .stdout(Stdio::piped()) .stderr(Stdio::piped()); - + cmd } @@ -293,16 +288,31 @@ pub async fn watch_claude_project_directory( app_handle: tauri::AppHandle, ) -> Result<(), String> { use crate::file_watcher::FileWatcherState; - - log::info!("Starting to watch Claude project directory for project: {}", project_path); - + let project_path_buf = PathBuf::from(&project_path); + + // 支持直接传入位于 ~/.claude 或 ~/.claudia 下的特殊目录(例如智能会话) + if (project_path.contains("/.claude/") || project_path.contains("/.claudia/")) + && project_path_buf.exists() + { + let file_watcher_state = app_handle.state::(); + let path_str = project_path_buf.to_string_lossy().to_string(); + return file_watcher_state + .with_manager(|manager| manager.watch_path(&path_str, false)) + .map_err(|e| format!("Failed to watch Claude project directory: {}", e)); + } + + log::info!( + "Starting to watch Claude project directory for project: {}", + project_path + ); + let claude_dir = get_claude_dir().map_err(|e| e.to_string())?; let projects_dir = claude_dir.join("projects"); - + if !projects_dir.exists() { return Err("Claude projects directory does not exist".to_string()); } - + // 找到对应项目的目录 if let Ok(entries) = std::fs::read_dir(&projects_dir) { for entry in entries { @@ -315,17 +325,19 @@ pub async fn watch_claude_project_directory( // 找到了对应的项目目录,开始监控 let file_watcher_state = app_handle.state::(); let path_str = path.to_string_lossy().to_string(); - - return file_watcher_state.with_manager(|manager| { - manager.watch_path(&path_str, false) - }).map_err(|e| format!("Failed to watch Claude project directory: {}", e)); + + return file_watcher_state + .with_manager(|manager| manager.watch_path(&path_str, false)) + .map_err(|e| { + format!("Failed to watch Claude project directory: {}", e) + }); } } } } } } - + Err("Could not find Claude project directory for the given project path".to_string()) } @@ -336,16 +348,29 @@ pub async fn unwatch_claude_project_directory( app_handle: tauri::AppHandle, ) -> Result<(), String> { use crate::file_watcher::FileWatcherState; - - log::info!("Stopping watch on Claude project directory for project: {}", project_path); - + let project_path_buf = PathBuf::from(&project_path); + + // 对智能会话等位于 ~/.claude* 下的目录执行直接取消 + if project_path.contains("/.claude/") || project_path.contains("/.claudia/") { + let file_watcher_state = app_handle.state::(); + let path_str = project_path_buf.to_string_lossy().to_string(); + return file_watcher_state + .with_manager(|manager| manager.unwatch_path(&path_str)) + .map_err(|e| format!("Failed to stop watching Claude project directory: {}", e)); + } + + 
log::info!( + "Stopping watch on Claude project directory for project: {}", + project_path + ); + let claude_dir = get_claude_dir().map_err(|e| e.to_string())?; let projects_dir = claude_dir.join("projects"); - + if !projects_dir.exists() { return Ok(()); // 目录不存在,视为成功 } - + // 找到对应项目的目录 if let Ok(entries) = std::fs::read_dir(&projects_dir) { for entry in entries { @@ -358,17 +383,22 @@ pub async fn unwatch_claude_project_directory( // 找到了对应的项目目录,停止监控 let file_watcher_state = app_handle.state::(); let path_str = path.to_string_lossy().to_string(); - - return file_watcher_state.with_manager(|manager| { - manager.unwatch_path(&path_str) - }).map_err(|e| format!("Failed to stop watching Claude project directory: {}", e)); + + return file_watcher_state + .with_manager(|manager| manager.unwatch_path(&path_str)) + .map_err(|e| { + format!( + "Failed to stop watching Claude project directory: {}", + e + ) + }); } } } } } } - + Ok(()) } @@ -425,7 +455,7 @@ pub async fn list_projects() -> Result, String> { // List all JSONL files (sessions) in this project directory let mut sessions = Vec::new(); let mut last_session_time = created_at; // Default to project creation time - + if let Ok(session_entries) = fs::read_dir(&path) { for session_entry in session_entries.flatten() { let session_path = session_entry.path(); @@ -435,7 +465,7 @@ pub async fn list_projects() -> Result, String> { if let Some(session_id) = session_path.file_stem().and_then(|s| s.to_str()) { sessions.push(session_id.to_string()); - + // Get the modified time of this session file if let Ok(metadata) = fs::metadata(&session_path) { if let Ok(modified) = metadata.modified() { @@ -443,7 +473,7 @@ pub async fn list_projects() -> Result, String> { .duration_since(SystemTime::UNIX_EPOCH) .unwrap_or_default() .as_secs(); - + // Update last_session_time if this file is newer if modified_time > last_session_time { last_session_time = modified_time; @@ -648,7 +678,7 @@ pub async fn check_claude_version(_app: AppHandle) -> Result Result v1.cmp(v2), - (Some(_), None) => std::cmp::Ordering::Greater, - (None, Some(_)) => std::cmp::Ordering::Less, - (None, None) => std::cmp::Ordering::Equal, - } + .max_by(|a, b| match (&a.version, &b.version) { + (Some(v1), Some(v2)) => v1.cmp(v2), + (Some(_), None) => std::cmp::Ordering::Greater, + (None, Some(_)) => std::cmp::Ordering::Less, + (None, None) => std::cmp::Ordering::Equal, }) .unwrap(); // Safe because we checked is_empty() above @@ -866,8 +894,6 @@ pub async fn load_session_history( Ok(messages) } - - /// Execute a new interactive Claude Code session with streaming output #[tauri::command] pub async fn execute_claude_code( @@ -883,13 +909,13 @@ pub async fn execute_claude_code( ); let claude_path = find_claude_binary(&app)?; - + // Map opus-plan to the appropriate Claude CLI parameter let claude_model = match model.as_str() { "opus-plan" => "opusplan".to_string(), - _ => model.clone() + _ => model.clone(), }; - + let args = vec![ "-p".to_string(), prompt.clone(), @@ -920,13 +946,13 @@ pub async fn continue_claude_code( ); let claude_path = find_claude_binary(&app)?; - + // Map opus-plan to the appropriate Claude CLI parameter let claude_model = match model.as_str() { "opus-plan" => "opusplan".to_string(), - _ => model.clone() + _ => model.clone(), }; - + let args = vec![ "-c".to_string(), // Continue flag "-p".to_string(), @@ -960,13 +986,13 @@ pub async fn resume_claude_code( ); let claude_path = find_claude_binary(&app)?; - + // Map opus-plan to the appropriate Claude CLI parameter let 
claude_model = match model.as_str() { "opus-plan" => "opusplan".to_string(), - _ => model.clone() + _ => model.clone(), }; - + let args = vec![ "--resume".to_string(), session_id.clone(), @@ -1003,8 +1029,12 @@ pub async fn cancel_claude_execution( let registry = app.state::(); match registry.0.get_claude_session_by_id(sid) { Ok(Some(process_info)) => { - log::info!("Found process in registry for session {}: run_id={}, PID={}", - sid, process_info.run_id, process_info.pid); + log::info!( + "Found process in registry for session {}: run_id={}, PID={}", + sid, + process_info.run_id, + process_info.pid + ); match registry.0.kill_process(process_info.run_id).await { Ok(success) => { if success { @@ -1037,7 +1067,10 @@ pub async fn cancel_claude_execution( if let Some(mut child) = current_process.take() { // Try to get the PID before killing let pid = child.id(); - log::info!("Attempting to kill Claude process via ClaudeProcessState with PID: {:?}", pid); + log::info!( + "Attempting to kill Claude process via ClaudeProcessState with PID: {:?}", + pid + ); // Kill the process match child.kill().await { @@ -1046,8 +1079,11 @@ pub async fn cancel_claude_execution( killed = true; } Err(e) => { - log::error!("Failed to kill Claude process via ClaudeProcessState: {}", e); - + log::error!( + "Failed to kill Claude process via ClaudeProcessState: {}", + e + ); + // Method 3: If we have a PID, try system kill as last resort if let Some(pid) = pid { log::info!("Attempting system kill as last resort for PID: {}", pid); @@ -1060,7 +1096,7 @@ pub async fn cancel_claude_execution( .args(["-KILL", &pid.to_string()]) .output() }; - + match kill_result { Ok(output) if output.status.success() => { log::info!("Successfully killed process via system command"); @@ -1093,18 +1129,18 @@ pub async fn cancel_claude_execution( tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; let _ = app.emit(&format!("claude-complete:{}", sid), false); } - + // Also emit generic events for backward compatibility let _ = app.emit("claude-cancelled", true); tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; let _ = app.emit("claude-complete", false); - + if killed { log::info!("Claude process cancellation completed successfully"); } else if !attempted_methods.is_empty() { log::warn!("Claude process cancellation attempted but process may have already exited. 
Attempted methods: {:?}", attempted_methods); } - + Ok(()) } @@ -1131,9 +1167,15 @@ pub async fn get_claude_session_output( } /// Helper function to spawn Claude process and handle streaming -async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String, model: String, project_path: String) -> Result<(), String> { - use tokio::io::{AsyncBufReadExt, BufReader}; +async fn spawn_claude_process( + app: AppHandle, + mut cmd: Command, + prompt: String, + model: String, + project_path: String, +) -> Result<(), String> { use std::sync::Mutex; + use tokio::io::{AsyncBufReadExt, BufReader}; // Spawn the process let mut child = cmd @@ -1146,10 +1188,7 @@ async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String, // Get the child PID for logging let pid = child.id().unwrap_or(0); - log::info!( - "Spawned Claude process with PID: {:?}", - pid - ); + log::info!("Spawned Claude process with PID: {:?}", pid); // Create readers first (before moving child) let stdout_reader = BufReader::new(stdout); @@ -1184,7 +1223,7 @@ async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String, let mut lines = stdout_reader.lines(); while let Ok(Some(line)) = lines.next_line().await { log::debug!("Claude stdout: {}", line); - + // Parse the line to check for init message with session ID if let Ok(msg) = serde_json::from_str::(&line) { if msg["type"] == "system" && msg["subtype"] == "init" { @@ -1193,7 +1232,7 @@ async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String, if session_id_guard.is_none() { *session_id_guard = Some(claude_session_id.to_string()); log::info!("Extracted Claude session ID: {}", claude_session_id); - + // Now register with ProcessRegistry using Claude's session ID match registry_clone.register_claude_session( claude_session_id.to_string(), @@ -1215,12 +1254,12 @@ async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String, } } } - + // Store live output in registry if we have a run_id if let Some(run_id) = *run_id_holder_clone.lock().unwrap() { let _ = registry_clone.append_live_output(run_id, &line); } - + // Emit the line to the frontend with session isolation if we have session ID if let Some(ref session_id) = *session_id_holder_clone.lock().unwrap() { let _ = app_handle.emit(&format!("claude-output:{}", session_id), &line); @@ -1264,10 +1303,8 @@ async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String, // Add a small delay to ensure all messages are processed tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; if let Some(ref session_id) = *session_id_holder_clone3.lock().unwrap() { - let _ = app_handle_wait.emit( - &format!("claude-complete:{}", session_id), - status.success(), - ); + let _ = app_handle_wait + .emit(&format!("claude-complete:{}", session_id), status.success()); } // Also emit to the generic event for backward compatibility let _ = app_handle_wait.emit("claude-complete", status.success()); @@ -1277,8 +1314,8 @@ async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String, // Add a small delay to ensure all messages are processed tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; if let Some(ref session_id) = *session_id_holder_clone3.lock().unwrap() { - let _ = app_handle_wait - .emit(&format!("claude-complete:{}", session_id), false); + let _ = + app_handle_wait.emit(&format!("claude-complete:{}", session_id), false); } // Also emit to the generic event for backward compatibility let _ = 
app_handle_wait.emit("claude-complete", false); @@ -1298,7 +1335,6 @@ async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String, Ok(()) } - /// Lists files and directories in a given path #[tauri::command] pub async fn list_directory_contents(directory_path: String) -> Result, String> { @@ -2015,78 +2051,92 @@ pub async fn track_session_messages( /// Gets hooks configuration from settings at specified scope #[tauri::command] -pub async fn get_hooks_config(scope: String, project_path: Option) -> Result { - log::info!("Getting hooks config for scope: {}, project: {:?}", scope, project_path); +pub async fn get_hooks_config( + scope: String, + project_path: Option, +) -> Result { + log::info!( + "Getting hooks config for scope: {}, project: {:?}", + scope, + project_path + ); let settings_path = match scope.as_str() { - "user" => { - get_claude_dir() - .map_err(|e| e.to_string())? - .join("settings.json") - }, + "user" => get_claude_dir() + .map_err(|e| e.to_string())? + .join("settings.json"), "project" => { let path = project_path.ok_or("Project path required for project scope")?; PathBuf::from(path).join(".claude").join("settings.json") - }, + } "local" => { let path = project_path.ok_or("Project path required for local scope")?; - PathBuf::from(path).join(".claude").join("settings.local.json") - }, - _ => return Err("Invalid scope".to_string()) + PathBuf::from(path) + .join(".claude") + .join("settings.local.json") + } + _ => return Err("Invalid scope".to_string()), }; if !settings_path.exists() { - log::info!("Settings file does not exist at {:?}, returning empty hooks", settings_path); + log::info!( + "Settings file does not exist at {:?}, returning empty hooks", + settings_path + ); return Ok(serde_json::json!({})); } let content = fs::read_to_string(&settings_path) .map_err(|e| format!("Failed to read settings: {}", e))?; - - let settings: serde_json::Value = serde_json::from_str(&content) - .map_err(|e| format!("Failed to parse settings: {}", e))?; - - Ok(settings.get("hooks").cloned().unwrap_or(serde_json::json!({}))) + + let settings: serde_json::Value = + serde_json::from_str(&content).map_err(|e| format!("Failed to parse settings: {}", e))?; + + Ok(settings + .get("hooks") + .cloned() + .unwrap_or(serde_json::json!({}))) } /// Updates hooks configuration in settings at specified scope #[tauri::command] pub async fn update_hooks_config( - scope: String, + scope: String, hooks: serde_json::Value, - project_path: Option + project_path: Option, ) -> Result { - log::info!("Updating hooks config for scope: {}, project: {:?}", scope, project_path); + log::info!( + "Updating hooks config for scope: {}, project: {:?}", + scope, + project_path + ); let settings_path = match scope.as_str() { - "user" => { - get_claude_dir() - .map_err(|e| e.to_string())? - .join("settings.json") - }, + "user" => get_claude_dir() + .map_err(|e| e.to_string())? 
+ .join("settings.json"), "project" => { let path = project_path.ok_or("Project path required for project scope")?; let claude_dir = PathBuf::from(path).join(".claude"); fs::create_dir_all(&claude_dir) .map_err(|e| format!("Failed to create .claude directory: {}", e))?; claude_dir.join("settings.json") - }, + } "local" => { let path = project_path.ok_or("Project path required for local scope")?; let claude_dir = PathBuf::from(path).join(".claude"); fs::create_dir_all(&claude_dir) .map_err(|e| format!("Failed to create .claude directory: {}", e))?; claude_dir.join("settings.local.json") - }, - _ => return Err("Invalid scope".to_string()) + } + _ => return Err("Invalid scope".to_string()), }; // Read existing settings or create new let mut settings = if settings_path.exists() { let content = fs::read_to_string(&settings_path) .map_err(|e| format!("Failed to read settings: {}", e))?; - serde_json::from_str(&content) - .map_err(|e| format!("Failed to parse settings: {}", e))? + serde_json::from_str(&content).map_err(|e| format!("Failed to parse settings: {}", e))? } else { serde_json::json!({}) }; @@ -2097,7 +2147,7 @@ pub async fn update_hooks_config( // Write back with pretty formatting let json_string = serde_json::to_string_pretty(&settings) .map_err(|e| format!("Failed to serialize settings: {}", e))?; - + fs::write(&settings_path, json_string) .map_err(|e| format!("Failed to write settings: {}", e))?; @@ -2112,9 +2162,9 @@ pub async fn validate_hook_command(command: String) -> Result { if output.status.success() { @@ -2130,6 +2180,6 @@ pub async fn validate_hook_command(command: String) -> Result Err(format!("Failed to validate command: {}", e)) + Err(e) => Err(format!("Failed to validate command: {}", e)), } } diff --git a/src-tauri/src/commands/filesystem.rs b/src-tauri/src/commands/filesystem.rs index eaea375..901a36b 100644 --- a/src-tauri/src/commands/filesystem.rs +++ b/src-tauri/src/commands/filesystem.rs @@ -1,8 +1,8 @@ +use crate::file_watcher::FileWatcherState; use serde::{Deserialize, Serialize}; use std::fs; use std::path::Path; use tauri::State; -use crate::file_watcher::FileWatcherState; #[derive(Debug, Serialize, Deserialize, Clone)] pub struct FileNode { @@ -23,15 +23,13 @@ pub struct FileSystemChange { /// 读取文件内容 #[tauri::command] pub async fn read_file(path: String) -> Result { - fs::read_to_string(&path) - .map_err(|e| format!("Failed to read file: {}", e)) + fs::read_to_string(&path).map_err(|e| format!("Failed to read file: {}", e)) } /// 写入文件内容 #[tauri::command] pub async fn write_file(path: String, content: String) -> Result<(), String> { - fs::write(&path, content) - .map_err(|e| format!("Failed to write file: {}", e)) + fs::write(&path, content).map_err(|e| format!("Failed to write file: {}", e)) } /// 读取目录树结构 @@ -47,20 +45,21 @@ pub async fn read_directory_tree( } let max_depth = max_depth.unwrap_or(5); - let ignore_patterns = ignore_patterns.unwrap_or_else(|| vec![ - String::from("node_modules"), - String::from(".git"), - String::from("target"), - String::from("dist"), - String::from("build"), - String::from(".idea"), - String::from(".vscode"), - String::from("__pycache__"), - String::from(".DS_Store"), - ]); + let ignore_patterns = ignore_patterns.unwrap_or_else(|| { + vec![ + String::from("node_modules"), + String::from(".git"), + String::from("target"), + String::from("dist"), + String::from("build"), + String::from(".idea"), + String::from(".vscode"), + String::from("__pycache__"), + String::from(".DS_Store"), + ] + }); - read_directory_recursive(path, 0, 
max_depth, &ignore_patterns) - .map_err(|e| e.to_string()) + read_directory_recursive(path, 0, max_depth, &ignore_patterns).map_err(|e| e.to_string()) } fn read_directory_recursive( @@ -69,28 +68,29 @@ fn read_directory_recursive( max_depth: u32, ignore_patterns: &[String], ) -> std::io::Result { - let name = path.file_name() + let name = path + .file_name() .and_then(|n| n.to_str()) .unwrap_or("") .to_string(); let metadata = fs::metadata(path)?; - + let node = if metadata.is_dir() { let mut children = Vec::new(); - + if current_depth < max_depth { // Check if directory should be ignored - let should_ignore = ignore_patterns.iter().any(|pattern| { - &name == pattern || name.starts_with('.') - }); - + let should_ignore = ignore_patterns + .iter() + .any(|pattern| &name == pattern || name.starts_with('.')); + if !should_ignore { let entries = fs::read_dir(path)?; for entry in entries { let entry = entry?; let child_path = entry.path(); - + // Skip symlinks to avoid infinite loops if let Ok(meta) = entry.metadata() { if !meta.file_type().is_symlink() { @@ -105,25 +105,24 @@ fn read_directory_recursive( } } } - + // Sort children: directories first, then files, alphabetically - children.sort_by(|a, b| { - match (a.file_type.as_str(), b.file_type.as_str()) { - ("directory", "file") => std::cmp::Ordering::Less, - ("file", "directory") => std::cmp::Ordering::Greater, - _ => a.name.to_lowercase().cmp(&b.name.to_lowercase()), - } + children.sort_by(|a, b| match (a.file_type.as_str(), b.file_type.as_str()) { + ("directory", "file") => std::cmp::Ordering::Less, + ("file", "directory") => std::cmp::Ordering::Greater, + _ => a.name.to_lowercase().cmp(&b.name.to_lowercase()), }); } } - + FileNode { name, path: path.to_string_lossy().to_string(), file_type: String::from("directory"), children: Some(children), size: None, - modified: metadata.modified() + modified: metadata + .modified() .ok() .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()) .map(|d| d.as_secs()), @@ -135,13 +134,14 @@ fn read_directory_recursive( file_type: String::from("file"), children: None, size: Some(metadata.len()), - modified: metadata.modified() + modified: metadata + .modified() .ok() .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()) .map(|d| d.as_secs()), } }; - + Ok(node) } @@ -162,7 +162,7 @@ pub async fn search_files_by_name( let mut results = Vec::new(); search_recursive(base_path, &query_lower, &mut results, max_results)?; - + Ok(results) } @@ -176,8 +176,7 @@ fn search_recursive( return Ok(()); } - let entries = fs::read_dir(dir) - .map_err(|e| format!("Failed to read directory: {}", e))?; + let entries = fs::read_dir(dir).map_err(|e| format!("Failed to read directory: {}", e))?; for entry in entries { if results.len() >= max_results { @@ -186,7 +185,8 @@ fn search_recursive( let entry = entry.map_err(|e| format!("Failed to read entry: {}", e))?; let path = entry.path(); - let file_name = path.file_name() + let file_name = path + .file_name() .and_then(|n| n.to_str()) .unwrap_or("") .to_lowercase(); @@ -197,10 +197,11 @@ fn search_recursive( if path.is_dir() { // Skip hidden directories and common ignore patterns - if !file_name.starts_with('.') + if !file_name.starts_with('.') && file_name != "node_modules" && file_name != "target" - && file_name != "dist" { + && file_name != "dist" + { let _ = search_recursive(&path, query, results, max_results); } } @@ -217,10 +218,10 @@ pub async fn get_file_info(path: String) -> Result { return Err(format!("Path does not exist: {}", path.display())); } - let 
metadata = fs::metadata(path) - .map_err(|e| format!("Failed to get metadata: {}", e))?; + let metadata = fs::metadata(path).map_err(|e| format!("Failed to get metadata: {}", e))?; - let name = path.file_name() + let name = path + .file_name() .and_then(|n| n.to_str()) .unwrap_or("") .to_string(); @@ -228,18 +229,19 @@ pub async fn get_file_info(path: String) -> Result { Ok(FileNode { name, path: path.to_string_lossy().to_string(), - file_type: if metadata.is_dir() { - String::from("directory") - } else { - String::from("file") + file_type: if metadata.is_dir() { + String::from("directory") + } else { + String::from("file") }, children: None, - size: if metadata.is_file() { - Some(metadata.len()) - } else { - None + size: if metadata.is_file() { + Some(metadata.len()) + } else { + None }, - modified: metadata.modified() + modified: metadata + .modified() .ok() .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()) .map(|d| d.as_secs()), @@ -254,10 +256,8 @@ pub async fn watch_directory( recursive: Option, ) -> Result<(), String> { let recursive = recursive.unwrap_or(false); - - watcher_state.with_manager(|manager| { - manager.watch_path(&path, recursive) - }) + + watcher_state.with_manager(|manager| manager.watch_path(&path, recursive)) } /// 停止监听指定路径 @@ -266,9 +266,7 @@ pub async fn unwatch_directory( watcher_state: State<'_, FileWatcherState>, path: String, ) -> Result<(), String> { - watcher_state.with_manager(|manager| { - manager.unwatch_path(&path) - }) + watcher_state.with_manager(|manager| manager.unwatch_path(&path)) } /// 获取当前监听的路径列表 @@ -276,9 +274,7 @@ pub async fn unwatch_directory( pub async fn get_watched_paths( watcher_state: State<'_, FileWatcherState>, ) -> Result, String> { - watcher_state.with_manager(|manager| { - Ok(manager.get_watched_paths()) - }) + watcher_state.with_manager(|manager| Ok(manager.get_watched_paths())) } /// 获取文件树(简化版,供文件浏览器使用) @@ -302,9 +298,9 @@ pub async fn get_file_tree(project_path: String) -> Result, String ]; // 增加最大深度为 10,以支持更深的文件夹结构 - let root_node = read_directory_recursive(path, 0, 10, &ignore_patterns) - .map_err(|e| e.to_string())?; + let root_node = + read_directory_recursive(path, 0, 10, &ignore_patterns).map_err(|e| e.to_string())?; // Return children of root node if it has any Ok(root_node.children.unwrap_or_default()) -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/git.rs b/src-tauri/src/commands/git.rs index 4b0212e..a673d95 100644 --- a/src-tauri/src/commands/git.rs +++ b/src-tauri/src/commands/git.rs @@ -1,6 +1,6 @@ use serde::{Deserialize, Serialize}; -use std::process::Command; use std::path::Path; +use std::process::Command; #[derive(Debug, Serialize, Deserialize, Clone)] pub struct GitStatus { @@ -94,14 +94,13 @@ pub async fn get_git_status(path: String) -> Result { .output() .ok(); - let remote_url = remote_output - .and_then(|o| { - if o.status.success() { - Some(String::from_utf8_lossy(&o.stdout).trim().to_string()) - } else { - None - } - }); + let remote_url = remote_output.and_then(|o| { + if o.status.success() { + Some(String::from_utf8_lossy(&o.stdout).trim().to_string()) + } else { + None + } + }); let is_clean = staged.is_empty() && modified.is_empty() && untracked.is_empty(); @@ -161,7 +160,14 @@ fn get_tracking_info(path: &Path) -> Result<(u32, u32), String> { Ok((ahead, behind)) } -fn parse_git_status(status_text: &str) -> (Vec, Vec, Vec, Vec) { +fn parse_git_status( + status_text: &str, +) -> ( + Vec, + Vec, + Vec, + Vec, +) { let mut staged = Vec::new(); let mut modified = Vec::new(); 
let mut untracked = Vec::new(); @@ -197,7 +203,7 @@ fn parse_git_status(status_text: &str) -> (Vec, Vec staged.push(GitFileStatus { path: file_path, status: "added".to_string(), @@ -360,7 +366,7 @@ pub async fn get_git_branches(path: String) -> Result, String> { for line in branch_text.lines() { let is_current = line.starts_with('*'); let line = line.trim_start_matches('*').trim(); - + let parts: Vec<&str> = line.split_whitespace().collect(); if parts.is_empty() { continue; @@ -404,11 +410,11 @@ pub async fn get_git_diff( let mut cmd = Command::new("git"); cmd.arg("diff"); - + if staged.unwrap_or(false) { cmd.arg("--cached"); } - + if let Some(file) = file_path { cmd.arg(file); } @@ -440,19 +446,19 @@ mod tests { fn test_parse_git_status() { let status_text = "?? test-untracked.txt\nA staged-file.txt\n M modified-file.txt"; let (staged, modified, untracked, conflicted) = parse_git_status(status_text); - + println!("Untracked files: {:?}", untracked); println!("Staged files: {:?}", staged); println!("Modified files: {:?}", modified); - + assert_eq!(untracked.len(), 1); assert_eq!(untracked[0].path, "test-untracked.txt"); assert_eq!(untracked[0].status, "untracked"); - + assert_eq!(staged.len(), 1); assert_eq!(staged[0].path, "staged-file.txt"); - + assert_eq!(modified.len(), 1); assert_eq!(modified[0].path, "modified-file.txt"); } -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/language.rs b/src-tauri/src/commands/language.rs index 9279664..0de1e14 100644 --- a/src-tauri/src/commands/language.rs +++ b/src-tauri/src/commands/language.rs @@ -1,6 +1,6 @@ -use tauri::command; -use serde::{Deserialize, Serialize}; use crate::i18n; +use serde::{Deserialize, Serialize}; +use tauri::command; #[derive(Debug, Serialize, Deserialize)] pub struct LanguageSettings { @@ -14,14 +14,16 @@ pub async fn get_current_language() -> Result { #[command] pub async fn set_language(locale: String) -> Result<(), String> { - i18n::set_locale(&locale) - .map_err(|e| format!("Failed to set language: {}", e))?; - + i18n::set_locale(&locale).map_err(|e| format!("Failed to set language: {}", e))?; + log::info!("Language changed to: {}", locale); Ok(()) } #[command] pub async fn get_supported_languages() -> Result, String> { - Ok(i18n::SUPPORTED_LOCALES.iter().map(|&s| s.to_string()).collect()) -} \ No newline at end of file + Ok(i18n::SUPPORTED_LOCALES + .iter() + .map(|&s| s.to_string()) + .collect()) +} diff --git a/src-tauri/src/commands/mcp.rs b/src-tauri/src/commands/mcp.rs index 0ad2849..a73b416 100644 --- a/src-tauri/src/commands/mcp.rs +++ b/src-tauri/src/commands/mcp.rs @@ -751,7 +751,7 @@ pub async fn mcp_export_servers(app: AppHandle) -> Result Result { @@ -792,7 +792,12 @@ pub async fn mcp_export_servers(app: AppHandle) -> Result Vec { /// 测试单个节点速度(仅测试网络延时,不需要认证) async fn test_node_speed(node: &PackycodeNode) -> NodeSpeedTestResult { let client = Client::builder() - .timeout(Duration::from_secs(3)) // 减少超时时间 - .danger_accept_invalid_certs(true) // 接受自签名证书 + .timeout(Duration::from_secs(3)) // 减少超时时间 + .danger_accept_invalid_certs(true) // 接受自签名证书 .build() .unwrap_or_else(|_| Client::new()); - + let start_time = Instant::now(); - + // 使用 GET 请求到根路径,这是最简单的 ping 测试 // 不需要 token,只测试网络延迟 let url = format!("{}/", node.url.trim_end_matches('/')); - + match client .get(&url) .timeout(Duration::from_secs(3)) @@ -143,11 +143,11 @@ async fn test_node_speed(node: &PackycodeNode) -> NodeSpeedTestResult { { Ok(_response) => { let response_time = start_time.elapsed().as_millis() as u64; - + // 
只要能连接到服务器就算成功(不管状态码) // 因为我们只是测试延迟,不是测试 API 功能 - let success = response_time < 3000; // 小于 3 秒就算成功 - + let success = response_time < 3000; // 小于 3 秒就算成功 + NodeSpeedTestResult { node: PackycodeNode { response_time: Some(response_time), @@ -156,12 +156,16 @@ async fn test_node_speed(node: &PackycodeNode) -> NodeSpeedTestResult { }, response_time, success, - error: if success { None } else { Some("响应时间过长".to_string()) }, + error: if success { + None + } else { + Some("响应时间过长".to_string()) + }, } } Err(e) => { let response_time = start_time.elapsed().as_millis() as u64; - + // 如果是超时错误,特别标记 let error_msg = if e.is_timeout() { "连接超时".to_string() @@ -170,7 +174,7 @@ async fn test_node_speed(node: &PackycodeNode) -> NodeSpeedTestResult { } else { format!("网络错误: {}", e) }; - + NodeSpeedTestResult { node: PackycodeNode { response_time: Some(response_time), @@ -190,33 +194,29 @@ async fn test_node_speed(node: &PackycodeNode) -> NodeSpeedTestResult { pub async fn test_all_packycode_nodes() -> Result, String> { let nodes = get_all_nodes(); let mut results = Vec::new(); - + // 并发测试所有节点 - let futures: Vec<_> = nodes - .iter() - .map(|node| test_node_speed(node)) - .collect(); - + let futures: Vec<_> = nodes.iter().map(|node| test_node_speed(node)).collect(); + // 等待所有测试完成 for (i, future) in futures.into_iter().enumerate() { let result = future.await; - log::info!("节点 {} 测速结果: {}ms, 成功: {}", - nodes[i].name, - result.response_time, + log::info!( + "节点 {} 测速结果: {}ms, 成功: {}", + nodes[i].name, + result.response_time, result.success ); results.push(result); } - + // 按响应时间排序(成功的节点优先,然后按延迟排序) - results.sort_by(|a, b| { - match (a.success, b.success) { - (true, false) => std::cmp::Ordering::Less, - (false, true) => std::cmp::Ordering::Greater, - _ => a.response_time.cmp(&b.response_time), - } + results.sort_by(|a, b| match (a.success, b.success) { + (true, false) => std::cmp::Ordering::Less, + (false, true) => std::cmp::Ordering::Greater, + _ => a.response_time.cmp(&b.response_time), }); - + Ok(results) } @@ -225,41 +225,43 @@ pub async fn test_all_packycode_nodes() -> Result, Stri pub async fn auto_select_best_node() -> Result { let nodes = get_all_nodes(); let mut best_node: Option<(PackycodeNode, u64)> = None; - + // 只测试直连和备用节点,过滤掉紧急节点 let test_nodes: Vec<_> = nodes .into_iter() .filter(|n| matches!(n.node_type, NodeType::Direct | NodeType::Backup)) .collect(); - + log::info!("开始测试 {} 个节点...", test_nodes.len()); - + // 并发测试所有节点 let futures: Vec<_> = test_nodes .iter() .map(|node| test_node_speed(node)) .collect(); - + // 收集结果并找出最佳节点 for (i, future) in futures.into_iter().enumerate() { let result = future.await; - - log::info!("节点 {} - 延迟: {}ms, 可用: {}", - test_nodes[i].name, - result.response_time, + + log::info!( + "节点 {} - 延迟: {}ms, 可用: {}", + test_nodes[i].name, + result.response_time, result.success ); - + if result.success { match &best_node { None => { log::info!("初始最佳节点: {}", result.node.name); best_node = Some((result.node, result.response_time)); - }, + } Some((_, best_time)) if result.response_time < *best_time => { - log::info!("发现更快节点: {} ({}ms < {}ms)", - result.node.name, - result.response_time, + log::info!( + "发现更快节点: {} ({}ms < {}ms)", + result.node.name, + result.response_time, best_time ); best_node = Some((result.node, result.response_time)); @@ -268,12 +270,12 @@ pub async fn auto_select_best_node() -> Result { } } } - + match best_node { Some((node, time)) => { log::info!("最佳节点选择: {} (延迟: {}ms)", node.name, time); Ok(node) - }, + } None => { log::error!("没有找到可用的节点"); 
Err("没有找到可用的节点".to_string()) @@ -285,4 +287,4 @@ pub async fn auto_select_best_node() -> Result { #[command] pub fn get_packycode_nodes() -> Vec { get_all_nodes() -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/proxy.rs b/src-tauri/src/commands/proxy.rs index e2454ec..2192e0e 100644 --- a/src-tauri/src/commands/proxy.rs +++ b/src-tauri/src/commands/proxy.rs @@ -1,6 +1,6 @@ +use rusqlite::params; use serde::{Deserialize, Serialize}; use tauri::State; -use rusqlite::params; use crate::commands::agents::AgentDb; @@ -29,9 +29,9 @@ impl Default for ProxySettings { #[tauri::command] pub async fn get_proxy_settings(db: State<'_, AgentDb>) -> Result { let conn = db.0.lock().map_err(|e| e.to_string())?; - + let mut settings = ProxySettings::default(); - + // Query each proxy setting let keys = vec![ ("proxy_enabled", "enabled"), @@ -40,7 +40,7 @@ pub async fn get_proxy_settings(db: State<'_, AgentDb>) -> Result) -> Result Result<(), String> { let conn = db.0.lock().map_err(|e| e.to_string())?; - + // Save each setting let values = vec![ ("proxy_enabled", settings.enabled.to_string()), - ("proxy_http", settings.http_proxy.clone().unwrap_or_default()), - ("proxy_https", settings.https_proxy.clone().unwrap_or_default()), + ( + "proxy_http", + settings.http_proxy.clone().unwrap_or_default(), + ), + ( + "proxy_https", + settings.https_proxy.clone().unwrap_or_default(), + ), ("proxy_no", settings.no_proxy.clone().unwrap_or_default()), ("proxy_all", settings.all_proxy.clone().unwrap_or_default()), ]; - + for (key, value) in values { conn.execute( "INSERT OR REPLACE INTO app_settings (key, value) VALUES (?1, ?2)", params![key, value], - ).map_err(|e| format!("Failed to save {}: {}", key, e))?; + ) + .map_err(|e| format!("Failed to save {}: {}", key, e))?; } - + // Apply the proxy settings immediately to the current process apply_proxy_settings(&settings); - + Ok(()) } /// Apply proxy settings as environment variables pub fn apply_proxy_settings(settings: &ProxySettings) { log::info!("Applying proxy settings: enabled={}", settings.enabled); - + if !settings.enabled { // Clear proxy environment variables if disabled log::info!("Clearing proxy environment variables"); @@ -109,7 +116,7 @@ pub fn apply_proxy_settings(settings: &ProxySettings) { std::env::remove_var("all_proxy"); return; } - + // Ensure NO_PROXY includes localhost by default let mut no_proxy_list = vec!["localhost", "127.0.0.1", "::1", "0.0.0.0"]; if let Some(user_no_proxy) = &settings.no_proxy { @@ -118,7 +125,7 @@ pub fn apply_proxy_settings(settings: &ProxySettings) { } } let no_proxy_value = no_proxy_list.join(","); - + // Set proxy environment variables (uppercase is standard) if let Some(http_proxy) = &settings.http_proxy { if !http_proxy.is_empty() { @@ -126,25 +133,25 @@ pub fn apply_proxy_settings(settings: &ProxySettings) { std::env::set_var("HTTP_PROXY", http_proxy); } } - + if let Some(https_proxy) = &settings.https_proxy { if !https_proxy.is_empty() { log::info!("Setting HTTPS_PROXY={}", https_proxy); std::env::set_var("HTTPS_PROXY", https_proxy); } } - + // Always set NO_PROXY to include localhost log::info!("Setting NO_PROXY={}", no_proxy_value); std::env::set_var("NO_PROXY", &no_proxy_value); - + if let Some(all_proxy) = &settings.all_proxy { if !all_proxy.is_empty() { log::info!("Setting ALL_PROXY={}", all_proxy); std::env::set_var("ALL_PROXY", all_proxy); } } - + // Log current proxy environment variables for debugging log::info!("Current proxy environment variables:"); for (key, value) in std::env::vars() 
{ @@ -152,4 +159,4 @@ pub fn apply_proxy_settings(settings: &ProxySettings) { log::info!(" {}={}", key, value); } } -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/relay_adapters.rs b/src-tauri/src/commands/relay_adapters.rs index a9fa899..97705b1 100644 --- a/src-tauri/src/commands/relay_adapters.rs +++ b/src-tauri/src/commands/relay_adapters.rs @@ -8,7 +8,7 @@ use std::time::Duration; use tauri::{command, State}; use crate::commands::agents::AgentDb; -use crate::commands::relay_stations::{RelayStationAdapter, RelayStation}; +use crate::commands::relay_stations::{RelayStation, RelayStationAdapter}; use crate::i18n; // 创建HTTP客户端的辅助函数 @@ -89,25 +89,47 @@ pub struct TokenPaginationResponse { pub trait StationAdapter: Send + Sync { /// 获取中转站信息 async fn get_station_info(&self, station: &RelayStation) -> Result; - + /// 获取用户信息 async fn get_user_info(&self, station: &RelayStation, user_id: &str) -> Result; - + /// 测试连接 async fn test_connection(&self, station: &RelayStation) -> Result; - + /// 获取使用日志 - async fn get_usage_logs(&self, station: &RelayStation, user_id: &str, page: Option, size: Option) -> Result; - + async fn get_usage_logs( + &self, + station: &RelayStation, + user_id: &str, + page: Option, + size: Option, + ) -> Result; + /// 列出 Tokens - async fn list_tokens(&self, station: &RelayStation, page: Option, size: Option) -> Result; - + async fn list_tokens( + &self, + station: &RelayStation, + page: Option, + size: Option, + ) -> Result; + /// 创建 Token - async fn create_token(&self, station: &RelayStation, name: &str, quota: Option) -> Result; - + async fn create_token( + &self, + station: &RelayStation, + name: &str, + quota: Option, + ) -> Result; + /// 更新 Token - async fn update_token(&self, station: &RelayStation, token_id: &str, name: Option<&str>, quota: Option) -> Result; - + async fn update_token( + &self, + station: &RelayStation, + token_id: &str, + name: Option<&str>, + quota: Option, + ) -> Result; + /// 删除 Token async fn delete_token(&self, station: &RelayStation, token_id: &str) -> Result; } @@ -120,7 +142,7 @@ impl StationAdapter for PackycodeAdapter { async fn get_station_info(&self, station: &RelayStation) -> Result { // PackyCode 使用简单的健康检查端点 let url = format!("{}/health", station.api_url.trim_end_matches('/')); - + let client = create_http_client(); let response = client .get(&url) @@ -137,7 +159,10 @@ impl StationAdapter for PackycodeAdapter { metadata: Some({ let mut map = HashMap::new(); map.insert("adapter_type".to_string(), json!("packycode")); - map.insert("support_features".to_string(), json!(["quota_query", "usage_stats"])); + map.insert( + "support_features".to_string(), + json!(["quota_query", "usage_stats"]), + ); map }), quota_per_unit: Some(1), @@ -150,7 +175,7 @@ impl StationAdapter for PackycodeAdapter { async fn get_user_info(&self, station: &RelayStation, _user_id: &str) -> Result { // PackyCode 用户信息获取 let url = format!("{}/user/info", station.api_url.trim_end_matches('/')); - + let client = create_http_client(); let response = client .get(&url) @@ -159,24 +184,23 @@ impl StationAdapter for PackycodeAdapter { .await?; let data: Value = response.json().await?; - + Ok(UserInfo { id: "packycode_user".to_string(), - username: data.get("username") + username: data + .get("username") .and_then(|v| v.as_str()) .unwrap_or("PackyCode用户") .to_string(), display_name: Some("PackyCode用户".to_string()), - email: data.get("email") + email: data + .get("email") .and_then(|v| v.as_str()) .map(|s| s.to_string()), - quota: data.get("quota") - 
.and_then(|v| v.as_i64()) - .unwrap_or(0), - used_quota: data.get("used_quota") - .and_then(|v| v.as_i64()) - .unwrap_or(0), - request_count: data.get("request_count") + quota: data.get("quota").and_then(|v| v.as_i64()).unwrap_or(0), + used_quota: data.get("used_quota").and_then(|v| v.as_i64()).unwrap_or(0), + request_count: data + .get("request_count") .and_then(|v| v.as_i64()) .unwrap_or(0), group: "default".to_string(), @@ -186,7 +210,7 @@ impl StationAdapter for PackycodeAdapter { async fn test_connection(&self, station: &RelayStation) -> Result { let start_time = std::time::Instant::now(); - + match self.get_station_info(station).await { Ok(info) => { let response_time = start_time.elapsed().as_millis() as u64; @@ -194,8 +218,10 @@ impl StationAdapter for PackycodeAdapter { success: true, response_time, message: format!("{} - 连接成功", info.name), - details: Some(format!("服务版本: {}", - info.version.unwrap_or_else(|| "Unknown".to_string()))), + details: Some(format!( + "服务版本: {}", + info.version.unwrap_or_else(|| "Unknown".to_string()) + )), }) } Err(e) => { @@ -210,7 +236,13 @@ impl StationAdapter for PackycodeAdapter { } } - async fn get_usage_logs(&self, _station: &RelayStation, _user_id: &str, _page: Option, _size: Option) -> Result { + async fn get_usage_logs( + &self, + _station: &RelayStation, + _user_id: &str, + _page: Option, + _size: Option, + ) -> Result { // PackyCode 暂不支持详细使用日志 Ok(json!({ "logs": [], @@ -218,21 +250,45 @@ impl StationAdapter for PackycodeAdapter { })) } - async fn list_tokens(&self, _station: &RelayStation, _page: Option, _size: Option) -> Result { + async fn list_tokens( + &self, + _station: &RelayStation, + _page: Option, + _size: Option, + ) -> Result { // PackyCode 使用单一 Token,不支持多 Token 管理 - Err(anyhow::anyhow!(i18n::t("relay_adapter.packycode_single_token"))) + Err(anyhow::anyhow!(i18n::t( + "relay_adapter.packycode_single_token" + ))) } - async fn create_token(&self, _station: &RelayStation, _name: &str, _quota: Option) -> Result { - Err(anyhow::anyhow!(i18n::t("relay_adapter.packycode_single_token"))) + async fn create_token( + &self, + _station: &RelayStation, + _name: &str, + _quota: Option, + ) -> Result { + Err(anyhow::anyhow!(i18n::t( + "relay_adapter.packycode_single_token" + ))) } - async fn update_token(&self, _station: &RelayStation, _token_id: &str, _name: Option<&str>, _quota: Option) -> Result { - Err(anyhow::anyhow!(i18n::t("relay_adapter.packycode_single_token"))) + async fn update_token( + &self, + _station: &RelayStation, + _token_id: &str, + _name: Option<&str>, + _quota: Option, + ) -> Result { + Err(anyhow::anyhow!(i18n::t( + "relay_adapter.packycode_single_token" + ))) } async fn delete_token(&self, _station: &RelayStation, _token_id: &str) -> Result { - Err(anyhow::anyhow!(i18n::t("relay_adapter.packycode_single_token"))) + Err(anyhow::anyhow!(i18n::t( + "relay_adapter.packycode_single_token" + ))) } } @@ -272,7 +328,7 @@ impl StationAdapter for CustomAdapter { async fn test_connection(&self, station: &RelayStation) -> Result { let start_time = std::time::Instant::now(); - + // 尝试简单的 GET 请求测试连接 let client = create_http_client(); let response = client @@ -285,50 +341,76 @@ impl StationAdapter for CustomAdapter { let response_time = start_time.elapsed().as_millis() as u64; match response { - Ok(resp) => { - Ok(ConnectionTestResult { - success: resp.status().is_success(), - response_time, - message: if resp.status().is_success() { - format!("{} - 连接成功", station.name) - } else { - format!("HTTP {}: 服务器响应错误", resp.status()) - }, - 
details: Some(format!("响应状态: {}", resp.status())), - }) - } - Err(e) => { - Ok(ConnectionTestResult { - success: false, - response_time, - message: format!("连接失败: {}", e), - details: None, - }) - } + Ok(resp) => Ok(ConnectionTestResult { + success: resp.status().is_success(), + response_time, + message: if resp.status().is_success() { + format!("{} - 连接成功", station.name) + } else { + format!("HTTP {}: 服务器响应错误", resp.status()) + }, + details: Some(format!("响应状态: {}", resp.status())), + }), + Err(e) => Ok(ConnectionTestResult { + success: false, + response_time, + message: format!("连接失败: {}", e), + details: None, + }), } } - async fn get_usage_logs(&self, _station: &RelayStation, _user_id: &str, _page: Option, _size: Option) -> Result { + async fn get_usage_logs( + &self, + _station: &RelayStation, + _user_id: &str, + _page: Option, + _size: Option, + ) -> Result { Ok(json!({ "logs": [], "message": "自定义适配器暂不支持使用日志查询" })) } - async fn list_tokens(&self, _station: &RelayStation, _page: Option, _size: Option) -> Result { - Err(anyhow::anyhow!(i18n::t("relay_adapter.token_management_not_available"))) + async fn list_tokens( + &self, + _station: &RelayStation, + _page: Option, + _size: Option, + ) -> Result { + Err(anyhow::anyhow!(i18n::t( + "relay_adapter.token_management_not_available" + ))) } - async fn create_token(&self, _station: &RelayStation, _name: &str, _quota: Option) -> Result { - Err(anyhow::anyhow!(i18n::t("relay_adapter.token_management_not_available"))) + async fn create_token( + &self, + _station: &RelayStation, + _name: &str, + _quota: Option, + ) -> Result { + Err(anyhow::anyhow!(i18n::t( + "relay_adapter.token_management_not_available" + ))) } - async fn update_token(&self, _station: &RelayStation, _token_id: &str, _name: Option<&str>, _quota: Option) -> Result { - Err(anyhow::anyhow!(i18n::t("relay_adapter.token_management_not_available"))) + async fn update_token( + &self, + _station: &RelayStation, + _token_id: &str, + _name: Option<&str>, + _quota: Option, + ) -> Result { + Err(anyhow::anyhow!(i18n::t( + "relay_adapter.token_management_not_available" + ))) } async fn delete_token(&self, _station: &RelayStation, _token_id: &str) -> Result { - Err(anyhow::anyhow!(i18n::t("relay_adapter.token_management_not_available"))) + Err(anyhow::anyhow!(i18n::t( + "relay_adapter.token_management_not_available" + ))) } } @@ -349,20 +431,19 @@ pub fn create_adapter(adapter_type: &RelayStationAdapter) -> Box + db: State<'_, AgentDb>, ) -> Result { // 获取中转站配置 let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?; - + // 创建适配器 let adapter = create_adapter(&station.adapter); - + // 获取站点信息 - adapter.get_station_info(&station).await - .map_err(|e| { - log::error!("Failed to get station info: {}", e); - i18n::t("relay_adapter.get_info_failed") - }) + adapter.get_station_info(&station).await.map_err(|e| { + log::error!("Failed to get station info: {}", e); + i18n::t("relay_adapter.get_info_failed") + }) } /// 获取用户信息 @@ -370,12 +451,14 @@ pub async fn relay_station_get_info( pub async fn relay_station_get_user_info( station_id: String, user_id: String, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?; let adapter = create_adapter(&station.adapter); - - adapter.get_user_info(&station, &user_id).await + + adapter + .get_user_info(&station, &user_id) + .await .map_err(|e| { log::error!("Failed to get user info: {}", e); i18n::t("relay_adapter.get_user_info_failed") 
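(For context on how the reshaped adapter API is consumed: every command in this file follows the same resolve-station → create_adapter → delegate flow. The helper below is a hypothetical sketch, not part of the patch; it reuses only names that appear in this diff — relay_station_get, create_adapter, StationAdapter::test_connection — and assumes it lives alongside the adapters in relay_adapters.rs, where State, AgentDb and the adapter types are already in scope.)

// Hypothetical convenience wrapper, shown only to illustrate the dispatch pattern.
async fn check_station(station_id: String, db: State<'_, AgentDb>) -> Result<bool, String> {
    // Resolve the stored configuration for this station id.
    let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?;
    // Box<dyn StationAdapter> hides whether this is a PackyCode or a custom relay.
    let adapter = create_adapter(&station.adapter);
    let result = adapter
        .test_connection(&station)
        .await
        .map_err(|e| format!("connection test failed: {}", e))?;
    log::info!("{} ({} ms)", result.message, result.response_time);
    Ok(result.success)
}
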
@@ -386,16 +469,15 @@ pub async fn relay_station_get_user_info( #[command] pub async fn relay_station_test_connection( station_id: String, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?; let adapter = create_adapter(&station.adapter); - - adapter.test_connection(&station).await - .map_err(|e| { - log::error!("Connection test failed: {}", e); - i18n::t("relay_adapter.connection_test_failed") - }) + + adapter.test_connection(&station).await.map_err(|e| { + log::error!("Connection test failed: {}", e); + i18n::t("relay_adapter.connection_test_failed") + }) } /// 获取使用日志 @@ -405,12 +487,14 @@ pub async fn relay_station_get_usage_logs( user_id: String, page: Option, size: Option, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?; let adapter = create_adapter(&station.adapter); - - adapter.get_usage_logs(&station, &user_id, page, size).await + + adapter + .get_usage_logs(&station, &user_id, page, size) + .await .map_err(|e| { log::error!("Failed to get usage logs: {}", e); i18n::t("relay_adapter.get_usage_logs_failed") @@ -423,12 +507,14 @@ pub async fn relay_station_list_tokens( station_id: String, page: Option, size: Option, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?; let adapter = create_adapter(&station.adapter); - - adapter.list_tokens(&station, page, size).await + + adapter + .list_tokens(&station, page, size) + .await .map_err(|e| { log::error!("Failed to list tokens: {}", e); i18n::t("relay_adapter.list_tokens_failed") @@ -441,12 +527,14 @@ pub async fn relay_station_create_token( station_id: String, name: String, quota: Option, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?; let adapter = create_adapter(&station.adapter); - - adapter.create_token(&station, &name, quota).await + + adapter + .create_token(&station, &name, quota) + .await .map_err(|e| { log::error!("Failed to create token: {}", e); i18n::t("relay_adapter.create_token_failed") @@ -460,12 +548,14 @@ pub async fn relay_station_update_token( token_id: String, name: Option, quota: Option, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?; let adapter = create_adapter(&station.adapter); - - adapter.update_token(&station, &token_id, name.as_deref(), quota).await + + adapter + .update_token(&station, &token_id, name.as_deref(), quota) + .await .map_err(|e| { log::error!("Failed to update token: {}", e); i18n::t("relay_adapter.update_token_failed") @@ -477,12 +567,14 @@ pub async fn relay_station_update_token( pub async fn relay_station_delete_token( station_id: String, token_id: String, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?; let adapter = create_adapter(&station.adapter); - - adapter.delete_token(&station, &token_id).await + + adapter + .delete_token(&station, &token_id) + .await .map_err(|e| { log::error!("Failed to delete token: {}", e); i18n::t("relay_adapter.delete_token_failed") @@ -493,7 +585,7 @@ pub async fn relay_station_delete_token( #[derive(Debug, Clone, Serialize, Deserialize)] pub struct 
PackycodeUserQuota { pub daily_budget_usd: f64, // 日预算(美元) - pub daily_spent_usd: f64, // 日已使用(美元) + pub daily_spent_usd: f64, // 日已使用(美元) pub monthly_budget_usd: f64, // 月预算(美元) pub monthly_spent_usd: f64, // 月已使用(美元) pub balance_usd: f64, // 账户余额(美元) @@ -509,32 +601,34 @@ pub struct PackycodeUserQuota { #[command] pub async fn packycode_get_user_quota( station_id: String, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { - let station = crate::commands::relay_stations::relay_station_get(station_id, db).await + let station = crate::commands::relay_stations::relay_station_get(station_id, db) + .await .map_err(|e| format!("Failed to get station: {}", e))?; - + if station.adapter.as_str() != "packycode" { return Err("此功能仅支持 PackyCode 中转站".to_string()); } - + // 根据服务类型构建不同的 URL - let url = if station.api_url.contains("share-api") || station.api_url.contains("share.packycode") { - // 滴滴车服务 - "https://share.packycode.com/api/backend/users/info" - } else { - // 公交车服务 - "https://www.packycode.com/api/backend/users/info" - }; - + let url = + if station.api_url.contains("share-api") || station.api_url.contains("share.packycode") { + // 滴滴车服务 + "https://share.packycode.com/api/backend/users/info" + } else { + // 公交车服务 + "https://www.packycode.com/api/backend/users/info" + }; + let client = Client::builder() .timeout(Duration::from_secs(30)) - .no_proxy() // 禁用所有代理 + .no_proxy() // 禁用所有代理 .build() .map_err(|e| format!("创建 HTTP 客户端失败: {}", e))?; - + log::info!("正在请求 PackyCode 用户信息: {}", url); - + let response = client .get(url) .header("Authorization", format!("Bearer {}", station.system_token)) @@ -564,15 +658,19 @@ pub async fn packycode_get_user_quota( }); } - let data: Value = response.json().await + let data: Value = response + .json() + .await .map_err(|e| format!("解析响应失败: {}", e))?; - + // 辅助函数:将值转换为 f64 let to_f64 = |v: &Value| -> f64 { if v.is_null() { 0.0 } else if v.is_string() { - v.as_str().and_then(|s| s.parse::().ok()).unwrap_or(0.0) + v.as_str() + .and_then(|s| s.parse::().ok()) + .unwrap_or(0.0) } else if v.is_f64() { v.as_f64().unwrap_or(0.0) } else if v.is_i64() { @@ -581,7 +679,7 @@ pub async fn packycode_get_user_quota( 0.0 } }; - + Ok(PackycodeUserQuota { daily_budget_usd: to_f64(data.get("daily_budget_usd").unwrap_or(&Value::Null)), daily_spent_usd: to_f64(data.get("daily_spent_usd").unwrap_or(&Value::Null)), @@ -589,20 +687,23 @@ pub async fn packycode_get_user_quota( monthly_spent_usd: to_f64(data.get("monthly_spent_usd").unwrap_or(&Value::Null)), balance_usd: to_f64(data.get("balance_usd").unwrap_or(&Value::Null)), total_spent_usd: to_f64(data.get("total_spent_usd").unwrap_or(&Value::Null)), - plan_type: data.get("plan_type") + plan_type: data + .get("plan_type") .and_then(|v| v.as_str()) .unwrap_or("basic") .to_string(), - plan_expires_at: data.get("plan_expires_at") + plan_expires_at: data + .get("plan_expires_at") .and_then(|v| v.as_str()) .map(|s| s.to_string()), - username: data.get("username") + username: data + .get("username") .and_then(|v| v.as_str()) .map(|s| s.to_string()), - email: data.get("email") + email: data + .get("email") .and_then(|v| v.as_str()) .map(|s| s.to_string()), - opus_enabled: data.get("opus_enabled") - .and_then(|v| v.as_bool()), + opus_enabled: data.get("opus_enabled").and_then(|v| v.as_bool()), }) -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/relay_stations.rs b/src-tauri/src/commands/relay_stations.rs index 69d6e89..8294007 100644 --- a/src-tauri/src/commands/relay_stations.rs +++ 
b/src-tauri/src/commands/relay_stations.rs @@ -1,14 +1,14 @@ +use anyhow::Result; +use chrono::Utc; +use rusqlite::{params, Connection, OptionalExtension, Row}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use tauri::{command, State}; -use anyhow::Result; -use chrono::Utc; -use rusqlite::{params, Connection, Row, OptionalExtension}; use uuid::Uuid; +use crate::claude_config; use crate::commands::agents::AgentDb; use crate::i18n; -use crate::claude_config; /// 中转站适配器类型 #[derive(Debug, Clone, Serialize, Deserialize)] @@ -39,27 +39,27 @@ impl RelayStationAdapter { #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum AuthMethod { - BearerToken, // Bearer Token 认证(推荐) - ApiKey, // API Key 认证 - Custom, // 自定义认证方式 + BearerToken, // Bearer Token 认证(推荐) + ApiKey, // API Key 认证 + Custom, // 自定义认证方式 } /// 中转站配置(完整版本) #[derive(Debug, Clone, Serialize, Deserialize)] pub struct RelayStation { - pub id: String, // 唯一标识符 - pub name: String, // 显示名称 - pub description: Option, // 描述信息 - pub api_url: String, // API 基础 URL - pub adapter: RelayStationAdapter, // 适配器类型 - pub auth_method: AuthMethod, // 认证方式 - pub system_token: String, // 系统令牌 - pub user_id: Option, // 用户 ID(可选) + pub id: String, // 唯一标识符 + pub name: String, // 显示名称 + pub description: Option, // 描述信息 + pub api_url: String, // API 基础 URL + pub adapter: RelayStationAdapter, // 适配器类型 + pub auth_method: AuthMethod, // 认证方式 + pub system_token: String, // 系统令牌 + pub user_id: Option, // 用户 ID(可选) pub adapter_config: Option>, // 适配器特定配置 - pub enabled: bool, // 启用状态 - pub display_order: i32, // 显示顺序 - pub created_at: i64, // 创建时间 - pub updated_at: i64, // 更新时间 + pub enabled: bool, // 启用状态 + pub display_order: i32, // 显示顺序 + pub created_at: i64, // 创建时间 + pub updated_at: i64, // 更新时间 } /// 创建中转站请求(无自动生成字段) @@ -94,34 +94,34 @@ pub struct UpdateRelayStationRequest { /// 站点信息(统一格式) #[derive(Debug, Clone, Serialize, Deserialize)] pub struct StationInfo { - pub name: String, // 站点名称 - pub announcement: Option, // 公告信息 - pub api_url: String, // API 地址 - pub version: Option, // 版本信息 - pub metadata: Option>, // 扩展元数据 - pub quota_per_unit: Option, // 单位配额(用于价格转换) + pub name: String, // 站点名称 + pub announcement: Option, // 公告信息 + pub api_url: String, // API 地址 + pub version: Option, // 版本信息 + pub metadata: Option>, // 扩展元数据 + pub quota_per_unit: Option, // 单位配额(用于价格转换) } /// 用户信息(统一格式) #[derive(Debug, Clone, Serialize, Deserialize)] pub struct UserInfo { - pub user_id: String, // 用户 ID - pub username: Option, // 用户名 - pub email: Option, // 邮箱 - pub balance_remaining: Option, // 剩余余额(美元) - pub amount_used: Option, // 已用金额(美元) - pub request_count: Option, // 请求次数 - pub status: Option, // 账户状态 - pub metadata: Option>, // 原始数据 + pub user_id: String, // 用户 ID + pub username: Option, // 用户名 + pub email: Option, // 邮箱 + pub balance_remaining: Option, // 剩余余额(美元) + pub amount_used: Option, // 已用金额(美元) + pub request_count: Option, // 请求次数 + pub status: Option, // 账户状态 + pub metadata: Option>, // 原始数据 } /// 连接测试结果 #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ConnectionTestResult { - pub success: bool, // 连接是否成功 - pub response_time: Option, // 响应时间(毫秒) - pub message: String, // 结果消息 - pub error: Option, // 错误信息 + pub success: bool, // 连接是否成功 + pub response_time: Option, // 响应时间(毫秒) + pub message: String, // 结果消息 + pub error: Option, // 错误信息 } /// Token 信息 @@ -153,18 +153,34 @@ impl RelayStation { let auth_method_str: String = row.get("auth_method")?; let adapter_config_str: Option = 
row.get("adapter_config")?; - let adapter = serde_json::from_str(&format!("\"{}\"", adapter_str)) - .map_err(|_| rusqlite::Error::InvalidColumnType(0, "adapter".to_string(), rusqlite::types::Type::Text))?; - - let auth_method = serde_json::from_str(&format!("\"{}\"", auth_method_str)) - .map_err(|_| rusqlite::Error::InvalidColumnType(0, "auth_method".to_string(), rusqlite::types::Type::Text))?; - + let adapter = serde_json::from_str(&format!("\"{}\"", adapter_str)).map_err(|_| { + rusqlite::Error::InvalidColumnType( + 0, + "adapter".to_string(), + rusqlite::types::Type::Text, + ) + })?; + + let auth_method = + serde_json::from_str(&format!("\"{}\"", auth_method_str)).map_err(|_| { + rusqlite::Error::InvalidColumnType( + 0, + "auth_method".to_string(), + rusqlite::types::Type::Text, + ) + })?; + let adapter_config = if let Some(config_str) = adapter_config_str { if config_str.trim().is_empty() { None } else { - Some(serde_json::from_str(&config_str) - .map_err(|_| rusqlite::Error::InvalidColumnType(0, "adapter_config".to_string(), rusqlite::types::Type::Text))?) + Some(serde_json::from_str(&config_str).map_err(|_| { + rusqlite::Error::InvalidColumnType( + 0, + "adapter_config".to_string(), + rusqlite::types::Type::Text, + ) + })?) } } else { None @@ -253,13 +269,15 @@ pub async fn relay_stations_list(db: State<'_, AgentDb>) -> Result) -> Result -) -> Result { +pub async fn relay_station_get(id: String, db: State<'_, AgentDb>) -> Result { let conn = db.0.lock().map_err(|e| { log::error!("Failed to acquire database lock: {}", e); i18n::t("database.lock_failed") })?; - let mut stmt = conn.prepare("SELECT * FROM relay_stations WHERE id = ?1") + let mut stmt = conn + .prepare("SELECT * FROM relay_stations WHERE id = ?1") .map_err(|e| { log::error!("Failed to prepare statement: {}", e); i18n::t("database.query_failed") })?; - let station = stmt.query_row(params![id], |row| RelayStation::from_row(row)) + let station = stmt + .query_row(params![id], |row| RelayStation::from_row(row)) .map_err(|e| { log::error!("Failed to get relay station {}: {}", id, e); i18n::t("relay_station.not_found") @@ -305,7 +322,7 @@ pub async fn relay_station_get( #[command] pub async fn relay_station_create( request: CreateRelayStationRequest, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { let conn = db.0.lock().map_err(|e| { log::error!("Failed to acquire database lock: {}", e); @@ -326,26 +343,28 @@ pub async fn relay_station_create( let adapter_str = serde_json::to_string(&request.adapter) .map_err(|_| i18n::t("relay_station.invalid_adapter"))? - .trim_matches('"').to_string(); + .trim_matches('"') + .to_string(); let auth_method_str = serde_json::to_string(&request.auth_method) .map_err(|_| i18n::t("relay_station.invalid_auth_method"))? 
- .trim_matches('"').to_string(); + .trim_matches('"') + .to_string(); - let adapter_config_str = request.adapter_config.as_ref() + let adapter_config_str = request + .adapter_config + .as_ref() .map(|config| serde_json::to_string(config)) .transpose() .map_err(|_| i18n::t("relay_station.invalid_config"))?; // 如果要启用这个新中转站,先禁用所有其他中转站 if request.enabled { - conn.execute( - "UPDATE relay_stations SET enabled = 0", - [], - ).map_err(|e| { - log::error!("Failed to disable other relay stations: {}", e); - i18n::t("relay_station.create_failed") - })?; + conn.execute("UPDATE relay_stations SET enabled = 0", []) + .map_err(|e| { + log::error!("Failed to disable other relay stations: {}", e); + i18n::t("relay_station.create_failed") + })?; } conn.execute( @@ -397,7 +416,7 @@ pub async fn relay_station_create( #[command] pub async fn relay_station_update( request: UpdateRelayStationRequest, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { let conn = db.0.lock().map_err(|e| { log::error!("Failed to acquire database lock: {}", e); @@ -411,13 +430,17 @@ pub async fn relay_station_update( let adapter_str = serde_json::to_string(&request.adapter) .map_err(|_| i18n::t("relay_station.invalid_adapter"))? - .trim_matches('"').to_string(); + .trim_matches('"') + .to_string(); let auth_method_str = serde_json::to_string(&request.auth_method) .map_err(|_| i18n::t("relay_station.invalid_auth_method"))? - .trim_matches('"').to_string(); + .trim_matches('"') + .to_string(); - let adapter_config_str = request.adapter_config.as_ref() + let adapter_config_str = request + .adapter_config + .as_ref() .map(|config| serde_json::to_string(config)) .transpose() .map_err(|_| i18n::t("relay_station.invalid_config"))?; @@ -427,36 +450,39 @@ pub async fn relay_station_update( conn.execute( "UPDATE relay_stations SET enabled = 0 WHERE id != ?1", params![request.id], - ).map_err(|e| { + ) + .map_err(|e| { log::error!("Failed to disable other relay stations: {}", e); i18n::t("relay_station.update_failed") })?; } - let rows_affected = conn.execute( - r#" + let rows_affected = conn + .execute( + r#" UPDATE relay_stations SET name = ?2, description = ?3, api_url = ?4, adapter = ?5, auth_method = ?6, system_token = ?7, user_id = ?8, adapter_config = ?9, enabled = ?10, updated_at = ?11 WHERE id = ?1 "#, - params![ - request.id, - request.name, - request.description, - request.api_url, - adapter_str, - auth_method_str, - request.system_token, - request.user_id, - adapter_config_str, - if request.enabled { 1 } else { 0 }, - now - ], - ).map_err(|e| { - log::error!("Failed to update relay station: {}", e); - i18n::t("relay_station.update_failed") - })?; + params![ + request.id, + request.name, + request.description, + request.api_url, + adapter_str, + auth_method_str, + request.system_token, + request.user_id, + adapter_config_str, + if request.enabled { 1 } else { 0 }, + now + ], + ) + .map_err(|e| { + log::error!("Failed to update relay station: {}", e); + i18n::t("relay_station.update_failed") + })?; if rows_affected == 0 { return Err(i18n::t("relay_station.not_found")); @@ -474,7 +500,7 @@ pub async fn relay_station_update( adapter_config: request.adapter_config, enabled: request.enabled, display_order: 0, // 保持原有顺序 - created_at: 0, // 不重要,前端可以重新获取 + created_at: 0, // 不重要,前端可以重新获取 updated_at: now, }; @@ -484,16 +510,14 @@ pub async fn relay_station_update( /// 删除中转站 #[command] -pub async fn relay_station_delete( - id: String, - db: State<'_, AgentDb> -) -> Result { +pub async fn relay_station_delete(id: String, db: 
State<'_, AgentDb>) -> Result { let conn = db.0.lock().map_err(|e| { log::error!("Failed to acquire database lock: {}", e); i18n::t("database.lock_failed") })?; - let rows_affected = conn.execute("DELETE FROM relay_stations WHERE id = ?1", params![id]) + let rows_affected = conn + .execute("DELETE FROM relay_stations WHERE id = ?1", params![id]) .map_err(|e| { log::error!("Failed to delete relay station: {}", e); i18n::t("relay_station.delete_failed") @@ -512,7 +536,7 @@ pub async fn relay_station_delete( pub async fn relay_station_toggle_enable( id: String, enabled: bool, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { let conn = db.0.lock().map_err(|e| { log::error!("Failed to acquire database lock: {}", e); @@ -526,14 +550,15 @@ pub async fn relay_station_toggle_enable( conn.execute( "UPDATE relay_stations SET enabled = 0, updated_at = ?1 WHERE id != ?2", params![now, id], - ).map_err(|e| { + ) + .map_err(|e| { log::error!("Failed to disable other relay stations: {}", e); i18n::t("relay_station.update_failed") })?; - + // 获取要启用的中转站信息 let station = relay_station_get_internal(&conn, &id)?; - + // 将中转站配置应用到 Claude 配置文件 claude_config::apply_relay_station_to_config(&station).map_err(|e| { log::error!("Failed to apply relay station config: {}", e); @@ -549,13 +574,15 @@ pub async fn relay_station_toggle_enable( } // 更新目标中转站的启用状态 - let rows_affected = conn.execute( - "UPDATE relay_stations SET enabled = ?1, updated_at = ?2 WHERE id = ?3", - params![if enabled { 1 } else { 0 }, now, id], - ).map_err(|e| { - log::error!("Failed to toggle relay station enable status: {}", e); - i18n::t("relay_station.update_failed") - })?; + let rows_affected = conn + .execute( + "UPDATE relay_stations SET enabled = ?1, updated_at = ?2 WHERE id = ?3", + params![if enabled { 1 } else { 0 }, now, id], + ) + .map_err(|e| { + log::error!("Failed to toggle relay station enable status: {}", e); + i18n::t("relay_station.update_failed") + })?; if rows_affected == 0 { return Err(i18n::t("relay_station.not_found")); @@ -571,25 +598,29 @@ pub async fn relay_station_toggle_enable( /// 内部方法:获取单个中转站 fn relay_station_get_internal(conn: &Connection, id: &str) -> Result { - let mut stmt = conn.prepare( - "SELECT * FROM relay_stations WHERE id = ?1" - ).map_err(|e| { - log::error!("Failed to prepare statement: {}", e); - i18n::t("database.query_failed") - })?; + let mut stmt = conn + .prepare("SELECT * FROM relay_stations WHERE id = ?1") + .map_err(|e| { + log::error!("Failed to prepare statement: {}", e); + i18n::t("database.query_failed") + })?; - let station = stmt.query_row(params![id], |row| { - RelayStation::from_row(row) - }).map_err(|e| { - log::error!("Failed to get relay station: {}", e); - i18n::t("relay_station.not_found") - })?; + let station = stmt + .query_row(params![id], |row| RelayStation::from_row(row)) + .map_err(|e| { + log::error!("Failed to get relay station: {}", e); + i18n::t("relay_station.not_found") + })?; Ok(station) } /// 输入验证 -fn validate_relay_station_request(name: &str, api_url: &str, system_token: &str) -> Result<(), String> { +fn validate_relay_station_request( + name: &str, + api_url: &str, + system_token: &str, +) -> Result<(), String> { if name.trim().is_empty() { return Err(i18n::t("relay_station.name_required")); } @@ -599,14 +630,20 @@ fn validate_relay_station_request(name: &str, api_url: &str, system_token: &str) } // 验证 URL 格式 - let parsed_url = url::Url::parse(api_url) - .map_err(|_| i18n::t("relay_station.invalid_url"))?; - + let parsed_url = 
url::Url::parse(api_url).map_err(|_| i18n::t("relay_station.invalid_url"))?; + // 允许本地开发环境使用 HTTP - let is_localhost = parsed_url.host_str() - .map(|host| host == "localhost" || host == "127.0.0.1" || host == "::1" || host.starts_with("192.168.") || host.starts_with("10.")) + let is_localhost = parsed_url + .host_str() + .map(|host| { + host == "localhost" + || host == "127.0.0.1" + || host == "::1" + || host.starts_with("192.168.") + || host.starts_with("10.") + }) .unwrap_or(false); - + // 非本地环境必须使用 HTTPS if !is_localhost && !api_url.starts_with("https://") { return Err(i18n::t("relay_station.https_required")); @@ -621,7 +658,10 @@ fn validate_relay_station_request(name: &str, api_url: &str, system_token: &str) } // 检查 Token 是否包含特殊字符 - if system_token.chars().any(|c| c.is_whitespace() || c.is_control()) { + if system_token + .chars() + .any(|c| c.is_whitespace() || c.is_control()) + { return Err(i18n::t("relay_station.token_invalid_chars")); } @@ -634,47 +674,52 @@ pub fn mask_token(token: &str) -> String { if token.len() <= 8 { "*".repeat(token.len()) } else { - format!("{}...{}", &token[..4], &token[token.len()-4..]) + format!("{}...{}", &token[..4], &token[token.len() - 4..]) } } /// 手动同步中转站配置到 Claude 配置文件 #[command] -pub async fn relay_station_sync_config( - db: State<'_, AgentDb> -) -> Result { +pub async fn relay_station_sync_config(db: State<'_, AgentDb>) -> Result { let conn = db.0.lock().map_err(|e| { log::error!("Failed to acquire database lock: {}", e); i18n::t("database.lock_failed") })?; // 查找当前启用的中转站 - let mut stmt = conn.prepare( - "SELECT * FROM relay_stations WHERE enabled = 1 LIMIT 1" - ).map_err(|e| { - log::error!("Failed to prepare statement: {}", e); - i18n::t("database.query_failed") - })?; + let mut stmt = conn + .prepare("SELECT * FROM relay_stations WHERE enabled = 1 LIMIT 1") + .map_err(|e| { + log::error!("Failed to prepare statement: {}", e); + i18n::t("database.query_failed") + })?; - let station_opt = stmt.query_row([], |row| { - RelayStation::from_row(row) - }).optional().map_err(|e| { - log::error!("Failed to query enabled relay station: {}", e); - i18n::t("database.query_failed") - })?; + let station_opt = stmt + .query_row([], |row| RelayStation::from_row(row)) + .optional() + .map_err(|e| { + log::error!("Failed to query enabled relay station: {}", e); + i18n::t("database.query_failed") + })?; if let Some(station) = station_opt { // 应用中转站配置 claude_config::apply_relay_station_to_config(&station) .map_err(|e| format!("配置同步失败: {}", e))?; - - log::info!("Synced relay station {} config to Claude settings", station.name); - Ok(format!("已同步中转站 {} 的配置到 Claude 设置", station.name)) + + log::info!( + "Synced relay station {} config to Claude settings", + station.name + ); + Ok(format!( + "已同步中转站 {} 的配置到 Claude 设置", + station.name + )) } else { // 没有启用的中转站,清除配置 claude_config::clear_relay_station_from_config() .map_err(|e| format!("清除配置失败: {}", e))?; - + log::info!("Cleared relay station config from Claude settings"); Ok("已清除 Claude 设置中的中转站配置".to_string()) } @@ -683,9 +728,8 @@ pub async fn relay_station_sync_config( /// 恢复 Claude 配置备份 #[command] pub async fn relay_station_restore_config() -> Result { - claude_config::restore_claude_config() - .map_err(|e| format!("恢复配置失败: {}", e))?; - + claude_config::restore_claude_config().map_err(|e| format!("恢复配置失败: {}", e))?; + log::info!("Restored Claude config from backup"); Ok("已从备份恢复 Claude 配置".to_string()) } @@ -694,21 +738,22 @@ pub async fn relay_station_restore_config() -> Result { #[command] pub async fn 
relay_station_get_current_config() -> Result>, String> { let mut config = HashMap::new(); - + config.insert( "api_url".to_string(), - claude_config::get_current_api_url().unwrap_or(None) + claude_config::get_current_api_url().unwrap_or(None), ); - + config.insert( "api_token".to_string(), - claude_config::get_current_api_token().unwrap_or(None) + claude_config::get_current_api_token() + .unwrap_or(None) .map(|token: String| { // 脱敏显示 token mask_token(&token) - }) + }), ); - + Ok(config) } @@ -726,13 +771,15 @@ pub async fn relay_stations_export(db: State<'_, AgentDb>) -> Result) -> Result, - pub clear_existing: bool, // 是否清除现有配置 + pub clear_existing: bool, // 是否清除现有配置 } #[command] pub async fn relay_stations_import( request: ImportRelayStationsRequest, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result { let mut conn = db.0.lock().map_err(|e| { log::error!("Failed to acquire database lock: {}", e); @@ -788,30 +835,31 @@ pub async fn relay_stations_import( // 如果需要清除现有配置 if request.clear_existing { - tx.execute("DELETE FROM relay_stations", []) - .map_err(|e| { - log::error!("Failed to clear existing relay stations: {}", e); - i18n::t("relay_station.clear_failed") - })?; + tx.execute("DELETE FROM relay_stations", []).map_err(|e| { + log::error!("Failed to clear existing relay stations: {}", e); + i18n::t("relay_station.clear_failed") + })?; log::info!("Cleared existing relay stations"); } // 获取现有的中转站列表(用于重复检查) let existing_stations: Vec<(String, String)> = if !request.clear_existing { - let mut stmt = tx.prepare("SELECT api_url, system_token FROM relay_stations") + let mut stmt = tx + .prepare("SELECT api_url, system_token FROM relay_stations") .map_err(|e| { log::error!("Failed to prepare statement: {}", e); i18n::t("database.query_failed") })?; - - let stations_iter = stmt.query_map([], |row| { - Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?)) - }) - .map_err(|e| { - log::error!("Failed to query existing stations: {}", e); - i18n::t("database.query_failed") - })?; - + + let stations_iter = stmt + .query_map([], |row| { + Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?)) + }) + .map_err(|e| { + log::error!("Failed to query existing stations: {}", e); + i18n::t("database.query_failed") + })?; + // 立即收集结果,避免生命周期问题 let mut existing = Vec::new(); for station_result in stations_iter { @@ -837,7 +885,11 @@ pub async fn relay_stations_import( for station_request in request.stations { // 验证输入 - if let Err(e) = validate_relay_station_request(&station_request.name, &station_request.api_url, &station_request.system_token) { + if let Err(e) = validate_relay_station_request( + &station_request.name, + &station_request.api_url, + &station_request.system_token, + ) { log::warn!("Skipping invalid station {}: {}", station_request.name, e); failed_count += 1; continue; @@ -849,22 +901,30 @@ pub async fn relay_stations_import( }); if is_duplicate { - log::info!("Skipping duplicate station: {} ({})", station_request.name, station_request.api_url); + log::info!( + "Skipping duplicate station: {} ({})", + station_request.name, + station_request.api_url + ); skipped_count += 1; continue; } let id = Uuid::new_v4().to_string(); - + let adapter_str = serde_json::to_string(&station_request.adapter) .map_err(|_| i18n::t("relay_station.invalid_adapter"))? - .trim_matches('"').to_string(); + .trim_matches('"') + .to_string(); let auth_method_str = serde_json::to_string(&station_request.auth_method) .map_err(|_| i18n::t("relay_station.invalid_auth_method"))? 
- .trim_matches('"').to_string(); + .trim_matches('"') + .to_string(); - let adapter_config_str = station_request.adapter_config.as_ref() + let adapter_config_str = station_request + .adapter_config + .as_ref() .map(|config| serde_json::to_string(config)) .transpose() .map_err(|_| i18n::t("relay_station.invalid_config"))?; @@ -908,9 +968,9 @@ pub async fn relay_stations_import( "导入完成:总计 {} 个,成功 {} 个,跳过 {} 个(重复),失败 {} 个", total, imported_count, skipped_count, failed_count ); - + log::info!("{}", message); - + Ok(ImportResult { total, imported: imported_count, @@ -925,7 +985,7 @@ pub async fn relay_stations_import( #[command] pub async fn relay_station_update_order( station_ids: Vec, - db: State<'_, AgentDb> + db: State<'_, AgentDb>, ) -> Result<(), String> { let conn = db.0.lock().map_err(|e| { log::error!("Failed to acquire database lock: {}", e); @@ -943,7 +1003,8 @@ pub async fn relay_station_update_order( tx.execute( "UPDATE relay_stations SET display_order = ?1, updated_at = ?2 WHERE id = ?3", params![index as i32, Utc::now().timestamp(), station_id], - ).map_err(|e| { + ) + .map_err(|e| { log::error!("Failed to update station order: {}", e); i18n::t("database.update_failed") })?; @@ -955,6 +1016,9 @@ pub async fn relay_station_update_order( i18n::t("database.transaction_failed") })?; - log::info!("Updated display order for {} relay stations", station_ids.len()); + log::info!( + "Updated display order for {} relay stations", + station_ids.len() + ); Ok(()) -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/slash_commands.rs b/src-tauri/src/commands/slash_commands.rs index dbf12e6..6f77309 100644 --- a/src-tauri/src/commands/slash_commands.rs +++ b/src-tauri/src/commands/slash_commands.rs @@ -45,13 +45,13 @@ struct CommandFrontmatter { /// Parse a markdown file with optional YAML frontmatter fn parse_markdown_with_frontmatter(content: &str) -> Result<(Option, String)> { let lines: Vec<&str> = content.lines().collect(); - + // Check if the file starts with YAML frontmatter if lines.is_empty() || lines[0] != "---" { // No frontmatter return Ok((None, content.to_string())); } - + // Find the end of frontmatter let mut frontmatter_end = None; for (i, line) in lines.iter().enumerate().skip(1) { @@ -60,12 +60,12 @@ fn parse_markdown_with_frontmatter(content: &str) -> Result<(Option(&frontmatter_content) { Ok(frontmatter) => Ok((Some(frontmatter), body_content)), @@ -86,20 +86,20 @@ fn extract_command_info(file_path: &Path, base_path: &Path) -> Result<(String, O let relative_path = file_path .strip_prefix(base_path) .context("Failed to get relative path")?; - + // Remove .md extension let path_without_ext = relative_path .with_extension("") .to_string_lossy() .to_string(); - + // Split into components let components: Vec<&str> = path_without_ext.split('/').collect(); - + if components.is_empty() { return Err(anyhow::anyhow!("Invalid command path")); } - + if components.len() == 1 { // No namespace Ok((components[0].to_string(), None)) @@ -112,44 +112,43 @@ fn extract_command_info(file_path: &Path, base_path: &Path) -> Result<(String, O } /// Load a single command from a markdown file -fn load_command_from_file( - file_path: &Path, - base_path: &Path, - scope: &str, -) -> Result { +fn load_command_from_file(file_path: &Path, base_path: &Path, scope: &str) -> Result { debug!("Loading command from: {:?}", file_path); - + // Read file content - let content = fs::read_to_string(file_path) - .context("Failed to read command file")?; - + let content = 
fs::read_to_string(file_path).context("Failed to read command file")?; + // Parse frontmatter let (frontmatter, body) = parse_markdown_with_frontmatter(&content)?; - + // Extract command info let (name, namespace) = extract_command_info(file_path, base_path)?; - + // Build full command (no scope prefix, just /command or /namespace:command) let full_command = match &namespace { Some(ns) => format!("/{ns}:{name}"), None => format!("/{name}"), }; - + // Generate unique ID - let id = format!("{}-{}", scope, file_path.to_string_lossy().replace('/', "-")); - + let id = format!( + "{}-{}", + scope, + file_path.to_string_lossy().replace('/', "-") + ); + // Check for special content let has_bash_commands = body.contains("!`"); let has_file_references = body.contains('@'); let accepts_arguments = body.contains("$ARGUMENTS"); - + // Extract metadata from frontmatter let (description, allowed_tools) = if let Some(fm) = frontmatter { (fm.description, fm.allowed_tools.unwrap_or_default()) } else { (None, Vec::new()) }; - + Ok(SlashCommand { id, name, @@ -171,18 +170,18 @@ fn find_markdown_files(dir: &Path, files: &mut Vec) -> Result<()> { if !dir.exists() { return Ok(()); } - + for entry in fs::read_dir(dir)? { let entry = entry?; let path = entry.path(); - + // Skip hidden files/directories if let Some(name) = path.file_name().and_then(|n| n.to_str()) { if name.starts_with('.') { continue; } } - + if path.is_dir() { find_markdown_files(&path, files)?; } else if path.is_file() { @@ -193,7 +192,7 @@ fn find_markdown_files(dir: &Path, files: &mut Vec) -> Result<()> { } } } - + Ok(()) } @@ -252,16 +251,16 @@ pub async fn slash_commands_list( ) -> Result, String> { info!("Discovering slash commands"); let mut commands = Vec::new(); - + // Add default commands commands.extend(create_default_commands()); - + // Load project commands if project path is provided if let Some(proj_path) = project_path { let project_commands_dir = PathBuf::from(&proj_path).join(".claude").join("commands"); if project_commands_dir.exists() { debug!("Scanning project commands at: {:?}", project_commands_dir); - + let mut md_files = Vec::new(); if let Err(e) = find_markdown_files(&project_commands_dir, &mut md_files) { error!("Failed to find project command files: {}", e); @@ -280,13 +279,13 @@ pub async fn slash_commands_list( } } } - + // Load user commands if let Some(home_dir) = dirs::home_dir() { let user_commands_dir = home_dir.join(".claude").join("commands"); if user_commands_dir.exists() { debug!("Scanning user commands at: {:?}", user_commands_dir); - + let mut md_files = Vec::new(); if let Err(e) = find_markdown_files(&user_commands_dir, &mut md_files) { error!("Failed to find user command files: {}", e); @@ -305,7 +304,7 @@ pub async fn slash_commands_list( } } } - + info!("Found {} slash commands", commands.len()); Ok(commands) } @@ -314,17 +313,17 @@ pub async fn slash_commands_list( #[tauri::command] pub async fn slash_command_get(command_id: String) -> Result { debug!("Getting slash command: {}", command_id); - + // Parse the ID to determine scope and reconstruct file path let parts: Vec<&str> = command_id.split('-').collect(); if parts.len() < 2 { return Err("Invalid command ID".to_string()); } - + // The actual implementation would need to reconstruct the path and reload the command // For now, we'll list all commands and find the matching one let commands = slash_commands_list(None).await?; - + commands .into_iter() .find(|cmd| cmd.id == command_id) @@ -343,16 +342,16 @@ pub async fn slash_command_save( 
project_path: Option, ) -> Result { info!("Saving slash command: {} in scope: {}", name, scope); - + // Validate inputs if name.is_empty() { return Err("Command name cannot be empty".to_string()); } - + if !["project", "user"].contains(&scope.as_str()) { return Err("Invalid scope. Must be 'project' or 'user'".to_string()); } - + // Determine base directory let base_dir = if scope == "project" { if let Some(proj_path) = project_path { @@ -366,7 +365,7 @@ pub async fn slash_command_save( .join(".claude") .join("commands") }; - + // Build file path let mut file_path = base_dir.clone(); if let Some(ns) = &namespace { @@ -374,41 +373,40 @@ pub async fn slash_command_save( file_path = file_path.join(component); } } - + // Create directories if needed - fs::create_dir_all(&file_path) - .map_err(|e| format!("Failed to create directories: {}", e))?; - + fs::create_dir_all(&file_path).map_err(|e| format!("Failed to create directories: {}", e))?; + // Add filename file_path = file_path.join(format!("{}.md", name)); - + // Build content with frontmatter let mut full_content = String::new(); - + // Add frontmatter if we have metadata if description.is_some() || !allowed_tools.is_empty() { full_content.push_str("---\n"); - + if let Some(desc) = &description { full_content.push_str(&format!("description: {}\n", desc)); } - + if !allowed_tools.is_empty() { full_content.push_str("allowed-tools:\n"); for tool in &allowed_tools { full_content.push_str(&format!(" - {}\n", tool)); } } - + full_content.push_str("---\n\n"); } - + full_content.push_str(&content); - + // Write file fs::write(&file_path, &full_content) .map_err(|e| format!("Failed to write command file: {}", e))?; - + // Load and return the saved command load_command_from_file(&file_path, &base_dir, &scope) .map_err(|e| format!("Failed to load saved command: {}", e)) @@ -416,35 +414,38 @@ pub async fn slash_command_save( /// Delete a slash command #[tauri::command] -pub async fn slash_command_delete(command_id: String, project_path: Option) -> Result { +pub async fn slash_command_delete( + command_id: String, + project_path: Option, +) -> Result { info!("Deleting slash command: {}", command_id); - + // First, we need to determine if this is a project command by parsing the ID let is_project_command = command_id.starts_with("project-"); - + // If it's a project command and we don't have a project path, error out if is_project_command && project_path.is_none() { return Err("Project path required to delete project commands".to_string()); } - + // List all commands (including project commands if applicable) let commands = slash_commands_list(project_path).await?; - + // Find the command by ID let command = commands .into_iter() .find(|cmd| cmd.id == command_id) .ok_or_else(|| format!("Command not found: {}", command_id))?; - + // Delete the file fs::remove_file(&command.file_path) .map_err(|e| format!("Failed to delete command file: {}", e))?; - + // Clean up empty directories if let Some(parent) = Path::new(&command.file_path).parent() { let _ = remove_empty_dirs(parent); } - + Ok(format!("Deleted command: {}", command.full_command)) } @@ -453,18 +454,18 @@ fn remove_empty_dirs(dir: &Path) -> Result<()> { if !dir.exists() { return Ok(()); } - + // Check if directory is empty let is_empty = fs::read_dir(dir)?.next().is_none(); - + if is_empty { fs::remove_dir(dir)?; - + // Try to remove parent if it's also empty if let Some(parent) = dir.parent() { let _ = remove_empty_dirs(parent); } } - + Ok(()) } diff --git a/src-tauri/src/commands/smart_sessions.rs 
b/src-tauri/src/commands/smart_sessions.rs index 7d41594..9d71e86 100644 --- a/src-tauri/src/commands/smart_sessions.rs +++ b/src-tauri/src/commands/smart_sessions.rs @@ -1,9 +1,9 @@ use anyhow::{Context, Result}; +use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use std::fs; use std::path::PathBuf; use tauri::AppHandle; -use chrono::{DateTime, Utc}; use uuid::Uuid; /// 智能会话结果 @@ -105,42 +105,40 @@ fn get_config_path() -> Result { let claudia_dir = dirs::home_dir() .context("Failed to get home directory")? .join(".claudia"); - - fs::create_dir_all(&claudia_dir) - .context("Failed to create .claudia directory")?; - + + fs::create_dir_all(&claudia_dir).context("Failed to create .claudia directory")?; + Ok(claudia_dir.join("smart_sessions_config.json")) } /// 加载智能会话配置 pub fn load_smart_session_config() -> Result { let config_path = get_config_path()?; - + if !config_path.exists() { let default_config = SmartSessionConfig::default(); save_smart_session_config(&default_config)?; return Ok(default_config); } - - let config_content = fs::read_to_string(&config_path) - .context("Failed to read smart session config")?; - - let config: SmartSessionConfig = serde_json::from_str(&config_content) - .context("Failed to parse smart session config")?; - + + let config_content = + fs::read_to_string(&config_path).context("Failed to read smart session config")?; + + let config: SmartSessionConfig = + serde_json::from_str(&config_content).context("Failed to parse smart session config")?; + Ok(config) } /// 保存智能会话配置 pub fn save_smart_session_config(config: &SmartSessionConfig) -> Result<()> { let config_path = get_config_path()?; - - let config_content = serde_json::to_string_pretty(config) - .context("Failed to serialize smart session config")?; - - fs::write(&config_path, config_content) - .context("Failed to write smart session config")?; - + + let config_content = + serde_json::to_string_pretty(config).context("Failed to serialize smart session config")?; + + fs::write(&config_path, config_content).context("Failed to write smart session config")?; + Ok(()) } @@ -150,18 +148,16 @@ pub fn generate_smart_session_path( session_name: Option, ) -> Result { let timestamp = chrono::Utc::now(); - - let session_name = session_name.unwrap_or_else(|| { - match config.naming_pattern.as_str() { - "chat-{timestamp}" => format!("chat-{}", timestamp.format("%Y-%m-%d-%H%M%S")), - "session-{date}" => format!("session-{}", timestamp.format("%Y-%m-%d")), - "conversation-{datetime}" => format!("conversation-{}", timestamp.format("%Y%m%d_%H%M%S")), - _ => format!("chat-{}", timestamp.format("%Y-%m-%d-%H%M%S")), - } + + let session_name = session_name.unwrap_or_else(|| match config.naming_pattern.as_str() { + "chat-{timestamp}" => format!("chat-{}", timestamp.format("%Y-%m-%d-%H%M%S")), + "session-{date}" => format!("session-{}", timestamp.format("%Y-%m-%d")), + "conversation-{datetime}" => format!("conversation-{}", timestamp.format("%Y%m%d_%H%M%S")), + _ => format!("chat-{}", timestamp.format("%Y-%m-%d-%H%M%S")), }); - + let session_path = config.base_directory.join(&session_name); - + // 确保路径唯一 if session_path.exists() { let uuid = Uuid::new_v4().to_string()[..8].to_string(); @@ -175,16 +171,14 @@ pub fn generate_smart_session_path( /// 创建智能会话环境 pub fn create_smart_session_environment(session_path: &PathBuf) -> Result<()> { let config = load_smart_session_config()?; - + // 创建主目录 - fs::create_dir_all(session_path) - .context("Failed to create smart session directory")?; - + 
fs::create_dir_all(session_path).context("Failed to create smart session directory")?; + // 创建 .claude 子目录 let claude_dir = session_path.join(".claude"); - fs::create_dir_all(&claude_dir) - .context("Failed to create .claude directory")?; - + fs::create_dir_all(&claude_dir).context("Failed to create .claude directory")?; + // 创建基础 Claude 设置文件 let claude_settings = serde_json::json!({ "smart_session": true, @@ -192,33 +186,37 @@ pub fn create_smart_session_environment(session_path: &PathBuf) -> Result<()> { "created_at": chrono::Utc::now().to_rfc3339(), "session_path": session_path.to_string_lossy() }); - + let settings_path = claude_dir.join("settings.json"); - fs::write(&settings_path, serde_json::to_string_pretty(&claude_settings)?) - .context("Failed to write Claude settings")?; - + fs::write( + &settings_path, + serde_json::to_string_pretty(&claude_settings)?, + ) + .context("Failed to write Claude settings")?; + // 创建模板文件 let session_id = Uuid::new_v4().to_string(); let created_at = chrono::Utc::now().to_rfc3339(); - + for template in &config.template_files { let file_path = session_path.join(&template.path); - + // 创建父目录(如果需要) if let Some(parent) = file_path.parent() { fs::create_dir_all(parent) .context("Failed to create template file parent directory")?; } - + // 替换模板变量 - let content = template.content + let content = template + .content .replace("{session_id}", &session_id) .replace("{created_at}", &created_at) .replace("{project_path}", &session_path.to_string_lossy()); - + fs::write(&file_path, content) .context(format!("Failed to write template file: {}", template.path))?; - + // 设置可执行权限(如果需要) #[cfg(unix)] if template.executable { @@ -228,8 +226,11 @@ pub fn create_smart_session_environment(session_path: &PathBuf) -> Result<()> { fs::set_permissions(&file_path, perms)?; } } - - log::info!("Created smart session environment at: {}", session_path.display()); + + log::info!( + "Created smart session environment at: {}", + session_path.display() + ); Ok(()) } @@ -238,10 +239,9 @@ fn get_sessions_history_path() -> Result { let claudia_dir = dirs::home_dir() .context("Failed to get home directory")? 
.join(".claudia"); - - fs::create_dir_all(&claudia_dir) - .context("Failed to create .claudia directory")?; - + + fs::create_dir_all(&claudia_dir).context("Failed to create .claudia directory")?; + Ok(claudia_dir.join("smart_sessions_history.json")) } @@ -249,7 +249,7 @@ fn get_sessions_history_path() -> Result { pub fn save_smart_session_record(session_path: &PathBuf) -> Result { let session_id = Uuid::new_v4().to_string(); let now = chrono::Utc::now(); - + let session = SmartSession { id: session_id.clone(), display_name: session_path @@ -262,42 +262,40 @@ pub fn save_smart_session_record(session_path: &PathBuf) -> Result { last_accessed: now, session_type: "smart".to_string(), }; - + let history_path = get_sessions_history_path()?; - + let mut sessions: Vec = if history_path.exists() { - let content = fs::read_to_string(&history_path) - .context("Failed to read sessions history")?; + let content = + fs::read_to_string(&history_path).context("Failed to read sessions history")?; serde_json::from_str(&content).unwrap_or_default() } else { Vec::new() }; - + sessions.push(session); - - let history_content = serde_json::to_string_pretty(&sessions) - .context("Failed to serialize sessions history")?; - - fs::write(&history_path, history_content) - .context("Failed to write sessions history")?; - + + let history_content = + serde_json::to_string_pretty(&sessions).context("Failed to serialize sessions history")?; + + fs::write(&history_path, history_content).context("Failed to write sessions history")?; + Ok(session_id) } /// 列出所有智能会话 pub fn list_smart_sessions() -> Result> { let history_path = get_sessions_history_path()?; - + if !history_path.exists() { return Ok(Vec::new()); } - - let content = fs::read_to_string(&history_path) - .context("Failed to read sessions history")?; - - let sessions: Vec = serde_json::from_str(&content) - .context("Failed to parse sessions history")?; - + + let content = fs::read_to_string(&history_path).context("Failed to read sessions history")?; + + let sessions: Vec = + serde_json::from_str(&content).context("Failed to parse sessions history")?; + // 过滤仍然存在的会话 let existing_sessions: Vec = sessions .into_iter() @@ -306,7 +304,7 @@ pub fn list_smart_sessions() -> Result> { path.exists() }) .collect(); - + Ok(existing_sessions) } @@ -316,20 +314,24 @@ pub fn cleanup_old_smart_sessions(days: u32) -> Result { if !config.auto_cleanup_enabled { return Ok(0); } - + let cutoff_time = chrono::Utc::now() - chrono::Duration::days(days as i64); let sessions = list_smart_sessions()?; let mut cleaned_count = 0u32; - + let mut remaining_sessions = Vec::new(); - + for session in sessions { if session.last_accessed < cutoff_time { // 删除会话目录 let session_path = PathBuf::from(&session.project_path); if session_path.exists() { if let Err(e) = fs::remove_dir_all(&session_path) { - log::warn!("Failed to remove session directory {}: {}", session_path.display(), e); + log::warn!( + "Failed to remove session directory {}: {}", + session_path.display(), + e + ); } else { cleaned_count += 1; log::info!("Cleaned up expired session: {}", session.display_name); @@ -339,17 +341,17 @@ pub fn cleanup_old_smart_sessions(days: u32) -> Result { remaining_sessions.push(session); } } - + // 更新历史记录 if cleaned_count > 0 { let history_path = get_sessions_history_path()?; let history_content = serde_json::to_string_pretty(&remaining_sessions) .context("Failed to serialize updated sessions history")?; - + fs::write(&history_path, history_content) .context("Failed to write updated sessions history")?; } - + 
Ok(cleaned_count) } @@ -362,32 +364,32 @@ pub async fn create_smart_quick_start_session( session_name: Option, ) -> Result { log::info!("Creating smart quick start session: {:?}", session_name); - - let config = load_smart_session_config() - .map_err(|e| format!("Failed to load config: {}", e))?; - + + let config = + load_smart_session_config().map_err(|e| format!("Failed to load config: {}", e))?; + if !config.enabled { return Err("Smart sessions are disabled".to_string()); } - + // 1. 生成唯一的会话路径 let session_path = generate_smart_session_path(&config, session_name) .map_err(|e| format!("Failed to generate session path: {}", e))?; - + // 2. 创建目录结构和环境 create_smart_session_environment(&session_path) .map_err(|e| format!("Failed to create session environment: {}", e))?; - + // 3. 保存到历史记录 let session_id = save_smart_session_record(&session_path) .map_err(|e| format!("Failed to save session record: {}", e))?; - + let display_name = session_path .file_name() .and_then(|n| n.to_str()) .unwrap_or("Smart Session") .to_string(); - + let result = SmartSessionResult { session_id, project_path: session_path.to_string_lossy().to_string(), @@ -395,23 +397,23 @@ pub async fn create_smart_quick_start_session( created_at: chrono::Utc::now(), session_type: "smart".to_string(), }; - - log::info!("Smart session created successfully: {}", result.project_path); + + log::info!( + "Smart session created successfully: {}", + result.project_path + ); Ok(result) } /// 获取智能会话配置 #[tauri::command] pub async fn get_smart_session_config() -> Result { - load_smart_session_config() - .map_err(|e| format!("Failed to load smart session config: {}", e)) + load_smart_session_config().map_err(|e| format!("Failed to load smart session config: {}", e)) } /// 更新智能会话配置 #[tauri::command] -pub async fn update_smart_session_config( - config: SmartSessionConfig, -) -> Result<(), String> { +pub async fn update_smart_session_config(config: SmartSessionConfig) -> Result<(), String> { save_smart_session_config(&config) .map_err(|e| format!("Failed to save smart session config: {}", e)) } @@ -419,21 +421,19 @@ pub async fn update_smart_session_config( /// 列出智能会话 #[tauri::command] pub async fn list_smart_sessions_command() -> Result, String> { - list_smart_sessions() - .map_err(|e| format!("Failed to list smart sessions: {}", e)) + list_smart_sessions().map_err(|e| format!("Failed to list smart sessions: {}", e)) } /// 切换智能会话模式 #[tauri::command] pub async fn toggle_smart_session_mode(enabled: bool) -> Result<(), String> { - let mut config = load_smart_session_config() - .map_err(|e| format!("Failed to load config: {}", e))?; - + let mut config = + load_smart_session_config().map_err(|e| format!("Failed to load config: {}", e))?; + config.enabled = enabled; - - save_smart_session_config(&config) - .map_err(|e| format!("Failed to save config: {}", e))?; - + + save_smart_session_config(&config).map_err(|e| format!("Failed to save config: {}", e))?; + log::info!("Smart session mode toggled: {}", enabled); Ok(()) } @@ -441,6 +441,5 @@ pub async fn toggle_smart_session_mode(enabled: bool) -> Result<(), String> { /// 清理过期智能会话 #[tauri::command] pub async fn cleanup_old_smart_sessions_command(days: u32) -> Result { - cleanup_old_smart_sessions(days) - .map_err(|e| format!("Failed to cleanup old sessions: {}", e)) -} \ No newline at end of file + cleanup_old_smart_sessions(days).map_err(|e| format!("Failed to cleanup old sessions: {}", e)) +} diff --git a/src-tauri/src/commands/storage.rs b/src-tauri/src/commands/storage.rs index 1bcdb1b..02c5529 100644 
--- a/src-tauri/src/commands/storage.rs +++ b/src-tauri/src/commands/storage.rs @@ -1,10 +1,10 @@ +use super::agents::AgentDb; use anyhow::Result; -use rusqlite::{params, Connection, Result as SqliteResult, types::ValueRef}; +use rusqlite::{params, types::ValueRef, Connection, Result as SqliteResult}; use serde::{Deserialize, Serialize}; use serde_json::{Map, Value as JsonValue}; use std::collections::HashMap; use tauri::{AppHandle, Manager, State}; -use super::agents::AgentDb; /// Represents metadata about a database table #[derive(Debug, Serialize, Deserialize, Clone)] @@ -50,37 +50,35 @@ pub struct QueryResult { #[tauri::command] pub async fn storage_list_tables(db: State<'_, AgentDb>) -> Result, String> { let conn = db.0.lock().map_err(|e| e.to_string())?; - + // Query for all tables let mut stmt = conn .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name") .map_err(|e| e.to_string())?; - + let table_names: Vec = stmt .query_map([], |row| row.get(0)) .map_err(|e| e.to_string())? .collect::>>() .map_err(|e| e.to_string())?; - + drop(stmt); - + let mut tables = Vec::new(); - + for table_name in table_names { // Get row count let row_count: i64 = conn - .query_row( - &format!("SELECT COUNT(*) FROM {}", table_name), - [], - |row| row.get(0), - ) + .query_row(&format!("SELECT COUNT(*) FROM {}", table_name), [], |row| { + row.get(0) + }) .unwrap_or(0); - + // Get column information let mut pragma_stmt = conn .prepare(&format!("PRAGMA table_info({})", table_name)) .map_err(|e| e.to_string())?; - + let columns: Vec = pragma_stmt .query_map([], |row| { Ok(ColumnInfo { @@ -95,14 +93,14 @@ pub async fn storage_list_tables(db: State<'_, AgentDb>) -> Result>>() .map_err(|e| e.to_string())?; - + tables.push(TableInfo { name: table_name, row_count, columns, }); } - + Ok(tables) } @@ -117,17 +115,17 @@ pub async fn storage_read_table( searchQuery: Option, ) -> Result { let conn = db.0.lock().map_err(|e| e.to_string())?; - + // Validate table name to prevent SQL injection if !is_valid_table_name(&conn, &tableName)? { return Err("Invalid table name".to_string()); } - + // Get column information let mut pragma_stmt = conn .prepare(&format!("PRAGMA table_info({})", tableName)) .map_err(|e| e.to_string())?; - + let columns: Vec = pragma_stmt .query_map([], |row| { Ok(ColumnInfo { @@ -142,9 +140,9 @@ pub async fn storage_read_table( .map_err(|e| e.to_string())? .collect::>>() .map_err(|e| e.to_string())?; - + drop(pragma_stmt); - + // Build query with optional search let (query, count_query) = if let Some(search) = &searchQuery { // Create search conditions for all text columns @@ -153,7 +151,7 @@ pub async fn storage_read_table( .filter(|col| col.type_name.contains("TEXT") || col.type_name.contains("VARCHAR")) .map(|col| format!("{} LIKE '%{}%'", col.name, search.replace("'", "''"))) .collect(); - + if search_conditions.is_empty() { ( format!("SELECT * FROM {} LIMIT ? OFFSET ?", tableName), @@ -162,7 +160,10 @@ pub async fn storage_read_table( } else { let where_clause = search_conditions.join(" OR "); ( - format!("SELECT * FROM {} WHERE {} LIMIT ? OFFSET ?", tableName, where_clause), + format!( + "SELECT * FROM {} WHERE {} LIMIT ? 
OFFSET ?", + tableName, where_clause + ), format!("SELECT COUNT(*) FROM {} WHERE {}", tableName, where_clause), ) } @@ -172,25 +173,23 @@ pub async fn storage_read_table( format!("SELECT COUNT(*) FROM {}", tableName), ) }; - + // Get total row count let total_rows: i64 = conn .query_row(&count_query, [], |row| row.get(0)) .unwrap_or(0); - + // Calculate pagination let offset = (page - 1) * pageSize; let total_pages = (total_rows as f64 / pageSize as f64).ceil() as i64; - + // Query data - let mut data_stmt = conn - .prepare(&query) - .map_err(|e| e.to_string())?; - + let mut data_stmt = conn.prepare(&query).map_err(|e| e.to_string())?; + let rows: Vec> = data_stmt .query_map(params![pageSize, offset], |row| { let mut row_map = Map::new(); - + for (idx, col) in columns.iter().enumerate() { let value = match row.get_ref(idx)? { ValueRef::Null => JsonValue::Null, @@ -203,17 +202,20 @@ pub async fn storage_read_table( } } ValueRef::Text(s) => JsonValue::String(String::from_utf8_lossy(s).to_string()), - ValueRef::Blob(b) => JsonValue::String(base64::Engine::encode(&base64::engine::general_purpose::STANDARD, b)), + ValueRef::Blob(b) => JsonValue::String(base64::Engine::encode( + &base64::engine::general_purpose::STANDARD, + b, + )), }; row_map.insert(col.name.clone(), value); } - + Ok(row_map) }) .map_err(|e| e.to_string())? .collect::>>() .map_err(|e| e.to_string())?; - + Ok(TableData { table_name: tableName, columns, @@ -235,49 +237,52 @@ pub async fn storage_update_row( updates: HashMap, ) -> Result<(), String> { let conn = db.0.lock().map_err(|e| e.to_string())?; - + // Validate table name if !is_valid_table_name(&conn, &tableName)? { return Err("Invalid table name".to_string()); } - + // Build UPDATE query let set_clauses: Vec = updates .keys() .enumerate() .map(|(idx, key)| format!("{} = ?{}", key, idx + 1)) .collect(); - + let where_clauses: Vec = primaryKeyValues .keys() .enumerate() .map(|(idx, key)| format!("{} = ?{}", key, idx + updates.len() + 1)) .collect(); - + let query = format!( "UPDATE {} SET {} WHERE {}", tableName, set_clauses.join(", "), where_clauses.join(" AND ") ); - + // Prepare parameters let mut params: Vec> = Vec::new(); - + // Add update values for value in updates.values() { params.push(json_to_sql_value(value)?); } - + // Add where clause values for value in primaryKeyValues.values() { params.push(json_to_sql_value(value)?); } - + // Execute update - conn.execute(&query, rusqlite::params_from_iter(params.iter().map(|p| p.as_ref()))) - .map_err(|e| format!("Failed to update row: {}", e))?; - + conn.execute( + &query, + rusqlite::params_from_iter(params.iter().map(|p| p.as_ref())), + ) + .map_err(|e| format!("Failed to update row: {}", e))?; + Ok(()) } @@ -290,35 +295,38 @@ pub async fn storage_delete_row( primaryKeyValues: HashMap, ) -> Result<(), String> { let conn = db.0.lock().map_err(|e| e.to_string())?; - + // Validate table name if !is_valid_table_name(&conn, &tableName)? 
{ return Err("Invalid table name".to_string()); } - + // Build DELETE query let where_clauses: Vec = primaryKeyValues .keys() .enumerate() .map(|(idx, key)| format!("{} = ?{}", key, idx + 1)) .collect(); - + let query = format!( "DELETE FROM {} WHERE {}", tableName, where_clauses.join(" AND ") ); - + // Prepare parameters let params: Vec> = primaryKeyValues .values() .map(json_to_sql_value) .collect::, _>>()?; - + // Execute delete - conn.execute(&query, rusqlite::params_from_iter(params.iter().map(|p| p.as_ref()))) - .map_err(|e| format!("Failed to delete row: {}", e))?; - + conn.execute( + &query, + rusqlite::params_from_iter(params.iter().map(|p| p.as_ref())), + ) + .map_err(|e| format!("Failed to delete row: {}", e))?; + Ok(()) } @@ -331,35 +339,40 @@ pub async fn storage_insert_row( values: HashMap, ) -> Result { let conn = db.0.lock().map_err(|e| e.to_string())?; - + // Validate table name if !is_valid_table_name(&conn, &tableName)? { return Err("Invalid table name".to_string()); } - + // Build INSERT query let columns: Vec<&String> = values.keys().collect(); - let placeholders: Vec = (1..=columns.len()) - .map(|i| format!("?{}", i)) - .collect(); - + let placeholders: Vec = (1..=columns.len()).map(|i| format!("?{}", i)).collect(); + let query = format!( "INSERT INTO {} ({}) VALUES ({})", tableName, - columns.iter().map(|c| c.as_str()).collect::>().join(", "), + columns + .iter() + .map(|c| c.as_str()) + .collect::>() + .join(", "), placeholders.join(", ") ); - + // Prepare parameters let params: Vec> = values .values() .map(json_to_sql_value) .collect::, _>>()?; - + // Execute insert - conn.execute(&query, rusqlite::params_from_iter(params.iter().map(|p| p.as_ref()))) - .map_err(|e| format!("Failed to insert row: {}", e))?; - + conn.execute( + &query, + rusqlite::params_from_iter(params.iter().map(|p| p.as_ref())), + ) + .map_err(|e| format!("Failed to insert row: {}", e))?; + Ok(conn.last_insert_rowid()) } @@ -370,20 +383,20 @@ pub async fn storage_execute_sql( query: String, ) -> Result { let conn = db.0.lock().map_err(|e| e.to_string())?; - + // Check if it's a SELECT query let is_select = query.trim().to_uppercase().starts_with("SELECT"); - + if is_select { // Handle SELECT queries let mut stmt = conn.prepare(&query).map_err(|e| e.to_string())?; let column_count = stmt.column_count(); - + // Get column names let columns: Vec = (0..column_count) .map(|i| stmt.column_name(i).unwrap_or("").to_string()) .collect(); - + // Execute query and collect results let rows: Vec> = stmt .query_map([], |row| { @@ -399,8 +412,13 @@ pub async fn storage_execute_sql( JsonValue::String(f.to_string()) } } - ValueRef::Text(s) => JsonValue::String(String::from_utf8_lossy(s).to_string()), - ValueRef::Blob(b) => JsonValue::String(base64::Engine::encode(&base64::engine::general_purpose::STANDARD, b)), + ValueRef::Text(s) => { + JsonValue::String(String::from_utf8_lossy(s).to_string()) + } + ValueRef::Blob(b) => JsonValue::String(base64::Engine::encode( + &base64::engine::general_purpose::STANDARD, + b, + )), }; row_values.push(value); } @@ -409,7 +427,7 @@ pub async fn storage_execute_sql( .map_err(|e| e.to_string())? .collect::>>() .map_err(|e| e.to_string())?; - + Ok(QueryResult { columns, rows, @@ -419,7 +437,7 @@ pub async fn storage_execute_sql( } else { // Handle non-SELECT queries (INSERT, UPDATE, DELETE, etc.) 
let rows_affected = conn.execute(&query, []).map_err(|e| e.to_string())?; - + Ok(QueryResult { columns: vec![], rows: vec![], @@ -435,13 +453,12 @@ pub async fn storage_reset_database(app: AppHandle) -> Result<(), String> { { // Drop all existing tables within a scoped block let db_state = app.state::(); - let conn = db_state.0.lock() - .map_err(|e| e.to_string())?; - + let conn = db_state.0.lock().map_err(|e| e.to_string())?; + // Disable foreign key constraints temporarily to allow dropping tables conn.execute("PRAGMA foreign_keys = OFF", []) .map_err(|e| format!("Failed to disable foreign keys: {}", e))?; - + // Drop tables - order doesn't matter with foreign keys disabled conn.execute("DROP TABLE IF EXISTS agent_runs", []) .map_err(|e| format!("Failed to drop agent_runs table: {}", e))?; @@ -449,34 +466,31 @@ pub async fn storage_reset_database(app: AppHandle) -> Result<(), String> { .map_err(|e| format!("Failed to drop agents table: {}", e))?; conn.execute("DROP TABLE IF EXISTS app_settings", []) .map_err(|e| format!("Failed to drop app_settings table: {}", e))?; - + // Re-enable foreign key constraints conn.execute("PRAGMA foreign_keys = ON", []) .map_err(|e| format!("Failed to re-enable foreign keys: {}", e))?; - + // Connection is automatically dropped at end of scope } - + // Re-initialize the database which will recreate all tables empty let new_conn = init_database(&app).map_err(|e| format!("Failed to reset database: {}", e))?; - + // Update the managed state with the new connection { let db_state = app.state::(); - let mut conn_guard = db_state.0.lock() - .map_err(|e| e.to_string())?; + let mut conn_guard = db_state.0.lock().map_err(|e| e.to_string())?; *conn_guard = new_conn; } - + // Run VACUUM to optimize the database { let db_state = app.state::(); - let conn = db_state.0.lock() - .map_err(|e| e.to_string())?; - conn.execute("VACUUM", []) - .map_err(|e| e.to_string())?; + let conn = db_state.0.lock().map_err(|e| e.to_string())?; + conn.execute("VACUUM", []).map_err(|e| e.to_string())?; } - + Ok(()) } @@ -489,7 +503,7 @@ fn is_valid_table_name(conn: &Connection, table_name: &str) -> Result 0) } @@ -513,4 +527,4 @@ fn json_to_sql_value(value: &JsonValue) -> Result, Stri } /// Initialize the agents database (re-exported from agents module) -use super::agents::init_database; \ No newline at end of file +use super::agents::init_database; diff --git a/src-tauri/src/commands/system.rs b/src-tauri/src/commands/system.rs index 4636f61..67c3c5b 100644 --- a/src-tauri/src/commands/system.rs +++ b/src-tauri/src/commands/system.rs @@ -15,7 +15,11 @@ pub async fn flush_dns() -> Result { return Ok("DNS cache flushed".into()); } else { let err = String::from_utf8_lossy(&output.stderr).to_string(); - return Err(if err.is_empty() { "ipconfig /flushdns failed".into() } else { err }); + return Err(if err.is_empty() { + "ipconfig /flushdns failed".into() + } else { + err + }); } } @@ -31,7 +35,11 @@ pub async fn flush_dns() -> Result { return Ok("DNS cache flushed".into()); } else { let err = String::from_utf8_lossy(&output.stderr).to_string(); - return Err(if err.is_empty() { "dscacheutil -flushcache failed".into() } else { err }); + return Err(if err.is_empty() { + "dscacheutil -flushcache failed".into() + } else { + err + }); } } @@ -41,7 +49,13 @@ pub async fn flush_dns() -> Result { let attempts: Vec<(&str, Vec<&str>)> = vec![ ("resolvectl", vec!["flush-caches"]), ("systemd-resolve", vec!["--flush-caches"]), - ("sh", vec!["-c", "service nscd restart || service dnsmasq restart || rc-service 
nscd restart"]), + ( + "sh", + vec![ + "-c", + "service nscd restart || service dnsmasq restart || rc-service nscd restart", + ], + ), ]; for (cmd, args) in attempts { @@ -59,4 +73,3 @@ pub async fn flush_dns() -> Result { Err("No supported DNS flush method succeeded on this Linux system".into()) } } - diff --git a/src-tauri/src/commands/terminal.rs b/src-tauri/src/commands/terminal.rs index 9bfca01..a40df27 100644 --- a/src-tauri/src/commands/terminal.rs +++ b/src-tauri/src/commands/terminal.rs @@ -1,12 +1,12 @@ -use std::collections::HashMap; -use std::sync::Arc; +use anyhow::Result; +use portable_pty::{native_pty_system, Child, CommandBuilder, MasterPty, PtySize}; use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::io::{Read, Write}; +use std::sync::Arc; use tauri::{AppHandle, Emitter, State}; use tokio::sync::Mutex; use uuid::Uuid; -use anyhow::Result; -use portable_pty::{native_pty_system, CommandBuilder, PtySize, Child, MasterPty}; -use std::io::{Read, Write}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct TerminalSession { @@ -19,8 +19,8 @@ pub struct TerminalSession { /// Terminal child process wrapper pub struct TerminalChild { writer: Arc>>, - _master: Box, // Keep master PTY alive - _child: Box, // Keep child process alive + _master: Box, // Keep master PTY alive + _child: Box, // Keep child process alive } /// State for managing terminal sessions @@ -34,37 +34,46 @@ pub async fn create_terminal_session( terminal_state: State<'_, TerminalState>, ) -> Result { let session_id = Uuid::new_v4().to_string(); - - log::info!("Creating terminal session: {} in {}", session_id, working_directory); - + + log::info!( + "Creating terminal session: {} in {}", + session_id, + working_directory + ); + // Check if working directory exists if !std::path::Path::new(&working_directory).exists() { - return Err(format!("Working directory does not exist: {}", working_directory)); + return Err(format!( + "Working directory does not exist: {}", + working_directory + )); } - + let session = TerminalSession { id: session_id.clone(), working_directory: working_directory.clone(), created_at: chrono::Utc::now(), is_active: true, }; - + // Create PTY system let pty_system = native_pty_system(); - + // Create PTY pair with size - let pty_pair = pty_system.openpty(PtySize { - rows: 30, - cols: 120, - pixel_width: 0, - pixel_height: 0, - }).map_err(|e| format!("Failed to create PTY: {}", e))?; - + let pty_pair = pty_system + .openpty(PtySize { + rows: 30, + cols: 120, + pixel_width: 0, + pixel_height: 0, + }) + .map_err(|e| format!("Failed to create PTY: {}", e))?; + // Get shell command let shell = get_default_shell(); log::info!("Using shell: {}", shell); let mut cmd = CommandBuilder::new(&shell); - + // Set shell-specific arguments if cfg!(target_os = "windows") { if shell.contains("pwsh") { @@ -72,7 +81,7 @@ pub async fn create_terminal_session( cmd.arg("-NoLogo"); cmd.arg("-NoExit"); } else if shell.contains("powershell") { - // Windows PowerShell - stay interactive + // Windows PowerShell - stay interactive cmd.arg("-NoLogo"); cmd.arg("-NoExit"); } else { @@ -87,10 +96,10 @@ pub async fn create_terminal_session( cmd.arg("-il"); } } - + // Set working directory cmd.cwd(working_directory.clone()); - + // Set environment variables based on platform if cfg!(target_os = "windows") { // Windows-specific environment @@ -105,40 +114,65 @@ pub async fn create_terminal_session( // Unix-specific environment cmd.env("TERM", "xterm-256color"); cmd.env("COLORTERM", "truecolor"); - 
cmd.env("LANG", std::env::var("LANG").unwrap_or_else(|_| "en_US.UTF-8".to_string())); - cmd.env("LC_ALL", std::env::var("LC_ALL").unwrap_or_else(|_| "en_US.UTF-8".to_string())); - cmd.env("LC_CTYPE", std::env::var("LC_CTYPE").unwrap_or_else(|_| "en_US.UTF-8".to_string())); - + cmd.env( + "LANG", + std::env::var("LANG").unwrap_or_else(|_| "en_US.UTF-8".to_string()), + ); + cmd.env( + "LC_ALL", + std::env::var("LC_ALL").unwrap_or_else(|_| "en_US.UTF-8".to_string()), + ); + cmd.env( + "LC_CTYPE", + std::env::var("LC_CTYPE").unwrap_or_else(|_| "en_US.UTF-8".to_string()), + ); + // Inherit other Unix environment variables for (key, value) in std::env::vars() { - if !key.starts_with("TERM") && !key.starts_with("COLORTERM") && - !key.starts_with("LC_") && !key.starts_with("LANG") && - !key.starts_with("TAURI_") && !key.starts_with("VITE_") { + if !key.starts_with("TERM") + && !key.starts_with("COLORTERM") + && !key.starts_with("LC_") + && !key.starts_with("LANG") + && !key.starts_with("TAURI_") + && !key.starts_with("VITE_") + { cmd.env(&key, &value); } } } - + // Spawn the shell process - let child = pty_pair.slave.spawn_command(cmd) + let child = pty_pair + .slave + .spawn_command(cmd) .map_err(|e| format!("Failed to spawn shell: {}", e))?; - - log::info!("Shell process spawned successfully for session: {}", session_id); - + + log::info!( + "Shell process spawned successfully for session: {}", + session_id + ); + // Get writer for stdin - let writer = pty_pair.master.take_writer() + let writer = pty_pair + .master + .take_writer() .map_err(|e| format!("Failed to get PTY writer: {}", e))?; - + // Start reading output in background let session_id_clone = session_id.clone(); let app_handle_clone = app_handle.clone(); - let mut reader = pty_pair.master.try_clone_reader() + let mut reader = pty_pair + .master + .try_clone_reader() .map_err(|e| format!("Failed to get PTY reader: {}", e))?; - + // Spawn reader thread std::thread::spawn(move || { let mut buffer = [0u8; 4096]; - log::info!("PTY reader thread started for session: {}", session_id_clone); + log::info!( + "PTY reader thread started for session: {}", + session_id_clone + ); loop { match reader.read(&mut buffer) { Ok(0) => { @@ -147,30 +181,43 @@ pub async fn create_terminal_session( } Ok(n) => { let data = String::from_utf8_lossy(&buffer[..n]).to_string(); - log::debug!("PTY reader got {} bytes for session {}: {:?}", n, session_id_clone, data); - let _ = app_handle_clone.emit(&format!("terminal-output:{}", session_id_clone), &data); + log::debug!( + "PTY reader got {} bytes for session {}: {:?}", + n, + session_id_clone, + data + ); + let _ = app_handle_clone + .emit(&format!("terminal-output:{}", session_id_clone), &data); } Err(e) => { - log::error!("Error reading PTY output for session {}: {}", session_id_clone, e); + log::error!( + "Error reading PTY output for session {}: {}", + session_id_clone, + e + ); break; } } } - log::debug!("PTY reader thread finished for session: {}", session_id_clone); + log::debug!( + "PTY reader thread finished for session: {}", + session_id_clone + ); }); - + // Store the session with PTY writer, master PTY and child process let terminal_child = TerminalChild { writer: Arc::new(Mutex::new(writer)), _master: pty_pair.master, _child: child, }; - + { let mut state = terminal_state.lock().await; state.insert(session_id.clone(), (session, Some(terminal_child))); } - + log::info!("Terminal session created successfully: {}", session_id); Ok(session_id) } @@ -183,22 +230,27 @@ pub async fn send_terminal_input( 
terminal_state: State<'_, TerminalState>, ) -> Result<(), String> { let state = terminal_state.lock().await; - + if let Some((_session, child_opt)) = state.get(&session_id) { if let Some(child) = child_opt { log::debug!("Sending input to terminal {}: {:?}", session_id, input); - + // Write to PTY let mut writer = child.writer.lock().await; - writer.write_all(input.as_bytes()) + writer + .write_all(input.as_bytes()) .map_err(|e| format!("Failed to write to terminal: {}", e))?; - writer.flush() + writer + .flush() .map_err(|e| format!("Failed to flush terminal input: {}", e))?; return Ok(()); } } - - Err(format!("Terminal session not found or not active: {}", session_id)) + + Err(format!( + "Terminal session not found or not active: {}", + session_id + )) } /// Closes a terminal session @@ -208,11 +260,11 @@ pub async fn close_terminal_session( terminal_state: State<'_, TerminalState>, ) -> Result<(), String> { let mut state = terminal_state.lock().await; - + if let Some((mut session, _child)) = state.remove(&session_id) { session.is_active = false; // PTY and child process will be dropped automatically - + log::info!("Closed terminal session: {}", session_id); Ok(()) } else { @@ -226,8 +278,9 @@ pub async fn list_terminal_sessions( terminal_state: State<'_, TerminalState>, ) -> Result, String> { let state = terminal_state.lock().await; - - let sessions: Vec = state.iter() + + let sessions: Vec = state + .iter() .filter_map(|(id, (session, _))| { if session.is_active { Some(id.clone()) @@ -236,7 +289,7 @@ pub async fn list_terminal_sessions( } }) .collect(); - + Ok(sessions) } @@ -251,7 +304,10 @@ pub async fn resize_terminal( // Note: With the current architecture, resize is not supported // To support resize, we would need to keep a reference to the PTY master // or use a different approach - log::warn!("Terminal resize not currently supported for session: {}", session_id); + log::warn!( + "Terminal resize not currently supported for session: {}", + session_id + ); Ok(()) } @@ -262,25 +318,25 @@ pub async fn cleanup_terminal_sessions( ) -> Result { let mut state = terminal_state.lock().await; let mut cleaned_up = 0; - + let mut to_remove = Vec::new(); - + for (id, (session, _child)) in state.iter() { if !session.is_active { to_remove.push(id.clone()); cleaned_up += 1; } } - + // Remove the sessions for id in to_remove { state.remove(&id); } - + if cleaned_up > 0 { log::info!("Cleaned up {} orphaned terminal sessions", cleaned_up); } - + Ok(cleaned_up) } @@ -288,9 +344,17 @@ pub async fn cleanup_terminal_sessions( fn get_default_shell() -> String { if cfg!(target_os = "windows") { // Try PowerShell Core (pwsh) first, then Windows PowerShell, fallback to cmd - if std::process::Command::new("pwsh").arg("--version").output().is_ok() { + if std::process::Command::new("pwsh") + .arg("--version") + .output() + .is_ok() + { "pwsh".to_string() - } else if std::process::Command::new("powershell").arg("-Version").output().is_ok() { + } else if std::process::Command::new("powershell") + .arg("-Version") + .output() + .is_ok() + { "powershell".to_string() } else { "cmd.exe".to_string() @@ -307,4 +371,4 @@ fn get_default_shell() -> String { } }) } -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/usage.rs b/src-tauri/src/commands/usage.rs index 5472c06..6aa4778 100644 --- a/src-tauri/src/commands/usage.rs +++ b/src-tauri/src/commands/usage.rs @@ -152,40 +152,86 @@ fn calculate_cost(model: &str, usage: &UsageData) -> f64 { // 独立的模型价格匹配函数,更精确的模型识别 fn match_model_prices(model_lower: 
&str) -> (f64, f64, f64, f64) { // Claude Opus 4.1 (最新最强) - if model_lower.contains("opus") && (model_lower.contains("4-1") || model_lower.contains("4.1")) { - (OPUS_4_1_INPUT_PRICE, OPUS_4_1_OUTPUT_PRICE, OPUS_4_1_CACHE_WRITE_PRICE, OPUS_4_1_CACHE_READ_PRICE) + if model_lower.contains("opus") && (model_lower.contains("4-1") || model_lower.contains("4.1")) + { + ( + OPUS_4_1_INPUT_PRICE, + OPUS_4_1_OUTPUT_PRICE, + OPUS_4_1_CACHE_WRITE_PRICE, + OPUS_4_1_CACHE_READ_PRICE, + ) } // Claude Sonnet 4 - else if model_lower.contains("sonnet") && (model_lower.contains("-4-") || model_lower.contains("sonnet-4")) { - (SONNET_4_INPUT_PRICE, SONNET_4_OUTPUT_PRICE, SONNET_4_CACHE_WRITE_PRICE, SONNET_4_CACHE_READ_PRICE) + else if model_lower.contains("sonnet") + && (model_lower.contains("-4-") || model_lower.contains("sonnet-4")) + { + ( + SONNET_4_INPUT_PRICE, + SONNET_4_OUTPUT_PRICE, + SONNET_4_CACHE_WRITE_PRICE, + SONNET_4_CACHE_READ_PRICE, + ) } // Claude Haiku 3.5 else if model_lower.contains("haiku") { - (HAIKU_3_5_INPUT_PRICE, HAIKU_3_5_OUTPUT_PRICE, HAIKU_3_5_CACHE_WRITE_PRICE, HAIKU_3_5_CACHE_READ_PRICE) + ( + HAIKU_3_5_INPUT_PRICE, + HAIKU_3_5_OUTPUT_PRICE, + HAIKU_3_5_CACHE_WRITE_PRICE, + HAIKU_3_5_CACHE_READ_PRICE, + ) } // Claude 3.x Sonnet 系列(3.7, 3.5) - else if model_lower.contains("sonnet") && - (model_lower.contains("3-7") || model_lower.contains("3.7") || - model_lower.contains("3-5") || model_lower.contains("3.5")) { - (SONNET_3_INPUT_PRICE, SONNET_3_OUTPUT_PRICE, SONNET_3_CACHE_WRITE_PRICE, SONNET_3_CACHE_READ_PRICE) + else if model_lower.contains("sonnet") + && (model_lower.contains("3-7") + || model_lower.contains("3.7") + || model_lower.contains("3-5") + || model_lower.contains("3.5")) + { + ( + SONNET_3_INPUT_PRICE, + SONNET_3_OUTPUT_PRICE, + SONNET_3_CACHE_WRITE_PRICE, + SONNET_3_CACHE_READ_PRICE, + ) } // Claude 3 Opus (旧版) else if model_lower.contains("opus") && model_lower.contains("3") { - (OPUS_3_INPUT_PRICE, OPUS_3_OUTPUT_PRICE, OPUS_3_CACHE_WRITE_PRICE, OPUS_3_CACHE_READ_PRICE) + ( + OPUS_3_INPUT_PRICE, + OPUS_3_OUTPUT_PRICE, + OPUS_3_CACHE_WRITE_PRICE, + OPUS_3_CACHE_READ_PRICE, + ) } // 默认 Sonnet(未明确版本号时) else if model_lower.contains("sonnet") { - (SONNET_3_INPUT_PRICE, SONNET_3_OUTPUT_PRICE, SONNET_3_CACHE_WRITE_PRICE, SONNET_3_CACHE_READ_PRICE) + ( + SONNET_3_INPUT_PRICE, + SONNET_3_OUTPUT_PRICE, + SONNET_3_CACHE_WRITE_PRICE, + SONNET_3_CACHE_READ_PRICE, + ) } // 默认 Opus(未明确版本号时,假设是最新版) else if model_lower.contains("opus") { - (OPUS_4_1_INPUT_PRICE, OPUS_4_1_OUTPUT_PRICE, OPUS_4_1_CACHE_WRITE_PRICE, OPUS_4_1_CACHE_READ_PRICE) + ( + OPUS_4_1_INPUT_PRICE, + OPUS_4_1_OUTPUT_PRICE, + OPUS_4_1_CACHE_WRITE_PRICE, + OPUS_4_1_CACHE_READ_PRICE, + ) } // 未知模型 else { log::warn!("Unknown model for cost calculation: {}", model_lower); // 默认使用 Sonnet 3 的价格(保守估计) - (SONNET_3_INPUT_PRICE, SONNET_3_OUTPUT_PRICE, SONNET_3_CACHE_WRITE_PRICE, SONNET_3_CACHE_READ_PRICE) + ( + SONNET_3_INPUT_PRICE, + SONNET_3_OUTPUT_PRICE, + SONNET_3_CACHE_WRITE_PRICE, + SONNET_3_CACHE_READ_PRICE, + ) } } @@ -236,7 +282,8 @@ pub fn parse_jsonl_file( // 智能去重策略 let has_io_tokens = usage.input_tokens.unwrap_or(0) > 0 || usage.output_tokens.unwrap_or(0) > 0; - let has_cache_tokens = usage.cache_creation_input_tokens.unwrap_or(0) > 0 + let has_cache_tokens = usage.cache_creation_input_tokens.unwrap_or(0) + > 0 || usage.cache_read_input_tokens.unwrap_or(0) > 0; let should_skip = if has_io_tokens { @@ -254,7 +301,9 @@ pub fn parse_jsonl_file( } } else if has_cache_tokens { // 缓存令牌:使用 message_id + request_id 宽松去重 - 
if let (Some(msg_id), Some(req_id)) = (&message.id, &entry.request_id) { + if let (Some(msg_id), Some(req_id)) = + (&message.id, &entry.request_id) + { let unique_hash = format!("cache:{}:{}", msg_id, req_id); if processed_hashes.contains(&unique_hash) { true @@ -287,13 +336,16 @@ pub fn parse_jsonl_file( .unwrap_or_else(|| encoded_project_name.to_string()); // 转换时间戳为本地时间格式 - let local_timestamp = if let Ok(dt) = DateTime::parse_from_rfc3339(&entry.timestamp) { - // 转换为本地时区并格式化为 ISO 格式 - dt.with_timezone(&Local).format("%Y-%m-%d %H:%M:%S%.3f").to_string() - } else { - // 如果解析失败,保留原始时间戳 - entry.timestamp.clone() - }; + let local_timestamp = + if let Ok(dt) = DateTime::parse_from_rfc3339(&entry.timestamp) { + // 转换为本地时区并格式化为 ISO 格式 + dt.with_timezone(&Local) + .format("%Y-%m-%d %H:%M:%S%.3f") + .to_string() + } else { + // 如果解析失败,保留原始时间戳 + entry.timestamp.clone() + }; entries.push(UsageEntry { timestamp: local_timestamp, @@ -414,7 +466,9 @@ pub fn get_usage_stats(days: Option) -> Result { // 处理新的本地时间格式 "YYYY-MM-DD HH:MM:SS.sss" let date = if e.timestamp.contains(' ') { // 新格式:直接解析日期部分 - e.timestamp.split(' ').next() + e.timestamp + .split(' ') + .next() .and_then(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").ok()) } else if let Ok(dt) = DateTime::parse_from_rfc3339(&e.timestamp) { // 旧格式:RFC3339 格式 @@ -487,7 +541,12 @@ pub fn get_usage_stats(days: Option) -> Result { // 处理新的本地时间格式 "YYYY-MM-DD HH:MM:SS.sss" let date = if entry.timestamp.contains(' ') { // 新格式:直接提取日期部分 - entry.timestamp.split(' ').next().unwrap_or(&entry.timestamp).to_string() + entry + .timestamp + .split(' ') + .next() + .unwrap_or(&entry.timestamp) + .to_string() } else if let Ok(dt) = DateTime::parse_from_rfc3339(&entry.timestamp) { // 旧格式:RFC3339 格式 dt.with_timezone(&Local).date_naive().to_string() @@ -631,7 +690,9 @@ pub fn get_usage_by_date_range(start_date: String, end_date: String) -> Result Result>>, pub last_scan_time: Arc>>, - pub is_scanning: Arc>, // 防止并发扫描 + pub is_scanning: Arc>, // 防止并发扫描 } #[derive(Debug, Serialize, Deserialize)] @@ -44,10 +43,10 @@ fn ensure_parent_dir(p: &Path) -> std::io::Result<()> { pub fn init_cache_db() -> rusqlite::Result { let path = db_path(); ensure_parent_dir(&path).map_err(|e| rusqlite::Error::ToSqlConversionFailure(Box::new(e)))?; - + let conn = Connection::open(path)?; conn.pragma_update(None, "journal_mode", &"WAL")?; - + // Create schema conn.execute_batch( r#" @@ -86,7 +85,7 @@ pub fn init_cache_db() -> rusqlite::Result { CREATE INDEX IF NOT EXISTS idx_entries_model ON usage_entries(model); "#, )?; - + Ok(conn) } @@ -100,18 +99,22 @@ fn get_file_mtime_ms(path: &Path) -> i64 { } fn get_file_size(path: &Path) -> i64 { - fs::metadata(path) - .map(|m| m.len() as i64) - .unwrap_or(0) + fs::metadata(path).map(|m| m.len() as i64).unwrap_or(0) } fn generate_unique_hash(entry: &UsageEntry, has_io_tokens: bool, has_cache_tokens: bool) -> String { if has_io_tokens { // For I/O tokens: use session_id + timestamp + model - format!("io:{}:{}:{}", entry.session_id, entry.timestamp, entry.model) + format!( + "io:{}:{}:{}", + entry.session_id, entry.timestamp, entry.model + ) } else if has_cache_tokens { // For cache tokens: use timestamp + model + project - format!("cache:{}:{}:{}", entry.timestamp, entry.model, entry.project_path) + format!( + "cache:{}:{}:{}", + entry.timestamp, entry.model, entry.project_path + ) } else { // Fallback format!("other:{}:{}", entry.timestamp, entry.session_id) @@ -133,12 +136,12 @@ pub async fn usage_scan_update(state: State<'_, 
UsageCacheState>) -> Result { is_scanning: &'a Arc>, } - + impl<'a> Drop for ScanGuard<'a> { fn drop(&mut self) { if let Ok(mut is_scanning) = self.is_scanning.lock() { @@ -146,57 +149,59 @@ pub async fn usage_scan_update(state: State<'_, UsageCacheState>) -> Result = HashMap::new(); { let mut stmt = conn .prepare("SELECT file_path, file_size, mtime_ms FROM scanned_files") .map_err(|e| e.to_string())?; - - let rows = stmt.query_map(params![], |row| { - Ok(( - row.get::<_, String>(0)?, - (row.get::<_, i64>(1)?, row.get::<_, i64>(2)?), - )) - }).map_err(|e| e.to_string())?; - + + let rows = stmt + .query_map(params![], |row| { + Ok(( + row.get::<_, String>(0)?, + (row.get::<_, i64>(1)?, row.get::<_, i64>(2)?), + )) + }) + .map_err(|e| e.to_string())?; + for row in rows { if let Ok((path, data)) = row { existing_files.insert(path, data); } } } - + // Find all .jsonl files let mut files_to_process = Vec::new(); let mut all_current_files = HashSet::new(); - + if let Ok(projects) = fs::read_dir(&projects_dir) { for project in projects.flatten() { if project.file_type().map(|t| t.is_dir()).unwrap_or(false) { let project_name = project.file_name().to_string_lossy().to_string(); let project_path = project.path(); - + WalkDir::new(&project_path) .into_iter() .filter_map(Result::ok) @@ -205,17 +210,19 @@ pub async fn usage_scan_update(state: State<'_, UsageCacheState>) -> Result) -> Result) -> Result 0 || entry.output_tokens > 0; let has_cache_tokens = entry.cache_creation_tokens > 0 || entry.cache_read_tokens > 0; let unique_hash = generate_unique_hash(&entry, has_io_tokens, has_cache_tokens); - + let result = tx.execute( "INSERT INTO usage_entries ( timestamp, model, input_tokens, output_tokens, @@ -279,34 +286,40 @@ pub async fn usage_scan_update(state: State<'_, UsageCacheState>) -> Result 0 => entries_added += 1, _ => entries_skipped += 1, } } - + files_scanned += 1; } - + // Remove entries for files that no longer exist for (old_path, _) in existing_files { if !all_current_files.contains(&old_path) { - tx.execute("DELETE FROM usage_entries WHERE file_path = ?1", params![old_path]) - .map_err(|e| e.to_string())?; - tx.execute("DELETE FROM scanned_files WHERE file_path = ?1", params![old_path]) - .map_err(|e| e.to_string())?; + tx.execute( + "DELETE FROM usage_entries WHERE file_path = ?1", + params![old_path], + ) + .map_err(|e| e.to_string())?; + tx.execute( + "DELETE FROM scanned_files WHERE file_path = ?1", + params![old_path], + ) + .map_err(|e| e.to_string())?; } } - + tx.commit().map_err(|e| e.to_string())?; - + // Update last scan time let mut last_scan = state.last_scan_time.lock().map_err(|e| e.to_string())?; *last_scan = Some(start_time); - + let scan_time_ms = (Utc::now().timestamp_millis() - start_time) as u64; - + Ok(ScanResult { files_scanned, entries_added, @@ -325,16 +338,16 @@ pub async fn usage_get_stats_cached( let conn_guard = state.conn.lock().map_err(|e| e.to_string())?; conn_guard.is_none() }; - + if needs_init { // 首次调用,需要初始化和扫描 usage_scan_update(state.clone()).await?; } // 移除自动扫描逻辑,让系统只在手动触发时扫描 - + let conn_guard = state.conn.lock().map_err(|e| e.to_string())?; let conn = conn_guard.as_ref().ok_or("Database not initialized")?; - + // Build date filter let date_filter = if let Some(d) = days { let cutoff = Local::now().naive_local().date() - chrono::Duration::days(d as i64); @@ -342,12 +355,17 @@ pub async fn usage_get_stats_cached( } else { None }; - + // Query total stats - let (total_cost, total_input, total_output, total_cache_creation, total_cache_read): (f64, i64, 
i64, i64, i64) = - if let Some(cutoff) = &date_filter { - conn.query_row( - "SELECT + let (total_cost, total_input, total_output, total_cache_creation, total_cache_read): ( + f64, + i64, + i64, + i64, + i64, + ) = if let Some(cutoff) = &date_filter { + conn.query_row( + "SELECT COALESCE(SUM(cost), 0.0), COALESCE(SUM(input_tokens), 0), COALESCE(SUM(output_tokens), 0), @@ -355,40 +373,60 @@ pub async fn usage_get_stats_cached( COALESCE(SUM(cache_read_tokens), 0) FROM usage_entries WHERE timestamp >= ?1", - params![cutoff], - |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?)), - ).map_err(|e| e.to_string())? - } else { - conn.query_row( - "SELECT + params![cutoff], + |row| { + Ok(( + row.get(0)?, + row.get(1)?, + row.get(2)?, + row.get(3)?, + row.get(4)?, + )) + }, + ) + .map_err(|e| e.to_string())? + } else { + conn.query_row( + "SELECT COALESCE(SUM(cost), 0.0), COALESCE(SUM(input_tokens), 0), COALESCE(SUM(output_tokens), 0), COALESCE(SUM(cache_creation_tokens), 0), COALESCE(SUM(cache_read_tokens), 0) FROM usage_entries", - params![], - |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?)), - ).map_err(|e| e.to_string())? - }; - + params![], + |row| { + Ok(( + row.get(0)?, + row.get(1)?, + row.get(2)?, + row.get(3)?, + row.get(4)?, + )) + }, + ) + .map_err(|e| e.to_string())? + }; + let total_tokens = total_input + total_output + total_cache_creation + total_cache_read; - + // Get session count let total_sessions: i64 = if let Some(cutoff) = &date_filter { conn.query_row( "SELECT COUNT(DISTINCT session_id) FROM usage_entries WHERE timestamp >= ?1", params![cutoff], |row| row.get(0), - ).map_err(|e| e.to_string())? + ) + .map_err(|e| e.to_string())? } else { conn.query_row( "SELECT COUNT(DISTINCT session_id) FROM usage_entries", params![], |row| row.get(0), - ).map_err(|e| e.to_string())? + ) + .map_err(|e| e.to_string())? }; - + // Get stats by model let mut by_model = Vec::new(); { @@ -418,9 +456,9 @@ pub async fn usage_get_stats_cached( GROUP BY model ORDER BY total_cost DESC" }; - + let mut stmt = conn.prepare(query).map_err(|e| e.to_string())?; - + // Create closure once to avoid type mismatch let create_model_usage = |row: &rusqlite::Row| -> rusqlite::Result { Ok(ModelUsage { @@ -434,22 +472,26 @@ pub async fn usage_get_stats_cached( total_tokens: 0, // Will calculate below }) }; - + let rows = if let Some(cutoff) = &date_filter { - stmt.query_map(params![cutoff], create_model_usage).map_err(|e| e.to_string())? + stmt.query_map(params![cutoff], create_model_usage) + .map_err(|e| e.to_string())? } else { - stmt.query_map(params![], create_model_usage).map_err(|e| e.to_string())? + stmt.query_map(params![], create_model_usage) + .map_err(|e| e.to_string())? 
}; - + for row in rows { if let Ok(mut usage) = row { - usage.total_tokens = usage.input_tokens + usage.output_tokens + - usage.cache_creation_tokens + usage.cache_read_tokens; + usage.total_tokens = usage.input_tokens + + usage.output_tokens + + usage.cache_creation_tokens + + usage.cache_read_tokens; by_model.push(usage); } } } - + // Get daily stats let mut by_date = Vec::new(); { @@ -483,19 +525,21 @@ pub async fn usage_get_stats_cached( GROUP BY DATE(timestamp) ORDER BY date DESC" }; - + let mut stmt = conn.prepare(query).map_err(|e| e.to_string())?; - + // Create closure once to avoid type mismatch let create_daily_usage = |row: &rusqlite::Row| -> rusqlite::Result { let models_str: String = row.get(8)?; let models_used: Vec = models_str.split(',').map(|s| s.to_string()).collect(); - + Ok(DailyUsage { date: row.get(0)?, total_cost: row.get(1)?, - total_tokens: (row.get::<_, i64>(2)? + row.get::<_, i64>(3)? + - row.get::<_, i64>(4)? + row.get::<_, i64>(5)?) as u64, + total_tokens: (row.get::<_, i64>(2)? + + row.get::<_, i64>(3)? + + row.get::<_, i64>(4)? + + row.get::<_, i64>(5)?) as u64, input_tokens: row.get::<_, i64>(2)? as u64, output_tokens: row.get::<_, i64>(3)? as u64, cache_creation_tokens: row.get::<_, i64>(4)? as u64, @@ -504,20 +548,22 @@ pub async fn usage_get_stats_cached( models_used, }) }; - + let rows = if let Some(cutoff) = &date_filter { - stmt.query_map(params![cutoff], create_daily_usage).map_err(|e| e.to_string())? + stmt.query_map(params![cutoff], create_daily_usage) + .map_err(|e| e.to_string())? } else { - stmt.query_map(params![], create_daily_usage).map_err(|e| e.to_string())? + stmt.query_map(params![], create_daily_usage) + .map_err(|e| e.to_string())? }; - + for row in rows { if let Ok(daily) = row { by_date.push(daily); } } } - + // Get project stats let mut by_project = Vec::new(); { @@ -543,9 +589,9 @@ pub async fn usage_get_stats_cached( GROUP BY project_path ORDER BY total_cost DESC" }; - + let mut stmt = conn.prepare(query).map_err(|e| e.to_string())?; - + // Create closure once to avoid type mismatch let create_project_usage = |row: &rusqlite::Row| -> rusqlite::Result { Ok(ProjectUsage { @@ -557,17 +603,20 @@ pub async fn usage_get_stats_cached( last_used: row.get(4)?, }) }; - + let rows = if let Some(cutoff) = &date_filter { - stmt.query_map(params![cutoff], create_project_usage).map_err(|e| e.to_string())? + stmt.query_map(params![cutoff], create_project_usage) + .map_err(|e| e.to_string())? } else { - stmt.query_map(params![], create_project_usage).map_err(|e| e.to_string())? + stmt.query_map(params![], create_project_usage) + .map_err(|e| e.to_string())? 
}; - + for row in rows { if let Ok(mut project) = row { // Extract project name from path - project.project_name = project.project_path + project.project_name = project + .project_path .split('/') .last() .unwrap_or(&project.project_path) @@ -576,7 +625,7 @@ pub async fn usage_get_stats_cached( } } } - + Ok(UsageStats { total_cost, total_tokens: total_tokens as u64, @@ -594,20 +643,20 @@ pub async fn usage_get_stats_cached( #[command] pub async fn usage_clear_cache(state: State<'_, UsageCacheState>) -> Result { let mut conn_guard = state.conn.lock().map_err(|e| e.to_string())?; - + if let Some(conn) = conn_guard.as_mut() { conn.execute("DELETE FROM usage_entries", params![]) .map_err(|e| e.to_string())?; conn.execute("DELETE FROM scanned_files", params![]) .map_err(|e| e.to_string())?; - + // 重置last scan time let mut last_scan = state.last_scan_time.lock().map_err(|e| e.to_string())?; *last_scan = None; - + return Ok("Cache cleared successfully. All costs will be recalculated.".to_string()); } - + Ok("No cache to clear.".to_string()) } @@ -615,37 +664,39 @@ pub async fn usage_clear_cache(state: State<'_, UsageCacheState>) -> Result) -> Result { let conn_guard = state.conn.lock().map_err(|e| e.to_string())?; let conn = conn_guard.as_ref().ok_or("Database not initialized")?; - + let claude_path = dirs::home_dir() .ok_or("Failed to get home directory")? .join(".claude"); let projects_dir = claude_path.join("projects"); - + // 获取已知文件的修改时间和大小 let mut stmt = conn .prepare("SELECT file_path, file_size, mtime_ms FROM scanned_files") .map_err(|e| e.to_string())?; - + let mut known_files = std::collections::HashMap::new(); - let rows = stmt.query_map([], |row| { - Ok(( - row.get::<_, String>(0)?, - (row.get::<_, i64>(1)?, row.get::<_, i64>(2)?), - )) - }).map_err(|e| e.to_string())?; - + let rows = stmt + .query_map([], |row| { + Ok(( + row.get::<_, String>(0)?, + (row.get::<_, i64>(1)?, row.get::<_, i64>(2)?), + )) + }) + .map_err(|e| e.to_string())?; + for row in rows { if let Ok((path, data)) = row { known_files.insert(path, data); } } - + // 快速检查是否有文件变化 if let Ok(projects) = fs::read_dir(&projects_dir) { for project in projects.flatten() { if project.file_type().map(|t| t.is_dir()).unwrap_or(false) { let project_path = project.path(); - + for entry in walkdir::WalkDir::new(&project_path) .into_iter() .filter_map(Result::ok) @@ -655,7 +706,7 @@ pub async fn check_files_changed(state: &State<'_, UsageCacheState>) -> Result) -> Result) -> Result) -> Result { // 检查是否有文件更新 check_files_changed(&state).await -} \ No newline at end of file +} diff --git a/src-tauri/src/commands/usage_index.rs b/src-tauri/src/commands/usage_index.rs index 14fc802..429f342 100644 --- a/src-tauri/src/commands/usage_index.rs +++ b/src-tauri/src/commands/usage_index.rs @@ -32,14 +32,20 @@ pub struct UsageSummary { } #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ImportResult { pub inserted: u64, pub skipped: u64, pub errors: u64 } +pub struct ImportResult { + pub inserted: u64, + pub skipped: u64, + pub errors: u64, +} fn db_path_for(project_root: &Path) -> PathBuf { project_root.join(".claudia/cache/usage.sqlite") } fn ensure_parent_dir(p: &Path) -> std::io::Result<()> { - if let Some(dir) = p.parent() { std::fs::create_dir_all(dir)?; } + if let Some(dir) = p.parent() { + std::fs::create_dir_all(dir)?; + } Ok(()) } @@ -101,7 +107,9 @@ fn sha256_file(path: &Path) -> std::io::Result { let mut buf = [0u8; 8192]; loop { let n = file.read(&mut buf)?; - if n == 0 { break; } + if n == 0 { + break; + } 
hasher.update(&buf[..n]); } Ok(format!("{:x}", hasher.finalize())) @@ -124,9 +132,13 @@ fn count_lines_chars_tokens(path: &Path) -> std::io::Result<(u64, u64, u64)> { fn should_exclude(rel: &str, excludes: &HashSet) -> bool { // simple prefix/segment check - let default = ["node_modules/", "dist/", "target/", ".git/" ]; - if default.iter().any(|p| rel.starts_with(p)) { return true; } - if rel.ends_with(".lock") { return true; } + let default = ["node_modules/", "dist/", "target/", ".git/"]; + if default.iter().any(|p| rel.starts_with(p)) { + return true; + } + if rel.ends_with(".lock") { + return true; + } excludes.iter().any(|p| rel.starts_with(p)) } @@ -137,34 +149,56 @@ pub async fn usage_scan_index( state: State<'_, UsageIndexState>, ) -> Result { let project = PathBuf::from(project_root.clone()); - if !project.is_dir() { return Err("project_root is not a directory".into()); } + if !project.is_dir() { + return Err("project_root is not a directory".into()); + } let job_id = uuid::Uuid::new_v4().to_string(); { let mut jobs = state.jobs.lock().map_err(|e| e.to_string())?; - jobs.insert(job_id.clone(), ScanProgress{ processed:0, total:0, started_ts: Utc::now().timestamp_millis(), finished_ts: None}); + jobs.insert( + job_id.clone(), + ScanProgress { + processed: 0, + total: 0, + started_ts: Utc::now().timestamp_millis(), + finished_ts: None, + }, + ); } let excludes: HashSet = exclude.unwrap_or_default().into_iter().collect(); let state_jobs = state.jobs.clone(); let job_id_task = job_id.clone(); let job_id_ret = job_id.clone(); tauri::async_runtime::spawn(async move { - let mut conn = match open_db(&project) { Ok(c)=>c, Err(e)=>{ log::error!("DB open error: {}", e); return; } }; + let mut conn = match open_db(&project) { + Ok(c) => c, + Err(e) => { + log::error!("DB open error: {}", e); + return; + } + }; // First pass: count total let mut total: u64 = 0; for entry in WalkDir::new(&project).into_iter().filter_map(Result::ok) { if entry.file_type().is_file() { if let Ok(rel) = entry.path().strip_prefix(&project) { - let rel = rel.to_string_lossy().replace('\\',"/"); - if should_exclude(&format!("{}/", rel).trim_end_matches('/'), &excludes) { continue; } + let rel = rel.to_string_lossy().replace('\\', "/"); + if should_exclude(&format!("{}/", rel).trim_end_matches('/'), &excludes) { + continue; + } total += 1; } } } { - if let Ok(mut jobs) = state_jobs.lock() { if let Some(p) = jobs.get_mut(&job_id_task){ p.total = total; } } + if let Ok(mut jobs) = state_jobs.lock() { + if let Some(p) = jobs.get_mut(&job_id_task) { + p.total = total; + } + } } // Cache existing file meta - let mut existing: HashMap = HashMap::new(); // rel -> (size, mtime, sha, file_id) + let mut existing: HashMap = HashMap::new(); // rel -> (size, mtime, sha, file_id) { let stmt = conn.prepare("SELECT id, rel_path, size_bytes, mtime_ms, sha256 FROM files WHERE project_root=?1").ok(); if let Some(mut st) = stmt { @@ -176,7 +210,11 @@ pub async fn usage_scan_index( let sha: String = row.get(4)?; Ok((rel, (size, mtime, sha, id))) }); - if let Ok(rows) = rows { for r in rows.flatten(){ existing.insert(r.0, r.1); } } + if let Ok(rows) = rows { + for r in rows.flatten() { + existing.insert(r.0, r.1); + } + } } } @@ -188,17 +226,37 @@ pub async fn usage_scan_index( for entry in WalkDir::new(&project).into_iter().filter_map(Result::ok) { if entry.file_type().is_file() { if let Ok(relp) = entry.path().strip_prefix(&project) { - let rel = relp.to_string_lossy().replace('\\',"/"); + let rel = relp.to_string_lossy().replace('\\', 
"/"); let rel_norm = rel.clone(); - if should_exclude(&format!("{}/", rel_norm).trim_end_matches('/'), &excludes) { continue; } - let md = match entry.metadata() { Ok(m)=>m, Err(_)=>{ continue } }; + if should_exclude( + &format!("{}/", rel_norm).trim_end_matches('/'), + &excludes, + ) { + continue; + } + let md = match entry.metadata() { + Ok(m) => m, + Err(_) => continue, + }; let size = md.len() as i64; - let mtime = md.modified().ok().and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()).map(|d| d.as_millis() as i64).unwrap_or(0); + let mtime = md + .modified() + .ok() + .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()) + .map(|d| d.as_millis() as i64) + .unwrap_or(0); let mut content_changed = true; let sha: String; if let Some((esize, emtime, esha, _fid)) = existing.get(&rel_norm) { - if *esize == size && *emtime == mtime { content_changed = false; sha = esha.clone(); } - else { sha = sha256_file(entry.path()).unwrap_or_default(); if sha == *esha { content_changed = false; } } + if *esize == size && *emtime == mtime { + content_changed = false; + sha = esha.clone(); + } else { + sha = sha256_file(entry.path()).unwrap_or_default(); + if sha == *esha { + content_changed = false; + } + } } else { sha = sha256_file(entry.path()).unwrap_or_default(); } @@ -211,14 +269,19 @@ pub async fn usage_scan_index( ).ok(); // get file_id - let file_id: i64 = tx.query_row( - "SELECT id FROM files WHERE project_root=?1 AND rel_path=?2", - params![project.to_string_lossy(), rel_norm], |row| row.get(0) - ).unwrap_or(-1); + let file_id: i64 = tx + .query_row( + "SELECT id FROM files WHERE project_root=?1 AND rel_path=?2", + params![project.to_string_lossy(), rel_norm], + |row| row.get(0), + ) + .unwrap_or(-1); // metrics if content_changed { - if let Ok((lines, chars, tokens)) = count_lines_chars_tokens(entry.path()) { + if let Ok((lines, chars, tokens)) = + count_lines_chars_tokens(entry.path()) + { tx.execute( "INSERT INTO file_metrics(file_id, snapshot_ts, lines, tokens, chars) VALUES (?1,?2,?3,?4,?5)", params![file_id, now, lines as i64, tokens as i64, chars as i64] @@ -228,13 +291,29 @@ pub async fn usage_scan_index( "SELECT lines, tokens, snapshot_ts FROM file_metrics WHERE file_id=?1 ORDER BY snapshot_ts DESC LIMIT 1 OFFSET 1", params![file_id], |r| Ok((r.get(0)?, r.get(1)?, r.get(2)?)) ).ok(); - let (added_l, removed_l, added_t, removed_t, prev_ts, change_type) = match prev { - None => (lines as i64, 0, tokens as i64, 0, None, "created".to_string()), - Some((pl, pt, pts)) => { - let dl = lines as i64 - pl; let dt = tokens as i64 - pt; - (dl.max(0), (-dl).max(0), dt.max(0), (-dt).max(0), Some(pts), "modified".to_string()) - } - }; + let (added_l, removed_l, added_t, removed_t, prev_ts, change_type) = + match prev { + None => ( + lines as i64, + 0, + tokens as i64, + 0, + None, + "created".to_string(), + ), + Some((pl, pt, pts)) => { + let dl = lines as i64 - pl; + let dt = tokens as i64 - pt; + ( + dl.max(0), + (-dl).max(0), + dt.max(0), + (-dt).max(0), + Some(pts), + "modified".to_string(), + ) + } + }; tx.execute( "INSERT INTO file_diffs(file_id, snapshot_ts, prev_snapshot_ts, added_lines, removed_lines, added_tokens, removed_tokens, change_type) VALUES (?1,?2,?3,?4,?5,?6,?7,?8)", params![file_id, now, prev_ts, added_l, removed_l, added_t, removed_t, change_type] @@ -243,22 +322,42 @@ pub async fn usage_scan_index( } seen.insert(rel_norm); processed += 1; - if let Ok(mut jobs) = state_jobs.lock() { if let Some(p) = jobs.get_mut(&job_id_task){ p.processed = processed; } } + if let 
Ok(mut jobs) = state_jobs.lock() { + if let Some(p) = jobs.get_mut(&job_id_task) { + p.processed = processed; + } + } } } } // deletions: files in DB but not seen - let mut to_delete: Vec<(i64,i64,i64)> = Vec::new(); // (file_id, last_lines, last_tokens) + let mut to_delete: Vec<(i64, i64, i64)> = Vec::new(); // (file_id, last_lines, last_tokens) { let stmt = tx.prepare("SELECT f.id, m.lines, m.tokens FROM files f LEFT JOIN file_metrics m ON m.file_id=f.id WHERE f.project_root=?1 AND m.snapshot_ts=(SELECT MAX(snapshot_ts) FROM file_metrics WHERE file_id=f.id)").ok(); if let Some(mut st) = stmt { - let rows = st.query_map(params![project.to_string_lossy()], |row| Ok((row.get(0)?, row.get::<_,Option>(1).unwrap_or(None).unwrap_or(0), row.get::<_,Option>(2).unwrap_or(None).unwrap_or(0)))) ; - if let Ok(rows) = rows { for r in rows.flatten() { to_delete.push(r); } } + let rows = st.query_map(params![project.to_string_lossy()], |row| { + Ok(( + row.get(0)?, + row.get::<_, Option>(1).unwrap_or(None).unwrap_or(0), + row.get::<_, Option>(2).unwrap_or(None).unwrap_or(0), + )) + }); + if let Ok(rows) = rows { + for r in rows.flatten() { + to_delete.push(r); + } + } } } for (fid, last_lines, last_tokens) in to_delete { - let rel: String = tx.query_row("SELECT rel_path FROM files WHERE id=?1", params![fid], |r| r.get(0)).unwrap_or_default(); + let rel: String = tx + .query_row( + "SELECT rel_path FROM files WHERE id=?1", + params![fid], + |r| r.get(0), + ) + .unwrap_or_default(); if !seen.contains(&rel) { tx.execute( "INSERT INTO file_diffs(file_id, snapshot_ts, prev_snapshot_ts, added_lines, removed_lines, added_tokens, removed_tokens, change_type) VALUES (?1,?2,NULL,0,?3,0,?4,'deleted')", @@ -270,41 +369,76 @@ pub async fn usage_scan_index( tx.commit().ok(); } - if let Ok(mut jobs) = state_jobs.lock() { if let Some(p) = jobs.get_mut(&job_id_task){ p.finished_ts = Some(Utc::now().timestamp_millis()); } } + if let Ok(mut jobs) = state_jobs.lock() { + if let Some(p) = jobs.get_mut(&job_id_task) { + p.finished_ts = Some(Utc::now().timestamp_millis()); + } + } }); Ok(job_id_ret) } #[tauri::command] -pub fn usage_scan_progress(job_id: String, state: State<'_, UsageIndexState>) -> Result { +pub fn usage_scan_progress( + job_id: String, + state: State<'_, UsageIndexState>, +) -> Result { let jobs = state.jobs.lock().map_err(|e| e.to_string())?; - jobs.get(&job_id).cloned().ok_or_else(|| "job not found".into()) + jobs.get(&job_id) + .cloned() + .ok_or_else(|| "job not found".into()) } #[tauri::command] pub fn usage_get_summary(project_root: String) -> Result { let project = PathBuf::from(project_root); let conn = open_db(&project).map_err(|e| e.to_string())?; - let files: u64 = conn.query_row("SELECT COUNT(*) FROM files WHERE project_root=?1", params![project.to_string_lossy()], |r| r.get::<_,i64>(0)).unwrap_or(0) as u64; - let mut lines: u64 = 0; let mut tokens: u64 = 0; let mut last_ts: Option = None; + let files: u64 = conn + .query_row( + "SELECT COUNT(*) FROM files WHERE project_root=?1", + params![project.to_string_lossy()], + |r| r.get::<_, i64>(0), + ) + .unwrap_or(0) as u64; + let mut lines: u64 = 0; + let mut tokens: u64 = 0; + let mut last_ts: Option = None; let mut stmt = conn.prepare("SELECT MAX(snapshot_ts), SUM(lines), SUM(tokens) FROM file_metrics WHERE file_id IN (SELECT id FROM files WHERE project_root=?1)").map_err(|e| e.to_string())?; let res = stmt.query_row(params![project.to_string_lossy()], |r| { - Ok((r.get::<_,Option>(0)?, r.get::<_,Option>(1)?, r.get::<_,Option>(2)?)) + Ok(( + 
r.get::<_, Option>(0)?, + r.get::<_, Option>(1)?, + r.get::<_, Option>(2)?, + )) }); - if let Ok((mx, lsum, tsum)) = res { last_ts = mx; lines = lsum.unwrap_or(0) as u64; tokens = tsum.unwrap_or(0) as u64; } - Ok(UsageSummary{ files, tokens, lines, last_scan_ts: last_ts }) + if let Ok((mx, lsum, tsum)) = res { + last_ts = mx; + lines = lsum.unwrap_or(0) as u64; + tokens = tsum.unwrap_or(0) as u64; + } + Ok(UsageSummary { + files, + tokens, + lines, + last_scan_ts: last_ts, + }) } #[derive(Debug, Deserialize)] struct ExternalDiff { rel_path: String, snapshot_ts: i64, - #[serde(default)] prev_snapshot_ts: Option, - #[serde(default)] added_lines: i64, - #[serde(default)] removed_lines: i64, - #[serde(default)] added_tokens: i64, - #[serde(default)] removed_tokens: i64, + #[serde(default)] + prev_snapshot_ts: Option, + #[serde(default)] + added_lines: i64, + #[serde(default)] + removed_lines: i64, + #[serde(default)] + added_tokens: i64, + #[serde(default)] + removed_tokens: i64, change_type: String, } @@ -313,19 +447,33 @@ pub fn usage_import_diffs(project_root: String, path: String) -> Result = Vec::new(); match serde_json::from_str::(&data) { Ok(serde_json::Value::Array(arr)) => { - for v in arr { if let Ok(d) = serde_json::from_value::(v) { diffs.push(d); } } - }, + for v in arr { + if let Ok(d) = serde_json::from_value::(v) { + diffs.push(d); + } + } + } _ => { // try NDJSON for line in data.lines() { - let l = line.trim(); if l.is_empty() { continue; } - match serde_json::from_str::(l) { Ok(d)=>diffs.push(d), Err(_)=>{ errors+=1; } } + let l = line.trim(); + if l.is_empty() { + continue; + } + match serde_json::from_str::(l) { + Ok(d) => diffs.push(d), + Err(_) => { + errors += 1; + } + } } } } @@ -336,18 +484,31 @@ pub fn usage_import_diffs(project_root: String, path: String) -> Result = tx.query_row( - "SELECT id FROM files WHERE project_root=?1 AND rel_path=?2", - params![project.to_string_lossy(), d.rel_path], |r| r.get(0) - ).ok(); + let file_id: Option = tx + .query_row( + "SELECT id FROM files WHERE project_root=?1 AND rel_path=?2", + params![project.to_string_lossy(), d.rel_path], + |r| r.get(0), + ) + .ok(); if let Some(fid) = file_id { let res = tx.execute( "INSERT INTO file_diffs(file_id, snapshot_ts, prev_snapshot_ts, added_lines, removed_lines, added_tokens, removed_tokens, change_type) VALUES (?1,?2,?3,?4,?5,?6,?7,?8)", params![fid, d.snapshot_ts, d.prev_snapshot_ts, d.added_lines, d.removed_lines, d.added_tokens, d.removed_tokens, d.change_type] ); - if res.is_ok() { inserted+=1; } else { skipped+=1; } - } else { errors+=1; } + if res.is_ok() { + inserted += 1; + } else { + skipped += 1; + } + } else { + errors += 1; + } } tx.commit().map_err(|e| e.to_string())?; - Ok(ImportResult{ inserted, skipped, errors }) + Ok(ImportResult { + inserted, + skipped, + errors, + }) } diff --git a/src-tauri/src/file_watcher.rs b/src-tauri/src/file_watcher.rs index 641d5f0..a143d4d 100644 --- a/src-tauri/src/file_watcher.rs +++ b/src-tauri/src/file_watcher.rs @@ -31,7 +31,7 @@ impl FileWatcherManager { /// 监听指定路径(文件或目录) pub fn watch_path(&self, path: &str, recursive: bool) -> Result<(), String> { let path_buf = PathBuf::from(path); - + // 检查路径是否存在 if !path_buf.exists() { return Err(format!("Path does not exist: {}", path)); @@ -52,20 +52,19 @@ impl FileWatcherManager { // 创建文件监听器 let mut watcher = RecommendedWatcher::new( - move |res: Result| { - match res { - Ok(event) => { - Self::handle_event(event, &app_handle, &last_events); - } - Err(e) => { - log::error!("Watch error: {:?}", e); - } 
+ move |res: Result| match res { + Ok(event) => { + Self::handle_event(event, &app_handle, &last_events); + } + Err(e) => { + log::error!("Watch error: {:?}", e); } }, Config::default() .with_poll_interval(Duration::from_secs(1)) .with_compare_contents(false), - ).map_err(|e| format!("Failed to create watcher: {}", e))?; + ) + .map_err(|e| format!("Failed to create watcher: {}", e))?; // 开始监听 let mode = if recursive { @@ -89,7 +88,7 @@ impl FileWatcherManager { /// 停止监听指定路径 pub fn unwatch_path(&self, path: &str) -> Result<(), String> { let mut watchers = self.watchers.lock().unwrap(); - + if watchers.remove(path).is_some() { log::info!("Stopped watching path: {}", path); Ok(()) @@ -108,7 +107,11 @@ impl FileWatcherManager { } /// 处理文件系统事件 - fn handle_event(event: Event, app_handle: &AppHandle, last_events: &Arc>>) { + fn handle_event( + event: Event, + app_handle: &AppHandle, + last_events: &Arc>>, + ) { // 过滤不需要的事件 let change_type = match event.kind { EventKind::Create(_) => "created", @@ -123,10 +126,12 @@ impl FileWatcherManager { let now = SystemTime::now(); let should_emit = { let mut last_events = last_events.lock().unwrap(); - + if let Some(last_time) = last_events.get(&path) { // 如果距离上次事件不到500ms,忽略 - if now.duration_since(*last_time).unwrap_or(Duration::ZERO) < Duration::from_millis(500) { + if now.duration_since(*last_time).unwrap_or(Duration::ZERO) + < Duration::from_millis(500) + { false } else { last_events.insert(path.clone(), now); @@ -192,4 +197,4 @@ impl FileWatcherState { None => Err("File watcher manager not initialized".to_string()), } } -} \ No newline at end of file +} diff --git a/src-tauri/src/i18n.rs b/src-tauri/src/i18n.rs index fa19272..3acd79a 100644 --- a/src-tauri/src/i18n.rs +++ b/src-tauri/src/i18n.rs @@ -33,7 +33,7 @@ impl SimpleI18n { #[allow(dead_code)] pub fn t(&self, key: &str) -> String { let locale = self.get_current_locale(); - + // 简单的翻译映射,避免复杂的 FluentBundle match (locale.as_str(), key) { // 英文翻译 @@ -42,41 +42,69 @@ impl SimpleI18n { ("en-US", "error-failed-to-delete") => "Failed to delete".to_string(), ("en-US", "agent-not-found") => "Agent not found".to_string(), ("en-US", "claude-not-installed") => "Claude Code is not installed".to_string(), - + // Relay Station English translations - ("en-US", "relay_adapter.custom_no_test") => "Custom configuration, connection test skipped".to_string(), - ("en-US", "relay_adapter.packycode_single_token") => "PackyCode only supports single API key".to_string(), - ("en-US", "relay_adapter.user_info_not_available") => "User info not available for this configuration".to_string(), - ("en-US", "relay_adapter.usage_logs_not_available") => "Usage logs not available for this configuration".to_string(), - ("en-US", "relay_adapter.token_management_not_available") => "Token management not available for this configuration".to_string(), + ("en-US", "relay_adapter.custom_no_test") => { + "Custom configuration, connection test skipped".to_string() + } + ("en-US", "relay_adapter.packycode_single_token") => { + "PackyCode only supports single API key".to_string() + } + ("en-US", "relay_adapter.user_info_not_available") => { + "User info not available for this configuration".to_string() + } + ("en-US", "relay_adapter.usage_logs_not_available") => { + "Usage logs not available for this configuration".to_string() + } + ("en-US", "relay_adapter.token_management_not_available") => { + "Token management not available for this configuration".to_string() + } ("en-US", "relay_adapter.connection_success") => "Connection 
successful".to_string(), ("en-US", "relay_adapter.api_error") => "API returned error".to_string(), ("en-US", "relay_adapter.parse_error") => "Failed to parse response".to_string(), ("en-US", "relay_adapter.http_error") => "HTTP request failed".to_string(), ("en-US", "relay_adapter.network_error") => "Network connection failed".to_string(), - ("en-US", "relay_station.enabled_success") => "Relay station enabled successfully".to_string(), - ("en-US", "relay_station.disabled_success") => "Relay station disabled successfully".to_string(), + ("en-US", "relay_station.enabled_success") => { + "Relay station enabled successfully".to_string() + } + ("en-US", "relay_station.disabled_success") => { + "Relay station disabled successfully".to_string() + } ("en-US", "relay_station.name_required") => "Station name is required".to_string(), ("en-US", "relay_station.api_url_required") => "API URL is required".to_string(), ("en-US", "relay_station.invalid_url") => "Invalid URL format".to_string(), - ("en-US", "relay_station.https_required") => "API URL must use HTTPS protocol for security".to_string(), + ("en-US", "relay_station.https_required") => { + "API URL must use HTTPS protocol for security".to_string() + } ("en-US", "relay_station.token_required") => "API token is required".to_string(), - ("en-US", "relay_station.token_too_short") => "API token is too short (minimum 10 characters)".to_string(), - ("en-US", "relay_station.token_invalid_chars") => "API token contains invalid characters".to_string(), - + ("en-US", "relay_station.token_too_short") => { + "API token is too short (minimum 10 characters)".to_string() + } + ("en-US", "relay_station.token_invalid_chars") => { + "API token contains invalid characters".to_string() + } + // 中文翻译 ("zh-CN", "error-failed-to-create") => "创建失败".to_string(), ("zh-CN", "error-failed-to-update") => "更新失败".to_string(), ("zh-CN", "error-failed-to-delete") => "删除失败".to_string(), ("zh-CN", "agent-not-found") => "未找到智能体".to_string(), ("zh-CN", "claude-not-installed") => "未安装 Claude Code".to_string(), - + // Relay Station Chinese translations ("zh-CN", "relay_adapter.custom_no_test") => "自定义配置,跳过连接测试".to_string(), - ("zh-CN", "relay_adapter.packycode_single_token") => "PackyCode 仅支持单个 API 密钥".to_string(), - ("zh-CN", "relay_adapter.user_info_not_available") => "该配置不支持用户信息查询".to_string(), - ("zh-CN", "relay_adapter.usage_logs_not_available") => "该配置不支持使用日志查询".to_string(), - ("zh-CN", "relay_adapter.token_management_not_available") => "该配置不支持 Token 管理".to_string(), + ("zh-CN", "relay_adapter.packycode_single_token") => { + "PackyCode 仅支持单个 API 密钥".to_string() + } + ("zh-CN", "relay_adapter.user_info_not_available") => { + "该配置不支持用户信息查询".to_string() + } + ("zh-CN", "relay_adapter.usage_logs_not_available") => { + "该配置不支持使用日志查询".to_string() + } + ("zh-CN", "relay_adapter.token_management_not_available") => { + "该配置不支持 Token 管理".to_string() + } ("zh-CN", "relay_adapter.connection_success") => "连接成功".to_string(), ("zh-CN", "relay_adapter.api_error") => "API 返回错误".to_string(), ("zh-CN", "relay_adapter.parse_error") => "解析响应失败".to_string(), @@ -87,11 +115,15 @@ impl SimpleI18n { ("zh-CN", "relay_station.name_required") => "中转站名称不能为空".to_string(), ("zh-CN", "relay_station.api_url_required") => "API地址不能为空".to_string(), ("zh-CN", "relay_station.invalid_url") => "无效的URL格式".to_string(), - ("zh-CN", "relay_station.https_required") => "出于安全考虑,API地址必须使用HTTPS协议".to_string(), + ("zh-CN", "relay_station.https_required") => { + "出于安全考虑,API地址必须使用HTTPS协议".to_string() + } ("zh-CN", 
"relay_station.token_required") => "API令牌不能为空".to_string(), - ("zh-CN", "relay_station.token_too_short") => "API令牌太短(至少需要10个字符)".to_string(), + ("zh-CN", "relay_station.token_too_short") => { + "API令牌太短(至少需要10个字符)".to_string() + } ("zh-CN", "relay_station.token_invalid_chars") => "API令牌包含无效字符".to_string(), - + // 默认情况 _ => key.to_string(), } @@ -118,4 +150,4 @@ pub fn set_locale(locale: &str) -> Result<(), Box> { pub fn get_current_locale() -> String { get_i18n().get_current_locale() -} \ No newline at end of file +} diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index 102bae5..32c0626 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -5,9 +5,9 @@ pub mod checkpoint; pub mod claude_binary; pub mod claude_config; pub mod commands; -pub mod process; -pub mod i18n; pub mod file_watcher; +pub mod i18n; +pub mod process; #[cfg_attr(mobile, tauri::mobile_entry_point)] pub fn run() { diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs index d98f7e6..f4eef6e 100644 --- a/src-tauri/src/main.rs +++ b/src-tauri/src/main.rs @@ -3,98 +3,97 @@ mod checkpoint; mod claude_binary; -mod commands; -mod process; -mod i18n; mod claude_config; +mod commands; mod file_watcher; +mod i18n; +mod process; use checkpoint::state::CheckpointState; use commands::agents::{ cleanup_finished_processes, create_agent, delete_agent, execute_agent, export_agent, export_agent_to_file, fetch_github_agent_content, fetch_github_agents, get_agent, get_agent_run, get_agent_run_with_real_time_metrics, get_claude_binary_path, - get_live_session_output, get_session_output, get_session_status, import_agent, - import_agent_from_file, import_agent_from_github, init_database, kill_agent_session, - list_agent_runs, list_agent_runs_with_metrics, list_agents, list_claude_installations, - list_running_sessions, load_agent_session_history, set_claude_binary_path, stream_session_output, update_agent, AgentDb, - get_model_mappings, update_model_mapping, + get_live_session_output, get_model_mappings, get_session_output, get_session_status, + import_agent, import_agent_from_file, import_agent_from_github, init_database, + kill_agent_session, list_agent_runs, list_agent_runs_with_metrics, list_agents, + list_claude_installations, list_running_sessions, load_agent_session_history, + set_claude_binary_path, stream_session_output, update_agent, update_model_mapping, AgentDb, }; use commands::claude::{ cancel_claude_execution, check_auto_checkpoint, check_claude_version, cleanup_old_checkpoints, clear_checkpoint_manager, continue_claude_code, create_checkpoint, execute_claude_code, find_claude_md_files, fork_from_checkpoint, get_checkpoint_diff, get_checkpoint_settings, - get_checkpoint_state_stats, get_claude_session_output, get_claude_settings, get_project_sessions, - get_recently_modified_files, get_session_timeline, get_system_prompt, list_checkpoints, - list_directory_contents, list_projects, list_running_claude_sessions, load_session_history, - open_new_session, read_claude_md_file, restore_checkpoint, resume_claude_code, - save_claude_md_file, save_claude_settings, save_system_prompt, search_files, - track_checkpoint_message, track_session_messages, update_checkpoint_settings, - get_hooks_config, update_hooks_config, validate_hook_command, - watch_claude_project_directory, unwatch_claude_project_directory, - ClaudeProcessState, + get_checkpoint_state_stats, get_claude_session_output, get_claude_settings, get_hooks_config, + get_project_sessions, get_recently_modified_files, get_session_timeline, get_system_prompt, + 
list_checkpoints, list_directory_contents, list_projects, list_running_claude_sessions, + load_session_history, open_new_session, read_claude_md_file, restore_checkpoint, + resume_claude_code, save_claude_md_file, save_claude_settings, save_system_prompt, + search_files, track_checkpoint_message, track_session_messages, + unwatch_claude_project_directory, update_checkpoint_settings, update_hooks_config, + validate_hook_command, watch_claude_project_directory, ClaudeProcessState, }; use commands::mcp::{ - mcp_add, mcp_add_from_claude_desktop, mcp_add_json, mcp_get, mcp_get_server_status, mcp_list, - mcp_read_project_config, mcp_remove, mcp_reset_project_choices, mcp_save_project_config, - mcp_serve, mcp_test_connection, mcp_export_servers, + mcp_add, mcp_add_from_claude_desktop, mcp_add_json, mcp_export_servers, mcp_get, + mcp_get_server_status, mcp_list, mcp_read_project_config, mcp_remove, + mcp_reset_project_choices, mcp_save_project_config, mcp_serve, mcp_test_connection, }; +use commands::ccr::{ + check_ccr_installation, get_ccr_config_path, get_ccr_service_status, get_ccr_version, + open_ccr_ui, restart_ccr_service, start_ccr_service, stop_ccr_service, +}; +use commands::filesystem::{ + get_file_info, get_file_tree, get_watched_paths, read_directory_tree, read_file, + search_files_by_name, unwatch_directory, watch_directory, write_file, +}; +use commands::git::{ + get_git_branches, get_git_commits, get_git_diff, get_git_history, get_git_status, +}; +use commands::language::{get_current_language, get_supported_languages, set_language}; +use commands::packycode_nodes::{ + auto_select_best_node, get_packycode_nodes, test_all_packycode_nodes, +}; +use commands::proxy::{apply_proxy_settings, get_proxy_settings, save_proxy_settings}; +use commands::relay_adapters::{ + packycode_get_user_quota, relay_station_create_token, relay_station_delete_token, + relay_station_get_info, relay_station_get_usage_logs, relay_station_get_user_info, + relay_station_list_tokens, relay_station_test_connection, relay_station_update_token, +}; +use commands::relay_stations::{ + relay_station_create, relay_station_delete, relay_station_get, + relay_station_get_current_config, relay_station_restore_config, relay_station_sync_config, + relay_station_toggle_enable, relay_station_update, relay_station_update_order, + relay_stations_export, relay_stations_import, relay_stations_list, +}; +use commands::smart_sessions::{ + cleanup_old_smart_sessions_command, create_smart_quick_start_session, get_smart_session_config, + list_smart_sessions_command, toggle_smart_session_mode, update_smart_session_config, +}; +use commands::storage::{ + storage_delete_row, storage_execute_sql, storage_insert_row, storage_list_tables, + storage_read_table, storage_reset_database, storage_update_row, +}; +use commands::system::flush_dns; +use commands::terminal::{ + cleanup_terminal_sessions, close_terminal_session, create_terminal_session, + list_terminal_sessions, resize_terminal, send_terminal_input, TerminalState, +}; use commands::usage::{ get_session_stats, get_usage_by_date_range, get_usage_details, get_usage_stats, }; +use commands::usage_cache::{ + usage_check_updates, usage_clear_cache, usage_force_scan, usage_get_stats_cached, + usage_scan_update, UsageCacheState, +}; use commands::usage_index::{ usage_get_summary, usage_import_diffs, usage_scan_index, usage_scan_progress, UsageIndexState, }; -use commands::usage_cache::{ - usage_scan_update, usage_get_stats_cached, usage_clear_cache, usage_force_scan, usage_check_updates, 
UsageCacheState, -}; -use commands::storage::{ - storage_list_tables, storage_read_table, storage_update_row, storage_delete_row, - storage_insert_row, storage_execute_sql, storage_reset_database, -}; -use commands::proxy::{get_proxy_settings, save_proxy_settings, apply_proxy_settings}; -use commands::language::{get_current_language, set_language, get_supported_languages}; -use commands::relay_stations::{ - relay_stations_list, relay_station_get, relay_station_create, relay_station_update, - relay_station_delete, relay_station_toggle_enable, relay_station_sync_config, - relay_station_restore_config, relay_station_get_current_config, - relay_stations_export, relay_stations_import, relay_station_update_order, -}; -use commands::relay_adapters::{ - relay_station_get_info, relay_station_get_user_info, - relay_station_test_connection, relay_station_get_usage_logs, relay_station_list_tokens, - relay_station_create_token, relay_station_update_token, relay_station_delete_token, - packycode_get_user_quota, -}; -use commands::packycode_nodes::{ - test_all_packycode_nodes, auto_select_best_node, get_packycode_nodes, -}; -use commands::filesystem::{ - read_directory_tree, search_files_by_name, get_file_info, watch_directory, - read_file, write_file, get_file_tree, unwatch_directory, get_watched_paths, -}; -use commands::git::{ - get_git_status, get_git_history, get_git_branches, get_git_diff, get_git_commits, -}; -use commands::terminal::{ - create_terminal_session, send_terminal_input, close_terminal_session, - list_terminal_sessions, resize_terminal, cleanup_terminal_sessions, TerminalState, -}; -use commands::ccr::{ - check_ccr_installation, get_ccr_version, get_ccr_service_status, start_ccr_service, - stop_ccr_service, restart_ccr_service, open_ccr_ui, get_ccr_config_path, -}; -use commands::system::flush_dns; -use commands::smart_sessions::{ - create_smart_quick_start_session, get_smart_session_config, update_smart_session_config, - list_smart_sessions_command, toggle_smart_session_mode, cleanup_old_smart_sessions_command, -}; -use process::ProcessRegistryState; use file_watcher::FileWatcherState; +use process::ProcessRegistryState; use std::sync::Mutex; -use tauri::Manager; use tauri::menu::{MenuBuilder, MenuItemBuilder, SubmenuBuilder}; +use tauri::Manager; use tauri_plugin_log::{Target, TargetKind}; fn main() { @@ -105,30 +104,34 @@ fn main() { .plugin(tauri_plugin_shell::init()) .plugin(tauri_plugin_fs::init()) .plugin(tauri_plugin_clipboard_manager::init()) - .plugin(tauri_plugin_log::Builder::new() - .level(log::LevelFilter::Debug) - .targets([ - Target::new(TargetKind::LogDir { file_name: None }), - Target::new(TargetKind::Stdout), - ]) - .build()) + .plugin( + tauri_plugin_log::Builder::new() + .level(log::LevelFilter::Debug) + .targets([ + Target::new(TargetKind::LogDir { file_name: None }), + Target::new(TargetKind::Stdout), + ]) + .build(), + ) // App menu: include standard Edit actions so OS hotkeys (Undo/Redo/Cut/Copy/Paste/Select All) // work across all pages, plus a DevTools toggle. 
.menu(|app| { #[cfg(target_os = "macos")] { use tauri::menu::AboutMetadataBuilder; - + // Create macOS app menu with Quit let app_menu = SubmenuBuilder::new(app, "Claudia") - .about(Some(AboutMetadataBuilder::new() - .version(Some(env!("CARGO_PKG_VERSION"))) - .build())) + .about(Some( + AboutMetadataBuilder::new() + .version(Some(env!("CARGO_PKG_VERSION"))) + .build(), + )) .separator() .quit() .build() .unwrap(); - + let edit_menu = SubmenuBuilder::new(app, "Edit") .undo() .redo() @@ -139,26 +142,28 @@ fn main() { .select_all() .build() .unwrap(); - + let window_menu = SubmenuBuilder::new(app, "Window") .close_window() .minimize() .separator() - .item(&MenuItemBuilder::new("Toggle DevTools") - .id("toggle-devtools") - .accelerator("CmdOrCtrl+Alt+I") - .build(app) - .unwrap()) + .item( + &MenuItemBuilder::new("Toggle DevTools") + .id("toggle-devtools") + .accelerator("CmdOrCtrl+Alt+I") + .build(app) + .unwrap(), + ) .build() .unwrap(); - + MenuBuilder::new(app) .item(&app_menu) .item(&edit_menu) .item(&window_menu) .build() } - + #[cfg(not(target_os = "macos"))] { let toggle_devtools = MenuItemBuilder::new("Toggle DevTools") @@ -166,19 +171,19 @@ fn main() { .accelerator("CmdOrCtrl+Alt+I") .build(app) .unwrap(); - + let close_window = MenuItemBuilder::new("Close Window") .id("close-window") .accelerator("CmdOrCtrl+W") .build(app) .unwrap(); - + let quit = MenuItemBuilder::new("Quit") .id("quit") .accelerator("CmdOrCtrl+Q") .build(app) .unwrap(); - + let edit_menu = SubmenuBuilder::new(app, "Edit") .undo() .redo() @@ -211,7 +216,7 @@ fn main() { .setup(|app| { // Initialize agents database let conn = init_database(&app.handle()).expect("Failed to initialize agents database"); - + // Load and apply proxy settings from the database { let db = AgentDb(Mutex::new(conn)); @@ -219,7 +224,7 @@ fn main() { Ok(conn) => { // Directly query proxy settings from the database let mut settings = commands::proxy::ProxySettings::default(); - + let keys = vec![ ("proxy_enabled", "enabled"), ("proxy_http", "http_proxy"), @@ -227,7 +232,7 @@ fn main() { ("proxy_no", "no_proxy"), ("proxy_all", "all_proxy"), ]; - + for (db_key, field) in keys { if let Ok(value) = conn.query_row( "SELECT value FROM app_settings WHERE key = ?1", @@ -236,15 +241,23 @@ fn main() { ) { match field { "enabled" => settings.enabled = value == "true", - "http_proxy" => settings.http_proxy = Some(value).filter(|s| !s.is_empty()), - "https_proxy" => settings.https_proxy = Some(value).filter(|s| !s.is_empty()), - "no_proxy" => settings.no_proxy = Some(value).filter(|s| !s.is_empty()), - "all_proxy" => settings.all_proxy = Some(value).filter(|s| !s.is_empty()), + "http_proxy" => { + settings.http_proxy = Some(value).filter(|s| !s.is_empty()) + } + "https_proxy" => { + settings.https_proxy = Some(value).filter(|s| !s.is_empty()) + } + "no_proxy" => { + settings.no_proxy = Some(value).filter(|s| !s.is_empty()) + } + "all_proxy" => { + settings.all_proxy = Some(value).filter(|s| !s.is_empty()) + } _ => {} } } } - + log::info!("Loaded proxy settings: enabled={}", settings.enabled); settings } @@ -253,11 +266,11 @@ fn main() { commands::proxy::ProxySettings::default() } }; - + // Apply the proxy settings apply_proxy_settings(&proxy_settings); } - + // Re-open the connection for the app to manage let conn = init_database(&app.handle()).expect("Failed to initialize agents database"); app.manage(AgentDb(Mutex::new(conn))); @@ -285,7 +298,7 @@ fn main() { // Initialize process registry app.manage(ProcessRegistryState::default()); - + // Initialize 
file watcher state let file_watcher_state = FileWatcherState::new(); file_watcher_state.init(app.handle().clone()); @@ -337,7 +350,6 @@ fn main() { get_hooks_config, update_hooks_config, validate_hook_command, - // Checkpoint Management create_checkpoint, restore_checkpoint, @@ -353,7 +365,6 @@ fn main() { get_checkpoint_settings, clear_checkpoint_manager, get_checkpoint_state_stats, - // Agent Management list_agents, create_agent, @@ -385,26 +396,22 @@ fn main() { import_agent_from_github, get_model_mappings, update_model_mapping, - // Usage & Analytics get_usage_stats, get_usage_by_date_range, get_usage_details, get_session_stats, - // File Usage Index (SQLite) usage_scan_index, usage_scan_progress, usage_get_summary, usage_import_diffs, - // Usage Cache Management usage_scan_update, usage_get_stats_cached, usage_clear_cache, usage_force_scan, usage_check_updates, - // MCP (Model Context Protocol) mcp_add, mcp_list, @@ -419,7 +426,6 @@ fn main() { mcp_read_project_config, mcp_save_project_config, mcp_export_servers, - // Storage Management storage_list_tables, storage_read_table, @@ -428,7 +434,6 @@ fn main() { storage_insert_row, storage_execute_sql, storage_reset_database, - // Smart Sessions Management create_smart_quick_start_session, get_smart_session_config, @@ -436,22 +441,18 @@ fn main() { list_smart_sessions_command, toggle_smart_session_mode, cleanup_old_smart_sessions_command, - // Slash Commands commands::slash_commands::slash_commands_list, commands::slash_commands::slash_command_get, commands::slash_commands::slash_command_save, commands::slash_commands::slash_command_delete, - // Proxy Settings get_proxy_settings, save_proxy_settings, - // Language Settings get_current_language, set_language, get_supported_languages, - // Relay Stations relay_stations_list, relay_station_get, @@ -474,12 +475,10 @@ fn main() { relay_station_update_token, relay_station_delete_token, packycode_get_user_quota, - // PackyCode Nodes test_all_packycode_nodes, auto_select_best_node, get_packycode_nodes, - // File System read_directory_tree, search_files_by_name, @@ -490,14 +489,12 @@ fn main() { read_file, write_file, get_file_tree, - // Git get_git_status, get_git_history, get_git_branches, get_git_diff, get_git_commits, - // Terminal create_terminal_session, send_terminal_input, @@ -505,7 +502,6 @@ fn main() { list_terminal_sessions, resize_terminal, cleanup_terminal_sessions, - // CCR (Claude Code Router) check_ccr_installation, get_ccr_version, @@ -515,7 +511,6 @@ fn main() { restart_ccr_service, open_ccr_ui, get_ccr_config_path, - // System utilities flush_dns, ]) diff --git a/src-tauri/src/process/registry.rs b/src-tauri/src/process/registry.rs index 30c8e94..f4f33b5 100644 --- a/src-tauri/src/process/registry.rs +++ b/src-tauri/src/process/registry.rs @@ -7,13 +7,8 @@ use tokio::process::Child; /// Type of process being tracked #[derive(Debug, Clone, Serialize, Deserialize)] pub enum ProcessType { - AgentRun { - agent_id: i64, - agent_name: String, - }, - ClaudeSession { - session_id: String, - }, + AgentRun { agent_id: i64, agent_name: String }, + ClaudeSession { session_id: String }, } /// Information about a running agent process @@ -72,7 +67,10 @@ impl ProcessRegistry { ) -> Result<(), String> { let process_info = ProcessInfo { run_id, - process_type: ProcessType::AgentRun { agent_id, agent_name }, + process_type: ProcessType::AgentRun { + agent_id, + agent_name, + }, pid, started_at: Utc::now(), project_path, @@ -96,7 +94,10 @@ impl ProcessRegistry { ) -> Result<(), String> { let 
process_info = ProcessInfo { run_id, - process_type: ProcessType::AgentRun { agent_id, agent_name }, + process_type: ProcessType::AgentRun { + agent_id, + agent_name, + }, pid, started_at: Utc::now(), project_path, @@ -106,7 +107,7 @@ impl ProcessRegistry { // For sidecar processes, we register without the child handle since it's managed differently let mut processes = self.processes.lock().map_err(|e| e.to_string())?; - + let process_handle = ProcessHandle { info: process_info, child: Arc::new(Mutex::new(None)), // No tokio::process::Child handle for sidecar @@ -127,7 +128,7 @@ impl ProcessRegistry { model: String, ) -> Result { let run_id = self.generate_id()?; - + let process_info = ProcessInfo { run_id, process_type: ProcessType::ClaudeSession { session_id }, @@ -140,7 +141,7 @@ impl ProcessRegistry { // Register without child - Claude sessions use ClaudeProcessState for process management let mut processes = self.processes.lock().map_err(|e| e.to_string())?; - + let process_handle = ProcessHandle { info: process_info, child: Arc::new(Mutex::new(None)), // No child handle for Claude sessions @@ -175,25 +176,24 @@ impl ProcessRegistry { let processes = self.processes.lock().map_err(|e| e.to_string())?; Ok(processes .values() - .filter_map(|handle| { - match &handle.info.process_type { - ProcessType::ClaudeSession { .. } => Some(handle.info.clone()), - _ => None, - } + .filter_map(|handle| match &handle.info.process_type { + ProcessType::ClaudeSession { .. } => Some(handle.info.clone()), + _ => None, }) .collect()) } /// Get a specific Claude session by session ID - pub fn get_claude_session_by_id(&self, session_id: &str) -> Result, String> { + pub fn get_claude_session_by_id( + &self, + session_id: &str, + ) -> Result, String> { let processes = self.processes.lock().map_err(|e| e.to_string())?; Ok(processes .values() - .find(|handle| { - match &handle.info.process_type { - ProcessType::ClaudeSession { session_id: sid } => sid == session_id, - _ => false, - } + .find(|handle| match &handle.info.process_type { + ProcessType::ClaudeSession { session_id: sid } => sid == session_id, + _ => false, }) .map(|handle| handle.info.clone())) } @@ -221,11 +221,9 @@ impl ProcessRegistry { let processes = self.processes.lock().map_err(|e| e.to_string())?; Ok(processes .values() - .filter_map(|handle| { - match &handle.info.process_type { - ProcessType::AgentRun { .. } => Some(handle.info.clone()), - _ => None, - } + .filter_map(|handle| match &handle.info.process_type { + ProcessType::AgentRun { .. 
} => Some(handle.info.clone()), + _ => None, }) .collect()) } @@ -273,17 +271,26 @@ impl ProcessRegistry { } } } else { - warn!("No child handle available for process {} (PID: {}), attempting system kill", run_id, pid); + warn!( + "No child handle available for process {} (PID: {}), attempting system kill", + run_id, pid + ); false // Process handle not available, try fallback } }; // If direct kill didn't work, try system command as fallback if !kill_sent { - info!("Attempting fallback kill for process {} (PID: {})", run_id, pid); + info!( + "Attempting fallback kill for process {} (PID: {})", + run_id, pid + ); match self.kill_process_by_pid(run_id, pid) { Ok(true) => return Ok(true), - Ok(false) => warn!("Fallback kill also failed for process {} (PID: {})", run_id, pid), + Ok(false) => warn!( + "Fallback kill also failed for process {} (PID: {})", + run_id, pid + ), Err(e) => error!("Error during fallback kill: {}", e), } // Continue with the rest of the cleanup even if fallback failed diff --git a/src/App.tsx b/src/App.tsx index 8a18b37..401dd50 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -80,6 +80,41 @@ function AppContent() { const [projectForSettings, setProjectForSettings] = useState(null); const [previousView] = useState("welcome"); const [showAgentsModal, setShowAgentsModal] = useState(false); + + const translateWithFallback = ( + primaryKey: string, + params: Record = {}, + fallbackKeys: string[] = [], + fallbackDefault: string | ((params: Record) => string) = primaryKey + ) => { + const defaultNamespace = Array.isArray(i18n.options?.defaultNS) + ? i18n.options.defaultNS[0] ?? "common" + : (i18n.options?.defaultNS ?? "common"); + + const candidateKeys = [primaryKey, ...fallbackKeys]; + const rawLanguage = i18n.language || i18n.resolvedLanguage; + const normalizedLanguage = rawLanguage?.split('-')[0]; + const localesToTry = [rawLanguage, normalizedLanguage, 'en'].filter(Boolean) as string[]; + const missingToken = '__i18n_missing__'; + + for (const key of candidateKeys) { + for (const locale of localesToTry) { + const fixedT = i18n.getFixedT(locale, defaultNamespace); + const translated = fixedT(key, { + ...params, + defaultValue: missingToken, + }); + + if (translated !== missingToken) { + return translated; + } + } + } + + return typeof fallbackDefault === 'function' + ? 
(fallbackDefault as (params: Record) => string)(params) + : fallbackDefault; + }; // Initialize analytics lifecycle tracking useAppLifecycle(); @@ -292,10 +327,12 @@ function AppContent() { // Create a new tab for the smart session const newTabId = createChatTab(); + const sessionDisplayName = smartSession.display_name || t('messages.smartSessionDefaultTitle'); + // 直接更新新建标签的会话上下文,避免依赖事件时序 updateTab(newTabId, { type: 'chat', - title: smartSession.display_name || 'Smart Session', + title: sessionDisplayName, initialProjectPath: smartSession.project_path, sessionData: null, status: 'active' @@ -307,13 +344,17 @@ function AppContent() { } switchToTab(newTabId); - // Show success message - setToast({ - message: t('smartSessionCreated', { - name: smartSession.display_name, - path: smartSession.project_path - }), - type: "success" + // Show success message,若主键缺失则回退到默认提示 + const successMessage = translateWithFallback( + 'messages.smartSessionCreated', + { name: sessionDisplayName }, + ['messages.smartSessionDefaultToast'], + `Smart session '${sessionDisplayName}' is ready to use.` + ); + + setToast({ + message: successMessage, + type: "success" }); trackEvent.journeyMilestone({ @@ -324,11 +365,17 @@ function AppContent() { } catch (error) { console.error('Failed to create smart session:', error); - setToast({ - message: t('failedToCreateSmartSession', { - error: error instanceof Error ? error.message : String(error) - }), - type: "error" + const rawError = error instanceof Error ? error.message : String(error); + const fallbackErrorMessage = translateWithFallback( + 'messages.failedToCreateSmartSession', + { error: rawError }, + ['messages.failedToCreateSmartSessionFallback'], + `Failed to create smart session: ${rawError}` + ); + + setToast({ + message: fallbackErrorMessage, + type: "error" }); } }; diff --git a/src/components/AgentsModal.tsx b/src/components/AgentsModal.tsx index 4ab69bd..339d698 100644 --- a/src/components/AgentsModal.tsx +++ b/src/components/AgentsModal.tsx @@ -150,11 +150,11 @@ export const AgentsModal: React.FC = ({ open, onOpenChange }) if (filePath) { const agent = await api.importAgentFromFile(filePath as string); loadAgents(); // Refresh list - setToast({ message: `Agent "${agent.name}" imported successfully`, type: "success" }); + setToast({ message: t('agents.importedSuccessfully', { name: agent.name }), type: "success" }); } } catch (error) { console.error('Failed to import agent:', error); - setToast({ message: "Failed to import agent", type: "error" }); + setToast({ message: t('agents.importFailed'), type: "error" }); } }; @@ -175,11 +175,11 @@ export const AgentsModal: React.FC = ({ open, onOpenChange }) if (filePath) { await invoke('write_file', { path: filePath, content: JSON.stringify(exportData, null, 2) }); - setToast({ message: "Agent exported successfully", type: "success" }); + setToast({ message: t('agents.exportedSuccessfully', { name: agent.name }), type: "success" }); } } catch (error) { console.error('Failed to export agent:', error); - setToast({ message: "Failed to export agent", type: "error" }); + setToast({ message: t('agents.exportFailed'), type: "error" }); } }; @@ -424,7 +424,7 @@ export const AgentsModal: React.FC = ({ open, onOpenChange }) onImportSuccess={() => { setShowGitHubBrowser(false); loadAgents(); // Refresh the agents list - setToast({ message: "Agent imported successfully", type: "success" }); + setToast({ message: t('agents.importedSuccessfully'), type: "success" }); }} /> diff --git a/src/components/CcrRouterManager.tsx 
diff --git a/src/components/CcrRouterManager.tsx b/src/components/CcrRouterManager.tsx
index a008fdd..318aa35 100644
--- a/src/components/CcrRouterManager.tsx
+++ b/src/components/CcrRouterManager.tsx
@@ -7,12 +7,14 @@ import { Badge } from "@/components/ui/badge";
 import { Toast, ToastContainer } from "@/components/ui/toast";
 import { ccrApi, type CcrServiceStatus } from "@/lib/api";
 import { open } from '@tauri-apps/plugin-shell';
+import { useTranslation } from '@/hooks/useTranslation';
 
 interface CcrRouterManagerProps {
   onBack: () => void;
 }
 
 export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
+  const { t } = useTranslation();
   const [serviceStatus, setServiceStatus] = useState(null);
   const [loading, setLoading] = useState(true);
   const [actionLoading, setActionLoading] = useState(false);
@@ -34,7 +36,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
    } catch (error) {
      console.error("Failed to load CCR service status:", error);
      setToast({
-        message: `加载CCR服务状态失败: ${error}`,
+        message: t('ccr.loadStatusFailed', { error: String(error) }),
        type: "error"
      });
    } finally {
@@ -63,7 +65,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
    } catch (error) {
      console.error("Failed to start CCR service:", error);
      setToast({
-        message: `启动CCR服务失败: ${error}`,
+        message: t('ccr.startFailed', { error: String(error) }),
        type: "error"
      });
    } finally {
@@ -83,7 +85,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
    } catch (error) {
      console.error("Failed to stop CCR service:", error);
      setToast({
-        message: `停止CCR服务失败: ${error}`,
+        message: t('ccr.stopFailed', { error: String(error) }),
        type: "error"
      });
    } finally {
@@ -103,7 +105,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
    } catch (error) {
      console.error("Failed to restart CCR service:", error);
      setToast({
-        message: `重启CCR服务失败: ${error}`,
+        message: t('ccr.restartFailed', { error: String(error) }),
        type: "error"
      });
    } finally {
@@ -118,14 +120,14 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
      // 如果服务未运行,先尝试启动
      if (!serviceStatus?.is_running) {
        setToast({
-          message: "检测到服务未运行,正在启动...",
+          message: t('ccr.serviceStarting'),
          type: "info"
        });
 
        const startResult = await ccrApi.startService();
        setServiceStatus(startResult.status);
 
        if (!startResult.status.is_running) {
-          throw new Error("服务启动失败");
+          throw new Error(t('ccr.serviceStartFailed'));
        }
        // 等待服务完全启动
@@ -134,7 +136,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
      await ccrApi.openUI();
 
      setToast({
-        message: "正在打开CCR UI...",
+        message: t('ccr.openingUI'),
        type: "info"
      });
 
@@ -145,7 +147,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
    } catch (error) {
      console.error("Failed to open CCR UI:", error);
      setToast({
-        message: `打开CCR UI失败: ${error}`,
+        message: t('ccr.openUIFailed', { error: String(error) }),
        type: "error"
      });
    } finally {
@@ -159,7 +161,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
    if (!serviceStatus?.is_running) {
      setActionLoading(true);
      setToast({
-        message: "检测到服务未运行,正在启动...",
+        message: t('ccr.serviceStarting'),
        type: "info"
      });
 
@@ -167,7 +169,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
      const startResult = await ccrApi.startService();
      setServiceStatus(startResult.status);
 
      if (!startResult.status.is_running) {
-        throw new Error("服务启动失败");
+        throw new Error(t('ccr.serviceStartFailed'));
      }
      // 等待服务完全启动
@@ -178,14 +180,14 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
      if (serviceStatus?.endpoint) {
        open(`${serviceStatus.endpoint}/ui/`);
        setToast({
-          message: "正在打开CCR管理界面...",
+          message: t('ccr.openingAdmin'),
          type: "info"
        });
      }
    } catch (error) {
      console.error("Failed to open CCR UI in browser:", error);
      setToast({
-        message: `打开管理界面失败: ${error}`,
+        message: t('ccr.openAdminFailed', { error: String(error) }),
        type: "error"
      });
      setActionLoading(false);
diff --git a/src/components/ClaudeCodeSession.tsx b/src/components/ClaudeCodeSession.tsx
index 211604f..c5e5385 100644
--- a/src/components/ClaudeCodeSession.tsx
+++ b/src/components/ClaudeCodeSession.tsx
@@ -1519,7 +1519,7 @@ export const ClaudeCodeSession: React.FC = ({
-                滚动到顶部
+                {t('claudeSession.scrollToTop', 'Scroll to top')}
@@ -1540,7 +1540,7 @@ export const ClaudeCodeSession: React.FC = ({
-                滚动到底部
+                {t('claudeSession.scrollToBottom', 'Scroll to bottom')}
@@ -1665,7 +1665,7 @@ export const ClaudeCodeSession: React.FC = ({
                  {totalTokens.toLocaleString()}
-                  tokens
+                  {t('usage.tokens')}
                )}
@@ -1705,7 +1705,7 @@ export const ClaudeCodeSession: React.FC = ({
-                File Explorer
+                {t('app.fileExplorer')}
@@ -1726,7 +1726,7 @@ export const ClaudeCodeSession: React.FC = ({
-                Git Panel
+                {t('app.gitPanel')}
@@ -1747,7 +1747,7 @@ export const ClaudeCodeSession: React.FC = ({
-                {isFileWatching ? '停止文件监控' : '启动文件监控'}
+                {isFileWatching ? t('claudeSession.stopFileWatch', 'Stop file watching') : t('claudeSession.startFileWatch', 'Start file watching')}
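The tooltip and toggle labels above pass an inline English string as the second argument to t. Assuming the project's useTranslation hook forwards to i18next, that is the default-value overload: the literal is only rendered when the key is missing from the loaded resources. A tiny sketch with illustrative resources:

    // Sketch of the two-argument form, assuming i18next semantics.
    import i18next from 'i18next';

    async function demo() {
      await i18next.init({ lng: 'en', resources: { en: { translation: {} } } });

      // 'claudeSession.scrollToTop' is absent here, so the inline default is used.
      console.log(i18next.t('claudeSession.scrollToTop', 'Scroll to top')); // -> "Scroll to top"
    }

    demo();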

diff --git a/src/components/Settings.tsx b/src/components/Settings.tsx
index c0dac16..b3402e4 100644
--- a/src/components/Settings.tsx
+++ b/src/components/Settings.tsx
@@ -136,7 +136,7 @@ export const Settings: React.FC = ({
      setModelMappings(mappings);
    } catch (err) {
      console.error("Failed to load model mappings:", err);
-      setToast({ message: "加载模型映射失败", type: "error" });
+      setToast({ message: t('settings.modelMappings.loadFailed'), type: "error" });
    } finally {
      setLoadingMappings(false);
    }
@@ -163,10 +163,10 @@ export const Settings: React.FC = ({
        await api.updateModelMapping(mapping.alias, mapping.model_name);
      }
      setModelMappingsChanged(false);
-      setToast({ message: "模型映射已保存", type: "success" });
+      setToast({ message: t('settings.modelMappings.saved'), type: "success" });
    } catch (err) {
      console.error("Failed to save model mappings:", err);
-      setToast({ message: "保存模型映射失败", type: "error" });
+      setToast({ message: t('settings.modelMappings.saveFailed'), type: "error" });
    }
  };
@@ -696,9 +696,9 @@ export const Settings: React.FC = ({
              {/* Model Mappings Configuration */}
-                配置模型别名(sonnet、opus、haiku)对应的实际模型版本
+                {t('settings.modelMappings.description')}
@@ -720,29 +720,29 @@ export const Settings: React.FC = ({
                        className="font-mono text-sm"
                      />
-                        {mapping.alias === 'sonnet' && '平衡性能与成本的主力模型'}
-                        {mapping.alias === 'opus' && '最强大的旗舰模型,适合复杂任务'}
-                        {mapping.alias === 'haiku' && '快速响应的轻量级模型'}
+                        {mapping.alias === 'sonnet' && t('settings.modelMappings.aliasDescriptions.sonnet')}
+                        {mapping.alias === 'opus' && t('settings.modelMappings.aliasDescriptions.opus')}
+                        {mapping.alias === 'haiku' && t('settings.modelMappings.aliasDescriptions.haiku')}
                  ))}
                  {modelMappings.length === 0 && (
-                      暂无模型映射配置
-                      数据库初始化可能未完成,请尝试重启应用
+                      {t('settings.modelMappings.emptyTitle')}
+                      {t('settings.modelMappings.emptySubtitle')}
                  )}
                  {modelMappingsChanged && (
-                      模型映射已修改,点击保存以应用更改
+                      {t('settings.modelMappings.changedNotice')}
                  )}
-                    说明:Agent执行时会根据这里的配置解析模型别名。例如,如果设置 sonnet → claude-sonnet-4-20250514,那么所有使用 "sonnet" 的Agent都会调用该模型版本。
+                    {t('settings.modelMappings.note')} {t('settings.modelMappings.noteContent')}
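The mappings edited in this panel pair an alias (sonnet, opus, haiku) with a concrete model name, and the note above says agents resolve aliases through this table at execution time. A hypothetical sketch of that lookup; resolveModelAlias and the pass-through fallback are assumptions for illustration, not the app's actual API (the example model name is taken from the note):

    // Hypothetical lookup helper; the mapping row reuses the example from the note above.
    interface ModelMapping {
      alias: string;
      model_name: string;
    }

    function resolveModelAlias(alias: string, mappings: ModelMapping[]): string {
      // Assumed behaviour: fall through to the raw alias when no row matches.
      return mappings.find((m) => m.alias === alias)?.model_name ?? alias;
    }

    const mappings: ModelMapping[] = [
      { alias: 'sonnet', model_name: 'claude-sonnet-4-20250514' },
    ];

    console.log(resolveModelAlias('sonnet', mappings)); // -> "claude-sonnet-4-20250514"
    console.log(resolveModelAlias('opus', mappings));   // -> "opus" (no row, passed through)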

diff --git a/src/components/TabContent.tsx b/src/components/TabContent.tsx
index 258f16a..4b2eafb 100644
--- a/src/components/TabContent.tsx
+++ b/src/components/TabContent.tsx
@@ -398,9 +398,11 @@ export const TabContent: React.FC = () => {
      console.log('[TabContent] Handling create-smart-session-tab:', { tabId, sessionData });
 
      // Update the existing tab with smart session data and switch immediately
+      const displayName = sessionData.display_name || t('smartSessionDefaultTitle');
+
      updateTab(tabId, {
        type: 'chat',
-        title: sessionData.display_name || 'Smart Session',
+        title: displayName,
        initialProjectPath: sessionData.project_path,
        sessionData: null, // No existing session, this is a new session workspace
      });
diff --git a/src/locales/en/common.json b/src/locales/en/common.json
index f2eea20..74ce9f8 100644
--- a/src/locales/en/common.json
+++ b/src/locales/en/common.json
@@ -536,8 +536,43 @@
       "allowRuleExample": "e.g., Bash(npm run test:*)",
       "denyRuleExample": "e.g., Bash(curl:*)",
       "apiKeyHelperPath": "/path/to/generate_api_key.sh"
+    },
+    "modelMappings": {
+      "title": "Model alias mappings",
+      "description": "Configure actual model versions for aliases (sonnet, opus, haiku)",
+      "loadFailed": "Failed to load model mappings",
+      "saved": "Model mappings saved",
+      "saveFailed": "Failed to save model mappings",
+      "emptyTitle": "No model mappings configured",
+      "emptySubtitle": "Database may not be initialized yet. Try restarting the app.",
+      "changedNotice": "Model mappings changed. Click Save to apply.",
+      "note": "Note:",
+      "noteContent": "Agents using aliases will resolve to configured versions. For example, sonnet → claude-sonnet-4-20250514.",
+      "aliasDescriptions": {
+        "sonnet": "Balanced model for most tasks",
+        "opus": "Most capable flagship model for complex tasks",
+        "haiku": "Fast, lightweight model"
+      }
     }
   },
+  "ccr": {
+    "loadStatusFailed": "Failed to load CCR service status: {{error}}",
+    "startFailed": "Failed to start CCR service: {{error}}",
+    "stopFailed": "Failed to stop CCR service: {{error}}",
+    "restartFailed": "Failed to restart CCR service: {{error}}",
+    "serviceStarting": "Service not running, starting...",
+    "serviceStartFailed": "Service failed to start",
+    "openingUI": "Opening CCR UI...",
+    "openUIFailed": "Failed to open CCR UI: {{error}}",
+    "openingAdmin": "Opening CCR admin...",
+    "openAdminFailed": "Failed to open admin UI: {{error}}"
+  },
+  "claudeSession": {
+    "scrollToTop": "Scroll to top",
+    "scrollToBottom": "Scroll to bottom",
+    "startFileWatch": "Start file watching",
+    "stopFileWatch": "Stop file watching"
+  },
   "mcp": {
     "title": "MCP Server Management",
     "servers": "Servers",
@@ -766,8 +801,11 @@
   "claudeCodeNotFound": "Claude Code not found",
   "selectClaudeInstallation": "Select Claude Installation",
   "installClaudeCode": "Install Claude Code",
-  "smartSessionCreated": "Smart session '{{name}}' created at: {{path}}",
+  "smartSessionCreated": "Smart session '{{name}}' is ready to use.",
+  "smartSessionDefaultToast": "Smart session '{{name}}' is ready to use.",
+  "smartSessionDefaultTitle": "Smart Session",
   "failedToCreateSmartSession": "Failed to create smart session: {{error}}",
+  "failedToCreateSmartSessionFallback": "Failed to create smart session: {{error}}",
   "noTabsOpen": "No tabs open",
   "clickPlusToStartChat": "Click the + button to start a new chat",
   "noAgentRunIdSpecified": "No agent run ID specified",
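The keys added to en/common.json above carry {{error}} and {{name}} placeholders. Assuming standard i18next interpolation, the option objects passed at the call sites (for example { error: String(error) } in CcrRouterManager) are substituted into those slots. A small sketch using a hand-copied subset of two of the new keys:

    // Sketch only: the resources below are a trimmed copy of keys added in this PR.
    import i18next from 'i18next';

    async function demo() {
      await i18next.init({
        lng: 'en',
        resources: {
          en: {
            translation: {
              ccr: { startFailed: 'Failed to start CCR service: {{error}}' },
              messages: { smartSessionCreated: "Smart session '{{name}}' is ready to use." },
            },
          },
        },
      });

      console.log(i18next.t('ccr.startFailed', { error: 'connection refused' }));
      // -> Failed to start CCR service: connection refused
      console.log(i18next.t('messages.smartSessionCreated', { name: 'my-project' }));
      // -> Smart session 'my-project' is ready to use.
    }

    demo();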
diff --git a/src/locales/zh/common.json b/src/locales/zh/common.json
index 502b6a8..46f585e 100644
--- a/src/locales/zh/common.json
+++ b/src/locales/zh/common.json
@@ -516,7 +516,42 @@
     "path": "路径",
     "source": "来源",
     "version": "版本",
-    "versionUnknown": "版本未知"
+    "versionUnknown": "版本未知",
+    "modelMappings": {
+      "title": "模型别名映射",
+      "description": "配置别名(sonnet、opus、haiku)对应的实际模型版本",
+      "loadFailed": "加载模型映射失败",
+      "saved": "模型映射已保存",
+      "saveFailed": "保存模型映射失败",
+      "emptyTitle": "暂无模型映射配置",
+      "emptySubtitle": "数据库初始化可能未完成,请尝试重启应用",
+      "changedNotice": "模型映射已修改,点击保存以应用更改",
+      "note": "说明:",
+      "noteContent": "Agent 执行时会根据此配置解析模型别名,例如 sonnet → claude-sonnet-4-20250514。",
+      "aliasDescriptions": {
+        "sonnet": "平衡性能与成本的主力模型",
+        "opus": "最强大的旗舰模型,适合复杂任务",
+        "haiku": "快速响应的轻量级模型"
+      }
+    }
+  },
+  "ccr": {
+    "loadStatusFailed": "加载 CCR 服务状态失败:{{error}}",
+    "startFailed": "启动 CCR 服务失败:{{error}}",
+    "stopFailed": "停止 CCR 服务失败:{{error}}",
+    "restartFailed": "重启 CCR 服务失败:{{error}}",
+    "serviceStarting": "检测到服务未运行,正在启动...",
+    "serviceStartFailed": "服务启动失败",
+    "openingUI": "正在打开 CCR UI...",
+    "openUIFailed": "打开 CCR UI 失败:{{error}}",
+    "openingAdmin": "正在打开 CCR 管理界面...",
+    "openAdminFailed": "打开管理界面失败:{{error}}"
+  },
+  "claudeSession": {
+    "scrollToTop": "滚动到顶部",
+    "scrollToBottom": "滚动到底部",
+    "startFileWatch": "启动文件监控",
+    "stopFileWatch": "停止文件监控"
   },
   "mcp": {
     "title": "MCP 服务器管理",
@@ -709,8 +744,11 @@
   "claudeCodeNotFound": "未找到 Claude Code",
   "selectClaudeInstallation": "选择 Claude 安装",
   "installClaudeCode": "安装 Claude Code",
-  "smartSessionCreated": "智能会话 '{{name}}' 已创建在:{{path}}",
+  "smartSessionCreated": "智能会话「{{name}}」已就绪。",
+  "smartSessionDefaultToast": "智能会话「{{name}}」已就绪。",
+  "smartSessionDefaultTitle": "智能会话",
   "failedToCreateSmartSession": "创建智能会话失败:{{error}}",
+  "failedToCreateSmartSessionFallback": "创建智能会话失败:{{error}}",
   "session": "会话",
   "letClaudeDecide": "让 Claude 决定",
   "basicReasoning": "基础推理",