修复 i18n

This commit is contained in:
2025-10-17 17:20:46 +08:00
parent 9d30fd0dac
commit 0e32c6e64c
35 changed files with 2581 additions and 1674 deletions

View File

@@ -61,7 +61,10 @@ pub fn find_claude_binary(app_handle: &tauri::AppHandle) -> Result<String, Strin
// On Windows, if stored path exists but is not executable (shell script), try .cmd version
#[cfg(target_os = "windows")]
if path_buf.exists() && !stored_path.ends_with(".cmd") && !stored_path.ends_with(".exe") {
if path_buf.exists()
&& !stored_path.ends_with(".cmd")
&& !stored_path.ends_with(".exe")
{
// Test if the current path works by trying to get version
if let Err(_) = get_claude_version(&stored_path) {
// If it fails, try the .cmd version
@@ -71,7 +74,10 @@ pub fn find_claude_binary(app_handle: &tauri::AppHandle) -> Result<String, Strin
if let Ok(_) = get_claude_version(&cmd_path) {
final_path = cmd_path;
path_buf = cmd_path_buf;
info!("Using .cmd version instead of shell script: {}", final_path);
info!(
"Using .cmd version instead of shell script: {}",
final_path
);
}
}
}
@@ -208,7 +214,10 @@ fn find_which_installations() -> Vec<ClaudeInstallation> {
// In production (DMG), we need to ensure proper PATH is set
let enhanced_path = build_enhanced_path();
debug!("Using enhanced PATH for {}: {}", command_name, enhanced_path);
debug!(
"Using enhanced PATH for {}: {}",
command_name, enhanced_path
);
cmd.env("PATH", enhanced_path);
match cmd.output() {
@@ -243,7 +252,10 @@ fn find_which_installations() -> Vec<ClaudeInstallation> {
// Convert /c/path to C:\path
let windows_path = path.replace("/c/", "C:\\").replace("/", "\\");
windows_path
} else if path.starts_with("/") && path.len() > 3 && path.chars().nth(2) == Some('/') {
} else if path.starts_with("/")
&& path.len() > 3
&& path.chars().nth(2) == Some('/')
{
// Convert /X/path to X:\path where X is drive letter
let drive = path.chars().nth(1).unwrap();
let rest = &path[3..];
@@ -284,7 +296,10 @@ fn find_which_installations() -> Vec<ClaudeInstallation> {
// Verify the path exists
if !PathBuf::from(&final_path).exists() {
warn!("Path from '{}' does not exist: {}", command_name, final_path);
warn!(
"Path from '{}' does not exist: {}",
command_name, final_path
);
continue;
}
@@ -418,7 +433,8 @@ fn find_standard_installations() -> Vec<ClaudeInstallation> {
if output.status.success() {
debug!("claude is available in PATH");
// Combine stdout and stderr for robust version extraction
let mut combined: Vec<u8> = Vec::with_capacity(output.stdout.len() + output.stderr.len() + 1);
let mut combined: Vec<u8> =
Vec::with_capacity(output.stdout.len() + output.stderr.len() + 1);
combined.extend_from_slice(&output.stdout);
if !output.stderr.is_empty() {
combined.extend_from_slice(b"\n");
@@ -448,7 +464,8 @@ fn get_claude_version(path: &str) -> Result<Option<String>, String> {
Ok(output) => {
if output.status.success() {
// Combine stdout and stderr for robust version extraction
let mut combined: Vec<u8> = Vec::with_capacity(output.stdout.len() + output.stderr.len() + 1);
let mut combined: Vec<u8> =
Vec::with_capacity(output.stdout.len() + output.stderr.len() + 1);
combined.extend_from_slice(&output.stdout);
if !output.stderr.is_empty() {
combined.extend_from_slice(b"\n");
@@ -481,7 +498,8 @@ fn extract_version_from_output(stdout: &[u8]) -> Option<String> {
// - A dot, followed by
// - One or more digits
// - Optionally followed by pre-release/build metadata
let version_regex = regex::Regex::new(r"(\d+\.\d+\.\d+(?:-[a-zA-Z0-9.-]+)?(?:\+[a-zA-Z0-9.-]+)?)").ok()?;
let version_regex =
regex::Regex::new(r"(\d+\.\d+\.\d+(?:-[a-zA-Z0-9.-]+)?(?:\+[a-zA-Z0-9.-]+)?)").ok()?;
if let Some(captures) = version_regex.captures(&output_str) {
if let Some(version_match) = captures.get(1) {
@@ -616,7 +634,8 @@ pub fn create_command_with_env(program: &str) -> Command {
if program.contains("/.nvm/versions/node/") {
if let Some(node_bin_dir) = std::path::Path::new(program).parent() {
// Ensure the Node.js bin directory is in PATH
let current_path = cmd.get_envs()
let current_path = cmd
.get_envs()
.find(|(k, _)| k.to_str() == Some("PATH"))
.and_then(|(_, v)| v)
.and_then(|v| v.to_str())

View File

@@ -1,10 +1,10 @@
use std::fs;
use std::path::PathBuf;
use std::collections::HashMap;
use crate::commands::relay_stations::RelayStation;
use dirs::home_dir;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use dirs::home_dir;
use crate::commands::relay_stations::RelayStation;
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
/// Claude 配置文件结构
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -39,11 +39,17 @@ pub struct StatusLineConfig {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClaudeEnv {
#[serde(rename = "ANTHROPIC_AUTH_TOKEN", skip_serializing_if = "Option::is_none")]
#[serde(
rename = "ANTHROPIC_AUTH_TOKEN",
skip_serializing_if = "Option::is_none"
)]
pub anthropic_auth_token: Option<String>,
#[serde(rename = "ANTHROPIC_BASE_URL", skip_serializing_if = "Option::is_none")]
pub anthropic_base_url: Option<String>,
#[serde(rename = "CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC", skip_serializing_if = "Option::is_none")]
#[serde(
rename = "CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC",
skip_serializing_if = "Option::is_none"
)]
pub disable_nonessential_traffic: Option<String>,
// 使用 flatten 来支持任何其他环境变量
#[serde(flatten)]
@@ -97,12 +103,12 @@ pub fn read_claude_config() -> Result<ClaudeConfig, String> {
});
}
let content = fs::read_to_string(&config_path)
.map_err(|e| format!("读取配置文件失败: {}", e))?;
let content =
fs::read_to_string(&config_path).map_err(|e| format!("读取配置文件失败: {}", e))?;
// 首先尝试解析为 JSON Value以便处理可能的格式问题
let mut json_value: Value = serde_json::from_str(&content)
.map_err(|e| format!("解析配置文件失败: {}", e))?;
let mut json_value: Value =
serde_json::from_str(&content).map_err(|e| format!("解析配置文件失败: {}", e))?;
// 如果JSON解析成功再转换为ClaudeConfig
if let Some(obj) = json_value.as_object_mut() {
@@ -112,8 +118,7 @@ pub fn read_claude_config() -> Result<ClaudeConfig, String> {
}
}
serde_json::from_value(json_value)
.map_err(|e| format!("转换配置结构失败: {}", e))
serde_json::from_value(json_value).map_err(|e| format!("转换配置结构失败: {}", e))
}
/// 写入 Claude 配置文件
@@ -125,16 +130,14 @@ pub fn write_claude_config(config: &ClaudeConfig) -> Result<(), String> {
// 确保目录存在
if let Some(parent) = config_path.parent() {
log::info!("确保目录存在: {:?}", parent);
fs::create_dir_all(parent)
.map_err(|e| {
fs::create_dir_all(parent).map_err(|e| {
let error_msg = format!("创建配置目录失败: {}", e);
log::error!("{}", error_msg);
error_msg
})?;
}
let content = serde_json::to_string_pretty(config)
.map_err(|e| {
let content = serde_json::to_string_pretty(config).map_err(|e| {
let error_msg = format!("序列化配置失败: {}", e);
log::error!("{}", error_msg);
error_msg
@@ -142,8 +145,7 @@ pub fn write_claude_config(config: &ClaudeConfig) -> Result<(), String> {
log::info!("准备写入内容:\n{}", content);
fs::write(&config_path, &content)
.map_err(|e| {
fs::write(&config_path, &content).map_err(|e| {
let error_msg = format!("写入配置文件失败: {} (路径: {:?})", e, config_path);
log::error!("{}", error_msg);
error_msg
@@ -159,8 +161,7 @@ pub fn backup_claude_config() -> Result<(), String> {
let backup_path = get_config_backup_path()?;
if config_path.exists() {
fs::copy(&config_path, &backup_path)
.map_err(|e| format!("备份配置文件失败: {}", e))?;
fs::copy(&config_path, &backup_path).map_err(|e| format!("备份配置文件失败: {}", e))?;
}
Ok(())
@@ -175,8 +176,7 @@ pub fn restore_claude_config() -> Result<(), String> {
return Err("备份文件不存在".to_string());
}
fs::copy(&backup_path, &config_path)
.map_err(|e| format!("恢复配置文件失败: {}", e))?;
fs::copy(&backup_path, &config_path).map_err(|e| format!("恢复配置文件失败: {}", e))?;
Ok(())
}

View File

@@ -10,8 +10,8 @@ use std::io::{BufRead, BufReader};
use std::process::Stdio;
use std::sync::Mutex;
use tauri::{AppHandle, Emitter, Manager, State};
use tauri_plugin_shell::ShellExt;
use tauri_plugin_shell::process::CommandEvent;
use tauri_plugin_shell::ShellExt;
use tokio::io::{AsyncBufReadExt, BufReader as TokioBufReader};
use tokio::process::Command;
@@ -321,7 +321,6 @@ pub fn init_database(app: &AppHandle) -> SqliteResult<Connection> {
[],
)?;
// Create settings table for app-wide settings
conn.execute(
"CREATE TABLE IF NOT EXISTS app_settings (
@@ -355,11 +354,9 @@ pub fn init_database(app: &AppHandle) -> SqliteResult<Connection> {
)?;
// Initialize default model mappings if empty
let count: i64 = conn.query_row(
"SELECT COUNT(*) FROM model_mappings",
[],
|row| row.get(0),
).unwrap_or(0);
let count: i64 = conn
.query_row("SELECT COUNT(*) FROM model_mappings", [], |row| row.get(0))
.unwrap_or(0);
if count == 0 {
conn.execute(
@@ -728,8 +725,7 @@ pub async fn execute_agent(
let execution_model = model.unwrap_or(agent.model.clone());
// Resolve model alias to actual model name using mappings
let resolved_model = get_model_by_alias(&db, &execution_model)
.unwrap_or_else(|_| {
let resolved_model = get_model_by_alias(&db, &execution_model).unwrap_or_else(|_| {
warn!("Model alias '{}' not found, using as-is", execution_model);
execution_model.clone()
});
@@ -766,7 +762,10 @@ pub async fn execute_agent(
std::fs::write(&settings_path, settings_content)
.map_err(|e| format!("Failed to write settings.json: {}", e))?;
info!("Created settings.json with agent hooks at: {:?}", settings_path);
info!(
"Created settings.json with agent hooks at: {:?}",
settings_path
);
} else {
info!("settings.json already exists at: {:?}", settings_path);
}
@@ -809,9 +808,34 @@ pub async fn execute_agent(
// Execute based on whether we should use sidecar or system binary
if should_use_sidecar(&claude_path) {
spawn_agent_sidecar(app, run_id, agent_id, agent.name.clone(), args, project_path, task, resolved_model, db, registry).await
spawn_agent_sidecar(
app,
run_id,
agent_id,
agent.name.clone(),
args,
project_path,
task,
resolved_model,
db,
registry,
)
.await
} else {
spawn_agent_system(app, run_id, agent_id, agent.name.clone(), claude_path, args, project_path, task, resolved_model, db, registry).await
spawn_agent_system(
app,
run_id,
agent_id,
agent.name.clone(),
claude_path,
args,
project_path,
task,
resolved_model,
db,
registry,
)
.await
}
}
@@ -839,11 +863,7 @@ fn create_agent_sidecar_command(
// Pass through proxy environment variables if they exist (only uppercase)
for (key, value) in std::env::vars() {
if key == "HTTP_PROXY"
|| key == "HTTPS_PROXY"
|| key == "NO_PROXY"
|| key == "ALL_PROXY"
{
if key == "HTTP_PROXY" || key == "HTTPS_PROXY" || key == "NO_PROXY" || key == "ALL_PROXY" {
debug!("Setting proxy env var for agent sidecar: {}={}", key, value);
sidecar_cmd = sidecar_cmd.env(&key, &value);
}
@@ -899,7 +919,10 @@ async fn spawn_agent_sidecar(
// Get the PID from child
let pid = child.pid();
let now = chrono::Utc::now().to_rfc3339();
info!("✅ Claude sidecar process spawned successfully with PID: {}", pid);
info!(
"✅ Claude sidecar process spawned successfully with PID: {}",
pid
);
// Update the database with PID and status
{
@@ -983,8 +1006,9 @@ async fn spawn_agent_sidecar(
// Extract session ID from JSONL output
if let Ok(json) = serde_json::from_str::<JsonValue>(&line) {
if json.get("type").and_then(|t| t.as_str()) == Some("system") &&
json.get("subtype").and_then(|s| s.as_str()) == Some("init") {
if json.get("type").and_then(|t| t.as_str()) == Some("system")
&& json.get("subtype").and_then(|s| s.as_str()) == Some("init")
{
if let Some(sid) = json.get("session_id").and_then(|s| s.as_str()) {
if let Ok(mut current_session_id) = session_id_clone.lock() {
if current_session_id.is_empty() {
@@ -1024,7 +1048,10 @@ async fn spawn_agent_sidecar(
let _ = app_handle.emit("agent-error", &line);
}
CommandEvent::Terminated(payload) => {
info!("Claude sidecar process terminated with code: {:?}", payload.code);
info!(
"Claude sidecar process terminated with code: {:?}",
payload.code
);
// Get the session ID
let extracted_session_id = if let Ok(sid) = session_id.lock() {
@@ -1050,7 +1077,10 @@ async fn spawn_agent_sidecar(
}
}
info!("📖 Finished reading Claude sidecar events. Total lines: {}", line_count);
info!(
"📖 Finished reading Claude sidecar events. Total lines: {}",
line_count
);
});
Ok(run_id)
@@ -1162,8 +1192,9 @@ async fn spawn_agent_system(
// Extract session ID from JSONL output
if let Ok(json) = serde_json::from_str::<JsonValue>(&line) {
// Claude Code uses "session_id" (underscore), not "sessionId"
if json.get("type").and_then(|t| t.as_str()) == Some("system") &&
json.get("subtype").and_then(|s| s.as_str()) == Some("init") {
if json.get("type").and_then(|t| t.as_str()) == Some("system")
&& json.get("subtype").and_then(|s| s.as_str()) == Some("init")
{
if let Some(sid) = json.get("session_id").and_then(|s| s.as_str()) {
if let Ok(mut current_session_id) = session_id_clone.lock() {
if current_session_id.is_empty() {
@@ -1182,7 +1213,10 @@ async fn spawn_agent_system(
}
}
Err(e) => {
error!("❌ Failed to update session ID immediately: {}", e);
error!(
"❌ Failed to update session ID immediately: {}",
e
);
}
}
}
@@ -1342,7 +1376,10 @@ async fn spawn_agent_system(
// Update the run record with session ID and mark as completed - open a new connection
if let Ok(conn) = Connection::open(&db_path_for_monitor) {
info!("🔄 Updating database with extracted session ID: {}", extracted_session_id);
info!(
"🔄 Updating database with extracted session ID: {}",
extracted_session_id
);
match conn.execute(
"UPDATE agent_runs SET session_id = ?1, status = 'completed', completed_at = CURRENT_TIMESTAMP WHERE id = ?2",
params![extracted_session_id, run_id],
@@ -1359,7 +1396,10 @@ async fn spawn_agent_system(
}
}
} else {
error!("❌ Failed to open database to update session ID for run {}", run_id);
error!(
"❌ Failed to open database to update session ID for run {}",
run_id
);
}
// Cleanup will be handled by the cleanup_finished_processes function
@@ -1419,10 +1459,8 @@ pub async fn list_running_sessions(
// Cross-check with the process registry to ensure accuracy
// Get actually running processes from the registry
let registry_processes = registry.0.get_running_agent_processes()?;
let registry_run_ids: std::collections::HashSet<i64> = registry_processes
.iter()
.map(|p| p.run_id)
.collect();
let registry_run_ids: std::collections::HashSet<i64> =
registry_processes.iter().map(|p| p.run_id).collect();
// Filter out any database entries that aren't actually running in the registry
// This handles cases where processes crashed without updating the database
@@ -1624,7 +1662,10 @@ pub async fn get_session_output(
// Search for the session file in all project directories
let mut session_file_path = None;
log::info!("Searching for session file {} in all project directories", run.session_id);
log::info!(
"Searching for session file {} in all project directories",
run.session_id
);
if let Ok(entries) = std::fs::read_dir(&projects_dir) {
for entry in entries.filter_map(Result::ok) {
@@ -1652,7 +1693,11 @@ pub async fn get_session_output(
match tokio::fs::read_to_string(&session_path).await {
Ok(content) => Ok(content),
Err(e) => {
log::error!("Failed to read session file {}: {}", session_path.display(), e);
log::error!(
"Failed to read session file {}: {}",
session_path.display(),
e
);
// Fallback to live output if file read fails
let live_output = registry.0.get_live_output(run_id)?;
Ok(live_output)
@@ -1660,7 +1705,10 @@ pub async fn get_session_output(
}
} else {
// If session file not found, try the old method as fallback
log::warn!("Session file not found for {}, trying legacy method", run.session_id);
log::warn!(
"Session file not found for {}, trying legacy method",
run.session_id
);
match read_session_jsonl(&run.session_id, &run.project_path).await {
Ok(content) => Ok(content),
Err(_) => {
@@ -2174,7 +2222,10 @@ pub async fn load_agent_session_history(
// Search for the session file in all project directories
let mut session_file_path = None;
log::info!("Searching for session file {} in all project directories", session_id);
log::info!(
"Searching for session file {} in all project directories",
session_id
);
if let Ok(entries) = std::fs::read_dir(&projects_dir) {
for entry in entries.filter_map(Result::ok) {

View File

@@ -1,10 +1,10 @@
use serde::{Deserialize, Serialize};
use std::process::{Command, Stdio};
use log::{debug, error, info};
use std::net::TcpStream;
use std::time::Duration;
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use std::net::TcpStream;
use std::process::{Command, Stdio};
use std::sync::Mutex;
use std::time::Duration;
// 全局变量存储找到的 CCR 路径
static CCR_PATH: Lazy<Mutex<Option<String>>> = Lazy::new(|| Mutex::new(None));
@@ -52,7 +52,9 @@ fn get_possible_ccr_paths() -> Vec<String> {
paths.extend(candidate_binaries().into_iter().map(|s| s.to_string()));
// 获取用户主目录
let home = std::env::var("HOME").or_else(|_| std::env::var("USERPROFILE")).unwrap_or_default();
let home = std::env::var("HOME")
.or_else(|_| std::env::var("USERPROFILE"))
.unwrap_or_default();
#[cfg(target_os = "macos")]
{
@@ -81,12 +83,18 @@ fn get_possible_ccr_paths() -> Vec<String> {
#[cfg(target_os = "windows")]
{
// Windows 特定路径
let program_files = std::env::var("ProgramFiles").unwrap_or_else(|_| "C:\\Program Files".to_string());
let program_files_x86 = std::env::var("ProgramFiles(x86)").unwrap_or_else(|_| "C:\\Program Files (x86)".to_string());
let appdata = std::env::var("APPDATA").unwrap_or_else(|_| format!("{}\\AppData\\Roaming", home));
let program_files =
std::env::var("ProgramFiles").unwrap_or_else(|_| "C:\\Program Files".to_string());
let program_files_x86 = std::env::var("ProgramFiles(x86)")
.unwrap_or_else(|_| "C:\\Program Files (x86)".to_string());
let appdata =
std::env::var("APPDATA").unwrap_or_else(|_| format!("{}\\AppData\\Roaming", home));
for bin in [
"ccr.exe", "ccr.cmd", "claude-code-router.exe", "claude-code-router.cmd",
"ccr.exe",
"ccr.cmd",
"claude-code-router.exe",
"claude-code-router.cmd",
] {
paths.push(bin.to_string());
paths.push(format!("{}\\npm\\{}", appdata, bin));
@@ -114,7 +122,11 @@ fn get_possible_ccr_paths() -> Vec<String> {
/// 获取扩展的 PATH 环境变量
fn get_extended_path() -> String {
let mut extended_path = std::env::var("PATH").unwrap_or_default();
let separator = if cfg!(target_os = "windows") { ";" } else { ":" };
let separator = if cfg!(target_os = "windows") {
";"
} else {
":"
};
// 添加常见的额外路径
let additional_paths = if cfg!(target_os = "macos") {
@@ -129,10 +141,7 @@ fn get_extended_path() -> String {
} else if cfg!(target_os = "windows") {
vec![]
} else {
vec![
"/usr/local/bin",
"/opt/bin",
]
vec!["/usr/local/bin", "/opt/bin"]
};
// 添加用户特定路径
@@ -149,7 +158,9 @@ fn get_extended_path() -> String {
for entry in entries.flatten() {
let p = entry.path().join("bin");
if p.exists() {
if let Some(s) = p.to_str() { list.push(s.to_string()); }
if let Some(s) = p.to_str() {
list.push(s.to_string());
}
}
}
}
@@ -161,7 +172,9 @@ fn get_extended_path() -> String {
for entry in entries.flatten() {
let p = entry.path().join("bin");
if p.exists() {
if let Some(s) = p.to_str() { list.push(s.to_string()); }
if let Some(s) = p.to_str() {
list.push(s.to_string());
}
}
}
}
@@ -172,16 +185,16 @@ fn get_extended_path() -> String {
for entry in entries.flatten() {
let p = entry.path().join("installation").join("bin");
if p.exists() {
if let Some(s) = p.to_str() { list.push(s.to_string()); }
if let Some(s) = p.to_str() {
list.push(s.to_string());
}
}
}
}
list
} else if cfg!(target_os = "windows") {
if let Ok(appdata) = std::env::var("APPDATA") {
vec![
format!("{}\\npm", appdata),
]
vec![format!("{}\\npm", appdata)]
} else {
vec![]
}
@@ -237,9 +250,15 @@ fn find_ccr_via_shell() -> Option<String> {
.env("PATH", get_extended_path())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output() {
.output()
{
if output.status.success() {
let path = String::from_utf8_lossy(&output.stdout).lines().next().unwrap_or("").trim().to_string();
let path = String::from_utf8_lossy(&output.stdout)
.lines()
.next()
.unwrap_or("")
.trim()
.to_string();
if !path.is_empty() && test_ccr_command(&path) {
info!("Found ccr via shell: {}", path);
return Some(path);
@@ -258,15 +277,24 @@ fn find_ccr_via_shell() -> Option<String> {
for config in shell_configs {
if std::path::Path::new(&config).exists() {
let cmd = format!("source {} && (command -v ccr || command -v claude-code-router)", config);
let cmd = format!(
"source {} && (command -v ccr || command -v claude-code-router)",
config
);
if let Ok(output) = Command::new("sh")
.args(&["-c", &cmd])
.env("PATH", get_extended_path())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output() {
.output()
{
if output.status.success() {
let path = String::from_utf8_lossy(&output.stdout).lines().next().unwrap_or("").trim().to_string();
let path = String::from_utf8_lossy(&output.stdout)
.lines()
.next()
.unwrap_or("")
.trim()
.to_string();
if !path.is_empty() && test_ccr_command(&path) {
info!("Found ccr via shell config {}: {}", config, path);
return Some(path);
@@ -346,9 +374,15 @@ fn find_ccr_path() -> Option<String> {
.arg(format!("command -v {} || which {}", name, name))
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output() {
.output()
{
if output.status.success() {
let path = String::from_utf8_lossy(&output.stdout).lines().next().unwrap_or("").trim().to_string();
let path = String::from_utf8_lossy(&output.stdout)
.lines()
.next()
.unwrap_or("")
.trim()
.to_string();
if !path.is_empty() && test_ccr_command(&path) {
info!("Found {} using shell which: {}", name, path);
if let Ok(mut cached) = CCR_PATH.lock() {
@@ -361,7 +395,11 @@ fn find_ccr_path() -> Option<String> {
}
// 然后检查扩展后的 PATH
let separator = if cfg!(target_os = "windows") { ";" } else { ":" };
let separator = if cfg!(target_os = "windows") {
";"
} else {
":"
};
for path_dir in extended_path.split(separator) {
for name in candidate_binaries() {
let candidate = if cfg!(target_os = "windows") {
@@ -409,7 +447,10 @@ fn find_ccr_path() -> Option<String> {
}
}
error!("CCR not found in any location. Original PATH: {:?}", std::env::var("PATH"));
error!(
"CCR not found in any location. Original PATH: {:?}",
std::env::var("PATH")
);
error!("Extended PATH: {}", extended_path);
error!("Searched paths: {:?}", possible_paths);
None
@@ -448,7 +489,10 @@ fn test_ccr_command(path: &str) -> bool {
path.to_string()
};
debug!("Testing CCR command at: {} (real path: {})", path, real_path);
debug!(
"Testing CCR command at: {} (real path: {})",
path, real_path
);
// 如果是 .js 文件,使用 node 来执行
if real_path.ends_with(".js") {
@@ -567,7 +611,9 @@ pub async fn get_ccr_service_status() -> Result<CcrServiceStatus, String> {
for entry in entries.flatten() {
let p = entry.path().join("bin");
if p.exists() {
if let Some(s) = p.to_str() { scan_dirs.push(s.to_string()); }
if let Some(s) = p.to_str() {
scan_dirs.push(s.to_string());
}
}
}
}
@@ -580,7 +626,9 @@ pub async fn get_ccr_service_status() -> Result<CcrServiceStatus, String> {
for entry in entries.flatten() {
let p = entry.path().join("bin");
if p.exists() {
if let Some(s) = p.to_str() { scan_dirs.push(s.to_string()); }
if let Some(s) = p.to_str() {
scan_dirs.push(s.to_string());
}
}
}
}
@@ -590,7 +638,9 @@ pub async fn get_ccr_service_status() -> Result<CcrServiceStatus, String> {
for entry in entries.flatten() {
let p = entry.path().join("installation").join("bin");
if p.exists() {
if let Some(s) = p.to_str() { scan_dirs.push(s.to_string()); }
if let Some(s) = p.to_str() {
scan_dirs.push(s.to_string());
}
}
}
}
@@ -613,7 +663,8 @@ pub async fn get_ccr_service_status() -> Result<CcrServiceStatus, String> {
.env("PATH", get_extended_path())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output() {
.output()
{
Ok(output) => {
if output.status.success() {
let version = String::from_utf8_lossy(&output.stdout);
@@ -623,7 +674,7 @@ pub async fn get_ccr_service_status() -> Result<CcrServiceStatus, String> {
format!("Direct execution FAILED: {}", stderr.trim())
}
}
Err(e) => format!("Direct execution ERROR: {}", e)
Err(e) => format!("Direct execution ERROR: {}", e),
}
} else {
"No candidate binary found in Node manager dirs".to_string()
@@ -638,7 +689,9 @@ pub async fn get_ccr_service_status() -> Result<CcrServiceStatus, String> {
let files: Vec<String> = entries
.filter_map(|e| e.ok())
.filter_map(|e| e.file_name().to_str().map(|s| s.to_string()))
.filter(|name| name.contains("ccr") || name.contains("claude-code-router"))
.filter(|name| {
name.contains("ccr") || name.contains("claude-code-router")
})
.collect();
if !files.is_empty() {
scan_summary.push(format!("{} -> {:?}", dir, files));
@@ -728,7 +781,8 @@ pub async fn get_ccr_service_status() -> Result<CcrServiceStatus, String> {
info!("CCR status stderr: {}", stderr_output);
// 检查状态 - 明确检测运行和停止状态
let is_running = if status_output.contains("") || status_output.contains("Status: Not Running") {
let is_running =
if status_output.contains("") || status_output.contains("Status: Not Running") {
// 明确显示未运行
false
} else if status_output.contains("") || status_output.contains("Status: Running") {
@@ -755,9 +809,14 @@ pub async fn get_ccr_service_status() -> Result<CcrServiceStatus, String> {
info!("Parsing line for port: {}", line);
// 检查是否包含端口信息
if line.contains("Port:") || line.contains("port:") || line.contains("端口:") || line.contains("🌐") {
if line.contains("Port:")
|| line.contains("port:")
|| line.contains("端口:")
|| line.contains("🌐")
{
// 查找数字
let numbers: String = line.chars()
let numbers: String = line
.chars()
.skip_while(|c| !c.is_numeric())
.take_while(|c| c.is_numeric())
.collect();
@@ -775,15 +834,20 @@ pub async fn get_ccr_service_status() -> Result<CcrServiceStatus, String> {
// 提取API端点信息 - 支持多种格式
for line in status_output.lines() {
info!("Parsing line for endpoint: {}", line);
if line.contains("API Endpoint:") || line.contains("Endpoint:") ||
line.contains("http://") || line.contains("https://") || line.contains("📡") {
if line.contains("API Endpoint:")
|| line.contains("Endpoint:")
|| line.contains("http://")
|| line.contains("https://")
|| line.contains("📡")
{
// 尝试提取URL
if let Some(start) = line.find("http") {
let url_part = &line[start..];
// 找到URL的结束位置空格或行尾
let end = url_part.find(char::is_whitespace).unwrap_or(url_part.len());
let url = &url_part[..end];
if url.contains(":") && (url.contains("localhost") || url.contains("127.0.0.1")) {
if url.contains(":") && (url.contains("localhost") || url.contains("127.0.0.1"))
{
endpoint = Some(url.to_string());
info!("Successfully extracted endpoint: {}", url);
break;
@@ -795,9 +859,14 @@ pub async fn get_ccr_service_status() -> Result<CcrServiceStatus, String> {
// 提取进程ID信息 - 支持多种格式
for line in status_output.lines() {
info!("Parsing line for PID: {}", line);
if line.contains("Process ID:") || line.contains("PID:") || line.contains("pid:") || line.contains("🆔") {
if line.contains("Process ID:")
|| line.contains("PID:")
|| line.contains("pid:")
|| line.contains("🆔")
{
// 查找数字
let numbers: String = line.chars()
let numbers: String = line
.chars()
.skip_while(|c| !c.is_numeric())
.take_while(|c| c.is_numeric())
.collect();
@@ -828,7 +897,8 @@ pub async fn get_ccr_service_status() -> Result<CcrServiceStatus, String> {
if !is_running {
info!("Status command didn't detect running service, checking port 3456...");
// 尝试连接默认端口
match TcpStream::connect_timeout(&"127.0.0.1:3456".parse().unwrap(), Duration::from_secs(1)) {
match TcpStream::connect_timeout(&"127.0.0.1:3456".parse().unwrap(), Duration::from_secs(1))
{
Ok(_) => {
info!("Port 3456 is open, service appears to be running");
return Ok(CcrServiceStatus {
@@ -998,12 +1068,9 @@ pub async fn open_ccr_ui() -> Result<String, String> {
/// 获取 CCR 配置路径
#[tauri::command]
pub async fn get_ccr_config_path() -> Result<String, String> {
let home_dir = dirs::home_dir()
.ok_or("Could not find home directory")?;
let home_dir = dirs::home_dir().ok_or("Could not find home directory")?;
let config_path = home_dir
.join(".claude-code-router")
.join("config.json");
let config_path = home_dir.join(".claude-code-router").join("config.json");
Ok(config_path.to_string_lossy().to_string())
}

View File

@@ -10,7 +10,6 @@ use tauri::{AppHandle, Emitter, Manager};
use tokio::process::{Child, Command};
use tokio::sync::Mutex;
/// Global state to track current Claude process
pub struct ClaudeProcessState {
pub current_process: Arc<Mutex<Option<Child>>>,
@@ -267,11 +266,7 @@ fn create_command_with_env(program: &str) -> Command {
}
/// Creates a system binary command with the given arguments
fn create_system_command(
claude_path: &str,
args: Vec<String>,
project_path: &str,
) -> Command {
fn create_system_command(claude_path: &str, args: Vec<String>, project_path: &str) -> Command {
let mut cmd = create_command_with_env(claude_path);
// Add all arguments
@@ -293,8 +288,23 @@ pub async fn watch_claude_project_directory(
app_handle: tauri::AppHandle,
) -> Result<(), String> {
use crate::file_watcher::FileWatcherState;
let project_path_buf = PathBuf::from(&project_path);
log::info!("Starting to watch Claude project directory for project: {}", project_path);
// 支持直接传入位于 ~/.claude 或 ~/.claudia 下的特殊目录(例如智能会话)
if (project_path.contains("/.claude/") || project_path.contains("/.claudia/"))
&& project_path_buf.exists()
{
let file_watcher_state = app_handle.state::<FileWatcherState>();
let path_str = project_path_buf.to_string_lossy().to_string();
return file_watcher_state
.with_manager(|manager| manager.watch_path(&path_str, false))
.map_err(|e| format!("Failed to watch Claude project directory: {}", e));
}
log::info!(
"Starting to watch Claude project directory for project: {}",
project_path
);
let claude_dir = get_claude_dir().map_err(|e| e.to_string())?;
let projects_dir = claude_dir.join("projects");
@@ -316,9 +326,11 @@ pub async fn watch_claude_project_directory(
let file_watcher_state = app_handle.state::<FileWatcherState>();
let path_str = path.to_string_lossy().to_string();
return file_watcher_state.with_manager(|manager| {
manager.watch_path(&path_str, false)
}).map_err(|e| format!("Failed to watch Claude project directory: {}", e));
return file_watcher_state
.with_manager(|manager| manager.watch_path(&path_str, false))
.map_err(|e| {
format!("Failed to watch Claude project directory: {}", e)
});
}
}
}
@@ -336,8 +348,21 @@ pub async fn unwatch_claude_project_directory(
app_handle: tauri::AppHandle,
) -> Result<(), String> {
use crate::file_watcher::FileWatcherState;
let project_path_buf = PathBuf::from(&project_path);
log::info!("Stopping watch on Claude project directory for project: {}", project_path);
// 对智能会话等位于 ~/.claude* 下的目录执行直接取消
if project_path.contains("/.claude/") || project_path.contains("/.claudia/") {
let file_watcher_state = app_handle.state::<FileWatcherState>();
let path_str = project_path_buf.to_string_lossy().to_string();
return file_watcher_state
.with_manager(|manager| manager.unwatch_path(&path_str))
.map_err(|e| format!("Failed to stop watching Claude project directory: {}", e));
}
log::info!(
"Stopping watch on Claude project directory for project: {}",
project_path
);
let claude_dir = get_claude_dir().map_err(|e| e.to_string())?;
let projects_dir = claude_dir.join("projects");
@@ -359,9 +384,14 @@ pub async fn unwatch_claude_project_directory(
let file_watcher_state = app_handle.state::<FileWatcherState>();
let path_str = path.to_string_lossy().to_string();
return file_watcher_state.with_manager(|manager| {
manager.unwatch_path(&path_str)
}).map_err(|e| format!("Failed to stop watching Claude project directory: {}", e));
return file_watcher_state
.with_manager(|manager| manager.unwatch_path(&path_str))
.map_err(|e| {
format!(
"Failed to stop watching Claude project directory: {}",
e
)
});
}
}
}
@@ -660,13 +690,11 @@ pub async fn check_claude_version(_app: AppHandle) -> Result<ClaudeVersionStatus
// Find the best installation (highest version or first found)
let best_installation = installations
.into_iter()
.max_by(|a, b| {
match (&a.version, &b.version) {
.max_by(|a, b| match (&a.version, &b.version) {
(Some(v1), Some(v2)) => v1.cmp(v2),
(Some(_), None) => std::cmp::Ordering::Greater,
(None, Some(_)) => std::cmp::Ordering::Less,
(None, None) => std::cmp::Ordering::Equal,
}
})
.unwrap(); // Safe because we checked is_empty() above
@@ -866,8 +894,6 @@ pub async fn load_session_history(
Ok(messages)
}
/// Execute a new interactive Claude Code session with streaming output
#[tauri::command]
pub async fn execute_claude_code(
@@ -887,7 +913,7 @@ pub async fn execute_claude_code(
// Map opus-plan to the appropriate Claude CLI parameter
let claude_model = match model.as_str() {
"opus-plan" => "opusplan".to_string(),
_ => model.clone()
_ => model.clone(),
};
let args = vec![
@@ -924,7 +950,7 @@ pub async fn continue_claude_code(
// Map opus-plan to the appropriate Claude CLI parameter
let claude_model = match model.as_str() {
"opus-plan" => "opusplan".to_string(),
_ => model.clone()
_ => model.clone(),
};
let args = vec![
@@ -964,7 +990,7 @@ pub async fn resume_claude_code(
// Map opus-plan to the appropriate Claude CLI parameter
let claude_model = match model.as_str() {
"opus-plan" => "opusplan".to_string(),
_ => model.clone()
_ => model.clone(),
};
let args = vec![
@@ -1003,8 +1029,12 @@ pub async fn cancel_claude_execution(
let registry = app.state::<crate::process::ProcessRegistryState>();
match registry.0.get_claude_session_by_id(sid) {
Ok(Some(process_info)) => {
log::info!("Found process in registry for session {}: run_id={}, PID={}",
sid, process_info.run_id, process_info.pid);
log::info!(
"Found process in registry for session {}: run_id={}, PID={}",
sid,
process_info.run_id,
process_info.pid
);
match registry.0.kill_process(process_info.run_id).await {
Ok(success) => {
if success {
@@ -1037,7 +1067,10 @@ pub async fn cancel_claude_execution(
if let Some(mut child) = current_process.take() {
// Try to get the PID before killing
let pid = child.id();
log::info!("Attempting to kill Claude process via ClaudeProcessState with PID: {:?}", pid);
log::info!(
"Attempting to kill Claude process via ClaudeProcessState with PID: {:?}",
pid
);
// Kill the process
match child.kill().await {
@@ -1046,7 +1079,10 @@ pub async fn cancel_claude_execution(
killed = true;
}
Err(e) => {
log::error!("Failed to kill Claude process via ClaudeProcessState: {}", e);
log::error!(
"Failed to kill Claude process via ClaudeProcessState: {}",
e
);
// Method 3: If we have a PID, try system kill as last resort
if let Some(pid) = pid {
@@ -1131,9 +1167,15 @@ pub async fn get_claude_session_output(
}
/// Helper function to spawn Claude process and handle streaming
async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String, model: String, project_path: String) -> Result<(), String> {
use tokio::io::{AsyncBufReadExt, BufReader};
async fn spawn_claude_process(
app: AppHandle,
mut cmd: Command,
prompt: String,
model: String,
project_path: String,
) -> Result<(), String> {
use std::sync::Mutex;
use tokio::io::{AsyncBufReadExt, BufReader};
// Spawn the process
let mut child = cmd
@@ -1146,10 +1188,7 @@ async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String,
// Get the child PID for logging
let pid = child.id().unwrap_or(0);
log::info!(
"Spawned Claude process with PID: {:?}",
pid
);
log::info!("Spawned Claude process with PID: {:?}", pid);
// Create readers first (before moving child)
let stdout_reader = BufReader::new(stdout);
@@ -1264,10 +1303,8 @@ async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String,
// Add a small delay to ensure all messages are processed
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
if let Some(ref session_id) = *session_id_holder_clone3.lock().unwrap() {
let _ = app_handle_wait.emit(
&format!("claude-complete:{}", session_id),
status.success(),
);
let _ = app_handle_wait
.emit(&format!("claude-complete:{}", session_id), status.success());
}
// Also emit to the generic event for backward compatibility
let _ = app_handle_wait.emit("claude-complete", status.success());
@@ -1277,8 +1314,8 @@ async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String,
// Add a small delay to ensure all messages are processed
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
if let Some(ref session_id) = *session_id_holder_clone3.lock().unwrap() {
let _ = app_handle_wait
.emit(&format!("claude-complete:{}", session_id), false);
let _ =
app_handle_wait.emit(&format!("claude-complete:{}", session_id), false);
}
// Also emit to the generic event for backward compatibility
let _ = app_handle_wait.emit("claude-complete", false);
@@ -1298,7 +1335,6 @@ async fn spawn_claude_process(app: AppHandle, mut cmd: Command, prompt: String,
Ok(())
}
/// Lists files and directories in a given path
#[tauri::command]
pub async fn list_directory_contents(directory_path: String) -> Result<Vec<FileEntry>, String> {
@@ -2015,38 +2051,51 @@ pub async fn track_session_messages(
/// Gets hooks configuration from settings at specified scope
#[tauri::command]
pub async fn get_hooks_config(scope: String, project_path: Option<String>) -> Result<serde_json::Value, String> {
log::info!("Getting hooks config for scope: {}, project: {:?}", scope, project_path);
pub async fn get_hooks_config(
scope: String,
project_path: Option<String>,
) -> Result<serde_json::Value, String> {
log::info!(
"Getting hooks config for scope: {}, project: {:?}",
scope,
project_path
);
let settings_path = match scope.as_str() {
"user" => {
get_claude_dir()
"user" => get_claude_dir()
.map_err(|e| e.to_string())?
.join("settings.json")
},
.join("settings.json"),
"project" => {
let path = project_path.ok_or("Project path required for project scope")?;
PathBuf::from(path).join(".claude").join("settings.json")
},
}
"local" => {
let path = project_path.ok_or("Project path required for local scope")?;
PathBuf::from(path).join(".claude").join("settings.local.json")
},
_ => return Err("Invalid scope".to_string())
PathBuf::from(path)
.join(".claude")
.join("settings.local.json")
}
_ => return Err("Invalid scope".to_string()),
};
if !settings_path.exists() {
log::info!("Settings file does not exist at {:?}, returning empty hooks", settings_path);
log::info!(
"Settings file does not exist at {:?}, returning empty hooks",
settings_path
);
return Ok(serde_json::json!({}));
}
let content = fs::read_to_string(&settings_path)
.map_err(|e| format!("Failed to read settings: {}", e))?;
let settings: serde_json::Value = serde_json::from_str(&content)
.map_err(|e| format!("Failed to parse settings: {}", e))?;
let settings: serde_json::Value =
serde_json::from_str(&content).map_err(|e| format!("Failed to parse settings: {}", e))?;
Ok(settings.get("hooks").cloned().unwrap_or(serde_json::json!({})))
Ok(settings
.get("hooks")
.cloned()
.unwrap_or(serde_json::json!({})))
}
/// Updates hooks configuration in settings at specified scope
@@ -2054,39 +2103,40 @@ pub async fn get_hooks_config(scope: String, project_path: Option<String>) -> Re
pub async fn update_hooks_config(
scope: String,
hooks: serde_json::Value,
project_path: Option<String>
project_path: Option<String>,
) -> Result<String, String> {
log::info!("Updating hooks config for scope: {}, project: {:?}", scope, project_path);
log::info!(
"Updating hooks config for scope: {}, project: {:?}",
scope,
project_path
);
let settings_path = match scope.as_str() {
"user" => {
get_claude_dir()
"user" => get_claude_dir()
.map_err(|e| e.to_string())?
.join("settings.json")
},
.join("settings.json"),
"project" => {
let path = project_path.ok_or("Project path required for project scope")?;
let claude_dir = PathBuf::from(path).join(".claude");
fs::create_dir_all(&claude_dir)
.map_err(|e| format!("Failed to create .claude directory: {}", e))?;
claude_dir.join("settings.json")
},
}
"local" => {
let path = project_path.ok_or("Project path required for local scope")?;
let claude_dir = PathBuf::from(path).join(".claude");
fs::create_dir_all(&claude_dir)
.map_err(|e| format!("Failed to create .claude directory: {}", e))?;
claude_dir.join("settings.local.json")
},
_ => return Err("Invalid scope".to_string())
}
_ => return Err("Invalid scope".to_string()),
};
// Read existing settings or create new
let mut settings = if settings_path.exists() {
let content = fs::read_to_string(&settings_path)
.map_err(|e| format!("Failed to read settings: {}", e))?;
serde_json::from_str(&content)
.map_err(|e| format!("Failed to parse settings: {}", e))?
serde_json::from_str(&content).map_err(|e| format!("Failed to parse settings: {}", e))?
} else {
serde_json::json!({})
};
@@ -2130,6 +2180,6 @@ pub async fn validate_hook_command(command: String) -> Result<serde_json::Value,
}))
}
}
Err(e) => Err(format!("Failed to validate command: {}", e))
Err(e) => Err(format!("Failed to validate command: {}", e)),
}
}

View File

@@ -1,8 +1,8 @@
use crate::file_watcher::FileWatcherState;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
use tauri::State;
use crate::file_watcher::FileWatcherState;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct FileNode {
@@ -23,15 +23,13 @@ pub struct FileSystemChange {
/// 读取文件内容
#[tauri::command]
pub async fn read_file(path: String) -> Result<String, String> {
fs::read_to_string(&path)
.map_err(|e| format!("Failed to read file: {}", e))
fs::read_to_string(&path).map_err(|e| format!("Failed to read file: {}", e))
}
/// 写入文件内容
#[tauri::command]
pub async fn write_file(path: String, content: String) -> Result<(), String> {
fs::write(&path, content)
.map_err(|e| format!("Failed to write file: {}", e))
fs::write(&path, content).map_err(|e| format!("Failed to write file: {}", e))
}
/// 读取目录树结构
@@ -47,7 +45,8 @@ pub async fn read_directory_tree(
}
let max_depth = max_depth.unwrap_or(5);
let ignore_patterns = ignore_patterns.unwrap_or_else(|| vec![
let ignore_patterns = ignore_patterns.unwrap_or_else(|| {
vec![
String::from("node_modules"),
String::from(".git"),
String::from("target"),
@@ -57,10 +56,10 @@ pub async fn read_directory_tree(
String::from(".vscode"),
String::from("__pycache__"),
String::from(".DS_Store"),
]);
]
});
read_directory_recursive(path, 0, max_depth, &ignore_patterns)
.map_err(|e| e.to_string())
read_directory_recursive(path, 0, max_depth, &ignore_patterns).map_err(|e| e.to_string())
}
fn read_directory_recursive(
@@ -69,7 +68,8 @@ fn read_directory_recursive(
max_depth: u32,
ignore_patterns: &[String],
) -> std::io::Result<FileNode> {
let name = path.file_name()
let name = path
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("")
.to_string();
@@ -81,9 +81,9 @@ fn read_directory_recursive(
if current_depth < max_depth {
// Check if directory should be ignored
let should_ignore = ignore_patterns.iter().any(|pattern| {
&name == pattern || name.starts_with('.')
});
let should_ignore = ignore_patterns
.iter()
.any(|pattern| &name == pattern || name.starts_with('.'));
if !should_ignore {
let entries = fs::read_dir(path)?;
@@ -107,12 +107,10 @@ fn read_directory_recursive(
}
// Sort children: directories first, then files, alphabetically
children.sort_by(|a, b| {
match (a.file_type.as_str(), b.file_type.as_str()) {
children.sort_by(|a, b| match (a.file_type.as_str(), b.file_type.as_str()) {
("directory", "file") => std::cmp::Ordering::Less,
("file", "directory") => std::cmp::Ordering::Greater,
_ => a.name.to_lowercase().cmp(&b.name.to_lowercase()),
}
});
}
}
@@ -123,7 +121,8 @@ fn read_directory_recursive(
file_type: String::from("directory"),
children: Some(children),
size: None,
modified: metadata.modified()
modified: metadata
.modified()
.ok()
.and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
.map(|d| d.as_secs()),
@@ -135,7 +134,8 @@ fn read_directory_recursive(
file_type: String::from("file"),
children: None,
size: Some(metadata.len()),
modified: metadata.modified()
modified: metadata
.modified()
.ok()
.and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
.map(|d| d.as_secs()),
@@ -176,8 +176,7 @@ fn search_recursive(
return Ok(());
}
let entries = fs::read_dir(dir)
.map_err(|e| format!("Failed to read directory: {}", e))?;
let entries = fs::read_dir(dir).map_err(|e| format!("Failed to read directory: {}", e))?;
for entry in entries {
if results.len() >= max_results {
@@ -186,7 +185,8 @@ fn search_recursive(
let entry = entry.map_err(|e| format!("Failed to read entry: {}", e))?;
let path = entry.path();
let file_name = path.file_name()
let file_name = path
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("")
.to_lowercase();
@@ -200,7 +200,8 @@ fn search_recursive(
if !file_name.starts_with('.')
&& file_name != "node_modules"
&& file_name != "target"
&& file_name != "dist" {
&& file_name != "dist"
{
let _ = search_recursive(&path, query, results, max_results);
}
}
@@ -217,10 +218,10 @@ pub async fn get_file_info(path: String) -> Result<FileNode, String> {
return Err(format!("Path does not exist: {}", path.display()));
}
let metadata = fs::metadata(path)
.map_err(|e| format!("Failed to get metadata: {}", e))?;
let metadata = fs::metadata(path).map_err(|e| format!("Failed to get metadata: {}", e))?;
let name = path.file_name()
let name = path
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("")
.to_string();
@@ -239,7 +240,8 @@ pub async fn get_file_info(path: String) -> Result<FileNode, String> {
} else {
None
},
modified: metadata.modified()
modified: metadata
.modified()
.ok()
.and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
.map(|d| d.as_secs()),
@@ -255,9 +257,7 @@ pub async fn watch_directory(
) -> Result<(), String> {
let recursive = recursive.unwrap_or(false);
watcher_state.with_manager(|manager| {
manager.watch_path(&path, recursive)
})
watcher_state.with_manager(|manager| manager.watch_path(&path, recursive))
}
/// 停止监听指定路径
@@ -266,9 +266,7 @@ pub async fn unwatch_directory(
watcher_state: State<'_, FileWatcherState>,
path: String,
) -> Result<(), String> {
watcher_state.with_manager(|manager| {
manager.unwatch_path(&path)
})
watcher_state.with_manager(|manager| manager.unwatch_path(&path))
}
/// 获取当前监听的路径列表
@@ -276,9 +274,7 @@ pub async fn unwatch_directory(
pub async fn get_watched_paths(
watcher_state: State<'_, FileWatcherState>,
) -> Result<Vec<String>, String> {
watcher_state.with_manager(|manager| {
Ok(manager.get_watched_paths())
})
watcher_state.with_manager(|manager| Ok(manager.get_watched_paths()))
}
/// 获取文件树(简化版,供文件浏览器使用)
@@ -302,8 +298,8 @@ pub async fn get_file_tree(project_path: String) -> Result<Vec<FileNode>, String
];
// 增加最大深度为 10以支持更深的文件夹结构
let root_node = read_directory_recursive(path, 0, 10, &ignore_patterns)
.map_err(|e| e.to_string())?;
let root_node =
read_directory_recursive(path, 0, 10, &ignore_patterns).map_err(|e| e.to_string())?;
// Return children of root node if it has any
Ok(root_node.children.unwrap_or_default())

View File

@@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize};
use std::process::Command;
use std::path::Path;
use std::process::Command;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct GitStatus {
@@ -94,8 +94,7 @@ pub async fn get_git_status(path: String) -> Result<GitStatus, String> {
.output()
.ok();
let remote_url = remote_output
.and_then(|o| {
let remote_url = remote_output.and_then(|o| {
if o.status.success() {
Some(String::from_utf8_lossy(&o.stdout).trim().to_string())
} else {
@@ -161,7 +160,14 @@ fn get_tracking_info(path: &Path) -> Result<(u32, u32), String> {
Ok((ahead, behind))
}
fn parse_git_status(status_text: &str) -> (Vec<GitFileStatus>, Vec<GitFileStatus>, Vec<GitFileStatus>, Vec<GitFileStatus>) {
fn parse_git_status(
status_text: &str,
) -> (
Vec<GitFileStatus>,
Vec<GitFileStatus>,
Vec<GitFileStatus>,
Vec<GitFileStatus>,
) {
let mut staged = Vec::new();
let mut modified = Vec::new();
let mut untracked = Vec::new();
@@ -197,7 +203,7 @@ fn parse_git_status(status_text: &str) -> (Vec<GitFileStatus>, Vec<GitFileStatus
status: "modified".to_string(),
staged: false,
});
},
}
"A " | "AM" => staged.push(GitFileStatus {
path: file_path,
status: "added".to_string(),

View File

@@ -1,6 +1,6 @@
use tauri::command;
use serde::{Deserialize, Serialize};
use crate::i18n;
use serde::{Deserialize, Serialize};
use tauri::command;
#[derive(Debug, Serialize, Deserialize)]
pub struct LanguageSettings {
@@ -14,8 +14,7 @@ pub async fn get_current_language() -> Result<String, String> {
#[command]
pub async fn set_language(locale: String) -> Result<(), String> {
i18n::set_locale(&locale)
.map_err(|e| format!("Failed to set language: {}", e))?;
i18n::set_locale(&locale).map_err(|e| format!("Failed to set language: {}", e))?;
log::info!("Language changed to: {}", locale);
Ok(())
@@ -23,5 +22,8 @@ pub async fn set_language(locale: String) -> Result<(), String> {
#[command]
pub async fn get_supported_languages() -> Result<Vec<String>, String> {
Ok(i18n::SUPPORTED_LOCALES.iter().map(|&s| s.to_string()).collect())
Ok(i18n::SUPPORTED_LOCALES
.iter()
.map(|&s| s.to_string())
.collect())
}

View File

@@ -792,7 +792,12 @@ pub async fn mcp_export_servers(app: AppHandle) -> Result<MCPExportResult, Strin
}
Ok(MCPExportResult {
format: if export_configs.len() == 1 { "single" } else { "multiple" }.to_string(),
format: if export_configs.len() == 1 {
"single"
} else {
"multiple"
}
.to_string(),
servers: export_configs,
})
}

View File

@@ -1,19 +1,19 @@
pub mod agents;
pub mod ccr;
pub mod claude;
pub mod mcp;
pub mod usage;
pub mod usage_index;
pub mod usage_cache;
pub mod storage;
pub mod slash_commands;
pub mod proxy;
pub mod language;
pub mod relay_stations;
pub mod relay_adapters;
pub mod packycode_nodes;
pub mod filesystem;
pub mod git;
pub mod terminal;
pub mod ccr;
pub mod system;
pub mod language;
pub mod mcp;
pub mod packycode_nodes;
pub mod proxy;
pub mod relay_adapters;
pub mod relay_stations;
pub mod slash_commands;
pub mod smart_sessions;
pub mod storage;
pub mod system;
pub mod terminal;
pub mod usage;
pub mod usage_cache;
pub mod usage_index;

View File

@@ -1,8 +1,8 @@
use anyhow::Result;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use std::time::{Duration, Instant};
use reqwest::Client;
use tauri::command;
use anyhow::Result;
/// PackyCode 节点类型
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -156,7 +156,11 @@ async fn test_node_speed(node: &PackycodeNode) -> NodeSpeedTestResult {
},
response_time,
success,
error: if success { None } else { Some("响应时间过长".to_string()) },
error: if success {
None
} else {
Some("响应时间过长".to_string())
},
}
}
Err(e) => {
@@ -192,15 +196,13 @@ pub async fn test_all_packycode_nodes() -> Result<Vec<NodeSpeedTestResult>, Stri
let mut results = Vec::new();
// 并发测试所有节点
let futures: Vec<_> = nodes
.iter()
.map(|node| test_node_speed(node))
.collect();
let futures: Vec<_> = nodes.iter().map(|node| test_node_speed(node)).collect();
// 等待所有测试完成
for (i, future) in futures.into_iter().enumerate() {
let result = future.await;
log::info!("节点 {} 测速结果: {}ms, 成功: {}",
log::info!(
"节点 {} 测速结果: {}ms, 成功: {}",
nodes[i].name,
result.response_time,
result.success
@@ -209,12 +211,10 @@ pub async fn test_all_packycode_nodes() -> Result<Vec<NodeSpeedTestResult>, Stri
}
// 按响应时间排序(成功的节点优先,然后按延迟排序)
results.sort_by(|a, b| {
match (a.success, b.success) {
results.sort_by(|a, b| match (a.success, b.success) {
(true, false) => std::cmp::Ordering::Less,
(false, true) => std::cmp::Ordering::Greater,
_ => a.response_time.cmp(&b.response_time),
}
});
Ok(results)
@@ -244,7 +244,8 @@ pub async fn auto_select_best_node() -> Result<PackycodeNode, String> {
for (i, future) in futures.into_iter().enumerate() {
let result = future.await;
log::info!("节点 {} - 延迟: {}ms, 可用: {}",
log::info!(
"节点 {} - 延迟: {}ms, 可用: {}",
test_nodes[i].name,
result.response_time,
result.success
@@ -255,9 +256,10 @@ pub async fn auto_select_best_node() -> Result<PackycodeNode, String> {
None => {
log::info!("初始最佳节点: {}", result.node.name);
best_node = Some((result.node, result.response_time));
},
}
Some((_, best_time)) if result.response_time < *best_time => {
log::info!("发现更快节点: {} ({}ms < {}ms)",
log::info!(
"发现更快节点: {} ({}ms < {}ms)",
result.node.name,
result.response_time,
best_time
@@ -273,7 +275,7 @@ pub async fn auto_select_best_node() -> Result<PackycodeNode, String> {
Some((node, time)) => {
log::info!("最佳节点选择: {} (延迟: {}ms)", node.name, time);
Ok(node)
},
}
None => {
log::error!("没有找到可用的节点");
Err("没有找到可用的节点".to_string())

View File

@@ -1,6 +1,6 @@
use rusqlite::params;
use serde::{Deserialize, Serialize};
use tauri::State;
use rusqlite::params;
use crate::commands::agents::AgentDb;
@@ -72,8 +72,14 @@ pub async fn save_proxy_settings(
// Save each setting
let values = vec![
("proxy_enabled", settings.enabled.to_string()),
("proxy_http", settings.http_proxy.clone().unwrap_or_default()),
("proxy_https", settings.https_proxy.clone().unwrap_or_default()),
(
"proxy_http",
settings.http_proxy.clone().unwrap_or_default(),
),
(
"proxy_https",
settings.https_proxy.clone().unwrap_or_default(),
),
("proxy_no", settings.no_proxy.clone().unwrap_or_default()),
("proxy_all", settings.all_proxy.clone().unwrap_or_default()),
];
@@ -82,7 +88,8 @@ pub async fn save_proxy_settings(
conn.execute(
"INSERT OR REPLACE INTO app_settings (key, value) VALUES (?1, ?2)",
params![key, value],
).map_err(|e| format!("Failed to save {}: {}", key, e))?;
)
.map_err(|e| format!("Failed to save {}: {}", key, e))?;
}
// Apply the proxy settings immediately to the current process

View File

@@ -8,7 +8,7 @@ use std::time::Duration;
use tauri::{command, State};
use crate::commands::agents::AgentDb;
use crate::commands::relay_stations::{RelayStationAdapter, RelayStation};
use crate::commands::relay_stations::{RelayStation, RelayStationAdapter};
use crate::i18n;
// 创建HTTP客户端的辅助函数
@@ -97,16 +97,38 @@ pub trait StationAdapter: Send + Sync {
async fn test_connection(&self, station: &RelayStation) -> Result<ConnectionTestResult>;
/// 获取使用日志
async fn get_usage_logs(&self, station: &RelayStation, user_id: &str, page: Option<usize>, size: Option<usize>) -> Result<Value>;
async fn get_usage_logs(
&self,
station: &RelayStation,
user_id: &str,
page: Option<usize>,
size: Option<usize>,
) -> Result<Value>;
/// 列出 Tokens
async fn list_tokens(&self, station: &RelayStation, page: Option<usize>, size: Option<usize>) -> Result<TokenPaginationResponse>;
async fn list_tokens(
&self,
station: &RelayStation,
page: Option<usize>,
size: Option<usize>,
) -> Result<TokenPaginationResponse>;
/// 创建 Token
async fn create_token(&self, station: &RelayStation, name: &str, quota: Option<i64>) -> Result<TokenInfo>;
async fn create_token(
&self,
station: &RelayStation,
name: &str,
quota: Option<i64>,
) -> Result<TokenInfo>;
/// 更新 Token
async fn update_token(&self, station: &RelayStation, token_id: &str, name: Option<&str>, quota: Option<i64>) -> Result<TokenInfo>;
async fn update_token(
&self,
station: &RelayStation,
token_id: &str,
name: Option<&str>,
quota: Option<i64>,
) -> Result<TokenInfo>;
/// 删除 Token
async fn delete_token(&self, station: &RelayStation, token_id: &str) -> Result<String>;
@@ -137,7 +159,10 @@ impl StationAdapter for PackycodeAdapter {
metadata: Some({
let mut map = HashMap::new();
map.insert("adapter_type".to_string(), json!("packycode"));
map.insert("support_features".to_string(), json!(["quota_query", "usage_stats"]));
map.insert(
"support_features".to_string(),
json!(["quota_query", "usage_stats"]),
);
map
}),
quota_per_unit: Some(1),
@@ -162,21 +187,20 @@ impl StationAdapter for PackycodeAdapter {
Ok(UserInfo {
id: "packycode_user".to_string(),
username: data.get("username")
username: data
.get("username")
.and_then(|v| v.as_str())
.unwrap_or("PackyCode用户")
.to_string(),
display_name: Some("PackyCode用户".to_string()),
email: data.get("email")
email: data
.get("email")
.and_then(|v| v.as_str())
.map(|s| s.to_string()),
quota: data.get("quota")
.and_then(|v| v.as_i64())
.unwrap_or(0),
used_quota: data.get("used_quota")
.and_then(|v| v.as_i64())
.unwrap_or(0),
request_count: data.get("request_count")
quota: data.get("quota").and_then(|v| v.as_i64()).unwrap_or(0),
used_quota: data.get("used_quota").and_then(|v| v.as_i64()).unwrap_or(0),
request_count: data
.get("request_count")
.and_then(|v| v.as_i64())
.unwrap_or(0),
group: "default".to_string(),
@@ -194,8 +218,10 @@ impl StationAdapter for PackycodeAdapter {
success: true,
response_time,
message: format!("{} - 连接成功", info.name),
details: Some(format!("服务版本: {}",
info.version.unwrap_or_else(|| "Unknown".to_string()))),
details: Some(format!(
"服务版本: {}",
info.version.unwrap_or_else(|| "Unknown".to_string())
)),
})
}
Err(e) => {
@@ -210,7 +236,13 @@ impl StationAdapter for PackycodeAdapter {
}
}
async fn get_usage_logs(&self, _station: &RelayStation, _user_id: &str, _page: Option<usize>, _size: Option<usize>) -> Result<Value> {
async fn get_usage_logs(
&self,
_station: &RelayStation,
_user_id: &str,
_page: Option<usize>,
_size: Option<usize>,
) -> Result<Value> {
// PackyCode 暂不支持详细使用日志
Ok(json!({
"logs": [],
@@ -218,21 +250,45 @@ impl StationAdapter for PackycodeAdapter {
}))
}
async fn list_tokens(&self, _station: &RelayStation, _page: Option<usize>, _size: Option<usize>) -> Result<TokenPaginationResponse> {
async fn list_tokens(
&self,
_station: &RelayStation,
_page: Option<usize>,
_size: Option<usize>,
) -> Result<TokenPaginationResponse> {
// PackyCode 使用单一 Token不支持多 Token 管理
Err(anyhow::anyhow!(i18n::t("relay_adapter.packycode_single_token")))
Err(anyhow::anyhow!(i18n::t(
"relay_adapter.packycode_single_token"
)))
}
async fn create_token(&self, _station: &RelayStation, _name: &str, _quota: Option<i64>) -> Result<TokenInfo> {
Err(anyhow::anyhow!(i18n::t("relay_adapter.packycode_single_token")))
async fn create_token(
&self,
_station: &RelayStation,
_name: &str,
_quota: Option<i64>,
) -> Result<TokenInfo> {
Err(anyhow::anyhow!(i18n::t(
"relay_adapter.packycode_single_token"
)))
}
async fn update_token(&self, _station: &RelayStation, _token_id: &str, _name: Option<&str>, _quota: Option<i64>) -> Result<TokenInfo> {
Err(anyhow::anyhow!(i18n::t("relay_adapter.packycode_single_token")))
async fn update_token(
&self,
_station: &RelayStation,
_token_id: &str,
_name: Option<&str>,
_quota: Option<i64>,
) -> Result<TokenInfo> {
Err(anyhow::anyhow!(i18n::t(
"relay_adapter.packycode_single_token"
)))
}
async fn delete_token(&self, _station: &RelayStation, _token_id: &str) -> Result<String> {
Err(anyhow::anyhow!(i18n::t("relay_adapter.packycode_single_token")))
Err(anyhow::anyhow!(i18n::t(
"relay_adapter.packycode_single_token"
)))
}
}
@@ -285,8 +341,7 @@ impl StationAdapter for CustomAdapter {
let response_time = start_time.elapsed().as_millis() as u64;
match response {
Ok(resp) => {
Ok(ConnectionTestResult {
Ok(resp) => Ok(ConnectionTestResult {
success: resp.status().is_success(),
response_time,
message: if resp.status().is_success() {
@@ -295,40 +350,67 @@ impl StationAdapter for CustomAdapter {
format!("HTTP {}: 服务器响应错误", resp.status())
},
details: Some(format!("响应状态: {}", resp.status())),
})
}
Err(e) => {
Ok(ConnectionTestResult {
}),
Err(e) => Ok(ConnectionTestResult {
success: false,
response_time,
message: format!("连接失败: {}", e),
details: None,
})
}
}),
}
}
async fn get_usage_logs(&self, _station: &RelayStation, _user_id: &str, _page: Option<usize>, _size: Option<usize>) -> Result<Value> {
async fn get_usage_logs(
&self,
_station: &RelayStation,
_user_id: &str,
_page: Option<usize>,
_size: Option<usize>,
) -> Result<Value> {
Ok(json!({
"logs": [],
"message": "自定义适配器暂不支持使用日志查询"
}))
}
async fn list_tokens(&self, _station: &RelayStation, _page: Option<usize>, _size: Option<usize>) -> Result<TokenPaginationResponse> {
Err(anyhow::anyhow!(i18n::t("relay_adapter.token_management_not_available")))
async fn list_tokens(
&self,
_station: &RelayStation,
_page: Option<usize>,
_size: Option<usize>,
) -> Result<TokenPaginationResponse> {
Err(anyhow::anyhow!(i18n::t(
"relay_adapter.token_management_not_available"
)))
}
async fn create_token(&self, _station: &RelayStation, _name: &str, _quota: Option<i64>) -> Result<TokenInfo> {
Err(anyhow::anyhow!(i18n::t("relay_adapter.token_management_not_available")))
async fn create_token(
&self,
_station: &RelayStation,
_name: &str,
_quota: Option<i64>,
) -> Result<TokenInfo> {
Err(anyhow::anyhow!(i18n::t(
"relay_adapter.token_management_not_available"
)))
}
async fn update_token(&self, _station: &RelayStation, _token_id: &str, _name: Option<&str>, _quota: Option<i64>) -> Result<TokenInfo> {
Err(anyhow::anyhow!(i18n::t("relay_adapter.token_management_not_available")))
async fn update_token(
&self,
_station: &RelayStation,
_token_id: &str,
_name: Option<&str>,
_quota: Option<i64>,
) -> Result<TokenInfo> {
Err(anyhow::anyhow!(i18n::t(
"relay_adapter.token_management_not_available"
)))
}
async fn delete_token(&self, _station: &RelayStation, _token_id: &str) -> Result<String> {
Err(anyhow::anyhow!(i18n::t("relay_adapter.token_management_not_available")))
Err(anyhow::anyhow!(i18n::t(
"relay_adapter.token_management_not_available"
)))
}
}
@@ -349,7 +431,7 @@ pub fn create_adapter(adapter_type: &RelayStationAdapter) -> Box<dyn StationAdap
#[command]
pub async fn relay_station_get_info(
station_id: String,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<StationInfo, String> {
// 获取中转站配置
let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?;
@@ -358,8 +440,7 @@ pub async fn relay_station_get_info(
let adapter = create_adapter(&station.adapter);
// 获取站点信息
adapter.get_station_info(&station).await
.map_err(|e| {
adapter.get_station_info(&station).await.map_err(|e| {
log::error!("Failed to get station info: {}", e);
i18n::t("relay_adapter.get_info_failed")
})
@@ -370,12 +451,14 @@ pub async fn relay_station_get_info(
pub async fn relay_station_get_user_info(
station_id: String,
user_id: String,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<UserInfo, String> {
let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?;
let adapter = create_adapter(&station.adapter);
adapter.get_user_info(&station, &user_id).await
adapter
.get_user_info(&station, &user_id)
.await
.map_err(|e| {
log::error!("Failed to get user info: {}", e);
i18n::t("relay_adapter.get_user_info_failed")
@@ -386,13 +469,12 @@ pub async fn relay_station_get_user_info(
#[command]
pub async fn relay_station_test_connection(
station_id: String,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<ConnectionTestResult, String> {
let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?;
let adapter = create_adapter(&station.adapter);
adapter.test_connection(&station).await
.map_err(|e| {
adapter.test_connection(&station).await.map_err(|e| {
log::error!("Connection test failed: {}", e);
i18n::t("relay_adapter.connection_test_failed")
})
@@ -405,12 +487,14 @@ pub async fn relay_station_get_usage_logs(
user_id: String,
page: Option<usize>,
size: Option<usize>,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<Value, String> {
let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?;
let adapter = create_adapter(&station.adapter);
adapter.get_usage_logs(&station, &user_id, page, size).await
adapter
.get_usage_logs(&station, &user_id, page, size)
.await
.map_err(|e| {
log::error!("Failed to get usage logs: {}", e);
i18n::t("relay_adapter.get_usage_logs_failed")
@@ -423,12 +507,14 @@ pub async fn relay_station_list_tokens(
station_id: String,
page: Option<usize>,
size: Option<usize>,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<TokenPaginationResponse, String> {
let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?;
let adapter = create_adapter(&station.adapter);
adapter.list_tokens(&station, page, size).await
adapter
.list_tokens(&station, page, size)
.await
.map_err(|e| {
log::error!("Failed to list tokens: {}", e);
i18n::t("relay_adapter.list_tokens_failed")
@@ -441,12 +527,14 @@ pub async fn relay_station_create_token(
station_id: String,
name: String,
quota: Option<i64>,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<TokenInfo, String> {
let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?;
let adapter = create_adapter(&station.adapter);
adapter.create_token(&station, &name, quota).await
adapter
.create_token(&station, &name, quota)
.await
.map_err(|e| {
log::error!("Failed to create token: {}", e);
i18n::t("relay_adapter.create_token_failed")
@@ -460,12 +548,14 @@ pub async fn relay_station_update_token(
token_id: String,
name: Option<String>,
quota: Option<i64>,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<TokenInfo, String> {
let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?;
let adapter = create_adapter(&station.adapter);
adapter.update_token(&station, &token_id, name.as_deref(), quota).await
adapter
.update_token(&station, &token_id, name.as_deref(), quota)
.await
.map_err(|e| {
log::error!("Failed to update token: {}", e);
i18n::t("relay_adapter.update_token_failed")
@@ -477,12 +567,14 @@ pub async fn relay_station_update_token(
pub async fn relay_station_delete_token(
station_id: String,
token_id: String,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<String, String> {
let station = crate::commands::relay_stations::relay_station_get(station_id, db).await?;
let adapter = create_adapter(&station.adapter);
adapter.delete_token(&station, &token_id).await
adapter
.delete_token(&station, &token_id)
.await
.map_err(|e| {
log::error!("Failed to delete token: {}", e);
i18n::t("relay_adapter.delete_token_failed")
@@ -509,9 +601,10 @@ pub struct PackycodeUserQuota {
#[command]
pub async fn packycode_get_user_quota(
station_id: String,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<PackycodeUserQuota, String> {
let station = crate::commands::relay_stations::relay_station_get(station_id, db).await
let station = crate::commands::relay_stations::relay_station_get(station_id, db)
.await
.map_err(|e| format!("Failed to get station: {}", e))?;
if station.adapter.as_str() != "packycode" {
@@ -519,7 +612,8 @@ pub async fn packycode_get_user_quota(
}
// 根据服务类型构建不同的 URL
let url = if station.api_url.contains("share-api") || station.api_url.contains("share.packycode") {
let url =
if station.api_url.contains("share-api") || station.api_url.contains("share.packycode") {
// 滴滴车服务
"https://share.packycode.com/api/backend/users/info"
} else {
@@ -564,7 +658,9 @@ pub async fn packycode_get_user_quota(
});
}
let data: Value = response.json().await
let data: Value = response
.json()
.await
.map_err(|e| format!("解析响应失败: {}", e))?;
// 辅助函数:将值转换为 f64
@@ -572,7 +668,9 @@ pub async fn packycode_get_user_quota(
if v.is_null() {
0.0
} else if v.is_string() {
v.as_str().and_then(|s| s.parse::<f64>().ok()).unwrap_or(0.0)
v.as_str()
.and_then(|s| s.parse::<f64>().ok())
.unwrap_or(0.0)
} else if v.is_f64() {
v.as_f64().unwrap_or(0.0)
} else if v.is_i64() {
@@ -589,20 +687,23 @@ pub async fn packycode_get_user_quota(
monthly_spent_usd: to_f64(data.get("monthly_spent_usd").unwrap_or(&Value::Null)),
balance_usd: to_f64(data.get("balance_usd").unwrap_or(&Value::Null)),
total_spent_usd: to_f64(data.get("total_spent_usd").unwrap_or(&Value::Null)),
plan_type: data.get("plan_type")
plan_type: data
.get("plan_type")
.and_then(|v| v.as_str())
.unwrap_or("basic")
.to_string(),
plan_expires_at: data.get("plan_expires_at")
plan_expires_at: data
.get("plan_expires_at")
.and_then(|v| v.as_str())
.map(|s| s.to_string()),
username: data.get("username")
username: data
.get("username")
.and_then(|v| v.as_str())
.map(|s| s.to_string()),
email: data.get("email")
email: data
.get("email")
.and_then(|v| v.as_str())
.map(|s| s.to_string()),
opus_enabled: data.get("opus_enabled")
.and_then(|v| v.as_bool()),
opus_enabled: data.get("opus_enabled").and_then(|v| v.as_bool()),
})
}

View File

@@ -1,14 +1,14 @@
use anyhow::Result;
use chrono::Utc;
use rusqlite::{params, Connection, OptionalExtension, Row};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use tauri::{command, State};
use anyhow::Result;
use chrono::Utc;
use rusqlite::{params, Connection, Row, OptionalExtension};
use uuid::Uuid;
use crate::claude_config;
use crate::commands::agents::AgentDb;
use crate::i18n;
use crate::claude_config;
/// 中转站适配器类型
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -153,18 +153,34 @@ impl RelayStation {
let auth_method_str: String = row.get("auth_method")?;
let adapter_config_str: Option<String> = row.get("adapter_config")?;
let adapter = serde_json::from_str(&format!("\"{}\"", adapter_str))
.map_err(|_| rusqlite::Error::InvalidColumnType(0, "adapter".to_string(), rusqlite::types::Type::Text))?;
let adapter = serde_json::from_str(&format!("\"{}\"", adapter_str)).map_err(|_| {
rusqlite::Error::InvalidColumnType(
0,
"adapter".to_string(),
rusqlite::types::Type::Text,
)
})?;
let auth_method = serde_json::from_str(&format!("\"{}\"", auth_method_str))
.map_err(|_| rusqlite::Error::InvalidColumnType(0, "auth_method".to_string(), rusqlite::types::Type::Text))?;
let auth_method =
serde_json::from_str(&format!("\"{}\"", auth_method_str)).map_err(|_| {
rusqlite::Error::InvalidColumnType(
0,
"auth_method".to_string(),
rusqlite::types::Type::Text,
)
})?;
let adapter_config = if let Some(config_str) = adapter_config_str {
if config_str.trim().is_empty() {
None
} else {
Some(serde_json::from_str(&config_str)
.map_err(|_| rusqlite::Error::InvalidColumnType(0, "adapter_config".to_string(), rusqlite::types::Type::Text))?)
Some(serde_json::from_str(&config_str).map_err(|_| {
rusqlite::Error::InvalidColumnType(
0,
"adapter_config".to_string(),
rusqlite::types::Type::Text,
)
})?)
}
} else {
None
@@ -253,13 +269,15 @@ pub async fn relay_stations_list(db: State<'_, AgentDb>) -> Result<Vec<RelayStat
[],
);
let mut stmt = conn.prepare("SELECT * FROM relay_stations ORDER BY display_order ASC, created_at DESC")
let mut stmt = conn
.prepare("SELECT * FROM relay_stations ORDER BY display_order ASC, created_at DESC")
.map_err(|e| {
log::error!("Failed to prepare statement: {}", e);
i18n::t("database.query_failed")
})?;
let stations = stmt.query_map([], |row| RelayStation::from_row(row))
let stations = stmt
.query_map([], |row| RelayStation::from_row(row))
.map_err(|e| {
log::error!("Failed to query relay stations: {}", e);
i18n::t("database.query_failed")
@@ -276,22 +294,21 @@ pub async fn relay_stations_list(db: State<'_, AgentDb>) -> Result<Vec<RelayStat
/// 获取单个中转站
#[command]
pub async fn relay_station_get(
id: String,
db: State<'_, AgentDb>
) -> Result<RelayStation, String> {
pub async fn relay_station_get(id: String, db: State<'_, AgentDb>) -> Result<RelayStation, String> {
let conn = db.0.lock().map_err(|e| {
log::error!("Failed to acquire database lock: {}", e);
i18n::t("database.lock_failed")
})?;
let mut stmt = conn.prepare("SELECT * FROM relay_stations WHERE id = ?1")
let mut stmt = conn
.prepare("SELECT * FROM relay_stations WHERE id = ?1")
.map_err(|e| {
log::error!("Failed to prepare statement: {}", e);
i18n::t("database.query_failed")
})?;
let station = stmt.query_row(params![id], |row| RelayStation::from_row(row))
let station = stmt
.query_row(params![id], |row| RelayStation::from_row(row))
.map_err(|e| {
log::error!("Failed to get relay station {}: {}", id, e);
i18n::t("relay_station.not_found")
@@ -305,7 +322,7 @@ pub async fn relay_station_get(
#[command]
pub async fn relay_station_create(
request: CreateRelayStationRequest,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<RelayStation, String> {
let conn = db.0.lock().map_err(|e| {
log::error!("Failed to acquire database lock: {}", e);
@@ -326,23 +343,25 @@ pub async fn relay_station_create(
let adapter_str = serde_json::to_string(&request.adapter)
.map_err(|_| i18n::t("relay_station.invalid_adapter"))?
.trim_matches('"').to_string();
.trim_matches('"')
.to_string();
let auth_method_str = serde_json::to_string(&request.auth_method)
.map_err(|_| i18n::t("relay_station.invalid_auth_method"))?
.trim_matches('"').to_string();
.trim_matches('"')
.to_string();
let adapter_config_str = request.adapter_config.as_ref()
let adapter_config_str = request
.adapter_config
.as_ref()
.map(|config| serde_json::to_string(config))
.transpose()
.map_err(|_| i18n::t("relay_station.invalid_config"))?;
// 如果要启用这个新中转站,先禁用所有其他中转站
if request.enabled {
conn.execute(
"UPDATE relay_stations SET enabled = 0",
[],
).map_err(|e| {
conn.execute("UPDATE relay_stations SET enabled = 0", [])
.map_err(|e| {
log::error!("Failed to disable other relay stations: {}", e);
i18n::t("relay_station.create_failed")
})?;
@@ -397,7 +416,7 @@ pub async fn relay_station_create(
#[command]
pub async fn relay_station_update(
request: UpdateRelayStationRequest,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<RelayStation, String> {
let conn = db.0.lock().map_err(|e| {
log::error!("Failed to acquire database lock: {}", e);
@@ -411,13 +430,17 @@ pub async fn relay_station_update(
let adapter_str = serde_json::to_string(&request.adapter)
.map_err(|_| i18n::t("relay_station.invalid_adapter"))?
.trim_matches('"').to_string();
.trim_matches('"')
.to_string();
let auth_method_str = serde_json::to_string(&request.auth_method)
.map_err(|_| i18n::t("relay_station.invalid_auth_method"))?
.trim_matches('"').to_string();
.trim_matches('"')
.to_string();
let adapter_config_str = request.adapter_config.as_ref()
let adapter_config_str = request
.adapter_config
.as_ref()
.map(|config| serde_json::to_string(config))
.transpose()
.map_err(|_| i18n::t("relay_station.invalid_config"))?;
@@ -427,13 +450,15 @@ pub async fn relay_station_update(
conn.execute(
"UPDATE relay_stations SET enabled = 0 WHERE id != ?1",
params![request.id],
).map_err(|e| {
)
.map_err(|e| {
log::error!("Failed to disable other relay stations: {}", e);
i18n::t("relay_station.update_failed")
})?;
}
let rows_affected = conn.execute(
let rows_affected = conn
.execute(
r#"
UPDATE relay_stations
SET name = ?2, description = ?3, api_url = ?4, adapter = ?5, auth_method = ?6,
@@ -453,7 +478,8 @@ pub async fn relay_station_update(
if request.enabled { 1 } else { 0 },
now
],
).map_err(|e| {
)
.map_err(|e| {
log::error!("Failed to update relay station: {}", e);
i18n::t("relay_station.update_failed")
})?;
@@ -484,16 +510,14 @@ pub async fn relay_station_update(
/// 删除中转站
#[command]
pub async fn relay_station_delete(
id: String,
db: State<'_, AgentDb>
) -> Result<String, String> {
pub async fn relay_station_delete(id: String, db: State<'_, AgentDb>) -> Result<String, String> {
let conn = db.0.lock().map_err(|e| {
log::error!("Failed to acquire database lock: {}", e);
i18n::t("database.lock_failed")
})?;
let rows_affected = conn.execute("DELETE FROM relay_stations WHERE id = ?1", params![id])
let rows_affected = conn
.execute("DELETE FROM relay_stations WHERE id = ?1", params![id])
.map_err(|e| {
log::error!("Failed to delete relay station: {}", e);
i18n::t("relay_station.delete_failed")
@@ -512,7 +536,7 @@ pub async fn relay_station_delete(
pub async fn relay_station_toggle_enable(
id: String,
enabled: bool,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<String, String> {
let conn = db.0.lock().map_err(|e| {
log::error!("Failed to acquire database lock: {}", e);
@@ -526,7 +550,8 @@ pub async fn relay_station_toggle_enable(
conn.execute(
"UPDATE relay_stations SET enabled = 0, updated_at = ?1 WHERE id != ?2",
params![now, id],
).map_err(|e| {
)
.map_err(|e| {
log::error!("Failed to disable other relay stations: {}", e);
i18n::t("relay_station.update_failed")
})?;
@@ -549,10 +574,12 @@ pub async fn relay_station_toggle_enable(
}
// 更新目标中转站的启用状态
let rows_affected = conn.execute(
let rows_affected = conn
.execute(
"UPDATE relay_stations SET enabled = ?1, updated_at = ?2 WHERE id = ?3",
params![if enabled { 1 } else { 0 }, now, id],
).map_err(|e| {
)
.map_err(|e| {
log::error!("Failed to toggle relay station enable status: {}", e);
i18n::t("relay_station.update_failed")
})?;
@@ -571,16 +598,16 @@ pub async fn relay_station_toggle_enable(
/// 内部方法:获取单个中转站
fn relay_station_get_internal(conn: &Connection, id: &str) -> Result<RelayStation, String> {
let mut stmt = conn.prepare(
"SELECT * FROM relay_stations WHERE id = ?1"
).map_err(|e| {
let mut stmt = conn
.prepare("SELECT * FROM relay_stations WHERE id = ?1")
.map_err(|e| {
log::error!("Failed to prepare statement: {}", e);
i18n::t("database.query_failed")
})?;
let station = stmt.query_row(params![id], |row| {
RelayStation::from_row(row)
}).map_err(|e| {
let station = stmt
.query_row(params![id], |row| RelayStation::from_row(row))
.map_err(|e| {
log::error!("Failed to get relay station: {}", e);
i18n::t("relay_station.not_found")
})?;
@@ -589,7 +616,11 @@ fn relay_station_get_internal(conn: &Connection, id: &str) -> Result<RelayStatio
}
/// 输入验证
fn validate_relay_station_request(name: &str, api_url: &str, system_token: &str) -> Result<(), String> {
fn validate_relay_station_request(
name: &str,
api_url: &str,
system_token: &str,
) -> Result<(), String> {
if name.trim().is_empty() {
return Err(i18n::t("relay_station.name_required"));
}
@@ -599,12 +630,18 @@ fn validate_relay_station_request(name: &str, api_url: &str, system_token: &str)
}
// 验证 URL 格式
let parsed_url = url::Url::parse(api_url)
.map_err(|_| i18n::t("relay_station.invalid_url"))?;
let parsed_url = url::Url::parse(api_url).map_err(|_| i18n::t("relay_station.invalid_url"))?;
// 允许本地开发环境使用 HTTP
let is_localhost = parsed_url.host_str()
.map(|host| host == "localhost" || host == "127.0.0.1" || host == "::1" || host.starts_with("192.168.") || host.starts_with("10."))
let is_localhost = parsed_url
.host_str()
.map(|host| {
host == "localhost"
|| host == "127.0.0.1"
|| host == "::1"
|| host.starts_with("192.168.")
|| host.starts_with("10.")
})
.unwrap_or(false);
// 非本地环境必须使用 HTTPS
@@ -621,7 +658,10 @@ fn validate_relay_station_request(name: &str, api_url: &str, system_token: &str)
}
// 检查 Token 是否包含特殊字符
if system_token.chars().any(|c| c.is_whitespace() || c.is_control()) {
if system_token
.chars()
.any(|c| c.is_whitespace() || c.is_control())
{
return Err(i18n::t("relay_station.token_invalid_chars"));
}
@@ -634,31 +674,30 @@ pub fn mask_token(token: &str) -> String {
if token.len() <= 8 {
"*".repeat(token.len())
} else {
format!("{}...{}", &token[..4], &token[token.len()-4..])
format!("{}...{}", &token[..4], &token[token.len() - 4..])
}
}
/// 手动同步中转站配置到 Claude 配置文件
#[command]
pub async fn relay_station_sync_config(
db: State<'_, AgentDb>
) -> Result<String, String> {
pub async fn relay_station_sync_config(db: State<'_, AgentDb>) -> Result<String, String> {
let conn = db.0.lock().map_err(|e| {
log::error!("Failed to acquire database lock: {}", e);
i18n::t("database.lock_failed")
})?;
// 查找当前启用的中转站
let mut stmt = conn.prepare(
"SELECT * FROM relay_stations WHERE enabled = 1 LIMIT 1"
).map_err(|e| {
let mut stmt = conn
.prepare("SELECT * FROM relay_stations WHERE enabled = 1 LIMIT 1")
.map_err(|e| {
log::error!("Failed to prepare statement: {}", e);
i18n::t("database.query_failed")
})?;
let station_opt = stmt.query_row([], |row| {
RelayStation::from_row(row)
}).optional().map_err(|e| {
let station_opt = stmt
.query_row([], |row| RelayStation::from_row(row))
.optional()
.map_err(|e| {
log::error!("Failed to query enabled relay station: {}", e);
i18n::t("database.query_failed")
})?;
@@ -668,8 +707,14 @@ pub async fn relay_station_sync_config(
claude_config::apply_relay_station_to_config(&station)
.map_err(|e| format!("配置同步失败: {}", e))?;
log::info!("Synced relay station {} config to Claude settings", station.name);
Ok(format!("已同步中转站 {} 的配置到 Claude 设置", station.name))
log::info!(
"Synced relay station {} config to Claude settings",
station.name
);
Ok(format!(
"已同步中转站 {} 的配置到 Claude 设置",
station.name
))
} else {
// 没有启用的中转站,清除配置
claude_config::clear_relay_station_from_config()
@@ -683,8 +728,7 @@ pub async fn relay_station_sync_config(
/// 恢复 Claude 配置备份
#[command]
pub async fn relay_station_restore_config() -> Result<String, String> {
claude_config::restore_claude_config()
.map_err(|e| format!("恢复配置失败: {}", e))?;
claude_config::restore_claude_config().map_err(|e| format!("恢复配置失败: {}", e))?;
log::info!("Restored Claude config from backup");
Ok("已从备份恢复 Claude 配置".to_string())
@@ -697,16 +741,17 @@ pub async fn relay_station_get_current_config() -> Result<HashMap<String, Option
config.insert(
"api_url".to_string(),
claude_config::get_current_api_url().unwrap_or(None)
claude_config::get_current_api_url().unwrap_or(None),
);
config.insert(
"api_token".to_string(),
claude_config::get_current_api_token().unwrap_or(None)
claude_config::get_current_api_token()
.unwrap_or(None)
.map(|token: String| {
// 脱敏显示 token
mask_token(&token)
})
}),
);
Ok(config)
@@ -726,13 +771,15 @@ pub async fn relay_stations_export(db: State<'_, AgentDb>) -> Result<Vec<RelaySt
i18n::t("database.init_failed")
})?;
let mut stmt = conn.prepare("SELECT * FROM relay_stations ORDER BY created_at DESC")
let mut stmt = conn
.prepare("SELECT * FROM relay_stations ORDER BY created_at DESC")
.map_err(|e| {
log::error!("Failed to prepare statement: {}", e);
i18n::t("database.query_failed")
})?;
let stations = stmt.query_map([], |row| RelayStation::from_row(row))
let stations = stmt
.query_map([], |row| RelayStation::from_row(row))
.map_err(|e| {
log::error!("Failed to query relay stations: {}", e);
i18n::t("database.query_failed")
@@ -767,7 +814,7 @@ pub struct ImportRelayStationsRequest {
#[command]
pub async fn relay_stations_import(
request: ImportRelayStationsRequest,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<ImportResult, String> {
let mut conn = db.0.lock().map_err(|e| {
log::error!("Failed to acquire database lock: {}", e);
@@ -788,8 +835,7 @@ pub async fn relay_stations_import(
// 如果需要清除现有配置
if request.clear_existing {
tx.execute("DELETE FROM relay_stations", [])
.map_err(|e| {
tx.execute("DELETE FROM relay_stations", []).map_err(|e| {
log::error!("Failed to clear existing relay stations: {}", e);
i18n::t("relay_station.clear_failed")
})?;
@@ -798,13 +844,15 @@ pub async fn relay_stations_import(
// 获取现有的中转站列表(用于重复检查)
let existing_stations: Vec<(String, String)> = if !request.clear_existing {
let mut stmt = tx.prepare("SELECT api_url, system_token FROM relay_stations")
let mut stmt = tx
.prepare("SELECT api_url, system_token FROM relay_stations")
.map_err(|e| {
log::error!("Failed to prepare statement: {}", e);
i18n::t("database.query_failed")
})?;
let stations_iter = stmt.query_map([], |row| {
let stations_iter = stmt
.query_map([], |row| {
Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?))
})
.map_err(|e| {
@@ -837,7 +885,11 @@ pub async fn relay_stations_import(
for station_request in request.stations {
// 验证输入
if let Err(e) = validate_relay_station_request(&station_request.name, &station_request.api_url, &station_request.system_token) {
if let Err(e) = validate_relay_station_request(
&station_request.name,
&station_request.api_url,
&station_request.system_token,
) {
log::warn!("Skipping invalid station {}: {}", station_request.name, e);
failed_count += 1;
continue;
@@ -849,7 +901,11 @@ pub async fn relay_stations_import(
});
if is_duplicate {
log::info!("Skipping duplicate station: {} ({})", station_request.name, station_request.api_url);
log::info!(
"Skipping duplicate station: {} ({})",
station_request.name,
station_request.api_url
);
skipped_count += 1;
continue;
}
@@ -858,13 +914,17 @@ pub async fn relay_stations_import(
let adapter_str = serde_json::to_string(&station_request.adapter)
.map_err(|_| i18n::t("relay_station.invalid_adapter"))?
.trim_matches('"').to_string();
.trim_matches('"')
.to_string();
let auth_method_str = serde_json::to_string(&station_request.auth_method)
.map_err(|_| i18n::t("relay_station.invalid_auth_method"))?
.trim_matches('"').to_string();
.trim_matches('"')
.to_string();
let adapter_config_str = station_request.adapter_config.as_ref()
let adapter_config_str = station_request
.adapter_config
.as_ref()
.map(|config| serde_json::to_string(config))
.transpose()
.map_err(|_| i18n::t("relay_station.invalid_config"))?;
@@ -925,7 +985,7 @@ pub async fn relay_stations_import(
#[command]
pub async fn relay_station_update_order(
station_ids: Vec<String>,
db: State<'_, AgentDb>
db: State<'_, AgentDb>,
) -> Result<(), String> {
let conn = db.0.lock().map_err(|e| {
log::error!("Failed to acquire database lock: {}", e);
@@ -943,7 +1003,8 @@ pub async fn relay_station_update_order(
tx.execute(
"UPDATE relay_stations SET display_order = ?1, updated_at = ?2 WHERE id = ?3",
params![index as i32, Utc::now().timestamp(), station_id],
).map_err(|e| {
)
.map_err(|e| {
log::error!("Failed to update station order: {}", e);
i18n::t("database.update_failed")
})?;
@@ -955,6 +1016,9 @@ pub async fn relay_station_update_order(
i18n::t("database.transaction_failed")
})?;
log::info!("Updated display order for {} relay stations", station_ids.len());
log::info!(
"Updated display order for {} relay stations",
station_ids.len()
);
Ok(())
}

View File

@@ -112,16 +112,11 @@ fn extract_command_info(file_path: &Path, base_path: &Path) -> Result<(String, O
}
/// Load a single command from a markdown file
fn load_command_from_file(
file_path: &Path,
base_path: &Path,
scope: &str,
) -> Result<SlashCommand> {
fn load_command_from_file(file_path: &Path, base_path: &Path, scope: &str) -> Result<SlashCommand> {
debug!("Loading command from: {:?}", file_path);
// Read file content
let content = fs::read_to_string(file_path)
.context("Failed to read command file")?;
let content = fs::read_to_string(file_path).context("Failed to read command file")?;
// Parse frontmatter
let (frontmatter, body) = parse_markdown_with_frontmatter(&content)?;
@@ -136,7 +131,11 @@ fn load_command_from_file(
};
// Generate unique ID
let id = format!("{}-{}", scope, file_path.to_string_lossy().replace('/', "-"));
let id = format!(
"{}-{}",
scope,
file_path.to_string_lossy().replace('/', "-")
);
// Check for special content
let has_bash_commands = body.contains("!`");
@@ -376,8 +375,7 @@ pub async fn slash_command_save(
}
// Create directories if needed
fs::create_dir_all(&file_path)
.map_err(|e| format!("Failed to create directories: {}", e))?;
fs::create_dir_all(&file_path).map_err(|e| format!("Failed to create directories: {}", e))?;
// Add filename
file_path = file_path.join(format!("{}.md", name));
@@ -416,7 +414,10 @@ pub async fn slash_command_save(
/// Delete a slash command
#[tauri::command]
pub async fn slash_command_delete(command_id: String, project_path: Option<String>) -> Result<String, String> {
pub async fn slash_command_delete(
command_id: String,
project_path: Option<String>,
) -> Result<String, String> {
info!("Deleting slash command: {}", command_id);
// First, we need to determine if this is a project command by parsing the ID

View File

@@ -1,9 +1,9 @@
use anyhow::{Context, Result};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;
use tauri::AppHandle;
use chrono::{DateTime, Utc};
use uuid::Uuid;
/// 智能会话结果
@@ -106,8 +106,7 @@ fn get_config_path() -> Result<PathBuf> {
.context("Failed to get home directory")?
.join(".claudia");
fs::create_dir_all(&claudia_dir)
.context("Failed to create .claudia directory")?;
fs::create_dir_all(&claudia_dir).context("Failed to create .claudia directory")?;
Ok(claudia_dir.join("smart_sessions_config.json"))
}
@@ -122,11 +121,11 @@ pub fn load_smart_session_config() -> Result<SmartSessionConfig> {
return Ok(default_config);
}
let config_content = fs::read_to_string(&config_path)
.context("Failed to read smart session config")?;
let config_content =
fs::read_to_string(&config_path).context("Failed to read smart session config")?;
let config: SmartSessionConfig = serde_json::from_str(&config_content)
.context("Failed to parse smart session config")?;
let config: SmartSessionConfig =
serde_json::from_str(&config_content).context("Failed to parse smart session config")?;
Ok(config)
}
@@ -135,11 +134,10 @@ pub fn load_smart_session_config() -> Result<SmartSessionConfig> {
pub fn save_smart_session_config(config: &SmartSessionConfig) -> Result<()> {
let config_path = get_config_path()?;
let config_content = serde_json::to_string_pretty(config)
.context("Failed to serialize smart session config")?;
let config_content =
serde_json::to_string_pretty(config).context("Failed to serialize smart session config")?;
fs::write(&config_path, config_content)
.context("Failed to write smart session config")?;
fs::write(&config_path, config_content).context("Failed to write smart session config")?;
Ok(())
}
@@ -151,13 +149,11 @@ pub fn generate_smart_session_path(
) -> Result<PathBuf> {
let timestamp = chrono::Utc::now();
let session_name = session_name.unwrap_or_else(|| {
match config.naming_pattern.as_str() {
let session_name = session_name.unwrap_or_else(|| match config.naming_pattern.as_str() {
"chat-{timestamp}" => format!("chat-{}", timestamp.format("%Y-%m-%d-%H%M%S")),
"session-{date}" => format!("session-{}", timestamp.format("%Y-%m-%d")),
"conversation-{datetime}" => format!("conversation-{}", timestamp.format("%Y%m%d_%H%M%S")),
_ => format!("chat-{}", timestamp.format("%Y-%m-%d-%H%M%S")),
}
});
let session_path = config.base_directory.join(&session_name);
@@ -177,13 +173,11 @@ pub fn create_smart_session_environment(session_path: &PathBuf) -> Result<()> {
let config = load_smart_session_config()?;
// 创建主目录
fs::create_dir_all(session_path)
.context("Failed to create smart session directory")?;
fs::create_dir_all(session_path).context("Failed to create smart session directory")?;
// 创建 .claude 子目录
let claude_dir = session_path.join(".claude");
fs::create_dir_all(&claude_dir)
.context("Failed to create .claude directory")?;
fs::create_dir_all(&claude_dir).context("Failed to create .claude directory")?;
// 创建基础 Claude 设置文件
let claude_settings = serde_json::json!({
@@ -194,7 +188,10 @@ pub fn create_smart_session_environment(session_path: &PathBuf) -> Result<()> {
});
let settings_path = claude_dir.join("settings.json");
fs::write(&settings_path, serde_json::to_string_pretty(&claude_settings)?)
fs::write(
&settings_path,
serde_json::to_string_pretty(&claude_settings)?,
)
.context("Failed to write Claude settings")?;
// 创建模板文件
@@ -211,7 +208,8 @@ pub fn create_smart_session_environment(session_path: &PathBuf) -> Result<()> {
}
// 替换模板变量
let content = template.content
let content = template
.content
.replace("{session_id}", &session_id)
.replace("{created_at}", &created_at)
.replace("{project_path}", &session_path.to_string_lossy());
@@ -229,7 +227,10 @@ pub fn create_smart_session_environment(session_path: &PathBuf) -> Result<()> {
}
}
log::info!("Created smart session environment at: {}", session_path.display());
log::info!(
"Created smart session environment at: {}",
session_path.display()
);
Ok(())
}
@@ -239,8 +240,7 @@ fn get_sessions_history_path() -> Result<PathBuf> {
.context("Failed to get home directory")?
.join(".claudia");
fs::create_dir_all(&claudia_dir)
.context("Failed to create .claudia directory")?;
fs::create_dir_all(&claudia_dir).context("Failed to create .claudia directory")?;
Ok(claudia_dir.join("smart_sessions_history.json"))
}
@@ -266,8 +266,8 @@ pub fn save_smart_session_record(session_path: &PathBuf) -> Result<String> {
let history_path = get_sessions_history_path()?;
let mut sessions: Vec<SmartSession> = if history_path.exists() {
let content = fs::read_to_string(&history_path)
.context("Failed to read sessions history")?;
let content =
fs::read_to_string(&history_path).context("Failed to read sessions history")?;
serde_json::from_str(&content).unwrap_or_default()
} else {
Vec::new()
@@ -275,11 +275,10 @@ pub fn save_smart_session_record(session_path: &PathBuf) -> Result<String> {
sessions.push(session);
let history_content = serde_json::to_string_pretty(&sessions)
.context("Failed to serialize sessions history")?;
let history_content =
serde_json::to_string_pretty(&sessions).context("Failed to serialize sessions history")?;
fs::write(&history_path, history_content)
.context("Failed to write sessions history")?;
fs::write(&history_path, history_content).context("Failed to write sessions history")?;
Ok(session_id)
}
@@ -292,11 +291,10 @@ pub fn list_smart_sessions() -> Result<Vec<SmartSession>> {
return Ok(Vec::new());
}
let content = fs::read_to_string(&history_path)
.context("Failed to read sessions history")?;
let content = fs::read_to_string(&history_path).context("Failed to read sessions history")?;
let sessions: Vec<SmartSession> = serde_json::from_str(&content)
.context("Failed to parse sessions history")?;
let sessions: Vec<SmartSession> =
serde_json::from_str(&content).context("Failed to parse sessions history")?;
// 过滤仍然存在的会话
let existing_sessions: Vec<SmartSession> = sessions
@@ -329,7 +327,11 @@ pub fn cleanup_old_smart_sessions(days: u32) -> Result<u32> {
let session_path = PathBuf::from(&session.project_path);
if session_path.exists() {
if let Err(e) = fs::remove_dir_all(&session_path) {
log::warn!("Failed to remove session directory {}: {}", session_path.display(), e);
log::warn!(
"Failed to remove session directory {}: {}",
session_path.display(),
e
);
} else {
cleaned_count += 1;
log::info!("Cleaned up expired session: {}", session.display_name);
@@ -363,8 +365,8 @@ pub async fn create_smart_quick_start_session(
) -> Result<SmartSessionResult, String> {
log::info!("Creating smart quick start session: {:?}", session_name);
let config = load_smart_session_config()
.map_err(|e| format!("Failed to load config: {}", e))?;
let config =
load_smart_session_config().map_err(|e| format!("Failed to load config: {}", e))?;
if !config.enabled {
return Err("Smart sessions are disabled".to_string());
@@ -396,22 +398,22 @@ pub async fn create_smart_quick_start_session(
session_type: "smart".to_string(),
};
log::info!("Smart session created successfully: {}", result.project_path);
log::info!(
"Smart session created successfully: {}",
result.project_path
);
Ok(result)
}
/// 获取智能会话配置
#[tauri::command]
pub async fn get_smart_session_config() -> Result<SmartSessionConfig, String> {
load_smart_session_config()
.map_err(|e| format!("Failed to load smart session config: {}", e))
load_smart_session_config().map_err(|e| format!("Failed to load smart session config: {}", e))
}
/// 更新智能会话配置
#[tauri::command]
pub async fn update_smart_session_config(
config: SmartSessionConfig,
) -> Result<(), String> {
pub async fn update_smart_session_config(config: SmartSessionConfig) -> Result<(), String> {
save_smart_session_config(&config)
.map_err(|e| format!("Failed to save smart session config: {}", e))
}
@@ -419,20 +421,18 @@ pub async fn update_smart_session_config(
/// 列出智能会话
#[tauri::command]
pub async fn list_smart_sessions_command() -> Result<Vec<SmartSession>, String> {
list_smart_sessions()
.map_err(|e| format!("Failed to list smart sessions: {}", e))
list_smart_sessions().map_err(|e| format!("Failed to list smart sessions: {}", e))
}
/// 切换智能会话模式
#[tauri::command]
pub async fn toggle_smart_session_mode(enabled: bool) -> Result<(), String> {
let mut config = load_smart_session_config()
.map_err(|e| format!("Failed to load config: {}", e))?;
let mut config =
load_smart_session_config().map_err(|e| format!("Failed to load config: {}", e))?;
config.enabled = enabled;
save_smart_session_config(&config)
.map_err(|e| format!("Failed to save config: {}", e))?;
save_smart_session_config(&config).map_err(|e| format!("Failed to save config: {}", e))?;
log::info!("Smart session mode toggled: {}", enabled);
Ok(())
@@ -441,6 +441,5 @@ pub async fn toggle_smart_session_mode(enabled: bool) -> Result<(), String> {
/// 清理过期智能会话
#[tauri::command]
pub async fn cleanup_old_smart_sessions_command(days: u32) -> Result<u32, String> {
cleanup_old_smart_sessions(days)
.map_err(|e| format!("Failed to cleanup old sessions: {}", e))
cleanup_old_smart_sessions(days).map_err(|e| format!("Failed to cleanup old sessions: {}", e))
}

View File

@@ -1,10 +1,10 @@
use super::agents::AgentDb;
use anyhow::Result;
use rusqlite::{params, Connection, Result as SqliteResult, types::ValueRef};
use rusqlite::{params, types::ValueRef, Connection, Result as SqliteResult};
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value as JsonValue};
use std::collections::HashMap;
use tauri::{AppHandle, Manager, State};
use super::agents::AgentDb;
/// Represents metadata about a database table
#[derive(Debug, Serialize, Deserialize, Clone)]
@@ -69,11 +69,9 @@ pub async fn storage_list_tables(db: State<'_, AgentDb>) -> Result<Vec<TableInfo
for table_name in table_names {
// Get row count
let row_count: i64 = conn
.query_row(
&format!("SELECT COUNT(*) FROM {}", table_name),
[],
|row| row.get(0),
)
.query_row(&format!("SELECT COUNT(*) FROM {}", table_name), [], |row| {
row.get(0)
})
.unwrap_or(0);
// Get column information
@@ -162,7 +160,10 @@ pub async fn storage_read_table(
} else {
let where_clause = search_conditions.join(" OR ");
(
format!("SELECT * FROM {} WHERE {} LIMIT ? OFFSET ?", tableName, where_clause),
format!(
"SELECT * FROM {} WHERE {} LIMIT ? OFFSET ?",
tableName, where_clause
),
format!("SELECT COUNT(*) FROM {} WHERE {}", tableName, where_clause),
)
}
@@ -183,9 +184,7 @@ pub async fn storage_read_table(
let total_pages = (total_rows as f64 / pageSize as f64).ceil() as i64;
// Query data
let mut data_stmt = conn
.prepare(&query)
.map_err(|e| e.to_string())?;
let mut data_stmt = conn.prepare(&query).map_err(|e| e.to_string())?;
let rows: Vec<Map<String, JsonValue>> = data_stmt
.query_map(params![pageSize, offset], |row| {
@@ -203,7 +202,10 @@ pub async fn storage_read_table(
}
}
ValueRef::Text(s) => JsonValue::String(String::from_utf8_lossy(s).to_string()),
ValueRef::Blob(b) => JsonValue::String(base64::Engine::encode(&base64::engine::general_purpose::STANDARD, b)),
ValueRef::Blob(b) => JsonValue::String(base64::Engine::encode(
&base64::engine::general_purpose::STANDARD,
b,
)),
};
row_map.insert(col.name.clone(), value);
}
@@ -275,7 +277,10 @@ pub async fn storage_update_row(
}
// Execute update
conn.execute(&query, rusqlite::params_from_iter(params.iter().map(|p| p.as_ref())))
conn.execute(
&query,
rusqlite::params_from_iter(params.iter().map(|p| p.as_ref())),
)
.map_err(|e| format!("Failed to update row: {}", e))?;
Ok(())
@@ -316,7 +321,10 @@ pub async fn storage_delete_row(
.collect::<Result<Vec<_>, _>>()?;
// Execute delete
conn.execute(&query, rusqlite::params_from_iter(params.iter().map(|p| p.as_ref())))
conn.execute(
&query,
rusqlite::params_from_iter(params.iter().map(|p| p.as_ref())),
)
.map_err(|e| format!("Failed to delete row: {}", e))?;
Ok(())
@@ -339,14 +347,16 @@ pub async fn storage_insert_row(
// Build INSERT query
let columns: Vec<&String> = values.keys().collect();
let placeholders: Vec<String> = (1..=columns.len())
.map(|i| format!("?{}", i))
.collect();
let placeholders: Vec<String> = (1..=columns.len()).map(|i| format!("?{}", i)).collect();
let query = format!(
"INSERT INTO {} ({}) VALUES ({})",
tableName,
columns.iter().map(|c| c.as_str()).collect::<Vec<_>>().join(", "),
columns
.iter()
.map(|c| c.as_str())
.collect::<Vec<_>>()
.join(", "),
placeholders.join(", ")
);
@@ -357,7 +367,10 @@ pub async fn storage_insert_row(
.collect::<Result<Vec<_>, _>>()?;
// Execute insert
conn.execute(&query, rusqlite::params_from_iter(params.iter().map(|p| p.as_ref())))
conn.execute(
&query,
rusqlite::params_from_iter(params.iter().map(|p| p.as_ref())),
)
.map_err(|e| format!("Failed to insert row: {}", e))?;
Ok(conn.last_insert_rowid())
@@ -399,8 +412,13 @@ pub async fn storage_execute_sql(
JsonValue::String(f.to_string())
}
}
ValueRef::Text(s) => JsonValue::String(String::from_utf8_lossy(s).to_string()),
ValueRef::Blob(b) => JsonValue::String(base64::Engine::encode(&base64::engine::general_purpose::STANDARD, b)),
ValueRef::Text(s) => {
JsonValue::String(String::from_utf8_lossy(s).to_string())
}
ValueRef::Blob(b) => JsonValue::String(base64::Engine::encode(
&base64::engine::general_purpose::STANDARD,
b,
)),
};
row_values.push(value);
}
@@ -435,8 +453,7 @@ pub async fn storage_reset_database(app: AppHandle) -> Result<(), String> {
{
// Drop all existing tables within a scoped block
let db_state = app.state::<AgentDb>();
let conn = db_state.0.lock()
.map_err(|e| e.to_string())?;
let conn = db_state.0.lock().map_err(|e| e.to_string())?;
// Disable foreign key constraints temporarily to allow dropping tables
conn.execute("PRAGMA foreign_keys = OFF", [])
@@ -463,18 +480,15 @@ pub async fn storage_reset_database(app: AppHandle) -> Result<(), String> {
// Update the managed state with the new connection
{
let db_state = app.state::<AgentDb>();
let mut conn_guard = db_state.0.lock()
.map_err(|e| e.to_string())?;
let mut conn_guard = db_state.0.lock().map_err(|e| e.to_string())?;
*conn_guard = new_conn;
}
// Run VACUUM to optimize the database
{
let db_state = app.state::<AgentDb>();
let conn = db_state.0.lock()
.map_err(|e| e.to_string())?;
conn.execute("VACUUM", [])
.map_err(|e| e.to_string())?;
let conn = db_state.0.lock().map_err(|e| e.to_string())?;
conn.execute("VACUUM", []).map_err(|e| e.to_string())?;
}
Ok(())

View File

@@ -15,7 +15,11 @@ pub async fn flush_dns() -> Result<String, String> {
return Ok("DNS cache flushed".into());
} else {
let err = String::from_utf8_lossy(&output.stderr).to_string();
return Err(if err.is_empty() { "ipconfig /flushdns failed".into() } else { err });
return Err(if err.is_empty() {
"ipconfig /flushdns failed".into()
} else {
err
});
}
}
@@ -31,7 +35,11 @@ pub async fn flush_dns() -> Result<String, String> {
return Ok("DNS cache flushed".into());
} else {
let err = String::from_utf8_lossy(&output.stderr).to_string();
return Err(if err.is_empty() { "dscacheutil -flushcache failed".into() } else { err });
return Err(if err.is_empty() {
"dscacheutil -flushcache failed".into()
} else {
err
});
}
}
@@ -41,7 +49,13 @@ pub async fn flush_dns() -> Result<String, String> {
let attempts: Vec<(&str, Vec<&str>)> = vec![
("resolvectl", vec!["flush-caches"]),
("systemd-resolve", vec!["--flush-caches"]),
("sh", vec!["-c", "service nscd restart || service dnsmasq restart || rc-service nscd restart"]),
(
"sh",
vec![
"-c",
"service nscd restart || service dnsmasq restart || rc-service nscd restart",
],
),
];
for (cmd, args) in attempts {
@@ -59,4 +73,3 @@ pub async fn flush_dns() -> Result<String, String> {
Err("No supported DNS flush method succeeded on this Linux system".into())
}
}

View File

@@ -1,12 +1,12 @@
use std::collections::HashMap;
use std::sync::Arc;
use anyhow::Result;
use portable_pty::{native_pty_system, Child, CommandBuilder, MasterPty, PtySize};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::io::{Read, Write};
use std::sync::Arc;
use tauri::{AppHandle, Emitter, State};
use tokio::sync::Mutex;
use uuid::Uuid;
use anyhow::Result;
use portable_pty::{native_pty_system, CommandBuilder, PtySize, Child, MasterPty};
use std::io::{Read, Write};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TerminalSession {
@@ -35,11 +35,18 @@ pub async fn create_terminal_session(
) -> Result<String, String> {
let session_id = Uuid::new_v4().to_string();
log::info!("Creating terminal session: {} in {}", session_id, working_directory);
log::info!(
"Creating terminal session: {} in {}",
session_id,
working_directory
);
// Check if working directory exists
if !std::path::Path::new(&working_directory).exists() {
return Err(format!("Working directory does not exist: {}", working_directory));
return Err(format!(
"Working directory does not exist: {}",
working_directory
));
}
let session = TerminalSession {
@@ -53,12 +60,14 @@ pub async fn create_terminal_session(
let pty_system = native_pty_system();
// Create PTY pair with size
let pty_pair = pty_system.openpty(PtySize {
let pty_pair = pty_system
.openpty(PtySize {
rows: 30,
cols: 120,
pixel_width: 0,
pixel_height: 0,
}).map_err(|e| format!("Failed to create PTY: {}", e))?;
})
.map_err(|e| format!("Failed to create PTY: {}", e))?;
// Get shell command
let shell = get_default_shell();
@@ -105,40 +114,65 @@ pub async fn create_terminal_session(
// Unix-specific environment
cmd.env("TERM", "xterm-256color");
cmd.env("COLORTERM", "truecolor");
cmd.env("LANG", std::env::var("LANG").unwrap_or_else(|_| "en_US.UTF-8".to_string()));
cmd.env("LC_ALL", std::env::var("LC_ALL").unwrap_or_else(|_| "en_US.UTF-8".to_string()));
cmd.env("LC_CTYPE", std::env::var("LC_CTYPE").unwrap_or_else(|_| "en_US.UTF-8".to_string()));
cmd.env(
"LANG",
std::env::var("LANG").unwrap_or_else(|_| "en_US.UTF-8".to_string()),
);
cmd.env(
"LC_ALL",
std::env::var("LC_ALL").unwrap_or_else(|_| "en_US.UTF-8".to_string()),
);
cmd.env(
"LC_CTYPE",
std::env::var("LC_CTYPE").unwrap_or_else(|_| "en_US.UTF-8".to_string()),
);
// Inherit other Unix environment variables
for (key, value) in std::env::vars() {
if !key.starts_with("TERM") && !key.starts_with("COLORTERM") &&
!key.starts_with("LC_") && !key.starts_with("LANG") &&
!key.starts_with("TAURI_") && !key.starts_with("VITE_") {
if !key.starts_with("TERM")
&& !key.starts_with("COLORTERM")
&& !key.starts_with("LC_")
&& !key.starts_with("LANG")
&& !key.starts_with("TAURI_")
&& !key.starts_with("VITE_")
{
cmd.env(&key, &value);
}
}
}
// Spawn the shell process
let child = pty_pair.slave.spawn_command(cmd)
let child = pty_pair
.slave
.spawn_command(cmd)
.map_err(|e| format!("Failed to spawn shell: {}", e))?;
log::info!("Shell process spawned successfully for session: {}", session_id);
log::info!(
"Shell process spawned successfully for session: {}",
session_id
);
// Get writer for stdin
let writer = pty_pair.master.take_writer()
let writer = pty_pair
.master
.take_writer()
.map_err(|e| format!("Failed to get PTY writer: {}", e))?;
// Start reading output in background
let session_id_clone = session_id.clone();
let app_handle_clone = app_handle.clone();
let mut reader = pty_pair.master.try_clone_reader()
let mut reader = pty_pair
.master
.try_clone_reader()
.map_err(|e| format!("Failed to get PTY reader: {}", e))?;
// Spawn reader thread
std::thread::spawn(move || {
let mut buffer = [0u8; 4096];
log::info!("PTY reader thread started for session: {}", session_id_clone);
log::info!(
"PTY reader thread started for session: {}",
session_id_clone
);
loop {
match reader.read(&mut buffer) {
Ok(0) => {
@@ -147,16 +181,29 @@ pub async fn create_terminal_session(
}
Ok(n) => {
let data = String::from_utf8_lossy(&buffer[..n]).to_string();
log::debug!("PTY reader got {} bytes for session {}: {:?}", n, session_id_clone, data);
let _ = app_handle_clone.emit(&format!("terminal-output:{}", session_id_clone), &data);
log::debug!(
"PTY reader got {} bytes for session {}: {:?}",
n,
session_id_clone,
data
);
let _ = app_handle_clone
.emit(&format!("terminal-output:{}", session_id_clone), &data);
}
Err(e) => {
log::error!("Error reading PTY output for session {}: {}", session_id_clone, e);
log::error!(
"Error reading PTY output for session {}: {}",
session_id_clone,
e
);
break;
}
}
}
log::debug!("PTY reader thread finished for session: {}", session_id_clone);
log::debug!(
"PTY reader thread finished for session: {}",
session_id_clone
);
});
// Store the session with PTY writer, master PTY and child process
@@ -190,15 +237,20 @@ pub async fn send_terminal_input(
// Write to PTY
let mut writer = child.writer.lock().await;
writer.write_all(input.as_bytes())
writer
.write_all(input.as_bytes())
.map_err(|e| format!("Failed to write to terminal: {}", e))?;
writer.flush()
writer
.flush()
.map_err(|e| format!("Failed to flush terminal input: {}", e))?;
return Ok(());
}
}
Err(format!("Terminal session not found or not active: {}", session_id))
Err(format!(
"Terminal session not found or not active: {}",
session_id
))
}
/// Closes a terminal session
@@ -227,7 +279,8 @@ pub async fn list_terminal_sessions(
) -> Result<Vec<String>, String> {
let state = terminal_state.lock().await;
let sessions: Vec<String> = state.iter()
let sessions: Vec<String> = state
.iter()
.filter_map(|(id, (session, _))| {
if session.is_active {
Some(id.clone())
@@ -251,7 +304,10 @@ pub async fn resize_terminal(
// Note: With the current architecture, resize is not supported
// To support resize, we would need to keep a reference to the PTY master
// or use a different approach
log::warn!("Terminal resize not currently supported for session: {}", session_id);
log::warn!(
"Terminal resize not currently supported for session: {}",
session_id
);
Ok(())
}
@@ -288,9 +344,17 @@ pub async fn cleanup_terminal_sessions(
fn get_default_shell() -> String {
if cfg!(target_os = "windows") {
// Try PowerShell Core (pwsh) first, then Windows PowerShell, fallback to cmd
if std::process::Command::new("pwsh").arg("--version").output().is_ok() {
if std::process::Command::new("pwsh")
.arg("--version")
.output()
.is_ok()
{
"pwsh".to_string()
} else if std::process::Command::new("powershell").arg("-Version").output().is_ok() {
} else if std::process::Command::new("powershell")
.arg("-Version")
.output()
.is_ok()
{
"powershell".to_string()
} else {
"cmd.exe".to_string()

View File

@@ -152,40 +152,86 @@ fn calculate_cost(model: &str, usage: &UsageData) -> f64 {
// 独立的模型价格匹配函数,更精确的模型识别
fn match_model_prices(model_lower: &str) -> (f64, f64, f64, f64) {
// Claude Opus 4.1 (最新最强)
if model_lower.contains("opus") && (model_lower.contains("4-1") || model_lower.contains("4.1")) {
(OPUS_4_1_INPUT_PRICE, OPUS_4_1_OUTPUT_PRICE, OPUS_4_1_CACHE_WRITE_PRICE, OPUS_4_1_CACHE_READ_PRICE)
if model_lower.contains("opus") && (model_lower.contains("4-1") || model_lower.contains("4.1"))
{
(
OPUS_4_1_INPUT_PRICE,
OPUS_4_1_OUTPUT_PRICE,
OPUS_4_1_CACHE_WRITE_PRICE,
OPUS_4_1_CACHE_READ_PRICE,
)
}
// Claude Sonnet 4
else if model_lower.contains("sonnet") && (model_lower.contains("-4-") || model_lower.contains("sonnet-4")) {
(SONNET_4_INPUT_PRICE, SONNET_4_OUTPUT_PRICE, SONNET_4_CACHE_WRITE_PRICE, SONNET_4_CACHE_READ_PRICE)
else if model_lower.contains("sonnet")
&& (model_lower.contains("-4-") || model_lower.contains("sonnet-4"))
{
(
SONNET_4_INPUT_PRICE,
SONNET_4_OUTPUT_PRICE,
SONNET_4_CACHE_WRITE_PRICE,
SONNET_4_CACHE_READ_PRICE,
)
}
// Claude Haiku 3.5
else if model_lower.contains("haiku") {
(HAIKU_3_5_INPUT_PRICE, HAIKU_3_5_OUTPUT_PRICE, HAIKU_3_5_CACHE_WRITE_PRICE, HAIKU_3_5_CACHE_READ_PRICE)
(
HAIKU_3_5_INPUT_PRICE,
HAIKU_3_5_OUTPUT_PRICE,
HAIKU_3_5_CACHE_WRITE_PRICE,
HAIKU_3_5_CACHE_READ_PRICE,
)
}
// Claude 3.x Sonnet 系列3.7, 3.5
else if model_lower.contains("sonnet") &&
(model_lower.contains("3-7") || model_lower.contains("3.7") ||
model_lower.contains("3-5") || model_lower.contains("3.5")) {
(SONNET_3_INPUT_PRICE, SONNET_3_OUTPUT_PRICE, SONNET_3_CACHE_WRITE_PRICE, SONNET_3_CACHE_READ_PRICE)
else if model_lower.contains("sonnet")
&& (model_lower.contains("3-7")
|| model_lower.contains("3.7")
|| model_lower.contains("3-5")
|| model_lower.contains("3.5"))
{
(
SONNET_3_INPUT_PRICE,
SONNET_3_OUTPUT_PRICE,
SONNET_3_CACHE_WRITE_PRICE,
SONNET_3_CACHE_READ_PRICE,
)
}
// Claude 3 Opus (旧版)
else if model_lower.contains("opus") && model_lower.contains("3") {
(OPUS_3_INPUT_PRICE, OPUS_3_OUTPUT_PRICE, OPUS_3_CACHE_WRITE_PRICE, OPUS_3_CACHE_READ_PRICE)
(
OPUS_3_INPUT_PRICE,
OPUS_3_OUTPUT_PRICE,
OPUS_3_CACHE_WRITE_PRICE,
OPUS_3_CACHE_READ_PRICE,
)
}
// 默认 Sonnet未明确版本号时
else if model_lower.contains("sonnet") {
(SONNET_3_INPUT_PRICE, SONNET_3_OUTPUT_PRICE, SONNET_3_CACHE_WRITE_PRICE, SONNET_3_CACHE_READ_PRICE)
(
SONNET_3_INPUT_PRICE,
SONNET_3_OUTPUT_PRICE,
SONNET_3_CACHE_WRITE_PRICE,
SONNET_3_CACHE_READ_PRICE,
)
}
// 默认 Opus未明确版本号时假设是最新版
else if model_lower.contains("opus") {
(OPUS_4_1_INPUT_PRICE, OPUS_4_1_OUTPUT_PRICE, OPUS_4_1_CACHE_WRITE_PRICE, OPUS_4_1_CACHE_READ_PRICE)
(
OPUS_4_1_INPUT_PRICE,
OPUS_4_1_OUTPUT_PRICE,
OPUS_4_1_CACHE_WRITE_PRICE,
OPUS_4_1_CACHE_READ_PRICE,
)
}
// 未知模型
else {
log::warn!("Unknown model for cost calculation: {}", model_lower);
// 默认使用 Sonnet 3 的价格(保守估计)
(SONNET_3_INPUT_PRICE, SONNET_3_OUTPUT_PRICE, SONNET_3_CACHE_WRITE_PRICE, SONNET_3_CACHE_READ_PRICE)
(
SONNET_3_INPUT_PRICE,
SONNET_3_OUTPUT_PRICE,
SONNET_3_CACHE_WRITE_PRICE,
SONNET_3_CACHE_READ_PRICE,
)
}
}
@@ -236,7 +282,8 @@ pub fn parse_jsonl_file(
// 智能去重策略
let has_io_tokens = usage.input_tokens.unwrap_or(0) > 0
|| usage.output_tokens.unwrap_or(0) > 0;
let has_cache_tokens = usage.cache_creation_input_tokens.unwrap_or(0) > 0
let has_cache_tokens = usage.cache_creation_input_tokens.unwrap_or(0)
> 0
|| usage.cache_read_input_tokens.unwrap_or(0) > 0;
let should_skip = if has_io_tokens {
@@ -254,7 +301,9 @@ pub fn parse_jsonl_file(
}
} else if has_cache_tokens {
// 缓存令牌:使用 message_id + request_id 宽松去重
if let (Some(msg_id), Some(req_id)) = (&message.id, &entry.request_id) {
if let (Some(msg_id), Some(req_id)) =
(&message.id, &entry.request_id)
{
let unique_hash = format!("cache:{}:{}", msg_id, req_id);
if processed_hashes.contains(&unique_hash) {
true
@@ -287,9 +336,12 @@ pub fn parse_jsonl_file(
.unwrap_or_else(|| encoded_project_name.to_string());
// 转换时间戳为本地时间格式
let local_timestamp = if let Ok(dt) = DateTime::parse_from_rfc3339(&entry.timestamp) {
let local_timestamp =
if let Ok(dt) = DateTime::parse_from_rfc3339(&entry.timestamp) {
// 转换为本地时区并格式化为 ISO 格式
dt.with_timezone(&Local).format("%Y-%m-%d %H:%M:%S%.3f").to_string()
dt.with_timezone(&Local)
.format("%Y-%m-%d %H:%M:%S%.3f")
.to_string()
} else {
// 如果解析失败,保留原始时间戳
entry.timestamp.clone()
@@ -414,7 +466,9 @@ pub fn get_usage_stats(days: Option<u32>) -> Result<UsageStats, String> {
// 处理新的本地时间格式 "YYYY-MM-DD HH:MM:SS.sss"
let date = if e.timestamp.contains(' ') {
// 新格式:直接解析日期部分
e.timestamp.split(' ').next()
e.timestamp
.split(' ')
.next()
.and_then(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").ok())
} else if let Ok(dt) = DateTime::parse_from_rfc3339(&e.timestamp) {
// 旧格式RFC3339 格式
@@ -487,7 +541,12 @@ pub fn get_usage_stats(days: Option<u32>) -> Result<UsageStats, String> {
// 处理新的本地时间格式 "YYYY-MM-DD HH:MM:SS.sss"
let date = if entry.timestamp.contains(' ') {
// 新格式:直接提取日期部分
entry.timestamp.split(' ').next().unwrap_or(&entry.timestamp).to_string()
entry
.timestamp
.split(' ')
.next()
.unwrap_or(&entry.timestamp)
.to_string()
} else if let Ok(dt) = DateTime::parse_from_rfc3339(&entry.timestamp) {
// 旧格式RFC3339 格式
dt.with_timezone(&Local).date_naive().to_string()
@@ -631,7 +690,9 @@ pub fn get_usage_by_date_range(start_date: String, end_date: String) -> Result<U
// 处理新的本地时间格式 "YYYY-MM-DD HH:MM:SS.sss"
let date = if e.timestamp.contains(' ') {
// 新格式:直接解析日期部分
e.timestamp.split(' ').next()
e.timestamp
.split(' ')
.next()
.and_then(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").ok())
} else if let Ok(dt) = DateTime::parse_from_rfc3339(&e.timestamp) {
// 旧格式RFC3339 格式
@@ -716,7 +777,12 @@ pub fn get_usage_by_date_range(start_date: String, end_date: String) -> Result<U
// 处理新的本地时间格式 "YYYY-MM-DD HH:MM:SS.sss"
let date = if entry.timestamp.contains(' ') {
// 新格式:直接提取日期部分
entry.timestamp.split(' ').next().unwrap_or(&entry.timestamp).to_string()
entry
.timestamp
.split(' ')
.next()
.unwrap_or(&entry.timestamp)
.to_string()
} else if let Ok(dt) = DateTime::parse_from_rfc3339(&entry.timestamp) {
// 旧格式RFC3339 格式
dt.with_timezone(&Local).date_naive().to_string()
@@ -889,7 +955,9 @@ pub fn get_session_stats(
// 处理新的本地时间格式 "YYYY-MM-DD HH:MM:SS.sss"
let date = if e.timestamp.contains(' ') {
// 新格式:直接解析日期部分
e.timestamp.split(' ').next()
e.timestamp
.split(' ')
.next()
.and_then(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").ok())
} else if let Ok(dt) = DateTime::parse_from_rfc3339(&e.timestamp) {
// 旧格式RFC3339 格式

View File

@@ -9,8 +9,7 @@ use tauri::{command, State};
use walkdir::WalkDir;
use super::usage::{
UsageStats, ModelUsage, DailyUsage, ProjectUsage, UsageEntry,
parse_jsonl_file
parse_jsonl_file, DailyUsage, ModelUsage, ProjectUsage, UsageEntry, UsageStats,
};
#[derive(Default)]
@@ -100,18 +99,22 @@ fn get_file_mtime_ms(path: &Path) -> i64 {
}
fn get_file_size(path: &Path) -> i64 {
fs::metadata(path)
.map(|m| m.len() as i64)
.unwrap_or(0)
fs::metadata(path).map(|m| m.len() as i64).unwrap_or(0)
}
fn generate_unique_hash(entry: &UsageEntry, has_io_tokens: bool, has_cache_tokens: bool) -> String {
if has_io_tokens {
// For I/O tokens: use session_id + timestamp + model
format!("io:{}:{}:{}", entry.session_id, entry.timestamp, entry.model)
format!(
"io:{}:{}:{}",
entry.session_id, entry.timestamp, entry.model
)
} else if has_cache_tokens {
// For cache tokens: use timestamp + model + project
format!("cache:{}:{}:{}", entry.timestamp, entry.model, entry.project_path)
format!(
"cache:{}:{}:{}",
entry.timestamp, entry.model, entry.project_path
)
} else {
// Fallback
format!("other:{}:{}", entry.timestamp, entry.session_id)
@@ -173,12 +176,14 @@ pub async fn usage_scan_update(state: State<'_, UsageCacheState>) -> Result<Scan
.prepare("SELECT file_path, file_size, mtime_ms FROM scanned_files")
.map_err(|e| e.to_string())?;
let rows = stmt.query_map(params![], |row| {
let rows = stmt
.query_map(params![], |row| {
Ok((
row.get::<_, String>(0)?,
(row.get::<_, i64>(1)?, row.get::<_, i64>(2)?),
))
}).map_err(|e| e.to_string())?;
})
.map_err(|e| e.to_string())?;
for row in rows {
if let Ok((path, data)) = row {
@@ -210,7 +215,9 @@ pub async fn usage_scan_update(state: State<'_, UsageCacheState>) -> Result<Scan
let current_size = get_file_size(&path);
let current_mtime = get_file_mtime_ms(&path);
let needs_processing = if let Some((stored_size, stored_mtime)) = existing_files.get(&path_str) {
let needs_processing = if let Some((stored_size, stored_mtime)) =
existing_files.get(&path_str)
{
current_size != *stored_size || current_mtime != *stored_mtime
} else {
true // New file
@@ -292,9 +299,15 @@ pub async fn usage_scan_update(state: State<'_, UsageCacheState>) -> Result<Scan
// Remove entries for files that no longer exist
for (old_path, _) in existing_files {
if !all_current_files.contains(&old_path) {
tx.execute("DELETE FROM usage_entries WHERE file_path = ?1", params![old_path])
tx.execute(
"DELETE FROM usage_entries WHERE file_path = ?1",
params![old_path],
)
.map_err(|e| e.to_string())?;
tx.execute("DELETE FROM scanned_files WHERE file_path = ?1", params![old_path])
tx.execute(
"DELETE FROM scanned_files WHERE file_path = ?1",
params![old_path],
)
.map_err(|e| e.to_string())?;
}
}
@@ -344,8 +357,13 @@ pub async fn usage_get_stats_cached(
};
// Query total stats
let (total_cost, total_input, total_output, total_cache_creation, total_cache_read): (f64, i64, i64, i64, i64) =
if let Some(cutoff) = &date_filter {
let (total_cost, total_input, total_output, total_cache_creation, total_cache_read): (
f64,
i64,
i64,
i64,
i64,
) = if let Some(cutoff) = &date_filter {
conn.query_row(
"SELECT
COALESCE(SUM(cost), 0.0),
@@ -356,8 +374,17 @@ pub async fn usage_get_stats_cached(
FROM usage_entries
WHERE timestamp >= ?1",
params![cutoff],
|row| Ok((row.get(0)?, row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?)),
).map_err(|e| e.to_string())?
|row| {
Ok((
row.get(0)?,
row.get(1)?,
row.get(2)?,
row.get(3)?,
row.get(4)?,
))
},
)
.map_err(|e| e.to_string())?
} else {
conn.query_row(
"SELECT
@@ -368,8 +395,17 @@ pub async fn usage_get_stats_cached(
COALESCE(SUM(cache_read_tokens), 0)
FROM usage_entries",
params![],
|row| Ok((row.get(0)?, row.get(1)?, row.get(2)?, row.get(3)?, row.get(4)?)),
).map_err(|e| e.to_string())?
|row| {
Ok((
row.get(0)?,
row.get(1)?,
row.get(2)?,
row.get(3)?,
row.get(4)?,
))
},
)
.map_err(|e| e.to_string())?
};
let total_tokens = total_input + total_output + total_cache_creation + total_cache_read;
@@ -380,13 +416,15 @@ pub async fn usage_get_stats_cached(
"SELECT COUNT(DISTINCT session_id) FROM usage_entries WHERE timestamp >= ?1",
params![cutoff],
|row| row.get(0),
).map_err(|e| e.to_string())?
)
.map_err(|e| e.to_string())?
} else {
conn.query_row(
"SELECT COUNT(DISTINCT session_id) FROM usage_entries",
params![],
|row| row.get(0),
).map_err(|e| e.to_string())?
)
.map_err(|e| e.to_string())?
};
// Get stats by model
@@ -436,15 +474,19 @@ pub async fn usage_get_stats_cached(
};
let rows = if let Some(cutoff) = &date_filter {
stmt.query_map(params![cutoff], create_model_usage).map_err(|e| e.to_string())?
stmt.query_map(params![cutoff], create_model_usage)
.map_err(|e| e.to_string())?
} else {
stmt.query_map(params![], create_model_usage).map_err(|e| e.to_string())?
stmt.query_map(params![], create_model_usage)
.map_err(|e| e.to_string())?
};
for row in rows {
if let Ok(mut usage) = row {
usage.total_tokens = usage.input_tokens + usage.output_tokens +
usage.cache_creation_tokens + usage.cache_read_tokens;
usage.total_tokens = usage.input_tokens
+ usage.output_tokens
+ usage.cache_creation_tokens
+ usage.cache_read_tokens;
by_model.push(usage);
}
}
@@ -494,8 +536,10 @@ pub async fn usage_get_stats_cached(
Ok(DailyUsage {
date: row.get(0)?,
total_cost: row.get(1)?,
total_tokens: (row.get::<_, i64>(2)? + row.get::<_, i64>(3)? +
row.get::<_, i64>(4)? + row.get::<_, i64>(5)?) as u64,
total_tokens: (row.get::<_, i64>(2)?
+ row.get::<_, i64>(3)?
+ row.get::<_, i64>(4)?
+ row.get::<_, i64>(5)?) as u64,
input_tokens: row.get::<_, i64>(2)? as u64,
output_tokens: row.get::<_, i64>(3)? as u64,
cache_creation_tokens: row.get::<_, i64>(4)? as u64,
@@ -506,9 +550,11 @@ pub async fn usage_get_stats_cached(
};
let rows = if let Some(cutoff) = &date_filter {
stmt.query_map(params![cutoff], create_daily_usage).map_err(|e| e.to_string())?
stmt.query_map(params![cutoff], create_daily_usage)
.map_err(|e| e.to_string())?
} else {
stmt.query_map(params![], create_daily_usage).map_err(|e| e.to_string())?
stmt.query_map(params![], create_daily_usage)
.map_err(|e| e.to_string())?
};
for row in rows {
@@ -559,15 +605,18 @@ pub async fn usage_get_stats_cached(
};
let rows = if let Some(cutoff) = &date_filter {
stmt.query_map(params![cutoff], create_project_usage).map_err(|e| e.to_string())?
stmt.query_map(params![cutoff], create_project_usage)
.map_err(|e| e.to_string())?
} else {
stmt.query_map(params![], create_project_usage).map_err(|e| e.to_string())?
stmt.query_map(params![], create_project_usage)
.map_err(|e| e.to_string())?
};
for row in rows {
if let Ok(mut project) = row {
// Extract project name from path
project.project_name = project.project_path
project.project_name = project
.project_path
.split('/')
.last()
.unwrap_or(&project.project_path)
@@ -627,12 +676,14 @@ pub async fn check_files_changed(state: &State<'_, UsageCacheState>) -> Result<b
.map_err(|e| e.to_string())?;
let mut known_files = std::collections::HashMap::new();
let rows = stmt.query_map([], |row| {
let rows = stmt
.query_map([], |row| {
Ok((
row.get::<_, String>(0)?,
(row.get::<_, i64>(1)?, row.get::<_, i64>(2)?),
))
}).map_err(|e| e.to_string())?;
})
.map_err(|e| e.to_string())?;
for row in rows {
if let Ok((path, data)) = row {

View File

@@ -32,14 +32,20 @@ pub struct UsageSummary {
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImportResult { pub inserted: u64, pub skipped: u64, pub errors: u64 }
pub struct ImportResult {
pub inserted: u64,
pub skipped: u64,
pub errors: u64,
}
fn db_path_for(project_root: &Path) -> PathBuf {
project_root.join(".claudia/cache/usage.sqlite")
}
fn ensure_parent_dir(p: &Path) -> std::io::Result<()> {
if let Some(dir) = p.parent() { std::fs::create_dir_all(dir)?; }
if let Some(dir) = p.parent() {
std::fs::create_dir_all(dir)?;
}
Ok(())
}
@@ -101,7 +107,9 @@ fn sha256_file(path: &Path) -> std::io::Result<String> {
let mut buf = [0u8; 8192];
loop {
let n = file.read(&mut buf)?;
if n == 0 { break; }
if n == 0 {
break;
}
hasher.update(&buf[..n]);
}
Ok(format!("{:x}", hasher.finalize()))
@@ -124,9 +132,13 @@ fn count_lines_chars_tokens(path: &Path) -> std::io::Result<(u64, u64, u64)> {
fn should_exclude(rel: &str, excludes: &HashSet<String>) -> bool {
// simple prefix/segment check
let default = ["node_modules/", "dist/", "target/", ".git/" ];
if default.iter().any(|p| rel.starts_with(p)) { return true; }
if rel.ends_with(".lock") { return true; }
let default = ["node_modules/", "dist/", "target/", ".git/"];
if default.iter().any(|p| rel.starts_with(p)) {
return true;
}
if rel.ends_with(".lock") {
return true;
}
excludes.iter().any(|p| rel.starts_with(p))
}
@@ -137,34 +149,56 @@ pub async fn usage_scan_index(
state: State<'_, UsageIndexState>,
) -> Result<String, String> {
let project = PathBuf::from(project_root.clone());
if !project.is_dir() { return Err("project_root is not a directory".into()); }
if !project.is_dir() {
return Err("project_root is not a directory".into());
}
let job_id = uuid::Uuid::new_v4().to_string();
{
let mut jobs = state.jobs.lock().map_err(|e| e.to_string())?;
jobs.insert(job_id.clone(), ScanProgress{ processed:0, total:0, started_ts: Utc::now().timestamp_millis(), finished_ts: None});
jobs.insert(
job_id.clone(),
ScanProgress {
processed: 0,
total: 0,
started_ts: Utc::now().timestamp_millis(),
finished_ts: None,
},
);
}
let excludes: HashSet<String> = exclude.unwrap_or_default().into_iter().collect();
let state_jobs = state.jobs.clone();
let job_id_task = job_id.clone();
let job_id_ret = job_id.clone();
tauri::async_runtime::spawn(async move {
let mut conn = match open_db(&project) { Ok(c)=>c, Err(e)=>{ log::error!("DB open error: {}", e); return; } };
let mut conn = match open_db(&project) {
Ok(c) => c,
Err(e) => {
log::error!("DB open error: {}", e);
return;
}
};
// First pass: count total
let mut total: u64 = 0;
for entry in WalkDir::new(&project).into_iter().filter_map(Result::ok) {
if entry.file_type().is_file() {
if let Ok(rel) = entry.path().strip_prefix(&project) {
let rel = rel.to_string_lossy().replace('\\',"/");
if should_exclude(&format!("{}/", rel).trim_end_matches('/'), &excludes) { continue; }
let rel = rel.to_string_lossy().replace('\\', "/");
if should_exclude(&format!("{}/", rel).trim_end_matches('/'), &excludes) {
continue;
}
total += 1;
}
}
}
{
if let Ok(mut jobs) = state_jobs.lock() { if let Some(p) = jobs.get_mut(&job_id_task){ p.total = total; } }
if let Ok(mut jobs) = state_jobs.lock() {
if let Some(p) = jobs.get_mut(&job_id_task) {
p.total = total;
}
}
}
// Cache existing file meta
let mut existing: HashMap<String,(i64,i64,String,i64)> = HashMap::new(); // rel -> (size, mtime, sha, file_id)
let mut existing: HashMap<String, (i64, i64, String, i64)> = HashMap::new(); // rel -> (size, mtime, sha, file_id)
{
let stmt = conn.prepare("SELECT id, rel_path, size_bytes, mtime_ms, sha256 FROM files WHERE project_root=?1").ok();
if let Some(mut st) = stmt {
@@ -176,7 +210,11 @@ pub async fn usage_scan_index(
let sha: String = row.get(4)?;
Ok((rel, (size, mtime, sha, id)))
});
if let Ok(rows) = rows { for r in rows.flatten(){ existing.insert(r.0, r.1); } }
if let Ok(rows) = rows {
for r in rows.flatten() {
existing.insert(r.0, r.1);
}
}
}
}
@@ -188,17 +226,37 @@ pub async fn usage_scan_index(
for entry in WalkDir::new(&project).into_iter().filter_map(Result::ok) {
if entry.file_type().is_file() {
if let Ok(relp) = entry.path().strip_prefix(&project) {
let rel = relp.to_string_lossy().replace('\\',"/");
let rel = relp.to_string_lossy().replace('\\', "/");
let rel_norm = rel.clone();
if should_exclude(&format!("{}/", rel_norm).trim_end_matches('/'), &excludes) { continue; }
let md = match entry.metadata() { Ok(m)=>m, Err(_)=>{ continue } };
if should_exclude(
&format!("{}/", rel_norm).trim_end_matches('/'),
&excludes,
) {
continue;
}
let md = match entry.metadata() {
Ok(m) => m,
Err(_) => continue,
};
let size = md.len() as i64;
let mtime = md.modified().ok().and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()).map(|d| d.as_millis() as i64).unwrap_or(0);
let mtime = md
.modified()
.ok()
.and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
.map(|d| d.as_millis() as i64)
.unwrap_or(0);
let mut content_changed = true;
let sha: String;
if let Some((esize, emtime, esha, _fid)) = existing.get(&rel_norm) {
if *esize == size && *emtime == mtime { content_changed = false; sha = esha.clone(); }
else { sha = sha256_file(entry.path()).unwrap_or_default(); if sha == *esha { content_changed = false; } }
if *esize == size && *emtime == mtime {
content_changed = false;
sha = esha.clone();
} else {
sha = sha256_file(entry.path()).unwrap_or_default();
if sha == *esha {
content_changed = false;
}
}
} else {
sha = sha256_file(entry.path()).unwrap_or_default();
}
@@ -211,14 +269,19 @@ pub async fn usage_scan_index(
).ok();
// get file_id
let file_id: i64 = tx.query_row(
let file_id: i64 = tx
.query_row(
"SELECT id FROM files WHERE project_root=?1 AND rel_path=?2",
params![project.to_string_lossy(), rel_norm], |row| row.get(0)
).unwrap_or(-1);
params![project.to_string_lossy(), rel_norm],
|row| row.get(0),
)
.unwrap_or(-1);
// metrics
if content_changed {
if let Ok((lines, chars, tokens)) = count_lines_chars_tokens(entry.path()) {
if let Ok((lines, chars, tokens)) =
count_lines_chars_tokens(entry.path())
{
tx.execute(
"INSERT INTO file_metrics(file_id, snapshot_ts, lines, tokens, chars) VALUES (?1,?2,?3,?4,?5)",
params![file_id, now, lines as i64, tokens as i64, chars as i64]
@@ -228,11 +291,27 @@ pub async fn usage_scan_index(
"SELECT lines, tokens, snapshot_ts FROM file_metrics WHERE file_id=?1 ORDER BY snapshot_ts DESC LIMIT 1 OFFSET 1",
params![file_id], |r| Ok((r.get(0)?, r.get(1)?, r.get(2)?))
).ok();
let (added_l, removed_l, added_t, removed_t, prev_ts, change_type) = match prev {
None => (lines as i64, 0, tokens as i64, 0, None, "created".to_string()),
let (added_l, removed_l, added_t, removed_t, prev_ts, change_type) =
match prev {
None => (
lines as i64,
0,
tokens as i64,
0,
None,
"created".to_string(),
),
Some((pl, pt, pts)) => {
let dl = lines as i64 - pl; let dt = tokens as i64 - pt;
(dl.max(0), (-dl).max(0), dt.max(0), (-dt).max(0), Some(pts), "modified".to_string())
let dl = lines as i64 - pl;
let dt = tokens as i64 - pt;
(
dl.max(0),
(-dl).max(0),
dt.max(0),
(-dt).max(0),
Some(pts),
"modified".to_string(),
)
}
};
tx.execute(
@@ -243,22 +322,42 @@ pub async fn usage_scan_index(
}
seen.insert(rel_norm);
processed += 1;
if let Ok(mut jobs) = state_jobs.lock() { if let Some(p) = jobs.get_mut(&job_id_task){ p.processed = processed; } }
if let Ok(mut jobs) = state_jobs.lock() {
if let Some(p) = jobs.get_mut(&job_id_task) {
p.processed = processed;
}
}
}
}
}
// deletions: files in DB but not seen
let mut to_delete: Vec<(i64,i64,i64)> = Vec::new(); // (file_id, last_lines, last_tokens)
let mut to_delete: Vec<(i64, i64, i64)> = Vec::new(); // (file_id, last_lines, last_tokens)
{
let stmt = tx.prepare("SELECT f.id, m.lines, m.tokens FROM files f LEFT JOIN file_metrics m ON m.file_id=f.id WHERE f.project_root=?1 AND m.snapshot_ts=(SELECT MAX(snapshot_ts) FROM file_metrics WHERE file_id=f.id)").ok();
if let Some(mut st) = stmt {
let rows = st.query_map(params![project.to_string_lossy()], |row| Ok((row.get(0)?, row.get::<_,Option<i64>>(1).unwrap_or(None).unwrap_or(0), row.get::<_,Option<i64>>(2).unwrap_or(None).unwrap_or(0)))) ;
if let Ok(rows) = rows { for r in rows.flatten() { to_delete.push(r); } }
let rows = st.query_map(params![project.to_string_lossy()], |row| {
Ok((
row.get(0)?,
row.get::<_, Option<i64>>(1).unwrap_or(None).unwrap_or(0),
row.get::<_, Option<i64>>(2).unwrap_or(None).unwrap_or(0),
))
});
if let Ok(rows) = rows {
for r in rows.flatten() {
to_delete.push(r);
}
}
}
}
for (fid, last_lines, last_tokens) in to_delete {
let rel: String = tx.query_row("SELECT rel_path FROM files WHERE id=?1", params![fid], |r| r.get(0)).unwrap_or_default();
let rel: String = tx
.query_row(
"SELECT rel_path FROM files WHERE id=?1",
params![fid],
|r| r.get(0),
)
.unwrap_or_default();
if !seen.contains(&rel) {
tx.execute(
"INSERT INTO file_diffs(file_id, snapshot_ts, prev_snapshot_ts, added_lines, removed_lines, added_tokens, removed_tokens, change_type) VALUES (?1,?2,NULL,0,?3,0,?4,'deleted')",
@@ -270,41 +369,76 @@ pub async fn usage_scan_index(
tx.commit().ok();
}
if let Ok(mut jobs) = state_jobs.lock() { if let Some(p) = jobs.get_mut(&job_id_task){ p.finished_ts = Some(Utc::now().timestamp_millis()); } }
if let Ok(mut jobs) = state_jobs.lock() {
if let Some(p) = jobs.get_mut(&job_id_task) {
p.finished_ts = Some(Utc::now().timestamp_millis());
}
}
});
Ok(job_id_ret)
}
#[tauri::command]
pub fn usage_scan_progress(job_id: String, state: State<'_, UsageIndexState>) -> Result<ScanProgress, String> {
pub fn usage_scan_progress(
job_id: String,
state: State<'_, UsageIndexState>,
) -> Result<ScanProgress, String> {
let jobs = state.jobs.lock().map_err(|e| e.to_string())?;
jobs.get(&job_id).cloned().ok_or_else(|| "job not found".into())
jobs.get(&job_id)
.cloned()
.ok_or_else(|| "job not found".into())
}
#[tauri::command]
/// Aggregates usage statistics for a project from its index database.
///
/// Opens the per-project DB (via `open_db` — presumably a project-scoped
/// SQLite file; confirm against its definition), then computes:
/// - `files`: COUNT(*) of rows in `files` for this project root,
/// - `lines` / `tokens`: SUM over `file_metrics` rows belonging to those files,
/// - `last_scan_ts`: MAX(snapshot_ts) over the same rows (None if no metrics).
///
/// Returns an error string if the DB cannot be opened or the summary
/// statement fails to prepare; per-row aggregate failures fall back to zeros.
pub fn usage_get_summary(project_root: String) -> Result<UsageSummary, String> {
    let project = PathBuf::from(project_root);
    let conn = open_db(&project).map_err(|e| e.to_string())?;
    // File count is best-effort: a failed query simply reports 0 files.
    let files: u64 = conn
        .query_row(
            "SELECT COUNT(*) FROM files WHERE project_root=?1",
            params![project.to_string_lossy()],
            |r| r.get::<_, i64>(0),
        )
        .unwrap_or(0) as u64;
    let mut lines: u64 = 0;
    let mut tokens: u64 = 0;
    let mut last_ts: Option<i64> = None;
    let mut stmt = conn.prepare("SELECT MAX(snapshot_ts), SUM(lines), SUM(tokens) FROM file_metrics WHERE file_id IN (SELECT id FROM files WHERE project_root=?1)").map_err(|e| e.to_string())?;
    // SQL aggregates return NULL on empty input, so each column is Option<i64>.
    let res = stmt.query_row(params![project.to_string_lossy()], |r| {
        Ok((
            r.get::<_, Option<i64>>(0)?,
            r.get::<_, Option<i64>>(1)?,
            r.get::<_, Option<i64>>(2)?,
        ))
    });
    if let Ok((mx, lsum, tsum)) = res {
        last_ts = mx;
        lines = lsum.unwrap_or(0) as u64;
        tokens = tsum.unwrap_or(0) as u64;
    }
    Ok(UsageSummary {
        files,
        tokens,
        lines,
        last_scan_ts: last_ts,
    })
}
/// One externally-supplied diff record, as parsed from a JSON array or
/// NDJSON input by `usage_import_diffs`.
///
/// `rel_path`, `snapshot_ts`, and `change_type` are required; every counter
/// and the previous-snapshot timestamp default (via `#[serde(default)]`)
/// to 0 / None when absent from the input.
#[derive(Debug, Deserialize)]
struct ExternalDiff {
    rel_path: String,
    snapshot_ts: i64,
    #[serde(default)]
    prev_snapshot_ts: Option<i64>,
    #[serde(default)]
    added_lines: i64,
    #[serde(default)]
    removed_lines: i64,
    #[serde(default)]
    added_tokens: i64,
    #[serde(default)]
    removed_tokens: i64,
    change_type: String,
}
@@ -313,19 +447,33 @@ pub fn usage_import_diffs(project_root: String, path: String) -> Result<ImportRe
let project = PathBuf::from(project_root);
let mut conn = open_db(&project).map_err(|e| e.to_string())?;
let data = std::fs::read_to_string(&path).map_err(|e| e.to_string())?;
let mut inserted=0u64; let mut skipped=0u64; let mut errors=0u64;
let mut inserted = 0u64;
let mut skipped = 0u64;
let mut errors = 0u64;
let tx = conn.transaction().map_err(|e| e.to_string())?;
// try as JSON array
let mut diffs: Vec<ExternalDiff> = Vec::new();
match serde_json::from_str::<serde_json::Value>(&data) {
Ok(serde_json::Value::Array(arr)) => {
for v in arr { if let Ok(d) = serde_json::from_value::<ExternalDiff>(v) { diffs.push(d); } }
},
for v in arr {
if let Ok(d) = serde_json::from_value::<ExternalDiff>(v) {
diffs.push(d);
}
}
}
_ => {
// try NDJSON
for line in data.lines() {
let l = line.trim(); if l.is_empty() { continue; }
match serde_json::from_str::<ExternalDiff>(l) { Ok(d)=>diffs.push(d), Err(_)=>{ errors+=1; } }
let l = line.trim();
if l.is_empty() {
continue;
}
match serde_json::from_str::<ExternalDiff>(l) {
Ok(d) => diffs.push(d),
Err(_) => {
errors += 1;
}
}
}
}
}
@@ -336,18 +484,31 @@ pub fn usage_import_diffs(project_root: String, path: String) -> Result<ImportRe
ON CONFLICT(project_root, rel_path) DO NOTHING",
params![project.to_string_lossy(), d.rel_path],
).ok();
let file_id: Option<i64> = tx.query_row(
let file_id: Option<i64> = tx
.query_row(
"SELECT id FROM files WHERE project_root=?1 AND rel_path=?2",
params![project.to_string_lossy(), d.rel_path], |r| r.get(0)
).ok();
params![project.to_string_lossy(), d.rel_path],
|r| r.get(0),
)
.ok();
if let Some(fid) = file_id {
let res = tx.execute(
"INSERT INTO file_diffs(file_id, snapshot_ts, prev_snapshot_ts, added_lines, removed_lines, added_tokens, removed_tokens, change_type) VALUES (?1,?2,?3,?4,?5,?6,?7,?8)",
params![fid, d.snapshot_ts, d.prev_snapshot_ts, d.added_lines, d.removed_lines, d.added_tokens, d.removed_tokens, d.change_type]
);
if res.is_ok() { inserted+=1; } else { skipped+=1; }
} else { errors+=1; }
if res.is_ok() {
inserted += 1;
} else {
skipped += 1;
}
} else {
errors += 1;
}
}
tx.commit().map_err(|e| e.to_string())?;
Ok(ImportResult{ inserted, skipped, errors })
Ok(ImportResult {
inserted,
skipped,
errors,
})
}

View File

@@ -52,20 +52,19 @@ impl FileWatcherManager {
// 创建文件监听器
let mut watcher = RecommendedWatcher::new(
move |res: Result<Event, notify::Error>| {
match res {
move |res: Result<Event, notify::Error>| match res {
Ok(event) => {
Self::handle_event(event, &app_handle, &last_events);
}
Err(e) => {
log::error!("Watch error: {:?}", e);
}
}
},
Config::default()
.with_poll_interval(Duration::from_secs(1))
.with_compare_contents(false),
).map_err(|e| format!("Failed to create watcher: {}", e))?;
)
.map_err(|e| format!("Failed to create watcher: {}", e))?;
// 开始监听
let mode = if recursive {
@@ -108,7 +107,11 @@ impl FileWatcherManager {
}
/// 处理文件系统事件
fn handle_event(event: Event, app_handle: &AppHandle, last_events: &Arc<Mutex<HashMap<PathBuf, SystemTime>>>) {
fn handle_event(
event: Event,
app_handle: &AppHandle,
last_events: &Arc<Mutex<HashMap<PathBuf, SystemTime>>>,
) {
// 过滤不需要的事件
let change_type = match event.kind {
EventKind::Create(_) => "created",
@@ -126,7 +129,9 @@ impl FileWatcherManager {
if let Some(last_time) = last_events.get(&path) {
// 如果距离上次事件不到500ms忽略
if now.duration_since(*last_time).unwrap_or(Duration::ZERO) < Duration::from_millis(500) {
if now.duration_since(*last_time).unwrap_or(Duration::ZERO)
< Duration::from_millis(500)
{
false
} else {
last_events.insert(path.clone(), now);

View File

@@ -44,25 +44,45 @@ impl SimpleI18n {
("en-US", "claude-not-installed") => "Claude Code is not installed".to_string(),
// Relay Station English translations
("en-US", "relay_adapter.custom_no_test") => "Custom configuration, connection test skipped".to_string(),
("en-US", "relay_adapter.packycode_single_token") => "PackyCode only supports single API key".to_string(),
("en-US", "relay_adapter.user_info_not_available") => "User info not available for this configuration".to_string(),
("en-US", "relay_adapter.usage_logs_not_available") => "Usage logs not available for this configuration".to_string(),
("en-US", "relay_adapter.token_management_not_available") => "Token management not available for this configuration".to_string(),
("en-US", "relay_adapter.custom_no_test") => {
"Custom configuration, connection test skipped".to_string()
}
("en-US", "relay_adapter.packycode_single_token") => {
"PackyCode only supports single API key".to_string()
}
("en-US", "relay_adapter.user_info_not_available") => {
"User info not available for this configuration".to_string()
}
("en-US", "relay_adapter.usage_logs_not_available") => {
"Usage logs not available for this configuration".to_string()
}
("en-US", "relay_adapter.token_management_not_available") => {
"Token management not available for this configuration".to_string()
}
("en-US", "relay_adapter.connection_success") => "Connection successful".to_string(),
("en-US", "relay_adapter.api_error") => "API returned error".to_string(),
("en-US", "relay_adapter.parse_error") => "Failed to parse response".to_string(),
("en-US", "relay_adapter.http_error") => "HTTP request failed".to_string(),
("en-US", "relay_adapter.network_error") => "Network connection failed".to_string(),
("en-US", "relay_station.enabled_success") => "Relay station enabled successfully".to_string(),
("en-US", "relay_station.disabled_success") => "Relay station disabled successfully".to_string(),
("en-US", "relay_station.enabled_success") => {
"Relay station enabled successfully".to_string()
}
("en-US", "relay_station.disabled_success") => {
"Relay station disabled successfully".to_string()
}
("en-US", "relay_station.name_required") => "Station name is required".to_string(),
("en-US", "relay_station.api_url_required") => "API URL is required".to_string(),
("en-US", "relay_station.invalid_url") => "Invalid URL format".to_string(),
("en-US", "relay_station.https_required") => "API URL must use HTTPS protocol for security".to_string(),
("en-US", "relay_station.https_required") => {
"API URL must use HTTPS protocol for security".to_string()
}
("en-US", "relay_station.token_required") => "API token is required".to_string(),
("en-US", "relay_station.token_too_short") => "API token is too short (minimum 10 characters)".to_string(),
("en-US", "relay_station.token_invalid_chars") => "API token contains invalid characters".to_string(),
("en-US", "relay_station.token_too_short") => {
"API token is too short (minimum 10 characters)".to_string()
}
("en-US", "relay_station.token_invalid_chars") => {
"API token contains invalid characters".to_string()
}
// 中文翻译
("zh-CN", "error-failed-to-create") => "创建失败".to_string(),
@@ -73,10 +93,18 @@ impl SimpleI18n {
// Relay Station Chinese translations
("zh-CN", "relay_adapter.custom_no_test") => "自定义配置,跳过连接测试".to_string(),
("zh-CN", "relay_adapter.packycode_single_token") => "PackyCode 仅支持单个 API 密钥".to_string(),
("zh-CN", "relay_adapter.user_info_not_available") => "该配置不支持用户信息查询".to_string(),
("zh-CN", "relay_adapter.usage_logs_not_available") => "该配置不支持使用日志查询".to_string(),
("zh-CN", "relay_adapter.token_management_not_available") => "该配置不支持 Token 管理".to_string(),
("zh-CN", "relay_adapter.packycode_single_token") => {
"PackyCode 仅支持单个 API 密钥".to_string()
}
("zh-CN", "relay_adapter.user_info_not_available") => {
"该配置不支持用户信息查询".to_string()
}
("zh-CN", "relay_adapter.usage_logs_not_available") => {
"该配置不支持使用日志查询".to_string()
}
("zh-CN", "relay_adapter.token_management_not_available") => {
"该配置不支持 Token 管理".to_string()
}
("zh-CN", "relay_adapter.connection_success") => "连接成功".to_string(),
("zh-CN", "relay_adapter.api_error") => "API 返回错误".to_string(),
("zh-CN", "relay_adapter.parse_error") => "解析响应失败".to_string(),
@@ -87,9 +115,13 @@ impl SimpleI18n {
("zh-CN", "relay_station.name_required") => "中转站名称不能为空".to_string(),
("zh-CN", "relay_station.api_url_required") => "API地址不能为空".to_string(),
("zh-CN", "relay_station.invalid_url") => "无效的URL格式".to_string(),
("zh-CN", "relay_station.https_required") => "出于安全考虑API地址必须使用HTTPS协议".to_string(),
("zh-CN", "relay_station.https_required") => {
"出于安全考虑API地址必须使用HTTPS协议".to_string()
}
("zh-CN", "relay_station.token_required") => "API令牌不能为空".to_string(),
("zh-CN", "relay_station.token_too_short") => "API令牌太短至少需要10个字符".to_string(),
("zh-CN", "relay_station.token_too_short") => {
"API令牌太短至少需要10个字符".to_string()
}
("zh-CN", "relay_station.token_invalid_chars") => "API令牌包含无效字符".to_string(),
// 默认情况

View File

@@ -5,9 +5,9 @@ pub mod checkpoint;
pub mod claude_binary;
pub mod claude_config;
pub mod commands;
pub mod process;
pub mod i18n;
pub mod file_watcher;
pub mod i18n;
pub mod process;
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {

View File

@@ -3,98 +3,97 @@
mod checkpoint;
mod claude_binary;
mod commands;
mod process;
mod i18n;
mod claude_config;
mod commands;
mod file_watcher;
mod i18n;
mod process;
use checkpoint::state::CheckpointState;
use commands::agents::{
cleanup_finished_processes, create_agent, delete_agent, execute_agent, export_agent,
export_agent_to_file, fetch_github_agent_content, fetch_github_agents, get_agent,
get_agent_run, get_agent_run_with_real_time_metrics, get_claude_binary_path,
get_live_session_output, get_session_output, get_session_status, import_agent,
import_agent_from_file, import_agent_from_github, init_database, kill_agent_session,
list_agent_runs, list_agent_runs_with_metrics, list_agents, list_claude_installations,
list_running_sessions, load_agent_session_history, set_claude_binary_path, stream_session_output, update_agent, AgentDb,
get_model_mappings, update_model_mapping,
get_live_session_output, get_model_mappings, get_session_output, get_session_status,
import_agent, import_agent_from_file, import_agent_from_github, init_database,
kill_agent_session, list_agent_runs, list_agent_runs_with_metrics, list_agents,
list_claude_installations, list_running_sessions, load_agent_session_history,
set_claude_binary_path, stream_session_output, update_agent, update_model_mapping, AgentDb,
};
use commands::claude::{
cancel_claude_execution, check_auto_checkpoint, check_claude_version, cleanup_old_checkpoints,
clear_checkpoint_manager, continue_claude_code, create_checkpoint, execute_claude_code,
find_claude_md_files, fork_from_checkpoint, get_checkpoint_diff, get_checkpoint_settings,
get_checkpoint_state_stats, get_claude_session_output, get_claude_settings, get_project_sessions,
get_recently_modified_files, get_session_timeline, get_system_prompt, list_checkpoints,
list_directory_contents, list_projects, list_running_claude_sessions, load_session_history,
open_new_session, read_claude_md_file, restore_checkpoint, resume_claude_code,
save_claude_md_file, save_claude_settings, save_system_prompt, search_files,
track_checkpoint_message, track_session_messages, update_checkpoint_settings,
get_hooks_config, update_hooks_config, validate_hook_command,
watch_claude_project_directory, unwatch_claude_project_directory,
ClaudeProcessState,
get_checkpoint_state_stats, get_claude_session_output, get_claude_settings, get_hooks_config,
get_project_sessions, get_recently_modified_files, get_session_timeline, get_system_prompt,
list_checkpoints, list_directory_contents, list_projects, list_running_claude_sessions,
load_session_history, open_new_session, read_claude_md_file, restore_checkpoint,
resume_claude_code, save_claude_md_file, save_claude_settings, save_system_prompt,
search_files, track_checkpoint_message, track_session_messages,
unwatch_claude_project_directory, update_checkpoint_settings, update_hooks_config,
validate_hook_command, watch_claude_project_directory, ClaudeProcessState,
};
use commands::mcp::{
mcp_add, mcp_add_from_claude_desktop, mcp_add_json, mcp_get, mcp_get_server_status, mcp_list,
mcp_read_project_config, mcp_remove, mcp_reset_project_choices, mcp_save_project_config,
mcp_serve, mcp_test_connection, mcp_export_servers,
mcp_add, mcp_add_from_claude_desktop, mcp_add_json, mcp_export_servers, mcp_get,
mcp_get_server_status, mcp_list, mcp_read_project_config, mcp_remove,
mcp_reset_project_choices, mcp_save_project_config, mcp_serve, mcp_test_connection,
};
use commands::ccr::{
check_ccr_installation, get_ccr_config_path, get_ccr_service_status, get_ccr_version,
open_ccr_ui, restart_ccr_service, start_ccr_service, stop_ccr_service,
};
use commands::filesystem::{
get_file_info, get_file_tree, get_watched_paths, read_directory_tree, read_file,
search_files_by_name, unwatch_directory, watch_directory, write_file,
};
use commands::git::{
get_git_branches, get_git_commits, get_git_diff, get_git_history, get_git_status,
};
use commands::language::{get_current_language, get_supported_languages, set_language};
use commands::packycode_nodes::{
auto_select_best_node, get_packycode_nodes, test_all_packycode_nodes,
};
use commands::proxy::{apply_proxy_settings, get_proxy_settings, save_proxy_settings};
use commands::relay_adapters::{
packycode_get_user_quota, relay_station_create_token, relay_station_delete_token,
relay_station_get_info, relay_station_get_usage_logs, relay_station_get_user_info,
relay_station_list_tokens, relay_station_test_connection, relay_station_update_token,
};
use commands::relay_stations::{
relay_station_create, relay_station_delete, relay_station_get,
relay_station_get_current_config, relay_station_restore_config, relay_station_sync_config,
relay_station_toggle_enable, relay_station_update, relay_station_update_order,
relay_stations_export, relay_stations_import, relay_stations_list,
};
use commands::smart_sessions::{
cleanup_old_smart_sessions_command, create_smart_quick_start_session, get_smart_session_config,
list_smart_sessions_command, toggle_smart_session_mode, update_smart_session_config,
};
use commands::storage::{
storage_delete_row, storage_execute_sql, storage_insert_row, storage_list_tables,
storage_read_table, storage_reset_database, storage_update_row,
};
use commands::system::flush_dns;
use commands::terminal::{
cleanup_terminal_sessions, close_terminal_session, create_terminal_session,
list_terminal_sessions, resize_terminal, send_terminal_input, TerminalState,
};
use commands::usage::{
get_session_stats, get_usage_by_date_range, get_usage_details, get_usage_stats,
};
use commands::usage_cache::{
usage_check_updates, usage_clear_cache, usage_force_scan, usage_get_stats_cached,
usage_scan_update, UsageCacheState,
};
use commands::usage_index::{
usage_get_summary, usage_import_diffs, usage_scan_index, usage_scan_progress, UsageIndexState,
};
use commands::usage_cache::{
usage_scan_update, usage_get_stats_cached, usage_clear_cache, usage_force_scan, usage_check_updates, UsageCacheState,
};
use commands::storage::{
storage_list_tables, storage_read_table, storage_update_row, storage_delete_row,
storage_insert_row, storage_execute_sql, storage_reset_database,
};
use commands::proxy::{get_proxy_settings, save_proxy_settings, apply_proxy_settings};
use commands::language::{get_current_language, set_language, get_supported_languages};
use commands::relay_stations::{
relay_stations_list, relay_station_get, relay_station_create, relay_station_update,
relay_station_delete, relay_station_toggle_enable, relay_station_sync_config,
relay_station_restore_config, relay_station_get_current_config,
relay_stations_export, relay_stations_import, relay_station_update_order,
};
use commands::relay_adapters::{
relay_station_get_info, relay_station_get_user_info,
relay_station_test_connection, relay_station_get_usage_logs, relay_station_list_tokens,
relay_station_create_token, relay_station_update_token, relay_station_delete_token,
packycode_get_user_quota,
};
use commands::packycode_nodes::{
test_all_packycode_nodes, auto_select_best_node, get_packycode_nodes,
};
use commands::filesystem::{
read_directory_tree, search_files_by_name, get_file_info, watch_directory,
read_file, write_file, get_file_tree, unwatch_directory, get_watched_paths,
};
use commands::git::{
get_git_status, get_git_history, get_git_branches, get_git_diff, get_git_commits,
};
use commands::terminal::{
create_terminal_session, send_terminal_input, close_terminal_session,
list_terminal_sessions, resize_terminal, cleanup_terminal_sessions, TerminalState,
};
use commands::ccr::{
check_ccr_installation, get_ccr_version, get_ccr_service_status, start_ccr_service,
stop_ccr_service, restart_ccr_service, open_ccr_ui, get_ccr_config_path,
};
use commands::system::flush_dns;
use commands::smart_sessions::{
create_smart_quick_start_session, get_smart_session_config, update_smart_session_config,
list_smart_sessions_command, toggle_smart_session_mode, cleanup_old_smart_sessions_command,
};
use process::ProcessRegistryState;
use file_watcher::FileWatcherState;
use process::ProcessRegistryState;
use std::sync::Mutex;
use tauri::Manager;
use tauri::menu::{MenuBuilder, MenuItemBuilder, SubmenuBuilder};
use tauri::Manager;
use tauri_plugin_log::{Target, TargetKind};
fn main() {
@@ -105,13 +104,15 @@ fn main() {
.plugin(tauri_plugin_shell::init())
.plugin(tauri_plugin_fs::init())
.plugin(tauri_plugin_clipboard_manager::init())
.plugin(tauri_plugin_log::Builder::new()
.plugin(
tauri_plugin_log::Builder::new()
.level(log::LevelFilter::Debug)
.targets([
Target::new(TargetKind::LogDir { file_name: None }),
Target::new(TargetKind::Stdout),
])
.build())
.build(),
)
// App menu: include standard Edit actions so OS hotkeys (Undo/Redo/Cut/Copy/Paste/Select All)
// work across all pages, plus a DevTools toggle.
.menu(|app| {
@@ -121,9 +122,11 @@ fn main() {
// Create macOS app menu with Quit
let app_menu = SubmenuBuilder::new(app, "Claudia")
.about(Some(AboutMetadataBuilder::new()
.about(Some(
AboutMetadataBuilder::new()
.version(Some(env!("CARGO_PKG_VERSION")))
.build()))
.build(),
))
.separator()
.quit()
.build()
@@ -144,11 +147,13 @@ fn main() {
.close_window()
.minimize()
.separator()
.item(&MenuItemBuilder::new("Toggle DevTools")
.item(
&MenuItemBuilder::new("Toggle DevTools")
.id("toggle-devtools")
.accelerator("CmdOrCtrl+Alt+I")
.build(app)
.unwrap())
.unwrap(),
)
.build()
.unwrap();
@@ -236,10 +241,18 @@ fn main() {
) {
match field {
"enabled" => settings.enabled = value == "true",
"http_proxy" => settings.http_proxy = Some(value).filter(|s| !s.is_empty()),
"https_proxy" => settings.https_proxy = Some(value).filter(|s| !s.is_empty()),
"no_proxy" => settings.no_proxy = Some(value).filter(|s| !s.is_empty()),
"all_proxy" => settings.all_proxy = Some(value).filter(|s| !s.is_empty()),
"http_proxy" => {
settings.http_proxy = Some(value).filter(|s| !s.is_empty())
}
"https_proxy" => {
settings.https_proxy = Some(value).filter(|s| !s.is_empty())
}
"no_proxy" => {
settings.no_proxy = Some(value).filter(|s| !s.is_empty())
}
"all_proxy" => {
settings.all_proxy = Some(value).filter(|s| !s.is_empty())
}
_ => {}
}
}
@@ -337,7 +350,6 @@ fn main() {
get_hooks_config,
update_hooks_config,
validate_hook_command,
// Checkpoint Management
create_checkpoint,
restore_checkpoint,
@@ -353,7 +365,6 @@ fn main() {
get_checkpoint_settings,
clear_checkpoint_manager,
get_checkpoint_state_stats,
// Agent Management
list_agents,
create_agent,
@@ -385,26 +396,22 @@ fn main() {
import_agent_from_github,
get_model_mappings,
update_model_mapping,
// Usage & Analytics
get_usage_stats,
get_usage_by_date_range,
get_usage_details,
get_session_stats,
// File Usage Index (SQLite)
usage_scan_index,
usage_scan_progress,
usage_get_summary,
usage_import_diffs,
// Usage Cache Management
usage_scan_update,
usage_get_stats_cached,
usage_clear_cache,
usage_force_scan,
usage_check_updates,
// MCP (Model Context Protocol)
mcp_add,
mcp_list,
@@ -419,7 +426,6 @@ fn main() {
mcp_read_project_config,
mcp_save_project_config,
mcp_export_servers,
// Storage Management
storage_list_tables,
storage_read_table,
@@ -428,7 +434,6 @@ fn main() {
storage_insert_row,
storage_execute_sql,
storage_reset_database,
// Smart Sessions Management
create_smart_quick_start_session,
get_smart_session_config,
@@ -436,22 +441,18 @@ fn main() {
list_smart_sessions_command,
toggle_smart_session_mode,
cleanup_old_smart_sessions_command,
// Slash Commands
commands::slash_commands::slash_commands_list,
commands::slash_commands::slash_command_get,
commands::slash_commands::slash_command_save,
commands::slash_commands::slash_command_delete,
// Proxy Settings
get_proxy_settings,
save_proxy_settings,
// Language Settings
get_current_language,
set_language,
get_supported_languages,
// Relay Stations
relay_stations_list,
relay_station_get,
@@ -474,12 +475,10 @@ fn main() {
relay_station_update_token,
relay_station_delete_token,
packycode_get_user_quota,
// PackyCode Nodes
test_all_packycode_nodes,
auto_select_best_node,
get_packycode_nodes,
// File System
read_directory_tree,
search_files_by_name,
@@ -490,14 +489,12 @@ fn main() {
read_file,
write_file,
get_file_tree,
// Git
get_git_status,
get_git_history,
get_git_branches,
get_git_diff,
get_git_commits,
// Terminal
create_terminal_session,
send_terminal_input,
@@ -505,7 +502,6 @@ fn main() {
list_terminal_sessions,
resize_terminal,
cleanup_terminal_sessions,
// CCR (Claude Code Router)
check_ccr_installation,
get_ccr_version,
@@ -515,7 +511,6 @@ fn main() {
restart_ccr_service,
open_ccr_ui,
get_ccr_config_path,
// System utilities
flush_dns,
])

View File

@@ -7,13 +7,8 @@ use tokio::process::Child;
/// Type of process being tracked
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ProcessType {
AgentRun {
agent_id: i64,
agent_name: String,
},
ClaudeSession {
session_id: String,
},
AgentRun { agent_id: i64, agent_name: String },
ClaudeSession { session_id: String },
}
/// Information about a running agent process
@@ -72,7 +67,10 @@ impl ProcessRegistry {
) -> Result<(), String> {
let process_info = ProcessInfo {
run_id,
process_type: ProcessType::AgentRun { agent_id, agent_name },
process_type: ProcessType::AgentRun {
agent_id,
agent_name,
},
pid,
started_at: Utc::now(),
project_path,
@@ -96,7 +94,10 @@ impl ProcessRegistry {
) -> Result<(), String> {
let process_info = ProcessInfo {
run_id,
process_type: ProcessType::AgentRun { agent_id, agent_name },
process_type: ProcessType::AgentRun {
agent_id,
agent_name,
},
pid,
started_at: Utc::now(),
project_path,
@@ -175,25 +176,24 @@ impl ProcessRegistry {
let processes = self.processes.lock().map_err(|e| e.to_string())?;
Ok(processes
.values()
.filter_map(|handle| {
match &handle.info.process_type {
.filter_map(|handle| match &handle.info.process_type {
ProcessType::ClaudeSession { .. } => Some(handle.info.clone()),
_ => None,
}
})
.collect())
}
/// Get a specific Claude session by session ID
pub fn get_claude_session_by_id(&self, session_id: &str) -> Result<Option<ProcessInfo>, String> {
pub fn get_claude_session_by_id(
&self,
session_id: &str,
) -> Result<Option<ProcessInfo>, String> {
let processes = self.processes.lock().map_err(|e| e.to_string())?;
Ok(processes
.values()
.find(|handle| {
match &handle.info.process_type {
.find(|handle| match &handle.info.process_type {
ProcessType::ClaudeSession { session_id: sid } => sid == session_id,
_ => false,
}
})
.map(|handle| handle.info.clone()))
}
@@ -221,11 +221,9 @@ impl ProcessRegistry {
let processes = self.processes.lock().map_err(|e| e.to_string())?;
Ok(processes
.values()
.filter_map(|handle| {
match &handle.info.process_type {
.filter_map(|handle| match &handle.info.process_type {
ProcessType::AgentRun { .. } => Some(handle.info.clone()),
_ => None,
}
})
.collect())
}
@@ -273,17 +271,26 @@ impl ProcessRegistry {
}
}
} else {
warn!("No child handle available for process {} (PID: {}), attempting system kill", run_id, pid);
warn!(
"No child handle available for process {} (PID: {}), attempting system kill",
run_id, pid
);
false // Process handle not available, try fallback
}
};
// If direct kill didn't work, try system command as fallback
if !kill_sent {
info!("Attempting fallback kill for process {} (PID: {})", run_id, pid);
info!(
"Attempting fallback kill for process {} (PID: {})",
run_id, pid
);
match self.kill_process_by_pid(run_id, pid) {
Ok(true) => return Ok(true),
Ok(false) => warn!("Fallback kill also failed for process {} (PID: {})", run_id, pid),
Ok(false) => warn!(
"Fallback kill also failed for process {} (PID: {})",
run_id, pid
),
Err(e) => error!("Error during fallback kill: {}", e),
}
// Continue with the rest of the cleanup even if fallback failed

View File

@@ -81,6 +81,41 @@ function AppContent() {
const [previousView] = useState<View>("welcome");
const [showAgentsModal, setShowAgentsModal] = useState(false);
const translateWithFallback = (
primaryKey: string,
params: Record<string, unknown> = {},
fallbackKeys: string[] = [],
fallbackDefault: string | ((params: Record<string, unknown>) => string) = primaryKey
) => {
const defaultNamespace = Array.isArray(i18n.options?.defaultNS)
? i18n.options.defaultNS[0] ?? "common"
: (i18n.options?.defaultNS ?? "common");
const candidateKeys = [primaryKey, ...fallbackKeys];
const rawLanguage = i18n.language || i18n.resolvedLanguage;
const normalizedLanguage = rawLanguage?.split('-')[0];
const localesToTry = [rawLanguage, normalizedLanguage, 'en'].filter(Boolean) as string[];
const missingToken = '__i18n_missing__';
for (const key of candidateKeys) {
for (const locale of localesToTry) {
const fixedT = i18n.getFixedT(locale, defaultNamespace);
const translated = fixedT(key, {
...params,
defaultValue: missingToken,
});
if (translated !== missingToken) {
return translated;
}
}
}
return typeof fallbackDefault === 'function'
? (fallbackDefault as (params: Record<string, unknown>) => string)(params)
: fallbackDefault;
};
// Initialize analytics lifecycle tracking
useAppLifecycle();
const trackEvent = useTrackEvent();
@@ -292,10 +327,12 @@ function AppContent() {
// Create a new tab for the smart session
const newTabId = createChatTab();
const sessionDisplayName = smartSession.display_name || t('messages.smartSessionDefaultTitle');
// 直接更新新建标签的会话上下文,避免依赖事件时序
updateTab(newTabId, {
type: 'chat',
title: smartSession.display_name || 'Smart Session',
title: sessionDisplayName,
initialProjectPath: smartSession.project_path,
sessionData: null,
status: 'active'
@@ -307,12 +344,16 @@ function AppContent() {
}
switchToTab(newTabId);
// Show success message
// Show success message,若主键缺失则回退到默认提示
const successMessage = translateWithFallback(
'messages.smartSessionCreated',
{ name: sessionDisplayName },
['messages.smartSessionDefaultToast'],
`Smart session '${sessionDisplayName}' is ready to use.`
);
setToast({
message: t('smartSessionCreated', {
name: smartSession.display_name,
path: smartSession.project_path
}),
message: successMessage,
type: "success"
});
@@ -324,10 +365,16 @@ function AppContent() {
} catch (error) {
console.error('Failed to create smart session:', error);
const rawError = error instanceof Error ? error.message : String(error);
const fallbackErrorMessage = translateWithFallback(
'messages.failedToCreateSmartSession',
{ error: rawError },
['messages.failedToCreateSmartSessionFallback'],
`Failed to create smart session: ${rawError}`
);
setToast({
message: t('failedToCreateSmartSession', {
error: error instanceof Error ? error.message : String(error)
}),
message: fallbackErrorMessage,
type: "error"
});
}

View File

@@ -150,11 +150,11 @@ export const AgentsModal: React.FC<AgentsModalProps> = ({ open, onOpenChange })
if (filePath) {
const agent = await api.importAgentFromFile(filePath as string);
loadAgents(); // Refresh list
setToast({ message: `Agent "${agent.name}" imported successfully`, type: "success" });
setToast({ message: t('agents.importedSuccessfully', { name: agent.name }), type: "success" });
}
} catch (error) {
console.error('Failed to import agent:', error);
setToast({ message: "Failed to import agent", type: "error" });
setToast({ message: t('agents.importFailed'), type: "error" });
}
};
@@ -175,11 +175,11 @@ export const AgentsModal: React.FC<AgentsModalProps> = ({ open, onOpenChange })
if (filePath) {
await invoke('write_file', { path: filePath, content: JSON.stringify(exportData, null, 2) });
setToast({ message: "Agent exported successfully", type: "success" });
setToast({ message: t('agents.exportedSuccessfully', { name: agent.name }), type: "success" });
}
} catch (error) {
console.error('Failed to export agent:', error);
setToast({ message: "Failed to export agent", type: "error" });
setToast({ message: t('agents.exportFailed'), type: "error" });
}
};
@@ -424,7 +424,7 @@ export const AgentsModal: React.FC<AgentsModalProps> = ({ open, onOpenChange })
onImportSuccess={() => {
setShowGitHubBrowser(false);
loadAgents(); // Refresh the agents list
setToast({ message: "Agent imported successfully", type: "success" });
setToast({ message: t('agents.importedSuccessfully'), type: "success" });
}}
/>

View File

@@ -7,12 +7,14 @@ import { Badge } from "@/components/ui/badge";
import { Toast, ToastContainer } from "@/components/ui/toast";
import { ccrApi, type CcrServiceStatus } from "@/lib/api";
import { open } from '@tauri-apps/plugin-shell';
import { useTranslation } from '@/hooks/useTranslation';
interface CcrRouterManagerProps {
onBack: () => void;
}
export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
const { t } = useTranslation();
const [serviceStatus, setServiceStatus] = useState<CcrServiceStatus | null>(null);
const [loading, setLoading] = useState(true);
const [actionLoading, setActionLoading] = useState(false);
@@ -34,7 +36,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
} catch (error) {
console.error("Failed to load CCR service status:", error);
setToast({
message: `加载CCR服务状态失败: ${error}`,
message: t('ccr.loadStatusFailed', { error: String(error) }),
type: "error"
});
} finally {
@@ -63,7 +65,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
} catch (error) {
console.error("Failed to start CCR service:", error);
setToast({
message: `启动CCR服务失败: ${error}`,
message: t('ccr.startFailed', { error: String(error) }),
type: "error"
});
} finally {
@@ -83,7 +85,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
} catch (error) {
console.error("Failed to stop CCR service:", error);
setToast({
message: `停止CCR服务失败: ${error}`,
message: t('ccr.stopFailed', { error: String(error) }),
type: "error"
});
} finally {
@@ -103,7 +105,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
} catch (error) {
console.error("Failed to restart CCR service:", error);
setToast({
message: `重启CCR服务失败: ${error}`,
message: t('ccr.restartFailed', { error: String(error) }),
type: "error"
});
} finally {
@@ -118,14 +120,14 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
// 如果服务未运行,先尝试启动
if (!serviceStatus?.is_running) {
setToast({
message: "检测到服务未运行,正在启动...",
message: t('ccr.serviceStarting'),
type: "info"
});
const startResult = await ccrApi.startService();
setServiceStatus(startResult.status);
if (!startResult.status.is_running) {
throw new Error("服务启动失败");
throw new Error(t('ccr.serviceStartFailed'));
}
// 等待服务完全启动
@@ -134,7 +136,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
await ccrApi.openUI();
setToast({
message: "正在打开CCR UI...",
message: t('ccr.openingUI'),
type: "info"
});
@@ -145,7 +147,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
} catch (error) {
console.error("Failed to open CCR UI:", error);
setToast({
message: `打开CCR UI失败: ${error}`,
message: t('ccr.openUIFailed', { error: String(error) }),
type: "error"
});
} finally {
@@ -159,7 +161,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
if (!serviceStatus?.is_running) {
setActionLoading(true);
setToast({
message: "检测到服务未运行,正在启动...",
message: t('ccr.serviceStarting'),
type: "info"
});
@@ -167,7 +169,7 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
setServiceStatus(startResult.status);
if (!startResult.status.is_running) {
throw new Error("服务启动失败");
throw new Error(t('ccr.serviceStartFailed'));
}
// 等待服务完全启动
@@ -178,14 +180,14 @@ export function CcrRouterManager({ onBack }: CcrRouterManagerProps) {
if (serviceStatus?.endpoint) {
open(`${serviceStatus.endpoint}/ui/`);
setToast({
message: "正在打开CCR管理界面...",
message: t('ccr.openingAdmin'),
type: "info"
});
}
} catch (error) {
console.error("Failed to open CCR UI in browser:", error);
setToast({
message: `打开管理界面失败: ${error}`,
message: t('ccr.openAdminFailed', { error: String(error) }),
type: "error"
});
setActionLoading(false);

View File

@@ -1519,7 +1519,7 @@ export const ClaudeCodeSession: React.FC<ClaudeCodeSessionProps> = ({
</Button>
</TooltipTrigger>
<TooltipContent side="left">
<p></p>
<p>{t('claudeSession.scrollToTop', 'Scroll to top')}</p>
</TooltipContent>
</Tooltip>
</TooltipProvider>
@@ -1540,7 +1540,7 @@ export const ClaudeCodeSession: React.FC<ClaudeCodeSessionProps> = ({
</Button>
</TooltipTrigger>
<TooltipContent side="left">
<p></p>
<p>{t('claudeSession.scrollToBottom', 'Scroll to bottom')}</p>
</TooltipContent>
</Tooltip>
</TooltipProvider>
@@ -1665,7 +1665,7 @@ export const ClaudeCodeSession: React.FC<ClaudeCodeSessionProps> = ({
<div className="flex items-center gap-1.5 text-xs bg-muted/50 rounded-full px-2.5 py-1">
<Hash className="h-3 w-3 text-muted-foreground" />
<span className="font-mono">{totalTokens.toLocaleString()}</span>
<span className="text-muted-foreground">tokens</span>
<span className="text-muted-foreground">{t('usage.tokens')}</span>
</div>
)}
@@ -1705,7 +1705,7 @@ export const ClaudeCodeSession: React.FC<ClaudeCodeSessionProps> = ({
</Button>
</TooltipTrigger>
<TooltipContent>
<p>File Explorer</p>
<p>{t('app.fileExplorer')}</p>
</TooltipContent>
</Tooltip>
</TooltipProvider>
@@ -1726,7 +1726,7 @@ export const ClaudeCodeSession: React.FC<ClaudeCodeSessionProps> = ({
</Button>
</TooltipTrigger>
<TooltipContent>
<p>Git Panel</p>
<p>{t('app.gitPanel')}</p>
</TooltipContent>
</Tooltip>
</TooltipProvider>
@@ -1747,7 +1747,7 @@ export const ClaudeCodeSession: React.FC<ClaudeCodeSessionProps> = ({
</Button>
</TooltipTrigger>
<TooltipContent>
<p>{isFileWatching ? '停止文件监控' : '启动文件监控'}</p>
<p>{isFileWatching ? t('claudeSession.stopFileWatch', 'Stop file watching') : t('claudeSession.startFileWatch', 'Start file watching')}</p>
</TooltipContent>
</Tooltip>
</TooltipProvider>

View File

@@ -136,7 +136,7 @@ export const Settings: React.FC<SettingsProps> = ({
setModelMappings(mappings);
} catch (err) {
console.error("Failed to load model mappings:", err);
setToast({ message: "加载模型映射失败", type: "error" });
setToast({ message: t('settings.modelMappings.loadFailed'), type: "error" });
} finally {
setLoadingMappings(false);
}
@@ -163,10 +163,10 @@ export const Settings: React.FC<SettingsProps> = ({
await api.updateModelMapping(mapping.alias, mapping.model_name);
}
setModelMappingsChanged(false);
setToast({ message: "模型映射已保存", type: "success" });
setToast({ message: t('settings.modelMappings.saved'), type: "success" });
} catch (err) {
console.error("Failed to save model mappings:", err);
setToast({ message: "保存模型映射失败", type: "error" });
setToast({ message: t('settings.modelMappings.saveFailed'), type: "error" });
}
};
@@ -696,9 +696,9 @@ export const Settings: React.FC<SettingsProps> = ({
{/* Model Mappings Configuration */}
<div className="space-y-4">
<div>
<Label className="text-sm font-medium mb-2 block"></Label>
<Label className="text-sm font-medium mb-2 block">{t('settings.modelMappings.title')}</Label>
<p className="text-xs text-muted-foreground mb-4">
sonnetopushaiku
{t('settings.modelMappings.description')}
</p>
</div>
@@ -720,29 +720,29 @@ export const Settings: React.FC<SettingsProps> = ({
className="font-mono text-sm"
/>
<p className="text-xs text-muted-foreground">
{mapping.alias === 'sonnet' && '平衡性能与成本的主力模型'}
{mapping.alias === 'opus' && '最强大的旗舰模型,适合复杂任务'}
{mapping.alias === 'haiku' && '快速响应的轻量级模型'}
{mapping.alias === 'sonnet' && t('settings.modelMappings.aliasDescriptions.sonnet')}
{mapping.alias === 'opus' && t('settings.modelMappings.aliasDescriptions.opus')}
{mapping.alias === 'haiku' && t('settings.modelMappings.aliasDescriptions.haiku')}
</p>
</div>
))}
{modelMappings.length === 0 && (
<div className="text-center py-8 text-muted-foreground">
<p className="text-sm"></p>
<p className="text-xs mt-2"></p>
<p className="text-sm">{t('settings.modelMappings.emptyTitle')}</p>
<p className="text-xs mt-2">{t('settings.modelMappings.emptySubtitle')}</p>
</div>
)}
{modelMappingsChanged && (
<p className="text-xs text-amber-600 dark:text-amber-400">
{t('settings.modelMappings.changedNotice')}
</p>
)}
<div className="pt-2">
<p className="text-xs text-muted-foreground">
<strong></strong>Agent执行时会根据这里的配置解析模型别名 sonnet claude-sonnet-4-20250514使 "sonnet" Agent都会调用该模型版本
<strong>{t('settings.modelMappings.note')}</strong> {t('settings.modelMappings.noteContent')}
</p>
</div>
</div>

View File

@@ -398,9 +398,11 @@ export const TabContent: React.FC = () => {
console.log('[TabContent] Handling create-smart-session-tab:', { tabId, sessionData });
// Update the existing tab with smart session data and switch immediately
const displayName = sessionData.display_name || t('smartSessionDefaultTitle');
updateTab(tabId, {
type: 'chat',
title: sessionData.display_name || 'Smart Session',
title: displayName,
initialProjectPath: sessionData.project_path,
sessionData: null, // No existing session, this is a new session workspace
});

View File

@@ -536,7 +536,42 @@
"allowRuleExample": "e.g., Bash(npm run test:*)",
"denyRuleExample": "e.g., Bash(curl:*)",
"apiKeyHelperPath": "/path/to/generate_api_key.sh"
},
"modelMappings": {
"title": "Model alias mappings",
"description": "Configure actual model versions for aliases (sonnet, opus, haiku)",
"loadFailed": "Failed to load model mappings",
"saved": "Model mappings saved",
"saveFailed": "Failed to save model mappings",
"emptyTitle": "No model mappings configured",
"emptySubtitle": "Database may not be initialized yet. Try restarting the app.",
"changedNotice": "Model mappings changed. Click Save to apply.",
"note": "Note:",
"noteContent": "Agents using aliases will resolve to configured versions. For example, sonnet → claude-sonnet-4-20250514.",
"aliasDescriptions": {
"sonnet": "Balanced model for most tasks",
"opus": "Most capable flagship model for complex tasks",
"haiku": "Fast, lightweight model"
}
}
},
"ccr": {
"loadStatusFailed": "Failed to load CCR service status: {{error}}",
"startFailed": "Failed to start CCR service: {{error}}",
"stopFailed": "Failed to stop CCR service: {{error}}",
"restartFailed": "Failed to restart CCR service: {{error}}",
"serviceStarting": "Service not running, starting...",
"serviceStartFailed": "Service failed to start",
"openingUI": "Opening CCR UI...",
"openUIFailed": "Failed to open CCR UI: {{error}}",
"openingAdmin": "Opening CCR admin...",
"openAdminFailed": "Failed to open admin UI: {{error}}"
},
"claudeSession": {
"scrollToTop": "Scroll to top",
"scrollToBottom": "Scroll to bottom",
"startFileWatch": "Start file watching",
"stopFileWatch": "Stop file watching"
},
"mcp": {
"title": "MCP Server Management",
@@ -766,8 +801,11 @@
"claudeCodeNotFound": "Claude Code not found",
"selectClaudeInstallation": "Select Claude Installation",
"installClaudeCode": "Install Claude Code",
"smartSessionCreated": "Smart session '{{name}}' created at: {{path}}",
"smartSessionCreated": "Smart session '{{name}}' is ready to use.",
"smartSessionDefaultToast": "Smart session '{{name}}' is ready to use.",
"smartSessionDefaultTitle": "Smart Session",
"failedToCreateSmartSession": "Failed to create smart session: {{error}}",
"failedToCreateSmartSessionFallback": "Failed to create smart session: {{error}}",
"noTabsOpen": "No tabs open",
"clickPlusToStartChat": "Click the + button to start a new chat",
"noAgentRunIdSpecified": "No agent run ID specified",

View File

@@ -516,7 +516,42 @@
"path": "路径",
"source": "来源",
"version": "版本",
"versionUnknown": "版本未知"
"versionUnknown": "版本未知",
"modelMappings": {
"title": "模型别名映射",
"description": "配置别名sonnet、opus、haiku对应的实际模型版本",
"loadFailed": "加载模型映射失败",
"saved": "模型映射已保存",
"saveFailed": "保存模型映射失败",
"emptyTitle": "暂无模型映射配置",
"emptySubtitle": "数据库初始化可能未完成,请尝试重启应用",
"changedNotice": "模型映射已修改,点击保存以应用更改",
"note": "说明:",
"noteContent": "Agent 执行时会根据此配置解析模型别名,例如 sonnet → claude-sonnet-4-20250514。",
"aliasDescriptions": {
"sonnet": "平衡性能与成本的主力模型",
"opus": "最强大的旗舰模型,适合复杂任务",
"haiku": "快速响应的轻量级模型"
}
}
},
"ccr": {
"loadStatusFailed": "加载 CCR 服务状态失败:{{error}}",
"startFailed": "启动 CCR 服务失败:{{error}}",
"stopFailed": "停止 CCR 服务失败:{{error}}",
"restartFailed": "重启 CCR 服务失败:{{error}}",
"serviceStarting": "检测到服务未运行,正在启动...",
"serviceStartFailed": "服务启动失败",
"openingUI": "正在打开 CCR UI...",
"openUIFailed": "打开 CCR UI 失败:{{error}}",
"openingAdmin": "正在打开 CCR 管理界面...",
"openAdminFailed": "打开管理界面失败:{{error}}"
},
"claudeSession": {
"scrollToTop": "滚动到顶部",
"scrollToBottom": "滚动到底部",
"startFileWatch": "启动文件监控",
"stopFileWatch": "停止文件监控"
},
"mcp": {
"title": "MCP 服务器管理",
@@ -709,8 +744,11 @@
"claudeCodeNotFound": "未找到 Claude Code",
"selectClaudeInstallation": "选择 Claude 安装",
"installClaudeCode": "安装 Claude Code",
"smartSessionCreated": "智能会话 '{{name}}' 已创建在:{{path}}",
"smartSessionCreated": "智能会话「{{name}}」已就绪。",
"smartSessionDefaultToast": "智能会话「{{name}}」已就绪。",
"smartSessionDefaultTitle": "智能会话",
"failedToCreateSmartSession": "创建智能会话失败:{{error}}",
"failedToCreateSmartSessionFallback": "创建智能会话失败:{{error}}",
"session": "会话",
"letClaudeDecide": "让 Claude 决定",
"basicReasoning": "基础推理",