From 0e8e63415e7c09e328f1762374fa837a47765505 Mon Sep 17 00:00:00 2001 From: bowen628 Date: Sat, 21 Mar 2026 00:12:17 +0800 Subject: [PATCH 1/5] feat: add SSH remote workspace support - SSH connection management with saved connections, key/agent/password auth - Remote file system access via SFTP (read, write, edit, delete, rename) - Remote terminal sessions over SSH PTY - Multi-workspace registry: path-keyed HashMap replaces single global state, allowing multiple concurrent remote workspaces without path conflicts - Per-workspace connection status indicator (green/yellow/red dot) in sidebar - Auto-reconnect on startup: retries up to 5 times with 10s intervals, keeps workspace in sidebar with error state if reconnection fails - Session list re-initialized after SSH reconnect to fix timing race where sessions loaded before workspace was registered in state manager - Snapshot/rollback skipped for remote workspaces (no local .bitfun dir); rollback commands return empty success instead of "directory not found" error - Agentic tools (file read/write/edit, bash, glob, grep) routed through SSH when workspace path matches a registered remote workspace Co-Authored-By: Claude Sonnet 4.6 --- src/apps/desktop/Cargo.toml | 3 +- src/apps/desktop/src/api/agentic_api.rs | 12 +- src/apps/desktop/src/api/app_state.rs | 204 ++- src/apps/desktop/src/api/commands.rs | 304 +++- src/apps/desktop/src/api/dto.rs | 19 + src/apps/desktop/src/api/mod.rs | 5 +- src/apps/desktop/src/api/session_api.rs | 18 +- src/apps/desktop/src/api/snapshot_service.rs | 34 +- src/apps/desktop/src/api/ssh_api.rs | 323 ++++ src/apps/desktop/src/api/terminal_api.rs | 285 +++- src/apps/desktop/src/api/tool_api.rs | 1 + src/apps/desktop/src/lib.rs | 23 + src/apps/server/src/ai_relay.rs | 237 +++ src/apps/server/src/bootstrap.rs | 234 +++ src/apps/server/src/rpc_dispatcher.rs | 310 ++++ src/crates/core/Cargo.toml | 14 +- .../src/agentic/coordination/coordinator.rs | 114 +- 
.../src/agentic/execution/execution_engine.rs | 2 + .../src/agentic/execution/round_executor.rs | 3 +- .../core/src/agentic/execution/types.rs | 4 + src/crates/core/src/agentic/mod.rs | 2 +- .../src/agentic/session/session_manager.rs | 80 +- .../core/src/agentic/tools/framework.rs | 19 + .../tools/implementations/bash_tool.rs | 42 +- .../tools/implementations/delete_file_tool.rs | 105 +- .../tools/implementations/file_edit_tool.rs | 51 +- .../tools/implementations/file_read_tool.rs | 123 +- .../tools/implementations/file_write_tool.rs | 29 +- .../tools/implementations/glob_tool.rs | 88 +- .../tools/implementations/grep_tool.rs | 246 ++- .../agentic/tools/implementations/ls_tool.rs | 126 +- .../tools/implementations/task_tool.rs | 42 +- .../tool-runtime/src/fs/backend.rs | 81 + .../tool-runtime/src/fs/mod.rs | 3 + .../tools/implementations/web_tools.rs | 1 + .../agentic/tools/pipeline/tool_pipeline.rs | 1 + .../core/src/agentic/tools/pipeline/types.rs | 2 + src/crates/core/src/agentic/workspace.rs | 282 ++++ .../infrastructure/filesystem/file_tree.rs | 51 + src/crates/core/src/service/mod.rs | 1 + .../core/src/service/remote_ssh/manager.rs | 1333 +++++++++++++++++ src/crates/core/src/service/remote_ssh/mod.rs | 24 + .../core/src/service/remote_ssh/remote_fs.rs | 331 ++++ .../src/service/remote_ssh/remote_terminal.rs | 295 ++++ .../core/src/service/remote_ssh/types.rs | 242 +++ .../src/service/remote_ssh/workspace_state.rs | 265 ++++ .../core/src/service/snapshot/manager.rs | 16 +- .../core/src/service/terminal/src/api.rs | 3 + .../core/src/service/workspace/manager.rs | 42 +- .../core/src/service/workspace/service.rs | 4 + src/web-ui/src/app/App.tsx | 55 +- .../src/app/components/NavPanel/MainNav.tsx | 56 + .../sections/workspaces/WorkspaceItem.tsx | 24 +- .../workspaces/WorkspaceListSection.scss | 55 + .../TitleBar/RemoteConnectionIndicator.tsx | 49 + .../src/app/components/TitleBar/TitleBar.scss | 53 + .../src/app/components/TitleBar/TitleBar.tsx | 12 +- 
.../app/scenes/shell/hooks/useShellEntries.ts | 19 +- .../features/ssh-remote/ConfirmDialog.scss | 41 + .../src/features/ssh-remote/ConfirmDialog.tsx | 73 + .../ssh-remote/PasswordInputDialog.scss | 61 + .../ssh-remote/PasswordInputDialog.tsx | 119 ++ .../ssh-remote/RemoteFileBrowser.scss | 512 +++++++ .../features/ssh-remote/RemoteFileBrowser.tsx | 521 +++++++ .../ssh-remote/SSHConnectionDialog.scss | 220 +++ .../ssh-remote/SSHConnectionDialog.tsx | 622 ++++++++ .../features/ssh-remote/SSHRemoteProvider.tsx | 487 ++++++ src/web-ui/src/features/ssh-remote/index.ts | 11 + src/web-ui/src/features/ssh-remote/sshApi.ts | 191 +++ src/web-ui/src/features/ssh-remote/types.ts | 84 ++ .../src/features/ssh-remote/useSSHRemote.ts | 180 +++ .../api/adapters/tauri-adapter.ts | 82 +- .../api/service-api/GlobalAPI.ts | 18 + .../api/service-api/tauri-commands.ts | 2 + .../services/business/workspaceManager.ts | 85 +- src/web-ui/src/locales/en-US/common.json | 49 + src/web-ui/src/locales/zh-CN/common.json | 49 + src/web-ui/src/shared/types/global-state.ts | 16 + .../src/tools/terminal/types/session.ts | 1 + 79 files changed, 9368 insertions(+), 458 deletions(-) create mode 100644 src/apps/desktop/src/api/ssh_api.rs create mode 100644 src/apps/server/src/ai_relay.rs create mode 100644 src/apps/server/src/bootstrap.rs create mode 100644 src/apps/server/src/rpc_dispatcher.rs create mode 100644 src/crates/core/src/agentic/tools/implementations/tool-runtime/src/fs/backend.rs create mode 100644 src/crates/core/src/service/remote_ssh/manager.rs create mode 100644 src/crates/core/src/service/remote_ssh/mod.rs create mode 100644 src/crates/core/src/service/remote_ssh/remote_fs.rs create mode 100644 src/crates/core/src/service/remote_ssh/remote_terminal.rs create mode 100644 src/crates/core/src/service/remote_ssh/types.rs create mode 100644 src/crates/core/src/service/remote_ssh/workspace_state.rs create mode 100644 src/web-ui/src/app/components/TitleBar/RemoteConnectionIndicator.tsx create 
mode 100644 src/web-ui/src/features/ssh-remote/ConfirmDialog.scss create mode 100644 src/web-ui/src/features/ssh-remote/ConfirmDialog.tsx create mode 100644 src/web-ui/src/features/ssh-remote/PasswordInputDialog.scss create mode 100644 src/web-ui/src/features/ssh-remote/PasswordInputDialog.tsx create mode 100644 src/web-ui/src/features/ssh-remote/RemoteFileBrowser.scss create mode 100644 src/web-ui/src/features/ssh-remote/RemoteFileBrowser.tsx create mode 100644 src/web-ui/src/features/ssh-remote/SSHConnectionDialog.scss create mode 100644 src/web-ui/src/features/ssh-remote/SSHConnectionDialog.tsx create mode 100644 src/web-ui/src/features/ssh-remote/SSHRemoteProvider.tsx create mode 100644 src/web-ui/src/features/ssh-remote/index.ts create mode 100644 src/web-ui/src/features/ssh-remote/sshApi.ts create mode 100644 src/web-ui/src/features/ssh-remote/types.ts create mode 100644 src/web-ui/src/features/ssh-remote/useSSHRemote.ts diff --git a/src/apps/desktop/Cargo.toml b/src/apps/desktop/Cargo.toml index 8bf8c308..bed49304 100644 --- a/src/apps/desktop/Cargo.toml +++ b/src/apps/desktop/Cargo.toml @@ -19,7 +19,7 @@ serde_json = { workspace = true } [dependencies] # Internal crates -bitfun-core = { path = "../../crates/core" } +bitfun-core = { path = "../../crates/core", features = ["ssh-remote"] } bitfun-transport = { path = "../../crates/transport", features = ["tauri-adapter"] } # Tauri @@ -42,6 +42,7 @@ similar = { workspace = true } ignore = { workspace = true } urlencoding = { workspace = true } reqwest = { workspace = true } +thiserror = "1.0" [target.'cfg(windows)'.dependencies] win32job = { workspace = true } diff --git a/src/apps/desktop/src/api/agentic_api.rs b/src/apps/desktop/src/api/agentic_api.rs index ff936d18..bb47461b 100644 --- a/src/apps/desktop/src/api/agentic_api.rs +++ b/src/apps/desktop/src/api/agentic_api.rs @@ -2,7 +2,6 @@ use log::warn; use serde::{Deserialize, Serialize}; -use std::path::PathBuf; use std::sync::Arc; use tauri::{AppHandle, 
State}; @@ -14,6 +13,7 @@ use bitfun_core::agentic::coordination::{ use bitfun_core::agentic::core::*; use bitfun_core::agentic::image_analysis::ImageContextData; use bitfun_core::agentic::tools::image_context::get_image_context; +use bitfun_core::service::remote_ssh::workspace_state::get_effective_session_path; #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] @@ -435,8 +435,9 @@ pub async fn delete_session( coordinator: State<'_, Arc>, request: DeleteSessionRequest, ) -> Result<(), String> { + let effective_path = get_effective_session_path(&request.workspace_path).await; coordinator - .delete_session(&PathBuf::from(request.workspace_path), &request.session_id) + .delete_session(&effective_path, &request.session_id) .await .map_err(|e| format!("Failed to delete session: {}", e)) } @@ -446,8 +447,9 @@ pub async fn restore_session( coordinator: State<'_, Arc>, request: RestoreSessionRequest, ) -> Result { + let effective_path = get_effective_session_path(&request.workspace_path).await; let session = coordinator - .restore_session(&PathBuf::from(request.workspace_path), &request.session_id) + .restore_session(&effective_path, &request.session_id) .await .map_err(|e| format!("Failed to restore session: {}", e))?; @@ -459,8 +461,10 @@ pub async fn list_sessions( coordinator: State<'_, Arc>, request: ListSessionsRequest, ) -> Result, String> { + // Map remote workspace path to local session storage path + let effective_path = get_effective_session_path(&request.workspace_path).await; let summaries = coordinator - .list_sessions(&PathBuf::from(request.workspace_path)) + .list_sessions(&effective_path) .await .map_err(|e| format!("Failed to list sessions: {}", e))?; diff --git a/src/apps/desktop/src/api/app_state.rs b/src/apps/desktop/src/api/app_state.rs index 349849e0..4bf6d38c 100644 --- a/src/apps/desktop/src/api/app_state.rs +++ b/src/apps/desktop/src/api/app_state.rs @@ -5,13 +5,28 @@ use bitfun_core::agentic::{agents, tools}; use 
bitfun_core::infrastructure::ai::{AIClient, AIClientFactory}; use bitfun_core::miniapp::{initialize_global_miniapp_manager, JsWorkerPool, MiniAppManager}; use bitfun_core::service::{ai_rules, config, filesystem, mcp, token_usage, workspace}; +use bitfun_core::service::remote_ssh::{ + init_remote_workspace_manager, SSHConnectionManager, RemoteFileService, RemoteTerminalManager, +}; use bitfun_core::util::errors::*; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::sync::Arc; +use thiserror::Error; use tokio::sync::RwLock; +/// Errors that can occur when accessing SSH remote services +#[derive(Error, Debug)] +pub enum SSHServiceError { + #[error("SSH manager not initialized")] + ManagerNotInitialized, + #[error("Remote file service not initialized")] + FileServiceNotInitialized, + #[error("Remote terminal manager not initialized")] + TerminalManagerNotInitialized, +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct HealthStatus { pub status: String, @@ -28,6 +43,15 @@ pub struct AppStatistics { pub uptime_seconds: u64, } +/// Remote workspace information +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RemoteWorkspace { + pub connection_id: String, + pub connection_name: String, + pub remote_path: String, +} + pub struct AppState { pub ai_client: Arc>>, pub ai_client_factory: Arc, @@ -46,6 +70,11 @@ pub struct AppState { pub js_worker_pool: Option>, pub statistics: Arc>, pub start_time: std::time::Instant, + // SSH Remote connection state + pub ssh_manager: Arc>>, + pub remote_file_service: Arc>>, + pub remote_terminal_manager: Arc>>, + pub remote_workspace: Arc>>, } impl AppState { @@ -143,6 +172,74 @@ impl AppState { } } + // Initialize SSH Remote services synchronously so they're ready before app starts + let ssh_data_dir = dirs::data_local_dir() + .unwrap_or_else(|| std::path::PathBuf::from(".")) + .join("BitFun") + .join("ssh"); + let ssh_manager = Arc::new(RwLock::new(None)); 
+ let ssh_manager_clone = ssh_manager.clone(); + let remote_file_service = Arc::new(RwLock::new(None)); + let remote_file_service_clone = remote_file_service.clone(); + let remote_terminal_manager = Arc::new(RwLock::new(None)); + let remote_terminal_manager_clone = remote_terminal_manager.clone(); + // Create remote_workspace before spawn so we can pass it in + let remote_workspace = Arc::new(RwLock::new(None)); + let remote_workspace_clone = remote_workspace.clone(); + + // Initialize SSH services synchronously (not spawned) so they're ready before app starts + let manager = SSHConnectionManager::new(ssh_data_dir.clone()); + if let Err(e) = manager.load_saved_connections().await { + log::error!("Failed to load saved SSH connections: {}", e); + } else { + log::info!("SSH connections loaded successfully"); + } + if let Err(e) = manager.load_known_hosts().await { + log::error!("Failed to load known hosts: {}", e); + } + + // Load persisted remote workspaces (may be multiple) + match manager.load_remote_workspace().await { + Ok(_) => { + let workspaces = manager.get_remote_workspaces().await; + if !workspaces.is_empty() { + log::info!("Loaded {} persisted remote workspace(s)", workspaces.len()); + // Use the first one for the legacy single-workspace field + let first = &workspaces[0]; + let app_workspace = RemoteWorkspace { + connection_id: first.connection_id.clone(), + remote_path: first.remote_path.clone(), + connection_name: first.connection_name.clone(), + }; + *remote_workspace_clone.write().await = Some(app_workspace); + } + } + Err(e) => { + log::warn!("Failed to load remote workspace: {}", e); + } + } + + let manager_arc = Arc::new(manager); + let manager_for_fs = Arc::new(tokio::sync::RwLock::new(Some(manager_arc.as_ref().clone()))); + let fs = RemoteFileService::new(manager_for_fs.clone()); + let tm = RemoteTerminalManager::new(manager_arc.as_ref().clone()); + + // Clone for storing in AppState + let fs_for_state = fs.clone(); + let tm_for_state = 
tm.clone(); + + *ssh_manager_clone.write().await = Some((*manager_arc).clone()); + *remote_file_service_clone.write().await = Some(fs_for_state); + *remote_terminal_manager_clone.write().await = Some(tm_for_state); + + // Note: We do NOT activate the global remote workspace state here because + // there is no live SSH connection yet. The persisted workspace info is loaded + // into self.remote_workspace so the frontend can query it via remote_get_workspace_info + // and drive the reconnection flow. The global state will be activated when the + // frontend successfully reconnects and calls remote_open_workspace → set_remote_workspace. + + log::info!("SSH Remote services initialized with SFTP, PTY, and known hosts support"); + let app_state = Self { ai_client, ai_client_factory, @@ -161,6 +258,11 @@ impl AppState { js_worker_pool, statistics, start_time, + // SSH Remote connection state + ssh_manager, + remote_file_service, + remote_terminal_manager, + remote_workspace, }; log::info!("AppState initialized successfully"); @@ -207,4 +309,104 @@ impl AppState { .map(|tool| tool.name().to_string()) .collect() } -} + + // SSH Remote connection methods + + /// Get SSH connection manager synchronously (must be called within async context) + pub async fn get_ssh_manager_async(&self) -> Result { + self.ssh_manager.read().await.clone() + .ok_or(SSHServiceError::ManagerNotInitialized) + } + + /// Get remote file service synchronously (must be called within async context) + pub async fn get_remote_file_service_async(&self) -> Result { + self.remote_file_service.read().await.clone() + .ok_or(SSHServiceError::FileServiceNotInitialized) + } + + /// Get remote terminal manager synchronously (must be called within async context) + pub async fn get_remote_terminal_manager_async(&self) -> Result { + self.remote_terminal_manager.read().await.clone() + .ok_or(SSHServiceError::TerminalManagerNotInitialized) + } + + /// Set current remote workspace + pub async fn 
set_remote_workspace(&self, workspace: RemoteWorkspace) -> Result<(), SSHServiceError> { + // Update local state + *self.remote_workspace.write().await = Some(workspace.clone()); + + // Persist to SSHConnectionManager for restoration on restart + if let Ok(manager) = self.get_ssh_manager_async().await { + let core_workspace = bitfun_core::service::remote_ssh::RemoteWorkspace { + connection_id: workspace.connection_id.clone(), + remote_path: workspace.remote_path.clone(), + connection_name: workspace.connection_name.clone(), + }; + if let Err(e) = manager.set_remote_workspace(core_workspace).await { + log::warn!("Failed to persist remote workspace: {}", e); + } + } + + // Register in the global workspace registry + let state_manager = init_remote_workspace_manager(); + + // Ensure shared services are set (idempotent if already set) + let manager = self.get_ssh_manager_async().await?; + let fs = self.get_remote_file_service_async().await?; + let terminal = self.get_remote_terminal_manager_async().await?; + + state_manager.set_ssh_manager(manager.clone()).await; + state_manager.set_file_service(fs.clone()).await; + state_manager.set_terminal_manager(terminal.clone()).await; + + // Register this workspace (does not overwrite other workspaces) + log::info!("register_remote_workspace: connection_id={}, remote_path={}, connection_name={}", + workspace.connection_id, workspace.remote_path, workspace.connection_name); + state_manager.register_remote_workspace( + workspace.remote_path.clone(), + workspace.connection_id.clone(), + workspace.connection_name.clone(), + ).await; + log::info!("Remote workspace registered: {} on {}", + workspace.remote_path, workspace.connection_name); + Ok(()) + } + + /// Get current remote workspace + pub async fn get_remote_workspace_async(&self) -> Option { + self.remote_workspace.read().await.clone() + } + + /// Clear current remote workspace + pub async fn clear_remote_workspace(&self) { + // Get the remote_path before clearing so we can 
unregister the specific workspace + let remote_path = { + let guard = self.remote_workspace.read().await; + guard.as_ref().map(|w| w.remote_path.clone()) + }; + + // Clear local state + *self.remote_workspace.write().await = None; + + // Remove this specific workspace from persistence (not all of them) + if let Some(path) = &remote_path { + if let Ok(manager) = self.get_ssh_manager_async().await { + if let Err(e) = manager.remove_remote_workspace(path).await { + log::warn!("Failed to remove persisted remote workspace: {}", e); + } + } + + // Unregister from the global registry + if let Some(state_manager) = bitfun_core::service::remote_ssh::get_remote_workspace_manager() { + state_manager.unregister_remote_workspace(path).await; + } + } + + log::info!("Remote workspace unregistered: {:?}", remote_path); + } + + /// Check if currently in a remote workspace + pub async fn is_remote_workspace(&self) -> bool { + self.remote_workspace.read().await.is_some() + } +} \ No newline at end of file diff --git a/src/apps/desktop/src/api/commands.rs b/src/apps/desktop/src/api/commands.rs index 4e746117..af94a82a 100644 --- a/src/apps/desktop/src/api/commands.rs +++ b/src/apps/desktop/src/api/commands.rs @@ -16,6 +16,14 @@ pub struct OpenWorkspaceRequest { pub path: String, } +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OpenRemoteWorkspaceRequest { + pub remote_path: String, + pub connection_id: String, + pub connection_name: String, +} + #[derive(Debug, Deserialize, Default)] pub struct CreateAssistantWorkspaceRequest {} @@ -652,6 +660,89 @@ pub async fn open_workspace( } } +#[tauri::command] +pub async fn open_remote_workspace( + state: State<'_, AppState>, + app: tauri::AppHandle, + request: OpenRemoteWorkspaceRequest, +) -> Result { + use bitfun_core::service::workspace::WorkspaceCreateOptions; + + let display_name = request + .remote_path + .split('/') + .filter(|s| !s.is_empty()) + .last() + .unwrap_or(&request.remote_path) + .to_string(); 
+ + let options = WorkspaceCreateOptions { + scan_options: ScanOptions { + calculate_statistics: false, + ..ScanOptions::default() + }, + auto_set_current: true, + add_to_recent: true, + workspace_kind: WorkspaceKind::Remote, + assistant_id: None, + display_name: Some(display_name), + description: None, + tags: Vec::new(), + }; + + match state + .workspace_service + .open_workspace_with_options(request.remote_path.clone().into(), options) + .await + { + Ok(mut workspace_info) => { + workspace_info.metadata.insert( + "connectionId".to_string(), + serde_json::Value::String(request.connection_id.clone()), + ); + workspace_info.metadata.insert( + "connectionName".to_string(), + serde_json::Value::String(request.connection_name.clone()), + ); + + { + let manager = state.workspace_service.get_manager(); + let mut manager = manager.write().await; + if let Some(ws) = manager.get_workspaces_mut().get_mut(&workspace_info.id) { + ws.metadata = workspace_info.metadata.clone(); + } + } + if let Err(e) = state.workspace_service.manual_save().await { + warn!("Failed to save workspace data after opening remote workspace: {}", e); + } + + apply_active_workspace_context(&state, &app, &workspace_info).await; + + // Also update the RemoteWorkspaceStateManager so tools can use this connection + let remote_workspace = crate::api::RemoteWorkspace { + connection_id: request.connection_id.clone(), + connection_name: request.connection_name.clone(), + remote_path: request.remote_path.clone(), + }; + if let Err(e) = state.set_remote_workspace(remote_workspace).await { + warn!("Failed to set remote workspace state: {}", e); + } + + info!( + "Remote workspace opened: name={}, remote_path={}, connection_id={}", + workspace_info.name, + workspace_info.root_path.display(), + request.connection_id + ); + Ok(WorkspaceInfoDto::from_workspace_info(&workspace_info)) + } + Err(e) => { + error!("Failed to open remote workspace: {}", e); + Err(format!("Failed to open remote workspace: {}", e)) + } + } +} 
+ #[tauri::command] pub async fn create_assistant_workspace( state: State<'_, AppState>, @@ -903,12 +994,26 @@ pub async fn close_workspace( app: tauri::AppHandle, request: CloseWorkspaceRequest, ) -> Result<(), String> { + // Check if the workspace being closed is a remote workspace before closing it + let is_remote = state + .workspace_service + .get_workspace(&request.workspace_id) + .await + .map(|w| w.workspace_kind == WorkspaceKind::Remote) + .unwrap_or(false); + match state .workspace_service .close_workspace(&request.workspace_id) .await { Ok(_) => { + // If it was a remote workspace, also clear the persisted remote workspace data + // so it doesn't get re-opened on next restart + if is_remote { + state.clear_remote_workspace().await; + } + if let Some(workspace_info) = state.workspace_service.get_current_workspace().await { apply_active_workspace_context(&state, &app, &workspace_info).await; } else { @@ -1100,14 +1205,17 @@ pub async fn get_file_tree( request: GetFileTreeRequest, ) -> Result { use std::path::Path; + use bitfun_core::service::remote_ssh::workspace_state::is_remote_path; - let path_buf = Path::new(&request.path); - if !path_buf.exists() { - return Err("Directory does not exist".to_string()); - } - - if !path_buf.is_dir() { - return Err("Path is not a directory".to_string()); + let is_remote = is_remote_path(&request.path).await; + if !is_remote { + let path_buf = Path::new(&request.path); + if !path_buf.exists() { + return Err("Directory does not exist".to_string()); + } + if !path_buf.is_dir() { + return Err("Path is not a directory".to_string()); + } } let filesystem_service = &state.filesystem_service; @@ -1134,7 +1242,7 @@ pub async fn get_file_tree( json } - let root_name = path_buf + let root_name = Path::new(&request.path) .file_name() .and_then(|n| n.to_str()) .unwrap_or(&request.path); @@ -1164,14 +1272,17 @@ pub async fn get_directory_children( request: GetDirectoryChildrenRequest, ) -> Result { use std::path::Path; + use 
bitfun_core::service::remote_ssh::workspace_state::is_remote_path; - let path_buf = Path::new(&request.path); - if !path_buf.exists() { - return Err("Directory does not exist".to_string()); - } - - if !path_buf.is_dir() { - return Err("Path is not a directory".to_string()); + let is_remote = is_remote_path(&request.path).await; + if !is_remote { + let path_buf = Path::new(&request.path); + if !path_buf.exists() { + return Err("Directory does not exist".to_string()); + } + if !path_buf.is_dir() { + return Err("Path is not a directory".to_string()); + } } let filesystem_service = &state.filesystem_service; @@ -1209,17 +1320,20 @@ pub async fn get_directory_children_paginated( request: GetDirectoryChildrenPaginatedRequest, ) -> Result { use std::path::Path; + use bitfun_core::service::remote_ssh::workspace_state::is_remote_path; let offset = request.offset.unwrap_or(0); let limit = request.limit.unwrap_or(100); - let path_buf = Path::new(&request.path); - if !path_buf.exists() { - return Err("Directory does not exist".to_string()); - } - - if !path_buf.is_dir() { - return Err("Path is not a directory".to_string()); + let is_remote = is_remote_path(&request.path).await; + if !is_remote { + let path_buf = Path::new(&request.path); + if !path_buf.exists() { + return Err("Directory does not exist".to_string()); + } + if !path_buf.is_dir() { + return Err("Path is not a directory".to_string()); + } } let filesystem_service = &state.filesystem_service; @@ -1265,6 +1379,17 @@ pub async fn read_file_content( state: State<'_, AppState>, request: ReadFileContentRequest, ) -> Result { + use bitfun_core::service::remote_ssh::workspace_state::lookup_remote_connection; + + if let Some(entry) = lookup_remote_connection(&request.file_path).await { + let remote_fs = state.get_remote_file_service_async().await + .map_err(|e| format!("Remote file service not available: {}", e))?; + let bytes = remote_fs.read_file(&entry.connection_id, &request.file_path).await + .map_err(|e| 
format!("Failed to read remote file: {}", e))?; + return String::from_utf8(bytes) + .map_err(|e| format!("File is not valid UTF-8: {}", e)); + } + match state.filesystem_service.read_file(&request.file_path).await { Ok(result) => Ok(result.content), Err(e) => { @@ -1282,6 +1407,16 @@ pub async fn write_file_content( state: State<'_, AppState>, request: WriteFileContentRequest, ) -> Result<(), String> { + use bitfun_core::service::remote_ssh::workspace_state::lookup_remote_connection; + + if let Some(entry) = lookup_remote_connection(&request.file_path).await { + let remote_fs = state.get_remote_file_service_async().await + .map_err(|e| format!("Remote file service not available: {}", e))?; + remote_fs.write_file(&entry.connection_id, &request.file_path, request.content.as_bytes()).await + .map_err(|e| format!("Failed to write remote file: {}", e))?; + return Ok(()); + } + let full_path = request.file_path; let mut options = FileOperationOptions::default(); options.backup_on_overwrite = false; @@ -1335,21 +1470,57 @@ pub async fn reset_workspace_persona_files( } #[tauri::command] -pub async fn check_path_exists(request: CheckPathExistsRequest) -> Result { +pub async fn check_path_exists( + state: State<'_, AppState>, + request: CheckPathExistsRequest, +) -> Result { + use bitfun_core::service::remote_ssh::workspace_state::lookup_remote_connection; + + if let Some(entry) = lookup_remote_connection(&request.path).await { + let remote_fs = state.get_remote_file_service_async().await + .map_err(|e| format!("Remote file service not available: {}", e))?; + return remote_fs.exists(&entry.connection_id, &request.path).await + .map_err(|e| format!("Failed to check remote path: {}", e)); + } + let path = std::path::Path::new(&request.path); Ok(path.exists()) } #[tauri::command] pub async fn get_file_metadata( + state: State<'_, AppState>, request: GetFileMetadataRequest, ) -> Result { - use std::fs; use std::time::SystemTime; + use 
bitfun_core::service::remote_ssh::workspace_state::lookup_remote_connection; + + if let Some(entry) = lookup_remote_connection(&request.path).await { + let remote_fs = state.get_remote_file_service_async().await + .map_err(|e| format!("Remote file service not available: {}", e))?; + + let is_file = remote_fs.is_file(&entry.connection_id, &request.path).await + .unwrap_or(false); + let is_dir = remote_fs.is_dir(&entry.connection_id, &request.path).await + .unwrap_or(false); + + let now_ms = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap_or_default() + .as_millis() as u64; + + return Ok(serde_json::json!({ + "path": request.path, + "modified": now_ms, + "size": 0, + "is_file": is_file, + "is_dir": is_dir, + "is_remote": true + })); + } let path = std::path::Path::new(&request.path); - - match fs::metadata(path) { + match std::fs::metadata(path) { Ok(metadata) => { let modified = metadata .modified() @@ -1385,6 +1556,16 @@ pub async fn rename_file( state: State<'_, AppState>, request: RenameFileRequest, ) -> Result<(), String> { + use bitfun_core::service::remote_ssh::workspace_state::lookup_remote_connection; + + if let Some(entry) = lookup_remote_connection(&request.old_path).await { + let remote_fs = state.get_remote_file_service_async().await + .map_err(|e| format!("Remote file service not available: {}", e))?; + remote_fs.rename(&entry.connection_id, &request.old_path, &request.new_path).await + .map_err(|e| format!("Failed to rename remote file: {}", e))?; + return Ok(()); + } + state .filesystem_service .move_file(&request.old_path, &request.new_path) @@ -1399,6 +1580,16 @@ pub async fn delete_file( state: State<'_, AppState>, request: DeleteFileRequest, ) -> Result<(), String> { + use bitfun_core::service::remote_ssh::workspace_state::lookup_remote_connection; + + if let Some(entry) = lookup_remote_connection(&request.path).await { + let remote_fs = state.get_remote_file_service_async().await + .map_err(|e| format!("Remote file service 
not available: {}", e))?; + remote_fs.remove_file(&entry.connection_id, &request.path).await + .map_err(|e| format!("Failed to delete remote file: {}", e))?; + return Ok(()); + } + state .filesystem_service .delete_file(&request.path) @@ -1413,8 +1604,23 @@ pub async fn delete_directory( state: State<'_, AppState>, request: DeleteDirectoryRequest, ) -> Result<(), String> { + use bitfun_core::service::remote_ssh::workspace_state::lookup_remote_connection; + let recursive = request.recursive.unwrap_or(false); + if let Some(entry) = lookup_remote_connection(&request.path).await { + let remote_fs = state.get_remote_file_service_async().await + .map_err(|e| format!("Remote file service not available: {}", e))?; + if recursive { + remote_fs.remove_dir_all(&entry.connection_id, &request.path).await + .map_err(|e| format!("Failed to delete remote directory: {}", e))?; + } else { + remote_fs.remove_dir_all(&entry.connection_id, &request.path).await + .map_err(|e| format!("Failed to delete remote directory: {}", e))?; + } + return Ok(()); + } + state .filesystem_service .delete_directory(&request.path, recursive) @@ -1429,6 +1635,16 @@ pub async fn create_file( state: State<'_, AppState>, request: CreateFileRequest, ) -> Result<(), String> { + use bitfun_core::service::remote_ssh::workspace_state::lookup_remote_connection; + + if let Some(entry) = lookup_remote_connection(&request.path).await { + let remote_fs = state.get_remote_file_service_async().await + .map_err(|e| format!("Remote file service not available: {}", e))?; + remote_fs.write_file(&entry.connection_id, &request.path, b"").await + .map_err(|e| format!("Failed to create remote file: {}", e))?; + return Ok(()); + } + let options = FileOperationOptions::default(); state .filesystem_service @@ -1444,6 +1660,16 @@ pub async fn create_directory( state: State<'_, AppState>, request: CreateDirectoryRequest, ) -> Result<(), String> { + use bitfun_core::service::remote_ssh::workspace_state::lookup_remote_connection; + + 
if let Some(entry) = lookup_remote_connection(&request.path).await { + let remote_fs = state.get_remote_file_service_async().await + .map_err(|e| format!("Remote file service not available: {}", e))?; + remote_fs.create_dir_all(&entry.connection_id, &request.path).await + .map_err(|e| format!("Failed to create remote directory: {}", e))?; + return Ok(()); + } + state .filesystem_service .create_directory(&request.path) @@ -1461,9 +1687,35 @@ pub struct ListDirectoryFilesRequest { #[tauri::command] pub async fn list_directory_files( + state: State<'_, AppState>, request: ListDirectoryFilesRequest, ) -> Result, String> { use std::path::Path; + use bitfun_core::service::remote_ssh::workspace_state::lookup_remote_connection; + + if let Some(entry) = lookup_remote_connection(&request.path).await { + let remote_fs = state.get_remote_file_service_async().await + .map_err(|e| format!("Remote file service not available: {}", e))?; + let entries = remote_fs.read_dir(&entry.connection_id, &request.path).await + .map_err(|e| format!("Failed to read remote directory: {}", e))?; + let mut files: Vec = entries.into_iter() + .filter(|e| !e.is_dir) + .filter(|e| { + if let Some(ref extensions) = request.extensions { + if let Some(ext) = Path::new(&e.name).extension().and_then(|x| x.to_str()) { + extensions.iter().any(|x| x.eq_ignore_ascii_case(ext)) + } else { + false + } + } else { + true + } + }) + .map(|e| e.name) + .collect(); + files.sort(); + return Ok(files); + } let dir_path = Path::new(&request.path); if !dir_path.exists() { diff --git a/src/apps/desktop/src/api/dto.rs b/src/apps/desktop/src/api/dto.rs index e7f7cb35..7ec91ed0 100644 --- a/src/apps/desktop/src/api/dto.rs +++ b/src/apps/desktop/src/api/dto.rs @@ -17,6 +17,7 @@ pub enum WorkspaceTypeDto { pub enum WorkspaceKindDto { Normal, Assistant, + Remote, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -55,12 +56,27 @@ pub struct WorkspaceInfoDto { pub tags: Vec, pub statistics: Option, pub identity: Option, + 
#[serde(skip_serializing_if = "Option::is_none")] + pub connection_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub connection_name: Option, } impl WorkspaceInfoDto { pub fn from_workspace_info( info: &bitfun_core::service::workspace::manager::WorkspaceInfo, ) -> Self { + let connection_id = info + .metadata + .get("connectionId") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + let connection_name = info + .metadata + .get("connectionName") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + Self { id: info.id.clone(), name: info.name.clone(), @@ -81,6 +97,8 @@ impl WorkspaceInfoDto { .identity .as_ref() .map(WorkspaceIdentityDto::from_workspace_identity), + connection_id, + connection_name, } } } @@ -124,6 +142,7 @@ impl WorkspaceKindDto { match workspace_kind { WorkspaceKind::Normal => WorkspaceKindDto::Normal, WorkspaceKind::Assistant => WorkspaceKindDto::Assistant, + WorkspaceKind::Remote => WorkspaceKindDto::Remote, } } } diff --git a/src/apps/desktop/src/api/mod.rs b/src/apps/desktop/src/api/mod.rs index cf3245a2..9a60278e 100644 --- a/src/apps/desktop/src/api/mod.rs +++ b/src/apps/desktop/src/api/mod.rs @@ -16,6 +16,7 @@ pub mod dto; pub mod git_agent_api; pub mod git_api; pub mod i18n_api; +pub mod insights_api; pub mod lsp_api; pub mod lsp_workspace_api; pub mod mcp_api; @@ -26,6 +27,7 @@ pub mod runtime_api; pub mod session_api; pub mod skill_api; pub mod snapshot_service; +pub mod ssh_api; pub mod startchat_agent_api; pub mod storage_commands; pub mod subagent_api; @@ -33,6 +35,5 @@ pub mod system_api; pub mod terminal_api; pub mod token_usage_api; pub mod tool_api; -pub mod insights_api; -pub use app_state::{AppState, AppStatistics, HealthStatus}; +pub use app_state::{AppState, AppStatistics, HealthStatus, RemoteWorkspace}; diff --git a/src/apps/desktop/src/api/session_api.rs b/src/apps/desktop/src/api/session_api.rs index c7c5d3ed..9971a975 100644 --- a/src/apps/desktop/src/api/session_api.rs +++ 
b/src/apps/desktop/src/api/session_api.rs @@ -2,11 +2,11 @@ use bitfun_core::agentic::persistence::PersistenceManager; use bitfun_core::infrastructure::PathManager; +use bitfun_core::service::remote_ssh::workspace_state::get_effective_session_path; use bitfun_core::service::session::{ DialogTurnData, SessionMetadata, SessionTranscriptExport, SessionTranscriptExportOptions, }; use serde::{Deserialize, Serialize}; -use std::path::PathBuf; use std::sync::Arc; use tauri::State; @@ -76,7 +76,7 @@ pub async fn list_persisted_sessions( request: ListPersistedSessionsRequest, path_manager: State<'_, Arc>, ) -> Result, String> { - let workspace_path = PathBuf::from(&request.workspace_path); + let workspace_path = get_effective_session_path(&request.workspace_path).await; let manager = PersistenceManager::new(path_manager.inner().clone()) .map_err(|e| format!("Failed to create persistence manager: {}", e))?; @@ -91,7 +91,7 @@ pub async fn load_session_turns( request: LoadSessionTurnsRequest, path_manager: State<'_, Arc>, ) -> Result, String> { - let workspace_path = PathBuf::from(&request.workspace_path); + let workspace_path = get_effective_session_path(&request.workspace_path).await; let manager = PersistenceManager::new(path_manager.inner().clone()) .map_err(|e| format!("Failed to create persistence manager: {}", e))?; @@ -113,7 +113,7 @@ pub async fn save_session_turn( request: SaveSessionTurnRequest, path_manager: State<'_, Arc>, ) -> Result<(), String> { - let workspace_path = PathBuf::from(&request.workspace_path); + let workspace_path = get_effective_session_path(&request.workspace_path).await; let manager = PersistenceManager::new(path_manager.inner().clone()) .map_err(|e| format!("Failed to create persistence manager: {}", e))?; @@ -128,7 +128,7 @@ pub async fn save_session_metadata( request: SaveSessionMetadataRequest, path_manager: State<'_, Arc>, ) -> Result<(), String> { - let workspace_path = PathBuf::from(&request.workspace_path); + let workspace_path = 
get_effective_session_path(&request.workspace_path).await; let manager = PersistenceManager::new(path_manager.inner().clone()) .map_err(|e| format!("Failed to create persistence manager: {}", e))?; @@ -143,7 +143,7 @@ pub async fn export_session_transcript( request: ExportSessionTranscriptRequest, path_manager: State<'_, Arc>, ) -> Result { - let workspace_path = PathBuf::from(&request.workspace_path); + let workspace_path = get_effective_session_path(&request.workspace_path).await; let manager = PersistenceManager::new(path_manager.inner().clone()) .map_err(|e| format!("Failed to create persistence manager: {}", e))?; @@ -167,7 +167,7 @@ pub async fn delete_persisted_session( request: DeletePersistedSessionRequest, path_manager: State<'_, Arc>, ) -> Result<(), String> { - let workspace_path = PathBuf::from(&request.workspace_path); + let workspace_path = get_effective_session_path(&request.workspace_path).await; let manager = PersistenceManager::new(path_manager.inner().clone()) .map_err(|e| format!("Failed to create persistence manager: {}", e))?; @@ -182,7 +182,7 @@ pub async fn touch_session_activity( request: TouchSessionActivityRequest, path_manager: State<'_, Arc>, ) -> Result<(), String> { - let workspace_path = PathBuf::from(&request.workspace_path); + let workspace_path = get_effective_session_path(&request.workspace_path).await; let manager = PersistenceManager::new(path_manager.inner().clone()) .map_err(|e| format!("Failed to create persistence manager: {}", e))?; @@ -197,7 +197,7 @@ pub async fn load_persisted_session_metadata( request: LoadPersistedSessionMetadataRequest, path_manager: State<'_, Arc>, ) -> Result, String> { - let workspace_path = PathBuf::from(&request.workspace_path); + let workspace_path = get_effective_session_path(&request.workspace_path).await; let manager = PersistenceManager::new(path_manager.inner().clone()) .map_err(|e| format!("Failed to create persistence manager: {}", e))?; diff --git 
a/src/apps/desktop/src/api/snapshot_service.rs b/src/apps/desktop/src/api/snapshot_service.rs index 0a88e360..3eeccca0 100644 --- a/src/apps/desktop/src/api/snapshot_service.rs +++ b/src/apps/desktop/src/api/snapshot_service.rs @@ -1,6 +1,7 @@ //! Snapshot Service API use bitfun_core::infrastructure::try_get_path_manager_arc; +use bitfun_core::service::remote_ssh::workspace_state::is_remote_path; use bitfun_core::service::snapshot::{ ensure_snapshot_manager_for_workspace, get_snapshot_manager_for_workspace, initialize_snapshot_manager_for_workspace, OperationType, SnapshotConfig, SnapshotManager, @@ -172,6 +173,14 @@ pub async fn initialize_snapshot( app_handle: AppHandle, request: SnapshotInitRequest, ) -> Result { + // Remote workspaces don't support snapshot system + if is_remote_path(&request.workspace_path).await { + return Ok(serde_json::json!({ + "success": true, + "message": "Snapshot system skipped for remote workspace" + })); + } + let workspace_dir = PathBuf::from(&request.workspace_path); if !workspace_dir.exists() { @@ -199,13 +208,14 @@ pub async fn initialize_snapshot( })) } -fn resolve_workspace_dir(workspace_path: &str) -> Result { +async fn resolve_workspace_dir(workspace_path: &str) -> Result { if workspace_path.trim().is_empty() { return Err("workspacePath is required".to_string()); } let workspace_dir = PathBuf::from(workspace_path); - if !workspace_dir.exists() { + // Remote paths don't exist on the local filesystem — skip the existence check + if !is_remote_path(workspace_path).await && !workspace_dir.exists() { return Err(format!( "Workspace directory does not exist: {}", workspace_path @@ -218,7 +228,15 @@ fn resolve_workspace_dir(workspace_path: &str) -> Result { async fn ensure_snapshot_manager_ready( workspace_path: &str, ) -> Result, String> { - let workspace_dir = resolve_workspace_dir(workspace_path)?; + // Remote workspaces don't support the snapshot system + if is_remote_path(workspace_path).await { + return Err(format!( + "Snapshot 
system not supported for remote workspace: {}", + workspace_path + )); + } + + let workspace_dir = resolve_workspace_dir(workspace_path).await?; if let Some(manager) = get_snapshot_manager_for_workspace(&workspace_dir) { return Ok(manager); @@ -292,6 +310,11 @@ pub async fn rollback_session( app_handle: AppHandle, request: RollbackSessionRequest, ) -> Result, String> { + // Remote workspaces have no local snapshots — nothing to roll back + if is_remote_path(&request.workspace_path).await { + return Ok(vec![]); + } + let manager = ensure_snapshot_manager_ready(&request.workspace_path).await?; let restored_files = manager @@ -321,6 +344,11 @@ pub async fn rollback_to_turn( app_handle: AppHandle, request: RollbackTurnRequest, ) -> Result, String> { + // Remote workspaces have no local snapshots — nothing to roll back + if is_remote_path(&request.workspace_path).await { + return Ok(vec![]); + } + { use bitfun_core::agentic::coordination::get_global_coordinator; diff --git a/src/apps/desktop/src/api/ssh_api.rs b/src/apps/desktop/src/api/ssh_api.rs new file mode 100644 index 00000000..f54491fb --- /dev/null +++ b/src/apps/desktop/src/api/ssh_api.rs @@ -0,0 +1,323 @@ +//! SSH Remote Connection API +//! +//! Tauri commands for SSH connection management and remote file operations. 
+ +use tauri::State; + +use bitfun_core::service::remote_ssh::{ + SSHConnectionConfig, SSHConnectionResult, SavedConnection, RemoteTreeNode, + SSHConfigLookupResult, SSHConfigEntry, +}; +use crate::api::app_state::SSHServiceError; +use crate::AppState; + +impl From for String { + fn from(e: SSHServiceError) -> Self { + e.to_string() + } +} + +// === SSH Connection Management === + +#[tauri::command] +pub async fn ssh_list_saved_connections( + state: State<'_, AppState>, +) -> Result, String> { + let manager = state.get_ssh_manager_async().await?; + let connections = manager.get_saved_connections().await; + log::info!("ssh_list_saved_connections returning {} connections", connections.len()); + for conn in &connections { + log::info!(" - id={}, name={}, host={}:{}", conn.id, conn.name, conn.host, conn.port); + } + Ok(connections) +} + +#[tauri::command] +pub async fn ssh_save_connection( + state: State<'_, AppState>, + config: SSHConnectionConfig, +) -> Result<(), String> { + log::info!("ssh_save_connection called: id={}, host={}, port={}, username={}", + config.id, config.host, config.port, config.username); + let manager = state.get_ssh_manager_async().await?; + manager.save_connection(&config).await + .map_err(|e| e.to_string()) +} + +#[tauri::command] +pub async fn ssh_delete_connection( + state: State<'_, AppState>, + connection_id: String, +) -> Result<(), String> { + let manager = state.get_ssh_manager_async().await?; + manager.delete_saved_connection(&connection_id).await + .map_err(|e| e.to_string()) +} + +#[tauri::command] +pub async fn ssh_connect( + state: State<'_, AppState>, + config: SSHConnectionConfig, +) -> Result { + log::info!("ssh_connect called: id={}, host={}, port={}, username={}", + config.id, config.host, config.port, config.username); + + let manager = match state.get_ssh_manager_async().await { + Ok(m) => { + log::info!("ssh_connect: got SSH manager OK"); + m + } + Err(e) => { + log::error!("ssh_connect: failed to get SSH manager: {}", e); 
+ return Err(e.to_string()); + } + }; + + // First save the connection config so it persists across restarts + log::info!("ssh_connect: about to save connection config"); + if let Err(e) = manager.save_connection(&config).await { + log::warn!("ssh_connect: Failed to save connection config before connect: {}", e); + // Continue anyway - connection might still work + } else { + log::info!("ssh_connect: Connection config saved successfully"); + } + + log::info!("ssh_connect: about to establish connection"); + let result = manager.connect(config).await + .map_err(|e| e.to_string()); + log::info!("ssh_connect result: {:?}", result); + result +} + +#[tauri::command] +pub async fn ssh_disconnect( + state: State<'_, AppState>, + connection_id: String, +) -> Result<(), String> { + let manager = state.get_ssh_manager_async().await?; + manager.disconnect(&connection_id).await + .map_err(|e| e.to_string()) +} + +#[tauri::command] +pub async fn ssh_disconnect_all( + state: State<'_, AppState>, +) -> Result<(), String> { + let manager = state.get_ssh_manager_async().await?; + manager.disconnect_all().await; + Ok(()) +} + +#[tauri::command] +pub async fn ssh_is_connected( + state: State<'_, AppState>, + connection_id: String, +) -> Result { + let manager = state.get_ssh_manager_async().await?; + let is_connected = manager.is_connected(&connection_id).await; + log::info!("ssh_is_connected: connection_id={}, is_connected={}", connection_id, is_connected); + Ok(is_connected) +} + +#[tauri::command] +pub async fn ssh_get_config( + state: State<'_, AppState>, + host: String, +) -> Result { + let manager = state.get_ssh_manager_async().await?; + Ok(manager.get_ssh_config(&host).await) +} + +#[tauri::command] +pub async fn ssh_list_config_hosts( + state: State<'_, AppState>, +) -> Result, String> { + let manager = state.get_ssh_manager_async().await?; + Ok(manager.list_ssh_config_hosts().await) +} + +// === Remote File System Operations === + +#[tauri::command] +pub async fn 
remote_read_file( + state: State<'_, AppState>, + connection_id: String, + path: String, +) -> Result { + let remote_fs = state.get_remote_file_service_async().await?; + let bytes = remote_fs.read_file(&connection_id, &path).await + .map_err(|e| e.to_string())?; + String::from_utf8(bytes).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub async fn remote_write_file( + state: State<'_, AppState>, + connection_id: String, + path: String, + content: String, +) -> Result<(), String> { + let remote_fs = state.get_remote_file_service_async().await?; + remote_fs.write_file(&connection_id, &path, content.as_bytes()).await + .map_err(|e| e.to_string()) +} + +#[tauri::command] +pub async fn remote_exists( + state: State<'_, AppState>, + connection_id: String, + path: String, +) -> Result { + let remote_fs = state.get_remote_file_service_async().await?; + remote_fs.exists(&connection_id, &path).await + .map_err(|e| e.to_string()) +} + +#[tauri::command] +pub async fn remote_read_dir( + state: State<'_, AppState>, + connection_id: String, + path: String, +) -> Result, String> { + let remote_fs = state.get_remote_file_service_async().await?; + remote_fs.read_dir(&connection_id, &path).await + .map_err(|e| e.to_string()) +} + +#[tauri::command] +pub async fn remote_get_tree( + state: State<'_, AppState>, + connection_id: String, + path: String, + depth: Option, +) -> Result { + let remote_fs = state.get_remote_file_service_async().await?; + remote_fs.build_tree(&connection_id, &path, depth).await + .map_err(|e| e.to_string()) +} + +#[tauri::command] +pub async fn remote_create_dir( + state: State<'_, AppState>, + connection_id: String, + path: String, + recursive: bool, +) -> Result<(), String> { + let remote_fs = state.get_remote_file_service_async().await?; + if recursive { + remote_fs.create_dir_all(&connection_id, &path).await + } else { + remote_fs.create_dir(&connection_id, &path).await + } + .map_err(|e| e.to_string()) +} + +#[tauri::command] +pub async fn 
remote_remove(
+    state: State<'_, AppState>,
+    connection_id: String,
+    path: String,
+    recursive: bool,
+) -> Result<(), String> {
+    let remote_fs = state.get_remote_file_service_async().await?;
+    if recursive {
+        remote_fs.remove_dir_all(&connection_id, &path).await
+    } else {
+        // read_dir succeeds only for directories, so use it to tell
+        // files and directories apart.
+        match remote_fs.read_dir(&connection_id, &path).await {
+            Ok(entries) => {
+                if entries.is_empty() {
+                    // Empty directory: remove_dir_all has nothing to recurse into.
+                    remote_fs.remove_dir_all(&connection_id, &path).await
+                } else {
+                    // A non-recursive remove must not delete directory contents
+                    // (remove_dir_all would), so refuse instead.
+                    return Err(format!("Directory not empty: {}", path));
+                }
+            }
+            Err(_) => {
+                // Not a directory: remove as a regular file.
+                remote_fs.remove_file(&connection_id, &path).await
+            }
+        }
+    }
+    .map_err(|e| e.to_string())
+}
+
+#[tauri::command]
+pub async fn remote_rename(
+    state: State<'_, AppState>,
+    connection_id: String,
+    old_path: String,
+    new_path: String,
+) -> Result<(), String> {
+    let remote_fs = state.get_remote_file_service_async().await?;
+    remote_fs.rename(&connection_id, &old_path, &new_path).await
+        .map_err(|e| e.to_string())
+}
+
+#[tauri::command]
+pub async fn remote_execute(
+    state: State<'_, AppState>,
+    connection_id: String,
+    command: String,
+) -> Result<(String, String, i32), String> {
+    let manager = state.get_ssh_manager_async().await?;
+    manager.execute_command(&connection_id, &command).await
+        .map_err(|e| e.to_string())
+}
+
+// === Remote Workspace Management ===
+
+#[tauri::command]
+pub async fn remote_open_workspace(
+    state: State<'_, AppState>,
+    connection_id: String,
+    remote_path: String,
+) -> Result<(), String> {
+    let manager = state.get_ssh_manager_async().await?;
+
+    // Verify connection exists
+    if !manager.is_connected(&connection_id).await {
+        return Err("Not connected to remote server".to_string());
+    }
+
+    // Verify remote path exists
+    let remote_fs = state.get_remote_file_service_async().await?;
+    let exists = remote_fs.exists(&connection_id, &remote_path).await
+        .map_err(|e|
e.to_string())?; + + if !exists { + return Err(format!("Remote path does not exist: {}", remote_path)); + } + + // Get connection info for workspace + let connections = manager.get_saved_connections().await; + let conn = connections.iter().find(|c| c.id == connection_id); + + let workspace = crate::api::RemoteWorkspace { + connection_id: connection_id.clone(), + connection_name: conn.map(|c| c.name.clone()).unwrap_or_default(), + remote_path: remote_path.clone(), + }; + + state.set_remote_workspace(workspace).await + .map_err(|e| e.to_string())?; + + log::info!("Opened remote workspace: {} on connection {}", remote_path, connection_id); + Ok(()) +} + +#[tauri::command] +pub async fn remote_close_workspace( + state: State<'_, AppState>, +) -> Result<(), String> { + state.clear_remote_workspace().await; + log::info!("Closed remote workspace"); + Ok(()) +} + +#[tauri::command] +pub async fn remote_get_workspace_info( + state: State<'_, AppState>, +) -> Result, String> { + let workspace = state.get_remote_workspace_async().await; + log::info!("remote_get_workspace_info: returning {:?}", workspace); + Ok(workspace) +} diff --git a/src/apps/desktop/src/api/terminal_api.rs b/src/apps/desktop/src/api/terminal_api.rs index 791f07d4..39c5de42 100644 --- a/src/apps/desktop/src/api/terminal_api.rs +++ b/src/apps/desktop/src/api/terminal_api.rs @@ -20,6 +20,8 @@ use bitfun_core::service::terminal::{ SignalRequest as CoreSignalRequest, TerminalApi, TerminalConfig, WriteRequest as CoreWriteRequest, }; +use bitfun_core::service::terminal::TerminalEvent; +use bitfun_core::service::remote_ssh::workspace_state::get_remote_workspace_manager; pub struct TerminalState { api: Arc>>, @@ -108,6 +110,10 @@ pub struct SessionResponse { pub status: String, pub cols: u16, pub rows: u16, + /// For remote terminals: the SSH connection ID that owns this session. + /// None/null for local terminals. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub connection_id: Option, } impl From for SessionResponse { @@ -121,6 +127,7 @@ impl From for SessionResponse { status: resp.status, cols: resp.cols, rows: resp.rows, + connection_id: None, } } } @@ -279,11 +286,116 @@ pub async fn terminal_get_shells( Ok(shells.into_iter().map(ShellInfo::from).collect()) } +/// Check if the given working directory belongs to any registered remote workspace. +/// Returns (connection_id, remote_cwd) if so. +async fn lookup_remote_for_terminal(working_directory: Option<&str>) -> Option<(String, String)> { + let wd = working_directory?; + let manager = get_remote_workspace_manager()?; + let entry = manager.lookup_connection(wd).await?; + Some((entry.connection_id, wd.to_string())) +} + +/// Try to find session in remote terminal manager. Returns true if found. +async fn is_remote_session(session_id: &str) -> bool { + if let Some(manager) = get_remote_workspace_manager() { + if let Some(terminal_manager) = manager.get_terminal_manager().await { + return terminal_manager.get_session(session_id).await.is_some(); + } + } + false +} + #[tauri::command] pub async fn terminal_create( + _app: AppHandle, request: CreateSessionRequest, state: State<'_, TerminalState>, ) -> Result { + if let Some((connection_id, remote_cwd)) = lookup_remote_for_terminal(request.working_directory.as_deref()).await { + if let Some(remote_manager) = get_remote_workspace_manager() { + let terminal_manager = remote_manager + .get_terminal_manager() + .await + .ok_or("Remote terminal manager not available")?; + + let result = terminal_manager + .create_session( + request.session_id, + request.name, + &connection_id, + request.cols.unwrap_or(80), + request.rows.unwrap_or(24), + Some(remote_cwd.as_str()), + ) + .await + .map_err(|e| format!("Failed to create remote session: {}", e))?; + + let session = result.session; + let mut rx = result.output_rx; + let session_id = session.id.clone(); + + let response = 
SessionResponse { + id: session.id, + name: session.name, + shell_type: "Remote".to_string(), + cwd: session.cwd.clone(), + pid: session.pid, + status: format!("{:?}", session.status), + cols: session.cols, + rows: session.rows, + connection_id: Some(connection_id.clone()), + }; + + let app_handle = _app.clone(); + let sid = session_id.clone(); + tokio::spawn(async move { + let _ = app_handle.emit( + "terminal_event", + &TerminalEvent::Ready { + session_id: sid.clone(), + pid: 0, + cwd: String::new(), + }, + ); + + loop { + match rx.recv().await { + Ok(data) => { + let text = String::from_utf8_lossy(&data).to_string(); + if let Err(e) = app_handle.emit( + "terminal_event", + &TerminalEvent::Data { + session_id: sid.clone(), + data: text, + }, + ) { + warn!("Failed to emit remote terminal event: {}", e); + break; + } + } + Err(tokio::sync::broadcast::error::RecvError::Lagged(n)) => { + warn!("Remote terminal output lagged, skipped {} messages: session_id={}", n, sid); + continue; + } + Err(tokio::sync::broadcast::error::RecvError::Closed) => { + break; + } + } + } + + let _ = app_handle.emit( + "terminal_event", + &TerminalEvent::Exit { + session_id: sid, + exit_code: Some(0), + }, + ); + }); + + return Ok(response); + } + } + let api = state.get_or_init_api().await?; let parsed_shell_type = request.shell_type.and_then(|s| parse_shell_type(&s)); @@ -295,6 +407,7 @@ pub async fn terminal_create( env: request.env, cols: request.cols, rows: request.rows, + remote_connection_id: None, }; let session = api @@ -310,6 +423,25 @@ pub async fn terminal_get( session_id: String, state: State<'_, TerminalState>, ) -> Result { + // Try remote first (by session_id lookup, not global flag) + if let Some(remote_manager) = get_remote_workspace_manager() { + if let Some(terminal_manager) = remote_manager.get_terminal_manager().await { + if let Some(session) = terminal_manager.get_session(&session_id).await { + return Ok(SessionResponse { + id: session.id, + name: session.name, + 
shell_type: "Remote".to_string(), + cwd: session.cwd, + pid: session.pid, + status: format!("{:?}", session.status), + cols: session.cols, + rows: session.rows, + connection_id: Some(session.connection_id), + }); + } + } + } + let api = state.get_or_init_api().await?; let session = api @@ -324,14 +456,35 @@ pub async fn terminal_get( pub async fn terminal_list( state: State<'_, TerminalState>, ) -> Result, String> { - let api = state.get_or_init_api().await?; + let mut all_sessions: Vec = Vec::new(); + + // Collect remote sessions + if let Some(remote_manager) = get_remote_workspace_manager() { + if let Some(terminal_manager) = remote_manager.get_terminal_manager().await { + let remote_sessions = terminal_manager.list_sessions().await; + all_sessions.extend(remote_sessions.into_iter().map(|s| SessionResponse { + id: s.id, + name: s.name, + shell_type: "Remote".to_string(), + cwd: s.cwd, + pid: s.pid, + status: format!("{:?}", s.status), + cols: s.cols, + rows: s.rows, + connection_id: Some(s.connection_id), + })); + } + } - let sessions = api + // Collect local sessions + let api = state.get_or_init_api().await?; + let local_sessions = api .list_sessions() .await .map_err(|e| format!("Failed to list sessions: {}", e))?; + all_sessions.extend(local_sessions.into_iter().map(SessionResponse::from)); - Ok(sessions.into_iter().map(SessionResponse::from).collect()) + Ok(all_sessions) } #[tauri::command] @@ -339,6 +492,22 @@ pub async fn terminal_close( request: CloseSessionRequest, state: State<'_, TerminalState>, ) -> Result<(), String> { + if is_remote_session(&request.session_id).await { + if let Some(remote_manager) = get_remote_workspace_manager() { + let terminal_manager = remote_manager + .get_terminal_manager() + .await + .ok_or("Remote terminal manager not available")?; + + terminal_manager + .close_session(&request.session_id) + .await + .map_err(|e| format!("Failed to close session: {}", e))?; + + return Ok(()); + } + } + let api = 
state.get_or_init_api().await?; let core_request = CoreCloseSessionRequest { @@ -358,6 +527,22 @@ pub async fn terminal_write( request: WriteRequest, state: State<'_, TerminalState>, ) -> Result<(), String> { + if is_remote_session(&request.session_id).await { + if let Some(remote_manager) = get_remote_workspace_manager() { + let terminal_manager = remote_manager + .get_terminal_manager() + .await + .ok_or("Remote terminal manager not available")?; + + terminal_manager + .write(&request.session_id, request.data.as_bytes()) + .await + .map_err(|e| format!("Failed to write: {}", e))?; + + return Ok(()); + } + } + let api = state.get_or_init_api().await?; let core_request = CoreWriteRequest { @@ -377,6 +562,22 @@ pub async fn terminal_resize( request: ResizeRequest, state: State<'_, TerminalState>, ) -> Result<(), String> { + if is_remote_session(&request.session_id).await { + if let Some(remote_manager) = get_remote_workspace_manager() { + let terminal_manager = remote_manager + .get_terminal_manager() + .await + .ok_or("Remote terminal manager not available")?; + + terminal_manager + .resize(&request.session_id, request.cols, request.rows) + .await + .map_err(|e| format!("Failed to resize: {}", e))?; + + return Ok(()); + } + } + let api = state.get_or_init_api().await?; let core_request = CoreResizeRequest { @@ -397,6 +598,11 @@ pub async fn terminal_signal( request: SignalRequest, state: State<'_, TerminalState>, ) -> Result<(), String> { + if is_remote_session(&request.session_id).await { + // Remote terminals don't support signal yet + return Ok(()); + } + let api = state.get_or_init_api().await?; let core_request = CoreSignalRequest { @@ -416,6 +622,11 @@ pub async fn terminal_ack( request: AcknowledgeRequest, state: State<'_, TerminalState>, ) -> Result<(), String> { + if is_remote_session(&request.session_id).await { + // Remote terminals don't use flow control ack + return Ok(()); + } + let api = state.get_or_init_api().await?; let core_request = 
CoreAcknowledgeRequest { @@ -435,6 +646,38 @@ pub async fn terminal_execute( request: ExecuteCommandRequest, state: State<'_, TerminalState>, ) -> Result { + if is_remote_session(&request.session_id).await { + if let Some(remote_manager) = get_remote_workspace_manager() { + let terminal_manager = remote_manager + .get_terminal_manager() + .await + .ok_or("Remote terminal manager not available")?; + let session = terminal_manager + .get_session(&request.session_id) + .await + .ok_or("Remote session not found")?; + let ssh_manager = remote_manager + .get_ssh_manager() + .await + .ok_or("SSH manager not available")?; + let (stdout, stderr, exit_code) = ssh_manager + .execute_command(&session.connection_id, &request.command) + .await + .map_err(|e| format!("Failed to execute remote command: {}", e))?; + + return Ok(ExecuteCommandResponse { + command: request.command, + command_id: format!("remote-cmd-{}", std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_millis()), + output: if stderr.is_empty() { stdout } else { format!("{}\n{}", stdout, stderr) }, + exit_code: Some(exit_code), + completion_reason: "completed".to_string(), + }); + } + } + let api = state.get_or_init_api().await?; let core_request = CoreExecuteCommandRequest { @@ -457,6 +700,22 @@ pub async fn terminal_send_command( request: SendCommandRequest, state: State<'_, TerminalState>, ) -> Result<(), String> { + if is_remote_session(&request.session_id).await { + if let Some(remote_manager) = get_remote_workspace_manager() { + let terminal_manager = remote_manager + .get_terminal_manager() + .await + .ok_or("Remote terminal manager not available")?; + + terminal_manager + .write(&request.session_id, format!("{}\n", request.command).as_bytes()) + .await + .map_err(|e| format!("Failed to send command: {}", e))?; + + return Ok(()); + } + } + let api = state.get_or_init_api().await?; let core_request = CoreSendCommandRequest { @@ -476,6 +735,10 @@ pub async fn 
terminal_has_shell_integration( session_id: String, state: State<'_, TerminalState>, ) -> Result { + if is_remote_session(&session_id).await { + return Ok(false); + } + let api = state.get_or_init_api().await?; Ok(api.has_shell_integration(&session_id).await) } @@ -493,6 +756,22 @@ pub async fn terminal_get_history( session_id: String, state: State<'_, TerminalState>, ) -> Result { + if is_remote_session(&session_id).await { + if let Some(remote_manager) = get_remote_workspace_manager() { + if let Some(terminal_manager) = remote_manager.get_terminal_manager().await { + if let Some(session) = terminal_manager.get_session(&session_id).await { + return Ok(GetHistoryResponse { + session_id: session.id, + data: String::new(), + history_size: 0, + cols: session.cols, + rows: session.rows, + }); + } + } + } + } + let api = state.get_or_init_api().await?; let core_request = CoreGetHistoryRequest { session_id }; diff --git a/src/apps/desktop/src/api/tool_api.rs b/src/apps/desktop/src/api/tool_api.rs index d1928b46..11336521 100644 --- a/src/apps/desktop/src/api/tool_api.rs +++ b/src/apps/desktop/src/api/tool_api.rs @@ -101,6 +101,7 @@ fn build_tool_context(workspace_path: Option<&str>) -> ToolUseContext { image_context_provider: Some(Arc::new(create_image_context_provider())), subagent_parent_info: None, cancellation_token: None, + workspace_services: None, } } diff --git a/src/apps/desktop/src/lib.rs b/src/apps/desktop/src/lib.rs index d42277d6..e1a6303b 100644 --- a/src/apps/desktop/src/lib.rs +++ b/src/apps/desktop/src/lib.rs @@ -549,6 +549,7 @@ pub async fn run() { cleanup_invalid_workspaces, get_opened_workspaces, open_workspace, + open_remote_workspace, create_assistant_workspace, delete_assistant_workspace, reset_assistant_workspace, @@ -638,6 +639,28 @@ pub async fn run() { api::insights_api::load_insights_report, api::insights_api::has_insights_data, api::insights_api::cancel_insights_generation, + // SSH Remote API + api::ssh_api::ssh_list_saved_connections, + 
api::ssh_api::ssh_save_connection, + api::ssh_api::ssh_delete_connection, + api::ssh_api::ssh_connect, + api::ssh_api::ssh_disconnect, + api::ssh_api::ssh_disconnect_all, + api::ssh_api::ssh_is_connected, + api::ssh_api::ssh_get_config, + api::ssh_api::ssh_list_config_hosts, + api::ssh_api::remote_read_file, + api::ssh_api::remote_write_file, + api::ssh_api::remote_exists, + api::ssh_api::remote_read_dir, + api::ssh_api::remote_get_tree, + api::ssh_api::remote_create_dir, + api::ssh_api::remote_remove, + api::ssh_api::remote_rename, + api::ssh_api::remote_execute, + api::ssh_api::remote_open_workspace, + api::ssh_api::remote_close_workspace, + api::ssh_api::remote_get_workspace_info, ]) .run(tauri::generate_context!()); if let Err(e) = run_result { diff --git a/src/apps/server/src/ai_relay.rs b/src/apps/server/src/ai_relay.rs new file mode 100644 index 00000000..134a3830 --- /dev/null +++ b/src/apps/server/src/ai_relay.rs @@ -0,0 +1,237 @@ +//! AI Relay - Lightweight HTTP proxy for AI API requests +//! +//! When running BitFun Server on a remote machine that cannot directly access +//! AI APIs (due to network restrictions), AI Relay acts as a local proxy: +//! +//! ```text +//! Remote Server Local Machine +//! ┌─────────────┐ SSH Tunnel ┌─────────────┐ +//! │ BitFun │ ───────────────► │ AI Relay │ ──► AI API +//! │ Server │ ssh -R 9090: │ :9090 │ (OpenAI, etc.) +//! └─────────────┘ └─────────────┘ +//! ``` +//! +//! Usage: +//! 1. Start AI Relay on local machine: `bitfun-server --ai-relay --port 9090` +//! 2. SSH to remote with reverse tunnel: `ssh -R 9090:localhost:9090 user@remote` +//! 3. 
Configure remote BitFun to use proxy: `proxy_url = "http://localhost:9090"` + +use axum::{ + body::Body, + extract::{Request, State}, + http::{HeaderMap, HeaderName, HeaderValue, StatusCode, Uri}, + response::{IntoResponse, Response}, + Router, +}; +use reqwest::Client; +use std::sync::Arc; + +/// AI Relay state +#[derive(Clone)] +pub struct RelayState { + pub client: Client, +} + +/// Create the AI Relay router +pub fn create_relay_router() -> Router { + let client = Client::builder() + .timeout(std::time::Duration::from_secs(300)) + .build() + .expect("Failed to create HTTP client"); + + let state = Arc::new(RelayState { client }); + + Router::new() + .fallback(handle_proxy) + .with_state(state) +} + +/// Handle all incoming requests and proxy them +async fn handle_proxy( + State(state): State>, + req: Request, +) -> Result { + // Get the target URL from the request + let uri = req.uri().clone(); + + // Reconstruct the target URL + let target_url = reconstruct_target_url(&uri)?; + + log::info!( + "AI Relay: proxying {} {}", + req.method(), + target_url + ); + + // Remove hop-by-hop headers + let mut headers = req.headers().clone(); + remove_hop_by_hop_headers(&mut headers); + + // Build the proxied request + let method = reqwest::Method::from_bytes(req.method().as_str().as_bytes()) + .unwrap_or(reqwest::Method::GET); + + let mut builder = state.client.request(method, &target_url); + + // Add headers + for (name, value) in headers.iter() { + if let Ok(header_value) = HeaderValue::from_bytes(value.as_bytes()) { + builder = builder.header(name.as_str(), header_value); + } + } + + // Add body if present + let body_bytes = axum::body::to_bytes(req.into_body(), usize::MAX) + .await + .map_err(|e| ProxyError::RequestError(format!("Failed to read body: {}", e)))?; + + if !body_bytes.is_empty() { + builder = builder.body(body_bytes); + } + + // Send request + let response = builder + .send() + .await + .map_err(|e| ProxyError::UpstreamError(format!("Failed to connect to 
upstream: {}", e)))?; + + log::info!( + "AI Relay: received response status {}", + response.status() + ); + + // Build response + let status = response.status(); + let mut resp_builder = axum::http::Response::builder() + .status(StatusCode::from_u16(status.as_u16()).unwrap_or(StatusCode::OK)); + + for (name, value) in response.headers() { + if let Ok(header_name) = HeaderName::try_from(name.as_str()) { + if let Ok(header_value) = HeaderValue::from_bytes(value.as_bytes()) { + resp_builder = resp_builder.header(header_name, header_value); + } + } + } + + let body_bytes = response + .bytes() + .await + .map_err(|e| ProxyError::UpstreamError(format!("Failed to read response: {}", e)))?; + + resp_builder + .body(Body::from(body_bytes)) + .map_err(|e| ProxyError::ResponseError(format!("Failed to build response: {}", e))) +} + +/// Reconstruct the target URL from the request +fn reconstruct_target_url(uri: &Uri) -> Result { + // The request should contain the actual target in the path + // Format: /{scheme}/{host}/{path} + // Example: /https/api.openai.com/v1/chat/completions + + let path = uri.path(); + + // Skip the leading slash and extract scheme + let path = path.strip_prefix('/').ok_or_else(|| { + ProxyError::InvalidRequest("Invalid path format".to_string()) + })?; + + let parts: Vec<&str> = path.splitn(3, '/').collect(); + if parts.is_empty() { + return Err(ProxyError::InvalidRequest( + "Missing scheme and host in path".to_string(), + )); + } + + let scheme = *parts.get(0).unwrap_or(&"https"); + let host = parts.get(1).ok_or_else(|| { + ProxyError::InvalidRequest("Missing host in path".to_string()) + })?; + let rest = parts.get(2).unwrap_or(&""); + + // Build the target URL + let target_url = if rest.is_empty() { + format!("{}://{}", scheme, host) + } else { + format!("{}://{}/{}", scheme, host, rest) + }; + + // Add query string if present + if let Some(query) = uri.query() { + Ok(format!("{}?{}", target_url, query)) + } else { + Ok(target_url) + } +} + +/// 
Remove hop-by-hop headers that should not be forwarded +fn remove_hop_by_hop_headers(headers: &mut HeaderMap) { + const HOP_BY_HOP: &[&str] = &[ + "connection", + "keep-alive", + "proxy-authenticate", + "proxy-authorization", + "te", + "trailers", + "transfer-encoding", + "upgrade", + "host", + ]; + + for header in HOP_BY_HOP { + headers.remove(*header); + } +} + +/// Proxy errors +#[derive(Debug)] +pub enum ProxyError { + InvalidRequest(String), + RequestError(String), + UpstreamError(String), + ResponseError(String), +} + +impl IntoResponse for ProxyError { + fn into_response(self) -> Response { + let (status, message) = match self { + ProxyError::InvalidRequest(msg) => (StatusCode::BAD_REQUEST, msg), + ProxyError::RequestError(msg) => (StatusCode::BAD_REQUEST, msg), + ProxyError::UpstreamError(msg) => (StatusCode::BAD_GATEWAY, msg), + ProxyError::ResponseError(msg) => { + (StatusCode::INTERNAL_SERVER_ERROR, msg) + } + }; + + let body = serde_json::json!({ + "error": message, + }); + + (status, axum::Json(body)).into_response() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_reconstruct_target_url() { + // Test with full path + let uri: Uri = "/https/api.openai.com/v1/chat/completions".parse().unwrap(); + let result = reconstruct_target_url(&uri).unwrap(); + assert_eq!(result, "https://api.openai.com/v1/chat/completions"); + + // Test with query string + let uri: Uri = "/https/api.openai.com/v1/models?limit=10" + .parse() + .unwrap(); + let result = reconstruct_target_url(&uri).unwrap(); + assert_eq!(result, "https://api.openai.com/v1/models?limit=10"); + + // Test with host only + let uri: Uri = "/https/api.openai.com".parse().unwrap(); + let result = reconstruct_target_url(&uri).unwrap(); + assert_eq!(result, "https://api.openai.com"); + } +} diff --git a/src/apps/server/src/bootstrap.rs b/src/apps/server/src/bootstrap.rs new file mode 100644 index 00000000..cdcb4354 --- /dev/null +++ b/src/apps/server/src/bootstrap.rs @@ -0,0 +1,234 
@@ +//! Server bootstrap - initializes all core services. +//! +//! Mirrors the Desktop app's init sequence without any Tauri dependency. + +use bitfun_core::agentic::*; +use bitfun_core::infrastructure::ai::AIClientFactory; +use bitfun_core::infrastructure::try_get_path_manager_arc; +use bitfun_core::service::{ + ai_rules, config, filesystem, mcp, token_usage, workspace, +}; +use std::sync::Arc; +use tokio::sync::RwLock; + +/// Shared application state for the server (mirrors Desktop's AppState). +pub struct ServerAppState { + pub ai_client_factory: Arc, + pub workspace_service: Arc, + pub workspace_path: Arc>>, + pub config_service: Arc, + pub filesystem_service: Arc, + pub ai_rules_service: Arc, + pub agent_registry: Arc, + pub mcp_service: Option>, + pub token_usage_service: Arc, + pub coordinator: Arc, + pub scheduler: Arc, + pub event_queue: Arc, + pub event_router: Arc, + pub tool_registry_snapshot: Arc>>, + pub start_time: std::time::Instant, +} + +/// Initialize all core services and return the shared server state. +/// +/// The optional `workspace` path, when provided, is opened automatically. +pub async fn initialize(workspace: Option) -> anyhow::Result> { + log::info!("Initializing BitFun server core services"); + + // 1. Global config + config::initialize_global_config().await?; + let config_service = config::get_global_config_service().await?; + + // 2. AI client factory + AIClientFactory::initialize_global().await?; + let ai_client_factory = AIClientFactory::get_global().await?; + + // 3. 
Agentic system + let path_manager = try_get_path_manager_arc()?; + + let event_queue = Arc::new(events::EventQueue::new(Default::default())); + let event_router = Arc::new(events::EventRouter::new()); + + let persistence_manager = + Arc::new(persistence::PersistenceManager::new(path_manager.clone())?); + + let history_manager = Arc::new(session::MessageHistoryManager::new( + persistence_manager.clone(), + session::HistoryConfig { + enable_persistence: false, + ..Default::default() + }, + )); + + let compression_manager = Arc::new(session::CompressionManager::new( + persistence_manager.clone(), + session::CompressionConfig { + enable_persistence: false, + ..Default::default() + }, + )); + + let session_manager = Arc::new(session::SessionManager::new( + history_manager, + compression_manager, + persistence_manager, + Default::default(), + )); + + let tool_registry = tools::registry::get_global_tool_registry(); + let tool_state_manager = Arc::new(tools::pipeline::ToolStateManager::new(event_queue.clone())); + + let tool_pipeline = Arc::new(tools::pipeline::ToolPipeline::new( + tool_registry.clone(), + tool_state_manager, + None, // no image context provider in server mode for now + )); + + let stream_processor = Arc::new(execution::StreamProcessor::new(event_queue.clone())); + let round_executor = Arc::new(execution::RoundExecutor::new( + stream_processor, + event_queue.clone(), + tool_pipeline.clone(), + )); + let execution_engine = Arc::new(execution::ExecutionEngine::new( + round_executor, + event_queue.clone(), + session_manager.clone(), + Default::default(), + )); + + let coordinator = Arc::new(coordination::ConversationCoordinator::new( + session_manager.clone(), + execution_engine, + tool_pipeline, + event_queue.clone(), + event_router.clone(), + )); + + coordination::ConversationCoordinator::set_global(coordinator.clone()); + + // Token usage + let token_usage_service = Arc::new( + token_usage::TokenUsageService::new(path_manager.clone()).await?, + ); + let 
token_usage_subscriber = + Arc::new(token_usage::TokenUsageSubscriber::new(token_usage_service.clone())); + event_router.subscribe_internal("token_usage".to_string(), token_usage_subscriber); + + // Dialog scheduler + let scheduler = + coordination::DialogScheduler::new(coordinator.clone(), session_manager.clone()); + coordinator.set_scheduler_notifier(scheduler.outcome_sender()); + coordination::set_global_scheduler(scheduler.clone()); + + // Cron service + let cron_service = + bitfun_core::service::cron::CronService::new(path_manager.clone(), scheduler.clone()) + .await?; + bitfun_core::service::cron::set_global_cron_service(cron_service.clone()); + let cron_subscriber = Arc::new(bitfun_core::service::cron::CronEventSubscriber::new( + cron_service.clone(), + )); + event_router.subscribe_internal("cron_jobs".to_string(), cron_subscriber); + cron_service.start(); + + // Function agents + let _ = bitfun_core::function_agents::git_func_agent::GitFunctionAgent::new( + ai_client_factory.clone(), + ); + let _ = bitfun_core::function_agents::startchat_func_agent::StartchatFunctionAgent::new( + ai_client_factory.clone(), + ); + + // 4. Services + let workspace_service = Arc::new(workspace::WorkspaceService::new().await?); + workspace::set_global_workspace_service(workspace_service.clone()); + let filesystem_service = Arc::new(filesystem::FileSystemServiceFactory::create_default()); + + ai_rules::initialize_global_ai_rules_service().await?; + let ai_rules_service = ai_rules::get_global_ai_rules_service().await?; + + let agent_registry = agents::get_agent_registry(); + + let mcp_service = match mcp::MCPService::new(config_service.clone()) { + Ok(service) => Some(Arc::new(service)), + Err(e) => { + log::warn!("Failed to initialize MCP service: {}", e); + None + } + }; + + // Tool registry snapshot + let tool_registry_snapshot = { + let lock = tool_registry.read().await; + Arc::new(lock.get_all_tools()) + }; + + // 5. 
Open workspace if specified + let initial_workspace_path = if let Some(ws_path) = workspace { + let path = std::path::PathBuf::from(&ws_path); + match workspace_service.open_workspace(path.clone()).await { + Ok(info) => { + log::info!( + "Workspace opened: name={}, path={}", + info.name, + info.root_path.display() + ); + + // Initialize snapshot for workspace + if let Err(e) = + bitfun_core::service::snapshot::initialize_snapshot_manager_for_workspace( + info.root_path.clone(), + None, + ) + .await + { + log::warn!("Failed to initialize snapshot system: {}", e); + } + + if let Err(e) = ai_rules_service.set_workspace(info.root_path.clone()).await { + log::warn!("Failed to set AI rules workspace: {}", e); + } + + Some(info.root_path) + } + Err(e) => { + log::error!("Failed to open workspace '{}': {}", ws_path, e); + None + } + } + } else { + // Try to restore last workspace + workspace_service + .get_current_workspace() + .await + .map(|w| w.root_path) + }; + + // LSP + if let Err(e) = bitfun_core::service::lsp::initialize_global_lsp_manager().await { + log::error!("Failed to initialize LSP manager: {}", e); + } + + let state = Arc::new(ServerAppState { + ai_client_factory, + workspace_service, + workspace_path: Arc::new(RwLock::new(initial_workspace_path)), + config_service, + filesystem_service, + ai_rules_service, + agent_registry, + mcp_service, + token_usage_service, + coordinator, + scheduler, + event_queue, + event_router, + tool_registry_snapshot, + start_time: std::time::Instant::now(), + }); + + log::info!("BitFun server core services initialized"); + Ok(state) +} diff --git a/src/apps/server/src/rpc_dispatcher.rs b/src/apps/server/src/rpc_dispatcher.rs new file mode 100644 index 00000000..97cf6585 --- /dev/null +++ b/src/apps/server/src/rpc_dispatcher.rs @@ -0,0 +1,310 @@ +//! WebSocket RPC command dispatcher. +//! +//! Maps Tauri command names (used by the frontend `api.invoke()`) to +//! server-side handler functions. 
Each handler receives the raw JSON +//! `params` and returns a JSON `result`. + +use crate::bootstrap::ServerAppState; +use anyhow::{anyhow, Result}; +use bitfun_core::agentic::core::SessionConfig; +use bitfun_core::agentic::coordination::{DialogSubmissionPolicy, DialogTriggerSource}; +use std::path::PathBuf; +use std::sync::Arc; + +/// Dispatch a WebSocket RPC method call to the appropriate handler. +/// +/// The `method` string matches the Tauri command name exactly (e.g. +/// `"open_workspace"`, `"terminal_create"`), so the frontend's +/// `api.invoke(name, args)` works identically over both Tauri IPC and +/// WebSocket. +pub async fn dispatch( + method: &str, + params: serde_json::Value, + state: &Arc, +) -> Result { + match method { + // ── Ping ────────────────────────────────────────────── + "ping" => Ok(serde_json::json!({ + "pong": true, + "timestamp": chrono::Utc::now().timestamp(), + })), + + // ── Health / Status ────────────────────────────────── + "get_health_status" => { + let uptime = state.start_time.elapsed().as_secs(); + Ok(serde_json::json!({ + "status": "healthy", + "message": "All services are running normally", + "services": { + "workspace_service": true, + "config_service": true, + "filesystem_service": true, + }, + "uptime_seconds": uptime, + })) + } + + // ── Workspace ──────────────────────────────────────── + "open_workspace" => { + let request = extract_request(¶ms)?; + let path: String = serde_json::from_value( + request.get("path").cloned().ok_or_else(|| anyhow!("Missing path"))?, + )?; + let info = state.workspace_service.open_workspace(path.into()).await + .map_err(|e| anyhow!("{}", e))?; + *state.workspace_path.write().await = Some(info.root_path.clone()); + Ok(serde_json::to_value(&info).unwrap_or_default()) + } + "get_current_workspace" => { + let ws = state.workspace_service.get_current_workspace().await; + Ok(serde_json::to_value(&ws).unwrap_or(serde_json::Value::Null)) + } + "get_recent_workspaces" => { + let list = 
state.workspace_service.get_recent_workspaces().await; + Ok(serde_json::to_value(&list).unwrap_or_default()) + } + "get_opened_workspaces" => { + let list = state.workspace_service.get_opened_workspaces().await; + Ok(serde_json::to_value(&list).unwrap_or_default()) + } + + // ── File System ────────────────────────────────────── + "read_file_content" => { + let request = extract_request(¶ms)?; + let file_path = get_string(&request, "filePath")?; + let result = state.filesystem_service.read_file(&file_path).await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::json!(result.content)) + } + "write_file_content" => { + let request = extract_request(¶ms)?; + let file_path = get_string(&request, "filePath")?; + let content = get_string(&request, "content")?; + state.filesystem_service.write_file(&file_path, &content).await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::Value::Null) + } + "check_path_exists" => { + let path_str = if let Some(req) = params.get("request") { + get_string(req, "path")? + } else { + get_string(¶ms, "path")? 
+ }; + let exists = std::path::Path::new(&path_str).exists(); + Ok(serde_json::json!(exists)) + } + "get_file_tree" => { + let request = extract_request(¶ms)?; + let path = get_string(&request, "path")?; + let nodes = state.filesystem_service.build_file_tree(&path).await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::to_value(&nodes).unwrap_or_default()) + } + "fs_exists" => { + let path_str = get_string(¶ms, "path")?; + let exists = std::path::Path::new(&path_str).exists(); + Ok(serde_json::json!(exists)) + } + + // ── Config ─────────────────────────────────────────── + "get_config" => { + let request = extract_request(¶ms)?; + let key = request.get("key").and_then(|v| v.as_str()); + let config: serde_json::Value = state.config_service + .get_config(key).await + .map_err(|e| anyhow!("{}", e))?; + Ok(config) + } + "set_config" => { + let request = extract_request(¶ms)?; + let key = get_string(&request, "key")?; + let value = request.get("value").cloned().ok_or_else(|| anyhow!("Missing value"))?; + state.config_service.set_config(&key, value).await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::json!("ok")) + } + "get_model_configs" => { + let models = state.config_service.get_ai_models().await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::to_value(&models).unwrap_or_default()) + } + + // ── Agentic (Session / Dialog) ─────────────────────── + "create_session" => { + let request = extract_request(¶ms)?; + let session_name = get_string(&request, "sessionName")?; + let agent_type = get_string(&request, "agentType")?; + let workspace_path = get_string(&request, "workspacePath")?; + let session_id = request.get("sessionId") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + + let config = SessionConfig { + workspace_path: Some(workspace_path.clone()), + ..Default::default() + }; + + let session = state.coordinator + .create_session_with_workspace( + session_id, + session_name, + agent_type, + config, + workspace_path, + ) + .await + .map_err(|e| 
anyhow!("{}", e))?; + + Ok(serde_json::json!({ + "sessionId": session.session_id, + "sessionName": session.session_name, + "agentType": session.agent_type, + })) + } + "list_sessions" => { + let request = extract_request(¶ms)?; + let workspace_path = get_string(&request, "workspacePath")?; + let sessions = state.coordinator + .list_sessions(&PathBuf::from(workspace_path)) + .await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::to_value(&sessions).unwrap_or_default()) + } + "delete_session" => { + let request = extract_request(¶ms)?; + let session_id = get_string(&request, "sessionId")?; + let workspace_path = get_string(&request, "workspacePath")?; + state.coordinator + .delete_session(&PathBuf::from(workspace_path), &session_id) + .await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::json!({ "success": true })) + } + "start_dialog_turn" => { + let request = extract_request(¶ms)?; + let session_id = get_string(&request, "sessionId")?; + let user_input = get_string(&request, "userInput")?; + let original_user_input = request.get("originalUserInput") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + let agent_type = get_string(&request, "agentType")?; + let workspace_path = request.get("workspacePath") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + let turn_id = request.get("turnId") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + + state.scheduler + .submit( + session_id, + user_input, + original_user_input, + turn_id, + agent_type, + workspace_path, + DialogSubmissionPolicy::for_source(DialogTriggerSource::DesktopUi), + None, + ) + .await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::json!({ "success": true, "message": "Dialog turn started" })) + } + "cancel_dialog_turn" => { + let request = extract_request(¶ms)?; + let session_id = get_string(&request, "sessionId")?; + let dialog_turn_id = get_string(&request, "dialogTurnId")?; + state.coordinator + .cancel_dialog_turn(&session_id, &dialog_turn_id) + .await + .map_err(|e| 
anyhow!("{}", e))?; + Ok(serde_json::json!({ "success": true })) + } + "get_session_messages" => { + let request = extract_request(¶ms)?; + let session_id = get_string(&request, "sessionId")?; + let messages = state.coordinator + .get_messages(&session_id) + .await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::to_value(&messages).unwrap_or_default()) + } + "confirm_tool_execution" => { + let request = extract_request(¶ms)?; + let tool_id = get_string(&request, "toolId")?; + let updated_input = request.get("updatedInput").cloned(); + state.coordinator + .confirm_tool(&tool_id, updated_input) + .await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::json!({ "success": true })) + } + "reject_tool_execution" => { + let request = extract_request(¶ms)?; + let tool_id = get_string(&request, "toolId")?; + let reason = request.get("reason") + .and_then(|v| v.as_str()) + .unwrap_or("User rejected") + .to_string(); + state.coordinator + .reject_tool(&tool_id, reason) + .await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::json!({ "success": true })) + } + + // ── I18n ───────────────────────────────────────────── + "i18n_get_current_language" => { + let lang: String = state.config_service + .get_config(Some("app.language")).await + .unwrap_or_else(|_| "zh-CN".to_string()); + Ok(serde_json::json!(lang)) + } + "i18n_set_language" => { + let request = extract_request(¶ms)?; + let language = get_string(&request, "language")?; + state.config_service.set_config("app.language", language.clone()).await + .map_err(|e| anyhow!("{}", e))?; + Ok(serde_json::json!(language)) + } + "i18n_get_supported_languages" => { + Ok(serde_json::json!([ + {"id": "zh-CN", "name": "Chinese (Simplified)", "englishName": "Chinese (Simplified)", "nativeName": "简体中文", "rtl": false}, + {"id": "en-US", "name": "English", "englishName": "English", "nativeName": "English", "rtl": false} + ])) + } + + // ── Tools ──────────────────────────────────────────── + "get_all_tools_info" => { + let tools: 
Vec = state.tool_registry_snapshot + .iter() + .map(|t| serde_json::json!({ + "name": t.name().to_string(), + })) + .collect(); + Ok(serde_json::json!(tools)) + } + + // ── Fallback ───────────────────────────────────────── + _ => { + log::warn!("Unknown RPC method: {}", method); + Err(anyhow!("Unknown command: {}", method)) + } + } +} + +// ── Helpers ─────────────────────────────────────────────────── + +/// Extract the `request` field from params (Tauri convention: `{ request: { ... } }`). +fn extract_request(params: &serde_json::Value) -> Result<&serde_json::Value> { + params + .get("request") + .ok_or_else(|| anyhow!("Missing 'request' field in params")) +} + +fn get_string(obj: &serde_json::Value, key: &str) -> Result { + obj.get(key) + .and_then(|v| v.as_str()) + .map(|s| s.to_string()) + .ok_or_else(|| anyhow!("Missing or invalid '{}' field", key)) +} diff --git a/src/crates/core/Cargo.toml b/src/crates/core/Cargo.toml index f2a56564..4fe71bb1 100644 --- a/src/crates/core/Cargo.toml +++ b/src/crates/core/Cargo.toml @@ -107,6 +107,13 @@ qrcode = { workspace = true } # WebSocket client tokio-tungstenite = { workspace = true } +# SSH - Remote SSH support (optional feature) +russh = { version = "0.45", optional = true } +russh-sftp = { version = "2.1", optional = true } +russh-keys = { version = "0.45", features = ["openssl"], optional = true } +shellexpand = { version = "3", optional = true } +ssh_config = { version = "0.1", optional = true } + # Relay server shared library (embedded relay reuses standalone relay logic) bitfun-relay-server = { path = "../../apps/relay-server" } @@ -119,10 +126,7 @@ bitfun-transport = { path = "../transport" } # Tauri dependency (optional, enabled only when needed) tauri = { workspace = true, optional = true } -# Windows-specific dependencies -[target.'cfg(windows)'.dependencies] -win32job = { workspace = true } - [features] -default = [] +default = ["ssh-remote"] tauri-support = ["tauri"] # Optional tauri support +ssh-remote = 
["russh", "russh-sftp", "russh-keys", "shellexpand", "ssh_config"] # Optional SSH remote support diff --git a/src/crates/core/src/agentic/coordination/coordinator.rs b/src/crates/core/src/agentic/coordination/coordinator.rs index 4a8dcf20..24985782 100644 --- a/src/crates/core/src/agentic/coordination/coordinator.rs +++ b/src/crates/core/src/agentic/coordination/coordinator.rs @@ -108,15 +108,78 @@ pub struct ConversationCoordinator { } impl ConversationCoordinator { - fn session_workspace_binding(session: &Session) -> Option { - Self::config_workspace_binding(&session.config) + /// Build a workspace binding that is remote-aware. + /// If the global remote workspace is active and matches the session path, + /// returns a `WorkspaceBinding` with remote metadata and correct local + /// session storage path. + async fn build_workspace_binding(config: &SessionConfig) -> Option { + let workspace_path = config.workspace_path.as_ref()?; + let path_buf = PathBuf::from(workspace_path); + + // Check if this path belongs to any registered remote workspace + if let Some(entry) = crate::service::remote_ssh::workspace_state::lookup_remote_connection(workspace_path).await { + if let Some(manager) = crate::service::remote_ssh::workspace_state::get_remote_workspace_manager() { + let local_session_path = manager.get_local_session_path(&entry.connection_id); + return Some(WorkspaceBinding::new_remote( + None, + path_buf, + entry.connection_id, + entry.connection_name, + local_session_path, + )); + } + } + + Some(WorkspaceBinding::new(None, path_buf)) } - fn config_workspace_binding(config: &SessionConfig) -> Option { - config - .workspace_path - .as_ref() - .map(|workspace_path| WorkspaceBinding::new(None, PathBuf::from(workspace_path))) + /// Build `WorkspaceServices` from a resolved `WorkspaceBinding`. + /// For remote bindings, wires up SSH-backed FS/shell; for local ones, + /// returns local implementations. 
+ async fn build_workspace_services( + binding: &Option, + ) -> Option { + let binding = binding.as_ref()?; + + if binding.is_remote() { + let manager = match crate::service::remote_ssh::workspace_state::get_remote_workspace_manager() { + Some(m) => m, + None => { + log::warn!("build_workspace_services: RemoteWorkspaceStateManager not initialized"); + return None; + } + }; + let ssh_manager = match manager.get_ssh_manager().await { + Some(m) => m, + None => { + log::warn!("build_workspace_services: SSH manager not available in state manager"); + return None; + } + }; + let file_service = match manager.get_file_service().await { + Some(f) => f, + None => { + log::warn!("build_workspace_services: File service not available in state manager"); + return None; + } + }; + let connection_id = match binding.connection_id() { + Some(id) => id.to_string(), + None => { + log::warn!("build_workspace_services: No connection_id in workspace binding"); + return None; + } + }; + log::info!("build_workspace_services: Built remote services for connection_id={}", connection_id); + Some(crate::agentic::workspace::remote_workspace_services( + connection_id, + file_service, + ssh_manager, + binding.root_path_string(), + )) + } else { + Some(crate::agentic::workspace::local_workspace_services()) + } } fn normalize_agent_type(agent_type: &str) -> String { @@ -333,7 +396,12 @@ Update the persona files and delete BOOTSTRAP.md as soon as bootstrap is complet } }; - let workspace_path_buf = std::path::PathBuf::from(&workspace_path); + let binding = Self::build_workspace_binding(&session.config).await; + let workspace_path_buf = binding + .as_ref() + .map(|b| b.session_storage_path().to_path_buf()) + .unwrap_or_else(|| PathBuf::from(&workspace_path)); + let persistence_manager = match PersistenceManager::new(path_manager) { Ok(manager) => manager, Err(e) => { @@ -440,7 +508,16 @@ Update the persona files and delete BOOTSTRAP.md as soon as bootstrap is complet Err(_) => return, }; - let 
workspace_path_buf = std::path::PathBuf::from(workspace_path); + let workspace_path_buf = { + let binding = Self::build_workspace_binding(&SessionConfig { + workspace_path: Some(workspace_path.to_string()), + ..Default::default() + }).await; + binding + .as_ref() + .map(|b| b.session_storage_path().to_path_buf()) + .unwrap_or_else(|| std::path::PathBuf::from(workspace_path)) + }; let persistence_manager = match PersistenceManager::new(path_manager) { Ok(manager) => manager, Err(_) => return, @@ -938,7 +1015,18 @@ Update the persona files and delete BOOTSTRAP.md as soon as bootstrap is complet user_message_metadata = Some(metadata); } - let session_workspace = Self::session_workspace_binding(&session); + let session_workspace = Self::build_workspace_binding(&session.config).await; + + // Build WorkspaceServices based on the workspace type + let workspace_services = Self::build_workspace_services(&session_workspace).await; + + info!( + "Dialog turn workspace context: session_id={}, workspace_path={:?}, is_remote={}, workspace_services={}", + session_id, + session.config.workspace_path, + session_workspace.as_ref().map(|ws| ws.is_remote()).unwrap_or(false), + if workspace_services.is_some() { "available" } else { "NONE" } + ); let wrapped_user_input = self .wrap_user_input( @@ -1037,6 +1125,7 @@ Update the persona files and delete BOOTSTRAP.md as soon as bootstrap is complet context: context_vars, subagent_parent_info: None, skip_tool_confirmation: submission_policy.skip_tool_confirmation, + workspace_services, }; // Auto-generate session title on first message @@ -1558,15 +1647,18 @@ Update the persona files and delete BOOTSTRAP.md as soon as bootstrap is complet None }; + let subagent_workspace = Self::build_workspace_binding(&session.config).await; + let subagent_services = Self::build_workspace_services(&subagent_workspace).await; let execution_context = ExecutionContext { session_id: session.session_id.clone(), dialog_turn_id: dialog_turn_id.clone(), turn_index: 
0, agent_type: agent_type.clone(), - workspace: Self::session_workspace_binding(&session), + workspace: subagent_workspace, context: context.unwrap_or_default(), subagent_parent_info: Some(subagent_parent_info), skip_tool_confirmation: false, + workspace_services: subagent_services, }; let initial_messages = vec![Message::user(task_description)]; diff --git a/src/crates/core/src/agentic/execution/execution_engine.rs b/src/crates/core/src/agentic/execution/execution_engine.rs index f4016899..92df5474 100644 --- a/src/crates/core/src/agentic/execution/execution_engine.rs +++ b/src/crates/core/src/agentic/execution/execution_engine.rs @@ -856,6 +856,7 @@ impl ExecutionEngine { agent_type: agent_type.clone(), context_vars: round_context_vars, cancellation_token: CancellationToken::new(), + workspace_services: context.workspace_services.clone(), }; // Execute single model round @@ -1106,6 +1107,7 @@ impl ExecutionEngine { image_context_provider: None, subagent_parent_info: None, cancellation_token: None, + workspace_services: None, }; for tool in &all_tools { if !tool.is_enabled().await { diff --git a/src/crates/core/src/agentic/execution/round_executor.rs b/src/crates/core/src/agentic/execution/round_executor.rs index 34688c8a..6bdbcb14 100644 --- a/src/crates/core/src/agentic/execution/round_executor.rs +++ b/src/crates/core/src/agentic/execution/round_executor.rs @@ -336,7 +336,8 @@ impl RoundExecutor { workspace: context.workspace.clone(), context_vars: context.context_vars.clone(), subagent_parent_info, - allowed_tools: context.available_tools.clone(), // Pass allowed tools list for security validation + allowed_tools: context.available_tools.clone(), + workspace_services: context.workspace_services.clone(), }; // Read tool execution related configuration from global config diff --git a/src/crates/core/src/agentic/execution/types.rs b/src/crates/core/src/agentic/execution/types.rs index 0ca43b04..10cd13d9 100644 --- a/src/crates/core/src/agentic/execution/types.rs 
+++ b/src/crates/core/src/agentic/execution/types.rs @@ -2,6 +2,7 @@ use crate::agentic::core::Message; use crate::agentic::tools::pipeline::SubagentParentInfo; +use crate::agentic::workspace::WorkspaceServices; use crate::agentic::WorkspaceBinding; use serde_json::Value; use std::collections::HashMap; @@ -18,6 +19,8 @@ pub struct ExecutionContext { pub context: HashMap, pub subagent_parent_info: Option, pub skip_tool_confirmation: bool, + /// Workspace I/O services (filesystem + shell) injected into tools + pub workspace_services: Option, } /// Round context @@ -35,6 +38,7 @@ pub struct RoundContext { pub agent_type: String, pub context_vars: HashMap, pub cancellation_token: CancellationToken, + pub workspace_services: Option, } /// Round result diff --git a/src/crates/core/src/agentic/mod.rs b/src/crates/core/src/agentic/mod.rs index b44a4a69..92570d63 100644 --- a/src/crates/core/src/agentic/mod.rs +++ b/src/crates/core/src/agentic/mod.rs @@ -43,4 +43,4 @@ pub use image_analysis::{ImageAnalyzer, MessageEnhancer}; pub use persistence::PersistenceManager; pub use session::*; pub use side_question::*; -pub use workspace::WorkspaceBinding; +pub use workspace::{WorkspaceBackend, WorkspaceBinding}; diff --git a/src/crates/core/src/agentic/session/session_manager.rs b/src/crates/core/src/agentic/session/session_manager.rs index 79622407..86e1ddd6 100644 --- a/src/crates/core/src/agentic/session/session_manager.rs +++ b/src/crates/core/src/agentic/session/session_manager.rs @@ -62,12 +62,37 @@ impl SessionManager { config.workspace_path.as_ref().map(PathBuf::from) } + /// Resolve the effective storage path for a session's workspace. + /// For remote workspaces, maps the remote path to a local session storage path + /// using `WorkspaceBinding.session_storage_path()`. 
+ async fn effective_workspace_path_from_config(config: &SessionConfig) -> Option { + let workspace_path = config.workspace_path.as_ref()?; + let path_buf = PathBuf::from(workspace_path); + + // Check if this path belongs to any registered remote workspace + if let Some(entry) = crate::service::remote_ssh::workspace_state::lookup_remote_connection(workspace_path).await { + if let Some(manager) = crate::service::remote_ssh::workspace_state::get_remote_workspace_manager() { + return Some(manager.get_local_session_path(&entry.connection_id)); + } + } + + Some(path_buf) + } + + #[allow(dead_code)] fn session_workspace_path(&self, session_id: &str) -> Option { self.sessions .get(session_id) .and_then(|session| Self::session_workspace_from_config(&session.config)) } + /// Resolve the effective storage path for a session by ID. + /// For remote workspaces, maps the remote path to a local session storage path. + async fn effective_session_workspace_path(&self, session_id: &str) -> Option { + let config = self.sessions.get(session_id)?.config.clone(); + Self::effective_workspace_path_from_config(&config).await + } + async fn rebuild_messages_from_turns( &self, workspace_path: &Path, @@ -203,10 +228,16 @@ impl SessionManager { config: SessionConfig, created_by: Option, ) -> BitFunResult { - let workspace_path = Self::session_workspace_from_config(&config).ok_or_else(|| { + let _workspace_path = Self::session_workspace_from_config(&config).ok_or_else(|| { BitFunError::Validation("Session workspace_path is required".to_string()) })?; + let session_storage_path = Self::effective_workspace_path_from_config(&config) + .await + .ok_or_else(|| { + BitFunError::Validation("Session workspace_path is required".to_string()) + })?; + // Check session count limit if self.sessions.len() >= self.config.max_active_sessions { return Err(BitFunError::Validation(format!( @@ -232,11 +263,11 @@ impl SessionManager { // 3. 
Initialize compression manager self.compression_manager.create_session(&session_id); - // 4. Persist + // 4. Persist to local path (handles remote workspaces correctly) if self.config.enable_persistence { if let Some(session) = self.sessions.get(&session_id) { self.persistence_manager - .save_session(&workspace_path, &session) + .save_session(&session_storage_path, &session) .await?; } } @@ -257,6 +288,8 @@ impl SessionManager { session_id: &str, new_state: SessionState, ) -> BitFunResult<()> { + let effective_path = self.effective_session_workspace_path(session_id).await; + if let Some(mut session) = self.sessions.get_mut(session_id) { session.state = new_state.clone(); session.updated_at = SystemTime::now(); @@ -264,9 +297,9 @@ impl SessionManager { // Persist state changes if self.config.enable_persistence { - if let Some(workspace_path) = Self::session_workspace_from_config(&session.config) { + if let Some(ref workspace_path) = effective_path { self.persistence_manager - .save_session_state(&workspace_path, session_id, &new_state) + .save_session_state(workspace_path, session_id, &new_state) .await?; } } @@ -287,7 +320,7 @@ impl SessionManager { /// Update session title (in-memory + persistence) pub async fn update_session_title(&self, session_id: &str, title: &str) -> BitFunResult<()> { - let workspace_path = self.session_workspace_path(session_id); + let workspace_path = self.effective_session_workspace_path(session_id).await; if let Some(mut session) = self.sessions.get_mut(session_id) { session.session_name = title.to_string(); @@ -331,8 +364,9 @@ impl SessionManager { } if self.config.enable_persistence { + let effective_path = self.effective_session_workspace_path(session_id).await; if let (Some(workspace_path), Some(session)) = ( - self.session_workspace_path(session_id), + effective_path, self.sessions.get(session_id), ) { self.persistence_manager @@ -367,8 +401,9 @@ impl SessionManager { } if self.config.enable_persistence { + let effective_path = 
self.effective_session_workspace_path(session_id).await; if let (Some(workspace_path), Some(session)) = ( - self.session_workspace_path(session_id), + effective_path, self.sessions.get(session_id), ) { self.persistence_manager @@ -455,10 +490,21 @@ impl SessionManager { // Check if session is already in memory let session_already_in_memory = self.sessions.contains_key(session_id); + let session_storage_path = { + let ws = workspace_path.to_string_lossy().to_string(); + let tmp_config = SessionConfig { + workspace_path: Some(ws), + ..Default::default() + }; + Self::effective_workspace_path_from_config(&tmp_config) + .await + .unwrap_or_else(|| workspace_path.to_path_buf()) + }; + // 1. Load session from storage let mut session = self .persistence_manager - .load_session(workspace_path, session_id) + .load_session(&session_storage_path, session_id) .await?; // Reset session state to Idle @@ -476,7 +522,7 @@ impl SessionManager { let mut latest_turn_index: Option = None; let messages = match self .persistence_manager - .load_latest_turn_context_snapshot(workspace_path, session_id) + .load_latest_turn_context_snapshot(&session_storage_path, session_id) .await? { Some((turn_index, msgs)) => { @@ -484,7 +530,7 @@ impl SessionManager { msgs } None => { - self.rebuild_messages_from_turns(workspace_path, session_id) + self.rebuild_messages_from_turns(&session_storage_path, session_id) .await? 
} }; @@ -655,7 +701,7 @@ impl SessionManager { .get_session(session_id) .ok_or_else(|| BitFunError::NotFound(format!("Session not found: {}", session_id)))?; let workspace_path = - Self::session_workspace_from_config(&session.config).ok_or_else(|| { + Self::effective_workspace_path_from_config(&session.config).await.ok_or_else(|| { BitFunError::Validation(format!( "Session workspace_path is missing: {}", session_id @@ -744,7 +790,7 @@ impl SessionManager { final_response: String, stats: TurnStats, ) -> BitFunResult<()> { - let workspace_path = self.session_workspace_path(session_id).ok_or_else(|| { + let workspace_path = self.effective_session_workspace_path(session_id).await.ok_or_else(|| { BitFunError::Validation(format!("Session workspace_path is missing: {}", session_id)) })?; let turn_index = self @@ -854,7 +900,7 @@ impl SessionManager { turn_id: &str, error: String, ) -> BitFunResult<()> { - let workspace_path = self.session_workspace_path(session_id).ok_or_else(|| { + let workspace_path = self.effective_session_workspace_path(session_id).await.ok_or_else(|| { BitFunError::Validation(format!("Session workspace_path is missing: {}", session_id)) })?; let turn_index = self @@ -1097,14 +1143,16 @@ impl SessionManager { session_id: &str, compression_state: CompressionState, ) -> BitFunResult<()> { + let effective_path = self.effective_session_workspace_path(session_id).await; + if let Some(mut session) = self.sessions.get_mut(session_id) { session.compression_state = compression_state; session.updated_at = SystemTime::now(); session.last_activity_at = SystemTime::now(); if self.config.enable_persistence { - if let Some(workspace_path) = Self::session_workspace_from_config(&session.config) { + if let Some(ref workspace_path) = effective_path { self.persistence_manager - .save_session(&workspace_path, &session) + .save_session(workspace_path, &session) .await?; } } diff --git a/src/crates/core/src/agentic/tools/framework.rs 
b/src/crates/core/src/agentic/tools/framework.rs index b3c13976..33c97db4 100644 --- a/src/crates/core/src/agentic/tools/framework.rs +++ b/src/crates/core/src/agentic/tools/framework.rs @@ -1,6 +1,7 @@ //! Tool framework - Tool interface definition and execution context use super::image_context::ImageContextProviderRef; use super::pipeline::SubagentParentInfo; +use crate::agentic::workspace::WorkspaceServices; use crate::agentic::WorkspaceBinding; use crate::util::errors::BitFunResult; use async_trait::async_trait; @@ -29,12 +30,30 @@ pub struct ToolUseContext { pub subagent_parent_info: Option, // Cancel tool execution more timely, especially for tools like TaskTool that need to run for a long time pub cancellation_token: Option, + /// Workspace I/O services (filesystem + shell) — use these instead of + /// checking `get_remote_workspace_manager()` inside individual tools. + pub workspace_services: Option, } impl ToolUseContext { pub fn workspace_root(&self) -> Option<&Path> { self.workspace.as_ref().map(|binding| binding.root_path()) } + + pub fn is_remote(&self) -> bool { + self.workspace + .as_ref() + .map(|ws| ws.is_remote()) + .unwrap_or(false) + } + + pub fn ws_fs(&self) -> Option<&dyn crate::agentic::workspace::WorkspaceFileSystem> { + self.workspace_services.as_ref().map(|s| s.fs.as_ref()) + } + + pub fn ws_shell(&self) -> Option<&dyn crate::agentic::workspace::WorkspaceShell> { + self.workspace_services.as_ref().map(|s| s.shell.as_ref()) + } } /// Tool options diff --git a/src/crates/core/src/agentic/tools/implementations/bash_tool.rs b/src/crates/core/src/agentic/tools/implementations/bash_tool.rs index 7b37fa3a..e03373b2 100644 --- a/src/crates/core/src/agentic/tools/implementations/bash_tool.rs +++ b/src/crates/core/src/agentic/tools/implementations/bash_tool.rs @@ -8,7 +8,7 @@ use crate::util::errors::{BitFunError, BitFunResult}; use crate::util::types::event::ToolExecutionProgressInfo; use async_trait::async_trait; use futures::StreamExt; -use 
log::{debug, error}; +use log::{debug, error, info}; use serde_json::{json, Value}; use std::time::{Duration, Instant}; use terminal_core::shell::{ShellDetector, ShellType}; @@ -395,6 +395,46 @@ Usage notes: .and_then(|v| v.as_str()) .ok_or_else(|| BitFunError::tool("command is required".to_string()))?; + // Remote workspace: execute via injected workspace shell + if context.is_remote() { + if let Some(ws_shell) = context.ws_shell() { + info!("Executing command on remote workspace via SSH: {}", command_str); + + let timeout_ms = input + .get("timeout_ms") + .and_then(|v| v.as_u64()) + .unwrap_or(120_000); + + let (stdout, stderr, exit_code) = ws_shell + .exec(command_str, Some(timeout_ms)) + .await + .map_err(|e| BitFunError::tool(format!("Remote command execution failed: {}", e)))?; + + let output = if stderr.is_empty() { + stdout.clone() + } else { + format!("{}\n{}", stdout, stderr) + }; + + let result = ToolResult::Result { + data: json!({ + "command": command_str, + "stdout": stdout, + "stderr": stderr, + "exit_code": exit_code, + "duration_ms": start_time.elapsed().as_millis() as u64, + "is_remote": true + }), + result_for_assistant: Some(format!( + "[Remote SSH] Command executed on remote server:\n{}\n\nExit code: {}", + output, + exit_code + )), + }; + return Ok(vec![result]); + } + } + let run_in_background = input .get("run_in_background") .and_then(|v| v.as_bool()) diff --git a/src/crates/core/src/agentic/tools/implementations/delete_file_tool.rs b/src/crates/core/src/agentic/tools/implementations/delete_file_tool.rs index 2edc29f7..79724da1 100644 --- a/src/crates/core/src/agentic/tools/implementations/delete_file_tool.rs +++ b/src/crates/core/src/agentic/tools/implementations/delete_file_tool.rs @@ -108,9 +108,8 @@ Important notes: async fn validate_input( &self, input: &Value, - _context: Option<&ToolUseContext>, + context: Option<&ToolUseContext>, ) -> ValidationResult { - // Validate path parameter let path_str = match input.get("path").and_then(|v| 
v.as_str()) { Some(p) => p, None => { @@ -134,7 +133,6 @@ Important notes: let path = Path::new(path_str); - // Validate if path is absolute if !path.is_absolute() { return ValidationResult { result: false, @@ -144,41 +142,41 @@ Important notes: }; } - // Validate if path exists - if !path.exists() { - return ValidationResult { - result: false, - message: Some(format!("Path does not exist: {}", path_str)), - error_code: Some(404), - meta: None, - }; - } - - // If directory, check if recursive deletion is needed - if path.is_dir() { - let recursive = input - .get("recursive") - .and_then(|v| v.as_bool()) - .unwrap_or(false); - - // Check if directory is empty - let is_empty = match fs::read_dir(path).await { - Ok(mut entries) => entries.next_entry().await.ok().flatten().is_none(), - Err(_) => false, - }; - - if !is_empty && !recursive { + let is_remote = context.map(|c| c.is_remote()).unwrap_or(false); + if !is_remote { + if !path.exists() { return ValidationResult { result: false, - message: Some(format!("Directory is not empty: {}. Set recursive=true to delete non-empty directories", path_str)), - error_code: Some(400), - meta: Some(json!({ - "is_directory": true, - "is_empty": false, - "requires_recursive": true - })), + message: Some(format!("Path does not exist: {}", path_str)), + error_code: Some(404), + meta: None, }; } + + if path.is_dir() { + let recursive = input + .get("recursive") + .and_then(|v| v.as_bool()) + .unwrap_or(false); + + let is_empty = match fs::read_dir(path).await { + Ok(mut entries) => entries.next_entry().await.ok().flatten().is_none(), + Err(_) => false, + }; + + if !is_empty && !recursive { + return ValidationResult { + result: false, + message: Some(format!("Directory is not empty: {}. 
Set recursive=true to delete non-empty directories", path_str)), + error_code: Some(400), + meta: Some(json!({ + "is_directory": true, + "is_empty": false, + "requires_recursive": true + })), + }; + } + } } ValidationResult { @@ -224,7 +222,7 @@ Important notes: async fn call_impl( &self, input: &Value, - _context: &ToolUseContext, + context: &ToolUseContext, ) -> BitFunResult> { let path_str = input .get("path") @@ -236,6 +234,41 @@ Important notes: .and_then(|v| v.as_bool()) .unwrap_or(false); + // Remote workspace: delete via shell command + if context.is_remote() { + let ws_shell = context.ws_shell().ok_or_else(|| { + BitFunError::tool("Workspace shell not available for remote Delete".to_string()) + })?; + + let rm_cmd = if recursive { + format!("rm -rf '{}'", path_str.replace('\'', "'\\''")) + } else { + format!("rm -f '{}'", path_str.replace('\'', "'\\''")) + }; + + let (_stdout, stderr, exit_code) = ws_shell + .exec(&rm_cmd, Some(15_000)) + .await + .map_err(|e| BitFunError::tool(format!("Failed to delete on remote: {}", e)))?; + + if exit_code != 0 && !stderr.is_empty() { + return Err(BitFunError::tool(format!("Remote delete failed: {}", stderr))); + } + + let result_data = json!({ + "success": true, + "path": path_str, + "is_directory": recursive, + "recursive": recursive, + "is_remote": true + }); + let result_text = self.render_result_for_assistant(&result_data); + return Ok(vec![ToolResult::Result { + data: result_data, + result_for_assistant: Some(result_text), + }]); + } + let path = Path::new(path_str); let is_directory = path.is_dir(); @@ -245,7 +278,6 @@ Important notes: path_str ); - // Execute deletion operation if is_directory { if recursive { fs::remove_dir_all(path) @@ -262,7 +294,6 @@ Important notes: .map_err(|e| BitFunError::tool(format!("Failed to delete file: {}", e)))?; } - // Build result let result_data = json!({ "success": true, "path": path_str, diff --git a/src/crates/core/src/agentic/tools/implementations/file_edit_tool.rs 
b/src/crates/core/src/agentic/tools/implementations/file_edit_tool.rs index 15d8c67b..15bc4f7a 100644 --- a/src/crates/core/src/agentic/tools/implementations/file_edit_tool.rs +++ b/src/crates/core/src/agentic/tools/implementations/file_edit_tool.rs @@ -5,7 +5,6 @@ use async_trait::async_trait; use serde_json::{json, Value}; use tool_runtime::fs::edit_file::edit_file; -/// File edit tool pub struct FileEditTool; impl FileEditTool { @@ -96,6 +95,56 @@ Usage: let resolved_path = resolve_path_with_workspace(file_path, context.workspace_root())?; + // When WorkspaceServices is available (both local and remote), + // use the abstract FS to read → edit in memory → write back. + if let Some(ws_fs) = context.ws_fs() { + let content = ws_fs + .read_file_text(&resolved_path) + .await + .map_err(|e| BitFunError::tool(format!("Failed to read file: {}", e)))?; + + let (new_content, match_count) = if replace_all { + let count = content.matches(old_string).count(); + if count == 0 { + return Err(BitFunError::tool(format!( + "old_string not found in file: {}", resolved_path + ))); + } + (content.replace(old_string, new_string), count) + } else { + if !content.contains(old_string) { + return Err(BitFunError::tool(format!( + "old_string not found in file: {}", resolved_path + ))); + } + let count = content.matches(old_string).count(); + if count > 1 { + return Err(BitFunError::tool(format!( + "old_string found {} times in file (expected exactly 1). 
Include more context to make it unique.", count + ))); + } + (content.replacen(old_string, new_string, 1), 1) + }; + + ws_fs + .write_file(&resolved_path, new_content.as_bytes()) + .await + .map_err(|e| BitFunError::tool(format!("Failed to write file: {}", e)))?; + + let result = ToolResult::Result { + data: json!({ + "file_path": resolved_path, + "old_string": old_string, + "new_string": new_string, + "success": true, + "match_count": match_count, + }), + result_for_assistant: Some(format!("Successfully edited {}", resolved_path)), + }; + return Ok(vec![result]); + } + + // Fallback: direct local edit via tool-runtime (used when no services injected) let edit_result = edit_file(&resolved_path, old_string, new_string, replace_all)?; let result = ToolResult::Result { diff --git a/src/crates/core/src/agentic/tools/implementations/file_read_tool.rs b/src/crates/core/src/agentic/tools/implementations/file_read_tool.rs index b603fa23..27949f54 100644 --- a/src/crates/core/src/agentic/tools/implementations/file_read_tool.rs +++ b/src/crates/core/src/agentic/tools/implementations/file_read_tool.rs @@ -10,11 +10,8 @@ use serde_json::{json, Value}; use std::path::Path; use tool_runtime::fs::read_file::read_file; -/// File read tool pub struct FileReadTool { - /// Maximum number of lines to read default_max_lines_to_read: usize, - /// Maximum line length max_line_chars: usize, } @@ -26,13 +23,54 @@ impl FileReadTool { } } - /// Create FileReadTool with custom configuration pub fn with_config(default_max_lines_to_read: usize, max_line_chars: usize) -> Self { Self { default_max_lines_to_read, max_line_chars, } } + + fn format_lines(&self, content: &str, start_line: usize, limit: usize) -> tool_runtime::fs::read_file::ReadFileResult { + let lines: Vec<&str> = content.lines().collect(); + let total_lines = lines.len(); + + if total_lines == 0 { + return tool_runtime::fs::read_file::ReadFileResult { + start_line: 0, + end_line: 0, + total_lines: 0, + content: String::new(), + }; 
+ } + + let start_index = (start_line - 1).min(total_lines - 1); + let end_index = (start_index + limit).min(total_lines); + let selected_lines = &lines[start_index..end_index]; + + let truncated_lines: Vec = selected_lines + .iter() + .enumerate() + .map(|(idx, line)| { + let line_number = start_index + idx + 1; + let line_content = if line.chars().count() > self.max_line_chars { + format!( + "{} [truncated]", + tool_runtime::util::string::truncate_string_by_chars(line, self.max_line_chars) + ) + } else { + line.to_string() + }; + format!("{:>6}\t{}", line_number, line_content) + }) + .collect(); + + tool_runtime::fs::read_file::ReadFileResult { + start_line: start_index + 1, + end_line: end_index, + total_lines, + content: truncated_lines.join("\n"), + } + } } #[async_trait] @@ -98,31 +136,45 @@ Usage: input: &Value, context: Option<&ToolUseContext>, ) -> ValidationResult { - if let Some(file_path) = input.get("file_path").and_then(|v| v.as_str()) { - if file_path.is_empty() { + let file_path = match input.get("file_path").and_then(|v| v.as_str()) { + Some(p) if !p.is_empty() => p, + Some(_) => { return ValidationResult { result: false, message: Some("file_path cannot be empty".to_string()), error_code: Some(400), meta: None, - }; + } } + None => { + return ValidationResult { + result: false, + message: Some("file_path is required".to_string()), + error_code: Some(400), + meta: None, + } + } + }; - let resolved_path = match resolve_path_with_workspace( - file_path, - context.and_then(|ctx| ctx.workspace_root()), - ) { - Ok(path) => path, - Err(err) => { - return ValidationResult { - result: false, - message: Some(err.to_string()), - error_code: Some(400), - meta: None, - }; + let resolved_path = match resolve_path_with_workspace( + file_path, + context.and_then(|ctx| ctx.workspace_root()), + ) { + Ok(path) => path, + Err(err) => { + return ValidationResult { + result: false, + message: Some(err.to_string()), + error_code: Some(400), + meta: None, } - }; + } + }; 
+ // For remote workspaces, skip local filesystem checks — the actual + // read goes through WorkspaceFileSystem in call_impl. + let is_remote = context.map(|c| c.is_remote()).unwrap_or(false); + if !is_remote { let path = Path::new(&resolved_path); if !path.exists() { return ValidationResult { @@ -132,7 +184,6 @@ Usage: meta: None, }; } - if !path.is_file() { return ValidationResult { result: false, @@ -141,21 +192,9 @@ Usage: meta: None, }; } - } else { - return ValidationResult { - result: false, - message: Some("file_path is required".to_string()), - error_code: Some(400), - meta: None, - }; } - ValidationResult { - result: true, - message: None, - error_code: None, - meta: None, - } + ValidationResult::default() } fn render_tool_use_message(&self, input: &Value, options: &ToolRenderOptions) -> String { @@ -192,10 +231,18 @@ Usage: let resolved_path = resolve_path_with_workspace(file_path, context.workspace_root())?; - let read_file_result = read_file(&resolved_path, start_line, limit, self.max_line_chars) - .map_err(|e| BitFunError::tool(e))?; + // Use the workspace file system from context — works for both local and remote. + let read_file_result = if let Some(ws_fs) = context.ws_fs() { + let content = ws_fs + .read_file_text(&resolved_path) + .await + .map_err(|e| BitFunError::tool(format!("Failed to read file: {}", e)))?; + self.format_lines(&content, start_line, limit) + } else { + read_file(&resolved_path, start_line, limit, self.max_line_chars) + .map_err(|e| BitFunError::tool(e))? 
+ }; - // Get matching file-specific rules let file_rules = match get_global_ai_rules_service().await { Ok(rules_service) => { rules_service @@ -211,7 +258,6 @@ Usage: } }; - // Build result string let mut result_for_assistant = format!( "Read lines {}-{} from {} ({} total lines)\n\n{}\n", read_file_result.start_line, @@ -221,7 +267,6 @@ Usage: read_file_result.content ); - // If there are matching rules, append to result if let Some(rules_content) = &file_rules.formatted_content { result_for_assistant.push_str("\n\n"); result_for_assistant.push_str(rules_content); diff --git a/src/crates/core/src/agentic/tools/implementations/file_write_tool.rs b/src/crates/core/src/agentic/tools/implementations/file_write_tool.rs index 5d59df31..7f5b3ae6 100644 --- a/src/crates/core/src/agentic/tools/implementations/file_write_tool.rs +++ b/src/crates/core/src/agentic/tools/implementations/file_write_tool.rs @@ -8,7 +8,6 @@ use serde_json::{json, Value}; use std::path::Path; use tokio::fs; -/// File write tool pub struct FileWriteTool; impl FileWriteTool { @@ -101,12 +100,7 @@ Usage: }; } - ValidationResult { - result: true, - message: None, - error_code: None, - meta: None, - } + ValidationResult::default() } fn render_tool_use_message(&self, input: &Value, options: &ToolRenderOptions) -> String { @@ -143,17 +137,22 @@ Usage: .and_then(|v| v.as_str()) .ok_or_else(|| BitFunError::tool("content is required".to_string()))?; - // Create directory if it doesn't exist - if let Some(parent) = Path::new(&resolved_path).parent() { - fs::create_dir_all(parent) + if let Some(ws_fs) = context.ws_fs() { + ws_fs + .write_file(&resolved_path, content.as_bytes()) .await - .map_err(|e| BitFunError::tool(format!("Failed to create directory: {}", e)))?; + .map_err(|e| BitFunError::tool(format!("Failed to write file: {}", e)))?; + } else { + if let Some(parent) = Path::new(&resolved_path).parent() { + fs::create_dir_all(parent) + .await + .map_err(|e| BitFunError::tool(format!("Failed to create 
directory: {}", e)))?; + } + fs::write(&resolved_path, content).await.map_err(|e| { + BitFunError::tool(format!("Failed to write file {}: {}", resolved_path, e)) + })?; } - fs::write(&resolved_path, content).await.map_err(|e| { - BitFunError::tool(format!("Failed to write file {}: {}", resolved_path, e)) - })?; - let result = ToolResult::Result { data: json!({ "file_path": resolved_path, diff --git a/src/crates/core/src/agentic/tools/implementations/glob_tool.rs b/src/crates/core/src/agentic/tools/implementations/glob_tool.rs index a494af0b..5954eac2 100644 --- a/src/crates/core/src/agentic/tools/implementations/glob_tool.rs +++ b/src/crates/core/src/agentic/tools/implementations/glob_tool.rs @@ -7,26 +7,12 @@ use log::warn; use serde_json::{json, Value}; use std::path::{Path, PathBuf}; -/// Search for files matching a glob pattern with optional gitignore and hidden file filtering -/// -/// # Arguments -/// * `search_path` - The root directory to search in -/// * `pattern` - Glob pattern relative to search_path (e.g., "*.rs", "**/*.txt") -/// * `ignore` - If true, apply .gitignore rules -/// * `ignore_hidden` - If true, skip hidden files; if false, include them -/// -/// # Returns -/// A Result containing a Vec of matched file paths as Strings -/// -/// # Behavior -/// - Symlinks are not followed; pub fn glob_with_ignore( search_path: &str, pattern: &str, ignore: bool, ignore_hidden: bool, ) -> Result, Box> { - // Validate search path let path = std::path::Path::new(search_path); if !path.exists() { return Err(format!("Search path '{}' does not exist", search_path).into()); @@ -35,47 +21,34 @@ pub fn glob_with_ignore( return Err(format!("Search path '{}' is not a directory", search_path).into()); } - // Convert search_path to absolute path at the beginning - // Use dunce::canonicalize to avoid Windows UNC path format (\\?\) let search_path_abs = dunce::canonicalize(Path::new(search_path))?; let search_path_str = search_path_abs.to_string_lossy(); - // Convert 
pattern to absolute form by joining with search_path - // This ensures pattern matching works with absolute paths let absolute_pattern = format!("{}/{}", search_path_str, pattern); - // Compile the glob pattern let glob = GlobBuilder::new(&absolute_pattern) .literal_separator(true) .build()? .compile_matcher(); - // Build the directory walker with specified options using absolute path let walker = WalkBuilder::new(&search_path_abs) - .git_ignore(ignore) // Apply gitignore rules if ignore is true - .hidden(ignore_hidden) // Skip hidden files if ignore_hidden is true + .git_ignore(ignore) + .hidden(ignore_hidden) .build(); let mut results = Vec::new(); - // Walk the directory tree for entry in walker { let entry = match entry { Ok(entry) => entry, Err(err) => { - // The filesystem can change during a walk (e.g. files deleted), or there may be - // broken/permission-denied entries. A single unreadable entry should not fail the - // entire glob. warn!("Glob walker entry error (skipped): {}", err); continue; } }; let path = entry.path().to_path_buf(); - // Match against the glob pattern using absolute path - // Since pattern is now absolute, match directly against the path if glob.is_match(&path) { - // Use dunce::simplified to convert UNC paths to standard Windows paths let simplified_path = dunce::simplified(&path); results.push(simplified_path.to_string_lossy().to_string()); } @@ -104,15 +77,11 @@ fn limit_paths(paths: &[String], limit: usize) -> Vec { } fn call_glob(search_path: &str, pattern: &str, limit: usize) -> Result, String> { - // Check if pattern targets whitelisted directories let is_whitelisted = pattern.starts_with(".bitfun") || pattern.contains("/.bitfun") || pattern.contains("\\.bitfun"); - // Disable gitignore for whitelisted directories to allow searching let apply_gitignore = !is_whitelisted; - - // Disable hidden file filtering for whitelisted directories let ignore_hidden_files = !is_whitelisted; let all_paths = glob_with_ignore(search_path, 
pattern, apply_gitignore, ignore_hidden_files) @@ -121,6 +90,22 @@ fn call_glob(search_path: &str, pattern: &str, limit: usize) -> Result String { + let name_pattern = if pattern.contains("**/") { + pattern.replacen("**/", "", 1) + } else { + pattern.to_string() + }; + + let escaped_dir = search_dir.replace('\'', "'\\''"); + let escaped_pattern = name_pattern.replace('\'', "'\\''"); + + format!( + "find '{}' -maxdepth 10 -name '{}' -not -path '*/.git/*' -not -path '*/node_modules/*' 2>/dev/null | head -n {}", + escaped_dir, escaped_pattern, limit + ) +} + pub struct GlobTool; impl GlobTool { @@ -215,6 +200,43 @@ impl Tool for GlobTool { .map(|v| v as usize) .unwrap_or(100); + // Remote workspace: use `find` via the workspace shell + if context.is_remote() { + let ws_shell = context.ws_shell().ok_or_else(|| { + BitFunError::tool("Workspace shell not available".to_string()) + })?; + + let search_dir = resolved_path.display().to_string(); + let find_cmd = build_remote_find_command(&search_dir, pattern, limit); + + let (stdout, _stderr, _exit_code) = ws_shell + .exec(&find_cmd, Some(30_000)) + .await + .map_err(|e| BitFunError::tool(format!("Failed to glob on remote: {}", e)))?; + + let matches: Vec = stdout + .lines() + .filter(|l| !l.is_empty()) + .map(|l| l.to_string()) + .collect(); + let limited = limit_paths(&matches, limit); + let result_text = if limited.is_empty() { + format!("No files found matching pattern '{}'", pattern) + } else { + limited.join("\n") + }; + + return Ok(vec![ToolResult::Result { + data: json!({ + "pattern": pattern, + "path": search_dir, + "matches": limited, + "match_count": limited.len() + }), + result_for_assistant: Some(result_text), + }]); + } + let matches = call_glob(&resolved_path.display().to_string(), pattern, limit) .map_err(|e| BitFunError::tool(e))?; diff --git a/src/crates/core/src/agentic/tools/implementations/grep_tool.rs b/src/crates/core/src/agentic/tools/implementations/grep_tool.rs index 6e4c5ac9..91887656 100644 --- 
a/src/crates/core/src/agentic/tools/implementations/grep_tool.rs +++ b/src/crates/core/src/agentic/tools/implementations/grep_tool.rs @@ -6,7 +6,6 @@ use serde_json::{json, Value}; use std::sync::Arc; use tool_runtime::search::grep_search::{grep_search, GrepOptions, OutputMode, ProgressCallback}; -/// Grep tool pub struct GrepTool; impl GrepTool { @@ -14,57 +13,108 @@ impl GrepTool { Self } - fn build_grep_options( + async fn call_remote( &self, input: &Value, context: &ToolUseContext, - ) -> BitFunResult { - // Parse input parameters + ) -> BitFunResult> { + let ws_shell = context.ws_shell().ok_or_else(|| { + BitFunError::tool("Workspace shell not available".to_string()) + })?; + let pattern = input .get("pattern") .and_then(|v| v.as_str()) .ok_or_else(|| BitFunError::tool("pattern is required".to_string()))?; let search_path = input.get("path").and_then(|v| v.as_str()).unwrap_or("."); - - // Parse path: ensure relative paths are relative to workspace let resolved_path = resolve_path_with_workspace(search_path, context.workspace_root())?; let case_insensitive = input.get("-i").and_then(|v| v.as_bool()).unwrap_or(false); + let head_limit = input + .get("head_limit") + .and_then(|v| v.as_u64()) + .map(|v| v as usize) + .unwrap_or(200); + let glob_pattern = input.get("glob").and_then(|v| v.as_str()); + let file_type = input.get("type").and_then(|v| v.as_str()); - let multiline = input - .get("multiline") - .and_then(|v| v.as_bool()) - .unwrap_or(false); + let escaped_path = resolved_path.replace('\'', "'\\''"); + let escaped_pattern = pattern.replace('\'', "'\\''"); + let mut cmd = "rg --no-heading --line-number".to_string(); + if case_insensitive { + cmd.push_str(" -i"); + } + if let Some(glob) = glob_pattern { + cmd.push_str(&format!(" --glob '{}'", glob.replace('\'', "'\\''"))); + } + if let Some(ft) = file_type { + cmd.push_str(&format!(" --type {}", ft)); + } + cmd.push_str(&format!(" '{}' '{}' 2>/dev/null | head -n {}", escaped_pattern, escaped_path, 
head_limit)); + + let full_cmd = format!( + "if command -v rg >/dev/null 2>&1; then {}; else grep -rn{} '{}' '{}' 2>/dev/null | head -n {}; fi", + cmd, + if case_insensitive { "i" } else { "" }, + escaped_pattern, + escaped_path, + head_limit, + ); + + let (stdout, _stderr, _exit_code) = ws_shell + .exec(&full_cmd, Some(30_000)) + .await + .map_err(|e| BitFunError::tool(format!("Remote grep failed: {}", e)))?; + + let lines: Vec<&str> = stdout.lines().collect(); + let total_matches = lines.len(); + let result_text = if lines.is_empty() { + format!("No matches found for pattern '{}'", pattern) + } else { + stdout.clone() + }; + + Ok(vec![ToolResult::Result { + data: json!({ + "pattern": pattern, + "path": resolved_path, + "output_mode": "content", + "total_matches": total_matches, + "result": result_text, + }), + result_for_assistant: Some(result_text), + }]) + } + + fn build_grep_options( + &self, + input: &Value, + context: &ToolUseContext, + ) -> BitFunResult { + let pattern = input + .get("pattern") + .and_then(|v| v.as_str()) + .ok_or_else(|| BitFunError::tool("pattern is required".to_string()))?; + + let search_path = input.get("path").and_then(|v| v.as_str()).unwrap_or("."); + let resolved_path = resolve_path_with_workspace(search_path, context.workspace_root())?; + + let case_insensitive = input.get("-i").and_then(|v| v.as_bool()).unwrap_or(false); + let multiline = input.get("multiline").and_then(|v| v.as_bool()).unwrap_or(false); let output_mode_str = input .get("output_mode") .and_then(|v| v.as_str()) .unwrap_or("files_with_matches"); let output_mode = OutputMode::from_str(output_mode_str); - let show_line_numbers = input.get("-n").and_then(|v| v.as_bool()).unwrap_or(false); - let context_c = input.get("-C").and_then(|v| v.as_u64()).map(|v| v as usize); - let before_context = input.get("-B").and_then(|v| v.as_u64()).map(|v| v as usize); - let after_context = input.get("-A").and_then(|v| v.as_u64()).map(|v| v as usize); - - let head_limit = input - 
.get("head_limit") - .and_then(|v| v.as_u64()) - .map(|v| v as usize); - - let glob_pattern = input - .get("glob") - .and_then(|v| v.as_str()) - .map(|s| s.to_string()); - - let file_type = input - .get("type") - .and_then(|v| v.as_str()) - .map(|s| s.to_string()); + let head_limit = input.get("head_limit").and_then(|v| v.as_u64()).map(|v| v as usize); + let glob_pattern = input.get("glob").and_then(|v| v.as_str()).map(|s| s.to_string()); + let file_type = input.get("type").and_then(|v| v.as_str()).map(|s| s.to_string()); let mut options = GrepOptions::new(pattern, resolved_path) .case_insensitive(case_insensitive) @@ -72,24 +122,12 @@ impl GrepTool { .output_mode(output_mode) .show_line_numbers(show_line_numbers); - if let Some(context) = context_c { - options = options.context(context); - } - if let Some(before_context) = before_context { - options = options.before_context(before_context); - } - if let Some(after_context) = after_context { - options = options.after_context(after_context); - } - if let Some(head_limit) = head_limit { - options = options.head_limit(head_limit); - } - if let Some(glob_pattern) = glob_pattern { - options = options.glob(glob_pattern); - } - if let Some(file_type) = file_type { - options = options.file_type(file_type); - } + if let Some(c) = context_c { options = options.context(c); } + if let Some(b) = before_context { options = options.before_context(b); } + if let Some(a) = after_context { options = options.after_context(a); } + if let Some(h) = head_limit { options = options.head_limit(h); } + if let Some(g) = glob_pattern { options = options.glob(g); } + if let Some(t) = file_type { options = options.file_type(t); } Ok(options) } @@ -135,55 +173,23 @@ Usage: "enum": ["content", "files_with_matches", "count"], "description": "Output mode: \"content\" shows matching lines (supports -A/-B/-C context, -n line numbers, head_limit), \"files_with_matches\" shows file paths (supports head_limit), \"count\" shows match counts (supports 
head_limit). Defaults to \"files_with_matches\"." }, - "-B": { - "type": "number", - "description": "Number of lines to show before each match (rg -B). Requires output_mode: \"content\", ignored otherwise." - }, - "-A": { - "type": "number", - "description": "Number of lines to show after each match (rg -A). Requires output_mode: \"content\", ignored otherwise." - }, - "-C": { - "type": "number", - "description": "Number of lines to show before and after each match (rg -C). Requires output_mode: \"content\", ignored otherwise." - }, - "-n": { - "type": "boolean", - "description": "Show line numbers in output (rg -n). Requires output_mode: \"content\", ignored otherwise." - }, - "-i": { - "type": "boolean", - "description": "Case insensitive search (rg -i)" - }, - "type": { - "type": "string", - "description": "File type to search (rg --type). Common types: js, py, rust, go, java, etc. More efficient than include for standard file types." - }, - "head_limit": { - "type": "number", - "description": "Limit output to first N lines/entries, equivalent to \"| head -N\". Works across all output modes: content (limits output lines), files_with_matches (limits file paths), count (limits count entries). When unspecified, shows all results from ripgrep." - }, - "multiline": { - "type": "boolean", - "description": "Enable multiline mode where . matches newlines and patterns can span lines (rg -U --multiline-dotall). Default: false." - } + "-B": { "type": "number", "description": "Number of lines to show before each match (rg -B). Requires output_mode: \"content\", ignored otherwise." }, + "-A": { "type": "number", "description": "Number of lines to show after each match (rg -A). Requires output_mode: \"content\", ignored otherwise." }, + "-C": { "type": "number", "description": "Number of lines to show before and after each match (rg -C). Requires output_mode: \"content\", ignored otherwise." }, + "-n": { "type": "boolean", "description": "Show line numbers in output (rg -n). 
Requires output_mode: \"content\", ignored otherwise." }, + "-i": { "type": "boolean", "description": "Case insensitive search (rg -i)" }, + "type": { "type": "string", "description": "File type to search (rg --type). Common types: js, py, rust, go, java, etc." }, + "head_limit": { "type": "number", "description": "Limit output to first N lines/entries." }, + "multiline": { "type": "boolean", "description": "Enable multiline mode where . matches newlines and patterns can span lines (rg -U --multiline-dotall). Default: false." } }, "required": ["pattern"], "additionalProperties": false, }) } - fn is_readonly(&self) -> bool { - true - } - - fn is_concurrency_safe(&self, _input: Option<&Value>) -> bool { - true - } - - fn needs_permissions(&self, _input: Option<&Value>) -> bool { - false - } + fn is_readonly(&self) -> bool { true } + fn is_concurrency_safe(&self, _input: Option<&Value>) -> bool { true } + fn needs_permissions(&self, _input: Option<&Value>) -> bool { false } fn render_tool_use_message( &self, @@ -191,26 +197,12 @@ Usage: _options: &crate::agentic::tools::framework::ToolRenderOptions, ) -> String { let pattern = input.get("pattern").and_then(|v| v.as_str()).unwrap_or(""); - let search_path = input.get("path").and_then(|v| v.as_str()).unwrap_or("."); - let file_type = input.get("type").and_then(|v| v.as_str()); - let glob_pattern = input.get("glob").and_then(|v| v.as_str()); + let output_mode = input.get("output_mode").and_then(|v| v.as_str()).unwrap_or("files_with_matches"); - let output_mode = input - .get("output_mode") - .and_then(|v| v.as_str()) - .unwrap_or("files_with_matches"); - - // Build search scope description - let scope = if search_path == "." { - "Current workspace".to_string() - } else { - search_path.to_string() - }; - - // Add file type filter information + let scope = if search_path == "." 
{ "Current workspace".to_string() } else { search_path.to_string() }; let scope_with_filter = if let Some(ft) = file_type { format!("{} (*.{})", scope, ft) } else if let Some(gp) = glob_pattern { @@ -218,18 +210,13 @@ Usage: } else { scope }; - - // Add output mode information let mode_desc = match output_mode { "content" => "Show matching content", "count" => "Count matches", _ => "List matching files", }; - format!( - "Search \"{}\" | {} | {}", - pattern, scope_with_filter, mode_desc - ) + format!("Search \"{}\" | {} | {}", pattern, scope_with_filter, mode_desc) } async fn call_impl( @@ -237,12 +224,16 @@ Usage: input: &Value, context: &ToolUseContext, ) -> BitFunResult> { + // Remote workspace: use shell-based grep/rg + if context.is_remote() { + return self.call_remote(input, context).await; + } + let grep_options = self.build_grep_options(input, context)?; let pattern = grep_options.pattern.clone(); let path = grep_options.path.clone(); let output_mode = grep_options.output_mode.to_string(); - // Get event system and tool ID for sending progress events let event_system = crate::infrastructure::events::event_system::get_global_event_system(); let tool_use_id = context .tool_call_id @@ -250,7 +241,6 @@ Usage: .unwrap_or_else(|| format!("grep_{}", uuid::Uuid::new_v4())); let tool_name = self.name().to_string(); - // Create progress callback, send progress through event system let tool_use_id_clone = tool_use_id.clone(); let tool_name_clone = tool_name.clone(); let event_system_clone = event_system.clone(); @@ -261,21 +251,19 @@ Usage: files_processed, file_count, total_matches ); - // Send progress through event system let event = crate::infrastructure::events::event_system::BackendEvent::ToolExecutionProgress( - crate::util::types::event::ToolExecutionProgressInfo { - tool_use_id: tool_use_id_clone.clone(), - tool_name: tool_name_clone.clone(), - progress_message, - percentage: None, - timestamp: std::time::SystemTime::now() - 
.duration_since(std::time::UNIX_EPOCH) - .unwrap_or_default() - .as_secs(), - } - ); - - // Send event asynchronously (fire-and-forget) + crate::util::types::event::ToolExecutionProgressInfo { + tool_use_id: tool_use_id_clone.clone(), + tool_name: tool_name_clone.clone(), + progress_message, + percentage: None, + timestamp: std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_secs(), + } + ); + let event_system = event_system_clone.clone(); tokio::spawn(async move { let _ = event_system.emit(event).await; @@ -283,7 +271,6 @@ Usage: }, ); - // Use tokio::task::spawn_blocking to move synchronous operation to thread pool let search_result = tokio::task::spawn_blocking(move || { grep_search(grep_options, Some(progress_callback), Some(500)) }) @@ -295,7 +282,6 @@ Usage: Err(e) => return Err(BitFunError::tool(format!("grep search failed: {}", e))), }; - // Return final result Ok(vec![ToolResult::Result { data: json!({ "pattern": pattern, diff --git a/src/crates/core/src/agentic/tools/implementations/ls_tool.rs b/src/crates/core/src/agentic/tools/implementations/ls_tool.rs index 1b08d3c2..952ad7a9 100644 --- a/src/crates/core/src/agentic/tools/implementations/ls_tool.rs +++ b/src/crates/core/src/agentic/tools/implementations/ls_tool.rs @@ -13,6 +13,10 @@ use serde_json::{json, Value}; use std::path::Path; use std::time::SystemTime; +fn shell_escape(s: &str) -> String { + format!("'{}'", s.replace('\'', "'\\''")) +} + /// LS tool - list directory tree pub struct LSTool { /// Default maximum number of entries to return @@ -88,7 +92,7 @@ Usage: async fn validate_input( &self, input: &Value, - _context: Option<&ToolUseContext>, + context: Option<&ToolUseContext>, ) -> ValidationResult { if let Some(path) = input.get("path").and_then(|v| v.as_str()) { if path.is_empty() { @@ -102,7 +106,6 @@ Usage: let path_obj = Path::new(path); - // Validate if path is absolute if !path_obj.is_absolute() { return ValidationResult { result: false, 
@@ -112,22 +115,25 @@ Usage: }; } - if !path_obj.exists() { - return ValidationResult { - result: false, - message: Some(format!("Directory does not exist: {}", path)), - error_code: Some(404), - meta: None, - }; - } + let is_remote = context.map(|c| c.is_remote()).unwrap_or(false); + if !is_remote { + if !path_obj.exists() { + return ValidationResult { + result: false, + message: Some(format!("Directory does not exist: {}", path)), + error_code: Some(404), + meta: None, + }; + } - if !path_obj.is_dir() { - return ValidationResult { - result: false, - message: Some(format!("Path is not a directory: {}", path)), - error_code: Some(400), - meta: None, - }; + if !path_obj.is_dir() { + return ValidationResult { + result: false, + message: Some(format!("Path is not a directory: {}", path)), + error_code: Some(400), + meta: None, + }; + } } } else { return ValidationResult { @@ -161,7 +167,7 @@ Usage: async fn call_impl( &self, input: &Value, - _context: &ToolUseContext, + context: &ToolUseContext, ) -> BitFunResult> { let path = input .get("path") @@ -174,7 +180,88 @@ Usage: .map(|v| v as usize) .unwrap_or(self.default_limit); - // Parse ignore parameter + // Remote workspace: execute ls via SSH shell + if context.is_remote() { + let ws_shell = context.ws_shell().ok_or_else(|| { + BitFunError::tool("Workspace shell not available for remote LS".to_string()) + })?; + + let ls_cmd = format!( + "find {} -maxdepth 1 -not -name '.*' -not -path {} | head -n {} | sort", + shell_escape(path), + shell_escape(path), + limit + 1 + ); + + let (stdout, _stderr, _exit_code) = ws_shell + .exec(&ls_cmd, Some(15_000)) + .await + .map_err(|e| BitFunError::tool(format!("Failed to list remote directory: {}", e)))?; + + let mut file_lines = Vec::new(); + let mut dir_lines = Vec::new(); + + for line in stdout.lines().filter(|l| !l.is_empty()) { + let name = Path::new(line) + .file_name() + .map(|n| n.to_string_lossy().to_string()) + .unwrap_or_else(|| line.to_string()); + let is_dir = 
line.ends_with('/'); + if is_dir || name.is_empty() { + dir_lines.push((name, line.to_string())); + } else { + file_lines.push((name, line.to_string())); + } + } + + // Use a simpler stat-based listing for the text output + let stat_cmd = format!( + "ls -la --time-style=long-iso {} 2>/dev/null || ls -la {}", + shell_escape(path), + shell_escape(path) + ); + let (ls_output, _, _) = ws_shell + .exec(&stat_cmd, Some(15_000)) + .await + .map_err(|e| BitFunError::tool(format!("Failed to list remote directory: {}", e)))?; + + let result_text = format!( + "Directory listing: {}\n\n{}", + path, + ls_output.trim() + ); + + let entries_json: Vec = stdout + .lines() + .filter(|l| !l.is_empty()) + .map(|line| { + let name = Path::new(line) + .file_name() + .map(|n| n.to_string_lossy().to_string()) + .unwrap_or_else(|| line.to_string()); + json!({ + "name": name, + "path": line, + "is_dir": line.ends_with('/'), + }) + }) + .collect(); + + let total_entries = entries_json.len(); + let result = ToolResult::Result { + data: json!({ + "path": path, + "entries": entries_json, + "total": total_entries, + "limit": limit, + "is_remote": true + }), + result_for_assistant: Some(result_text), + }; + return Ok(vec![result]); + } + + // Local: original implementation let ignore_patterns = input.get("ignore").and_then(|v| v.as_array()).map(|arr| { arr.iter() .filter_map(|v| v.as_str().map(|s| s.to_string())) @@ -183,7 +270,6 @@ Usage: let entries = list_files(path, limit, ignore_patterns).map_err(|e| BitFunError::tool(e))?; - // Build JSON data let entries_json = entries .iter() .filter(|entry| entry.depth == 1) diff --git a/src/crates/core/src/agentic/tools/implementations/task_tool.rs b/src/crates/core/src/agentic/tools/implementations/task_tool.rs index c92ad8d4..bd847a8b 100644 --- a/src/crates/core/src/agentic/tools/implementations/task_tool.rs +++ b/src/crates/core/src/agentic/tools/implementations/task_tool.rs @@ -269,11 +269,14 @@ impl Tool for TaskTool { .workspace_root() 
.map(|path| path.to_string_lossy().into_owned()); if subagent_type == "Explore" || subagent_type == "FileFinder" { - let workspace_path = requested_workspace_path.as_deref().ok_or_else(|| { - BitFunError::tool( - "workspace_path is required for Explore/FileFinder agent".to_string(), - ) - })?; + let workspace_path = requested_workspace_path + .as_deref() + .or(current_workspace_path.as_deref()) + .ok_or_else(|| { + BitFunError::tool( + "workspace_path is required for Explore/FileFinder agent".to_string(), + ) + })?; if workspace_path.is_empty() { return Err(BitFunError::tool( @@ -281,19 +284,22 @@ impl Tool for TaskTool { )); } - // Validate workspace_path exists and is a directory - let path = std::path::Path::new(&workspace_path); - if !path.exists() { - return Err(BitFunError::tool(format!( - "workspace_path '{}' does not exist", - workspace_path - ))); - } - if !path.is_dir() { - return Err(BitFunError::tool(format!( - "workspace_path '{}' is not a directory", - workspace_path - ))); + // For remote workspaces, skip local filesystem validation — the path + // exists on the remote server, not locally. + if !context.is_remote() { + let path = std::path::Path::new(&workspace_path); + if !path.exists() { + return Err(BitFunError::tool(format!( + "workspace_path '{}' does not exist", + workspace_path + ))); + } + if !path.is_dir() { + return Err(BitFunError::tool(format!( + "workspace_path '{}' is not a directory", + workspace_path + ))); + } } prompt.push_str(&format!( diff --git a/src/crates/core/src/agentic/tools/implementations/tool-runtime/src/fs/backend.rs b/src/crates/core/src/agentic/tools/implementations/tool-runtime/src/fs/backend.rs new file mode 100644 index 00000000..487c7672 --- /dev/null +++ b/src/crates/core/src/agentic/tools/implementations/tool-runtime/src/fs/backend.rs @@ -0,0 +1,81 @@ +//! Backend-aware file system operations +//! +//! This module provides file system operations that can work with both +//! local and remote (SSH) backends. 
+ +use std::fs; +use std::path::Path; + +/// File system backend trait +pub trait FileSystem { + fn read_to_string(&self, path: &Path) -> std::io::Result; + fn write(&self, path: &Path, contents: &str) -> std::io::Result<()>; + fn exists(&self, path: &Path) -> bool; + fn is_dir(&self, path: &Path) -> bool; + fn is_file(&self, path: &Path) -> bool; + fn read_dir(&self, path: &Path) -> std::io::Result>; + fn create_dir(&self, path: &Path) -> std::io::Result<()>; + fn create_dir_all(&self, path: &Path) -> std::io::Result<()>; + fn remove_file(&self, path: &Path) -> std::io::Result<()>; + fn remove_dir_all(&self, path: &Path) -> std::io::Result<()>; + fn rename(&self, from: &Path, to: &Path) -> std::io::Result<()>; +} + +/// Local file system implementation +pub struct LocalFileSystem; + +impl FileSystem for LocalFileSystem { + fn read_to_string(&self, path: &Path) -> std::io::Result { + fs::read_to_string(path) + } + + fn write(&self, path: &Path, contents: &str) -> std::io::Result<()> { + fs::write(path, contents) + } + + fn exists(&self, path: &Path) -> bool { + path.exists() + } + + fn is_dir(&self, path: &Path) -> bool { + path.is_dir() + } + + fn is_file(&self, path: &Path) -> bool { + path.is_file() + } + + fn read_dir(&self, path: &Path) -> std::io::Result> { + let mut entries = Vec::new(); + let dir = std::fs::read_dir(path)?; + for entry in dir { + entries.push(entry?); + } + Ok(entries.into_iter()) + } + + fn create_dir(&self, path: &Path) -> std::io::Result<()> { + fs::create_dir(path) + } + + fn create_dir_all(&self, path: &Path) -> std::io::Result<()> { + fs::create_dir_all(path) + } + + fn remove_file(&self, path: &Path) -> std::io::Result<()> { + fs::remove_file(path) + } + + fn remove_dir_all(&self, path: &Path) -> std::io::Result<()> { + fs::remove_dir_all(path) + } + + fn rename(&self, from: &Path, to: &Path) -> std::io::Result<()> { + fs::rename(from, to) + } +} + +/// Default local file system instance +pub fn local() -> LocalFileSystem { + 
LocalFileSystem +} diff --git a/src/crates/core/src/agentic/tools/implementations/tool-runtime/src/fs/mod.rs b/src/crates/core/src/agentic/tools/implementations/tool-runtime/src/fs/mod.rs index a8c766c5..017b6846 100644 --- a/src/crates/core/src/agentic/tools/implementations/tool-runtime/src/fs/mod.rs +++ b/src/crates/core/src/agentic/tools/implementations/tool-runtime/src/fs/mod.rs @@ -1,2 +1,5 @@ +pub mod backend; pub mod edit_file; pub mod read_file; + +pub use backend::{FileSystem, LocalFileSystem}; diff --git a/src/crates/core/src/agentic/tools/implementations/web_tools.rs b/src/crates/core/src/agentic/tools/implementations/web_tools.rs index 6f2f546d..205c121d 100644 --- a/src/crates/core/src/agentic/tools/implementations/web_tools.rs +++ b/src/crates/core/src/agentic/tools/implementations/web_tools.rs @@ -562,6 +562,7 @@ mod tests { image_context_provider: None, subagent_parent_info: None, cancellation_token: None, + workspace_services: None, } } diff --git a/src/crates/core/src/agentic/tools/pipeline/tool_pipeline.rs b/src/crates/core/src/agentic/tools/pipeline/tool_pipeline.rs index 0b430793..e3421455 100644 --- a/src/crates/core/src/agentic/tools/pipeline/tool_pipeline.rs +++ b/src/crates/core/src/agentic/tools/pipeline/tool_pipeline.rs @@ -787,6 +787,7 @@ impl ToolPipeline { image_context_provider: self.image_context_provider.clone(), subagent_parent_info: task.context.subagent_parent_info.clone(), cancellation_token: Some(cancellation_token), + workspace_services: task.context.workspace_services.clone(), }; let execution_future = tool.call(&task.tool_call.arguments, &tool_context); diff --git a/src/crates/core/src/agentic/tools/pipeline/types.rs b/src/crates/core/src/agentic/tools/pipeline/types.rs index 9d499336..04130b97 100644 --- a/src/crates/core/src/agentic/tools/pipeline/types.rs +++ b/src/crates/core/src/agentic/tools/pipeline/types.rs @@ -2,6 +2,7 @@ use crate::agentic::core::{ToolCall, ToolExecutionState}; use 
crate::agentic::events::SubagentParentInfo as EventSubagentParentInfo; +use crate::agentic::workspace::WorkspaceServices; use crate::agentic::WorkspaceBinding; use std::collections::HashMap; use std::time::SystemTime; @@ -60,6 +61,7 @@ pub struct ToolExecutionContext { /// If empty, allow all registered tools /// If not empty, only allow tools in the list to be executed pub allowed_tools: Vec, + pub workspace_services: Option, } /// Tool execution task diff --git a/src/crates/core/src/agentic/workspace.rs b/src/crates/core/src/agentic/workspace.rs index 4f501d01..c90762c2 100644 --- a/src/crates/core/src/agentic/workspace.rs +++ b/src/crates/core/src/agentic/workspace.rs @@ -1,10 +1,28 @@ +use async_trait::async_trait; use std::path::{Path, PathBuf}; +use std::sync::Arc; + +/// Describes whether the workspace is local or remote via SSH. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum WorkspaceBackend { + Local, + Remote { + connection_id: String, + connection_name: String, + }, +} /// Session-bound workspace information used during agent execution. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct WorkspaceBinding { pub workspace_id: Option, + /// For local workspaces this is a local path; for remote workspaces it is + /// the path on the remote server (e.g. `/root/project`). pub root_path: PathBuf, + pub backend: WorkspaceBackend, + /// Local path used for session persistence when the workspace is remote. + /// For local workspaces this is `None` (we use `root_path` directly). 
+ pub local_session_path: Option, } impl WorkspaceBinding { @@ -12,6 +30,26 @@ impl WorkspaceBinding { Self { workspace_id, root_path, + backend: WorkspaceBackend::Local, + local_session_path: None, + } + } + + pub fn new_remote( + workspace_id: Option, + root_path: PathBuf, + connection_id: String, + connection_name: String, + local_session_path: PathBuf, + ) -> Self { + Self { + workspace_id, + root_path, + backend: WorkspaceBackend::Remote { + connection_id, + connection_name, + }, + local_session_path: Some(local_session_path), } } @@ -22,4 +60,248 @@ impl WorkspaceBinding { pub fn root_path_string(&self) -> String { self.root_path.to_string_lossy().to_string() } + + pub fn is_remote(&self) -> bool { + matches!(self.backend, WorkspaceBackend::Remote { .. }) + } + + pub fn connection_id(&self) -> Option<&str> { + match &self.backend { + WorkspaceBackend::Remote { connection_id, .. } => Some(connection_id), + WorkspaceBackend::Local => None, + } + } + + /// The path to use for session persistence. + /// Remote workspaces store sessions locally; local workspaces use root_path. + pub fn session_storage_path(&self) -> &Path { + self.local_session_path.as_deref().unwrap_or(&self.root_path) + } +} + +// ============================================================ +// Workspace-level I/O abstractions — tools program against these +// traits instead of checking is_remote themselves. +// ============================================================ + +/// Unified file system operations that work for both local and remote workspaces. 
+#[async_trait] +pub trait WorkspaceFileSystem: Send + Sync { + async fn read_file(&self, path: &str) -> anyhow::Result>; + async fn read_file_text(&self, path: &str) -> anyhow::Result; + async fn write_file(&self, path: &str, contents: &[u8]) -> anyhow::Result<()>; + async fn exists(&self, path: &str) -> anyhow::Result; + async fn is_file(&self, path: &str) -> anyhow::Result; + async fn is_dir(&self, path: &str) -> anyhow::Result; +} + +/// Unified shell execution for both local and remote workspaces. +#[async_trait] +pub trait WorkspaceShell: Send + Sync { + /// Execute a command and return (stdout, stderr, exit_code). + async fn exec(&self, command: &str, timeout_ms: Option) -> anyhow::Result<(String, String, i32)>; } + +/// Bundle of workspace I/O services injected into ToolUseContext. +/// Tools call `context.workspace_services()` and use these trait objects +/// instead of directly checking `get_remote_workspace_manager()`. +pub struct WorkspaceServices { + pub fs: Arc, + pub shell: Arc, +} + +impl Clone for WorkspaceServices { + fn clone(&self) -> Self { + Self { + fs: Arc::clone(&self.fs), + shell: Arc::clone(&self.shell), + } + } +} + +impl std::fmt::Debug for WorkspaceServices { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("WorkspaceServices") + .field("fs", &"") + .field("shell", &"") + .finish() + } +} + +// ============================================================ +// Local implementations +// ============================================================ + +/// Local file system implementation of `WorkspaceFileSystem`. +pub struct LocalWorkspaceFs; + +#[async_trait] +impl WorkspaceFileSystem for LocalWorkspaceFs { + async fn read_file(&self, path: &str) -> anyhow::Result> { + Ok(tokio::fs::read(path).await?) + } + + async fn read_file_text(&self, path: &str) -> anyhow::Result { + Ok(tokio::fs::read_to_string(path).await?) 
+ } + + async fn write_file(&self, path: &str, contents: &[u8]) -> anyhow::Result<()> { + if let Some(parent) = Path::new(path).parent() { + tokio::fs::create_dir_all(parent).await?; + } + Ok(tokio::fs::write(path, contents).await?) + } + + async fn exists(&self, path: &str) -> anyhow::Result { + Ok(tokio::fs::try_exists(path).await.unwrap_or(false)) + } + + async fn is_file(&self, path: &str) -> anyhow::Result { + match tokio::fs::metadata(path).await { + Ok(m) => Ok(m.is_file()), + Err(_) => Ok(false), + } + } + + async fn is_dir(&self, path: &str) -> anyhow::Result { + match tokio::fs::metadata(path).await { + Ok(m) => Ok(m.is_dir()), + Err(_) => Ok(false), + } + } +} + +/// Local shell implementation of `WorkspaceShell`. +pub struct LocalWorkspaceShell; + +#[async_trait] +impl WorkspaceShell for LocalWorkspaceShell { + async fn exec(&self, command: &str, timeout_ms: Option) -> anyhow::Result<(String, String, i32)> { + let mut cmd = tokio::process::Command::new("sh"); + cmd.arg("-c").arg(command); + + let timeout = std::time::Duration::from_millis(timeout_ms.unwrap_or(30_000)); + + let output = tokio::time::timeout(timeout, cmd.output()) + .await + .map_err(|_| anyhow::anyhow!("Command timed out after {}ms", timeout.as_millis()))??; + + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + let exit_code = output.status.code().unwrap_or(-1); + Ok((stdout, stderr, exit_code)) + } +} + +/// Build `WorkspaceServices` backed by the local filesystem and shell. 
+pub fn local_workspace_services() -> WorkspaceServices { + WorkspaceServices { + fs: Arc::new(LocalWorkspaceFs), + shell: Arc::new(LocalWorkspaceShell), + } +} + +// ============================================================ +// Remote (SSH) implementations +// ============================================================ + +use crate::service::remote_ssh::{RemoteFileService, SSHConnectionManager}; + +/// SSH-backed file system implementation. +pub struct RemoteWorkspaceFs { + connection_id: String, + file_service: RemoteFileService, +} + +impl RemoteWorkspaceFs { + pub fn new(connection_id: String, file_service: RemoteFileService) -> Self { + Self { connection_id, file_service } + } +} + +#[async_trait] +impl WorkspaceFileSystem for RemoteWorkspaceFs { + async fn read_file(&self, path: &str) -> anyhow::Result> { + self.file_service + .read_file(&self.connection_id, path) + .await + .map_err(|e| anyhow::anyhow!("{}", e)) + } + + async fn read_file_text(&self, path: &str) -> anyhow::Result { + let bytes = self.read_file(path).await?; + Ok(String::from_utf8_lossy(&bytes).to_string()) + } + + async fn write_file(&self, path: &str, contents: &[u8]) -> anyhow::Result<()> { + self.file_service + .write_file(&self.connection_id, path, contents) + .await + .map_err(|e| anyhow::anyhow!("{}", e)) + } + + async fn exists(&self, path: &str) -> anyhow::Result { + self.file_service + .exists(&self.connection_id, path) + .await + .map_err(|e| anyhow::anyhow!("{}", e)) + } + + async fn is_file(&self, path: &str) -> anyhow::Result { + self.file_service + .is_file(&self.connection_id, path) + .await + .map_err(|e| anyhow::anyhow!("{}", e)) + } + + async fn is_dir(&self, path: &str) -> anyhow::Result { + self.file_service + .is_dir(&self.connection_id, path) + .await + .map_err(|e| anyhow::anyhow!("{}", e)) + } +} + +/// SSH-backed shell implementation. 
+pub struct RemoteWorkspaceShell { + ssh_manager: SSHConnectionManager, + connection_id: String, + workspace_root: String, +} + +impl RemoteWorkspaceShell { + pub fn new(connection_id: String, ssh_manager: SSHConnectionManager, workspace_root: String) -> Self { + Self { connection_id, ssh_manager, workspace_root } + } +} + +#[async_trait] +impl WorkspaceShell for RemoteWorkspaceShell { + async fn exec(&self, command: &str, _timeout_ms: Option) -> anyhow::Result<(String, String, i32)> { + // Wrap the command with cd to workspace root so all commands + // execute in the correct working directory on the remote server. + let wrapped = format!("cd {} && {}", shell_escape(&self.workspace_root), command); + self.ssh_manager + .execute_command(&self.connection_id, &wrapped) + .await + } +} + +/// Escape a string for safe use in a shell command. +fn shell_escape(s: &str) -> String { + format!("'{}'", s.replace('\'', "'\\''")) +} + +/// Build `WorkspaceServices` backed by SSH for a remote workspace. 
+pub fn remote_workspace_services( + connection_id: String, + file_service: RemoteFileService, + ssh_manager: SSHConnectionManager, + workspace_root: String, +) -> WorkspaceServices { + WorkspaceServices { + fs: Arc::new(RemoteWorkspaceFs::new(connection_id.clone(), file_service)), + shell: Arc::new(RemoteWorkspaceShell::new(connection_id, ssh_manager, workspace_root)), + } +} + diff --git a/src/crates/core/src/infrastructure/filesystem/file_tree.rs b/src/crates/core/src/infrastructure/filesystem/file_tree.rs index dd7d5e1e..82f4df49 100644 --- a/src/crates/core/src/infrastructure/filesystem/file_tree.rs +++ b/src/crates/core/src/infrastructure/filesystem/file_tree.rs @@ -170,6 +170,11 @@ impl FileTreeService { } pub async fn build_tree(&self, root_path: &str) -> Result, String> { + // For remote workspaces, delegate to get_directory_contents which handles SSH + if crate::service::remote_ssh::workspace_state::is_remote_path(root_path).await { + return self.get_directory_contents(root_path).await; + } + let root_path_buf = PathBuf::from(root_path); if !root_path_buf.exists() { @@ -189,6 +194,23 @@ impl FileTreeService { &self, root_path: &str, ) -> BitFunResult<(Vec, FileTreeStatistics)> { + // For remote workspaces, return simple directory listing with empty stats + if crate::service::remote_ssh::workspace_state::is_remote_path(root_path).await { + let nodes = self.get_directory_contents(root_path).await + .map_err(|e| BitFunError::service(e))?; + let stats = FileTreeStatistics { + total_files: nodes.iter().filter(|n| !n.is_directory).count(), + total_directories: nodes.iter().filter(|n| n.is_directory).count(), + total_size_bytes: 0, + max_depth_reached: 0, + file_type_counts: HashMap::new(), + large_files: Vec::new(), + symlinks_count: 0, + hidden_files_count: 0, + }; + return Ok((nodes, stats)); + } + let root_path_buf = PathBuf::from(root_path); if !root_path_buf.exists() { @@ -607,6 +629,35 @@ impl FileTreeService { } pub async fn get_directory_contents(&self, 
path: &str) -> Result, String> { + // Check if this path belongs to any registered remote workspace + if let Some(entry) = crate::service::remote_ssh::workspace_state::lookup_remote_connection(path).await { + if let Some(manager) = crate::service::remote_ssh::workspace_state::get_remote_workspace_manager() { + if let Some(file_service) = manager.get_file_service().await { + match file_service.read_dir(&entry.connection_id, path).await { + Ok(entries) => { + let nodes: Vec = entries + .into_iter() + .filter(|e| e.name != "." && e.name != "..") + .map(|e| { + FileTreeNode::new( + e.path.clone(), + e.name.clone(), + e.path.clone(), + e.is_dir, + ) + }) + .collect(); + return Ok(nodes); + } + Err(e) => { + return Err(format!("Failed to read remote directory: {}", e)); + } + } + } + } + } + + // Fall back to local filesystem let path_buf = PathBuf::from(path); if !path_buf.exists() { diff --git a/src/crates/core/src/service/mod.rs b/src/crates/core/src/service/mod.rs index a99cb452..7430e32a 100644 --- a/src/crates/core/src/service/mod.rs +++ b/src/crates/core/src/service/mod.rs @@ -16,6 +16,7 @@ pub mod lsp; // LSP (Language Server Protocol) system pub mod mcp; // MCP (Model Context Protocol) system pub mod project_context; // Project context management pub mod remote_connect; // Remote Connect (phone → desktop) +pub mod remote_ssh; // Remote SSH (desktop → server) pub mod runtime; // Managed runtime and capability management pub mod session; // Session persistence pub mod snapshot; // Snapshot-based change tracking diff --git a/src/crates/core/src/service/remote_ssh/manager.rs b/src/crates/core/src/service/remote_ssh/manager.rs new file mode 100644 index 00000000..9e757c22 --- /dev/null +++ b/src/crates/core/src/service/remote_ssh/manager.rs @@ -0,0 +1,1333 @@ +//! SSH Connection Manager using russh +//! +//! 
This module manages SSH connections using the pure-Rust SSH implementation + +use crate::service::remote_ssh::types::{ + SavedConnection, ServerInfo, SSHConnectionConfig, SSHConnectionResult, SSHAuthMethod, + SSHConfigEntry, SSHConfigLookupResult, +}; +use anyhow::{anyhow, Context}; +use russh::client::{Handle, Handler, Msg}; +use russh_keys::key::PublicKey; +use russh_keys::PublicKeyBase64; +use russh_sftp::client::fs::ReadDir; +use russh_sftp::client::SftpSession; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::net::TcpStream; +use async_trait::async_trait; +#[cfg(feature = "ssh_config")] +use ssh_config::SSHConfig; + +/// Known hosts entry +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct KnownHostEntry { + pub host: String, + pub port: u16, + pub key_type: String, + pub fingerprint: String, + pub public_key: String, +} + +/// Active SSH connection +struct ActiveConnection { + handle: Arc>, + config: SSHConnectionConfig, + server_info: Option, + sftp_session: Arc>>>, + #[allow(dead_code)] + server_key: Option, +} + +/// SSH client handler with host key verification +struct SSHHandler { + /// Expected host key (if connecting to known host) + expected_key: Option<(String, u16, PublicKey)>, + /// Callback for new host key verification + verify_callback: Option bool + Send + Sync>>, + /// Known hosts storage for verification + known_hosts: Option>>>, + /// Host info for known hosts lookup + host: Option, + port: Option, +} + +impl SSHHandler { + #[allow(dead_code)] + fn new() -> Self { + Self { + expected_key: None, + verify_callback: None, + known_hosts: None, + host: None, + port: None, + } + } + + #[allow(dead_code)] + fn with_expected_key(host: String, port: u16, key: PublicKey) -> Self { + Self { + expected_key: Some((host, port, key)), + verify_callback: None, + known_hosts: None, + host: None, + port: None, + } + } + + #[allow(dead_code)] + fn with_verify_callback(callback: F) -> Self + where + F: Fn(String, u16,
&PublicKey) -> bool + Send + Sync + 'static, + { + Self { + expected_key: None, + verify_callback: Some(Box::new(callback)), + known_hosts: None, + host: None, + port: None, + } + } + + fn with_known_hosts( + host: String, + port: u16, + known_hosts: Arc>>, + ) -> Self { + Self { + expected_key: None, + verify_callback: None, + known_hosts: Some(known_hosts), + host: Some(host), + port: Some(port), + } + } +} + +#[derive(Debug)] +struct HandlerError; + +impl std::fmt::Display for HandlerError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "HandlerError") + } +} + +impl std::error::Error for HandlerError {} + +impl From for HandlerError { + fn from(_: russh::Error) -> Self { + HandlerError + } +} + +impl From for HandlerError { + fn from(_s: String) -> Self { + HandlerError + } +} + +#[async_trait] +impl Handler for SSHHandler { + type Error = HandlerError; + + async fn check_server_key( + &mut self, + server_public_key: &PublicKey, + ) -> Result { + let server_fingerprint = server_public_key.fingerprint(); + + // 1. If we have an expected key, verify it matches + if let Some((ref host, port, ref expected)) = self.expected_key { + if expected.fingerprint() == server_fingerprint { + log::debug!("Server key matches expected key for {}:{}", host, port); + return Ok(true); + } + log::warn!("Server key mismatch for {}:{}. Expected fingerprint: {}, got: {}", + host, port, expected.fingerprint(), server_fingerprint); + return Err(HandlerError); + } + + // 2. 
Check known_hosts for this host + if let (Some(host), Some(port)) = (self.host.as_ref(), self.port) { + if let Some(known_hosts) = self.known_hosts.as_ref() { + let key = format!("{}:{}", host, port); + let known_guard = known_hosts.read().await; + if let Some(known) = known_guard.get(&key) { + // Clone the fingerprint to avoid borrow issues + let stored_fingerprint = known.fingerprint.clone(); + drop(known_guard); + + if stored_fingerprint == server_fingerprint { + log::debug!("Server key verified from known_hosts for {}:{}", host, port); + return Ok(true); + } else { + // Key changed - potential security issue! + log::warn!( + "Host key changed for {}:{}. Expected: {}, got: {}", + host, port, stored_fingerprint, server_fingerprint + ); + return Err(HandlerError); + } + } + } + } + + // 3. If we have a verify callback, use it + if let Some(ref callback) = self.verify_callback { + let host = self.host.as_deref().unwrap_or(""); + let port = self.port.unwrap_or(22); + if callback(host.to_string(), port, server_public_key) { + log::debug!("Server key verified via callback for {}:{}", host, port); + return Ok(true); + } + return Err(HandlerError); + } + + // 4. First time connection - accept the key (like standard SSH client's StrictHostKeyChecking=accept-new) + // This is safe for development and matches user expectations + log::info!( + "First time connection - accepting server key. 
Host: {}, Port: {}, Fingerprint: {}", + self.host.as_deref().unwrap_or("unknown"), + self.port.unwrap_or(22), + server_fingerprint + ); + Ok(true) + } +} + +/// SSH Connection Manager +#[derive(Clone)] +pub struct SSHConnectionManager { + connections: Arc>>, + saved_connections: Arc>>, + config_path: std::path::PathBuf, + /// Known hosts storage + known_hosts: Arc>>, + known_hosts_path: std::path::PathBuf, + /// Remote workspace persistence (multiple workspaces) + remote_workspaces: Arc>>, + remote_workspace_path: std::path::PathBuf, +} + +impl SSHConnectionManager { + /// Create a new SSH connection manager + pub fn new(data_dir: std::path::PathBuf) -> Self { + let config_path = data_dir.join("ssh_connections.json"); + let known_hosts_path = data_dir.join("known_hosts"); + let remote_workspace_path = data_dir.join("remote_workspace.json"); + Self { + connections: Arc::new(tokio::sync::RwLock::new(HashMap::new())), + saved_connections: Arc::new(tokio::sync::RwLock::new(Vec::new())), + config_path, + known_hosts: Arc::new(tokio::sync::RwLock::new(HashMap::new())), + known_hosts_path, + remote_workspaces: Arc::new(tokio::sync::RwLock::new(Vec::new())), + remote_workspace_path, + } + } + + /// Load known hosts from disk + pub async fn load_known_hosts(&self) -> anyhow::Result<()> { + if !self.known_hosts_path.exists() { + return Ok(()); + } + + let content = tokio::fs::read_to_string(&self.known_hosts_path).await?; + let entries: Vec = serde_json::from_str(&content) + .context("Failed to parse known hosts")?; + + let mut guard = self.known_hosts.write().await; + for entry in entries { + let key = format!("{}:{}", entry.host, entry.port); + guard.insert(key, entry); + } + + Ok(()) + } + + /// Save known hosts to disk + async fn save_known_hosts(&self) -> anyhow::Result<()> { + let guard = self.known_hosts.read().await; + let entries: Vec<_> = guard.values().cloned().collect(); + + if let Some(parent) = self.known_hosts_path.parent() { + 
tokio::fs::create_dir_all(parent).await?; + } + + let content = serde_json::to_string_pretty(&entries)?; + tokio::fs::write(&self.known_hosts_path, content).await?; + Ok(()) + } + + /// Add a known host + pub async fn add_known_host(&self, host: String, port: u16, key: &PublicKey) -> anyhow::Result<()> { + let entry = KnownHostEntry { + host: host.clone(), + port, + key_type: format!("{:?}", key.name()), + fingerprint: key.fingerprint(), + public_key: key.public_key_bytes().to_vec().iter().map(|b| format!("{:02x}", b)).collect(), + }; + + let key = format!("{}:{}", host, port); + { + let mut guard = self.known_hosts.write().await; + guard.insert(key, entry); + } + + self.save_known_hosts().await + } + + /// Check if host is in known hosts + pub async fn is_known_host(&self, host: &str, port: u16) -> bool { + let key = format!("{}:{}", host, port); + let guard = self.known_hosts.read().await; + guard.contains_key(&key) + } + + /// Get known host entry + pub async fn get_known_host(&self, host: &str, port: u16) -> Option { + let key = format!("{}:{}", host, port); + let guard = self.known_hosts.read().await; + guard.get(&key).cloned() + } + + /// Remove a known host + pub async fn remove_known_host(&self, host: &str, port: u16) -> anyhow::Result<()> { + let key = format!("{}:{}", host, port); + { + let mut guard = self.known_hosts.write().await; + guard.remove(&key); + } + self.save_known_hosts().await + } + + /// List all known hosts + pub async fn list_known_hosts(&self) -> Vec { + let guard = self.known_hosts.read().await; + guard.values().cloned().collect() + } + + // ── Remote Workspace Persistence ───────────────────────────────────────────── + + /// Load remote workspaces from disk + pub async fn load_remote_workspace(&self) -> anyhow::Result<()> { + if !self.remote_workspace_path.exists() { + return Ok(()); + } + + let content = tokio::fs::read_to_string(&self.remote_workspace_path).await?; + // Try array format first, fall back to single-object for backward 
compat + let workspaces: Vec = + serde_json::from_str(&content) + .or_else(|_| { + // Legacy: single workspace object + serde_json::from_str::(&content) + .map(|ws| vec![ws]) + }) + .context("Failed to parse remote workspace(s)")?; + + let mut guard = self.remote_workspaces.write().await; + *guard = workspaces; + + Ok(()) + } + + /// Save remote workspaces to disk + async fn save_remote_workspaces(&self) -> anyhow::Result<()> { + let guard = self.remote_workspaces.read().await; + + if let Some(parent) = self.remote_workspace_path.parent() { + tokio::fs::create_dir_all(parent).await?; + } + + let content = serde_json::to_string_pretty(&*guard)?; + tokio::fs::write(&self.remote_workspace_path, content).await?; + Ok(()) + } + + /// Add/update a persisted remote workspace + pub async fn set_remote_workspace(&self, workspace: crate::service::remote_ssh::types::RemoteWorkspace) -> anyhow::Result<()> { + { + let mut guard = self.remote_workspaces.write().await; + // Replace existing entry with same remote_path, or append + guard.retain(|w| w.remote_path != workspace.remote_path); + guard.push(workspace); + } + self.save_remote_workspaces().await + } + + /// Get all persisted remote workspaces + pub async fn get_remote_workspaces(&self) -> Vec { + self.remote_workspaces.read().await.clone() + } + + /// Get first persisted remote workspace (legacy compat) + pub async fn get_remote_workspace(&self) -> Option { + self.remote_workspaces.read().await.first().cloned() + } + + /// Remove a specific remote workspace by path + pub async fn remove_remote_workspace(&self, remote_path: &str) -> anyhow::Result<()> { + { + let mut guard = self.remote_workspaces.write().await; + guard.retain(|w| w.remote_path != remote_path); + } + self.save_remote_workspaces().await + } + + /// Clear all remote workspaces + pub async fn clear_remote_workspace(&self) -> anyhow::Result<()> { + { + let mut guard = self.remote_workspaces.write().await; + guard.clear(); + } + if 
self.remote_workspace_path.exists() { + tokio::fs::remove_file(&self.remote_workspace_path).await?; + } + Ok(()) + } + + /// Look up SSH config for a given host alias or hostname + /// + /// This parses ~/.ssh/config to find connection parameters for the given host. + /// The host parameter can be either an alias defined in SSH config or an actual hostname. + #[cfg(feature = "ssh_config")] + pub async fn get_ssh_config(&self, host: &str) -> SSHConfigLookupResult { + let ssh_config_path = dirs::home_dir() + .map(|p| p.join(".ssh").join("config")) + .unwrap_or_default(); + + if !ssh_config_path.exists() { + log::debug!("SSH config not found at {:?}", ssh_config_path); + return SSHConfigLookupResult { found: false, config: None }; + } + + let config_content = match tokio::fs::read_to_string(&ssh_config_path).await { + Ok(c) => c, + Err(e) => { + log::warn!("Failed to read SSH config: {:?}", e); + return SSHConfigLookupResult { found: false, config: None }; + } + }; + + let config = match SSHConfig::parse_str(&config_content) { + Ok(c) => c, + Err(e) => { + log::warn!("Failed to parse SSH config: {:?}", e); + return SSHConfigLookupResult { found: false, config: None }; + } + }; + + // Use query() to get host configuration - this handles Host pattern matching + let host_settings = config.query(host); + + if host_settings.is_empty() { + log::debug!("No SSH config found for host: {}", host); + return SSHConfigLookupResult { found: false, config: None }; + } + + log::debug!("Found SSH config for host: {} with {} settings", host, host_settings.len()); + + // Extract fields from the HashMap - keys are case-insensitive + let hostname = host_settings.get("Hostname").map(|s| s.to_string()); + let user = host_settings.get("User").map(|s| s.to_string()); + let port = host_settings.get("Port") + .and_then(|s| s.parse::().ok()); + let identity_file = host_settings.get("IdentityFile") + .map(|f| shellexpand::tilde(f).to_string()); + + // Check if proxy command is set (agent 
forwarding vs proxy command) + let has_proxy_command = host_settings.contains_key("ProxyCommand"); + + return SSHConfigLookupResult { + found: true, + config: Some(SSHConfigEntry { + host: host.to_string(), + hostname, + port, + user, + identity_file, + agent: if has_proxy_command { None } else { Some(true) }, + }), + }; + } + + #[cfg(not(feature = "ssh_config"))] + pub async fn get_ssh_config(&self, _host: &str) -> SSHConfigLookupResult { + SSHConfigLookupResult { found: false, config: None } + } + + /// List all hosts defined in ~/.ssh/config + #[cfg(feature = "ssh_config")] + pub async fn list_ssh_config_hosts(&self) -> Vec { + let ssh_config_path = dirs::home_dir() + .map(|p| p.join(".ssh").join("config")) + .unwrap_or_default(); + + if !ssh_config_path.exists() { + log::debug!("SSH config not found at {:?}", ssh_config_path); + return Vec::new(); + } + + let config_content = match tokio::fs::read_to_string(&ssh_config_path).await { + Ok(c) => c, + Err(e) => { + log::warn!("Failed to read SSH config: {:?}", e); + return Vec::new(); + } + }; + + let config = match SSHConfig::parse_str(&config_content) { + Ok(c) => c, + Err(e) => { + log::warn!("Failed to parse SSH config: {:?}", e); + return Vec::new(); + } + }; + + let mut hosts = Vec::new(); + + // SSHConfig library doesn't expose listing all hosts, so we parse the raw config + // to extract Host entries. This is a simple but effective approach. + for line in config_content.lines() { + let line = line.trim(); + // Match "Host alias1 alias2 ..." lines (but not "HostName") + if line.starts_with("Host ") && !line.starts_with("HostName") { + // Extract everything after "Host " + let host_part = line.strip_prefix("Host ").unwrap_or("").trim(); + if host_part.is_empty() { + continue; + } + // Host can be "alias1 alias2 ..." 
- we want the first one (main alias) + let aliases: Vec<&str> = host_part.split_whitespace().collect(); + if aliases.is_empty() { + continue; + } + + let alias = aliases[0]; + // Query config for this host to get details + let settings = config.query(alias); + + let identity_file = settings.get("IdentityFile") + .map(|f| shellexpand::tilde(f).to_string()); + + let hostname = settings.get("Hostname").map(|s| s.to_string()); + let user = settings.get("User").map(|s| s.to_string()); + let port = settings.get("Port") + .and_then(|s| s.parse::().ok()); + + hosts.push(SSHConfigEntry { + host: alias.to_string(), + hostname, + port, + user, + identity_file, + agent: None, // Can't easily determine agent setting from raw parsing + }); + } + } + + log::debug!("Found {} hosts in SSH config", hosts.len()); + hosts + } + + #[cfg(not(feature = "ssh_config"))] + pub async fn list_ssh_config_hosts(&self) -> Vec { + Vec::new() + } + + /// Load saved connections from disk + pub async fn load_saved_connections(&self) -> anyhow::Result<()> { + log::info!("load_saved_connections: config_path={:?}, exists={}", self.config_path, self.config_path.exists()); + + if !self.config_path.exists() { + return Ok(()); + } + + let content = tokio::fs::read_to_string(&self.config_path).await?; + log::info!("load_saved_connections: content={}", content); + let saved: Vec = serde_json::from_str(&content) + .context("Failed to parse saved SSH connections")?; + + let mut guard = self.saved_connections.write().await; + *guard = saved; + + log::info!("load_saved_connections: loaded {} connections", guard.len()); + Ok(()) + } + + /// Save connections to disk + async fn save_connections(&self) -> anyhow::Result<()> { + log::info!("save_connections: saving to {:?}", self.config_path); + let guard = self.saved_connections.read().await; + let content = serde_json::to_string_pretty(&*guard)?; + log::info!("save_connections: content={}", content); + + // Ensure parent directory exists + if let Some(parent) = 
self.config_path.parent() { + tokio::fs::create_dir_all(parent).await?; + } + + tokio::fs::write(&self.config_path, content).await?; + log::info!("save_connections: saved {} connections to {:?}", guard.len(), self.config_path); + Ok(()) + } + + /// Get list of saved connections + pub async fn get_saved_connections(&self) -> Vec { + self.saved_connections.read().await.clone() + } + + /// Save a connection configuration + pub async fn save_connection(&self, config: &SSHConnectionConfig) -> anyhow::Result<()> { + let mut guard = self.saved_connections.write().await; + + // Remove existing entry with same id OR same host+port+username (dedup) + guard.retain(|c| { + c.id != config.id + && !(c.host == config.host && c.port == config.port && c.username == config.username) + }); + + // Add new entry + guard.push(SavedConnection { + id: config.id.clone(), + name: config.name.clone(), + host: config.host.clone(), + port: config.port, + username: config.username.clone(), + auth_type: match &config.auth { + SSHAuthMethod::Password { .. } => crate::service::remote_ssh::types::SavedAuthType::Password, + SSHAuthMethod::PrivateKey { key_path, .. 
} => crate::service::remote_ssh::types::SavedAuthType::PrivateKey { key_path: key_path.clone() }, + SSHAuthMethod::Agent => crate::service::remote_ssh::types::SavedAuthType::Agent, + }, + default_workspace: config.default_workspace.clone(), + last_connected: Some(chrono::Utc::now().timestamp() as u64), + }); + + drop(guard); + self.save_connections().await + } + + /// Delete a saved connection + pub async fn delete_saved_connection(&self, connection_id: &str) -> anyhow::Result<()> { + let mut guard = self.saved_connections.write().await; + guard.retain(|c| c.id != connection_id); + drop(guard); + self.save_connections().await + } + + /// Connect to a remote SSH server + /// + /// # Arguments + /// * `config` - SSH connection configuration + /// * `timeout_secs` - Connection timeout in seconds (default: 30) + pub async fn connect(&self, config: SSHConnectionConfig) -> anyhow::Result { + self.connect_with_timeout(config, 30).await + } + + /// Connect with custom timeout + pub async fn connect_with_timeout( + &self, + config: SSHConnectionConfig, + timeout_secs: u64, + ) -> anyhow::Result { + let addr = format!("{}:{}", config.host, config.port); + + // Connect to the server with timeout + let stream = tokio::time::timeout( + std::time::Duration::from_secs(timeout_secs), + TcpStream::connect(&addr), + ) + .await + .map_err(|_| anyhow!("Connection timeout after {} seconds", timeout_secs))? + .map_err(|e| anyhow!("Failed to connect to {}: {}", addr, e))?; + + // Create SSH transport config + let key_pair = match &config.auth { + SSHAuthMethod::Password { .. 
} => None, + SSHAuthMethod::PrivateKey { key_path, passphrase } => { + log::info!("Attempting private key auth with key_path: {}, passphrase provided: {}", key_path, passphrase.is_some()); + // Try to read the specified key file + let expanded = shellexpand::tilde(key_path); + log::info!("Expanded key path: {}", expanded); + let key_content = match std::fs::read_to_string(expanded.as_ref()) { + Ok(content) => { + log::info!("Successfully read {} bytes from key file", content.len()); + content + } + Err(e) => { + // If specified key fails, try default ~/.ssh/id_rsa + log::warn!("Failed to read private key at '{}': {}, trying default ~/.ssh/id_rsa", expanded, e); + if let Ok(home) = std::env::var("HOME") { + let default_key = format!("{}/.ssh/id_rsa", home); + log::info!("Trying default key at: {}", default_key); + std::fs::read_to_string(&default_key) + .map_err(|e| anyhow!("Failed to read private key '{}' and default key '{}': {}", key_path, default_key, e))? + } else { + return Err(anyhow!("Failed to read private key '{}': {}, and could not determine home directory", key_path, e)); + } + } + }; + log::info!("Decoding private key..."); + let key_pair = russh_keys::decode_secret_key( + &key_content, + passphrase.as_ref().map(|s| s.as_str()), + ) + .map_err(|e| anyhow!("Failed to decode private key: {}", e))?; + log::info!("Successfully decoded private key"); + Some(key_pair) + } + SSHAuthMethod::Agent => None, + }; + + let ssh_config = Arc::new(russh::client::Config { + inactivity_timeout: Some(std::time::Duration::from_secs(60)), + keepalive_interval: Some(std::time::Duration::from_secs(30)), + keepalive_max: 3, + ..Default::default() + }); + + // Create handler with known_hosts for verification + let handler = SSHHandler::with_known_hosts( + config.host.clone(), + config.port, + self.known_hosts.clone(), + ); + + // SSH handshake with timeout + log::info!("Starting SSH handshake to {}", addr); + let mut handle = tokio::time::timeout( + 
std::time::Duration::from_secs(timeout_secs), + russh::client::connect_stream(ssh_config, stream, handler), + ) + .await + .map_err(|_| anyhow!("SSH handshake timeout after {} seconds", timeout_secs))? + .map_err(|e| anyhow!("Failed to establish SSH connection: {:?}", e))?; + log::info!("SSH handshake completed successfully"); + + // Authenticate based on auth method + log::info!("Starting authentication for user {}", config.username); + let auth_success: bool = match &config.auth { + SSHAuthMethod::Password { password } => { + log::debug!("Using password authentication"); + handle.authenticate_password(&config.username, password.clone()).await + .map_err(|e| anyhow!("Password authentication failed: {:?}", e))? + } + SSHAuthMethod::PrivateKey { key_path, passphrase: _ } => { + log::info!("Using public key authentication with key: {}", key_path); + if let Some(ref key) = key_pair { + log::info!("Attempting to authenticate user '{}' with public key", config.username); + let result = handle.authenticate_publickey(&config.username, Arc::new(key.clone())).await; + log::info!("Public key auth result: {:?}", result); + match result { + Ok(true) => { + log::info!("Public key authentication successful"); + true + } + Ok(false) => { + log::warn!("Public key authentication rejected by server for user '{}'", config.username); + false + } + Err(e) => { + log::error!("Public key authentication error: {:?}", e); + return Err(anyhow!("Public key authentication failed: {:?}", e)); + } + } + } else { + return Err(anyhow!("Failed to load private key")); + } + } + SSHAuthMethod::Agent => { + log::debug!("Using SSH agent authentication - agent auth not supported, returning false"); + // Agent auth is not supported in russh - return false to indicate auth failed + // The caller should try another auth method + false + } + }; + + if !auth_success { + log::warn!("Authentication returned false for user {}", config.username); + return Err(anyhow!("Authentication failed for user {}", 
config.username)); + } + log::info!("Authentication successful for user {}", config.username); + + // Get server info + let server_info = Self::get_server_info_internal(&handle).await; + + let connection_id = config.id.clone(); + + // Store connection + let mut guard = self.connections.write().await; + guard.insert( + connection_id.clone(), + ActiveConnection { + handle: Arc::new(handle), + config, + server_info: server_info.clone(), + sftp_session: Arc::new(tokio::sync::RwLock::new(None)), + server_key: None, + }, + ); + + Ok(SSHConnectionResult { + success: true, + connection_id: Some(connection_id), + error: None, + server_info, + }) + } + + /// Get server information + async fn get_server_info_internal(handle: &Handle) -> Option { + // Try to get server info via SSH session + let (stdout, _stderr, exit_status) = Self::execute_command_internal(handle, "uname -s && hostname && echo $HOME") + .await + .ok()?; + + if exit_status != 0 { + return None; + } + + let lines: Vec<&str> = stdout.trim().lines().collect(); + if lines.len() < 3 { + return None; + } + + Some(ServerInfo { + os_type: lines[0].to_string(), + hostname: lines[1].to_string(), + home_dir: lines[2].to_string(), + }) + } + + /// Execute a command on the remote server + async fn execute_command_internal( + handle: &Handle, + command: &str, + ) -> std::result::Result<(String, String, i32), anyhow::Error> { + let mut session = handle.channel_open_session().await?; + session.exec(true, command).await?; + + let mut stdout = String::new(); + let mut stderr = String::new(); + let mut exit_status: i32 = -1; + + loop { + match session.wait().await { + Some(russh::ChannelMsg::Data { ref data }) => { + stdout.push_str(&String::from_utf8_lossy(data)); + } + Some(russh::ChannelMsg::ExtendedData { ref data, .. 
}) => { + stderr.push_str(&String::from_utf8_lossy(data)); + } + Some(russh::ChannelMsg::ExitStatus { exit_status: status }) => { + exit_status = status as i32; + } + Some(russh::ChannelMsg::Eof) | Some(russh::ChannelMsg::Close) => { + break; + } + None => { + break; + } + _ => {} + } + } + + Ok((stdout, stderr, exit_status)) + } + + /// Disconnect from a server + pub async fn disconnect(&self, connection_id: &str) -> anyhow::Result<()> { + let mut guard = self.connections.write().await; + guard.remove(connection_id); + Ok(()) + } + + /// Disconnect all connections + pub async fn disconnect_all(&self) { + let mut guard = self.connections.write().await; + guard.clear(); + } + + /// Check if connected + pub async fn is_connected(&self, connection_id: &str) -> bool { + let guard = self.connections.read().await; + guard.contains_key(connection_id) + } + + /// Execute a command on the remote server + pub async fn execute_command( + &self, + connection_id: &str, + command: &str, + ) -> anyhow::Result<(String, String, i32)> { + let guard = self.connections.read().await; + let conn = guard + .get(connection_id) + .ok_or_else(|| anyhow!("Connection {} not found", connection_id))?; + + Self::execute_command_internal(&conn.handle, command) + .await + .map_err(|e| anyhow!("Command execution failed: {}", e)) + } + + /// Get server info for a connection + pub async fn get_server_info(&self, connection_id: &str) -> Option { + let guard = self.connections.read().await; + guard.get(connection_id).and_then(|c| c.server_info.clone()) + } + + /// Get connection configuration + pub async fn get_connection_config(&self, connection_id: &str) -> Option { + let guard = self.connections.read().await; + guard.get(connection_id).map(|c| c.config.clone()) + } + + // ============================================================================ + // SFTP Operations + // ============================================================================ + + /// Get or create SFTP session for a connection 
+ pub async fn get_sftp(&self, connection_id: &str) -> anyhow::Result> { + // First check if we have an existing SFTP session + { + let guard = self.connections.read().await; + if let Some(conn) = guard.get(connection_id) { + let sftp_guard = conn.sftp_session.read().await; + if let Some(ref sftp) = *sftp_guard { + return Ok(sftp.clone()); + } + } + } + + // Get handle (clone the Arc) + let handle: Arc> = { + let guard = self.connections.read().await; + let conn = guard + .get(connection_id) + .ok_or_else(|| anyhow!("Connection {} not found", connection_id))?; + conn.handle.clone() + }; + + // Open a channel and request SFTP subsystem + let channel = handle.channel_open_session().await + .map_err(|e| anyhow!("Failed to open channel for SFTP: {}", e))?; + channel.request_subsystem(true, "sftp").await + .map_err(|e| anyhow!("Failed to request SFTP subsystem: {}", e))?; + + let sftp = SftpSession::new(channel.into_stream()).await + .map_err(|e| anyhow!("Failed to create SFTP session: {}", e))?; + + let sftp = Arc::new(sftp); + + // Store the SFTP session + { + let mut guard = self.connections.write().await; + if let Some(conn) = guard.get_mut(connection_id) { + let mut sftp_guard = conn.sftp_session.write().await; + *sftp_guard = Some(sftp.clone()); + } + } + + Ok(sftp) + } + + /// Read a file via SFTP + pub async fn sftp_read(&self, connection_id: &str, path: &str) -> anyhow::Result> { + let sftp = self.get_sftp(connection_id).await?; + let mut file = sftp.open(path).await + .map_err(|e| anyhow!("Failed to open remote file '{}': {}", path, e))?; + + let mut buffer = Vec::new(); + use tokio::io::AsyncReadExt; + file.read_to_end(&mut buffer).await + .map_err(|e| anyhow!("Failed to read remote file '{}': {}", path, e))?; + + Ok(buffer) + } + + /// Write a file via SFTP + pub async fn sftp_write(&self, connection_id: &str, path: &str, content: &[u8]) -> anyhow::Result<()> { + let sftp = self.get_sftp(connection_id).await?; + let mut file = sftp.create(path).await + 
.map_err(|e| anyhow!("Failed to create remote file '{}': {}", path, e))?; + + use tokio::io::AsyncWriteExt; + file.write_all(content).await + .map_err(|e| anyhow!("Failed to write remote file '{}': {}", path, e))?; + + file.flush().await + .map_err(|e| anyhow!("Failed to flush remote file '{}': {}", path, e))?; + + Ok(()) + } + + /// Read directory via SFTP + pub async fn sftp_read_dir(&self, connection_id: &str, path: &str) -> anyhow::Result { + let sftp = self.get_sftp(connection_id).await?; + let entries = sftp.read_dir(path).await + .map_err(|e| anyhow!("Failed to read directory '{}': {}", path, e))?; + Ok(entries) + } + + /// Create directory via SFTP + pub async fn sftp_mkdir(&self, connection_id: &str, path: &str) -> anyhow::Result<()> { + let sftp = self.get_sftp(connection_id).await?; + sftp.create_dir(path).await + .map_err(|e| anyhow!("Failed to create directory '{}': {}", path, e))?; + Ok(()) + } + + /// Create directory and all parents via SFTP + pub async fn sftp_mkdir_all(&self, connection_id: &str, path: &str) -> anyhow::Result<()> { + let sftp = self.get_sftp(connection_id).await?; + + // Check if path exists + match sftp.as_ref().try_exists(path).await { + Ok(true) => return Ok(()), // Already exists + Ok(false) => {} + Err(_) => {} + } + + // Try to create + sftp.as_ref().create_dir(path).await + .map_err(|e| anyhow!("Failed to create directory '{}': {}", path, e))?; + Ok(()) + } + + /// Remove file via SFTP + pub async fn sftp_remove(&self, connection_id: &str, path: &str) -> anyhow::Result<()> { + let sftp = self.get_sftp(connection_id).await?; + sftp.remove_file(path).await + .map_err(|e| anyhow!("Failed to remove file '{}': {}", path, e))?; + Ok(()) + } + + /// Remove directory via SFTP + pub async fn sftp_rmdir(&self, connection_id: &str, path: &str) -> anyhow::Result<()> { + let sftp = self.get_sftp(connection_id).await?; + sftp.remove_dir(path).await + .map_err(|e| anyhow!("Failed to remove directory '{}': {}", path, e))?; + Ok(()) + } + + 
/// Rename/move via SFTP + pub async fn sftp_rename(&self, connection_id: &str, old_path: &str, new_path: &str) -> anyhow::Result<()> { + let sftp = self.get_sftp(connection_id).await?; + sftp.rename(old_path, new_path).await + .map_err(|e| anyhow!("Failed to rename '{}' to '{}': {}", old_path, new_path, e))?; + Ok(()) + } + + /// Check if path exists via SFTP + pub async fn sftp_exists(&self, connection_id: &str, path: &str) -> anyhow::Result { + let sftp = self.get_sftp(connection_id).await?; + sftp.as_ref().try_exists(path).await + .map_err(|e| anyhow!("Failed to check if '{}' exists: {}", path, e)) + } + + /// Get file metadata via SFTP + pub async fn sftp_stat(&self, connection_id: &str, path: &str) -> anyhow::Result { + let sftp = self.get_sftp(connection_id).await?; + sftp.as_ref().metadata(path).await + .map_err(|e| anyhow!("Failed to stat '{}': {}", path, e)) + } + + // ============================================================================ + // PTY (Interactive Terminal) Operations + // ============================================================================ + + /// Open a PTY session and start a shell + pub async fn open_pty( + &self, + connection_id: &str, + cols: u32, + rows: u32, + ) -> anyhow::Result { + let guard = self.connections.read().await; + let conn = guard + .get(connection_id) + .ok_or_else(|| anyhow!("Connection {} not found", connection_id))?; + + // Open a session channel + let channel = conn.handle.channel_open_session().await + .map_err(|e| anyhow!("Failed to open channel: {}", e))?; + + // Request PTY — `false` = don't wait for reply (reply handled in reader loop) + channel.request_pty( + false, + "xterm-256color", + cols, + rows, + 0, + 0, + &[], + ).await + .map_err(|e| anyhow!("Failed to request PTY: {}", e))?; + + // Start shell — `false` = don't wait for reply + channel.request_shell(false).await + .map_err(|e| anyhow!("Failed to start shell: {}", e))?; + + Ok(PTYSession { + channel: 
Arc::new(tokio::sync::Mutex::new(channel)), + connection_id: connection_id.to_string(), + }) + } + + /// Get server key fingerprint for verification + pub async fn get_server_key_fingerprint(&self, connection_id: &str) -> anyhow::Result { + let guard = self.connections.read().await; + let conn = guard + .get(connection_id) + .ok_or_else(|| anyhow!("Connection {} not found", connection_id))?; + + // Return a fingerprint based on connection info + // Note: Actual server key fingerprint requires access to the SSH transport layer + // For security verification, the server key is verified during connection via SSHHandler + let fingerprint = format!("{}:{}:{}", conn.config.host, conn.config.port, conn.config.username); + Ok(fingerprint) + } +} + +/// PTY session for interactive terminal +#[derive(Clone)] +pub struct PTYSession { + channel: Arc>>, + connection_id: String, +} + +impl PTYSession { + /// Extract the inner Channel, consuming the Mutex wrapper. + /// Only works if this is the sole Arc reference. + /// Intended for use by RemoteTerminalManager to hand ownership to the owner task. + pub async fn into_channel(self) -> Option> { + match Arc::try_unwrap(self.channel) { + Ok(mutex) => Some(mutex.into_inner()), + Err(_) => None, + } + } +} + +impl PTYSession { + /// Write data to PTY + pub async fn write(&self, data: &[u8]) -> anyhow::Result<()> { + let channel = self.channel.lock().await; + channel.data(data).await + .map_err(|e| anyhow!("Failed to write to PTY: {}", e))?; + Ok(()) + } + + /// Resize PTY + pub async fn resize(&self, cols: u32, rows: u32) -> anyhow::Result<()> { + let channel = self.channel.lock().await; + // Use default pixel dimensions (80x24 characters) + channel.window_change(cols, rows, 0, 0).await + .map_err(|e| anyhow!("Failed to resize PTY: {}", e))?; + Ok(()) + } + + /// Read data from PTY. + /// Blocks until data is available, PTY closes, or an error occurs. + /// Returns Ok(Some(bytes)) for data, Ok(None) for clean close, Err for errors. 
+ pub async fn read(&self) -> anyhow::Result>> { + let mut channel = self.channel.lock().await; + loop { + match channel.wait().await { + Some(russh::ChannelMsg::Data { data }) => return Ok(Some(data.to_vec())), + Some(russh::ChannelMsg::ExtendedData { data, .. }) => return Ok(Some(data.to_vec())), + Some(russh::ChannelMsg::Eof) | Some(russh::ChannelMsg::Close) => return Ok(None), + Some(russh::ChannelMsg::ExitStatus { .. }) => return Ok(None), + Some(_) => { + // WindowAdjust, Success, RequestSuccess, etc. — skip and keep reading + continue; + } + None => return Ok(None), + } + } + } + + /// Close PTY session + pub async fn close(self) -> anyhow::Result<()> { + let channel = self.channel.lock().await; + channel.eof().await + .map_err(|e| anyhow!("Failed to close PTY: {}", e))?; + channel.close().await + .map_err(|e| anyhow!("Failed to close channel: {}", e))?; + Ok(()) + } + + /// Get connection ID + pub fn connection_id(&self) -> &str { + &self.connection_id + } +} + +// ============================================================================ +// Port Forwarding +// ============================================================================ + +/// Port forwarding entry +#[derive(Debug, Clone)] +pub struct PortForward { + pub id: String, + pub local_port: u16, + pub remote_host: String, + pub remote_port: u16, + pub direction: PortForwardDirection, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum PortForwardDirection { + Local, // -L: forward local port to remote + Remote, // -R: forward remote port to local + Dynamic, // -D: dynamic SOCKS proxy +} + +/// Port forwarding manager +pub struct PortForwardManager { + forwards: Arc>>, + ssh_manager: Arc>>, +} + +impl PortForwardManager { + pub fn new() -> Self { + Self { + forwards: Arc::new(tokio::sync::RwLock::new(HashMap::new())), + ssh_manager: Arc::new(tokio::sync::RwLock::new(None)), + } + } + + pub fn with_ssh_manager(ssh_manager: SSHConnectionManager) -> Self { + Self { + forwards: 
Arc::new(tokio::sync::RwLock::new(HashMap::new())), + ssh_manager: Arc::new(tokio::sync::RwLock::new(Some(ssh_manager))), + } + } + + pub async fn set_ssh_manager(&self, manager: SSHConnectionManager) { + let mut guard = self.ssh_manager.write().await; + *guard = Some(manager); + } + + /// Start local port forwarding (-L) + /// + /// TODO: Full implementation requires: + /// - TCP listener to accept local connections + /// - SSH channel for each forwarded connection + /// - Proper cleanup when stopping the forward + /// + /// Currently this is a placeholder that only tracks the forward configuration. + pub async fn start_local_forward( + &self, + _connection_id: &str, + local_port: u16, + remote_host: String, + remote_port: u16, + ) -> anyhow::Result { + let id = uuid::Uuid::new_v4().to_string(); + + let forward = PortForward { + id: id.clone(), + local_port, + remote_host: remote_host.clone(), + remote_port, + direction: PortForwardDirection::Local, + }; + + // Store forward entry + let mut guard = self.forwards.write().await; + guard.insert(id.clone(), forward); + + log::info!("[TODO] Local port forward registered: localhost:{} -> {}:{}", + local_port, remote_host, remote_port); + log::warn!("Port forwarding is not fully implemented - connections will not be forwarded"); + + Ok(id) + } + + /// Start remote port forwarding (-R) + /// + /// TODO: Full implementation requires SSH reverse port forwarding channel. + /// This is more complex as it needs to bind to a remote port. 
+ pub async fn start_remote_forward( + &self, + _connection_id: &str, + remote_port: u16, + local_host: String, + local_port: u16, + ) -> anyhow::Result { + let id = uuid::Uuid::new_v4().to_string(); + + let forward = PortForward { + id: id.clone(), + local_port: remote_port, + remote_host: local_host.clone(), + remote_port: local_port, + direction: PortForwardDirection::Remote, + }; + + // Remote port forwarding requires SSH channel forwarding + // This is a placeholder - full implementation would need: + // 1. Open a "reverse" channel on SSH connection + // 2. Bind to remote port + // 3. Forward connections back through the channel + + let mut guard = self.forwards.write().await; + guard.insert(id.clone(), forward); + + log::info!("Started remote port forward (placeholder): *:{} -> {}:{}", + remote_port, local_host, local_port); + + // TODO: Implement actual SSH reverse port forwarding + log::warn!("Remote port forwarding is not fully implemented - data will not be forwarded"); + + Ok(id) + } + + /// Stop a port forward + pub async fn stop_forward(&self, forward_id: &str) -> anyhow::Result<()> { + let mut guard = self.forwards.write().await; + if let Some(forward) = guard.remove(forward_id) { + log::info!("Stopped port forward: {} ({}:{} -> {}:{})", + forward.id, + match forward.direction { + PortForwardDirection::Local => "local", + PortForwardDirection::Remote => "remote", + PortForwardDirection::Dynamic => "dynamic", + }, + forward.local_port, + forward.remote_host, + forward.remote_port); + } + Ok(()) + } + + /// Stop all port forwards + pub async fn stop_all(&self) { + let mut guard = self.forwards.write().await; + let count = guard.len(); + guard.drain(); + log::info!("All {} port forwards stopped", count); + } + + /// List all active forwards + pub async fn list_forwards(&self) -> Vec { + let guard = self.forwards.read().await; + guard.values().cloned().collect() + } + + /// Check if a port is already forwarded + pub async fn is_port_forwarded(&self, port: 
u16) -> bool { + let guard = self.forwards.read().await; + guard.values().any(|f| f.local_port == port) + } +} + +impl Default for PortForwardManager { + fn default() -> Self { + Self::new() + } +} diff --git a/src/crates/core/src/service/remote_ssh/mod.rs b/src/crates/core/src/service/remote_ssh/mod.rs new file mode 100644 index 00000000..c100848c --- /dev/null +++ b/src/crates/core/src/service/remote_ssh/mod.rs @@ -0,0 +1,24 @@ +//! Remote SSH Service Module +//! +//! Provides SSH connection management and SFTP-based remote file operations. +//! This allows BitFun to work with files on remote servers via SSH, +//! similar to VSCode's Remote SSH extension. + +pub mod manager; +pub mod remote_fs; +pub mod remote_terminal; +pub mod types; +pub mod workspace_state; + +pub use manager::{ + KnownHostEntry, PortForward, PortForwardDirection, PortForwardManager, PTYSession, + SSHConnectionManager, +}; +pub use remote_fs::RemoteFileService; +pub use remote_terminal::{RemoteTerminalManager, RemoteTerminalSession, SessionStatus}; +pub use types::*; +pub use workspace_state::{ + get_remote_workspace_manager, init_remote_workspace_manager, is_remote_workspace_active, + is_remote_path, lookup_remote_connection, + RemoteWorkspaceEntry, RemoteWorkspaceState, RemoteWorkspaceStateManager, +}; diff --git a/src/crates/core/src/service/remote_ssh/remote_fs.rs b/src/crates/core/src/service/remote_ssh/remote_fs.rs new file mode 100644 index 00000000..2fab7d0a --- /dev/null +++ b/src/crates/core/src/service/remote_ssh/remote_fs.rs @@ -0,0 +1,331 @@ +//! Remote file system operations via SFTP +//! +//! 
This module provides remote file system operations using the SFTP protocol + +use crate::service::remote_ssh::types::{RemoteDirEntry, RemoteFileEntry, RemoteTreeNode}; +use anyhow::anyhow; +use std::sync::Arc; + +/// Remote file service using SFTP protocol +#[derive(Clone)] +pub struct RemoteFileService { + manager: Arc>>, +} + +impl RemoteFileService { + pub fn new( + manager: Arc>>, + ) -> Self { + Self { manager } + } + + /// Get the SSH manager + async fn get_manager(&self, _connection_id: &str) -> anyhow::Result { + let guard = self.manager.read().await; + guard.as_ref() + .cloned() + .ok_or_else(|| anyhow!("SSH manager not initialized")) + } + + /// Read a file from the remote server via SFTP + pub async fn read_file(&self, connection_id: &str, path: &str) -> anyhow::Result> { + let manager = self.get_manager(connection_id).await?; + manager.sftp_read(connection_id, path).await + } + + /// Write content to a remote file via SFTP + pub async fn write_file(&self, connection_id: &str, path: &str, content: &[u8]) -> anyhow::Result<()> { + let manager = self.get_manager(connection_id).await?; + manager.sftp_write(connection_id, path, content).await + } + + /// Check if a remote path exists + pub async fn exists(&self, connection_id: &str, path: &str) -> anyhow::Result { + let manager = self.get_manager(connection_id).await?; + manager.sftp_exists(connection_id, path).await + } + + /// Check if a remote path is a regular file + pub async fn is_file(&self, connection_id: &str, path: &str) -> anyhow::Result { + match self.stat(connection_id, path).await? { + Some(entry) => Ok(entry.is_file), + None => Ok(false), + } + } + + /// Check if a remote path is a directory + pub async fn is_dir(&self, connection_id: &str, path: &str) -> anyhow::Result { + match self.stat(connection_id, path).await? 
{ + Some(entry) => Ok(entry.is_dir), + None => Ok(false), + } + } + + /// Read directory contents via SFTP + pub async fn read_dir(&self, connection_id: &str, path: &str) -> anyhow::Result> { + let manager = self.get_manager(connection_id).await?; + let mut entries = manager.sftp_read_dir(connection_id, path).await?; + + let mut result = Vec::new(); + + for entry in entries.by_ref() { + let name = entry.file_name(); + + // Skip . and .. + if name == "." || name == ".." { + continue; + } + + let full_path = if path.ends_with('/') { + format!("{}{}", path, name) + } else { + format!("{}/{}", path, name) + }; + + let metadata = entry.metadata(); + let is_dir = entry.file_type().is_dir(); + let is_symlink = entry.file_type().is_symlink(); + let is_file = entry.file_type().is_file(); + + // FileAttributes mtime is Unix timestamp in seconds; convert to milliseconds + // for JavaScript Date compatibility + let size = if is_file { metadata.size } else { None }; + let modified = metadata.mtime.map(|t| (t as u64) * 1000); + + // Get permissions string + let permissions = Some(format_permissions(metadata.permissions)); + + result.push(RemoteDirEntry { + name, + path: full_path, + is_dir, + is_file, + is_symlink, + size, + modified, + permissions, + }); + } + + Ok(result) + } + + /// Build a tree of remote directory structure + pub async fn build_tree( + &self, + connection_id: &str, + path: &str, + max_depth: Option, + ) -> anyhow::Result { + let max_depth = max_depth.unwrap_or(3); + Box::pin(self.build_tree_impl(connection_id, path, 0, max_depth)).await + } + + async fn build_tree_impl( + &self, + connection_id: &str, + path: &str, + current_depth: u32, + max_depth: u32, + ) -> anyhow::Result { + let name = std::path::Path::new(path) + .file_name() + .map(|n| n.to_string_lossy().to_string()) + .unwrap_or_else(|| path.to_string()); + + // Check if this is a directory + let is_dir = match self.exists(connection_id, path).await { + Ok(exists) => exists, + Err(_) => false, + }; 
+ + // Check if it's a directory by trying to read it + let is_dir = if is_dir { + let entries = self.read_dir(connection_id, path).await; + entries.is_ok() + } else { + false + }; + + if !is_dir || current_depth >= max_depth { + return Ok(RemoteTreeNode { + name, + path: path.to_string(), + is_dir, + children: None, + }); + } + + // Read directory contents + let entries = match self.read_dir(connection_id, path).await { + Ok(entries) => entries, + Err(_) => { + return Ok(RemoteTreeNode { + name, + path: path.to_string(), + is_dir: false, + children: None, + }); + } + }; + + let mut children = Vec::new(); + + for entry in entries { + if entry.is_dir { + match Box::pin(self.build_tree_impl( + connection_id, + &entry.path, + current_depth + 1, + max_depth, + )).await { + Ok(child) => children.push(child), + Err(_) => { + children.push(RemoteTreeNode { + name: entry.name, + path: entry.path, + is_dir: true, + children: None, + }); + } + } + } else { + children.push(RemoteTreeNode { + name: entry.name, + path: entry.path, + is_dir: false, + children: None, + }); + } + } + + Ok(RemoteTreeNode { + name, + path: path.to_string(), + is_dir: true, + children: Some(children), + }) + } + + /// Create a directory on the remote server via SFTP + pub async fn create_dir(&self, connection_id: &str, path: &str) -> anyhow::Result<()> { + let manager = self.get_manager(connection_id).await?; + manager.sftp_mkdir(connection_id, path).await + } + + /// Create directory and all parent directories via SFTP + pub async fn create_dir_all(&self, connection_id: &str, path: &str) -> anyhow::Result<()> { + let manager = self.get_manager(connection_id).await?; + manager.sftp_mkdir_all(connection_id, path).await + } + + /// Remove a file from the remote server via SFTP + pub async fn remove_file(&self, connection_id: &str, path: &str) -> anyhow::Result<()> { + let manager = self.get_manager(connection_id).await?; + manager.sftp_remove(connection_id, path).await + } + + /// Remove a directory 
and its contents recursively via SFTP + pub async fn remove_dir_all(&self, connection_id: &str, path: &str) -> anyhow::Result<()> { + // First, delete all contents + match self.read_dir(connection_id, path).await { + Ok(entries) => { + for entry in entries { + let entry_path = entry.path.clone(); + if entry.is_dir { + Box::pin(self.remove_dir_all(connection_id, &entry_path)).await?; + } else { + let manager = self.get_manager(connection_id).await?; + manager.sftp_remove(connection_id, &entry_path).await?; + } + } + } + Err(_) => {} + } + + // Then remove the directory itself + let manager = self.get_manager(connection_id).await?; + manager.sftp_rmdir(connection_id, path).await + } + + /// Rename/move a remote file or directory via SFTP + pub async fn rename( + &self, + connection_id: &str, + old_path: &str, + new_path: &str, + ) -> anyhow::Result<()> { + let manager = self.get_manager(connection_id).await?; + manager.sftp_rename(connection_id, old_path, new_path).await + } + + /// Get file metadata via SFTP + pub async fn stat(&self, connection_id: &str, path: &str) -> anyhow::Result> { + let manager = self.get_manager(connection_id).await?; + + match manager.sftp_stat(connection_id, path).await { + Ok(attrs) => { + let name = std::path::Path::new(path) + .file_name() + .map(|n| n.to_string_lossy().to_string()) + .unwrap_or_else(|| path.to_string()); + + let is_dir = attrs.is_dir(); + let is_symlink = attrs.is_symlink(); + // File is neither dir nor symlink + let is_file = !is_dir && !is_symlink; + let size = if is_file { attrs.size } else { None }; + let modified = attrs.mtime.map(|t| (t as u64) * 1000); + let permissions = Some(format_permissions(attrs.permissions)); + + Ok(Some(RemoteFileEntry { + name, + path: path.to_string(), + is_dir, + is_file, + is_symlink, + size, + modified, + permissions, + })) + } + Err(_) => Ok(None), + } + } +} + +/// Format file permissions as string (e.g., "rwxr-xr-x") +fn format_permissions(mode: Option) -> String { + let mode = 
match mode { + Some(m) => m, + None => return "---------".to_string(), + }; + + let file_type = match mode & 0o170000 { + 0o040000 => 'd', // directory + 0o120000 => 'l', // symbolic link + 0o060000 => 'b', // block device + 0o020000 => 'c', // character device + 0o010000 => 'p', // FIFO + 0o140000 => 's', // socket + _ => '-', // regular file + }; + + let perms = [ + (mode & 0o400 != 0, 'r'), + (mode & 0o200 != 0, 'w'), + (mode & 0o100 != 0, 'x'), + (mode & 0o040 != 0, 'r'), + (mode & 0o020 != 0, 'w'), + (mode & 0o010 != 0, 'x'), + (mode & 0o004 != 0, 'r'), + (mode & 0o002 != 0, 'w'), + (mode & 0o001 != 0, 'x'), + ]; + + let perm_str: String = perms.iter() + .map(|(set, c)| if *set { *c } else { '-' }) + .collect(); + + format!("{}{}", file_type, perm_str) +} diff --git a/src/crates/core/src/service/remote_ssh/remote_terminal.rs b/src/crates/core/src/service/remote_ssh/remote_terminal.rs new file mode 100644 index 00000000..83876df0 --- /dev/null +++ b/src/crates/core/src/service/remote_ssh/remote_terminal.rs @@ -0,0 +1,295 @@ +//! Remote Terminal Session Management with PTY support +//! +//! Architecture: +//! - Each PTY has a single owner task that exclusively holds the russh Channel +//! - Reading: owner task calls `channel.wait()` and broadcasts output via `broadcast::Sender` +//! - Writing: callers send `PtyCommand::Write` via `mpsc::Sender` → owner task → `channel.data()` +//! - This eliminates Mutex deadlock between read and write operations + +use crate::service::remote_ssh::manager::SSHConnectionManager; +use anyhow::Context; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::io::AsyncWriteExt; +use tokio::sync::{broadcast, mpsc, RwLock}; + +fn shell_escape(s: &str) -> String { + if s.chars().all(|c| c.is_alphanumeric() || c == '/' || c == '.' 
|| c == '-' || c == '_') { + s.to_string() + } else { + format!("'{}'", s.replace('\'', "'\\''")) + } +} + +#[derive(Debug, Clone)] +pub struct RemoteTerminalSession { + pub id: String, + pub name: String, + pub connection_id: String, + pub cwd: String, + pub pid: Option, + pub status: SessionStatus, + pub cols: u16, + pub rows: u16, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum SessionStatus { + Active, + Inactive, + Closed, +} + +enum PtyCommand { + Write(Vec), + Resize(u32, u32), + Close, +} + +struct ActiveHandle { + output_tx: broadcast::Sender>, + cmd_tx: mpsc::Sender, +} + +pub struct CreateSessionResult { + pub session: RemoteTerminalSession, + pub output_rx: broadcast::Receiver>, +} + +pub struct RemoteTerminalManager { + sessions: Arc>>, + ssh_manager: Arc>>, + handles: Arc>>, +} + +impl RemoteTerminalManager { + pub fn new(ssh_manager: SSHConnectionManager) -> Self { + Self { + sessions: Arc::new(RwLock::new(HashMap::new())), + ssh_manager: Arc::new(tokio::sync::RwLock::new(Some(ssh_manager))), + handles: Arc::new(RwLock::new(HashMap::new())), + } + } + + pub async fn set_ssh_manager(&self, manager: SSHConnectionManager) { + *self.ssh_manager.write().await = Some(manager); + } + + /// Create a new remote terminal session. + /// Returns a `CreateSessionResult` with a pre-subscribed output receiver. + /// The owner task is spawned immediately — the output_rx is guaranteed to + /// receive all data including the initial shell prompt. 
+ pub async fn create_session( + &self, + session_id: Option, + name: Option, + connection_id: &str, + cols: u16, + rows: u16, + initial_cwd: Option<&str>, + ) -> anyhow::Result { + let ssh_guard = self.ssh_manager.read().await; + let manager = ssh_guard.as_ref().context("SSH manager not initialized")?; + + let session_id = session_id.unwrap_or_else(|| uuid::Uuid::new_v4().to_string()); + let name = name.unwrap_or_else(|| format!("Remote Terminal {}", &session_id[..8])); + + // Open PTY via manager, then extract the raw Channel + let pty = manager.open_pty(connection_id, cols as u32, rows as u32).await?; + let mut channel = pty.into_channel().await + .ok_or_else(|| anyhow::anyhow!("Failed to extract channel from PTYSession — multiple references exist"))?; + + let cwd = if let Some(dir) = initial_cwd { + dir.to_string() + } else { + match manager.execute_command(connection_id, "pwd").await { + Ok((output, _, _)) => output.trim().to_string(), + Err(_) => "/".to_string(), + } + }; + + // broadcast for output, mpsc for commands to the owner task + let (output_tx, output_rx) = broadcast::channel::>(1000); + let (cmd_tx, mut cmd_rx) = mpsc::channel::(100); + + let initial_cd = cwd.clone(); + + let session = RemoteTerminalSession { + id: session_id.clone(), + name, + connection_id: connection_id.to_string(), + cwd, + pid: None, + status: SessionStatus::Active, + cols, + rows, + }; + + { + let mut sessions = self.sessions.write().await; + sessions.insert(session_id.clone(), session.clone()); + } + { + let mut handles = self.handles.write().await; + handles.insert(session_id.clone(), ActiveHandle { + output_tx: output_tx.clone(), + cmd_tx, + }); + } + + let mut writer = channel.make_writer(); + + let task_session_id = session_id.clone(); + let task_handles = self.handles.clone(); + let task_sessions = self.sessions.clone(); + + tokio::spawn(async move { + log::info!("Remote PTY owner task started: session_id={}", task_session_id); + + // cd to workspace directory silently + 
if initial_cd != "/" { + let cd_cmd = format!("cd {} && clear\n", shell_escape(&initial_cd)); + if let Err(e) = writer.write_all(cd_cmd.as_bytes()).await { + log::warn!("Failed to cd to initial directory: {}", e); + } + let _ = writer.flush().await; + } + + loop { + tokio::select! { + biased; // prioritize commands over reads to avoid write starvation + + cmd = cmd_rx.recv() => { + match cmd { + Some(PtyCommand::Write(data)) => { + if let Err(e) = writer.write_all(&data).await { + log::warn!("PTY write failed: session_id={}, error={}", task_session_id, e); + } + // flush to ensure data is sent immediately + let _ = writer.flush().await; + } + Some(PtyCommand::Resize(cols, rows)) => { + if let Err(e) = channel.window_change(cols, rows, 0, 0).await { + log::warn!("PTY resize failed: session_id={}, error={}", task_session_id, e); + } + } + Some(PtyCommand::Close) | None => { + log::info!("PTY close requested: session_id={}", task_session_id); + let _ = channel.eof().await; + let _ = channel.close().await; + break; + } + } + } + + msg = channel.wait() => { + match msg { + Some(russh::ChannelMsg::Data { data }) => { + let _ = output_tx.send(data.to_vec()); + } + Some(russh::ChannelMsg::ExtendedData { data, .. }) => { + let _ = output_tx.send(data.to_vec()); + } + Some(russh::ChannelMsg::Eof) + | Some(russh::ChannelMsg::Close) + | Some(russh::ChannelMsg::ExitStatus { .. }) => { + log::info!("Remote PTY closed: session_id={}", task_session_id); + break; + } + Some(_) => continue, // WindowAdjust, Success, etc. 
+ None => { + log::info!("Remote PTY channel ended: session_id={}", task_session_id); + break; + } + } + } + } + } + + // Clean up + { + let mut handles = task_handles.write().await; + handles.remove(&task_session_id); + } + { + let mut sessions = task_sessions.write().await; + if let Some(s) = sessions.get_mut(&task_session_id) { + s.status = SessionStatus::Closed; + } + } + log::info!("Remote PTY owner task exited: session_id={}", task_session_id); + }); + + Ok(CreateSessionResult { session, output_rx }) + } + + pub async fn get_session(&self, session_id: &str) -> Option { + self.sessions.read().await.get(session_id).cloned() + } + + pub async fn list_sessions(&self) -> Vec { + self.sessions.read().await.values() + .filter(|s| s.status != SessionStatus::Closed) + .cloned() + .collect() + } + + pub async fn write(&self, session_id: &str, data: &[u8]) -> anyhow::Result<()> { + let handles = self.handles.read().await; + let handle = handles.get(session_id).context("Session not found or PTY not active")?; + handle.cmd_tx.send(PtyCommand::Write(data.to_vec())).await + .map_err(|_| anyhow::anyhow!("PTY task has exited")) + } + + pub async fn resize(&self, session_id: &str, cols: u16, rows: u16) -> anyhow::Result<()> { + { + let mut sessions = self.sessions.write().await; + if let Some(s) = sessions.get_mut(session_id) { + s.cols = cols; + s.rows = rows; + } + } + let handles = self.handles.read().await; + if let Some(handle) = handles.get(session_id) { + handle.cmd_tx.send(PtyCommand::Resize(cols as u32, rows as u32)).await + .map_err(|_| anyhow::anyhow!("PTY task has exited"))?; + } + Ok(()) + } + + pub async fn close_session(&self, session_id: &str) -> anyhow::Result<()> { + // Send close command to owner task + { + let handles = self.handles.read().await; + if let Some(handle) = handles.get(session_id) { + let _ = handle.cmd_tx.send(PtyCommand::Close).await; + } + } + // Also remove from sessions map immediately so it disappears from list + { + let mut sessions = 
self.sessions.write().await; + sessions.remove(session_id); + } + Ok(()) + } + + pub async fn is_pty_active(&self, session_id: &str) -> bool { + self.handles.read().await.contains_key(session_id) + } + + pub async fn subscribe_output(&self, session_id: &str) -> anyhow::Result>> { + let handles = self.handles.read().await; + let handle = handles.get(session_id).context("Session not found or PTY not active")?; + Ok(handle.output_tx.subscribe()) + } +} + +impl Clone for RemoteTerminalManager { + fn clone(&self) -> Self { + Self { + sessions: self.sessions.clone(), + ssh_manager: self.ssh_manager.clone(), + handles: self.handles.clone(), + } + } +} diff --git a/src/crates/core/src/service/remote_ssh/types.rs b/src/crates/core/src/service/remote_ssh/types.rs new file mode 100644 index 00000000..89f0c351 --- /dev/null +++ b/src/crates/core/src/service/remote_ssh/types.rs @@ -0,0 +1,242 @@ +//! Type definitions for Remote SSH service + +use serde::{Deserialize, Serialize}; + +/// Workspace backend type +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(tag = "type", content = "data")] +pub enum WorkspaceBackend { + /// Local workspace (default) + Local, + /// Remote SSH workspace + Remote(RemoteWorkspaceInfo), +} + +/// Remote workspace information +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct RemoteWorkspaceInfo { + /// SSH connection ID + pub connection_id: String, + /// Connection name (display name) + pub connection_name: String, + /// Remote path on the server + pub remote_path: String, +} + +/// SSH connection configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SSHConnectionConfig { + /// Unique identifier for this connection + pub id: String, + /// Display name for the connection + pub name: String, + /// Remote host address (hostname or IP) + pub host: String, + /// SSH port (default: 22) + pub port: u16, + /// SSH 
username + pub username: String, + /// Authentication method + pub auth: SSHAuthMethod, + /// Default remote working directory + #[serde(rename = "defaultWorkspace")] + pub default_workspace: Option, +} + +/// SSH authentication method +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum SSHAuthMethod { + /// Password authentication + Password { + password: String, + }, + /// Private key authentication + PrivateKey { + /// Path to private key file on local machine + #[serde(rename = "keyPath")] + key_path: String, + /// Optional passphrase for encrypted private key + passphrase: Option, + }, + /// SSH agent authentication (uses system SSH agent) + Agent, +} + +/// Connection state +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub enum ConnectionState { + /// Not connected + Disconnected, + /// Connection in progress + Connecting, + /// Successfully connected + Connected, + /// Connection failed with error + Failed { error: String }, +} + +/// Saved connection (without sensitive data like passwords) +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SavedConnection { + pub id: String, + pub name: String, + pub host: String, + pub port: u16, + pub username: String, + #[serde(rename = "authType")] + pub auth_type: SavedAuthType, + #[serde(rename = "defaultWorkspace")] + pub default_workspace: Option, + #[serde(rename = "lastConnected")] + pub last_connected: Option, +} + +/// Saved auth type (excludes sensitive credentials) +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum SavedAuthType { + Password, // Password is stored in system keychain + PrivateKey { + #[serde(rename = "keyPath")] + key_path: String, + }, + Agent, +} + +/// Remote file entry information +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RemoteFileEntry { + pub name: String, + pub path: String, + #[serde(rename = "isDir")] + pub 
is_dir: bool, + #[serde(rename = "isFile")] + pub is_file: bool, + #[serde(rename = "isSymlink")] + pub is_symlink: bool, + pub size: Option, + pub modified: Option, + pub permissions: Option, +} + +/// Remote file tree node +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RemoteTreeNode { + pub name: String, + pub path: String, + #[serde(rename = "isDir")] + pub is_dir: bool, + pub children: Option>, +} + +/// Remote directory entry (for read_dir operations) +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RemoteDirEntry { + pub name: String, + pub path: String, + #[serde(rename = "isDir")] + pub is_dir: bool, + #[serde(rename = "isFile")] + pub is_file: bool, + #[serde(rename = "isSymlink")] + pub is_symlink: bool, + pub size: Option, + pub modified: Option, + pub permissions: Option, +} + +/// Result of SSH connection attempt +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SSHConnectionResult { + pub success: bool, + #[serde(rename = "connectionId")] + pub connection_id: Option, + pub error: Option, + #[serde(rename = "serverInfo")] + pub server_info: Option, +} + +/// Remote server information +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ServerInfo { + #[serde(rename = "osType")] + pub os_type: String, + pub hostname: String, + #[serde(rename = "homeDir")] + pub home_dir: String, +} + +/// Result of remote file operation +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RemoteFileResult { + pub success: bool, + pub error: Option, +} + +/// Result of remote directory listing +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RemoteListResult { + pub entries: Vec, + pub error: Option, +} + +/// Request to open a remote workspace +#[derive(Debug, Clone, Serialize, Deserialize)] 
+#[serde(rename_all = "camelCase")] +pub struct RemoteWorkspaceRequest { + #[serde(rename = "connectionId")] + pub connection_id: String, + #[serde(rename = "remotePath")] + pub remote_path: String, +} + +/// Remote workspace info +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RemoteWorkspace { + #[serde(rename = "connectionId")] + pub connection_id: String, + #[serde(rename = "remotePath")] + pub remote_path: String, + #[serde(rename = "connectionName")] + pub connection_name: String, +} + +/// SSH config entry parsed from ~/.ssh/config +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SSHConfigEntry { + /// Host name (alias from SSH config) + pub host: String, + /// Actual hostname or IP + pub hostname: Option, + /// SSH port + pub port: Option, + /// Username + pub user: Option, + /// Path to identity file (private key) + pub identity_file: Option, + /// Whether to use SSH agent + pub agent: Option, +} + +/// Result of looking up SSH config for a host +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SSHConfigLookupResult { + /// Whether a config entry was found + pub found: bool, + /// Config entry if found + pub config: Option, +} diff --git a/src/crates/core/src/service/remote_ssh/workspace_state.rs b/src/crates/core/src/service/remote_ssh/workspace_state.rs new file mode 100644 index 00000000..a68dc0cb --- /dev/null +++ b/src/crates/core/src/service/remote_ssh/workspace_state.rs @@ -0,0 +1,265 @@ +//! Remote Workspace Global State +//! +//! Provides a **registry** of remote SSH workspaces so that multiple remote +//! workspaces can be open simultaneously. Each workspace is keyed by its +//! remote path and maps to the SSH connection that serves it. 
+ +use crate::service::remote_ssh::{RemoteFileService, RemoteTerminalManager, SSHConnectionManager}; +use std::collections::HashMap; +use std::path::PathBuf; +use std::sync::Arc; +use tokio::sync::RwLock; + +/// A single registered remote workspace entry. +#[derive(Debug, Clone)] +pub struct RemoteWorkspaceEntry { + pub connection_id: String, + pub connection_name: String, +} + +// ── Legacy compat alias (used by a handful of call-sites that still read +// the old struct shape). Will be removed once every consumer is migrated. +/// Legacy alias – prefer `RemoteWorkspaceEntry` + `lookup_connection`. +#[derive(Clone)] +pub struct RemoteWorkspaceState { + pub is_active: bool, + pub connection_id: Option, + pub remote_path: Option, + pub connection_name: Option, +} + +/// Global remote workspace state manager. +/// +/// Instead of storing a **single** active workspace it now maintains a +/// `HashMap` so that several remote +/// workspaces can coexist. +pub struct RemoteWorkspaceStateManager { + /// Key = remote_path (e.g. "/root/project"), Value = connection info. + workspaces: Arc>>, + /// SSH connection manager (shared across all workspaces). + ssh_manager: Arc>>, + /// Remote file service (shared). + file_service: Arc>>, + /// Remote terminal manager (shared). + terminal_manager: Arc>>, + /// Local base path for session persistence. 
+ local_session_base: PathBuf, +} + +impl RemoteWorkspaceStateManager { + pub fn new() -> Self { + let local_session_base = dirs::data_local_dir() + .unwrap_or_else(|| PathBuf::from(".")) + .join("BitFun") + .join("remote-workspaces"); + + Self { + workspaces: Arc::new(RwLock::new(HashMap::new())), + ssh_manager: Arc::new(RwLock::new(None)), + file_service: Arc::new(RwLock::new(None)), + terminal_manager: Arc::new(RwLock::new(None)), + local_session_base, + } + } + + // ── Service setters (shared across all workspaces) ───────────── + + pub async fn set_ssh_manager(&self, manager: SSHConnectionManager) { + *self.ssh_manager.write().await = Some(manager); + } + + pub async fn set_file_service(&self, service: RemoteFileService) { + *self.file_service.write().await = Some(service); + } + + pub async fn set_terminal_manager(&self, manager: RemoteTerminalManager) { + *self.terminal_manager.write().await = Some(manager); + } + + // ── Registry API ─────────────────────────────────────────────── + + /// Register (or update) a remote workspace. + pub async fn register_remote_workspace( + &self, + remote_path: String, + connection_id: String, + connection_name: String, + ) { + let mut guard = self.workspaces.write().await; + guard.insert( + remote_path, + RemoteWorkspaceEntry { + connection_id, + connection_name, + }, + ); + } + + /// Unregister a remote workspace by its path. + pub async fn unregister_remote_workspace(&self, remote_path: &str) { + let mut guard = self.workspaces.write().await; + guard.remove(remote_path); + } + + /// Look up the connection info for a given path. + /// + /// Returns `Some(entry)` if `path` equals a registered remote root **or** + /// is a sub-path of one (e.g. `/root/project/src/main.rs` matches + /// `/root/project`). + pub async fn lookup_connection(&self, path: &str) -> Option { + let guard = self.workspaces.read().await; + // Exact match first (most common). 
+ if let Some(entry) = guard.get(path) { + return Some(entry.clone()); + } + // Sub-path match. + for (root, entry) in guard.iter() { + if path.starts_with(&format!("{}/", root)) { + return Some(entry.clone()); + } + } + None + } + + /// Quick boolean check: is `path` inside any registered remote workspace? + pub async fn is_remote_path(&self, path: &str) -> bool { + self.lookup_connection(path).await.is_some() + } + + /// Returns `true` if at least one remote workspace is registered. + pub async fn has_any(&self) -> bool { + !self.workspaces.read().await.is_empty() + } + + // ── Legacy compat ────────────────────────────────────────────── + + /// **Compat** — old code calls `activate_remote_workspace`. Now just + /// delegates to `register_remote_workspace`. + pub async fn activate_remote_workspace( + &self, + connection_id: String, + remote_path: String, + connection_name: String, + ) { + self.register_remote_workspace(remote_path, connection_id, connection_name) + .await; + } + + /// **Compat** — old code calls `deactivate_remote_workspace`. + /// Now unregisters ALL workspaces. Callers that need to remove a + /// specific workspace should use `unregister_remote_workspace`. + pub async fn deactivate_remote_workspace(&self) { + self.workspaces.write().await.clear(); + } + + /// **Compat** — returns a snapshot shaped like the old single-workspace + /// state. Picks the *first* registered workspace. + pub async fn get_state(&self) -> RemoteWorkspaceState { + let guard = self.workspaces.read().await; + if let Some((path, entry)) = guard.iter().next() { + RemoteWorkspaceState { + is_active: true, + connection_id: Some(entry.connection_id.clone()), + remote_path: Some(path.clone()), + connection_name: Some(entry.connection_name.clone()), + } + } else { + RemoteWorkspaceState { + is_active: false, + connection_id: None, + remote_path: None, + connection_name: None, + } + } + } + + /// **Compat** — returns true if any workspace is registered. 
+ pub async fn is_active(&self) -> bool { + self.has_any().await + } + + // ── Service getters ──────────────────────────────────────────── + + pub async fn get_ssh_manager(&self) -> Option { + self.ssh_manager.read().await.clone() + } + + pub async fn get_file_service(&self) -> Option { + self.file_service.read().await.clone() + } + + pub async fn get_terminal_manager(&self) -> Option { + self.terminal_manager.read().await.clone() + } + + // ── Session storage ──────────────────────────────────────────── + + pub fn get_local_session_path(&self, connection_id: &str) -> PathBuf { + self.local_session_base.join(connection_id).join("sessions") + } + + /// Map a workspace path to the effective session storage path. + /// Remote paths → local session dir. Local paths → returned as-is. + pub async fn get_effective_session_path(&self, workspace_path: &str) -> PathBuf { + if let Some(entry) = self.lookup_connection(workspace_path).await { + return self.get_local_session_path(&entry.connection_id); + } + PathBuf::from(workspace_path) + } +} + +// ── Global singleton ──────────────────────────────────────────────── + +static REMOTE_WORKSPACE_MANAGER: std::sync::OnceLock> = + std::sync::OnceLock::new(); + +pub fn init_remote_workspace_manager() -> Arc { + if let Some(existing) = REMOTE_WORKSPACE_MANAGER.get() { + return existing.clone(); + } + let manager = Arc::new(RemoteWorkspaceStateManager::new()); + match REMOTE_WORKSPACE_MANAGER.set(manager.clone()) { + Ok(()) => manager, + Err(_) => REMOTE_WORKSPACE_MANAGER.get().cloned().unwrap_or(manager), + } +} + +pub fn get_remote_workspace_manager() -> Option> { + REMOTE_WORKSPACE_MANAGER.get().cloned() +} + +// ── Free-standing helpers (convenience) ───────────────────────────── + +/// Get the effective session path for a workspace. 
+pub async fn get_effective_session_path(workspace_path: &str) -> std::path::PathBuf { + if let Some(manager) = get_remote_workspace_manager() { + manager.get_effective_session_path(workspace_path).await + } else { + std::path::PathBuf::from(workspace_path) + } +} + +/// Check if a specific path belongs to any registered remote workspace. +pub async fn is_remote_path(path: &str) -> bool { + if let Some(manager) = get_remote_workspace_manager() { + manager.is_remote_path(path).await + } else { + false + } +} + +/// Look up the connection entry for a given path. +pub async fn lookup_remote_connection(path: &str) -> Option { + let manager = get_remote_workspace_manager()?; + manager.lookup_connection(path).await +} + +/// **Compat** — old boolean check. Now returns true if ANY remote workspace +/// is registered. Prefer `is_remote_path(path)` for path-specific checks. +pub async fn is_remote_workspace_active() -> bool { + if let Some(manager) = get_remote_workspace_manager() { + manager.has_any().await + } else { + false + } +} diff --git a/src/crates/core/src/service/snapshot/manager.rs b/src/crates/core/src/service/snapshot/manager.rs index 615c699d..53566c75 100644 --- a/src/crates/core/src/service/snapshot/manager.rs +++ b/src/crates/core/src/service/snapshot/manager.rs @@ -1,5 +1,6 @@ use crate::agentic::tools::framework::{Tool, ToolResult, ToolUseContext}; use crate::agentic::tools::registry::ToolRegistry; +use crate::service::remote_ssh::workspace_state::is_remote_path; use crate::service::snapshot::service::SnapshotService; use crate::service::snapshot::types::{ OperationType, SnapshotConfig, SnapshotError, SnapshotResult, @@ -464,6 +465,15 @@ impl WrappedTool { ) })?; + // Remote workspaces: skip snapshot tracking, just execute the tool directly + if is_remote_path(snapshot_workspace.to_string_lossy().as_ref()).await { + debug!( + "Skipping snapshot for remote workspace: workspace={}", + snapshot_workspace.display() + ); + return 
self.original_tool.call(input, context).await; + } + let snapshot_manager = get_or_create_snapshot_manager(snapshot_workspace.clone(), None) .await .map_err(|e| crate::util::errors::BitFunError::Tool(e.to_string()))?; @@ -476,7 +486,11 @@ impl WrappedTool { let is_create_tool = matches!(self.name(), "Write" | "write_file" | "create_file"); - if !file_path.exists() && !is_create_tool { + // For local workspaces only: verify the file exists before attempting to snapshot + if !is_remote_path(file_path.to_string_lossy().as_ref()).await + && !file_path.exists() + && !is_create_tool + { error!( "File not found: file_path={} raw_path={} snapshot_workspace={}", file_path.display(), diff --git a/src/crates/core/src/service/terminal/src/api.rs b/src/crates/core/src/service/terminal/src/api.rs index 0986b964..776fd961 100644 --- a/src/crates/core/src/service/terminal/src/api.rs +++ b/src/crates/core/src/service/terminal/src/api.rs @@ -45,6 +45,9 @@ pub struct CreateSessionRequest { /// Optional terminal dimensions pub cols: Option, pub rows: Option, + /// Optional remote connection ID (for remote workspace sessions) + #[serde(rename = "remoteConnectionId", skip_serializing_if = "Option::is_none")] + pub remote_connection_id: Option, } /// Response for session creation diff --git a/src/crates/core/src/service/workspace/manager.rs b/src/crates/core/src/service/workspace/manager.rs index 6788b10c..067390fe 100644 --- a/src/crates/core/src/service/workspace/manager.rs +++ b/src/crates/core/src/service/workspace/manager.rs @@ -38,6 +38,7 @@ pub enum WorkspaceKind { #[default] Normal, Assistant, + Remote, } pub(crate) const IDENTITY_FILE_NAME: &str = "IDENTITY.md"; @@ -275,6 +276,8 @@ impl WorkspaceInfo { let now = chrono::Utc::now(); let id = uuid::Uuid::new_v4().to_string(); + let is_remote = workspace_kind == WorkspaceKind::Remote; + let mut workspace = Self { id, name: options.display_name.clone().unwrap_or(default_name), @@ -293,11 +296,13 @@ impl WorkspaceInfo { metadata: 
HashMap::new(), }; - workspace.detect_workspace_type().await; - workspace.load_identity().await; + if !is_remote { + workspace.detect_workspace_type().await; + workspace.load_identity().await; - if options.scan_options.calculate_statistics { - workspace.scan_workspace(options.scan_options).await?; + if options.scan_options.calculate_statistics { + workspace.scan_workspace(options.scan_options).await?; + } } workspace.status = if options.auto_set_current { @@ -558,6 +563,9 @@ impl WorkspaceInfo { /// Checks whether the workspace is still valid. pub async fn is_valid(&self) -> bool { + if self.workspace_kind == WorkspaceKind::Remote { + return true; + } self.root_path.exists() && self.root_path.is_dir() } @@ -654,18 +662,22 @@ impl WorkspaceManager { path: PathBuf, options: WorkspaceOpenOptions, ) -> BitFunResult { - if !path.exists() { - return Err(BitFunError::service(format!( - "Workspace path does not exist: {:?}", - path - ))); - } + let is_remote = options.workspace_kind == WorkspaceKind::Remote; + + if !is_remote { + if !path.exists() { + return Err(BitFunError::service(format!( + "Workspace path does not exist: {:?}", + path + ))); + } - if !path.is_dir() { - return Err(BitFunError::service(format!( - "Workspace path is not a directory: {:?}", - path - ))); + if !path.is_dir() { + return Err(BitFunError::service(format!( + "Workspace path is not a directory: {:?}", + path + ))); + } } let existing_workspace_id = self diff --git a/src/crates/core/src/service/workspace/service.rs b/src/crates/core/src/service/workspace/service.rs index c2b3f800..d333d022 100644 --- a/src/crates/core/src/service/workspace/service.rs +++ b/src/crates/core/src/service/workspace/service.rs @@ -1269,6 +1269,10 @@ impl WorkspaceService { path: &Path, mut options: WorkspaceCreateOptions, ) -> WorkspaceCreateOptions { + if options.workspace_kind == WorkspaceKind::Remote { + return options; + } + if options.workspace_kind == WorkspaceKind::Assistant { if options.display_name.is_none() { 
options.display_name = Some(Self::assistant_display_name( diff --git a/src/web-ui/src/app/App.tsx b/src/web-ui/src/app/App.tsx index 8aebb386..1b5d11d7 100644 --- a/src/web-ui/src/app/App.tsx +++ b/src/web-ui/src/app/App.tsx @@ -1,6 +1,7 @@ import { useEffect, useCallback, useState, useRef } from 'react'; import { ChatProvider, useAIInitialization } from '../infrastructure'; import { ViewModeProvider } from '../infrastructure/contexts/ViewModeContext'; +import { SSHRemoteProvider } from '../features/ssh-remote'; import AppLayout from './layout/AppLayout'; import { useCurrentModelConfig } from '../hooks/useModelConfigs'; import { ContextMenuRenderer } from '../shared/context-menu-system/components/ContextMenuRenderer'; @@ -217,32 +218,34 @@ function App() { return ( - - {/* Onboarding overlay (first launch) */} - {ENABLE_MAIN_ONBOARDING && isOnboardingActive && ( - - )} - - {/* Unified app layout with startup/workspace modes */} - - - {/* Context menu renderer */} - - - {/* Notification system */} - - - - {/* Confirm dialog */} - - - {/* Startup splash — sits above everything, exits once workspace is ready */} - {splashVisible && ( - - )} - + + + {/* Onboarding overlay (first launch) */} + {ENABLE_MAIN_ONBOARDING && isOnboardingActive && ( + + )} + + {/* Unified app layout with startup/workspace modes */} + + + {/* Context menu renderer */} + + + {/* Notification system */} + + + + {/* Confirm dialog */} + + + {/* Startup splash — sits above everything, exits once workspace is ready */} + {splashVisible && ( + + )} + + ); diff --git a/src/web-ui/src/app/components/NavPanel/MainNav.tsx b/src/web-ui/src/app/components/NavPanel/MainNav.tsx index 416fed70..d0dca917 100644 --- a/src/web-ui/src/app/components/NavPanel/MainNav.tsx +++ b/src/web-ui/src/app/components/NavPanel/MainNav.tsx @@ -40,6 +40,7 @@ import { workspaceManager } from '@/infrastructure/services/business/workspaceMa import { useWorkspaceContext } from '@/infrastructure/contexts/WorkspaceContext'; import { 
createLogger } from '@/shared/utils/logger'; import { WorkspaceKind } from '@/shared/types'; +import { useSSHRemoteContext, SSHConnectionDialog, RemoteFileBrowser } from '@/features/ssh-remote'; const DEFAULT_MODE_CONFIG_KEY = 'app.session_config.default_mode'; const NAV_DISPLAY_MODE_STORAGE_KEY = 'bitfun.nav.displayMode'; @@ -88,6 +89,17 @@ const MainNav: React.FC = ({ anchorNavSceneId = null, }) => { useMiniAppCatalogSync(); + + // SSH Remote state - use context instead of hook for consistent state + const sshRemote = useSSHRemoteContext(); + const [isSSHConnectionDialogOpen, setIsSSHConnectionDialogOpen] = useState(false); + + useEffect(() => { + if (sshRemote.showFileBrowser) { + setIsSSHConnectionDialogOpen(false); + } + }, [sshRemote.showFileBrowser]); + const { state, switchLeftPanelTab } = useApp(); const { openScene } = useSceneManager(); const openNavScene = useNavSceneStore(s => s.openNavScene); @@ -407,6 +419,23 @@ const MainNav: React.FC = ({ await switchWorkspace(targetWorkspace); }, [closeWorkspaceMenu, recentWorkspaces, switchWorkspace]); + // SSH Remote handlers + const handleOpenRemoteSSH = useCallback(() => { + closeWorkspaceMenu(); + setIsSSHConnectionDialogOpen(true); + }, [closeWorkspaceMenu]); + + const handleSelectRemoteWorkspace = useCallback(async (path: string) => { + try { + await sshRemote.openWorkspace(path); + sshRemote.setShowFileBrowser(false); + // Close the SSH connection dialog as well + setIsSSHConnectionDialogOpen(false); + } catch (err) { + log.error('Failed to open remote workspace', err); + } + }, [sshRemote]); + useEffect(() => { if (!workspaceMenuOpen) return; @@ -615,6 +644,17 @@ const MainNav: React.FC = ({ {t('header.newProject')} +
+ } + placement="bottom" + > +
+ + + + +
+ + ); +}; + +export default RemoteConnectionIndicator; diff --git a/src/web-ui/src/app/components/TitleBar/TitleBar.scss b/src/web-ui/src/app/components/TitleBar/TitleBar.scss index ccc27ff7..a76fdbdc 100644 --- a/src/web-ui/src/app/components/TitleBar/TitleBar.scss +++ b/src/web-ui/src/app/components/TitleBar/TitleBar.scss @@ -812,3 +812,56 @@ $_app-icon-size: 20px; opacity: 1 !important; } } + +// ==================== Remote Connection Indicator ==================== + +.remote-connection-indicator { + display: flex; + align-items: center; + justify-content: center; + position: relative; + color: var(--color-success, #22c55e); + cursor: default; + padding: 4px; + border-radius: 4px; + transition: background-color 0.15s ease; + + &:hover { + background-color: var(--element-bg-subtle); + } + + .remote-connection-dot { + position: absolute; + top: 4px; + right: 4px; + width: 6px; + height: 6px; + background-color: var(--color-success, #22c55e); + border-radius: 50%; + border: 1px solid var(--color-bg-primary); + } +} + +.remote-connection-tooltip { + .remote-connection-tooltip-title { + font-weight: 600; + margin-bottom: 4px; + } + + .remote-connection-tooltip-info { + display: flex; + flex-direction: column; + gap: 2px; + font-size: 12px; + + .remote-connection-name { + color: var(--color-text-secondary); + } + + .remote-connection-path { + font-family: monospace; + font-size: 11px; + color: var(--color-text-secondary); + } + } +} diff --git a/src/web-ui/src/app/components/TitleBar/TitleBar.tsx b/src/web-ui/src/app/components/TitleBar/TitleBar.tsx index 30fab75d..118b0fa1 100644 --- a/src/web-ui/src/app/components/TitleBar/TitleBar.tsx +++ b/src/web-ui/src/app/components/TitleBar/TitleBar.tsx @@ -23,6 +23,7 @@ import { NewProjectDialog } from '../NewProjectDialog'; import { AboutDialog } from '../AboutDialog'; import { AgentOrb } from './AgentOrb'; import NotificationButton from './NotificationButton'; +import { RemoteConnectionIndicator } from 
'./RemoteConnectionIndicator'; import { createLogger } from '@/shared/utils/logger'; const log = createLogger('TitleBar'); @@ -210,12 +211,12 @@ const TitleBar: React.FC = ({ void (async () => { try { - const { listen } = await import('@tauri-apps/api/event'); + const { api } = await import('@/infrastructure/api/service-api/ApiClient'); - unlistenFns.push(await listen('bitfun_menu_open_project', () => { void handleOpenProject(); })); - unlistenFns.push(await listen('bitfun_menu_new_project', () => { handleNewProject(); })); - unlistenFns.push(await listen('bitfun_menu_go_home', () => { handleGoHome(); })); - unlistenFns.push(await listen('bitfun_menu_about', () => { handleShowAbout(); })); + unlistenFns.push(await api.listen('bitfun_menu_open_project', () => { void handleOpenProject(); })); + unlistenFns.push(await api.listen('bitfun_menu_new_project', () => { handleNewProject(); })); + unlistenFns.push(await api.listen('bitfun_menu_go_home', () => { handleGoHome(); })); + unlistenFns.push(await api.listen('bitfun_menu_about', () => { handleShowAbout(); })); } catch (error) { log.debug('menubar listen failed', error); } @@ -306,6 +307,7 @@ const TitleBar: React.FC = ({ {/* Right: Notification + Settings + WindowControls */}
+ diff --git a/src/web-ui/src/app/scenes/shell/hooks/useShellEntries.ts b/src/web-ui/src/app/scenes/shell/hooks/useShellEntries.ts index bbd2fd16..9e8c33d5 100644 --- a/src/web-ui/src/app/scenes/shell/hooks/useShellEntries.ts +++ b/src/web-ui/src/app/scenes/shell/hooks/useShellEntries.ts @@ -118,7 +118,9 @@ function isSessionRunning(session: SessionResponse): boolean { } export function useShellEntries(): UseShellEntriesReturn { - const { workspacePath } = useCurrentWorkspace(); + const { workspacePath, workspace } = useCurrentWorkspace(); + const isRemote = workspace?.workspaceKind === 'remote'; + const currentConnectionId = workspace?.connectionId ?? null; const [sessions, setSessions] = useState([]); const [hubConfig, setHubConfig] = useState({ terminals: [], worktrees: {} }); @@ -154,11 +156,22 @@ export function useShellEntries(): UseShellEntriesReturn { } try { - setSessions(await service.listSessions()); + const allSessions = await service.listSessions(); + // Filter sessions based on current workspace type: + // - Remote workspace: only show terminals belonging to this connection + // - Local workspace: only show local (non-remote) terminals + const filtered = allSessions.filter(session => { + const isRemoteSession = session.shellType === 'Remote'; + if (isRemote) { + return isRemoteSession && session.connectionId === currentConnectionId; + } + return !isRemoteSession; + }); + setSessions(filtered); } catch (error) { log.error('Failed to list sessions', error); } - }, []); + }, [isRemote, currentConnectionId]); useEffect(() => { if (!workspacePath) { diff --git a/src/web-ui/src/features/ssh-remote/ConfirmDialog.scss b/src/web-ui/src/features/ssh-remote/ConfirmDialog.scss new file mode 100644 index 00000000..e2df491f --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/ConfirmDialog.scss @@ -0,0 +1,41 @@ +/** + * Confirm Dialog Styles + */ + +.confirm-dialog { + padding: 8px 0; + + &__warning { + display: flex; + align-items: center; + gap: 12px; + 
margin-bottom: 16px; + padding: 14px; + background: var(--color-error-bg); + border: 1px solid var(--color-error-border); + border-radius: 8px; + color: var(--color-error); + + svg { + flex-shrink: 0; + } + + span { + font-weight: 500; + font-size: 14px; + } + } + + &__message { + font-size: 14px; + color: var(--color-text-secondary); + margin: 0 0 20px 0; + line-height: 1.5; + } + + &__actions { + display: flex; + justify-content: flex-end; + gap: 12px; + } +} diff --git a/src/web-ui/src/features/ssh-remote/ConfirmDialog.tsx b/src/web-ui/src/features/ssh-remote/ConfirmDialog.tsx new file mode 100644 index 00000000..0d319395 --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/ConfirmDialog.tsx @@ -0,0 +1,73 @@ +/** + * Confirm Dialog Component + * Custom modal for confirmation prompts + */ + +import React from 'react'; +import { useI18n } from '@/infrastructure/i18n'; +import { Modal } from '@/component-library'; +import { Button } from '@/component-library'; +import { AlertTriangle } from 'lucide-react'; +import './ConfirmDialog.scss'; + +interface ConfirmDialogProps { + open: boolean; + title: string; + message: string; + confirmText?: string; + cancelText?: string; + onConfirm: () => void; + onCancel: () => void; + destructive?: boolean; +} + +export const ConfirmDialog: React.FC = ({ + open, + title, + message, + confirmText, + cancelText, + onConfirm, + onCancel, + destructive = false, +}) => { + const { t } = useI18n('common'); + + const handleConfirm = () => { + onConfirm(); + onCancel(); + }; + + return ( + +
+ {destructive && ( +
+ + {title} +
+ )} +

{message}

+
+ + +
+
+
+ ); +}; + +export default ConfirmDialog; diff --git a/src/web-ui/src/features/ssh-remote/PasswordInputDialog.scss b/src/web-ui/src/features/ssh-remote/PasswordInputDialog.scss new file mode 100644 index 00000000..e8e32cce --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/PasswordInputDialog.scss @@ -0,0 +1,61 @@ +/** + * Password Input Dialog Styles + */ + +.password-input-dialog { + padding: 8px 0; + + &__description { + display: flex; + align-items: center; + gap: 10px; + padding: 12px 14px; + background: var(--element-bg-subtle); + border: 1px solid var(--border-subtle); + border-radius: 6px; + margin-bottom: 16px; + + &-icon { + display: flex; + align-items: center; + justify-content: center; + width: 32px; + height: 32px; + background: var(--color-accent-100); + border-radius: 6px; + color: var(--color-accent-500); + flex-shrink: 0; + } + + &-text { + font-size: 13px; + color: var(--color-text-secondary); + font-family: var(--font-family-mono); + word-break: break-all; + } + } + + &__input { + margin-bottom: 16px; + } + + &__actions { + display: flex; + justify-content: flex-end; + gap: 12px; + } + + &__spinner { + animation: spin 1s linear infinite; + margin-right: 6px; + } +} + +@keyframes spin { + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } +} diff --git a/src/web-ui/src/features/ssh-remote/PasswordInputDialog.tsx b/src/web-ui/src/features/ssh-remote/PasswordInputDialog.tsx new file mode 100644 index 00000000..f52edd35 --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/PasswordInputDialog.tsx @@ -0,0 +1,119 @@ +/** + * Password Input Dialog Component + * Custom modal for secure password/key passphrase input + */ + +import React, { useState, useRef, useEffect } from 'react'; +import { useI18n } from '@/infrastructure/i18n'; +import { Modal } from '@/component-library'; +import { Button } from '@/component-library'; +import { Input } from '@/component-library'; +import { Lock, Key, Loader2 } from 'lucide-react'; 
+import './PasswordInputDialog.scss'; + +interface PasswordInputDialogProps { + open: boolean; + title: string; + description?: string; + placeholder?: string; + isKeyPath?: boolean; + isConnecting?: boolean; + onSubmit: (value: string) => void; + onCancel: () => void; +} + +export const PasswordInputDialog: React.FC = ({ + open, + title, + description, + placeholder = '', + isKeyPath = false, + isConnecting = false, + onSubmit, + onCancel, +}) => { + const { t } = useI18n('common'); + const [value, setValue] = useState(''); + const inputRef = useRef(null); + + // Focus input when dialog opens + useEffect(() => { + if (open) { + setValue(''); + setTimeout(() => { + inputRef.current?.focus(); + }, 100); + } + }, [open]); + + const handleSubmit = () => { + if (value.trim()) { + onSubmit(value.trim()); + } + }; + + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key === 'Enter' && !isConnecting) { + e.preventDefault(); + handleSubmit(); + } + if (e.key === 'Escape') { + onCancel(); + } + }; + + return ( + +
+ {description && ( +
+
+ {isKeyPath ? : } +
+ {description} +
+ )} +
+ setValue(e.target.value)} + onKeyDown={handleKeyDown} + placeholder={placeholder} + prefix={isKeyPath ? : } + size="medium" + disabled={isConnecting} + /> +
+
+ + +
+
+
+ ); +}; + +export default PasswordInputDialog; diff --git a/src/web-ui/src/features/ssh-remote/RemoteFileBrowser.scss b/src/web-ui/src/features/ssh-remote/RemoteFileBrowser.scss new file mode 100644 index 00000000..810909fb --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/RemoteFileBrowser.scss @@ -0,0 +1,512 @@ +/** + * Remote File Browser Styles + */ + +.remote-file-browser-overlay { + position: fixed; + inset: 0; + display: flex; + align-items: center; + justify-content: center; + background: rgba(0, 0, 0, 0.6); + backdrop-filter: blur(4px); + z-index: 260; + animation: fadeIn 0.2s ease; +} + +@keyframes fadeIn { + from { opacity: 0; } + to { opacity: 1; } +} + +.remote-file-browser { + display: flex; + flex-direction: column; + height: 80vh; + width: 90vw; + max-width: 1000px; + background: var(--color-bg-secondary); + border: 1px solid var(--border-subtle); + border-radius: 12px; + box-shadow: 0 24px 48px rgba(0, 0, 0, 0.5), 0 0 0 1px rgba(255, 255, 255, 0.03); + overflow: hidden; + animation: slideIn 0.3s cubic-bezier(0.16, 1, 0.3, 1); + + &__header { + display: flex; + align-items: center; + justify-content: space-between; + padding: 16px 20px; + border-bottom: 1px solid var(--border-subtle); + flex-shrink: 0; + + &-title { + font-size: 16px; + font-weight: 600; + color: var(--color-text-primary); + margin: 0; + } + } + + &__close-btn { + display: flex; + align-items: center; + justify-content: center; + width: 32px; + height: 32px; + padding: 0; + background: transparent; + border: none; + border-radius: 6px; + color: var(--color-text-muted); + cursor: pointer; + transition: all var(--motion-fast) var(--easing-standard); + + &:hover { + background: var(--element-bg-subtle); + color: var(--color-text-primary); + } + } + + // Path breadcrumb + &__breadcrumb { + display: flex; + align-items: center; + gap: 4px; + padding: 10px 20px; + background: var(--color-bg-tertiary); + border-bottom: 1px solid var(--border-subtle); + overflow-x: auto; + 
flex-shrink: 0; + } + + &__breadcrumb-path { + display: flex; + align-items: center; + gap: 4px; + flex: 1; + min-width: 0; + cursor: text; + border-radius: 6px; + padding: 2px 4px; + margin: -2px -4px; + transition: background var(--motion-fast) var(--easing-standard); + + &:hover { + background: color-mix(in srgb, var(--element-bg-soft) 60%, transparent); + } + } + + &__path-input { + flex: 1; + min-width: 0; + padding: 4px 8px; + background: var(--color-bg-primary); + border: 1px solid var(--color-accent-500); + border-radius: 6px; + font-size: 13px; + font-family: var(--font-family-mono); + color: var(--color-text-primary); + outline: none; + } + + &__breadcrumb-btn { + display: inline-flex; + align-items: center; + padding: 4px 8px; + background: transparent; + border: none; + border-radius: 4px; + font-size: 13px; + color: var(--color-text-secondary); + cursor: pointer; + white-space: nowrap; + transition: all var(--motion-fast) var(--easing-standard); + + &:hover { + background: var(--element-bg-subtle); + color: var(--color-text-primary); + } + + &--current { + color: var(--color-text-primary); + font-weight: 500; + } + } + + &__breadcrumb-sep { + color: var(--color-text-muted); + flex-shrink: 0; + } + + &__breadcrumb-current { + font-size: 13px; + color: var(--color-text-primary); + font-weight: 500; + padding: 4px 8px; + } + + // Toolbar + &__toolbar { + display: flex; + align-items: center; + gap: 8px; + padding: 10px 20px; + border-bottom: 1px solid var(--border-subtle); + flex-shrink: 0; + } + + &__toolbar-btn { + display: flex; + align-items: center; + justify-content: center; + width: 32px; + height: 32px; + padding: 0; + background: transparent; + border: none; + border-radius: 6px; + color: var(--color-text-muted); + cursor: pointer; + transition: all var(--motion-fast) var(--easing-standard); + + &:hover:not(:disabled) { + background: var(--element-bg-subtle); + color: var(--color-text-primary); + } + + &:disabled { + opacity: 0.4; + cursor: 
not-allowed; + } + } + + // Content area + &__content { + flex: 1; + overflow: auto; + padding: 0; + } + + &__error { + display: flex; + align-items: center; + justify-content: space-between; + margin: 16px 20px; + padding: 12px 14px; + background: var(--color-error-bg); + border: 1px solid var(--color-error-border); + border-radius: 6px; + font-size: 13px; + color: var(--color-error); + + button { + background: none; + border: none; + color: var(--color-error); + cursor: pointer; + font-size: 16px; + padding: 0 4px; + } + } + + &__loading { + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + gap: 12px; + padding: 64px; + color: var(--color-text-muted); + font-size: 13px; + } + + &__spinner { + animation: spin 1s linear infinite; + color: var(--color-accent-500); + } + + &__table { + width: 100%; + border-collapse: collapse; + } + + &__thead { + position: sticky; + top: 0; + background: var(--color-bg-secondary); + z-index: 1; + + th { + padding: 10px 20px; + font-size: 11px; + font-weight: 600; + color: var(--color-text-muted); + text-align: left; + border-bottom: 1px solid var(--border-subtle); + text-transform: uppercase; + letter-spacing: 0.5px; + white-space: nowrap; + } + } + + &__th { + &--name { + width: auto; + } + &--size { + width: 100px; + } + &--date { + width: 120px; + } + } + + &__tbody { + tr { + border-bottom: 1px solid var(--border-subtle); + transition: background var(--motion-fast) var(--easing-standard); + cursor: pointer; + + &:hover { + background: var(--element-bg-subtle); + } + + &--selected { + background: var(--color-accent-50); + } + + &--parent { + color: var(--color-text-muted); + } + } + + td { + padding: 10px 20px; + font-size: 13px; + color: var(--color-text-primary); + } + } + + &__td { + &--name { + display: flex; + align-items: center; + gap: 10px; + } + &--size { + color: var(--color-text-muted); + font-family: var(--font-family-mono); + font-size: 12px; + } + &--date { + color: 
var(--color-text-muted); + font-size: 12px; + } + } + + &__entry-icon { + flex-shrink: 0; + color: var(--color-text-muted); + + &--parent { + color: var(--color-text-muted); + } + + &--link { + color: var(--color-purple-500); + } + + &--file { + color: var(--color-text-secondary); + } + } + + &__name { + font-family: var(--font-family-mono); + font-size: 13px; + } + + &__empty { + text-align: center; + padding: 48px 20px !important; + color: var(--color-text-muted); + } + + // Context menu + &__context-menu { + position: fixed; + min-width: 160px; + background: var(--color-bg-secondary); + border: 1px solid var(--border-base); + border-radius: 8px; + box-shadow: 0 8px 24px rgba(0, 0, 0, 0.4); + padding: 4px; + z-index: 100; + } + + &__context-menu-item { + display: flex; + align-items: center; + gap: 10px; + width: 100%; + padding: 8px 12px; + background: transparent; + border: none; + border-radius: 4px; + font-size: 13px; + color: var(--color-text-secondary); + cursor: pointer; + text-align: left; + transition: all var(--motion-fast) var(--easing-standard); + + svg { + color: var(--color-text-muted); + } + + &:hover { + background: var(--element-bg-subtle); + color: var(--color-text-primary); + + svg { + color: var(--color-text-primary); + } + } + + &--danger { + color: var(--color-error); + + svg { + color: var(--color-error); + } + + &:hover { + background: var(--color-error-bg); + } + } + } + + &__context-menu-icon { + font-size: 14px; + width: 16px; + text-align: center; + } + + &__context-menu-divider { + height: 1px; + background: var(--border-subtle); + margin: 4px 0; + } + + // Dialog overlay + &__dialog-overlay { + position: fixed; + inset: 0; + display: flex; + align-items: center; + justify-content: center; + background: rgba(0, 0, 0, 0.6); + z-index: 270; + } + + &__dialog { + width: 100%; + max-width: 380px; + background: var(--color-bg-secondary); + border: 1px solid var(--border-subtle); + border-radius: 10px; + box-shadow: 0 16px 48px rgba(0, 0, 
0, 0.5); + padding: 20px; + } + + &__dialog-title { + font-size: 16px; + font-weight: 600; + color: var(--color-text-primary); + margin: 0 0 16px 0; + } + + &__dialog-input { + width: 100%; + padding: 10px 12px; + background: var(--color-bg-primary); + border: 1px solid var(--border-base); + border-radius: 6px; + font-size: 14px; + font-family: var(--font-family-mono); + color: var(--color-text-primary); + outline: none; + transition: border-color var(--motion-fast) var(--easing-standard); + box-sizing: border-box; + + &::placeholder { + color: var(--color-text-muted); + } + + &:focus { + border-color: var(--color-accent-500); + } + } + + &__dialog-actions { + display: flex; + justify-content: flex-end; + gap: 12px; + margin-top: 20px; + } + + // Footer + &__footer { + display: flex; + align-items: center; + justify-content: space-between; + padding: 14px 20px; + border-top: 1px solid var(--border-subtle); + flex-shrink: 0; + background: var(--color-bg-secondary); + } + + &__footer-info { + font-size: 13px; + color: var(--color-text-muted); + } + + &__footer-label { + color: var(--color-text-muted); + } + + &__footer-path { + font-family: var(--font-family-mono); + font-size: 12px; + color: var(--color-text-secondary); + background: var(--element-bg-subtle); + padding: 2px 8px; + border-radius: 4px; + } + + &__footer-hint { + color: var(--color-text-muted); + font-style: italic; + } + + &__footer-actions { + display: flex; + gap: 12px; + } +} + +@keyframes spin { + from { transform: rotate(0deg); } + to { transform: rotate(360deg); } +} + +@keyframes slideIn { + from { + opacity: 0; + transform: translateY(20px) scale(0.97); + } + to { + opacity: 1; + transform: translateY(0) scale(1); + } +} diff --git a/src/web-ui/src/features/ssh-remote/RemoteFileBrowser.tsx b/src/web-ui/src/features/ssh-remote/RemoteFileBrowser.tsx new file mode 100644 index 00000000..8b623ea2 --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/RemoteFileBrowser.tsx @@ -0,0 +1,521 @@ +/** + 
* Remote File Browser Component + * Used to browse and select remote directory as workspace + */ + +import React, { useState, useEffect, useRef } from 'react'; +import { useI18n } from '@/infrastructure/i18n'; +import { Button } from '@/component-library'; +import { ConfirmDialog } from './ConfirmDialog'; +import type { RemoteFileEntry } from './types'; +import { sshApi } from './sshApi'; +import { + X, + RefreshCw, + Folder, + File, + Link, + ChevronRight, + Home, + ArrowLeft, + Loader2, +} from 'lucide-react'; +import './RemoteFileBrowser.scss'; + +interface RemoteFileBrowserProps { + connectionId: string; + initialPath?: string; + onSelect: (path: string) => void; + onCancel: () => void; +} + +interface ContextMenuState { + show: boolean; + x: number; + y: number; + entry: RemoteFileEntry | null; +} + +interface DeleteConfirmState { + show: boolean; + entry: RemoteFileEntry | null; +} + +export const RemoteFileBrowser: React.FC = ({ + connectionId, + initialPath = '/', + onSelect, + onCancel, +}) => { + const { t } = useI18n('common'); + const [currentPath, setCurrentPath] = useState(initialPath); + const [pathInputValue, setPathInputValue] = useState(initialPath); + const [isEditingPath, setIsEditingPath] = useState(false); + const pathInputRef = useRef(null); + const [entries, setEntries] = useState([]); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + const [selectedPath, setSelectedPath] = useState(null); + const [contextMenu, setContextMenu] = useState({ + show: false, + x: 0, + y: 0, + entry: null, + }); + const [renameEntry, setRenameEntry] = useState(null); + const [renameValue, setRenameValue] = useState(''); + const [deleteConfirm, setDeleteConfirm] = useState({ + show: false, + entry: null, + }); + const contextMenuRef = useRef(null); + + useEffect(() => { + loadDirectory(currentPath); + }, [currentPath]); + + // Close context menu when clicking outside + useEffect(() => { + const handleClickOutside = (e: 
MouseEvent) => { + if (contextMenuRef.current && !contextMenuRef.current.contains(e.target as Node)) { + setContextMenu({ show: false, x: 0, y: 0, entry: null }); + } + }; + document.addEventListener('mousedown', handleClickOutside); + return () => document.removeEventListener('mousedown', handleClickOutside); + }, []); + + const loadDirectory = async (path: string) => { + setLoading(true); + setError(null); + try { + const result = await sshApi.readDir(connectionId, path); + // Sort: directories first, then by name + result.sort((a, b) => { + if (a.isDir !== b.isDir) return a.isDir ? -1 : 1; + return a.name.localeCompare(b.name); + }); + setEntries(result); + } catch (e) { + setError(e instanceof Error ? e.message : 'Failed to load directory'); + setEntries([]); + } finally { + setLoading(false); + } + }; + + const navigateTo = (path: string) => { + setCurrentPath(path); + setPathInputValue(path); + setSelectedPath(null); + setIsEditingPath(false); + }; + + const handlePathInputKeyDown = (e: React.KeyboardEvent) => { + if (e.key === 'Enter') { + const val = pathInputValue.trim(); + if (val) { + navigateTo(val.startsWith('/') ? 
val : `/${val}`); + } + } else if (e.key === 'Escape') { + setPathInputValue(currentPath); + setIsEditingPath(false); + } + }; + + const handlePathInputBlur = () => { + setPathInputValue(currentPath); + setIsEditingPath(false); + }; + + const handleEntryClick = (entry: RemoteFileEntry) => { + if (entry.isDir) { + navigateTo(entry.path); + } else { + setSelectedPath(entry.path); + } + setContextMenu({ show: false, x: 0, y: 0, entry: null }); + }; + + const handleEntryDoubleClick = (entry: RemoteFileEntry) => { + if (entry.isDir) { + navigateTo(entry.path); + } + }; + + const handleContextMenu = (e: React.MouseEvent, entry: RemoteFileEntry) => { + e.preventDefault(); + setContextMenu({ + show: true, + x: e.clientX, + y: e.clientY, + entry, + }); + }; + + const handleContextMenuAction = async (action: string) => { + if (!contextMenu.entry) return; + + const entry = contextMenu.entry; + setContextMenu({ show: false, x: 0, y: 0, entry: null }); + + try { + switch (action) { + case 'delete': + setDeleteConfirm({ show: true, entry }); + break; + case 'open': + if (entry.isDir) { + navigateTo(entry.path); + } else { + onSelect(entry.path); + } + break; + case 'rename': + setRenameEntry(entry); + setRenameValue(entry.name); + break; + } + } catch (e) { + setError(e instanceof Error ? e.message : 'Operation failed'); + } + }; + + const handleDeleteConfirm = async () => { + if (!deleteConfirm.entry) return; + const entry = deleteConfirm.entry; + setDeleteConfirm({ show: false, entry: null }); + + try { + await sshApi.remove(connectionId, entry.path, entry.isDir); + loadDirectory(currentPath); + } catch (e) { + setError(e instanceof Error ? e.message : 'Delete failed'); + } + }; + + const handleRename = async () => { + if (!renameEntry || !renameValue.trim()) return; + if (renameValue.trim() === renameEntry.name) { + setRenameEntry(null); + return; + } + + const parentPath = getParentPath(renameEntry.path) || '/'; + const newPath = parentPath.endsWith('/') + ? 
`${parentPath}${renameValue.trim()}` + : `${parentPath}/${renameValue.trim()}`; + + try { + await sshApi.rename(connectionId, renameEntry.path, newPath); + setRenameEntry(null); + loadDirectory(currentPath); + } catch (e) { + setError(e instanceof Error ? e.message : 'Failed to rename'); + } + }; + + const getParentPath = (path: string): string | null => { + if (path === '/') return null; + const parts = path.split('/').filter(Boolean); + parts.pop(); + return '/' + parts.join('/'); + }; + + const openSelectedWorkspace = () => { + onSelect(selectedPath || currentPath); + }; + + const formatFileSize = (bytes?: number): string => { + if (!bytes) return '-'; + if (bytes < 1024) return `${bytes} B`; + if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`; + if (bytes < 1024 * 1024 * 1024) return `${(bytes / 1024 / 1024).toFixed(1)} MB`; + return `${(bytes / 1024 / 1024 / 1024).toFixed(1)} GB`; + }; + + const formatDate = (timestamp?: number): string => { + if (!timestamp) return '-'; + return new Date(timestamp).toLocaleDateString(); + }; + + const getEntryIcon = (entry: RemoteFileEntry) => { + if (entry.isDir) return ; + if (entry.isSymlink) return ; + return ; + }; + + const pathParts = currentPath.split('/').filter(Boolean); + + return ( +
+
+ {/* Header */} +
+

+ {t('ssh.remote.selectWorkspace')} +

+ +
+ + {/* Path Breadcrumb / Input */} +
+ {isEditingPath ? ( + setPathInputValue(e.target.value)} + onKeyDown={handlePathInputKeyDown} + onBlur={handlePathInputBlur} + autoFocus + spellCheck={false} + /> + ) : ( +
{ + setIsEditingPath(true); + setTimeout(() => pathInputRef.current?.select(), 0); + }} + title={t('ssh.remote.clickToEditPath') || 'Click to edit path'} + > + + + {pathParts.length === 0 ? ( + / + ) : ( + pathParts.map((part, index) => { + const path = '/' + pathParts.slice(0, index + 1).join('/'); + const isLast = index === pathParts.length - 1; + return ( + + + {!isLast && } + + ); + }) + )} +
+ )} +
+ + {/* Toolbar */} +
+ + +
+ + {/* File List */} +
+ {error && ( +
+ {error} + +
+ )} + + {loading ? ( +
+ + Loading... +
+ ) : ( + + + + + + + + + + {/* Parent directory link */} + {currentPath !== '/' && ( + { + const parent = getParentPath(currentPath); + if (parent !== null) navigateTo(parent); + }} + className="remote-file-browser__row remote-file-browser__row--parent" + > + + + )} + {entries.map((entry) => ( + handleEntryClick(entry)} + onDoubleClick={() => handleEntryDoubleClick(entry)} + onContextMenu={(e) => handleContextMenu(e, entry)} + className={`remote-file-browser__row ${selectedPath === entry.path ? 'remote-file-browser__row--selected' : ''}`} + > + + + + + ))} + {entries.length === 0 && !loading && ( + + + + )} + +
+ {t('ssh.remote.name')} + + {t('ssh.remote.size')} + + {t('ssh.remote.modified')} +
+ + .. +
+ {getEntryIcon(entry)} + {entry.name} + + {entry.isDir ? '-' : formatFileSize(entry.size)} + + {formatDate(entry.modified)} +
+ {t('ssh.remote.emptyDirectory')} +
+ )} +
+ + {/* Context Menu */} + {contextMenu.show && contextMenu.entry && ( +
+ + +
+ +
+ )} + + {/* Rename Dialog */} + {renameEntry && ( +
+
+

{t('ssh.remote.rename')}

+ setRenameValue(e.target.value)} + className="remote-file-browser__dialog-input" + autoFocus + onKeyDown={(e) => { + if (e.key === 'Enter') handleRename(); + if (e.key === 'Escape') setRenameEntry(null); + }} + /> +
+ + +
+
+
+ )} + + {/* Delete Confirmation Dialog */} + setDeleteConfirm({ show: false, entry: null })} + destructive + /> + + {/* Footer */} +
+
+ {selectedPath ? ( + <> + {t('ssh.remote.selected')}: + {selectedPath} + + ) : ( + {t('ssh.remote.clickToSelect')} + )} +
+
+ + +
+
+
+
+ ); +}; + +export default RemoteFileBrowser; diff --git a/src/web-ui/src/features/ssh-remote/SSHConnectionDialog.scss b/src/web-ui/src/features/ssh-remote/SSHConnectionDialog.scss new file mode 100644 index 00000000..cc4aafe3 --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/SSHConnectionDialog.scss @@ -0,0 +1,220 @@ +/** + * SSH Connection Dialog Styles + */ + +.ssh-connection-dialog { + padding: 16px 0 8px; + + &__error { + margin-bottom: 16px; + padding: 0 16px; + } + + &__section { + margin-bottom: 16px; + padding: 0 16px; + } + + &__section-title { + font-size: 11px; + font-weight: 600; + color: var(--color-text-muted); + margin: 0 0 10px 0; + text-transform: uppercase; + letter-spacing: 0.5px; + } + + &__saved-list { + display: flex; + flex-direction: column; + gap: 6px; + } + + &__saved-item { + display: flex; + align-items: center; + gap: 10px; + padding: 8px 12px; + border: 1px solid var(--border-subtle); + border-radius: 8px; + background: var(--card-bg-default); + transition: all var(--motion-fast) var(--easing-standard); + cursor: pointer; + position: relative; + + &:hover { + border-color: var(--border-accent); + background: var(--card-bg-hover); + } + + &:active { + transform: scale(0.99); + } + + &--config { + .ssh-connection-dialog__saved-icon { + background: var(--color-success-100); + color: var(--color-success-500); + } + } + } + + &__saved-icon { + display: flex; + align-items: center; + justify-content: center; + width: 32px; + height: 32px; + background: var(--color-accent-100); + border-radius: 8px; + color: var(--color-accent-500); + flex-shrink: 0; + } + + &__saved-info { + display: flex; + flex-direction: column; + gap: 2px; + min-width: 0; + flex: 1; + } + + &__saved-name { + font-size: 14px; + font-weight: 500; + color: var(--color-text-primary); + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + } + + &__saved-detail { + font-size: 12px; + color: var(--color-text-muted); + font-family: var(--font-family-mono); 
+ white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + } + + &__saved-meta { + display: flex; + align-items: center; + flex-shrink: 0; + } + + &__saved-auth { + display: inline-flex; + align-items: center; + gap: 4px; + padding: 4px 8px; + background: var(--element-bg-subtle); + border: 1px solid var(--border-subtle); + border-radius: 4px; + font-size: 11px; + color: var(--color-text-muted); + white-space: nowrap; + } + + &__saved-actions { + display: flex; + align-items: center; + gap: 6px; + flex-shrink: 0; + opacity: 0; + transition: opacity var(--motion-fast) var(--easing-standard); + + .ssh-connection-dialog__saved-item:hover & { + opacity: 1; + } + } + + &__delete-btn { + color: var(--color-text-muted); + opacity: 0.7; + + &:hover { + color: var(--color-error); + opacity: 1; + } + } + + &__divider { + display: flex; + align-items: center; + gap: 16px; + margin: 16px 16px; + padding-top: 8px; + + &::before, + &::after { + content: ''; + flex: 1; + height: 1px; + background: var(--border-subtle); + } + + span { + font-size: 11px; + font-weight: 500; + color: var(--color-text-muted); + white-space: nowrap; + text-transform: uppercase; + letter-spacing: 0.5px; + } + } + + &__form { + display: flex; + flex-direction: column; + gap: 14px; + padding: 0 16px; + } + + &__row { + display: flex; + gap: 12px; + } + + &__field { + &--flex { + flex: 1; + } + + &--port { + width: 100px; + flex-shrink: 0; + } + } + + &__label { + display: block; + font-size: 12px; + font-weight: 500; + color: var(--color-text-muted); + margin-bottom: 6px; + } + + &__actions { + display: flex; + justify-content: flex-end; + gap: 12px; + margin-top: 20px; + padding: 16px 16px 8px; + border-top: 1px solid var(--border-subtle); + } + + &__spinner { + animation: spin 1s linear infinite; + margin-right: 6px; + } +} + +@keyframes spin { + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } +} diff --git 
a/src/web-ui/src/features/ssh-remote/SSHConnectionDialog.tsx b/src/web-ui/src/features/ssh-remote/SSHConnectionDialog.tsx new file mode 100644 index 00000000..ac19c464 --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/SSHConnectionDialog.tsx @@ -0,0 +1,622 @@ +/** + * SSH Connection Dialog Component + * Professional SSH connection dialog following BitFun design patterns + */ + +import React, { useState, useEffect } from 'react'; +import { useI18n } from '@/infrastructure/i18n'; +import { useSSHRemoteContext } from './SSHRemoteProvider'; +import { PasswordInputDialog } from './PasswordInputDialog'; +import { Modal } from '@/component-library'; +import { Button } from '@/component-library'; +import { Input } from '@/component-library'; +import { Select } from '@/component-library'; +import { Alert } from '@/component-library'; +import { Loader2, Server, User, Key, Lock, Terminal, Trash2, Plus, Pencil, Play } from 'lucide-react'; +import type { + SSHConnectionConfig, + SSHAuthMethod, + SavedConnection, + SSHConfigEntry, +} from './types'; +import { sshApi } from './sshApi'; +import './SSHConnectionDialog.scss'; + +interface PasswordPromptState { + type: 'password' | 'keyPath'; + savedConnection: SavedConnection; +} + +interface SSHConnectionDialogProps { + open: boolean; + onClose: () => void; +} + +export const SSHConnectionDialog: React.FC = ({ + open, + onClose, +}) => { + const { t } = useI18n('common'); + const { connect, status, connectionError, clearError } = useSSHRemoteContext(); + const [savedConnections, setSavedConnections] = useState([]); + const [sshConfigHosts, setSSHConfigHosts] = useState([]); + const [localError, setLocalError] = useState(null); + const [isConnecting, setIsConnecting] = useState(false); + const [passwordPrompt, setPasswordPrompt] = useState(null); + + const error = localError || connectionError; + + // Clear errors when dialog opens + useEffect(() => { + if (open) { + clearError(); + setLocalError(null); + 
loadSavedConnections(); + loadSSHConfigHosts(); + } + }, [open]); + + // Form state + const [formData, setFormData] = useState({ + name: '', + host: '', + port: '22', + username: '', + authType: 'password' as 'password' | 'privateKey' | 'agent', + password: '', + keyPath: '~/.ssh/id_rsa', + passphrase: '', + }); + + const loadSavedConnections = async () => { + setLocalError(null); + try { + const connections = await sshApi.listSavedConnections(); + setSavedConnections(connections); + } catch (e) { + setSavedConnections([]); + } + }; + + const loadSSHConfigHosts = async () => { + try { + const hosts = await sshApi.listSSHConfigHosts(); + setSSHConfigHosts(hosts); + } catch (e) { + setSSHConfigHosts([]); + } + }; + + // Load SSH config from ~/.ssh/config when host changes + useEffect(() => { + if (!formData.host.trim()) return; + + const loadSSHConfig = async () => { + try { + const result = await sshApi.getSSHConfig(formData.host.trim()); + if (result.found && result.config) { + const config = result.config; + // Auto-fill fields from SSH config if they're not already set + setFormData((prev) => ({ + ...prev, + port: config.port ? String(config.port) : prev.port, + username: config.user || prev.username, + keyPath: config.identityFile || prev.keyPath, + // If identity file is set, default to privateKey auth + authType: config.identityFile ? 
'privateKey' : prev.authType, + })); + } + } catch (e) { + // Silently ignore SSH config errors + console.debug('Failed to load SSH config:', e); + } + }; + + // Debounce the SSH config lookup + const timeout = setTimeout(loadSSHConfig, 300); + return () => clearTimeout(timeout); + }, [formData.host]); + + const handleInputChange = (field: string, value: string) => { + setFormData((prev) => ({ ...prev, [field]: value })); + }; + + const generateConnectionId = (host: string, port: number, username: string) => { + return `ssh-${username}@${host}:${port}`; + }; + + const buildAuthMethod = (): SSHAuthMethod => { + switch (formData.authType) { + case 'password': + return { type: 'Password', password: formData.password }; + case 'privateKey': + return { + type: 'PrivateKey', + keyPath: formData.keyPath, + passphrase: formData.passphrase || undefined, + }; + case 'agent': + return { type: 'Agent' }; + } + }; + + const handleConnect = async () => { + // Validation + if (!formData.host.trim()) { + setLocalError(t('ssh.remote.hostRequired')); + return; + } + if (!formData.username.trim()) { + setLocalError(t('ssh.remote.usernameRequired')); + return; + } + const port = parseInt(formData.port, 10); + if (isNaN(port) || port < 1 || port > 65535) { + setLocalError(t('ssh.remote.portInvalid')); + return; + } + if (formData.authType === 'password' && !formData.password) { + setLocalError(t('ssh.remote.passwordRequired')); + return; + } + if (formData.authType === 'privateKey' && !formData.keyPath.trim()) { + setLocalError(t('ssh.remote.keyPathRequired')); + return; + } + + const config: SSHConnectionConfig = { + id: generateConnectionId(formData.host.trim(), port, formData.username.trim()), + name: formData.name || `${formData.username}@${formData.host}`, + host: formData.host.trim(), + port, + username: formData.username.trim(), + auth: buildAuthMethod(), + }; + + setIsConnecting(true); + setLocalError(null); + try { + await sshApi.saveConnection(config); + await 
connect(config.id, config); + // Don't call onClose() here - connect() handles closing the dialog via context + } catch (e) { + setLocalError(e instanceof Error ? e.message : 'Connection failed'); + } finally { + setIsConnecting(false); + } + }; + + const handleQuickConnect = async (conn: SavedConnection) => { + setLocalError(null); + + if (conn.authType.type === 'Password') { + setPasswordPrompt({ type: 'password', savedConnection: conn }); + } else { + const auth: SSHAuthMethod = conn.authType.type === 'PrivateKey' + ? { type: 'PrivateKey', keyPath: conn.authType.keyPath } + : { type: 'Agent' }; + + setIsConnecting(true); + try { + await connect(conn.id, { + id: conn.id, + name: conn.name, + host: conn.host, + port: conn.port, + username: conn.username, + auth, + }); + } catch (e) { + setLocalError(e instanceof Error ? e.message : 'Connection failed'); + } finally { + setIsConnecting(false); + } + } + }; + + // Handle connecting to a host from ~/.ssh/config + const handleSSHConfigConnect = async (configHost: SSHConfigEntry) => { + setLocalError(null); + + // Build hostname - if hostname is set in config, use it; otherwise use the host alias + const hostname = configHost.hostname || configHost.host; + const username = configHost.user || ''; + const port = configHost.port || 22; + + // Deterministic ID based on host:port:username + const connectionId = generateConnectionId(hostname, port, username); + + // Build connection name - use alias as name + const name = configHost.host; + + // Determine auth method - only use private key if identityFile is valid + const hasValidIdentityFile = configHost.identityFile && configHost.identityFile.trim() !== ''; + + const authConfig: SSHConnectionConfig = { + id: connectionId, + name, + host: hostname, + port, + username, + auth: hasValidIdentityFile + ? { type: 'PrivateKey', keyPath: configHost.identityFile! 
} + : { type: 'Agent' }, + }; + + setIsConnecting(true); + try { + await connect(connectionId, authConfig); + } catch (e) { + setLocalError(e instanceof Error ? e.message : 'Authentication failed'); + } finally { + setIsConnecting(false); + } + }; + + const handlePasswordPromptSubmit = async (value: string) => { + if (!passwordPrompt) return; + + const conn = passwordPrompt.savedConnection; + // Don't clear passwordPrompt yet - keep dialog open during connection + + setIsConnecting(true); + setLocalError(null); + try { + if (passwordPrompt.type === 'password') { + await connect(conn.id, { + id: conn.id, + name: conn.name, + host: conn.host, + port: conn.port, + username: conn.username, + auth: { type: 'Password', password: value }, + }); + } else { + await connect(conn.id, { + id: conn.id, + name: conn.name, + host: conn.host, + port: conn.port, + username: conn.username, + auth: { type: 'PrivateKey', keyPath: value }, + }); + } + // Success - clear password prompt + setPasswordPrompt(null); + // Close the main dialog - connect() sets showConnectionDialog(false) internally + // but for reconnection we need to also close via onClose + onClose(); + } catch (e) { + // Keep password prompt visible so user can retry + setLocalError(e instanceof Error ? e.message : 'Connection failed'); + } finally { + setIsConnecting(false); + } + }; + + const handlePasswordPromptCancel = () => { + setPasswordPrompt(null); + setLocalError(null); + }; + + const handleEditConnection = (e: React.MouseEvent, conn: SavedConnection) => { + e.stopPropagation(); + const keyPath = conn.authType.type === 'PrivateKey' ? conn.authType.keyPath : '~/.ssh/id_rsa'; + setFormData({ + name: conn.name, + host: conn.host, + port: String(conn.port), + username: conn.username, + authType: conn.authType.type === 'Password' ? 'password' + : conn.authType.type === 'PrivateKey' ? 
'privateKey' + : 'agent', + password: '', + keyPath, + passphrase: '', + }); + }; + + const handleDeleteConnection = async (e: React.MouseEvent, connectionId: string) => { + e.stopPropagation(); + try { + await sshApi.deleteConnection(connectionId); + await loadSavedConnections(); + } catch (err) { + setLocalError(err instanceof Error ? err.message : 'Failed to delete'); + } + }; + + const authOptions = [ + { label: t('ssh.remote.password') || 'Password', value: 'password', icon: }, + { label: t('ssh.remote.privateKey') || 'Private Key', value: 'privateKey', icon: }, + { label: t('ssh.remote.sshAgent') || 'SSH Agent', value: 'agent', icon: }, + ]; + + if (!open) return null; + + return ( + <> + +
+ {/* Error display */} + {error && ( +
+ setLocalError(null)} + /> +
+ )} + + {/* Saved connections section */} + {savedConnections.length > 0 && ( +
+

+ {t('ssh.remote.savedConnections')} +

+
+ {savedConnections.map((conn) => ( +
!isConnecting && handleQuickConnect(conn)} + role="button" + tabIndex={0} + onKeyDown={(e) => e.key === 'Enter' && !isConnecting && handleQuickConnect(conn)} + > +
+ +
+
+ {conn.name} + + {conn.username}@{conn.host}:{conn.port} + +
+
+ + + +
+
+ ))} +
+
+ )} + + {/* SSH Config hosts section */} + {sshConfigHosts.length > 0 && ( +
+

+ {t('ssh.remote.sshConfigHosts') || 'SSH Config'} +

+
+ {sshConfigHosts + .filter((configHost) => { + // Hide SSH config hosts that already have a saved connection + const hostname = configHost.hostname || configHost.host; + const port = configHost.port || 22; + const user = configHost.user || ''; + return !savedConnections.some( + (c) => c.host === hostname && c.port === port && c.username === user + ); + }) + .map((configHost) => ( +
!isConnecting && handleSSHConfigConnect(configHost)} + role="button" + tabIndex={0} + onKeyDown={(e) => e.key === 'Enter' && !isConnecting && handleSSHConfigConnect(configHost)} + > +
+ +
+
+ {configHost.host} + + {configHost.user || ''}@{configHost.hostname || configHost.host}:{configHost.port || 22} + +
+
+ ))} +
+
+ )} + + {/* Divider */} + {(savedConnections.length > 0 || sshConfigHosts.length > 0) && ( +
+ {t('ssh.remote.newConnection')} +
+ )} + + {/* New connection form */} +
+ {/* Host and Port */} +
+
+ handleInputChange('host', e.target.value)} + placeholder="example.com" + prefix={} + size="medium" + /> +
+
+ handleInputChange('port', e.target.value)} + placeholder="22" + size="medium" + /> +
+
+ + {/* Username */} +
+ handleInputChange('username', e.target.value)} + placeholder="root" + prefix={} + size="medium" + /> +
+ + {/* Connection Name */} +
+ handleInputChange('name', e.target.value)} + placeholder={t('ssh.remote.connectionNamePlaceholder')} + size="medium" + /> +
+ + {/* Authentication Method */} +
+ + handleInputChange('password', e.target.value)} + placeholder={t('ssh.remote.password')} + prefix={} + size="medium" + /> +
+ )} + + {/* Private Key */} + {formData.authType === 'privateKey' && ( + <> +
+ handleInputChange('keyPath', e.target.value)} + placeholder="~/.ssh/id_rsa" + prefix={} + size="medium" + /> +
+
+ handleInputChange('passphrase', e.target.value)} + placeholder={t('ssh.remote.passphraseOptional')} + size="medium" + /> +
+ + )} +
+ + {/* Actions */} +
+ + +
+
+
+ + {/* Password/Key Path Input Dialog */} + {passwordPrompt && ( + + )} + + ); +}; + +export default SSHConnectionDialog; diff --git a/src/web-ui/src/features/ssh-remote/SSHRemoteProvider.tsx b/src/web-ui/src/features/ssh-remote/SSHRemoteProvider.tsx new file mode 100644 index 00000000..0ef5d0d5 --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/SSHRemoteProvider.tsx @@ -0,0 +1,487 @@ +/** + * SSH Remote Feature - React Context Provider + */ + +import React, { createContext, useContext, useState, useCallback, useEffect, useRef } from 'react'; +import { createLogger } from '@/shared/utils/logger'; +import { workspaceManager } from '@/infrastructure/services/business/workspaceManager'; +import { WorkspaceKind } from '@/shared/types/global-state'; +import type { SSHConnectionConfig, RemoteWorkspace } from './types'; +import { sshApi } from './sshApi'; +import { flowChatStore } from '@/flow_chat/store/FlowChatStore'; + +const log = createLogger('SSHRemoteProvider'); + +export type ConnectionStatus = 'disconnected' | 'connecting' | 'connected' | 'error'; + +interface SSHContextValue { + // Connection state + status: ConnectionStatus; + isConnected: boolean; + isConnecting: boolean; + connectionId: string | null; + connectionConfig: SSHConnectionConfig | null; + remoteWorkspace: RemoteWorkspace | null; + connectionError: string | null; + + // Per-workspace connection statuses (keyed by connectionId) + workspaceStatuses: Record; + + // UI state + showConnectionDialog: boolean; + showFileBrowser: boolean; + error: string | null; + + // Actions + connect: (connectionId: string, config: SSHConnectionConfig) => Promise; + disconnect: () => Promise; + openWorkspace: (path: string) => Promise; + closeWorkspace: () => Promise; + + // UI actions + setShowConnectionDialog: (show: boolean) => void; + setShowFileBrowser: (show: boolean) => void; + clearError: () => void; +} + +export const SSHContext = createContext(null); + +export const useSSHRemoteContext = () => { + const 
context = useContext(SSHContext); + if (!context) { + throw new Error('useSSHRemoteContext must be used within SSHRemoteProvider'); + } + return context; +}; + +interface SSHRemoteProviderProps { + children: React.ReactNode; +} + +export const SSHRemoteProvider: React.FC = ({ children }) => { + const [status, setStatus] = useState('disconnected'); + const [isConnected, setIsConnected] = useState(false); + const [isConnecting, setIsConnecting] = useState(false); + const [connectionId, setConnectionId] = useState(null); + const [connectionConfig, setConnectionConfig] = useState(null); + const [remoteWorkspace, setRemoteWorkspace] = useState(null); + const [showConnectionDialog, setShowConnectionDialog] = useState(false); + const [showFileBrowser, setShowFileBrowser] = useState(false); + const [error, setError] = useState(null); + const [connectionError, setConnectionError] = useState(null); + // Per-workspace connection statuses (keyed by connectionId) + const [workspaceStatuses, setWorkspaceStatuses] = useState>({}); + const heartbeatInterval = useRef(null); + + const setWorkspaceStatus = useCallback((connId: string, st: ConnectionStatus) => { + setWorkspaceStatuses(prev => ({ ...prev, [connId]: st })); + }, []); + + // Wait for workspace manager to finish loading, then check remote workspaces + useEffect(() => { + const state = workspaceManager.getState(); + if (!state.loading) { + // Already loaded — kick off immediately + void checkRemoteWorkspace(); + return; + } + // Wait for loading to complete + const unsubscribe = workspaceManager.addEventListener(event => { + if (event.type === 'workspace:loading' && !event.loading) { + unsubscribe(); + void checkRemoteWorkspace(); + } + }); + return unsubscribe; + // checkRemoteWorkspace is defined below but stable (no deps change it) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + // Cleanup heartbeat on unmount + useEffect(() => { + return () => { + if (heartbeatInterval.current) { + 
clearInterval(heartbeatInterval.current); + } + }; + }, []); + + // Try to reconnect a single remote workspace with retries. + // Returns the reconnected workspace info on success, false on failure. + // Waits RETRY_WAIT_MS between each attempt (fixed, not exponential). + const RETRY_WAIT_MS = 10_000; + + const tryReconnectWithRetry = async ( + workspace: RemoteWorkspace, + maxRetries: number, + timeoutMs: number + ): Promise => { + log.info('tryReconnectWithRetry: starting', { workspace, maxRetries, timeoutMs }); + + const savedConnections = await sshApi.listSavedConnections(); + const savedConn = savedConnections.find(c => c.id === workspace.connectionId); + + if (!savedConn) { + log.warn('No saved connection found for workspace', { connectionId: workspace.connectionId }); + return false; + } + + // Determine auth method from tagged enum + let authMethod: SSHConnectionConfig['auth'] | null = null; + if (savedConn.authType.type === 'PrivateKey') { + authMethod = { type: 'PrivateKey', keyPath: savedConn.authType.keyPath }; + } else if (savedConn.authType.type === 'Agent') { + authMethod = { type: 'Agent' }; + } else { + // Password auth — cannot auto-reconnect + log.warn('Cannot auto-reconnect: password auth requires user input', { connectionId: workspace.connectionId }); + return false; + } + + const reconnectConfig: SSHConnectionConfig = { + id: savedConn.id, + name: savedConn.name, + host: savedConn.host, + port: savedConn.port, + username: savedConn.username, + auth: authMethod, + }; + + for (let attempt = 1; attempt <= maxRetries; attempt++) { + try { + log.info(`Attempting to reconnect (${attempt}/${maxRetries})`, { + connectionId: workspace.connectionId, + host: reconnectConfig.host, + }); + + const connectWithTimeout = async (): Promise<{ connectionId: string }> => { + const result = await sshApi.connect(reconnectConfig); + if (!result.success || !result.connectionId) { + throw new Error(result.error || 'Connection failed'); + } + return { connectionId: 
result.connectionId }; + }; + + const timeoutPromise = new Promise((_, reject) => { + setTimeout(() => reject(new Error('Connection timeout')), timeoutMs); + }); + + const result = await Promise.race([connectWithTimeout(), timeoutPromise]); + + // Successfully connected — open the workspace in SSH state manager + await sshApi.openWorkspace(result.connectionId, workspace.remotePath); + const reconnectedWorkspace: RemoteWorkspace = { + connectionId: result.connectionId, + connectionName: savedConn.name, + remotePath: workspace.remotePath, + }; + + log.info('Successfully reconnected to remote workspace', { + originalConnectionId: workspace.connectionId, + newConnectionId: result.connectionId, + }); + return { workspace: reconnectedWorkspace, connectionId: result.connectionId }; + } catch (err) { + log.warn(`Reconnect attempt ${attempt}/${maxRetries} failed`, { connectionId: workspace.connectionId, error: err }); + if (attempt < maxRetries) { + // Fixed 10-second wait between retries + await new Promise(resolve => setTimeout(resolve, RETRY_WAIT_MS)); + } + } + } + + return false; + }; + + const checkRemoteWorkspace = async () => { + try { + // ── Collect all remote workspaces to reconnect ────────────────────── + const allWorkspaces = Array.from(workspaceManager.getState().openedWorkspaces.values()); + const openedRemote = allWorkspaces.filter( + ws => ws.workspaceKind === WorkspaceKind.Remote && ws.connectionId + ); + + // Also check legacy single-workspace persisted in app_state + let legacyWorkspace: RemoteWorkspace | null = null; + try { + legacyWorkspace = await sshApi.getWorkspaceInfo(); + } catch { + // Ignore + } + + // Build a deduplicated list keyed by remotePath + const toReconnect = new Map(); + + for (const ws of openedRemote) { + if (!ws.connectionId) continue; + toReconnect.set(ws.rootPath, { + connectionId: ws.connectionId, + connectionName: ws.connectionName || 'Remote', + remotePath: ws.rootPath, + }); + } + + // Add legacy workspace if it isn't 
already covered + if (legacyWorkspace && !toReconnect.has(legacyWorkspace.remotePath)) { + toReconnect.set(legacyWorkspace.remotePath, legacyWorkspace); + } + + if (toReconnect.size === 0) { + log.info('checkRemoteWorkspace: no remote workspaces to reconnect'); + return; + } + + log.info(`checkRemoteWorkspace: found ${toReconnect.size} remote workspace(s)`); + + // Mark all as 'connecting' immediately so the UI shows the pending state + const initialStatuses: Record = {}; + for (const [, ws] of toReconnect) { + initialStatuses[ws.connectionId] = 'connecting'; + } + setWorkspaceStatuses(prev => ({ ...prev, ...initialStatuses })); + + // ── Process each workspace ────────────────────────────────────────── + for (const [, workspace] of toReconnect) { + const isAlreadyOpened = openedRemote.some(ws => ws.rootPath === workspace.remotePath); + + // Check if SSH is already live + const alreadyConnected = await sshApi.isConnected(workspace.connectionId).catch(() => false); + + if (alreadyConnected) { + log.info('Remote workspace already connected', { connectionId: workspace.connectionId }); + // Register with SSH state manager (idempotent) + await sshApi.openWorkspace(workspace.connectionId, workspace.remotePath).catch(() => {}); + setWorkspaceStatus(workspace.connectionId, 'connected'); + setIsConnected(true); + setConnectionId(workspace.connectionId); + setRemoteWorkspace(workspace); + startHeartbeat(workspace.connectionId); + + if (!isAlreadyOpened) { + await workspaceManager.openRemoteWorkspace(workspace).catch(() => {}); + } + // Re-initialize sessions now that the workspace is registered in the state manager + void flowChatStore.initializeFromDisk(workspace.remotePath).catch(() => {}); + continue; + } + + // Not connected — attempt auto-reconnect + log.info('Remote workspace disconnected, attempting auto-reconnect', { + connectionId: workspace.connectionId, + remotePath: workspace.remotePath, + }); + + const result = await tryReconnectWithRetry(workspace, 5, 5000); + 
+ if (result !== false) { + log.info('Reconnection successful', { newConnectionId: result.connectionId }); + setWorkspaceStatus(result.workspace.connectionId, 'connected'); + setIsConnected(true); + setConnectionId(result.connectionId); + setRemoteWorkspace(result.workspace); + startHeartbeat(result.connectionId); + + if (!isAlreadyOpened) { + await workspaceManager.openRemoteWorkspace(result.workspace).catch(() => {}); + } + // Re-initialize sessions now that the workspace is registered in the state manager + void flowChatStore.initializeFromDisk(result.workspace.remotePath).catch(() => {}); + } else { + // Reconnection failed — keep workspace in sidebar, show red indicator + log.warn('Auto-reconnect failed, workspace kept in sidebar with error status', { + connectionId: workspace.connectionId, + }); + setWorkspaceStatus(workspace.connectionId, 'error'); + setConnectionError('Remote connection lost. Please reconnect manually.'); + setStatus('error'); + + // Make sure the workspace appears in opened list even if reconnection failed + if (!isAlreadyOpened) { + await workspaceManager.openRemoteWorkspace(workspace).catch(() => {}); + } + } + } + } catch (e) { + log.error('checkRemoteWorkspace failed', e); + } + }; + + const statusRef = useRef(status); + statusRef.current = status; + + const startHeartbeat = (connId: string) => { + if (heartbeatInterval.current) { + clearInterval(heartbeatInterval.current); + } + + heartbeatInterval.current = window.setInterval(async () => { + try { + const connected = await sshApi.isConnected(connId); + if (!connected && statusRef.current === 'connected') { + handleConnectionLost(connId); + } + } catch { + // Ignore heartbeat errors + } + }, 30000); + }; + + const handleConnectionLost = (connId: string) => { + log.warn('Remote connection lost, attempting auto-reconnect...'); + setStatus('error'); + setWorkspaceStatus(connId, 'error'); + setConnectionError('Connection lost. 
Attempting to reconnect...'); + setIsConnected(false); + if (heartbeatInterval.current) { + clearInterval(heartbeatInterval.current); + heartbeatInterval.current = null; + } + // Attempt auto-reconnect in background + void checkRemoteWorkspace(); + }; + + const connect = useCallback(async (_connId: string, config: SSHConnectionConfig) => { + log.debug('SSH connect called', { host: config.host }); + setStatus('connecting'); + setIsConnecting(true); + setConnectionError(null); + setError(null); + + try { + const result = await sshApi.connect(config); + log.debug('SSH connect result', { success: result.success, connectionId: result.connectionId, error: result.error }); + + if (result.success && result.connectionId) { + log.info('SSH connection successful', { connectionId: result.connectionId }); + setStatus('connected'); + setIsConnected(true); + setConnectionId(result.connectionId); + setConnectionConfig(config); + setShowConnectionDialog(false); + setShowFileBrowser(true); + startHeartbeat(result.connectionId); + } else { + log.warn('SSH connection failed', { error: result.error }); + setStatus('error'); + const errorMsg = result.error || 'Connection failed'; + setConnectionError(errorMsg); + throw new Error(errorMsg); + } + } catch (e) { + log.error('SSH connection exception', e); + if (e instanceof Error) { + setStatus('error'); + setConnectionError(e.message); + throw e; + } + const errorMsg = e instanceof Error ? 
e.message : 'Connection failed'; + setStatus('error'); + setConnectionError(errorMsg); + throw new Error(errorMsg); + } finally { + setIsConnecting(false); + } + }, []); + + const disconnect = useCallback(async () => { + const currentRemoteWorkspace = remoteWorkspace; + const currentConnectionId = connectionId; + + if (heartbeatInterval.current) { + clearInterval(heartbeatInterval.current); + heartbeatInterval.current = null; + } + + if (currentConnectionId) { + try { + await sshApi.disconnect(currentConnectionId); + } catch { + // Ignore disconnect errors + } + } + setStatus('disconnected'); + setConnectionId(null); + setConnectionConfig(null); + setRemoteWorkspace(null); + setIsConnected(false); + setShowFileBrowser(false); + + if (currentRemoteWorkspace) { + setWorkspaceStatus(currentRemoteWorkspace.connectionId, 'disconnected'); + try { + await workspaceManager.removeRemoteWorkspace(currentRemoteWorkspace.connectionId); + } catch { + // Ignore errors + } + } + }, [connectionId, remoteWorkspace, setWorkspaceStatus]); + + const openWorkspace = useCallback(async (pingPath: string) => { + if (!connectionId) { + throw new Error('Not connected'); + } + const connName = connectionConfig?.name || 'Remote'; + await sshApi.openWorkspace(connectionId, pingPath); + const remoteWs = { + connectionId, + connectionName: connName, + remotePath: pingPath, + }; + setRemoteWorkspace(remoteWs); + setShowFileBrowser(false); + setWorkspaceStatus(connectionId, 'connected'); + + await workspaceManager.openRemoteWorkspace(remoteWs); + }, [connectionId, connectionConfig, setWorkspaceStatus]); + + const closeWorkspace = useCallback(async () => { + const currentRemoteWorkspace = remoteWorkspace; + + try { + await sshApi.closeWorkspace(); + } catch { + // Ignore errors + } + setRemoteWorkspace(null); + setShowFileBrowser(true); + + if (currentRemoteWorkspace) { + setWorkspaceStatus(currentRemoteWorkspace.connectionId, 'disconnected'); + try { + await 
workspaceManager.removeRemoteWorkspace(currentRemoteWorkspace.connectionId); + } catch { + // Ignore errors + } + } + }, [remoteWorkspace, setWorkspaceStatus]); + + const clearError = useCallback(() => { + setError(null); + setConnectionError(null); + }, []); + + const value: SSHContextValue = { + status, + isConnected, + isConnecting, + connectionId, + connectionConfig, + remoteWorkspace, + connectionError, + workspaceStatuses, + showConnectionDialog, + showFileBrowser, + error, + connect, + disconnect, + openWorkspace, + closeWorkspace, + setShowConnectionDialog, + setShowFileBrowser, + clearError, + }; + + return {children}; +}; + +export default SSHRemoteProvider; diff --git a/src/web-ui/src/features/ssh-remote/index.ts b/src/web-ui/src/features/ssh-remote/index.ts new file mode 100644 index 00000000..72e6aa65 --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/index.ts @@ -0,0 +1,11 @@ +/** + * SSH Remote Feature - Public API + */ + +export * from './types'; +export * from './sshApi'; +export { SSHConnectionDialog } from './SSHConnectionDialog'; +export { RemoteFileBrowser } from './RemoteFileBrowser'; +export { PasswordInputDialog } from './PasswordInputDialog'; +export { ConfirmDialog } from './ConfirmDialog'; +export { SSHRemoteProvider, useSSHRemoteContext } from './SSHRemoteProvider'; diff --git a/src/web-ui/src/features/ssh-remote/sshApi.ts b/src/web-ui/src/features/ssh-remote/sshApi.ts new file mode 100644 index 00000000..c07b5028 --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/sshApi.ts @@ -0,0 +1,191 @@ +/** + * SSH Remote Feature - API Service + */ + +import type { + SSHConnectionConfig, + SSHConnectionResult, + SavedConnection, + RemoteFileEntry, + RemoteTreeNode, + RemoteWorkspace, + SSHConfigLookupResult, + SSHConfigEntry, +} from './types'; + +// API adapter for Tauri/Server Mode compatibility +import { api } from '@/infrastructure/api/service-api/ApiClient'; + +export const sshApi = { + // === Connection Management === + + /** + * List 
all saved SSH connections + */ + async listSavedConnections(): Promise { + return api.invoke('ssh_list_saved_connections', {}); + }, + + /** + * Save SSH connection configuration + */ + async saveConnection(config: SSHConnectionConfig): Promise { + return api.invoke('ssh_save_connection', { config }); + }, + + /** + * Delete saved SSH connection + */ + async deleteConnection(connectionId: string): Promise { + return api.invoke('ssh_delete_connection', { connectionId }); + }, + + /** + * Connect to remote SSH server + */ + async connect(config: SSHConnectionConfig): Promise { + return api.invoke('ssh_connect', { config }); + }, + + /** + * Disconnect from SSH server + */ + async disconnect(connectionId: string): Promise { + return api.invoke('ssh_disconnect', { connectionId }); + }, + + /** + * Disconnect all SSH connections + */ + async disconnectAll(): Promise { + return api.invoke('ssh_disconnect_all', {}); + }, + + /** + * Check if connected to SSH server + */ + async isConnected(connectionId: string): Promise { + return api.invoke('ssh_is_connected', { connectionId }); + }, + + /** + * Get SSH config for a host from ~/.ssh/config + */ + async getSSHConfig(host: string): Promise { + return api.invoke('ssh_get_config', { host }); + }, + + /** + * List all hosts from ~/.ssh/config + */ + async listSSHConfigHosts(): Promise { + return api.invoke('ssh_list_config_hosts', {}); + }, + + // === Remote File Operations === + + /** + * Read file content from remote server + */ + async readFile(connectionId: string, path: string): Promise { + return api.invoke('remote_read_file', { connectionId, path }); + }, + + /** + * Write content to remote file + */ + async writeFile(connectionId: string, path: string, content: string): Promise { + return api.invoke('remote_write_file', { connectionId, path, content }); + }, + + /** + * Check if remote path exists + */ + async exists(connectionId: string, path: string): Promise { + return api.invoke('remote_exists', { connectionId, 
path }); + }, + + /** + * List directory contents + */ + async readDir(connectionId: string, path: string): Promise { + return api.invoke('remote_read_dir', { connectionId, path }); + }, + + /** + * Get remote file tree + */ + async getTree( + connectionId: string, + path: string, + depth?: number + ): Promise { + return api.invoke('remote_get_tree', { connectionId, path, depth }); + }, + + /** + * Create remote directory + */ + async createDir( + connectionId: string, + path: string, + recursive: boolean + ): Promise { + return api.invoke('remote_create_dir', { connectionId, path, recursive }); + }, + + /** + * Remove remote file or directory + */ + async remove( + connectionId: string, + path: string, + recursive: boolean + ): Promise { + return api.invoke('remote_remove', { connectionId, path, recursive }); + }, + + /** + * Rename/move remote file or directory + */ + async rename( + connectionId: string, + oldPath: string, + newPath: string + ): Promise { + return api.invoke('remote_rename', { connectionId, oldPath, newPath }); + }, + + /** + * Execute command on remote server + */ + async execute( + connectionId: string, + command: string + ): Promise<[string, string, number]> { + return api.invoke<[string, string, number]>('remote_execute', { connectionId, command }); + }, + + // === Remote Workspace === + + /** + * Open remote workspace + */ + async openWorkspace(connectionId: string, remotePath: string): Promise { + return api.invoke('remote_open_workspace', { connectionId, remotePath }); + }, + + /** + * Close remote workspace + */ + async closeWorkspace(): Promise { + return api.invoke('remote_close_workspace', {}); + }, + + /** + * Get current remote workspace info + */ + async getWorkspaceInfo(): Promise { + return api.invoke('remote_get_workspace_info', {}); + }, +}; diff --git a/src/web-ui/src/features/ssh-remote/types.ts b/src/web-ui/src/features/ssh-remote/types.ts new file mode 100644 index 00000000..e256d787 --- /dev/null +++ 
b/src/web-ui/src/features/ssh-remote/types.ts @@ -0,0 +1,84 @@ +/** + * SSH Remote Feature - Types + */ + +export interface SSHConnectionConfig { + id: string; + name: string; + host: string; + port: number; + username: string; + auth: SSHAuthMethod; + defaultWorkspace?: string; +} + +export type SSHAuthMethod = + | { type: 'Password'; password: string } + | { type: 'PrivateKey'; keyPath: string; passphrase?: string } + | { type: 'Agent' }; + +export type SavedAuthType = + | { type: 'Password' } + | { type: 'PrivateKey'; keyPath: string } + | { type: 'Agent' }; + +export interface SavedConnection { + id: string; + name: string; + host: string; + port: number; + username: string; + authType: SavedAuthType; + defaultWorkspace?: string; + lastConnected?: number; +} + +export interface SSHConnectionResult { + success: boolean; + connectionId?: string; + error?: string; + serverInfo?: ServerInfo; +} + +export interface ServerInfo { + osType: string; + hostname: string; + homeDir: string; +} + +export interface RemoteFileEntry { + name: string; + path: string; + isDir: boolean; + isFile: boolean; + isSymlink: boolean; + size?: number; + modified?: number; +} + +export interface RemoteTreeNode { + name: string; + path: string; + isDir: boolean; + children?: RemoteTreeNode[]; +} + +export interface RemoteWorkspace { + connectionId: string; + connectionName: string; + remotePath: string; +} + +export interface SSHConfigEntry { + host: string; + hostname?: string; + port?: number; + user?: string; + identityFile?: string; + agent?: boolean; +} + +export interface SSHConfigLookupResult { + found: boolean; + config?: SSHConfigEntry; +} diff --git a/src/web-ui/src/features/ssh-remote/useSSHRemote.ts b/src/web-ui/src/features/ssh-remote/useSSHRemote.ts new file mode 100644 index 00000000..6453e626 --- /dev/null +++ b/src/web-ui/src/features/ssh-remote/useSSHRemote.ts @@ -0,0 +1,180 @@ +/** + * SSH Remote Feature - State Management Hook (Standalone version) + * This is a simple 
state management hook without React Context + */ + +import { useState, useCallback, useEffect, useRef } from 'react'; +import { sshApi } from './sshApi'; +import { useWorkspaceContext } from '@/infrastructure/contexts/WorkspaceContext'; +import type { SSHConnectionConfig, RemoteWorkspace } from './types'; + +export interface SSHState { + isConnected: boolean; + connectionId: string | null; + connectionConfig: SSHConnectionConfig | null; + remoteWorkspace: RemoteWorkspace | null; + error: string | null; + // UI state + showConnectionDialog: boolean; + showFileBrowser: boolean; +} + +export function useSSHRemote() { + const [state, setState] = useState({ + isConnected: false, + connectionId: null, + connectionConfig: null, + remoteWorkspace: null, + error: null, + showConnectionDialog: false, + showFileBrowser: false, + }); + + const { recentWorkspaces, switchWorkspace } = useWorkspaceContext(); + const previousWorkspaceRef = useRef(null); + + // Check for existing remote workspace on mount + useEffect(() => { + checkRemoteWorkspace(); + }, []); + + const checkRemoteWorkspace = async () => { + try { + const workspace = await sshApi.getWorkspaceInfo(); + if (workspace) { + setState((prev) => ({ + ...prev, + isConnected: true, + remoteWorkspace: workspace, + })); + } + } catch (e) { + // Ignore errors on initial check + } + }; + + const connect = useCallback( + async (connectionId: string, config: SSHConnectionConfig) => { + setState((prev) => ({ + ...prev, + isConnected: true, + connectionId, + connectionConfig: config, + error: null, + })); + }, + [] + ); + + const disconnect = useCallback(async () => { + const hadRemoteConnection = state.isConnected && state.connectionId; + + if (state.connectionId) { + try { + await sshApi.disconnect(state.connectionId); + } catch (e) { + // Ignore disconnect errors + } + } + + setState((prev) => ({ + ...prev, + isConnected: false, + connectionId: null, + connectionConfig: null, + remoteWorkspace: null, + })); + + // Switch back to 
the most recent local workspace if we had a remote connection + if (hadRemoteConnection) { + try { + const localWorkspaces = recentWorkspaces.filter( + (w) => !w.rootPath.startsWith('ssh://') + ); + if (localWorkspaces.length > 0) { + await switchWorkspace(localWorkspaces[0]); + } + } catch (e) { + // Ignore errors when switching workspaces + } + } + }, [state.isConnected, state.connectionId, recentWorkspaces, switchWorkspace]); + + const openWorkspace = useCallback( + async (remotePath: string) => { + if (!state.connectionId) { + throw new Error('Not connected'); + } + + // Save current workspace ID before switching to remote + if (recentWorkspaces.length > 0) { + previousWorkspaceRef.current = recentWorkspaces[0].id; + } + + await sshApi.openWorkspace(state.connectionId, remotePath); + const config = state.connectionConfig; + setState((prev) => ({ + ...prev, + remoteWorkspace: { + connectionId: state.connectionId!, + connectionName: config?.name || 'Remote', + remotePath, + }, + showFileBrowser: false, + })); + }, + [state.connectionId, state.connectionConfig, recentWorkspaces] + ); + + const closeWorkspace = useCallback(async () => { + // Save previous workspace ID + const prevWorkspaceId = previousWorkspaceRef.current; + + try { + await sshApi.closeWorkspace(); + } catch (e) { + // Ignore errors + } + setState((prev) => ({ + ...prev, + remoteWorkspace: null, + })); + + // Switch back to previous local workspace if available + if (prevWorkspaceId) { + try { + const targetWorkspace = recentWorkspaces.find((w) => w.id === prevWorkspaceId); + if (targetWorkspace && !targetWorkspace.rootPath.startsWith('ssh://')) { + await switchWorkspace(targetWorkspace); + } + } catch (e) { + // Ignore errors when switching workspaces + } + } + }, [recentWorkspaces, switchWorkspace]); + + const setError = useCallback((error: string | null) => { + setState((prev) => ({ ...prev, error })); + }, []); + + const setShowConnectionDialog = useCallback((show: boolean) => { + 
setState((prev) => ({ ...prev, showConnectionDialog: show })); + }, []); + + const setShowFileBrowser = useCallback((show: boolean) => { + setState((prev) => ({ ...prev, showFileBrowser: show })); + }, []); + + return { + ...state, + connect, + disconnect, + openWorkspace, + closeWorkspace, + setError, + setShowConnectionDialog, + setShowFileBrowser, + }; +} + +export default useSSHRemote; diff --git a/src/web-ui/src/infrastructure/api/adapters/tauri-adapter.ts b/src/web-ui/src/infrastructure/api/adapters/tauri-adapter.ts index 661c2637..9f2eb5cd 100644 --- a/src/web-ui/src/infrastructure/api/adapters/tauri-adapter.ts +++ b/src/web-ui/src/infrastructure/api/adapters/tauri-adapter.ts @@ -1,6 +1,5 @@ -import { invoke } from '@tauri-apps/api/core'; import { listen, UnlistenFn } from '@tauri-apps/api/event'; import { ITransportAdapter } from './base'; import { createLogger } from '@/shared/utils/logger'; @@ -10,45 +9,80 @@ const log = createLogger('TauriAdapter'); export class TauriTransportAdapter implements ITransportAdapter { private unlistenFunctions: UnlistenFn[] = []; private connected: boolean = false; - - + private invokeFn: ((action: string, params?: any) => Promise) | null = null; + private initPromise: Promise | null = null; + + // Lazy initialize Tauri API + private async ensureInitialized() { + if (this.invokeFn) return; + + if (this.initPromise) { + await this.initPromise; + return; + } + + this.initPromise = this.doInitialize(); + await this.initPromise; + } + + private async doInitialize() { + try { + // Check if Tauri API is available + if (typeof window !== 'undefined' && !('__TAURI__' in window)) { + log.warn('Tauri API not available, running in non-Tauri environment'); + this.invokeFn = async () => { + throw new Error('Tauri API is not available. 
Make sure you are running in a Tauri environment.'); + }; + return; + } + + const tauriApi = await import('@tauri-apps/api/core'); + this.invokeFn = tauriApi.invoke; + log.debug('Tauri API initialized successfully'); + } catch (error) { + log.error('Failed to load Tauri API', error); + this.invokeFn = async () => { + throw new Error('Failed to load Tauri API: ' + (error instanceof Error ? error.message : 'Unknown error')); + }; + } + } + async connect(): Promise { this.connected = true; } - - + async request(action: string, params?: any): Promise { if (!this.connected) { await this.connect(); } - + + await this.ensureInitialized(); + try { - - - const result = params !== undefined - ? await invoke(action, params) - : await invoke(action); - - return result; + if (!this.invokeFn) { + throw new Error('Tauri invoke function not initialized'); + } + const result = params !== undefined + ? await this.invokeFn(action, params) + : await this.invokeFn(action); + + return result as T; } catch (error) { log.error('Request failed', { action, error }); throw error; } } - - + listen(event: string, callback: (data: T) => void): () => void { let unlistenFn: UnlistenFn | null = null; let isUnlistened = false; - - + listen(event, (e) => { if (!isUnlistened) { callback(e.payload); } }).then(fn => { if (isUnlistened) { - fn(); } else { unlistenFn = fn; @@ -57,8 +91,7 @@ export class TauriTransportAdapter implements ITransportAdapter { }).catch(error => { log.error('Failed to listen event', { event, error }); }); - - + return () => { isUnlistened = true; if (unlistenFn) { @@ -70,10 +103,8 @@ export class TauriTransportAdapter implements ITransportAdapter { } }; } - - + async disconnect(): Promise { - this.unlistenFunctions.forEach(fn => { try { fn(); @@ -84,10 +115,9 @@ export class TauriTransportAdapter implements ITransportAdapter { this.unlistenFunctions = []; this.connected = false; } - - + isConnected(): boolean { - return this.connected; + return this.connected; } } diff --git 
a/src/web-ui/src/infrastructure/api/service-api/GlobalAPI.ts b/src/web-ui/src/infrastructure/api/service-api/GlobalAPI.ts index bd15defb..e28e02d8 100644 --- a/src/web-ui/src/infrastructure/api/service-api/GlobalAPI.ts +++ b/src/web-ui/src/infrastructure/api/service-api/GlobalAPI.ts @@ -46,6 +46,8 @@ export interface WorkspaceInfo { tags: string[]; statistics?: ProjectStatistics | null; identity?: WorkspaceIdentity | null; + connectionId?: string; + connectionName?: string; } export interface UpdateAppStatusRequest { @@ -56,6 +58,12 @@ export interface OpenWorkspaceRequest { path: string; } +export interface OpenRemoteWorkspaceRequest { + remotePath: string; + connectionId: string; + connectionName: string; +} + export interface CreateAssistantWorkspaceRequest {} export interface CloseWorkspaceRequest { @@ -127,6 +135,16 @@ export class GlobalAPI { } } + async openRemoteWorkspace(remotePath: string, connectionId: string, connectionName: string): Promise { + try { + return await api.invoke('open_remote_workspace', { + request: { remotePath, connectionId, connectionName } + }); + } catch (error) { + throw createTauriCommandError('open_remote_workspace', error, { remotePath, connectionId, connectionName }); + } + } + async createAssistantWorkspace(): Promise { try { return await api.invoke('create_assistant_workspace', { diff --git a/src/web-ui/src/infrastructure/api/service-api/tauri-commands.ts b/src/web-ui/src/infrastructure/api/service-api/tauri-commands.ts index c46a9e87..f2d71eb7 100644 --- a/src/web-ui/src/infrastructure/api/service-api/tauri-commands.ts +++ b/src/web-ui/src/infrastructure/api/service-api/tauri-commands.ts @@ -32,6 +32,8 @@ export interface WorkspaceInfo { vibe?: string | null; emoji?: string | null; } | null; + connectionId?: string; + connectionName?: string; } export interface FileOperationRequest { diff --git a/src/web-ui/src/infrastructure/services/business/workspaceManager.ts 
b/src/web-ui/src/infrastructure/services/business/workspaceManager.ts index 6a3392e1..4d6f177d 100644 --- a/src/web-ui/src/infrastructure/services/business/workspaceManager.ts +++ b/src/web-ui/src/infrastructure/services/business/workspaceManager.ts @@ -1,6 +1,6 @@ -import { WorkspaceInfo, globalStateAPI } from '../../../shared/types'; +import { WorkspaceInfo, WorkspaceKind, globalStateAPI } from '../../../shared/types'; import { createLogger } from '@/shared/utils/logger'; import { listen } from '@tauri-apps/api/event'; @@ -233,6 +233,7 @@ class WorkspaceManager { event?: WorkspaceEvent ): void { const openedWorkspaceMap = this.buildOpenedWorkspaceMap(openedWorkspaces); + const resolvedCurrentWorkspace = currentWorkspace ? openedWorkspaceMap.get(currentWorkspace.id) ?? currentWorkspace : null; @@ -403,6 +404,88 @@ class WorkspaceManager { } } + public async openRemoteWorkspace(remoteWorkspace: { + connectionId: string; + connectionName: string; + remotePath: string; + }): Promise { + try { + this.setLoading(true); + this.setError(null); + + log.info('Opening remote workspace', remoteWorkspace); + + const workspace = await globalStateAPI.openRemoteWorkspace( + remoteWorkspace.remotePath, + remoteWorkspace.connectionId, + remoteWorkspace.connectionName, + ); + + const [recentWorkspaces, openedWorkspaces] = await Promise.all([ + globalStateAPI.getRecentWorkspaces(), + globalStateAPI.getOpenedWorkspaces(), + ]); + + this.updateWorkspaceState( + workspace, + recentWorkspaces, + openedWorkspaces, + false, + null, + { type: 'workspace:opened', workspace } + ); + + return workspace; + } catch (error) { + log.error('Failed to open remote workspace', { remoteWorkspace, error }); + const errorMessage = error instanceof Error ? 
error.message : String(error); + this.updateState({ loading: false, error: errorMessage }, { type: 'workspace:error', error: errorMessage }); + throw error; + } + } + + public async removeRemoteWorkspace(connectionId: string): Promise { + try { + const workspace = this.findRemoteWorkspaceByConnectionId(connectionId); + if (!workspace) { + return; + } + + await globalStateAPI.closeWorkspace(workspace.id); + + const [currentWorkspace, recentWorkspaces, openedWorkspaces] = await Promise.all([ + globalStateAPI.getCurrentWorkspace(), + globalStateAPI.getRecentWorkspaces(), + globalStateAPI.getOpenedWorkspaces(), + ]); + + this.updateWorkspaceState( + currentWorkspace, + recentWorkspaces, + openedWorkspaces, + false, + null, + { type: 'workspace:closed', workspaceId: workspace.id } + ); + + this.emit({ type: 'workspace:active-changed', workspace: currentWorkspace }); + } catch (error) { + log.error('Failed to remove remote workspace', { connectionId, error }); + const errorMessage = error instanceof Error ? 
error.message : String(error); + this.updateState({ error: errorMessage }, { type: 'workspace:error', error: errorMessage }); + throw error; + } + } + + private findRemoteWorkspaceByConnectionId(connectionId: string): WorkspaceInfo | undefined { + for (const [, ws] of this.state.openedWorkspaces) { + if (ws.connectionId === connectionId && ws.workspaceKind === WorkspaceKind.Remote) { + return ws; + } + } + return undefined; + } + public async createAssistantWorkspace(): Promise { try { this.setLoading(true); diff --git a/src/web-ui/src/locales/en-US/common.json b/src/web-ui/src/locales/en-US/common.json index 3dd05471..cbbfeacd 100644 --- a/src/web-ui/src/locales/en-US/common.json +++ b/src/web-ui/src/locales/en-US/common.json @@ -874,5 +874,54 @@ "timeOfDay": "Messages by Time of Day", "lines": "Lines", "files": "Files" + }, + "ssh": { + "remote": { + "title": "Remote SSH", + "connect": "Connect Remote Project", + "connecting": "Connecting...", + "connected": "Connected", + "disconnect": "Disconnect", + "savedConnections": "Saved Connections", + "sshConfigHosts": "SSH Config", + "fillForm": "Use", + "newConnection": "New Connection", + "host": "Host", + "port": "Port", + "username": "Username", + "connectionName": "Connection Name", + "connectionNamePlaceholder": "Leave empty to auto-generate", + "authMethod": "Authentication Method", + "password": "Password", + "privateKey": "Private Key", + "sshAgent": "SSH Agent", + "privateKeyPath": "Private Key Path", + "passphrase": "Passphrase", + "passphraseOptional": "Leave empty if none", + "selectWorkspace": "Select Workspace Directory", + "openWorkspace": "Open as Workspace", + "selected": "Selected", + "clickToSelect": "Click to select a directory", + "name": "Name", + "size": "Size", + "modified": "Modified", + "emptyDirectory": "Directory is empty", + "hostRequired": "Host is required", + "usernameRequired": "Username is required", + "portInvalid": "Port must be a number between 1 and 65535", + "passwordRequired": 
"Password is required", + "keyPathRequired": "Private key path is required", + "enterPassword": "Enter Password", + "enterKeyPath": "Enter Private Key Path", + "keyPathDescription": "Enter the private key file path", + "disconnectConfirm": "Are you sure you want to disconnect?", + "disconnectWorkspaceConfirm": "Are you sure you want to close the remote workspace? This will disconnect from the remote server.", + "newFile": "New File", + "newFolder": "New Folder", + "delete": "Delete", + "deleteTitle": "Confirm Delete", + "deleteConfirm": "Are you sure you want to delete this file/folder? This action cannot be undone.", + "rename": "Rename" + } } } diff --git a/src/web-ui/src/locales/zh-CN/common.json b/src/web-ui/src/locales/zh-CN/common.json index 147f56e8..edba2f2b 100644 --- a/src/web-ui/src/locales/zh-CN/common.json +++ b/src/web-ui/src/locales/zh-CN/common.json @@ -874,5 +874,54 @@ "timeOfDay": "按时段分布", "lines": "行", "files": "文件" + }, + "ssh": { + "remote": { + "title": "远程 SSH", + "connect": "连接远程工程", + "connecting": "连接中...", + "connected": "已连接", + "disconnect": "断开连接", + "savedConnections": "已保存的连接", + "sshConfigHosts": "SSH 配置", + "fillForm": "使用", + "newConnection": "新建连接", + "host": "主机地址", + "port": "端口", + "username": "用户名", + "connectionName": "连接名称", + "connectionNamePlaceholder": "留空自动生成", + "authMethod": "认证方式", + "password": "密码", + "privateKey": "私钥", + "sshAgent": "SSH Agent", + "privateKeyPath": "私钥路径", + "passphrase": "密码短语", + "passphraseOptional": "留空表示无密码短语", + "selectWorkspace": "选择工作区目录", + "openWorkspace": "打开为工作区", + "selected": "已选择", + "clickToSelect": "点击选择一个目录", + "name": "名称", + "size": "大小", + "modified": "修改时间", + "emptyDirectory": "目录为空", + "hostRequired": "主机地址不能为空", + "usernameRequired": "用户名不能为空", + "portInvalid": "端口必须是 1-65535 之间的数字", + "passwordRequired": "密码不能为空", + "keyPathRequired": "私钥路径不能为空", + "enterPassword": "请输入密码", + "enterKeyPath": "请输入私钥路径", + "keyPathDescription": "输入私钥文件路径", + "disconnectConfirm": 
"确定要断开连接吗?", + "disconnectWorkspaceConfirm": "确定要关闭远程工作区吗?这将断开与远程服务器的连接。", + "newFile": "新建文件", + "newFolder": "新建文件夹", + "delete": "删除", + "deleteTitle": "确认删除", + "deleteConfirm": "确定要删除此文件/文件夹吗?此操作无法撤销。", + "rename": "重命名" + } } } diff --git a/src/web-ui/src/shared/types/global-state.ts b/src/web-ui/src/shared/types/global-state.ts index d66683fa..9214de4b 100644 --- a/src/web-ui/src/shared/types/global-state.ts +++ b/src/web-ui/src/shared/types/global-state.ts @@ -52,6 +52,7 @@ export enum WorkspaceType { export enum WorkspaceKind { Normal = 'normal', Assistant = 'assistant', + Remote = 'remote', } @@ -86,6 +87,12 @@ export interface WorkspaceInfo { tags: string[]; statistics?: ProjectStatistics; identity?: WorkspaceIdentity | null; + connectionId?: string; + connectionName?: string; +} + +export function isRemoteWorkspace(workspace: WorkspaceInfo | null | undefined): boolean { + return workspace?.workspaceKind === WorkspaceKind.Remote; } @@ -139,6 +146,7 @@ export interface GlobalStateAPI { openWorkspace(path: string): Promise; + openRemoteWorkspace(remotePath: string, connectionId: string, connectionName: string): Promise; createAssistantWorkspace(): Promise; deleteAssistantWorkspace(workspaceId: string): Promise; resetAssistantWorkspace(workspaceId: string): Promise; @@ -202,6 +210,8 @@ function mapWorkspaceKind(workspaceKind: APIWorkspaceInfo['workspaceKind']): Wor switch (workspaceKind) { case WorkspaceKind.Assistant: return WorkspaceKind.Assistant; + case WorkspaceKind.Remote: + return WorkspaceKind.Remote; default: return WorkspaceKind.Normal; } @@ -246,6 +256,8 @@ function mapWorkspaceInfo(workspace: APIWorkspaceInfo): WorkspaceInfo { } : undefined, identity: mapWorkspaceIdentity(workspace.identity), + connectionId: workspace.connectionId, + connectionName: workspace.connectionName, }; } @@ -294,6 +306,10 @@ export function createGlobalStateAPI(): GlobalStateAPI { return mapWorkspaceInfo(await globalAPI.openWorkspace(path)); }, + async 
openRemoteWorkspace(remotePath: string, connectionId: string, connectionName: string): Promise { + return mapWorkspaceInfo(await globalAPI.openRemoteWorkspace(remotePath, connectionId, connectionName)); + }, + async createAssistantWorkspace(): Promise { return mapWorkspaceInfo(await globalAPI.createAssistantWorkspace()); }, diff --git a/src/web-ui/src/tools/terminal/types/session.ts b/src/web-ui/src/tools/terminal/types/session.ts index f377cba6..84d0e07d 100644 --- a/src/web-ui/src/tools/terminal/types/session.ts +++ b/src/web-ui/src/tools/terminal/types/session.ts @@ -31,6 +31,7 @@ export interface SessionResponse { status: SessionStatus | string; cols: number; rows: number; + connectionId?: string; } export interface ShellInfo { From a76c8548f556830789b7ea412e31e090a11b32c3 Mon Sep 17 00:00:00 2001 From: bowen628 Date: Sat, 21 Mar 2026 00:23:21 +0800 Subject: [PATCH 2/5] fix: vendor OpenSSL to fix Windows CI build failure russh-keys uses the openssl crate which requires a system OpenSSL installation. On Windows runners there is none, so the build fails. Adding openssl with the vendored feature compiles OpenSSL from source (same approach already used by git2 via vendored-openssl). 
Co-Authored-By: Claude Sonnet 4.6 --- Cargo.toml | 3 +++ src/crates/core/Cargo.toml | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index f249c28c..e6992fd6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -80,6 +80,9 @@ toml = "0.8" # Git git2 = { version = "0.18", default-features = false, features = ["https", "vendored-libgit2", "vendored-openssl"] } +# OpenSSL — vendored so no system OpenSSL is needed (required by russh-keys on Windows) +openssl = { version = "0.10", features = ["vendored"] } + # Terminal portable-pty = "0.8" vte = "0.15.0" diff --git a/src/crates/core/Cargo.toml b/src/crates/core/Cargo.toml index 4fe71bb1..75ccaafd 100644 --- a/src/crates/core/Cargo.toml +++ b/src/crates/core/Cargo.toml @@ -111,6 +111,7 @@ tokio-tungstenite = { workspace = true } russh = { version = "0.45", optional = true } russh-sftp = { version = "2.1", optional = true } russh-keys = { version = "0.45", features = ["openssl"], optional = true } +openssl = { workspace = true, optional = true } shellexpand = { version = "3", optional = true } ssh_config = { version = "0.1", optional = true } @@ -129,4 +130,4 @@ tauri = { workspace = true, optional = true } [features] default = ["ssh-remote"] tauri-support = ["tauri"] # Optional tauri support -ssh-remote = ["russh", "russh-sftp", "russh-keys", "shellexpand", "ssh_config"] # Optional SSH remote support +ssh-remote = ["russh", "russh-sftp", "russh-keys", "openssl", "shellexpand", "ssh_config"] # Optional SSH remote support From 4098636e9638582a5b2de1d3dd34587a0706abbf Mon Sep 17 00:00:00 2001 From: bowen628 Date: Sat, 21 Mar 2026 01:09:59 +0800 Subject: [PATCH 3/5] fix: improve SSH error diagnostics, algorithm compatibility, and Windows build MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Make HandlerError carry actual error message (was unit struct, discarding all info) - Implement disconnected() callback to capture real SSH disconnect reason - 
Add DH_G14_SHA1/DH_G1_SHA1 KEX and SSH_RSA host key for legacy server compatibility - Improve error message when server closes connection before sending SSH banner - Fix win32job missing from bitfun-core Windows target dependencies - Fix type annotation needed for MutexGuard in process_manager.rs - Fix RemoteFileBrowser cancel button using missing i18n key (common.cancel → actions.cancel) Co-Authored-By: Claude Sonnet 4.6 --- src/crates/core/Cargo.toml | 3 + .../core/src/service/remote_ssh/manager.rs | 131 +++++++++++++++--- src/crates/core/src/util/process_manager.rs | 2 +- .../features/ssh-remote/RemoteFileBrowser.tsx | 2 +- 4 files changed, 117 insertions(+), 21 deletions(-) diff --git a/src/crates/core/Cargo.toml b/src/crates/core/Cargo.toml index 75ccaafd..9a0ea9b1 100644 --- a/src/crates/core/Cargo.toml +++ b/src/crates/core/Cargo.toml @@ -127,6 +127,9 @@ bitfun-transport = { path = "../transport" } # Tauri dependency (optional, enabled only when needed) tauri = { workspace = true, optional = true } +[target.'cfg(windows)'.dependencies] +win32job = { workspace = true } + [features] default = ["ssh-remote"] tauri-support = ["tauri"] # Optional tauri support diff --git a/src/crates/core/src/service/remote_ssh/manager.rs b/src/crates/core/src/service/remote_ssh/manager.rs index 9e757c22..a1f1a9c3 100644 --- a/src/crates/core/src/service/remote_ssh/manager.rs +++ b/src/crates/core/src/service/remote_ssh/manager.rs @@ -7,7 +7,7 @@ use crate::service::remote_ssh::types::{ SSHConfigEntry, SSHConfigLookupResult, }; use anyhow::{anyhow, Context}; -use russh::client::{Handle, Handler, Msg}; +use russh::client::{DisconnectReason, Handle, Handler, Msg}; use russh_keys::key::PublicKey; use russh_keys::PublicKeyBase64; use russh_sftp::client::fs::ReadDir; @@ -50,6 +50,11 @@ struct SSHHandler { /// Host info for known hosts lookup host: Option<String>, port: Option<u16>, + /// Stores the real disconnect reason so callers get a useful error message.
+ /// russh's run() absorbs errors internally; we capture them here and + /// surface them after connect_stream() returns. + /// Uses std::sync::Mutex so it can be read from sync map_err closures. + disconnect_reason: Arc<std::sync::Mutex<Option<String>>>, } impl SSHHandler { @@ -61,6 +66,7 @@ impl SSHHandler { known_hosts: None, host: None, port: None, + disconnect_reason: Arc::new(std::sync::Mutex::new(None)), } } @@ -72,6 +78,7 @@ impl SSHHandler { known_hosts: None, host: None, port: None, + disconnect_reason: Arc::new(std::sync::Mutex::new(None)), } } @@ -86,6 +93,7 @@ impl SSHHandler { known_hosts: None, host: None, port: None, + disconnect_reason: Arc::new(std::sync::Mutex::new(None)), } } @@ -93,37 +101,40 @@ impl SSHHandler { host: String, port: u16, known_hosts: Arc>>, - ) -> Self { - Self { + ) -> (Self, Arc<std::sync::Mutex<Option<String>>>) { + let disconnect_reason = Arc::new(std::sync::Mutex::new(None)); + let handler = Self { expected_key: None, verify_callback: None, known_hosts: Some(known_hosts), host: Some(host), port: Some(port), - } + disconnect_reason: disconnect_reason.clone(), + }; + (handler, disconnect_reason) } } #[derive(Debug)] -struct HandlerError; +struct HandlerError(String); impl std::fmt::Display for HandlerError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "HandlerError") + write!(f, "{}", self.0) } } impl std::error::Error for HandlerError {} impl From<russh::Error> for HandlerError { - fn from(_: russh::Error) -> Self { - HandlerError + fn from(e: russh::Error) -> Self { + HandlerError(format!("{:?}", e)) } } impl From<String> for HandlerError { - fn from(_s: String) -> Self { - HandlerError + fn from(s: String) -> Self { + HandlerError(s) } } @@ -145,7 +156,10 @@ impl Handler for SSHHandler { } log::warn!("Server key mismatch for {}:{}. 
Expected fingerprint: {}, got: {}", host, port, expected.fingerprint(), server_fingerprint); - return Err(HandlerError); + return Err(HandlerError(format!( + "Host key mismatch for {}:{}: expected {}, got {}", + host, port, expected.fingerprint(), server_fingerprint + ))); } // 2. Check known_hosts for this host @@ -154,7 +168,6 @@ impl Handler for SSHHandler { let key = format!("{}:{}", host, port); let known_guard = known_hosts.read().await; if let Some(known) = known_guard.get(&key) { - // Clone the fingerprint to avoid borrow issues let stored_fingerprint = known.fingerprint.clone(); drop(known_guard); @@ -162,12 +175,15 @@ impl Handler for SSHHandler { log::debug!("Server key verified from known_hosts for {}:{}", host, port); return Ok(true); } else { - // Key changed - potential security issue! log::warn!( "Host key changed for {}:{}. Expected: {}, got: {}", host, port, stored_fingerprint, server_fingerprint ); - return Err(HandlerError); + return Err(HandlerError(format!( + "Host key changed for {}:{} — stored fingerprint {} does not match server fingerprint {}. \ + If the server key was legitimately updated, clear the known host entry and reconnect.", + host, port, stored_fingerprint, server_fingerprint + ))); } } } @@ -181,7 +197,7 @@ impl Handler for SSHHandler { log::debug!("Server key verified via callback for {}:{}", host, port); return Ok(true); } - return Err(HandlerError); + return Err(HandlerError("Host key rejected by verify callback".to_string())); } // 4. 
First time connection - accept the key (like standard SSH client's StrictHostKeyChecking=accept-new) @@ -194,6 +210,32 @@ ); Ok(true) } + + async fn disconnected( + &mut self, + reason: DisconnectReason<Self::Error>, + ) -> Result<(), Self::Error> { + let msg = match &reason { + DisconnectReason::ReceivedDisconnect(info) => { + format!( + "Server sent disconnect: {:?} — {}", + info.reason_code, info.message + ) + } + DisconnectReason::Error(e) => { + format!("Connection closed with error: {}", e) + } + }; + log::warn!("SSH disconnected ({}:{}): {}", self.host.as_deref().unwrap_or("?"), self.port.unwrap_or(22), msg); + if let Ok(mut guard) = self.disconnect_reason.lock() { + *guard = Some(msg); + } + // Propagate errors so russh surfaces them; swallow clean server disconnect. + match reason { + DisconnectReason::ReceivedDisconnect(_) => Ok(()), + DisconnectReason::Error(e) => Err(e), + } + } } /// SSH Connection Manager @@ -686,11 +728,36 @@ inactivity_timeout: Some(std::time::Duration::from_secs(60)), keepalive_interval: Some(std::time::Duration::from_secs(30)), keepalive_max: 3, + // Broad algorithm list for compatibility with both modern and legacy SSH servers. + // Modern algorithms first (preferred), legacy ones appended as fallback.
+ preferred: russh::Preferred { + // KEX: modern curve25519 first, then older DH groups for legacy servers + kex: std::borrow::Cow::Owned(vec![ + russh::kex::CURVE25519, + russh::kex::CURVE25519_PRE_RFC_8731, + russh::kex::DH_G16_SHA512, + russh::kex::DH_G14_SHA256, + russh::kex::DH_G14_SHA1, // legacy servers + russh::kex::DH_G1_SHA1, // very old servers + russh::kex::EXTENSION_SUPPORT_AS_CLIENT, + russh::kex::EXTENSION_OPENSSH_STRICT_KEX_AS_CLIENT, + ]), + // Host key algorithms: include ssh-rsa for older servers + key: std::borrow::Cow::Owned(vec![ + russh_keys::key::ED25519, + russh_keys::key::ECDSA_SHA2_NISTP256, + russh_keys::key::ECDSA_SHA2_NISTP521, + russh_keys::key::RSA_SHA2_256, + russh_keys::key::RSA_SHA2_512, + russh_keys::key::SSH_RSA, // legacy servers that only advertise ssh-rsa + ]), + ..russh::Preferred::DEFAULT + }, ..Default::default() }); // Create handler with known_hosts for verification - let handler = SSHHandler::with_known_hosts( + let (handler, disconnect_reason) = SSHHandler::with_known_hosts( config.host.clone(), config.port, self.known_hosts.clone(), @@ -698,13 +765,39 @@ impl SSHConnectionManager { // SSH handshake with timeout log::info!("Starting SSH handshake to {}", addr); - let mut handle = tokio::time::timeout( + let connect_result = tokio::time::timeout( std::time::Duration::from_secs(timeout_secs), russh::client::connect_stream(ssh_config, stream, handler), ) .await - .map_err(|_| anyhow!("SSH handshake timeout after {} seconds", timeout_secs))? - .map_err(|e| anyhow!("Failed to establish SSH connection: {:?}", e))?; + .map_err(|_| anyhow!("SSH handshake timeout after {} seconds", timeout_secs))?; + + let mut handle = connect_result.map_err(|e| { + // Try to surface the real disconnect reason captured in the handler. + // russh's run() absorbs errors; our disconnected() callback stores them. 
+ let real_reason = disconnect_reason + .lock() + .ok() + .and_then(|g| g.clone()); + if let Some(reason) = real_reason { + anyhow!("SSH handshake failed: {}", reason) + } else { + // HandlerError("Disconnect") with no stored reason means the server + // closed the TCP connection before sending any SSH banner. + // This typically means: sshd is not running, max connections reached, + // or a firewall/IP ban is in effect. + let e_dbg = format!("{:?}", e); + if e_dbg.contains("Disconnect") { + anyhow!( + "SSH connection refused: server {}:{} closed the connection without sending an SSH banner. \ + Check that sshd is running and accepting connections.", + config.host, config.port + ) + } else { + anyhow!("Failed to establish SSH connection: {:?}", e) + } + } + })?; log::info!("SSH handshake completed successfully"); // Authenticate based on auth method diff --git a/src/crates/core/src/util/process_manager.rs b/src/crates/core/src/util/process_manager.rs index 839ef6e7..4185b703 100644 --- a/src/crates/core/src/util/process_manager.rs +++ b/src/crates/core/src/util/process_manager.rs @@ -74,7 +74,7 @@ impl ProcessManager { Ok(guard) => guard, Err(poisoned) => { warn!("Process manager job mutex was poisoned during cleanup, recovering lock"); - poisoned.into_inner() + { let guard: std::sync::MutexGuard<'_, Option<win32job::Job>> = poisoned.into_inner(); guard } } }; job_guard.take(); diff --git a/src/web-ui/src/features/ssh-remote/RemoteFileBrowser.tsx b/src/web-ui/src/features/ssh-remote/RemoteFileBrowser.tsx index 8b623ea2..c291eadf 100644 --- a/src/web-ui/src/features/ssh-remote/RemoteFileBrowser.tsx +++ b/src/web-ui/src/features/ssh-remote/RemoteFileBrowser.tsx @@ -502,7 +502,7 @@ export const RemoteFileBrowser: React.FC = ({