diff --git a/src-tauri/src/core/state.rs b/src-tauri/src/core/state.rs
index dab29aa85..b59aa0a3d 100644
--- a/src-tauri/src/core/state.rs
+++ b/src-tauri/src/core/state.rs
@@ -8,18 +8,18 @@ use tokio::task::JoinHandle;
 /// Server handle type for managing the proxy server lifecycle
 pub type ServerHandle = JoinHandle>>;
 
+use tokio::{process::Child, sync::Mutex};
 #[derive(Default)]
 pub struct AppState {
     pub app_token: Option<String>,
     pub mcp_servers: Arc>>>,
     pub download_manager: Arc<Mutex<DownloadManagerState>>,
-    pub cortex_restart_count: Arc<Mutex<u32>>,
-    pub cortex_killed_intentionally: Arc<Mutex<bool>>,
     pub mcp_restart_counts: Arc<Mutex<HashMap<String, u32>>>,
     pub mcp_active_servers: Arc>>,
     pub mcp_successfully_connected: Arc<Mutex<HashMap<String, bool>>>,
     pub server_handle: Arc<Mutex<Option<ServerHandle>>>,
+    pub llama_server_process: Arc<Mutex<Option<Child>>>,
 }
 
 pub fn generate_app_token() -> String {
     rand::thread_rng()
diff --git a/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/server.rs b/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/server.rs
index 4efa725cc..dc7b10757 100644
--- a/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/server.rs
+++ b/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/server.rs
@@ -1,8 +1,7 @@
 use std::path::PathBuf;
-use std::sync::Arc;
+use tauri::path::BaseDirectory;
 use tauri::{AppHandle, Manager, State}; // Import Manager trait
-use tokio::process::{Child, Command};
-use tokio::sync::Mutex;
+use tokio::process::Command;
 
 use crate::core::state::AppState;
 
@@ -23,6 +22,16 @@ pub enum ServerError {
     Tauri(#[from] tauri::Error),
 }
 
+// impl serialization for tauri
+impl serde::Serialize for ServerError {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        serializer.serialize_str(self.to_string().as_ref())
+    }
+}
+
 type ServerResult<T> = Result<T, ServerError>;
 
 // --- Helper function to find the server binary ---
@@ -38,26 +47,26 @@ fn get_server_path(app_handle: &AppHandle) -> ServerResult<PathBuf> {
 
     app_handle
         .path()
-        .resolve_resource(relative_path)
-        .map_err(|e| ServerError::ResourcePathError(e.to_string()))?
-        .ok_or_else(|| {
-            ServerError::BinaryNotFound(format!(
-                "Could not resolve resource path for '{}'",
-                if cfg!(windows) {
-                    "engines/llama-server.exe"
-                } else {
-                    "engines/llama-server"
-                } // TODO: ADJUST THIS PATH
-            ))
-        })
+        .resolve(relative_path, BaseDirectory::Resource)
+        .map_err(|e| ServerError::ResourcePathError(e.to_string()))
+    // .ok_or_else(|| {
+    //     ServerError::BinaryNotFound(format!(
+    //         "Could not resolve resource path for '{}'",
+    //         if cfg!(windows) {
+    //             "engines/llama-server.exe"
+    //         } else {
+    //             "engines/llama-server"
+    //         } // TODO: ADJUST THIS PATH
+    //     ))
+    // })
 }
 
 // --- Load Command ---
 #[tauri::command]
 pub async fn load(
-    app_handle: AppHandle, // Get the AppHandle
-    state: State<'_, AppState>, // Access the shared state
-    args: Vec<String>, // Arguments from the frontend
+    app_handle: AppHandle,      // Get the AppHandle
+    state: State<'_, AppState>, // Access the shared state
+    args: Vec<String>,          // Arguments from the frontend
 ) -> ServerResult<()> {
     let mut process_lock = state.llama_server_process.lock().await;
 
@@ -71,8 +80,14 @@ pub async fn load(
     log::info!("Using arguments: {:?}", args);
 
     if !server_path.exists() {
-        log::error!("Server binary not found at expected path: {:?}", server_path);
-        return Err(ServerError::BinaryNotFound(format!("Binary not found at {:?}", server_path)));
+        log::error!(
+            "Server binary not found at expected path: {:?}",
+            server_path
+        );
+        return Err(ServerError::BinaryNotFound(format!(
+            "Binary not found at {:?}",
+            server_path
+        )));
     }
 
     // Configure the command to run the server
diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs
index fa6ccaea4..4d46d19a2 100644
--- a/src-tauri/src/lib.rs
+++ b/src-tauri/src/lib.rs
@@ -88,19 +88,18 @@ pub fn run() {
             core::hardware::get_system_info,
             core::hardware::get_system_usage,
             // llama-cpp extension
-            core::utils::extensions::inference_llamacpp_extension::load,
-            core::utils::extensions::inference_llamacpp_extension::unload
+            core::utils::extensions::inference_llamacpp_extension::server::load,
+            core::utils::extensions::inference_llamacpp_extension::server::unload,
         ])
         .manage(AppState {
             app_token: Some(generate_app_token()),
             mcp_servers: Arc::new(Mutex::new(HashMap::new())),
             download_manager: Arc::new(Mutex::new(DownloadManagerState::default())),
-            cortex_restart_count: Arc::new(Mutex::new(0)),
-            cortex_killed_intentionally: Arc::new(Mutex::new(false)),
             mcp_restart_counts: Arc::new(Mutex::new(HashMap::new())),
             mcp_active_servers: Arc::new(Mutex::new(HashMap::new())),
             mcp_successfully_connected: Arc::new(Mutex::new(HashMap::new())),
             server_handle: Arc::new(Mutex::new(None)),
+            llama_server_process: Arc::new(Mutex::new(None)),
         })
         .setup(|app| {
             app.handle().plugin(