make it compile

parent 0551b0bfd2
commit 15f0b11c0d
@@ -8,18 +8,18 @@ use tokio::task::JoinHandle;

/// Server handle type for managing the proxy server lifecycle
pub type ServerHandle = JoinHandle<Result<(), Box<dyn std::error::Error + Send + Sync>>>;
use tokio::{process::Child, sync::Mutex};

#[derive(Default)]
pub struct AppState {
    pub app_token: Option<String>,
    pub mcp_servers: Arc<Mutex<HashMap<String, RunningService<RoleClient, ()>>>>,
    pub download_manager: Arc<Mutex<DownloadManagerState>>,
    pub cortex_restart_count: Arc<Mutex<u32>>,
    pub cortex_killed_intentionally: Arc<Mutex<bool>>,
    pub mcp_restart_counts: Arc<Mutex<HashMap<String, u32>>>,
    pub mcp_active_servers: Arc<Mutex<HashMap<String, serde_json::Value>>>,
    pub mcp_successfully_connected: Arc<Mutex<HashMap<String, bool>>>,
    pub server_handle: Arc<Mutex<Option<ServerHandle>>>,
    pub llama_server_process: Arc<Mutex<Option<Child>>>,
}
pub fn generate_app_token() -> String {
    rand::thread_rng()
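The hunk cuts off inside generate_app_token, so only the rand::thread_rng() call is visible. A minimal sketch of how such a token generator is commonly written with the rand 0.8-style API (token length and character set here are assumptions, not the committed code):

use rand::{distributions::Alphanumeric, Rng};

// Hypothetical completion: sample a fixed-length alphanumeric token from the
// thread-local RNG. Only the `rand::thread_rng()` call appears in the diff.
pub fn generate_app_token() -> String {
    rand::thread_rng()
        .sample_iter(&Alphanumeric)
        .take(32) // assumed length
        .map(char::from)
        .collect()
}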
@@ -1,8 +1,7 @@
use std::path::PathBuf;
use std::sync::Arc;
use tauri::path::BaseDirectory;
use tauri::{AppHandle, Manager, State}; // Import Manager trait
use tokio::process::{Child, Command};
use tokio::sync::Mutex;
use tokio::process::Command;

use crate::core::state::AppState;
@@ -23,6 +22,16 @@ pub enum ServerError {
    Tauri(#[from] tauri::Error),
}

// impl serialization for tauri
impl serde::Serialize for ServerError {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(self.to_string().as_ref())
    }
}

type ServerResult<T> = Result<T, ServerError>;

// --- Helper function to find the server binary ---
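The new Serialize impl turns the error into a plain string so Tauri commands can return it across the IPC boundary. The enum definition itself sits above this hunk; a hedged sketch of what it might look like, with the ResourcePathError and BinaryNotFound variants inferred from constructors used later in this diff (the thiserror derive and the display messages are assumptions):

use thiserror::Error;

// Sketch only: variants inferred from `ServerError::ResourcePathError(..)`,
// `ServerError::BinaryNotFound(..)`, and `Tauri(#[from] tauri::Error)` seen in
// this diff; the message strings are illustrative.
#[derive(Debug, Error)]
pub enum ServerError {
    #[error("failed to resolve resource path: {0}")]
    ResourcePathError(String),
    #[error("server binary not found: {0}")]
    BinaryNotFound(String),
    #[error(transparent)]
    Tauri(#[from] tauri::Error),
}

Serializing to a string, as the impl in the hunk does, is the usual way to make such an error type usable as the error half of a #[tauri::command] Result.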
@@ -38,26 +47,26 @@ fn get_server_path(app_handle: &AppHandle) -> ServerResult<PathBuf> {

    app_handle
        .path()
        .resolve_resource(relative_path)
        .map_err(|e| ServerError::ResourcePathError(e.to_string()))?
        .ok_or_else(|| {
            ServerError::BinaryNotFound(format!(
                "Could not resolve resource path for '{}'",
                if cfg!(windows) {
                    "engines/llama-server.exe"
                } else {
                    "engines/llama-server"
                } // TODO: ADJUST THIS PATH
            ))
        })
        .resolve(relative_path, BaseDirectory::Resource)
        .map_err(|e| ServerError::ResourcePathError(e.to_string()))
    // .ok_or_else(|| {
    //     ServerError::BinaryNotFound(format!(
    //         "Could not resolve resource path for '{}'",
    //         if cfg!(windows) {
    //             "engines/llama-server.exe"
    //         } else {
    //             "engines/llama-server"
    //         } // TODO: ADJUST THIS PATH
    //     ))
    // })
}
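The substantive change here is the path API: the resolve_resource(..) chain gives way to Tauri v2's PathResolver::resolve with an explicit BaseDirectory::Resource, which matches the new use tauri::path::BaseDirectory import above. A standalone sketch of that pattern, reusing the resource names from the diff (the wrapper function itself is illustrative):

use std::path::PathBuf;
use tauri::path::BaseDirectory;
use tauri::{AppHandle, Manager};

// Sketch of the Tauri v2 resolution pattern used above: `path()` comes from
// the `Manager` trait, and `resolve` takes a relative path plus the base
// directory to resolve it against.
fn resolve_llama_server(app_handle: &AppHandle) -> tauri::Result<PathBuf> {
    let relative_path = if cfg!(windows) {
        "engines/llama-server.exe"
    } else {
        "engines/llama-server"
    };
    app_handle
        .path()
        .resolve(relative_path, BaseDirectory::Resource)
}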
// --- Load Command ---
#[tauri::command]
pub async fn load(
    app_handle: AppHandle, // Get the AppHandle
    state: State<'_, AppState>, // Access the shared state
    args: Vec<String>, // Arguments from the frontend
    app_handle: AppHandle, // Get the AppHandle
    state: State<'_, AppState>, // Access the shared state
    args: Vec<String>, // Arguments from the frontend
) -> ServerResult<()> {
    let mut process_lock = state.llama_server_process.lock().await;
@@ -71,8 +80,14 @@ pub async fn load(
    log::info!("Using arguments: {:?}", args);

    if !server_path.exists() {
        log::error!("Server binary not found at expected path: {:?}", server_path);
        return Err(ServerError::BinaryNotFound(format!("Binary not found at {:?}", server_path)));
        log::error!(
            "Server binary not found at expected path: {:?}",
            server_path
        );
        return Err(ServerError::BinaryNotFound(format!(
            "Binary not found at {:?}",
            server_path
        )));
    }

    // Configure the command to run the server
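The hunk stops at the "Configure the command to run the server" comment, so the spawn itself is not shown. A hedged sketch of how that step typically looks with tokio::process::Command, given the llama_server_process: Arc<Mutex<Option<Child>>> field on AppState (the helper and the kill_on_drop flag are assumptions):

use std::path::PathBuf;
use tokio::process::{Child, Command};

// Hypothetical continuation: spawn the resolved binary with the arguments the
// frontend passed in; the caller keeps the returned Child alive in AppState.
fn spawn_llama_server(server_path: PathBuf, args: Vec<String>) -> std::io::Result<Child> {
    Command::new(server_path)
        .args(args)
        .kill_on_drop(true) // assumption: tear the sidecar down if the handle is dropped
        .spawn()
}

With process_lock already held, the load command would then store the handle along the lines of *process_lock = Some(child);.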
@@ -88,19 +88,18 @@ pub fn run() {
            core::hardware::get_system_info,
            core::hardware::get_system_usage,
            // llama-cpp extension
            core::utils::extensions::inference_llamacpp_extension::load,
            core::utils::extensions::inference_llamacpp_extension::unload
            core::utils::extensions::inference_llamacpp_extension::server::load,
            core::utils::extensions::inference_llamacpp_extension::server::unload,
        ])
        .manage(AppState {
            app_token: Some(generate_app_token()),
            mcp_servers: Arc::new(Mutex::new(HashMap::new())),
            download_manager: Arc::new(Mutex::new(DownloadManagerState::default())),
            cortex_restart_count: Arc::new(Mutex::new(0)),
            cortex_killed_intentionally: Arc::new(Mutex::new(false)),
            mcp_restart_counts: Arc::new(Mutex::new(HashMap::new())),
            mcp_active_servers: Arc::new(Mutex::new(HashMap::new())),
            mcp_successfully_connected: Arc::new(Mutex::new(HashMap::new())),
            server_handle: Arc::new(Mutex::new(None)),
            llama_server_process: Arc::new(Mutex::new(None)),
        })
        .setup(|app| {
            app.handle().plugin(
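The handler list now registers server::load and server::unload, but the unload body is outside this diff. Purely as an illustration of the Arc<Mutex<Option<Child>>> lifecycle, a hedged sketch of what an unload command over this state could look like (the real implementation may differ):

use crate::core::state::AppState;
use tauri::State;

// Sketch only: take the stored Child out of AppState and kill it. The error
// type and exact semantics are assumptions.
#[tauri::command]
pub async fn unload(state: State<'_, AppState>) -> Result<(), String> {
    let mut process_lock = state.llama_server_process.lock().await;
    if let Some(mut child) = process_lock.take() {
        child.kill().await.map_err(|e| e.to_string())?;
    }
    Ok(())
}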