style: Rename camelCase to snake_case in llamacpp extension code

Rename identifiers throughout the llamacpp extension code to follow Rust naming conventions: variables, struct fields, and function parameters move from camelCase to snake_case, while struct and enum names move from camelCase to UpperCamelCase. This improves readability and consistency without altering functionality.
Authored by Akarshan on 2025-06-06 10:14:12 +05:30, committed by Louis
parent c2b606a3fc
commit 4ffc504150
2 changed files with 33 additions and 33 deletions
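The convention applied here, condensed from the type definitions in the diff below (a sketch with abbreviated variants, not the complete declarations): type names use UpperCamelCase, while fields, variables, and function parameters use snake_case.

// Before: camelCase type and field names, non-idiomatic in Rust
// pub enum serverError { ... }
// pub struct sessionInfo { pub modelId: String, pub modelPath: String, pub apiKey: String, ... }

// After: UpperCamelCase type names, snake_case fields
pub enum ServerError {
    BinaryNotFound(String),
    Io(std::io::Error),
}

pub struct SessionInfo {
    pub pid: String,        // opaque handle for unload/chat
    pub port: String,       // llama-server output port
    pub model_id: String,
    pub model_path: String, // path of the loaded model
    pub api_key: String,
}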


@@ -460,9 +460,9 @@ export default class llamacpp_extension extends AIEngine {
     try {
       // TODO: add LIBRARY_PATH
       const sInfo = await invoke<sessionInfo>('load_llama_model', {
-        backendPath: await getBackendExePath(backend, version),
-        libraryPath: await joinPath([this.providerPath, 'lib']),
-        args,
+        backend_path: await getBackendExePath(backend, version),
+        library_path: await joinPath([this.providerPath, 'lib']),
+        args: args
       })
       // Store the session info for later use
@@ -484,7 +484,7 @@ export default class llamacpp_extension extends AIEngine {
     try {
       // Pass the PID as the session_id
       const result = await invoke<unloadResult>('unload_llama_model', {
-        pid,
+        pid: pid
       })
       // If successful, remove from active sessions


@@ -13,9 +13,9 @@ use crate::core::state::AppState;
 type HmacSha256 = Hmac<Sha256>;
 // Error type for server commands
 #[derive(Debug, thiserror::Error)]
-pub enum serverError {
-    #[error("Server is already running")]
-    AlreadyRunning,
+pub enum ServerError {
+    // #[error("Server is already running")]
+    // AlreadyRunning,
     // #[error("Server is not running")]
     // NotRunning,
     #[error("Failed to locate server binary: {0}")]
@@ -27,7 +27,7 @@ pub enum serverError {
 }
 // impl serialization for tauri
-impl serde::Serialize for serverError {
+impl serde::Serialize for ServerError {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: serde::Serializer,
@@ -36,19 +36,19 @@ impl serde::Serialize for serverError {
     }
 }
-type ServerResult<T> = Result<T, serverError>;
+type ServerResult<T> = Result<T, ServerError>;
 #[derive(Debug, Serialize, Deserialize)]
-pub struct sessionInfo {
+pub struct SessionInfo {
     pub pid: String,  // opaque handle for unload/chat
     pub port: String, // llama-server output port
-    pub modelId: String,
-    pub modelPath: String, // path of the loaded model
-    pub apiKey: String,
+    pub model_id: String,
+    pub model_path: String, // path of the loaded model
+    pub api_key: String,
 }
 #[derive(serde::Serialize, serde::Deserialize)]
-pub struct unloadResult {
+pub struct UnloadResult {
     success: bool,
     error: Option<String>,
 }
@@ -60,7 +60,7 @@ pub async fn load_llama_model(
     backend_path: &str,
     library_path: Option<&str>,
     args: Vec<String>, // Arguments from the frontend
-) -> ServerResult<sessionInfo> {
+) -> ServerResult<SessionInfo> {
     let mut process_map = state.llama_server_process.lock().await;
     log::info!("Attempting to launch server at path: {:?}", backend_path);
@@ -72,7 +72,7 @@ pub async fn load_llama_model(
             "Server binary not found at expected path: {:?}",
             backend_path
         );
-        return Err(serverError::BinaryNotFound(format!(
+        return Err(ServerError::BinaryNotFound(format!(
             "Binary not found at {:?}",
             backend_path
         )));
@@ -85,21 +85,21 @@ pub async fn load_llama_model(
         .cloned()
         .unwrap_or_default();
-    let modelPath = args
+    let model_path = args
         .iter()
         .position(|arg| arg == "-m")
         .and_then(|i| args.get(i + 1))
         .cloned()
         .unwrap_or_default();
-    let apiKey = args
+    let api_key = args
         .iter()
         .position(|arg| arg == "--api-key")
         .and_then(|i| args.get(i + 1))
         .cloned()
         .unwrap_or_default();
-    let modelId = args
+    let model_id = args
         .iter()
         .position(|arg| arg == "-a")
         .and_then(|i| args.get(i + 1))
@@ -133,7 +133,7 @@ pub async fn load_llama_model(
     // command.stderr(Stdio::piped());
     // Spawn the child process
-    let child = command.spawn().map_err(serverError::Io)?;
+    let child = command.spawn().map_err(ServerError::Io)?;
     // Get the PID to use as session ID
     let pid = child.id().map(|id| id.to_string()).unwrap_or_else(|| {
@@ -146,12 +146,12 @@ pub async fn load_llama_model(
     // Store the child process handle in the state
     process_map.insert(pid.clone(), child);
-    let session_info = sessionInfo {
-        pid,
-        port,
-        modelId,
-        modelPath,
-        apiKey,
+    let session_info = SessionInfo {
+        pid: pid,
+        port: port,
+        model_id: model_id,
+        model_path: model_path,
+        api_key: api_key,
     };
     Ok(session_info)
@@ -162,7 +162,7 @@ pub async fn load_llama_model(
 pub async fn unload_llama_model(
     pid: String,
     state: State<'_, AppState>,
-) -> ServerResult<unloadResult> {
+) -> ServerResult<UnloadResult> {
     let mut process_map = state.llama_server_process.lock().await;
     match process_map.remove(&pid) {
         Some(mut child) => {
@@ -172,7 +172,7 @@ pub async fn unload_llama_model(
             Ok(_) => {
                 log::info!("Server process termination signal sent successfully");
-                Ok(unloadResult {
+                Ok(UnloadResult {
                     success: true,
                     error: None,
                 })
@@ -180,7 +180,7 @@ pub async fn unload_llama_model(
             Err(e) => {
                 log::error!("Failed to kill server process: {}", e);
-                Ok(unloadResult {
+                Ok(UnloadResult {
                     success: false,
                     error: Some(format!("Failed to kill server process: {}", e)),
                 })
@@ -193,7 +193,7 @@ pub async fn unload_llama_model(
                 pid
             );
-            Ok(unloadResult {
+            Ok(UnloadResult {
                 success: true,
                 error: None,
             })
@@ -203,10 +203,10 @@ pub async fn unload_llama_model(
 // crypto
 #[tauri::command]
-pub fn generate_api_key(modelId: String, apiSecret: String) -> Result<String, String> {
-    let mut mac = HmacSha256::new_from_slice(apiSecret.as_bytes())
+pub fn generate_api_key(model_id: String, api_secret: String) -> Result<String, String> {
+    let mut mac = HmacSha256::new_from_slice(api_secret.as_bytes())
         .map_err(|e| format!("Invalid key length: {}", e))?;
-    mac.update(modelId.as_bytes());
+    mac.update(model_id.as_bytes());
     let result = mac.finalize();
     let code_bytes = result.into_bytes();
     let hash = general_purpose::STANDARD.encode(code_bytes);
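The generate_api_key command in the last hunk derives a per-model API key by signing the model id with HMAC-SHA256 under a shared secret and base64-encoding the resulting tag; the tail of the function, which presumably returns the encoded hash, falls outside this hunk. Below is a minimal standalone sketch of that derivation using the same hmac, sha2, and base64 crates; the function name derive_api_key and the example inputs are illustrative and not part of the codebase.

use base64::{engine::general_purpose, Engine as _};
use hmac::{Hmac, Mac};
use sha2::Sha256;

type HmacSha256 = Hmac<Sha256>;

fn derive_api_key(model_id: &str, api_secret: &str) -> Result<String, String> {
    // Key the MAC with the shared secret; HMAC accepts keys of any length,
    // so this only fails in degenerate cases.
    let mut mac = HmacSha256::new_from_slice(api_secret.as_bytes())
        .map_err(|e| format!("Invalid key length: {}", e))?;
    // Authenticate the model id and base64-encode the 32-byte tag.
    mac.update(model_id.as_bytes());
    Ok(general_purpose::STANDARD.encode(mac.finalize().into_bytes()))
}

fn main() {
    // Illustrative inputs only.
    println!("{:?}", derive_api_key("llama3.2-1b-instruct", "local-secret"));
}

The derivation is deterministic, so the same model id and secret always produce the same key.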