diff --git a/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/server.rs b/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/server.rs
index 7203d75b3..69586b57d 100644
--- a/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/server.rs
+++ b/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/server.rs
@@ -83,7 +83,13 @@ pub async fn load_llama_model(
         )));
     }
 
-    let port = 8080; // Default port
+    let port = args
+        .iter()
+        .position(|arg| arg == "--port")
+        .and_then(|i| args.get(i + 1))
+        .cloned()
+        .unwrap_or_default();
+
     let modelPath = args
         .iter()
        .position(|arg| arg == "-m")
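
A minimal standalone sketch of the positional flag lookup the hunk introduces, for reference. The helper name `arg_value`, the `DEFAULT_PORT` constant, and the parse-to-`u16`-with-fallback step are assumptions for illustration only; the patch itself keeps `port` as a `String` and falls back to an empty string via `unwrap_or_default()` when `--port` is absent.

```rust
/// Return the value that follows `flag` in `args`, if any.
/// Mirrors the `.position(..)` + `.get(i + 1)` pattern from the diff.
fn arg_value(args: &[String], flag: &str) -> Option<String> {
    args.iter()
        .position(|arg| arg == flag)
        .and_then(|i| args.get(i + 1))
        .cloned()
}

// Hypothetical fallback matching the previously hard-coded default.
const DEFAULT_PORT: u16 = 8080;

fn main() {
    let args = vec![
        "--port".to_string(),
        "3939".to_string(),
        "-m".to_string(),
        "models/llama.gguf".to_string(),
    ];

    // Assumed variant: parse the value to a number and fall back to the
    // old default when `--port` is missing or not numeric.
    let port: u16 = arg_value(&args, "--port")
        .and_then(|v| v.parse().ok())
        .unwrap_or(DEFAULT_PORT);

    // Same pattern the surrounding code uses for the `-m` model path.
    let model_path = arg_value(&args, "-m").unwrap_or_default();

    println!("port = {port}, model = {model_path}");
}
```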