* add llamacpp plugin
* Refactor llamacpp plugin
* add utils plugin
* remove utils folder
* add hardware implementation
* add utils folder + move utils function
* organize cargo files
* refactor utils src
* refactor util
* apply fmt
* fmt
* Update gguf + reformat
* add permission for gguf commands
* fix cargo test windows
* revert yarn lock
* remove cargo.lock for hardware plugin
* ignore cargo.lock file
* Fix hardware invoke + refactor hardware + refactor tests, constants
* use api wrapper in extension to invoke hardware call + api wrapper build integration
* add newline at EOF (per Akarshan)
* add vi mock for getSystemInfo
const COMMANDS: &[&str] = &[
    // Cleanup command
    "cleanup_llama_processes",
    // LlamaCpp server commands
    "load_llama_model",
    "unload_llama_model",
    "get_devices",
    "generate_api_key",
    "is_process_running",
    "get_random_port",
    "find_session_by_model",
    "get_loaded_models",
    "get_all_sessions",
    "get_session_by_model",
    // GGUF commands
    "read_gguf_metadata",
];

fn main() {
    tauri_plugin::Builder::new(COMMANDS).build();
}
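This build script follows the standard Tauri v2 plugin pattern: tauri_plugin::Builder::new(COMMANDS).build() reads the command list and generates the plugin's permission definitions (identifiers along the lines of allow-read-gguf-metadata / deny-read-gguf-metadata), which an app must grant before the frontend can invoke those commands; this is presumably what the "add permission for gguf commands" commit refers to. Below is a minimal sketch of the runtime side, assuming the plugin registers under the name "llamacpp" and using get_random_port as an example; the handler body is illustrative, not the plugin's actual implementation.

    use tauri::{
        plugin::{Builder, TauriPlugin},
        Runtime,
    };

    // Illustrative handler for the `get_random_port` command declared in build.rs.
    // The real implementation lives elsewhere in the plugin; this body just asks
    // the OS for a free port by binding to port 0 and reporting the chosen port.
    #[tauri::command]
    fn get_random_port() -> Result<u16, String> {
        std::net::TcpListener::bind("127.0.0.1:0")
            .and_then(|listener| listener.local_addr())
            .map(|addr| addr.port())
            .map_err(|err| err.to_string())
    }

    // Plugin entry point. Every command wired up here should also appear in the
    // COMMANDS array above so the build script can generate a permission for it.
    pub fn init<R: Runtime>() -> TauriPlugin<R> {
        Builder::new("llamacpp")
            .invoke_handler(tauri::generate_handler![get_random_port])
            .build()
    }

From the webview, such a command would then be reachable as invoke("plugin:llamacpp|get_random_port") once the matching permission is enabled in the app's capabilities.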