From 9c25480c7bbca2e847250e3d486ec96b2b29b85e Mon Sep 17 00:00:00 2001
From: Akarshan Biswas
Date: Thu, 21 Aug 2025 16:01:31 +0530
Subject: [PATCH] fix: Update placeholder text and error message (#6263)

This commit improves the clarity of the llama.cpp extension's settings
placeholder and error reporting.

- Corrected the placeholder example from `GGML_VK_VISIBLE_DEVICES='0,1'` to
  `GGML_VK_VISIBLE_DEVICES=0,1`, removing the quotes so the example matches
  the expected value format.
- Replaced the ambiguous error message `"Failed to load llama-server: ${error}"`
  with the clearer `"Failed to load llamacpp backend"`; the detailed error is
  still written to the log.
---
 extensions/llamacpp-extension/settings.json | 2 +-
 extensions/llamacpp-extension/src/index.ts  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/extensions/llamacpp-extension/settings.json b/extensions/llamacpp-extension/settings.json
index 5f7fd1ca9..3c0964fc6 100644
--- a/extensions/llamacpp-extension/settings.json
+++ b/extensions/llamacpp-extension/settings.json
@@ -17,7 +17,7 @@
     "controllerType": "input",
     "controllerProps": {
       "value": "none",
-      "placeholder": "Eg, GGML_VK_VISIBLE_DEVICES='0,1'",
+      "placeholder": "Eg. GGML_VK_VISIBLE_DEVICES=0,1",
       "type": "text",
       "textAlign": "right"
     }
diff --git a/extensions/llamacpp-extension/src/index.ts b/extensions/llamacpp-extension/src/index.ts
index dc710d6e0..f4cdd83c8 100644
--- a/extensions/llamacpp-extension/src/index.ts
+++ b/extensions/llamacpp-extension/src/index.ts
@@ -1763,7 +1763,7 @@ export default class llamacpp_extension extends AIEngine {
       return dList
     } catch (error) {
       logger.error('Failed to query devices:\n', error)
-      throw new Error(`Failed to load llama-server: ${error}`)
+      throw new Error("Failed to load llamacpp backend")
     }
   }
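
For context on the second hunk, below is a minimal, self-contained sketch (not code from the repository) of the error-handling pattern it adjusts: the detailed failure is kept in the log, while callers receive the stable message `Failed to load llamacpp backend`. The `DeviceInfo` shape, `queryVulkanDevices` helper, and `logger` object are hypothetical stand-ins for the extension's internals.

```ts
// Sketch only: illustrates the log-detailed / throw-generic pattern from the
// patch. DeviceInfo, queryVulkanDevices, and logger are hypothetical names.
interface DeviceInfo {
  id: number
  name: string
}

const logger = {
  error: (...args: unknown[]) => console.error(...args),
}

// Hypothetical low-level probe that may reject, e.g. when no compatible
// llama.cpp backend or GPU runtime is available.
async function queryVulkanDevices(): Promise<DeviceInfo[]> {
  return [{ id: 0, name: 'Example GPU' }]
}

export async function listDevices(): Promise<DeviceInfo[]> {
  try {
    const dList = await queryVulkanDevices()
    return dList
  } catch (error) {
    // Keep the full cause in the log for debugging...
    logger.error('Failed to query devices:\n', error)
    // ...but surface a stable, user-facing message, as in the patch above.
    throw new Error('Failed to load llamacpp backend')
  }
}
```

Logging the raw error while throwing a generic one keeps backend internals and stack traces out of user-facing dialogs without losing them for debugging.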