From 13806a3f0681846abb4f6a10e80aedf3506092c7 Mon Sep 17 00:00:00 2001
From: Akarshan
Date: Thu, 11 Sep 2025 09:50:55 +0530
Subject: [PATCH] Fixup sorting in determineBestBackend

---
 extensions/llamacpp-extension/src/index.ts | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/extensions/llamacpp-extension/src/index.ts b/extensions/llamacpp-extension/src/index.ts
index 0a3e11845..2de568ab0 100644
--- a/extensions/llamacpp-extension/src/index.ts
+++ b/extensions/llamacpp-extension/src/index.ts
@@ -541,10 +541,7 @@ export default class llamacpp_extension extends AIEngine {
     const getBackendCategory = (backendString: string): string | undefined => {
       if (backendString.includes('cu12.0')) return 'cuda-cu12.0'
       if (backendString.includes('cu11.7')) return 'cuda-cu11.7'
-      if (backendString.includes('vulkan')) {
-        // Only return vulkan category if we have enough GPU memory
-        return hasEnoughGpuMemory ? 'vulkan' : undefined
-      }
+      if (backendString.includes('vulkan')) return 'vulkan'
       if (backendString.includes('avx512')) return 'avx512'
       if (backendString.includes('avx2')) return 'avx2'
       if (
@@ -2011,7 +2008,7 @@ export default class llamacpp_extension extends AIEngine {
     const totalMemory = totalVRAM + totalRAM

     logger.info(
-      `Total VRAM: ${totalVRAM} bytes, Total RAM: ${totalRAM} bytes, Free: ${usableRAM} bytes, Total Memory: ${totalMemory} bytes`
+      `Total VRAM: ${totalVRAM} bytes, Total RAM: ${totalRAM} bytes, Total Memory: ${totalMemory} bytes`
     )

     return {
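
Note (not part of the patch): a minimal sketch of how determineBestBackend might rank the
categories returned by getBackendCategory once the Vulkan memory gate is removed. The
priority order and the helper below are illustrative assumptions, not the extension's
actual implementation.

    // Hypothetical category ranking; the order and this helper are assumptions
    // for illustration only, based on the categories visible in getBackendCategory.
    const CATEGORY_PRIORITY = [
      'cuda-cu12.0',
      'cuda-cu11.7',
      'vulkan',
      'avx512',
      'avx2',
    ]

    function sortBackendsByCategory(
      backends: string[],
      getCategory: (s: string) => string | undefined
    ): string[] {
      const rank = (s: string): number => {
        const i = CATEGORY_PRIORITY.indexOf(getCategory(s) ?? '')
        // Backends with an unrecognized category sort last.
        return i === -1 ? CATEGORY_PRIORITY.length : i
      }
      return [...backends].sort((a, b) => rank(a) - rank(b))
    }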