diff --git a/extensions/llamacpp-extension/src/index.ts b/extensions/llamacpp-extension/src/index.ts
index cfda2a403..9bb3e6559 100644
--- a/extensions/llamacpp-extension/src/index.ts
+++ b/extensions/llamacpp-extension/src/index.ts
@@ -1649,7 +1649,7 @@ export default class llamacpp_extension extends AIEngine {
     if (cfg.main_gpu !== undefined && cfg.main_gpu != 0)
       args.push('--main-gpu', String(cfg.main_gpu))
     // Note: Older llama.cpp versions are no longer supported
-    if (cfg.flash_attn !== undefined || cfg.flash_attn === '') args.push('--flash-attn', String(cfg.flash_attn)) //default: auto = ON when supported
+    if (cfg.flash_attn !== undefined && cfg.flash_attn !== '') args.push('--flash-attn', String(cfg.flash_attn)) // default: auto = ON when supported
 
     // Boolean flags
     if (cfg.ctx_shift) args.push('--context-shift')
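
A minimal sketch (not part of the diff) of how the new guard behaves, assuming cfg.flash_attn is an optional string setting such as 'auto', 'on', or 'off', with an empty or undefined value meaning "do not pass the flag and let llama.cpp use its default". The flashAttnArgs helper below is hypothetical and only illustrates the condition on the changed line.

    // Hypothetical standalone illustration of the flash_attn guard; not code from index.ts.
    function flashAttnArgs(flash_attn: string | undefined): string[] {
      const args: string[] = []
      // Push the flag only when the setting is explicitly set and non-empty,
      // mirroring the condition on the added line of the diff.
      if (flash_attn !== undefined && flash_attn !== '')
        args.push('--flash-attn', String(flash_attn))
      return args
    }

    console.log(flashAttnArgs('auto'))    // ['--flash-attn', 'auto']
    console.log(flashAttnArgs('off'))     // ['--flash-attn', 'off']
    console.log(flashAttnArgs(''))        // []  flag omitted, llama.cpp default (auto) applies
    console.log(flashAttnArgs(undefined)) // []  flag omitted

The removed guard ("!== undefined || === ''") was redundant: an empty string is also not undefined, so the flag was pushed even when flash_attn was left empty. Requiring a defined, non-empty value keeps '--flash-attn' off the command line unless the user has configured it explicitly, which matches the "default: auto = ON when supported" comment.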