fix: correct flash_attn empty-string comparison (=== '' changed to !== '')
This commit is contained in:
parent
8b15fe4ef2
commit
7b6e4cd172
@ -1649,7 +1649,7 @@ export default class llamacpp_extension extends AIEngine {
|
||||
if (cfg.main_gpu !== undefined && cfg.main_gpu != 0)
|
||||
args.push('--main-gpu', String(cfg.main_gpu))
|
||||
// Note: Older llama.cpp versions are no longer supported
|
||||
if (cfg.flash_attn !== undefined || cfg.flash_attn === '') args.push('--flash-attn', String(cfg.flash_attn)) //default: auto = ON when supported
|
||||
if (cfg.flash_attn !== undefined || cfg.flash_attn !== '') args.push('--flash-attn', String(cfg.flash_attn)) //default: auto = ON when supported
|
||||
|
||||
// Boolean flags
|
||||
if (cfg.ctx_shift) args.push('--context-shift')
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user