diff --git a/core/src/browser/extensions/engines/AIEngine.ts b/core/src/browser/extensions/engines/AIEngine.ts
index 9730fac79..80ef1a4fb 100644
--- a/core/src/browser/extensions/engines/AIEngine.ts
+++ b/core/src/browser/extensions/engines/AIEngine.ts
@@ -103,7 +103,6 @@ export type listResult = modelInfo[]
 export interface loadOptions {
   modelPath: string
   port?: number
-  n_ctx?: number
 }
 
 export interface sessionInfo {
diff --git a/extensions/llamacpp-extension/src/index.ts b/extensions/llamacpp-extension/src/index.ts
index 3542a98a8..914b07f16 100644
--- a/extensions/llamacpp-extension/src/index.ts
+++ b/extensions/llamacpp-extension/src/index.ts
@@ -14,9 +14,7 @@ import {
   modelInfo,
   loadOptions,
   sessionInfo,
-  unloadOptions,
   unloadResult,
-  chatOptions,
   chatCompletion,
   chatCompletionChunk,
   ImportOptions,
@@ -316,7 +314,7 @@ export default class llamacpp_extension extends AIEngine {
     args.push('--port', String(opts.port || 8080)) // Default port if not specified
 
     if (opts.n_ctx !== undefined) {
-      args.push('-c', String(opts.n_ctx))
+      args.push('-c', String(cfg.ctx_size))
     }
 
     // Add remaining options from the interface
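For reference, a minimal sketch (not code from this repository) of what the changed argument-building path amounts to: the context size is read from the extension's own configuration (`cfg.ctx_size`) rather than passed by callers through `loadOptions.n_ctx`. The `LlamacppConfig` shape and the `buildServerArgs` helper are assumptions for illustration, and the sketch guards on `cfg.ctx_size` directly.

```typescript
// Sketch only: illustrates the post-patch behaviour under the assumptions above.
interface LoadOptions {
  modelPath: string
  port?: number
}

// Assumed shape of the llamacpp extension's settings; only ctx_size is shown.
interface LlamacppConfig {
  ctx_size?: number
}

// Hypothetical helper: builds the llama-server CLI arguments.
function buildServerArgs(opts: LoadOptions, cfg: LlamacppConfig): string[] {
  const args: string[] = ['--model', opts.modelPath]
  args.push('--port', String(opts.port ?? 8080)) // Default port if not specified

  // Context size now comes from the extension config, not from the load call.
  if (cfg.ctx_size !== undefined) {
    args.push('-c', String(cfg.ctx_size))
  }
  return args
}

// Example output: ['--model', '/models/llama.gguf', '--port', '8080', '-c', '4096']
console.log(buildServerArgs({ modelPath: '/models/llama.gguf' }, { ctx_size: 4096 }))
```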