feat: Allow specifying mmproj path during model loading
The `loadOptions` interface in `AIEngine.ts` now includes an optional `mmprojPath` property, letting callers provide the path to a multimodal projector (mmproj) file when loading a model; llama.cpp requires this file for vision-capable models. `llamacpp-extension/src/index.ts` has been updated to pass the path to the llama.cpp server via `--mmproj` when it is provided.
parent fbfaaf43c5
commit 07d76dc871
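For illustration, a caller can now pass the projector path alongside the model weights. This is a minimal sketch, not code from the commit: the import path and the concrete ids and file paths are assumptions; only the `loadOptions` fields come from the diff below.

    // Sketch only: import path and file paths are hypothetical.
    import type { loadOptions } from './AIEngine'

    // Illustrative values; the mmproj file is the multimodal projector
    // that llama.cpp needs for vision-capable models.
    const opts: loadOptions = {
      modelId: 'qwen2-vl-7b',
      modelPath: '/models/qwen2-vl-7b/model.gguf',
      mmprojPath: '/models/qwen2-vl-7b/mmproj.gguf', // optional; omit for text-only models
      port: 8081, // optional; the extension falls back to 8080
    }

The object would then be handed to whatever load entry point the engine exposes, and the extension turns it into server arguments as shown in the last hunk.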
@@ -103,6 +103,7 @@ export type listResult = modelInfo[]
 export interface loadOptions {
   modelId: string
   modelPath: string
+  mmprojPath?: string
   port?: number
 }
 
@@ -181,7 +182,7 @@ export abstract class AIEngine extends BaseExtension
   /**
    * Unloads a model from memory
    */
-  abstract unload(opts: unloadOptions): Promise<unloadResult>
+  abstract unload(sessionId: string): Promise<unloadResult>

   /**
    * Sends a chat request to the model
@@ -343,6 +343,9 @@ export default class llamacpp_extension extends AIEngine
     args.push('-m', opts.modelPath)
     args.push('-a', opts.modelId)
     args.push('--port', String(opts.port || 8080)) // Default port if not specified
+    if (opts.mmprojPath) {
+      args.push('--mmproj', opts.mmprojPath)
+    }

     if (cfg.ctx_size !== undefined) {
       args.push('-c', String(cfg.ctx_size))
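For the option values used in the sketch above, the pushes in this hunk would leave args looking roughly like this before the server process is spawned; the values are illustrative, not from the commit:

    const args = [
      '-m', '/models/qwen2-vl-7b/model.gguf',
      '-a', 'qwen2-vl-7b',
      '--port', '8081',
      '--mmproj', '/models/qwen2-vl-7b/mmproj.gguf',
      // ...followed by config-driven flags such as '-c' when cfg.ctx_size is set
    ]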