diff --git a/core/src/browser/extensions/engines/AIEngine.ts b/core/src/browser/extensions/engines/AIEngine.ts
index 3a47cb4ea..ea63ffa13 100644
--- a/core/src/browser/extensions/engines/AIEngine.ts
+++ b/core/src/browser/extensions/engines/AIEngine.ts
@@ -158,17 +158,6 @@ export interface chatOptions {
 // Output for /chat will be Promise<chatCompletion> for non-streaming
 // or Promise<AsyncIterable<chatCompletionChunk>> for streaming
 
-// 6. /delete
-export interface deleteOptions {
-  providerId: string
-  modelId: string // The ID of the model to delete (implies finding its path)
-  modelPath?: string // Optionally, direct path can be provided
-}
-export interface deleteResult {
-  success: boolean
-  error?: string
-}
-
 // 7. /import
 export interface ImportOptions {
   [key: string]: any
@@ -226,7 +215,7 @@ export abstract class AIEngine extends BaseExtension {
   /**
    * Deletes a model
    */
-  abstract delete(opts: deleteOptions): Promise<deleteResult>
+  abstract delete(modelId: string): Promise<void>
 
   /**
    * Imports a model
diff --git a/extensions/llamacpp-extension/src/index.ts b/extensions/llamacpp-extension/src/index.ts
index 77257f17e..f561cc3d8 100644
--- a/extensions/llamacpp-extension/src/index.ts
+++ b/extensions/llamacpp-extension/src/index.ts
@@ -21,8 +21,6 @@ import {
   chatOptions,
   chatCompletion,
   chatCompletionChunk,
-  deleteOptions,
-  deleteResult,
   ImportOptions,
   chatCompletionRequest,
   events,
@@ -35,6 +33,11 @@ interface DownloadItem {
   save_path: string
 }
 
+interface ModelConfig {
+  model_path: string
+  mmproj_path?: string
+}
+
 /**
  * Helper to convert GGUF model filename to a more structured ID/name
  * Example: "mistral-7b-instruct-v0.2.Q4_K_M.gguf" -> { baseModelId: "mistral-7b-instruct-v0.2", quant: "Q4_K_M" }
@@ -59,6 +62,15 @@ function parseGGUFFileName(filename: string): {
  * The class provides methods for initializing and stopping a model, and for making inference requests.
  * It also subscribes to events emitted by the @janhq/core package and handles new message requests.
  */
+
+// Folder structure for downloaded models:
+// /models/llamacpp/<modelId>/
+//   - model.yml (required)
+//   - model.gguf (optional, present if downloaded from URL)
+//   - mmproj.gguf (optional, present if mmproj exists and it was downloaded from URL)
+//
+// Contents of model.yml can be found in the ModelConfig interface
+
 export default class llamacpp_extension extends AIEngine {
   provider: string = 'llamacpp'
   readonly providerId: string = 'llamacpp'
@@ -155,15 +167,10 @@ export default class llamacpp_extension extends AIEngine {
 
     // TODO: check if files are valid GGUF files
 
+    const modelConfig = { model_path: modelPath, mmproj_path: mmprojPath } as ModelConfig
     await invoke(
       'write_yaml',
-      {
-        data: {
-          model_path: modelPath,
-          mmproj_path: mmprojPath,
-        },
-        savePath: `${modelDir}/model.yml`,
-      },
+      { data: modelConfig, savePath: `${modelDir}/model.yml` },
     )
   }
 
@@ -430,8 +437,14 @@ export default class llamacpp_extension extends AIEngine {
     return (await response.json()) as chatCompletion
   }
 
-  override async delete(opts: deleteOptions): Promise<deleteResult> {
-    throw new Error('method not implemented yet')
+  override async delete(modelId: string): Promise<void> {
+    const modelDir = await joinPath([this.modelsBasePath, this.provider, modelId])
+
+    if (!(await fs.existsSync(await joinPath([modelDir, 'model.yml'])))) {
+      throw new Error(`Model ${modelId} does not exist`)
+    }
+
+    await fs.rm(modelDir)
  }
 
   // Optional method for direct client access
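
Note: a minimal caller-side sketch of the reworked delete contract. Only the delete(modelId: string): Promise<void> signature comes from this diff; the removeLocalModel helper, the engine parameter, and the import path are illustrative assumptions.

// Illustrative usage of the new AIEngine.delete(modelId) API.
// Assumes AIEngine is exported from the core package as in the existing codebase.
import { AIEngine } from '@janhq/core'

async function removeLocalModel(engine: AIEngine, modelId: string): Promise<void> {
  try {
    // For the llamacpp provider this removes /models/llamacpp/<modelId>/,
    // i.e. model.yml plus any downloaded model.gguf / mmproj.gguf files.
    await engine.delete(modelId)
  } catch (err) {
    // delete() now throws on failure instead of resolving { success, error }.
    console.error(`Failed to delete ${modelId}:`, err)
    throw err
  }
}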