From 429a319f93045197b205c48a67e8b02856210efa Mon Sep 17 00:00:00 2001
From: Louis
Date: Thu, 22 Aug 2024 16:10:40 +0700
Subject: [PATCH] fix: unable to import model due to missing metadata (#3439)

---
 extensions/model-extension/src/index.ts | 24 ++++++++++++++----------
 1 file changed, 14 insertions(+), 10 deletions(-)

diff --git a/extensions/model-extension/src/index.ts b/extensions/model-extension/src/index.ts
index 980218f21..e2f68a58c 100644
--- a/extensions/model-extension/src/index.ts
+++ b/extensions/model-extension/src/index.ts
@@ -574,7 +574,7 @@ export default class JanModelExtension extends ModelExtension {
       ])
     )
 
-    const eos_id = metadata['tokenizer.ggml.eos_token_id']
+    const eos_id = metadata?.['tokenizer.ggml.eos_token_id']
 
     if (!defaultModel) {
       console.error('Unable to find default model')
@@ -594,16 +594,18 @@ export default class JanModelExtension extends ModelExtension {
       ],
       parameters: {
         ...defaultModel.parameters,
-        stop: [metadata['tokenizer.ggml.tokens'][eos_id] ?? ''],
+        stop: eos_id
+          ? [metadata['tokenizer.ggml.tokens'][eos_id] ?? '']
+          : defaultModel.parameters.stop,
       },
       settings: {
         ...defaultModel.settings,
         prompt_template:
-          metadata.parsed_chat_template ??
+          metadata?.parsed_chat_template ??
           defaultModel.settings.prompt_template,
         ctx_len:
-          metadata['llama.context_length'] ?? defaultModel.settings.ctx_len,
-        ngl: (metadata['llama.block_count'] ?? 32) + 1,
+          metadata?.['llama.context_length'] ?? defaultModel.settings.ctx_len,
+        ngl: (metadata?.['llama.block_count'] ?? 32) + 1,
         llama_model_path: binaryFileName,
       },
       created: Date.now(),
@@ -683,7 +685,7 @@ export default class JanModelExtension extends ModelExtension {
       'retrieveGGUFMetadata',
       modelBinaryPath
     )
-    const eos_id = metadata['tokenizer.ggml.eos_token_id']
+    const eos_id = metadata?.['tokenizer.ggml.eos_token_id']
 
     const binaryFileName = await baseName(modelBinaryPath)
 
@@ -699,17 +701,19 @@ export default class JanModelExtension extends ModelExtension {
       ],
      parameters: {
         ...defaultModel.parameters,
-        stop: [metadata['tokenizer.ggml.tokens'][eos_id] ?? ''],
+        stop: eos_id
+          ? [metadata?.['tokenizer.ggml.tokens'][eos_id] ?? '']
+          : defaultModel.parameters.stop,
       },
       settings: {
         ...defaultModel.settings,
         prompt_template:
-          metadata.parsed_chat_template ??
+          metadata?.parsed_chat_template ??
           defaultModel.settings.prompt_template,
         ctx_len:
-          metadata['llama.context_length'] ?? defaultModel.settings.ctx_len,
-        ngl: (metadata['llama.block_count'] ?? 32) + 1,
+          metadata?.['llama.context_length'] ?? defaultModel.settings.ctx_len,
+        ngl: (metadata?.['llama.block_count'] ?? 32) + 1,
         llama_model_path: binaryFileName,
       },
       created: Date.now(),
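
Note (not part of the patch): a minimal TypeScript sketch of the guard the diff applies, assuming `metadata` is whatever `retrieveGGUFMetadata` returns and may be undefined or missing tokenizer fields. `GGUFMetadata`, `resolveStop`, and `defaultStop` are illustrative names, not the extension's actual API.

// Sketch of the fallback behaviour: without the optional chaining, indexing
// a missing metadata object (or indexing the token list with an undefined
// eos_id) throws and aborts the model import.
type GGUFMetadata = Record<string, any> | undefined

function resolveStop(metadata: GGUFMetadata, defaultStop: string[]): string[] {
  // Optional chaining keeps a missing metadata object from throwing here.
  const eos_id = metadata?.['tokenizer.ggml.eos_token_id']
  // Mirrors the patched logic: only index the token list when an EOS id was
  // found; otherwise keep the default model's stop tokens.
  return eos_id
    ? [metadata?.['tokenizer.ggml.tokens']?.[eos_id] ?? '']
    : defaultStop
}

// Metadata absent entirely: the import no longer crashes, defaults are kept.
console.log(resolveStop(undefined, ['</s>'])) // ['</s>']
// Metadata present: the EOS token from the GGUF header becomes the stop token.
console.log(
  resolveStop(
    {
      'tokenizer.ggml.eos_token_id': 2,
      'tokenizer.ggml.tokens': ['<unk>', '<s>', '</s>'],
    },
    ['<|end|>']
  )
) // ['</s>']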