diff --git a/extensions/model-extension/package.json b/extensions/model-extension/package.json index 588e81636..d81326fed 100644 --- a/extensions/model-extension/package.json +++ b/extensions/model-extension/package.json @@ -1,6 +1,6 @@ { "name": "@janhq/model-extension", - "version": "1.0.15", + "version": "1.0.16", "description": "Model Management Extension provides model exploration and seamless downloads", "main": "dist/index.js", "module": "dist/module.js", diff --git a/models/mixtral-8x7b-instruct/model.json b/models/mixtral-8x7b-instruct/model.json index e910c94ab..6377f63f9 100644 --- a/models/mixtral-8x7b-instruct/model.json +++ b/models/mixtral-8x7b-instruct/model.json @@ -15,7 +15,7 @@ }, "metadata": { "author": "MistralAI, TheBloke", - "tags": ["MOE", "Foundational Model"], + "tags": ["47B", "Foundational Model"], "size": 26440000000 }, "engine": "nitro" diff --git a/models/pandora-10.7b-v1/model.json b/models/pandora-10.7b-v1/model.json index 9a16ae0e2..15c183924 100644 --- a/models/pandora-10.7b-v1/model.json +++ b/models/pandora-10.7b-v1/model.json @@ -8,14 +8,14 @@ "format": "gguf", "settings": { "ctx_len": 4096, - "prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant" + "prompt_template": "{system_message}\n### Instruction:\n{prompt}\n### Response:" }, "parameters": { "max_tokens": 4096 }, "metadata": { "author": "Jan", - "tags": ["11B","Merged"], + "tags": ["11B","Merged"], "size": 6360000000 }, "engine": "nitro" diff --git a/models/solar-10.7b-slerp/model.json b/models/solar-10.7b-slerp/model.json index a3ca7d93f..ea1a5e93d 100644 --- a/models/solar-10.7b-slerp/model.json +++ b/models/solar-10.7b-slerp/model.json @@ -15,7 +15,7 @@ }, "metadata": { "author": "Jan", - "tags": ["11B","Finetuned"], + "tags": ["11B","Finetuned"], "size": 6360000000 }, "engine": "nitro" diff --git a/models/trinity-v1-7b/model.json b/models/trinity-v1-7b/model.json index 493505913..d792393d0 
100644 --- a/models/trinity-v1-7b/model.json +++ b/models/trinity-v1-7b/model.json @@ -8,7 +8,7 @@ "format": "gguf", "settings": { "ctx_len": 4096, - "prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant" + "prompt_template": "{system_message}\n### Instruction:\n{prompt}\n### Response:" }, "parameters": { "max_tokens": 4096