* feat: Support multiple model binaries * fix: Update downloadModel with multiple binaries handler * feat: Add 3 models with multiple binaries * chore: fix model download * fix: model file lookup & model path * chore: add .prettierrc * chore: refactor docs * chore: bump model version * fix(capybara): add filename * fix(codeninja): add file name + llama model path * fix(default): add llama model path * fix(deepseek coder): add filename * fix(deepseek 33B): add filename * fix(dolphin mixtral): add filename * fix(llama2-chat): add filename * fix(llama2-70B): add filename * fix(mistral 7b): add filename + model path * fix(bakllava): correct size model * fix(llava-7b): correct size model * fix(llava-13b): correct size model * fix(mixtral-8x7b): add file name + modelpath * fix(noramaid-7b): add file name + modelpath * fix(openchat-7b): add file name + modelpath * fix(openhermes-7b): add file name + modelpath * fix(phi2-3b): add file name + modelpath * fix(phind): add file name + modelpath * fix(solarslerp): add file name + modelpath * fix(starling): add file name + modelpath * fix(stealth): add file name + modelpath * fix(tinyllama): add file name + modelpath * fix(trinity): add file name + modelpath * fix(tulu): add file name + modelpath * fix(wizardcoder): add file name + modelpath * fix(yi): add file name + modelpath * update from source -> sources Signed-off-by: James <james@jan.ai> --------- Signed-off-by: James <james@jan.ai> Co-authored-by: hiro <vuonghoainam.work@gmail.com> Co-authored-by: hahuyhoang411 <hahuyhoanghhh41@gmail.com> Co-authored-by: James <james@jan.ai>
35 lines
1000 B
JSON
35 lines
1000 B
JSON
{
  "sources": [
    {
      "filename": "deepseek-coder-1.3b-instruct.Q8_0.gguf",
      "url": "https://huggingface.co/TheBloke/deepseek-coder-1.3b-instruct-GGUF/resolve/main/deepseek-coder-1.3b-instruct.Q8_0.gguf"
    }
  ],
  "id": "deepseek-coder-1.3b",
  "object": "model",
  "name": "Deepseek Coder 1.3B Q8",
  "version": "1.0",
  "description": "Deepseek Coder excelled in project-level code completion with advanced capabilities across multiple programming languages.",
  "format": "gguf",
  "settings": {
    "ctx_len": 4096,
    "prompt_template": "### Instruction:\n{prompt}\n### Response:",
    "llama_model_path": "deepseek-coder-1.3b-instruct.Q8_0.gguf"
  },
  "parameters": {
    "temperature": 0.7,
    "top_p": 0.95,
    "stream": true,
    "max_tokens": 4096,
    "stop": [],
    "frequency_penalty": 0,
    "presence_penalty": 0
  },
  "metadata": {
    "author": "Deepseek, The Bloke",
    "tags": ["Tiny", "Foundational Model"],
    "size": 1430000000
  },
  "engine": "nitro"
}