* feat: Support multiple model binaries
* fix: Update downloadModel with multiple binaries handler
* feat: Add 3 models with multiple binaries
* chore: fix model download
* fix: model file lookup & model path
* chore: add .prettierrc
* chore: refactor docs
* chore: bump model version
* fix(capybara): add filename
* fix(codeninja): add filename + llama model path
* fix(default): add llama model path
* fix(deepseek coder): add filename
* fix(deepseek 33B): add filename
* fix(dolphin mixtral): add filename
* fix(llama2-chat): add filename
* fix(llama2-70B): add filename
* fix(mistral 7b): add filename + model path
* fix(bakllava): correct model size
* fix(llava-7b): correct model size
* fix(llava-13b): correct model size
* fix(mixtral-8x7b): add filename + model path
* fix(noromaid-7b): add filename + model path
* fix(openchat-7b): add filename + model path
* fix(openhermes-7b): add filename + model path
* fix(phi2-3b): add filename + model path
* fix(phind): add filename + model path
* fix(solarslerp): add filename + model path
* fix(starling): add filename + model path
* fix(stealth): add filename + model path
* fix(tinyllama): add filename + model path
* fix(trinity): add filename + model path
* fix(tulu): add filename + model path
* fix(wizardcoder): add filename + model path
* fix(yi): add filename + model path
* update from source -> sources

Signed-off-by: James <james@jan.ai>

---------

Signed-off-by: James <james@jan.ai>
Co-authored-by: hiro <vuonghoainam.work@gmail.com>
Co-authored-by: hahuyhoang411 <hahuyhoanghhh41@gmail.com>
Co-authored-by: James <james@jan.ai>
{
  "sources": [
    {
      "filename": "wizardcoder-python-13b-v1.0.Q5_K_M.gguf",
      "url": "https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-GGUF/resolve/main/wizardcoder-python-13b-v1.0.Q5_K_M.gguf"
    }
  ],
  "id": "wizardcoder-13b",
  "object": "model",
  "name": "Wizard Coder Python 13B Q5",
  "version": "1.0",
  "description": "WizardCoder 13B is a Python coding model. This model demonstrates high proficiency in specific domains like coding and mathematics.",
  "format": "gguf",
  "settings": {
    "ctx_len": 4096,
    "prompt_template": "### Instruction:\n{prompt}\n### Response:",
    "llama_model_path": "wizardcoder-python-13b-v1.0.Q5_K_M.gguf"
  },
  "parameters": {
    "temperature": 0.7,
    "top_p": 0.95,
    "stream": true,
    "max_tokens": 4096,
    "stop": [],
    "frequency_penalty": 0,
    "presence_penalty": 0
  },
  "metadata": {
    "author": "WizardLM, The Bloke",
    "tags": ["Recommended", "13B", "Finetuned"],
    "size": 7870000000
  },
  "engine": "nitro"
}
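This model ships as a single binary, so its "sources" array (renamed from "source" in this PR) holds one entry. For models that need more than one file to run, the same array can carry several entries, each with its own "filename" and "url", while "llama_model_path" still points at the primary binary. A minimal sketch of what such an entry could look like; the filenames and URLs below are placeholders for illustration only, not entries added by this PR:

{
  "sources": [
    {
      "filename": "example-model.Q4_K_M.gguf",
      "url": "https://huggingface.co/example/example-model-GGUF/resolve/main/example-model.Q4_K_M.gguf"
    },
    {
      "filename": "example-projector.gguf",
      "url": "https://huggingface.co/example/example-model-GGUF/resolve/main/example-projector.gguf"
    }
  ],
  "settings": {
    "llama_model_path": "example-model.Q4_K_M.gguf"
  }
}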