{
  "source_url": "https://huggingface.co/TheBloke/Mixtral-8x7B-Instruct-v0.1-GGUF/resolve/main/mixtral-8x7b-instruct-v0.1.Q4_K_M.gguf",
  "id": "mixtral-8x7b-instruct",
  "object": "model",
  "name": "Mixtral 8x7B Instruct Q4",
  "version": "1.0",
  "description": "The Mixtral-8x7B is a pretrained generative Sparse Mixture of Experts. The Mixtral-8x7B outperforms 70B models on most benchmarks.",
  "format": "gguf",
  "settings": {
    "ctx_len": 4096,
    "prompt_template": "[INST] {prompt} [/INST]"
  },
  "parameters": {
    "temperature": 0.7,
    "top_p": 0.95,
    "stream": true,
    "max_tokens": 4096,
    "stop": [],
    "frequency_penalty": 0,
    "presence_penalty": 0
  },
  "metadata": {
    "author": "MistralAI, TheBloke",
    "tags": ["46.7B", "Foundational Model"],
    "size": 26440000000
  },
  "engine": "nitro"
}