delete: dolphin mixtral due to low performance
This commit is contained in:
parent faa991331f
commit 7063300a34
@@ -1,34 +0,0 @@
-{
-  "sources": [
-    {
-      "filename": "dolphin-2.7-mixtral-8x7b.Q4_K_M.gguf",
-      "url": "https://huggingface.co/TheBloke/dolphin-2.7-mixtral-8x7b-GGUF/resolve/main/dolphin-2.7-mixtral-8x7b.Q4_K_M.gguf"
-    }
-  ],
-  "id": "dolphin-2.7-mixtral-8x7b",
-  "object": "model",
-  "name": "Dolphin 8x7B Q4",
-  "version": "1.0",
-  "description": "Dolphin is an uncensored model built on Mixtral-8x7b. It is good at programming tasks.",
-  "format": "gguf",
-  "settings": {
-    "ctx_len": 4096,
-    "prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant",
-    "llama_model_path": "dolphin-2.7-mixtral-8x7b.Q4_K_M.gguf"
-  },
-  "parameters": {
-    "temperature": 0.7,
-    "top_p": 0.95,
-    "stream": true,
-    "max_tokens": 4096,
-    "stop": [],
-    "frequency_penalty": 0,
-    "presence_penalty": 0
-  },
-  "metadata": {
-    "author": "Cognitive Computations, TheBloke",
-    "tags": ["70B", "Finetuned"],
-    "size": 26440000000
-  },
-  "engine": "nitro"
-}
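For context, the removed config uses the ChatML prompt format in its prompt_template. Below is a minimal sketch of how that template would be filled in at request time; the render_prompt helper and the sample system/user strings are illustrative assumptions, not part of the original file.

# Minimal sketch: rendering the ChatML-style prompt_template from the
# deleted model.json. Placeholder strings below are illustrative only.
PROMPT_TEMPLATE = (
    "<|im_start|>system\n{system_message}<|im_end|>\n"
    "<|im_start|>user\n{prompt}<|im_end|>\n"
    "<|im_start|>assistant"
)

def render_prompt(system_message: str, prompt: str) -> str:
    # str.format substitutes the two placeholders the template defines.
    return PROMPT_TEMPLATE.format(system_message=system_message, prompt=prompt)

if __name__ == "__main__":
    print(render_prompt("You are a helpful assistant.", "Write a bubble sort in C."))

The rendered string ends at the assistant tag, so the model's completion continues from there.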