delete: redundant capybara 34b
This commit is contained in:
parent 7063300a34
commit 5b9549b0f9
@@ -1,34 +0,0 @@
{
  "sources": [
    {
      "filename": "nous-capybara-34b.Q5_K_M.gguf",
      "url": "https://huggingface.co/TheBloke/Nous-Capybara-34B-GGUF/resolve/main/nous-capybara-34b.Q5_K_M.gguf"
    }
  ],
  "id": "capybara-34b",
  "object": "model",
  "name": "Capybara 200k 34B Q5",
  "version": "1.0",
  "description": "Nous Capybara 34B is a long context length model that supports 200K tokens.",
  "format": "gguf",
  "settings": {
    "ctx_len": 4096,
    "prompt_template": "USER:\n{prompt}\nASSISTANT:",
    "llama_model_path": "nous-capybara-34b.Q5_K_M.gguf"
  },
  "parameters": {
    "temperature": 0.7,
    "top_p": 0.95,
    "stream": true,
    "max_tokens": 4096,
    "stop": [],
    "frequency_penalty": 0,
    "presence_penalty": 0
  },
  "metadata": {
    "author": "NousResearch, The Bloke",
    "tags": ["34B", "Finetuned"],
    "size": 24320000000
  },
  "engine": "nitro"
}
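For reference, the removed entry's "prompt_template" wraps a user turn as USER:\n{prompt}\nASSISTANT: before it reaches the nitro engine. Below is a minimal sketch of that substitution, assuming plain string replacement; the render_prompt helper is illustrative only and not part of the Jan or nitro codebase.

# Illustrative sketch: fill the deleted entry's prompt_template.
# render_prompt is a hypothetical helper, not Jan/nitro code.
PROMPT_TEMPLATE = "USER:\n{prompt}\nASSISTANT:"

def render_prompt(user_message: str, template: str = PROMPT_TEMPLATE) -> str:
    # Substitute the user's message into the {prompt} placeholder.
    return template.replace("{prompt}", user_message)

print(render_prompt("What context length does Nous Capybara 34B support?"))
# USER:
# What context length does Nous Capybara 34B support?
# ASSISTANT: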