{
"source_url": "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_M.gguf",
"id": "llama2-chat-7b-q5",
"object": "model",
"name": "Llama 2 Chat 7B Q5",
"version": "1.0",
"description": "This is a 5-bit quantized iteration of Meta AI's Llama 2 Chat 7b model, specifically designed for a comprehensive understanding through training on extensive internet data.",
"format": "gguf",
"settings": {
"ctx_len": 2048,
"system_prompt": "[INST] <<SYS>>\n",
"user_prompt": "<</SYS>>\n",
"ai_prompt": "[/INST]"
},
"parameters": {
"max_tokens": 2048
},
"metadata": {
"author": "MetaAI, The Bloke",
"tags": ["Small", "Foundational Model"],
"size": 4780000000
}
}