{ "sources": [ { "filename": "llama-2-70b-chat.Q4_K_M.gguf", "url": "https://huggingface.co/TheBloke/Llama-2-70B-Chat-GGUF/resolve/main/llama-2-70b-chat.Q4_K_M.gguf" } ], "id": "llama2-chat-70b", "object": "model", "name": "Llama 2 Chat 70B Q4", "version": "1.1", "description": "Llama 2 specifically designed for a comprehensive understanding the world.", "format": "gguf", "settings": { "ctx_len": 4096, "prompt_template": "[INST] <>\n{system_message}<>\n{prompt}[/INST]", "llama_model_path": "llama-2-70b-chat.Q4_K_M.gguf", "ngl": 81 }, "parameters": { "temperature": 0.7, "top_p": 0.95, "stream": true, "max_tokens": 4096, "stop": [], "frequency_penalty": 0, "presence_penalty": 0 }, "metadata": { "author": "MetaAI", "tags": ["70B", "Foundational Model"], "size": 43920000000 }, "engine": "llama-cpp" }