{ "sources": [ { "filename": "Llama-3.2-3B-Instruct-Q8_0.gguf", "url": "https://huggingface.co/bartowski/Llama-3.2-3B-Instruct-GGUF/resolve/main/Llama-3.2-3B-Instruct-Q8_0.gguf" } ], "id": "llama3.2-3b-instruct", "object": "model", "name": "Llama 3.2 3B Instruct Q8", "version": "1.0", "description": "Meta's Llama 3.2 excels at general usage situations, including chat, general world knowledge, and coding.", "format": "gguf", "settings": { "ctx_len": 131072, "prompt_template": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n{system_message}<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", "llama_model_path": "Llama-3.2-3B-Instruct-Q8_0.gguf", "ngl": 33 }, "parameters": { "temperature": 0.7, "top_p": 0.95, "stream": true, "max_tokens": 8192, "stop": ["<|end_of_text|>", "<|eot_id|>", "<|eom_id|>"], "frequency_penalty": 0, "presence_penalty": 0 }, "metadata": { "author": "MetaAI", "tags": ["3B", "Featured"], "size": 3420000000 }, "engine": "llama-cpp" }