* chore: add google gemini and deepseek as new supported remote engine providers * chore: add logos * chore: fallback default error message on empty * chore: update open router models * chore: typo fix * chore: remove unusable qwen vl
63 lines
2.0 KiB
JSON
63 lines
2.0 KiB
JSON
[
  {
    "model": "deepseek/deepseek-r1:free",
    "object": "model",
    "name": "DeepSeek: R1",
    "version": "1.0",
    "description": "OpenRouter scouts for the lowest prices and best latencies/throughputs across dozens of providers, and lets you choose how to prioritize them.",
    "inference_params": {
      "temperature": 0.7,
      "top_p": 0.95,
      "frequency_penalty": 0,
      "presence_penalty": 0,
      "stream": true
    },
    "engine": "openrouter"
  },
  {
    "model": "deepseek/deepseek-r1-distill-llama-70b:free",
    "object": "model",
    "name": "DeepSeek: R1 Distill Llama 70B",
    "version": "1.0",
    "description": "OpenRouter scouts for the lowest prices and best latencies/throughputs across dozens of providers, and lets you choose how to prioritize them.",
    "inference_params": {
      "temperature": 0.7,
      "top_p": 0.95,
      "frequency_penalty": 0,
      "presence_penalty": 0,
      "stream": true
    },
    "engine": "openrouter"
  },
  {
    "model": "meta-llama/llama-3.1-405b-instruct:free",
    "object": "model",
    "name": "Meta: Llama 3.1 405B Instruct",
    "version": "1.0",
    "description": "OpenRouter scouts for the lowest prices and best latencies/throughputs across dozens of providers, and lets you choose how to prioritize them.",
    "inference_params": {
      "temperature": 0.7,
      "top_p": 0.95,
      "frequency_penalty": 0,
      "presence_penalty": 0,
      "stream": true
    },
    "engine": "openrouter"
  }
]