fix: OpenAI-compatible inference accepts model parameters

hiro 2023-12-10 20:16:19 +07:00
parent 3903c10c5b
commit 0522d2fe4d


@@ -22,8 +22,8 @@ export function requestInference(
   const requestBody = JSON.stringify({
     messages: recentMessages,
     stream: true,
-    model: model_id
-    // ...model.parameters,
+    model: model_id,
+    ...model.parameters,
   });
   fetch(`${engine.full_url}`, {
     method: "POST",