fix: OpenAI-compatible inference now accepts model parameters

This commit is contained in:
hiro 2023-12-10 20:16:19 +07:00
parent 3903c10c5b
commit 0522d2fe4d

View File

@ -22,8 +22,8 @@ export function requestInference(
const requestBody = JSON.stringify({
messages: recentMessages,
stream: true,
model: model_id
// ...model.parameters,
model: model_id,
...model.parameters,
});
fetch(`${engine.full_url}`, {
method: "POST",