fix: inference openai compatible accepts parameters
commit 0522d2fe4d
parent 3903c10c5b
@@ -22,8 +22,8 @@ export function requestInference(
   const requestBody = JSON.stringify({
     messages: recentMessages,
     stream: true,
-    model: model_id
-    // ...model.parameters,
+    model: model_id,
+    ...model.parameters,
   });
   fetch(`${engine.full_url}`, {
     method: "POST",
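For context, a minimal sketch of how the updated request body forwards per-model parameters (e.g. temperature, max_tokens) to an OpenAI-compatible endpoint. The Model shape and the buildRequestBody helper are assumptions for illustration; only the JSON.stringify portion mirrors the diff above.

// Sketch only: Model shape and helper name are hypothetical,
// not the repository's actual types.
interface Model {
  parameters: Record<string, unknown>; // e.g. { temperature: 0.7, max_tokens: 512 }
}

function buildRequestBody(
  recentMessages: Array<{ role: string; content: string }>,
  model_id: string,
  model: Model
): string {
  return JSON.stringify({
    messages: recentMessages,
    stream: true,
    model: model_id,
    // Spreading model.parameters forwards any OpenAI-compatible options
    // (temperature, top_p, max_tokens, ...) configured for this model.
    ...model.parameters,
  });
}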