chore: add back stream parameter to o1 models
parent 4820218a11
commit df0801d6d9
@@ -1,7 +1,7 @@
 {
   "name": "@janhq/inference-openai-extension",
   "productName": "OpenAI Inference Engine",
-  "version": "1.0.3",
+  "version": "1.0.4",
   "description": "This extension enables OpenAI chat completion API calls",
   "main": "dist/index.js",
   "module": "dist/module.js",
@@ -99,6 +99,7 @@
       "parameters": {
         "temperature": 1,
         "top_p": 1,
+        "stream": true,
         "max_tokens": 32768,
         "frequency_penalty": 0,
         "presence_penalty": 0
@@ -126,6 +127,7 @@
         "temperature": 1,
         "top_p": 1,
         "max_tokens": 65536,
+        "stream": true,
         "frequency_penalty": 0,
         "presence_penalty": 0
       },
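For context, a minimal sketch of what the restored "stream": true parameter drives downstream, assuming the extension forwards these model parameters to the official openai Node SDK. The model id, prompt, and surrounding code are illustrative assumptions, not part of this commit.

// Sketch only: how a forwarded `stream: true` parameter is typically consumed
// with the official `openai` Node SDK. Model name and prompt are placeholders.
import OpenAI from "openai";

async function main() {
  const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

  // With stream: true the SDK returns an async iterable of chunks
  // instead of a single completion object.
  const stream = await client.chat.completions.create({
    model: "o1-mini", // placeholder o1-family model id
    stream: true,
    messages: [{ role: "user", content: "Say hello." }],
  });

  for await (const chunk of stream) {
    // Each chunk carries an incremental delta of the assistant message.
    process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
  }
}

main();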