From afe05b2a58cc3ff340c8eef0dcf578e845a00799 Mon Sep 17 00:00:00 2001
From: Louis
Date: Mon, 11 Nov 2024 16:01:41 +0700
Subject: [PATCH] fix: update the payload transform for OpenAI preview models

---
 extensions/inference-openai-extension/src/index.ts | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/extensions/inference-openai-extension/src/index.ts b/extensions/inference-openai-extension/src/index.ts
index 44c243adf..64880b678 100644
--- a/extensions/inference-openai-extension/src/index.ts
+++ b/extensions/inference-openai-extension/src/index.ts
@@ -70,16 +70,17 @@ export default class JanInferenceOpenAIExtension extends RemoteOAIEngine {
    * Tranform the payload before sending it to the inference endpoint.
    * The new preview models such as o1-mini and o1-preview replaced max_tokens by max_completion_tokens parameter.
    * Others do not.
-   * @param payload 
-   * @returns 
+   * @param payload
+   * @returns
    */
   transformPayload = (payload: OpenAIPayloadType): OpenAIPayloadType => {
     // Transform the payload for preview models
     if (this.previewModels.includes(payload.model)) {
-      const { max_tokens, ...params } = payload
+      const { max_tokens, temperature, top_p, stop, ...params } = payload
       return {
         ...params,
         max_completion_tokens: max_tokens,
+        stream: false // o1 only supports stream = false
       }
     }
     // Pass through for non-preview models