Merge pull request #3993 from janhq/fix/openai-preview-models-transform-payload-update

fix: update the payload transform for OpenAI preview models
commit 4e91c80bcc
Author: Louis
Date: 2024-11-11 16:28:54 +07:00 (committed by GitHub)


@@ -70,16 +70,17 @@ export default class JanInferenceOpenAIExtension extends RemoteOAIEngine {
    * Tranform the payload before sending it to the inference endpoint.
    * The new preview models such as o1-mini and o1-preview replaced max_tokens by max_completion_tokens parameter.
    * Others do not.
    * @param payload
    * @returns
    */
   transformPayload = (payload: OpenAIPayloadType): OpenAIPayloadType => {
     // Transform the payload for preview models
     if (this.previewModels.includes(payload.model)) {
-      const { max_tokens, ...params } = payload
+      const { max_tokens, temperature, top_p, stop, ...params } = payload
       return {
         ...params,
         max_completion_tokens: max_tokens,
+        stream: false // o1 only support stream = false
       }
     }
     // Pass through for non-preview models
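
For context, the sketch below shows the transform in isolation as a small, self-contained TypeScript example. The OpenAIPayloadType shape and the previewModels list are simplified assumptions for illustration; only the destructuring, the max_tokens -> max_completion_tokens rename, and the forced stream: false come from the diff above.

// Standalone sketch of the transform (simplified types; not the extension's full code).
type OpenAIPayloadType = {
  model: string
  max_tokens?: number
  max_completion_tokens?: number
  temperature?: number
  top_p?: number
  stop?: string[]
  stream?: boolean
  [key: string]: unknown
}

// Assumed preview-model IDs, mirroring the models named in the JSDoc above.
const previewModels = ['o1-mini', 'o1-preview']

const transformPayload = (payload: OpenAIPayloadType): OpenAIPayloadType => {
  if (previewModels.includes(payload.model)) {
    // Strip the sampling parameters the preview models reject and
    // rename max_tokens to max_completion_tokens.
    const { max_tokens, temperature, top_p, stop, ...params } = payload
    return {
      ...params,
      max_completion_tokens: max_tokens,
      stream: false, // preview models only accept stream = false
    }
  }
  // Non-preview models pass through unchanged.
  return payload
}

// Example: sampling parameters are dropped, max_tokens is renamed, streaming is disabled.
console.log(
  transformPayload({ model: 'o1-mini', max_tokens: 1024, temperature: 0.7, stream: true })
)
// -> { model: 'o1-mini', stream: false, max_completion_tokens: 1024 }

Rest destructuring keeps every other field (model, messages, and anything else on the payload) untouched, so only the parameters the preview models do not accept are removed or rewritten.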