From 58bb1b493933927e9710dd5029d75d64e95a6d2c Mon Sep 17 00:00:00 2001
From: Louis
Date: Wed, 15 Jan 2025 12:37:29 +0700
Subject: [PATCH] fix: incorrect default max_tokens set - legacy issue (#4451)

* fix: incorrect default max_tokens set - legacy issue

* chore: bump cortex server
---
 extensions/inference-cortex-extension/bin/version.txt | 2 +-
 web/containers/ModelDropdown/index.tsx                | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/extensions/inference-cortex-extension/bin/version.txt b/extensions/inference-cortex-extension/bin/version.txt
index 9b3178149..9e69738bc 100644
--- a/extensions/inference-cortex-extension/bin/version.txt
+++ b/extensions/inference-cortex-extension/bin/version.txt
@@ -1 +1 @@
-1.0.9-rc1
+1.0.9-rc2

diff --git a/web/containers/ModelDropdown/index.tsx b/web/containers/ModelDropdown/index.tsx
index d64e6730b..2ecdf4cd3 100644
--- a/web/containers/ModelDropdown/index.tsx
+++ b/web/containers/ModelDropdown/index.tsx
@@ -248,10 +248,10 @@ const ModelDropdown = ({
         ctx_len: model?.settings.ctx_len ? defaultContextLength : undefined,
         max_tokens: defaultContextLength
           ? Math.min(
-              model?.parameters.token_limit ?? 8192,
+              model?.parameters.max_tokens ?? 8192,
               defaultContextLength
             )
-          : model?.parameters.token_limit,
+          : model?.parameters.max_tokens,
       }
 
       const modelParams = {