fix: preserve model settings should maintain all settings (#3435)

* fix: preserve model settings should maintain all settings

* fix: a legacy bug that allowed sending empty stop strings

* fix: blank default settings

* fix: incorrect persisting model update
Authored by Louis on 2024-08-22 15:19:44 +07:00, committed by GitHub
parent a337b2e247
commit 7c9d49ba60
5 changed files with 49 additions and 27 deletions


@@ -749,9 +749,17 @@ export default class JanModelExtension extends ModelExtension {
     const updatedModel: Model = {
       ...model,
       ...modelInfo,
+      parameters: {
+        ...model.parameters,
+        ...modelInfo.parameters,
+      },
+      settings: {
+        ...model.settings,
+        ...modelInfo.settings,
+      },
       metadata: {
         ...model.metadata,
-        tags: modelInfo.metadata?.tags ?? [],
+        ...modelInfo.metadata,
       },
     }
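
Why the nested spreads matter: with only the top-level spread, a partial modelInfo that carries, say, a new settings object replaces the model's entire settings block instead of merging into it. Below is a minimal sketch of the difference, using simplified stand-in types rather than the real Model interface from @janhq/core:

// Simplified stand-ins for the real Model shape; field names are illustrative.
type PartialModel = {
  parameters?: Record<string, unknown>
  settings?: Record<string, unknown>
  metadata?: Record<string, unknown>
}

const model: PartialModel = {
  settings: { ctx_len: 4096, prompt_template: '{prompt}' },
  parameters: { max_tokens: 2048, temperature: 0.7 },
  metadata: { tags: ['7B'] },
}

const update: PartialModel = { settings: { ctx_len: 8192 } }

// Shallow merge: the partial update replaces the whole settings object,
// dropping prompt_template.
const shallow = { ...model, ...update }
// shallow.settings -> { ctx_len: 8192 }

// Per-section merge, as in the patched updateModel: only the provided keys
// are overridden, everything else on the existing model is preserved.
const deep: PartialModel = {
  ...model,
  ...update,
  settings: { ...model.settings, ...update.settings },
  parameters: { ...model.parameters, ...update.parameters },
  metadata: { ...model.metadata, ...update.metadata },
}
// deep.settings -> { ctx_len: 8192, prompt_template: '{prompt}' }

console.log(shallow.settings, deep.settings)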


@@ -171,11 +171,11 @@ const ModelDropdown = ({
         : 2048
       const overriddenSettings =
         model?.settings.ctx_len && model.settings.ctx_len > 2048
-          ? { ctx_len: defaultContextLength }
+          ? { ctx_len: defaultContextLength ?? 2048 }
           : {}
       const overriddenParameters =
         model?.parameters.max_tokens && model.parameters.max_tokens
-          ? { max_tokens: defaultMaxTokens }
+          ? { max_tokens: defaultMaxTokens ?? 2048 }
           : {}
       const modelParams = {


@@ -110,20 +110,19 @@ export const useCreateNewThread = () => {
       settings: assistant.tools && assistant.tools[0].settings,
     }
     const defaultContextLength = preserveModelSettings
-      ? model?.metadata?.default_ctx_len
+      ? defaultModel?.metadata?.default_ctx_len
       : 2048
     const defaultMaxTokens = preserveModelSettings
-      ? model?.metadata?.default_max_tokens
+      ? defaultModel?.metadata?.default_max_tokens
       : 2048
     const overriddenSettings =
       defaultModel?.settings.ctx_len && defaultModel.settings.ctx_len > 2048
-        ? { ctx_len: defaultContextLength }
+        ? { ctx_len: defaultContextLength ?? 2048 }
         : {}
-    const overriddenParameters =
-      defaultModel?.parameters.max_tokens && defaultModel.parameters.max_tokens
-        ? { max_tokens: defaultMaxTokens }
-        : {}
+    const overriddenParameters = defaultModel?.parameters.max_tokens
+      ? { max_tokens: defaultMaxTokens ?? 2048 }
+      : {}
     const createdAt = Date.now()
     let instructions: string | undefined = undefined
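
Both this hunk and the ModelDropdown hunk above follow the same pattern: when the preserve-model-settings flag is on, the defaults are read from the model's own metadata (the default_ctx_len / default_max_tokens written back by useUpdateModelParameters below), with ?? 2048 as a guard when no default has been stored yet. A rough standalone sketch of that selection logic, with illustrative type and function names:

// Illustrative shape only; the real Model type lives in @janhq/core.
type ModelLike = {
  settings: { ctx_len?: number }
  parameters: { max_tokens?: number }
  metadata?: { default_ctx_len?: number; default_max_tokens?: number }
}

// Hypothetical helper mirroring the pattern used in both components.
function resolveOverrides(
  model: ModelLike | undefined,
  preserveModelSettings: boolean
) {
  const defaultContextLength = preserveModelSettings
    ? model?.metadata?.default_ctx_len
    : 2048
  const defaultMaxTokens = preserveModelSettings
    ? model?.metadata?.default_max_tokens
    : 2048

  // Only clamp models that ship with a large context window; `?? 2048`
  // covers metadata that has no stored default yet.
  const overriddenSettings =
    model?.settings.ctx_len && model.settings.ctx_len > 2048
      ? { ctx_len: defaultContextLength ?? 2048 }
      : {}
  const overriddenParameters = model?.parameters.max_tokens
    ? { max_tokens: defaultMaxTokens ?? 2048 }
    : {}

  return { ...overriddenSettings, ...overriddenParameters }
}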


@@ -95,5 +95,9 @@ export default function useRecommendedModel() {
     getRecommendedModel()
   }, [getRecommendedModel])
-  return { recommendedModel, downloadedModels: sortedModels }
+  return {
+    recommendedModel,
+    downloadedModels: sortedModels,
+    setRecommendedModel,
+  }
 }


@@ -14,6 +14,8 @@ import { useAtom, useAtomValue, useSetAtom } from 'jotai'
 import { toRuntimeParams, toSettingParams } from '@/utils/modelParam'
+import useRecommendedModel from './useRecommendedModel'
+
 import { extensionManager } from '@/extension'
 import { preserveModelSettingsAtom } from '@/helpers/atoms/AppConfig.atom'
 import {
@@ -38,6 +40,7 @@ export default function useUpdateModelParameters() {
   const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
   const updateDownloadedModel = useSetAtom(updateDownloadedModelAtom)
   const preserveModelFeatureEnabled = useAtomValue(preserveModelSettingsAtom)
+  const { recommendedModel, setRecommendedModel } = useRecommendedModel()

   const updateModelParameter = useCallback(
     async (thread: Thread, settings: UpdateModelParameter) => {
@@ -75,29 +78,35 @@ export default function useUpdateModelParameters() {
       // Persists default settings to model file
       // Do not overwrite ctx_len and max_tokens
-      if (preserveModelFeatureEnabled && selectedModel) {
+      if (preserveModelFeatureEnabled) {
+        const defaultContextLength = settingParams.ctx_len
+        const defaultMaxTokens = runtimeParams.max_tokens
+        // eslint-disable-next-line @typescript-eslint/naming-convention
+        const { ctx_len, ...toSaveSettings } = settingParams
+        // eslint-disable-next-line @typescript-eslint/naming-convention
+        const { max_tokens, ...toSaveParams } = runtimeParams
         const updatedModel = {
-          ...selectedModel,
+          id: settings.modelId ?? selectedModel?.id,
           parameters: {
-            ...runtimeParams,
-            max_tokens: selectedModel.parameters.max_tokens,
+            ...toSaveParams,
           },
           settings: {
-            ...settingParams,
-            ctx_len: selectedModel.settings.ctx_len,
+            ...toSaveSettings,
           },
           metadata: {
-            ...selectedModel.metadata,
-            default_ctx_len: settingParams.ctx_len,
-            default_max_tokens: runtimeParams.max_tokens,
+            default_ctx_len: defaultContextLength,
+            default_max_tokens: defaultMaxTokens,
           },
-        } as Model
-        await extensionManager
+        } as Partial<Model>
+        const model = await extensionManager
           .get<ModelExtension>(ExtensionTypeEnum.Model)
-          ?.saveModel(updatedModel)
-        setSelectedModel(updatedModel)
-        updateDownloadedModel(updatedModel)
+          ?.updateModelInfo(updatedModel)
+        if (model) updateDownloadedModel(model)
+        if (selectedModel?.id === model?.id) setSelectedModel(model)
+        if (recommendedModel?.id === model?.id) setRecommendedModel(model)
       }
     },
     [
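
The heart of the hunk above: ctx_len and max_tokens are stripped out of what gets written back to the model file and are instead remembered as metadata defaults, and only a partial record is handed to the model extension. A simplified, self-contained sketch of that split, using assumed shapes and a hypothetical helper rather than the real extension API:

// Assumed parameter shapes for illustration only.
type SettingParams = { ctx_len?: number; prompt_template?: string }
type RuntimeParams = { max_tokens?: number; temperature?: number; stop?: string[] }

// Hypothetical helper: splits thread-level params into what is persisted on the
// model file versus what is only remembered as a metadata default.
function buildModelUpdate(
  modelId: string,
  settingParams: SettingParams,
  runtimeParams: RuntimeParams
) {
  const { ctx_len, ...toSaveSettings } = settingParams
  const { max_tokens, ...toSaveParams } = runtimeParams

  return {
    id: modelId,
    settings: { ...toSaveSettings },   // engine settings minus ctx_len
    parameters: { ...toSaveParams },   // inference params minus max_tokens
    metadata: {
      default_ctx_len: ctx_len,        // remembered defaults, restored later
      default_max_tokens: max_tokens,
    },
  }
}

// Example: ctx_len / max_tokens end up in metadata, not in settings/parameters.
const update = buildModelUpdate(
  'llama3-8b-instruct',
  { ctx_len: 8192, prompt_template: '{prompt}' },
  { max_tokens: 4096, temperature: 0.7 }
)
console.log(update)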
@@ -105,15 +114,17 @@ export default function useUpdateModelParameters() {
       selectedModel,
       setThreadModelParams,
       preserveModelFeatureEnabled,
-      setSelectedModel,
       updateDownloadedModel,
+      setSelectedModel,
     ]
   )

   const processStopWords = (params: ModelParams): ModelParams => {
     if ('stop' in params && typeof params['stop'] === 'string') {
       // Input as string but stop words accept an array of strings (space as separator)
-      params['stop'] = (params['stop'] as string).split(' ')
+      params['stop'] = (params['stop'] as string)
+        .split(' ')
+        .filter((e) => e.trim().length)
     }
     return params
   }
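
For the "empty stop string" bullet in the commit message: splitting user input on spaces produces empty entries whenever the string has trailing or doubled spaces, and those used to be forwarded to the engine as blank stop sequences. A small standalone example of the old versus patched behavior:

const input = 'User: Assistant:  '

// Legacy behavior: empty entries slip through.
const legacy = input.split(' ')
// -> ['User:', 'Assistant:', '', '']

// Patched behavior: blank entries are filtered out before sending.
const fixed = input.split(' ').filter((e) => e.trim().length)
// -> ['User:', 'Assistant:']

console.log(legacy, fixed)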