fix: preserving model settings should maintain all settings (#3435)

* fix: preserving model settings should maintain all settings

* fix: a legacy bug that allowed sending an empty stop string

* fix: blank default settings

* fix: incorrect persisting model update
This commit is contained in:
Louis 2024-08-22 15:19:44 +07:00 committed by GitHub
parent a337b2e247
commit 7c9d49ba60
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 49 additions and 27 deletions

View File

@ -749,9 +749,17 @@ export default class JanModelExtension extends ModelExtension {
const updatedModel: Model = {
...model,
...modelInfo,
parameters: {
...model.parameters,
...modelInfo.parameters,
},
settings: {
...model.settings,
...modelInfo.settings,
},
metadata: {
...model.metadata,
tags: modelInfo.metadata?.tags ?? [],
...modelInfo.metadata,
},
}

View File

@ -171,11 +171,11 @@ const ModelDropdown = ({
: 2048
const overriddenSettings =
model?.settings.ctx_len && model.settings.ctx_len > 2048
? { ctx_len: defaultContextLength }
? { ctx_len: defaultContextLength ?? 2048 }
: {}
const overriddenParameters =
model?.parameters.max_tokens && model.parameters.max_tokens
? { max_tokens: defaultMaxTokens }
? { max_tokens: defaultMaxTokens ?? 2048 }
: {}
const modelParams = {

View File

@ -110,20 +110,19 @@ export const useCreateNewThread = () => {
settings: assistant.tools && assistant.tools[0].settings,
}
const defaultContextLength = preserveModelSettings
? model?.metadata?.default_ctx_len
? defaultModel?.metadata?.default_ctx_len
: 2048
const defaultMaxTokens = preserveModelSettings
? model?.metadata?.default_max_tokens
? defaultModel?.metadata?.default_max_tokens
: 2048
const overriddenSettings =
defaultModel?.settings.ctx_len && defaultModel.settings.ctx_len > 2048
? { ctx_len: defaultContextLength }
? { ctx_len: defaultContextLength ?? 2048 }
: {}
const overriddenParameters =
defaultModel?.parameters.max_tokens && defaultModel.parameters.max_tokens
? { max_tokens: defaultMaxTokens }
: {}
const overriddenParameters = defaultModel?.parameters.max_tokens
? { max_tokens: defaultMaxTokens ?? 2048 }
: {}
const createdAt = Date.now()
let instructions: string | undefined = undefined

View File

@ -95,5 +95,9 @@ export default function useRecommendedModel() {
getRecommendedModel()
}, [getRecommendedModel])
return { recommendedModel, downloadedModels: sortedModels }
return {
recommendedModel,
downloadedModels: sortedModels,
setRecommendedModel,
}
}

View File

@ -14,6 +14,8 @@ import { useAtom, useAtomValue, useSetAtom } from 'jotai'
import { toRuntimeParams, toSettingParams } from '@/utils/modelParam'
import useRecommendedModel from './useRecommendedModel'
import { extensionManager } from '@/extension'
import { preserveModelSettingsAtom } from '@/helpers/atoms/AppConfig.atom'
import {
@ -38,6 +40,7 @@ export default function useUpdateModelParameters() {
const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
const updateDownloadedModel = useSetAtom(updateDownloadedModelAtom)
const preserveModelFeatureEnabled = useAtomValue(preserveModelSettingsAtom)
const { recommendedModel, setRecommendedModel } = useRecommendedModel()
const updateModelParameter = useCallback(
async (thread: Thread, settings: UpdateModelParameter) => {
@ -75,29 +78,35 @@ export default function useUpdateModelParameters() {
// Persists default settings to model file
// Do not overwrite ctx_len and max_tokens
if (preserveModelFeatureEnabled && selectedModel) {
if (preserveModelFeatureEnabled) {
const defaultContextLength = settingParams.ctx_len
const defaultMaxTokens = runtimeParams.max_tokens
// eslint-disable-next-line @typescript-eslint/naming-convention
const { ctx_len, ...toSaveSettings } = settingParams
// eslint-disable-next-line @typescript-eslint/naming-convention
const { max_tokens, ...toSaveParams } = runtimeParams
const updatedModel = {
...selectedModel,
id: settings.modelId ?? selectedModel?.id,
parameters: {
...runtimeParams,
max_tokens: selectedModel.parameters.max_tokens,
...toSaveSettings,
},
settings: {
...settingParams,
ctx_len: selectedModel.settings.ctx_len,
...toSaveParams,
},
metadata: {
...selectedModel.metadata,
default_ctx_len: settingParams.ctx_len,
default_max_tokens: runtimeParams.max_tokens,
default_ctx_len: defaultContextLength,
default_max_tokens: defaultMaxTokens,
},
} as Model
} as Partial<Model>
await extensionManager
const model = await extensionManager
.get<ModelExtension>(ExtensionTypeEnum.Model)
?.saveModel(updatedModel)
setSelectedModel(updatedModel)
updateDownloadedModel(updatedModel)
?.updateModelInfo(updatedModel)
if (model) updateDownloadedModel(model)
if (selectedModel?.id === model?.id) setSelectedModel(model)
if (recommendedModel?.id === model?.id) setRecommendedModel(model)
}
},
[
@ -105,15 +114,17 @@ export default function useUpdateModelParameters() {
selectedModel,
setThreadModelParams,
preserveModelFeatureEnabled,
setSelectedModel,
updateDownloadedModel,
setSelectedModel,
]
)
const processStopWords = (params: ModelParams): ModelParams => {
if ('stop' in params && typeof params['stop'] === 'string') {
// Input as string but stop words accept an array of strings (space as separator)
params['stop'] = (params['stop'] as string).split(' ')
params['stop'] = (params['stop'] as string)
.split(' ')
.filter((e) => e.trim().length)
}
return params
}