fix: migrate app settings to the new version (#5936)
* fix: migrate app settings to the new version
* fix: edge cases
* fix: migrate HF import model on Windows
* fix: hardware page broken after downgrade
* test: correct test
* fix: backward-compatible hardware info
parent c9b44eec52
commit 1fc37a9349
@@ -886,7 +886,7 @@ export default class llamacpp_extension extends AIEngine {
       modelId =
         modelId !== 'imported'
-          ? modelId
+          ? modelId.replace(/^(cortex\.so|huggingface\.co)[\/\\]/, '')
           : (await basename(child)).replace('.yml', '')

       const modelName = legacyModelConfig.model ?? modelId
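Note: the added replace strips a leading hub prefix regardless of path separator, which is what fixes HF imports on Windows. A minimal sketch of the behavior (the sample IDs below are illustrative assumptions, not values from the repository):

    // Hypothetical helper for illustration; the commit applies the regex inline.
    const stripHubPrefix = (modelId: string): string =>
      modelId.replace(/^(cortex\.so|huggingface\.co)[\/\\]/, '')

    stripHubPrefix('huggingface.co/TheBloke/TinyLlama-GGUF')   // -> 'TheBloke/TinyLlama-GGUF'
    stripHubPrefix('huggingface.co\\TheBloke\\TinyLlama-GGUF') // Windows-style -> 'TheBloke\TinyLlama-GGUF'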
@@ -68,6 +68,11 @@ describe('useHardware', () => {
         extensions: ['SSE', 'AVX'],
         name: 'Intel Core i7',
         usage: 25.5,
+        instructions: [],
       },
+      ram: {
+        available: 0,
+        total: 0,
+      },
       gpus: [
         {
@@ -9,6 +9,7 @@ export interface CPU {
   extensions: string[]
   name: string
   usage: number
+  instructions?: string[] // Cortex migration: ensure instructions data ready
 }

 export interface GPUAdditionalInfo {
@@ -171,6 +172,15 @@ export const useHardware = create<HardwareStore>()(
         set({
           hardwareData: {
             ...data,
+            cpu: {
+              ...data.cpu,
+              // Cortex migration - ensure instructions data ready
+              instructions: [],
+            },
+            ram: {
+              available: 0,
+              total: 0,
+            },
             gpus: data.gpus.map((gpu) => ({
               ...gpu,
               activated: gpu.activated ?? false,
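Note: these defaults appear intended to keep the hardware page working when hardware info was persisted by an older (Cortex-era) build that lacks the new fields. A rough sketch of the resulting shape, with assumed sample data:

    // Illustrative only: the sample payload is assumed, not taken from the repository.
    const data = { cpu: { name: 'Intel Core i7', usage: 25.5, extensions: ['SSE', 'AVX'] }, gpus: [] }
    const hardwareData = {
      ...data,
      cpu: { ...data.cpu, instructions: [] },   // field introduced in this commit, always present
      ram: { available: 0, total: 0 },          // placeholder so older payloads don't break the page
      gpus: data.gpus,                          // the real code also defaults gpu.activated to false
    }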
@@ -1,6 +1,7 @@
 import { create } from 'zustand'
 import { persist, createJSONStorage } from 'zustand/middleware'
 import { localStorageKey } from '@/constants/localStorage'
+import { sep } from '@tauri-apps/api/path'

 type ModelProviderState = {
   providers: ModelProvider[]
@@ -50,6 +51,17 @@ export const useModelProvider = create<ModelProviderState>()(
             ),
           }
         })
+
+        let legacyModels: Model[] | undefined = []
+        /// Cortex Migration
+        if (
+          localStorage.getItem('cortex_model_settings_migrated') !== 'true'
+        ) {
+          legacyModels = state.providers.find(
+            (e) => e.provider === 'llama.cpp'
+          )?.models
+          localStorage.setItem('cortex_model_settings_migrated', 'true')
+        }
         // Ensure deletedModels is always an array
         const currentDeletedModels = Array.isArray(state.deletedModels)
           ? state.deletedModels
@@ -75,13 +87,21 @@ export const useModelProvider = create<ModelProviderState>()(
               ...models,
             ]
             const updatedModels = provider.models?.map((model) => {
+              const settings =
+                (legacyModels && legacyModels?.length > 0
+                  ? legacyModels
+                  : models
+                ).find(
+                  (m) => m.id.split(':').slice(0, 2).join(sep()) === model.id
+                )?.settings || model.settings
               const existingModel = models.find((m) => m.id === model.id)
               return {
                 ...model,
-                settings: existingModel?.settings || model.settings,
+                settings: settings,
                 capabilities: existingModel?.capabilities || model.capabilities,
               }
             })

             return {
               ...provider,
               models: provider.persist ? updatedModels : mergedModels,
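Note: legacy Cortex model IDs are presumably colon-delimited (author:model:file), so the lookup keeps the first two segments and joins them with the platform path separator to match the new path-style IDs. A hedged sketch with an assumed sample ID:

    import { sep } from '@tauri-apps/api/path'

    // Illustrative only: the legacy ID below is an assumption about the old format.
    const legacyId = 'TheBloke:TinyLlama-1.1B-GGUF:tinyllama.Q4_K_M.gguf'
    const matchKey = legacyId.split(':').slice(0, 2).join(sep())
    // POSIX: 'TheBloke/TinyLlama-1.1B-GGUF'   Windows: 'TheBloke\TinyLlama-1.1B-GGUF'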
@@ -2,6 +2,7 @@ import { create } from 'zustand'
 import { ulid } from 'ulidx'
 import { createThread, deleteThread, updateThread } from '@/services/threads'
 import { Fzf } from 'fzf'
+import { sep } from '@tauri-apps/api/path'

 type ThreadState = {
   threads: Record<string, Thread>
@@ -34,7 +35,19 @@ export const useThreads = create<ThreadState>()((set, get) => ({
   setThreads: (threads) => {
     const threadMap = threads.reduce(
       (acc: Record<string, Thread>, thread) => {
-        acc[thread.id] = thread
+        acc[thread.id] = {
+          ...thread,
+          model: thread.model
+            ? {
+                provider: thread.model.provider.replace(
+                  'llama.cpp',
+                  'llamacpp'
+                ),
+                // Cortex migration: take first two parts of the ID (the last is file name which is not needed)
+                id: thread.model?.id.split(':').slice(0, 2).join(sep()),
+              }
+            : undefined,
+        }
         return acc
       },
       {} as Record<string, Thread>
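Note: persisted threads get the same treatment when loaded: the provider name is rewritten from 'llama.cpp' to 'llamacpp' and the model ID is reduced to its first two segments. A minimal sketch (the sample thread model is an assumed example):

    import { sep } from '@tauri-apps/api/path'

    // Illustrative only: the legacy thread.model below is assumed for demonstration.
    const legacy = { provider: 'llama.cpp', id: 'TheBloke:TinyLlama-1.1B-GGUF:tinyllama.Q4_K_M.gguf' }
    const migrated = {
      provider: legacy.provider.replace('llama.cpp', 'llamacpp'),
      id: legacy.id.split(':').slice(0, 2).join(sep()),
    }
    // -> { provider: 'llamacpp', id: 'TheBloke/TinyLlama-1.1B-GGUF' } on POSIX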