fix: migrate app settings to the new version (#5936)

* fix: migrate app settings to the new version

* fix: edge cases

* fix: migrate HF import model on Windows

* fix: hardware page broken after downgrade

* test: correct test

* fix: backward compatible hardware info
Louis 2025-07-27 21:13:05 +07:00 committed by GitHub
parent c9b44eec52
commit 1fc37a9349
5 changed files with 51 additions and 3 deletions

View File

@@ -886,7 +886,7 @@ export default class llamacpp_extension extends AIEngine {
modelId =
modelId !== 'imported'
- ? modelId
? modelId.replace(/^(cortex\.so|huggingface\.co)[\/\\]/, '')
: (await basename(child)).replace('.yml', '')
const modelName = legacyModelConfig.model ?? modelId
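
The change above fixes HF-imported models on Windows by stripping the hub-host prefix from legacy model IDs, accepting either path separator. A standalone sketch of that behaviour follows; stripHubPrefix and the sample IDs are made up for illustration and are not part of the commit.

// Hypothetical helper mirroring the .replace() call above.
function stripHubPrefix(modelId: string): string {
  // Drop a leading 'cortex.so' or 'huggingface.co' prefix followed by '/' or '\'.
  return modelId.replace(/^(cortex\.so|huggingface\.co)[\/\\]/, '')
}

stripHubPrefix('huggingface.co/someauthor/some-model-GGUF') // -> 'someauthor/some-model-GGUF'
stripHubPrefix('cortex.so\\some-model')                     // -> 'some-model' (Windows-style separator)
stripHubPrefix('someauthor/some-model-GGUF')                // -> unchanged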

View File

@@ -68,6 +68,11 @@ describe('useHardware', () => {
extensions: ['SSE', 'AVX'],
name: 'Intel Core i7',
usage: 25.5,
instructions: [],
},
ram: {
available: 0,
total: 0,
},
gpus: [
{

View File

@@ -9,6 +9,7 @@ export interface CPU {
extensions: string[]
name: string
usage: number
instructions?: string[] // Cortex migration: ensure the instructions data is ready
}
export interface GPUAdditionalInfo {
@@ -171,6 +172,15 @@ export const useHardware = create<HardwareStore>()(
set({
hardwareData: {
...data,
cpu: {
...data.cpu,
// Cortex migration - ensure the instructions data is ready
instructions: [],
},
ram: {
available: 0,
total: 0,
},
gpus: data.gpus.map((gpu) => ({
...gpu,
activated: gpu.activated ?? false,
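
Together with the test fixture above, this keeps the hardware page from breaking after a downgrade or when older Cortex-era hardware data is loaded: the new optional instructions field and the ram block are filled with defaults before the data is written to the store. Below is a rough sketch of that normalization under simplified, assumed types; normalizeHardwareData is illustrative, not the store's actual code.

// Simplified types for illustration; the real store carries more fields.
interface SketchCPU {
  extensions: string[]
  name: string
  usage: number
  instructions?: string[] // may be missing in data persisted before this commit
}

interface SketchGPU {
  activated?: boolean
}

interface SketchHardwareData {
  cpu: SketchCPU
  ram: { available: number; total: number }
  gpus: SketchGPU[]
}

function normalizeHardwareData(data: SketchHardwareData): SketchHardwareData {
  return {
    ...data,
    // Always write instructions so consumers of the store never see undefined.
    cpu: { ...data.cpu, instructions: [] },
    // Reset RAM figures; assumed to be refreshed by the regular hardware polling.
    ram: { available: 0, total: 0 },
    gpus: data.gpus.map((gpu) => ({ ...gpu, activated: gpu.activated ?? false })),
  }
}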

View File

@@ -1,6 +1,7 @@
import { create } from 'zustand'
import { persist, createJSONStorage } from 'zustand/middleware'
import { localStorageKey } from '@/constants/localStorage'
import { sep } from '@tauri-apps/api/path'
type ModelProviderState = {
providers: ModelProvider[]
@@ -50,6 +51,17 @@ export const useModelProvider = create<ModelProviderState>()(
),
}
})
let legacyModels: Model[] | undefined = []
/// Cortex Migration
if (
localStorage.getItem('cortex_model_settings_migrated') !== 'true'
) {
legacyModels = state.providers.find(
(e) => e.provider === 'llama.cpp'
)?.models
localStorage.setItem('cortex_model_settings_migrated', 'true')
}
// Ensure deletedModels is always an array
const currentDeletedModels = Array.isArray(state.deletedModels)
? state.deletedModels
@@ -75,13 +87,21 @@ export const useModelProvider = create<ModelProviderState>()(
...models,
]
const updatedModels = provider.models?.map((model) => {
const settings =
(legacyModels && legacyModels?.length > 0
? legacyModels
: models
).find(
(m) => m.id.split(':').slice(0, 2).join(sep()) === model.id
)?.settings || model.settings
const existingModel = models.find((m) => m.id === model.id)
return {
...model,
- settings: existingModel?.settings || model.settings,
settings: settings,
capabilities: existingModel?.capabilities || model.capabilities,
}
})
return {
...provider,
models: provider.persist ? updatedModels : mergedModels,
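
The provider hunks above carry per-model settings across the Cortex migration: the legacy llama.cpp provider's models are read once (guarded by the cortex_model_settings_migrated flag in localStorage), and each legacy ID is matched against a new model ID by keeping its first two ':'-separated segments and joining them with the platform path separator. A small sketch of that ID mapping; legacyIdToNewId and the sample ID are hypothetical.

import { sep } from '@tauri-apps/api/path'

// Mirrors the id.split(':').slice(0, 2).join(sep()) expression used above.
function legacyIdToNewId(legacyId: string): string {
  // e.g. 'some-model:variant:file.gguf' -> 'some-model' + sep() + 'variant'; the file-name segment is dropped.
  return legacyId.split(':').slice(0, 2).join(sep())
}

When a model's new ID matches the mapped legacy ID, it inherits the legacy settings; otherwise the model keeps its current settings.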

View File

@@ -2,6 +2,7 @@ import { create } from 'zustand'
import { ulid } from 'ulidx'
import { createThread, deleteThread, updateThread } from '@/services/threads'
import { Fzf } from 'fzf'
import { sep } from '@tauri-apps/api/path'
type ThreadState = {
threads: Record<string, Thread>
@@ -34,7 +35,19 @@ export const useThreads = create<ThreadState>()((set, get) => ({
setThreads: (threads) => {
const threadMap = threads.reduce(
(acc: Record<string, Thread>, thread) => {
- acc[thread.id] = thread
acc[thread.id] = {
...thread,
model: thread.model
? {
provider: thread.model.provider.replace(
'llama.cpp',
'llamacpp'
),
// Cortex migration: take the first two parts of the ID (the last is the file name, which is not needed)
id: thread.model?.id.split(':').slice(0, 2).join(sep()),
}
: undefined,
}
return acc
},
{} as Record<string, Thread>
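
The thread store applies the same mapping when threads are loaded, so existing threads keep pointing at a valid model after the migration: the provider name is rewritten from 'llama.cpp' to 'llamacpp' and the model ID is converted to the new two-segment form. A minimal sketch of that per-thread conversion with simplified, assumed types; migrateThreadModel is illustrative only.

import { sep } from '@tauri-apps/api/path'

interface SketchThreadModel {
  provider: string
  id: string
}

function migrateThreadModel(model?: SketchThreadModel): SketchThreadModel | undefined {
  if (!model) return undefined
  return {
    // The provider key was renamed as part of the Cortex migration.
    provider: model.provider.replace('llama.cpp', 'llamacpp'),
    // Keep the first two ID segments; the trailing file name is not needed.
    id: model.id.split(':').slice(0, 2).join(sep()),
  }
}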