fix(Model): switch model caused app crash (#1596)

Signed-off-by: James <james@jan.ai>
Co-authored-by: James <james@jan.ai>
Co-authored-by: Louis <louis@jan.ai>
This commit is contained in:
NamH 2024-01-17 09:33:40 +07:00 committed by GitHub
parent f293e11c58
commit db987e88f9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 187 additions and 127 deletions

View File

@ -3,7 +3,6 @@
import * as React from 'react' import * as React from 'react'
import { import {
CaretSortIcon, CaretSortIcon,
// CheckIcon,
ChevronDownIcon, ChevronDownIcon,
ChevronUpIcon, ChevronUpIcon,
} from '@radix-ui/react-icons' } from '@radix-ui/react-icons'

View File

@ -1,11 +1,6 @@
import { useCallback, useEffect, useState } from 'react' import { useCallback, useEffect } from 'react'
import { import { InferenceEngine, Model } from '@janhq/core'
InferenceEngine,
Model,
ModelRuntimeParams,
ModelSettingParams,
} from '@janhq/core'
import { import {
Button, Button,
Select, Select,
@ -14,34 +9,26 @@ import {
SelectItem, SelectItem,
SelectTrigger, SelectTrigger,
SelectValue, SelectValue,
Input,
Tooltip,
TooltipContent,
TooltipPortal,
TooltipTrigger,
TooltipArrow,
Badge,
} from '@janhq/uikit' } from '@janhq/uikit'
import { atom, useAtomValue, useSetAtom } from 'jotai' import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'
import { MonitorIcon, InfoIcon } from 'lucide-react' import { MonitorIcon } from 'lucide-react'
import { twMerge } from 'tailwind-merge' import { twMerge } from 'tailwind-merge'
import { MainViewState } from '@/constants/screens' import { MainViewState } from '@/constants/screens'
import { useActiveModel } from '@/hooks/useActiveModel'
import { useEngineSettings } from '@/hooks/useEngineSettings'
import { useMainViewState } from '@/hooks/useMainViewState' import { useMainViewState } from '@/hooks/useMainViewState'
import useRecommendedModel from '@/hooks/useRecommendedModel' import useRecommendedModel from '@/hooks/useRecommendedModel'
import { toGibibytes } from '@/utils/converter' import { toGibibytes } from '@/utils/converter'
import { totalRamAtom, usedRamAtom } from '@/helpers/atoms/SystemBar.atom' import ModelLabel from '../ModelLabel'
import OpenAiKeyInput from '../OpenAiKeyInput'
import { import {
ModelParams, ModelParams,
activeThreadAtom, activeThreadAtom,
@ -56,25 +43,10 @@ export default function DropdownListSidebar() {
const activeThreadId = useAtomValue(getActiveThreadIdAtom) const activeThreadId = useAtomValue(getActiveThreadIdAtom)
const activeThread = useAtomValue(activeThreadAtom) const activeThread = useAtomValue(activeThreadAtom)
const threadStates = useAtomValue(threadStatesAtom) const threadStates = useAtomValue(threadStatesAtom)
const setSelectedModel = useSetAtom(selectedModelAtom) const [selectedModel, setSelectedModel] = useAtom(selectedModelAtom)
const setThreadModelParams = useSetAtom(setThreadModelParamsAtom) const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
const { activeModel } = useActiveModel()
const [selected, setSelected] = useState<Model | undefined>()
const { setMainViewState } = useMainViewState() const { setMainViewState } = useMainViewState()
const [openAISettings, setOpenAISettings] = useState<
{ api_key: string } | undefined
>(undefined)
const { readOpenAISettings, saveOpenAISettings } = useEngineSettings()
const totalRam = useAtomValue(totalRamAtom)
const usedRam = useAtomValue(usedRamAtom)
useEffect(() => {
readOpenAISettings().then((settings) => {
setOpenAISettings(settings)
})
}, [])
const { recommendedModel, downloadedModels } = useRecommendedModel() const { recommendedModel, downloadedModels } = useRecommendedModel()
const selectedName = const selectedName =
@ -89,7 +61,6 @@ export default function DropdownListSidebar() {
} }
useEffect(() => { useEffect(() => {
setSelected(recommendedModel)
setSelectedModel(recommendedModel) setSelectedModel(recommendedModel)
if (activeThread) { if (activeThread) {
@ -122,7 +93,6 @@ export default function DropdownListSidebar() {
const onValueSelected = useCallback( const onValueSelected = useCallback(
(modelId: string) => { (modelId: string) => {
const model = downloadedModels.find((m) => m.id === modelId) const model = downloadedModels.find((m) => m.id === modelId)
setSelected(model)
setSelectedModel(model) setSelectedModel(model)
if (activeThreadId) { if (activeThreadId) {
@ -140,74 +110,9 @@ export default function DropdownListSidebar() {
return null return null
} }
const getLabel = (size: number) => {
const minimumRamModel = size * 1.25
const availableRam = totalRam - usedRam + (activeModel?.metadata.size ?? 0)
if (minimumRamModel > totalRam) {
return (
<Badge className="space-x-1 rounded-md" themes="danger">
<span>Not enough RAM</span>
<Tooltip>
<TooltipTrigger>
<InfoIcon size={16} />
</TooltipTrigger>
<TooltipPortal>
<TooltipContent
side="right"
sideOffset={10}
className="max-w-[300px]"
>
<span>
{`This tag signals insufficient RAM for optimal model
performance. It's dynamic and may change with your system's
RAM availability.`}
</span>
<TooltipArrow />
</TooltipContent>
</TooltipPortal>
</Tooltip>
</Badge>
)
}
if (minimumRamModel < availableRam) {
return (
<Badge className="space-x-1 rounded-md" themes="success">
<span>Recommended</span>
</Badge>
)
}
if (minimumRamModel < totalRam && minimumRamModel > availableRam) {
return (
<Badge className="space-x-1 rounded-md" themes="warning">
<span>Slow on your device</span>
<Tooltip>
<TooltipTrigger>
<InfoIcon size={16} />
</TooltipTrigger>
<TooltipPortal>
<TooltipContent
side="right"
sideOffset={10}
className="max-w-[300px]"
>
<span>
This tag indicates that your current RAM performance may
affect model speed. It can change based on other active apps.
To improve, consider closing unnecessary applications to free
up RAM.
</span>
<TooltipArrow />
</TooltipContent>
</TooltipPortal>
</Tooltip>
</Badge>
)
}
}
return ( return (
<> <>
<Select value={selected?.id} onValueChange={onValueSelected}> <Select value={selectedModel?.id} onValueChange={onValueSelected}>
<SelectTrigger className="w-full"> <SelectTrigger className="w-full">
<SelectValue placeholder="Choose model to start"> <SelectValue placeholder="Choose model to start">
{selectedName} {selectedName}
@ -229,7 +134,9 @@ export default function DropdownListSidebar() {
<SelectItem <SelectItem
key={i} key={i}
value={x.id} value={x.id}
className={twMerge(x.id === selected?.id && 'bg-secondary')} className={twMerge(
x.id === selectedModel?.id && 'bg-secondary'
)}
> >
<div className="flex w-full justify-between"> <div className="flex w-full justify-between">
<span className="line-clamp-1 block">{x.name}</span> <span className="line-clamp-1 block">{x.name}</span>
@ -237,8 +144,9 @@ export default function DropdownListSidebar() {
<span className="font-bold text-muted-foreground"> <span className="font-bold text-muted-foreground">
{toGibibytes(x.metadata.size)} {toGibibytes(x.metadata.size)}
</span> </span>
{x.engine == InferenceEngine.nitro && {x.engine == InferenceEngine.nitro && (
getLabel(x.metadata.size)} <ModelLabel size={x.metadata.size} />
)}
</div> </div>
</div> </div>
</SelectItem> </SelectItem>
@ -258,24 +166,7 @@ export default function DropdownListSidebar() {
</SelectContent> </SelectContent>
</Select> </Select>
{selected?.engine === InferenceEngine.openai && ( <OpenAiKeyInput selectedModel={selectedModel} />
<div className="mt-4">
<label
id="thread-title"
className="mb-2 inline-block font-bold text-gray-600 dark:text-gray-300"
>
API Key
</label>
<Input
id="assistant-instructions"
placeholder="Enter your API_KEY"
defaultValue={openAISettings?.api_key}
onChange={(e) => {
saveOpenAISettings({ apiKey: e.target.value })
}}
/>
</div>
)}
</> </>
) )
} }

View File

@ -0,0 +1,34 @@
import React from 'react'
import {
Badge,
Tooltip,
TooltipArrow,
TooltipContent,
TooltipPortal,
TooltipTrigger,
} from '@janhq/uikit'
import { InfoIcon } from 'lucide-react'
/**
 * Red "Not enough RAM" badge with an explanatory tooltip.
 *
 * Rendered by ModelLabel when a model's estimated RAM requirement exceeds
 * the machine's total RAM (the threshold math lives in ModelLabel, not here —
 * this component is purely presentational and takes no props).
 */
const NotEnoughRamLabel: React.FC = () => (
  <Badge className="space-x-1 rounded-md" themes="danger">
    <span>Not enough RAM</span>
    <Tooltip>
      <TooltipTrigger>
        <InfoIcon size={16} />
      </TooltipTrigger>
      {/* Portal keeps the tooltip out of the badge's stacking context. */}
      <TooltipPortal>
        <TooltipContent side="right" sideOffset={10} className="max-w-[300px]">
          <span>
            {`This tag signals insufficient RAM for optimal model
            performance. It's dynamic and may change with your system's
            RAM availability.`}
          </span>
          <TooltipArrow />
        </TooltipContent>
      </TooltipPortal>
    </Tooltip>
  </Badge>
)

// Memoized: no props, so it never needs to re-render with its parent.
export default React.memo(NotEnoughRamLabel)

View File

@ -0,0 +1,11 @@
import React from 'react'
import { Badge } from '@janhq/uikit'
const RecommendedLabel: React.FC = () => (
<Badge className="space-x-1 rounded-md" themes="success">
<span>Recommended</span>
</Badge>
)
export default React.memo(RecommendedLabel)

View File

@ -0,0 +1,34 @@
import React from 'react'
import {
Badge,
Tooltip,
TooltipArrow,
TooltipContent,
TooltipPortal,
TooltipTrigger,
} from '@janhq/uikit'
import { InfoIcon } from 'lucide-react'
/**
 * Amber "Slow on your device" badge with an explanatory tooltip.
 *
 * Rendered by ModelLabel when a model fits in total RAM but not in the RAM
 * currently available (other apps are using it). Purely presentational;
 * takes no props — the threshold math lives in ModelLabel.
 */
const SlowOnYourDeviceLabel: React.FC = () => (
  <Badge className="space-x-1 rounded-md" themes="warning">
    <span>Slow on your device</span>
    <Tooltip>
      <TooltipTrigger>
        <InfoIcon size={16} />
      </TooltipTrigger>
      {/* Portal keeps the tooltip out of the badge's stacking context. */}
      <TooltipPortal>
        <TooltipContent side="right" sideOffset={10} className="max-w-[300px]">
          <span>
            This tag indicates that your current RAM performance may affect
            model speed. It can change based on other active apps. To improve,
            consider closing unnecessary applications to free up RAM.
          </span>
          <TooltipArrow />
        </TooltipContent>
      </TooltipPortal>
    </Tooltip>
  </Badge>
)

// Memoized: no props, so it never needs to re-render with its parent.
export default React.memo(SlowOnYourDeviceLabel)

View File

@ -0,0 +1,43 @@
import React from 'react'
import { useAtomValue } from 'jotai'
import { useActiveModel } from '@/hooks/useActiveModel'
import NotEnoughRamLabel from './NotEnoughRamLabel'
import RecommendedLabel from './RecommendedLabel'
import SlowOnYourDeviceLabel from './SlowOnYourDeviceLabel'
import { totalRamAtom, usedRamAtom } from '@/helpers/atoms/SystemBar.atom'
type Props = {
  // Model size as reported by model.metadata.size — presumably bytes; TODO confirm.
  size: number
}

/**
 * Picks the RAM-fitness badge for a model of the given size:
 * - NotEnoughRamLabel    when the estimated need exceeds total system RAM,
 * - RecommendedLabel     when it fits within currently available RAM,
 * - SlowOnYourDeviceLabel when it fits in total RAM but not in what is free,
 * - nothing (null)       on the exact boundary values.
 */
const ModelLabel: React.FC<Props> = ({ size }) => {
  const { activeModel } = useActiveModel()
  const totalRam = useAtomValue(totalRamAtom)
  const usedRam = useAtomValue(usedRamAtom)

  // Estimated working set: model size plus a 25% overhead margin.
  const requiredRam = size * 1.25
  // RAM this model could claim: free RAM plus whatever the currently
  // active model would release if swapped out.
  const availableRam = totalRam - usedRam + (activeModel?.metadata.size ?? 0)

  if (requiredRam > totalRam) {
    return <NotEnoughRamLabel />
  }
  if (requiredRam < availableRam) {
    return <RecommendedLabel />
  }
  if (requiredRam < totalRam && requiredRam > availableRam) {
    return <SlowOnYourDeviceLabel />
  }
  return null
}

export default React.memo(ModelLabel)

View File

@ -0,0 +1,48 @@
import React, { useEffect, useState } from 'react'
import { InferenceEngine, Model } from '@janhq/core'
import { Input } from '@janhq/uikit'
import { useEngineSettings } from '@/hooks/useEngineSettings'
type Props = {
  // Currently selected model; the input only renders for OpenAI-engine models.
  selectedModel?: Model
}

/**
 * API-key field shown underneath the model dropdown when the selected model
 * runs on the OpenAI inference engine.
 *
 * Loads the stored key once on mount and persists every keystroke via
 * useEngineSettings. The input is controlled so the key appears even when
 * readOpenAISettings resolves after the first render (the previous
 * uncontrolled `defaultValue` stayed empty in that case).
 */
const OpenAiKeyInput: React.FC<Props> = ({ selectedModel }) => {
  const [openAISettings, setOpenAISettings] = useState<
    { api_key: string } | undefined
  >(undefined)
  const { readOpenAISettings, saveOpenAISettings } = useEngineSettings()

  useEffect(() => {
    readOpenAISettings().then((settings) => {
      setOpenAISettings(settings)
    })
    // Intentionally load once on mount.
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [])

  // Only OpenAI-backed models need an API key.
  if (!selectedModel || selectedModel.engine !== InferenceEngine.openai) {
    return null
  }

  return (
    <div className="mt-4">
      {/* htmlFor/id pair associates the label with the input for a11y;
          the old `id="thread-title"` / `id="assistant-instructions"` pair
          was copy-paste residue and linked nothing. */}
      <label
        htmlFor="openai-api-key"
        className="mb-2 inline-block font-bold text-gray-600 dark:text-gray-300"
      >
        API Key
      </label>
      <Input
        id="openai-api-key"
        placeholder="Enter your API_KEY"
        value={openAISettings?.api_key ?? ''}
        onChange={(e) => {
          // Mirror the edit into local state (controlled input) and
          // persist it immediately.
          setOpenAISettings({ api_key: e.target.value })
          saveOpenAISettings({ apiKey: e.target.value })
        }}
      />
    </div>
  )
}

export default OpenAiKeyInput