From db987e88f90ba5ca80cc3ae426a53874a1a526b9 Mon Sep 17 00:00:00 2001 From: NamH Date: Wed, 17 Jan 2024 09:33:40 +0700 Subject: [PATCH] fix(Model): switch model caused app crash (#1596) Signed-off-by: James Co-authored-by: James Co-authored-by: Louis --- uikit/src/select/index.tsx | 1 - web/containers/DropdownListSidebar/index.tsx | 143 +++--------------- .../ModelLabel/NotEnoughRamLabel.tsx | 34 +++++ .../ModelLabel/RecommendedLabel.tsx | 11 ++ .../ModelLabel/SlowOnYourDeviceLabel.tsx | 34 +++++ web/containers/ModelLabel/index.tsx | 43 ++++++ web/containers/OpenAiKeyInput/index.tsx | 48 ++++++ 7 files changed, 187 insertions(+), 127 deletions(-) create mode 100644 web/containers/ModelLabel/NotEnoughRamLabel.tsx create mode 100644 web/containers/ModelLabel/RecommendedLabel.tsx create mode 100644 web/containers/ModelLabel/SlowOnYourDeviceLabel.tsx create mode 100644 web/containers/ModelLabel/index.tsx create mode 100644 web/containers/OpenAiKeyInput/index.tsx diff --git a/uikit/src/select/index.tsx b/uikit/src/select/index.tsx index 9bee7a153..e31e28f89 100644 --- a/uikit/src/select/index.tsx +++ b/uikit/src/select/index.tsx @@ -3,7 +3,6 @@ import * as React from 'react' import { CaretSortIcon, - // CheckIcon, ChevronDownIcon, ChevronUpIcon, } from '@radix-ui/react-icons' diff --git a/web/containers/DropdownListSidebar/index.tsx b/web/containers/DropdownListSidebar/index.tsx index e2b1abb3e..46a2ba712 100644 --- a/web/containers/DropdownListSidebar/index.tsx +++ b/web/containers/DropdownListSidebar/index.tsx @@ -1,11 +1,6 @@ -import { useCallback, useEffect, useState } from 'react' +import { useCallback, useEffect } from 'react' -import { - InferenceEngine, - Model, - ModelRuntimeParams, - ModelSettingParams, -} from '@janhq/core' +import { InferenceEngine, Model } from '@janhq/core' import { Button, Select, @@ -14,34 +9,26 @@ import { SelectItem, SelectTrigger, SelectValue, - Input, - Tooltip, - TooltipContent, - TooltipPortal, - TooltipTrigger, - TooltipArrow, - Badge, } from '@janhq/uikit' -import { atom, useAtomValue, useSetAtom } from 'jotai' +import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai' -import { MonitorIcon, InfoIcon } from 'lucide-react' +import { MonitorIcon } from 'lucide-react' import { twMerge } from 'tailwind-merge' import { MainViewState } from '@/constants/screens' -import { useActiveModel } from '@/hooks/useActiveModel' - -import { useEngineSettings } from '@/hooks/useEngineSettings' - import { useMainViewState } from '@/hooks/useMainViewState' import useRecommendedModel from '@/hooks/useRecommendedModel' import { toGibibytes } from '@/utils/converter' -import { totalRamAtom, usedRamAtom } from '@/helpers/atoms/SystemBar.atom' +import ModelLabel from '../ModelLabel' + +import OpenAiKeyInput from '../OpenAiKeyInput' + import { ModelParams, activeThreadAtom, @@ -56,25 +43,10 @@ export default function DropdownListSidebar() { const activeThreadId = useAtomValue(getActiveThreadIdAtom) const activeThread = useAtomValue(activeThreadAtom) const threadStates = useAtomValue(threadStatesAtom) - const setSelectedModel = useSetAtom(selectedModelAtom) + const [selectedModel, setSelectedModel] = useAtom(selectedModelAtom) const setThreadModelParams = useSetAtom(setThreadModelParamsAtom) - const { activeModel } = useActiveModel() - const [selected, setSelected] = useState() const { setMainViewState } = useMainViewState() - const [openAISettings, setOpenAISettings] = useState< - { api_key: string } | undefined - >(undefined) - const { readOpenAISettings, 
saveOpenAISettings } = useEngineSettings() - const totalRam = useAtomValue(totalRamAtom) - const usedRam = useAtomValue(usedRamAtom) - - useEffect(() => { - readOpenAISettings().then((settings) => { - setOpenAISettings(settings) - }) - }, []) - const { recommendedModel, downloadedModels } = useRecommendedModel() const selectedName = @@ -89,7 +61,6 @@ export default function DropdownListSidebar() { } useEffect(() => { - setSelected(recommendedModel) setSelectedModel(recommendedModel) if (activeThread) { @@ -122,7 +93,6 @@ export default function DropdownListSidebar() { const onValueSelected = useCallback( (modelId: string) => { const model = downloadedModels.find((m) => m.id === modelId) - setSelected(model) setSelectedModel(model) if (activeThreadId) { @@ -140,74 +110,9 @@ export default function DropdownListSidebar() { return null } - const getLabel = (size: number) => { - const minimumRamModel = size * 1.25 - const availableRam = totalRam - usedRam + (activeModel?.metadata.size ?? 0) - if (minimumRamModel > totalRam) { - return ( - - Not enough RAM - - - - - - - - {`This tag signals insufficient RAM for optimal model - performance. It's dynamic and may change with your system's - RAM availability.`} - - - - - - - ) - } - if (minimumRamModel < availableRam) { - return ( - - Recommended - - ) - } - if (minimumRamModel < totalRam && minimumRamModel > availableRam) { - return ( - - Slow on your device - - - - - - - - This tag indicates that your current RAM performance may - affect model speed. It can change based on other active apps. - To improve, consider closing unnecessary applications to free - up RAM. - - - - - - - ) - } - } - return ( <> - {selectedName} @@ -229,7 +134,9 @@ export default function DropdownListSidebar() {
                      {x.name}

@@ -237,8 +144,9 @@ export default function DropdownListSidebar() {
                        {toGibibytes(x.metadata.size)}

-                    {x.engine == InferenceEngine.nitro &&
-                      getLabel(x.metadata.size)}
+                    {x.engine == InferenceEngine.nitro && (
+                      <ModelLabel size={x.metadata.size} />
+                    )}

@@ -258,24 +166,7 @@ export default function DropdownListSidebar() { - {selected?.engine === InferenceEngine.openai && ( -
- - { - saveOpenAISettings({ apiKey: e.target.value }) - }} - /> -
- )} + ) } diff --git a/web/containers/ModelLabel/NotEnoughRamLabel.tsx b/web/containers/ModelLabel/NotEnoughRamLabel.tsx new file mode 100644 index 000000000..b24e42d63 --- /dev/null +++ b/web/containers/ModelLabel/NotEnoughRamLabel.tsx @@ -0,0 +1,34 @@ +import React from 'react' + +import { + Badge, + Tooltip, + TooltipArrow, + TooltipContent, + TooltipPortal, + TooltipTrigger, +} from '@janhq/uikit' +import { InfoIcon } from 'lucide-react' + +const NotEnoughRamLabel: React.FC = () => ( + + Not enough RAM + + + + + + + + {`This tag signals insufficient RAM for optimal model + performance. It's dynamic and may change with your system's + RAM availability.`} + + + + + + +) + +export default React.memo(NotEnoughRamLabel) diff --git a/web/containers/ModelLabel/RecommendedLabel.tsx b/web/containers/ModelLabel/RecommendedLabel.tsx new file mode 100644 index 000000000..f3e8b215d --- /dev/null +++ b/web/containers/ModelLabel/RecommendedLabel.tsx @@ -0,0 +1,11 @@ +import React from 'react' + +import { Badge } from '@janhq/uikit' + +const RecommendedLabel: React.FC = () => ( + + Recommended + +) + +export default React.memo(RecommendedLabel) diff --git a/web/containers/ModelLabel/SlowOnYourDeviceLabel.tsx b/web/containers/ModelLabel/SlowOnYourDeviceLabel.tsx new file mode 100644 index 000000000..22211c296 --- /dev/null +++ b/web/containers/ModelLabel/SlowOnYourDeviceLabel.tsx @@ -0,0 +1,34 @@ +import React from 'react' + +import { + Badge, + Tooltip, + TooltipArrow, + TooltipContent, + TooltipPortal, + TooltipTrigger, +} from '@janhq/uikit' +import { InfoIcon } from 'lucide-react' + +const SlowOnYourDeviceLabel: React.FC = () => ( + + Slow on your device + + + + + + + + This tag indicates that your current RAM performance may affect + model speed. It can change based on other active apps. To improve, + consider closing unnecessary applications to free up RAM. + + + + + + +) + +export default React.memo(SlowOnYourDeviceLabel) diff --git a/web/containers/ModelLabel/index.tsx b/web/containers/ModelLabel/index.tsx new file mode 100644 index 000000000..1cd9cbb85 --- /dev/null +++ b/web/containers/ModelLabel/index.tsx @@ -0,0 +1,43 @@ +import React from 'react' + +import { useAtomValue } from 'jotai' + +import { useActiveModel } from '@/hooks/useActiveModel' + +import NotEnoughRamLabel from './NotEnoughRamLabel' + +import RecommendedLabel from './RecommendedLabel' + +import SlowOnYourDeviceLabel from './SlowOnYourDeviceLabel' + +import { totalRamAtom, usedRamAtom } from '@/helpers/atoms/SystemBar.atom' + +type Props = { + size: number +} + +const ModelLabel: React.FC = ({ size }) => { + const { activeModel } = useActiveModel() + const totalRam = useAtomValue(totalRamAtom) + const usedRam = useAtomValue(usedRamAtom) + + const getLabel = (size: number) => { + const minimumRamModel = size * 1.25 + const availableRam = totalRam - usedRam + (activeModel?.metadata.size ?? 
0) + if (minimumRamModel > totalRam) { + return + } + if (minimumRamModel < availableRam) { + return + } + if (minimumRamModel < totalRam && minimumRamModel > availableRam) { + return + } + + return null + } + + return getLabel(size) +} + +export default React.memo(ModelLabel) diff --git a/web/containers/OpenAiKeyInput/index.tsx b/web/containers/OpenAiKeyInput/index.tsx new file mode 100644 index 000000000..c6c6e6489 --- /dev/null +++ b/web/containers/OpenAiKeyInput/index.tsx @@ -0,0 +1,48 @@ +import React, { useEffect, useState } from 'react' + +import { InferenceEngine, Model } from '@janhq/core' +import { Input } from '@janhq/uikit' + +import { useEngineSettings } from '@/hooks/useEngineSettings' + +type Props = { + selectedModel?: Model +} + +const OpenAiKeyInput: React.FC = ({ selectedModel }) => { + const [openAISettings, setOpenAISettings] = useState< + { api_key: string } | undefined + >(undefined) + const { readOpenAISettings, saveOpenAISettings } = useEngineSettings() + + useEffect(() => { + readOpenAISettings().then((settings) => { + setOpenAISettings(settings) + }) + }, []) + + if (!selectedModel || selectedModel.engine !== InferenceEngine.openai) { + return null + } + + return ( +
+ + { + saveOpenAISettings({ apiKey: e.target.value }) + }} + /> +
+ ) +} + +export default OpenAiKeyInput
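
Note on the DropdownListSidebar change: the component used to keep the selected model both in local useState (selected/setSelected) and in the shared selectedModelAtom; the patch drops the local copy and reads and writes the atom directly with useAtom. A minimal sketch of that pattern, using a simplified Model type and a hypothetical useSelectedModel helper that is not part of the patch:

import { atom, useAtom } from 'jotai'

// Simplified stand-in for the Model type from @janhq/core.
type Model = { id: string; name: string }

// Single source of truth for the currently selected model.
const selectedModelAtom = atom<Model | undefined>(undefined)

// Hypothetical hook sketching the pattern: every consumer reads the same atom,
// and selecting a model writes to it once, with no component-local copy to keep in sync.
function useSelectedModel(downloadedModels: Model[]) {
  const [selectedModel, setSelectedModel] = useAtom(selectedModelAtom)

  const onValueSelected = (modelId: string) => {
    setSelectedModel(downloadedModels.find((m) => m.id === modelId))
  }

  return { selectedModel, onValueSelected }
}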
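
Note on ModelLabel: the component picks one of three badges from the model size and system RAM. The sketch below restates the same comparisons as a plain function with numeric arguments instead of the Jotai atoms and the active-model hook; the 1.25 multiplier and the three conditions mirror web/containers/ModelLabel/index.tsx, while the figures in the worked example are made up for illustration.

// Values are in bytes.
type RamLabel = 'not-enough-ram' | 'recommended' | 'slow-on-your-device' | null

function getRamLabel(
  modelSize: number, // model.metadata.size
  totalRam: number, // totalRamAtom
  usedRam: number, // usedRamAtom
  activeModelSize = 0 // activeModel?.metadata.size ?? 0
): RamLabel {
  const minimumRamModel = modelSize * 1.25
  // RAM held by the currently loaded model counts as available again,
  // because switching models unloads it.
  const availableRam = totalRam - usedRam + activeModelSize

  if (minimumRamModel > totalRam) return 'not-enough-ram'
  if (minimumRamModel < availableRam) return 'recommended'
  if (minimumRamModel < totalRam && minimumRamModel > availableRam) {
    return 'slow-on-your-device'
  }
  return null
}

// Worked example with made-up figures: a 4 GiB model needs 5 GiB (4 * 1.25).
// On a 16 GiB machine with 13 GiB used and no model loaded, available RAM is
// 3 GiB, so the model fits in total RAM but not in free RAM.
const GiB = 1024 ** 3
console.log(getRamLabel(4 * GiB, 16 * GiB, 13 * GiB)) // 'slow-on-your-device'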
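
Note on the extracted components: a hypothetical rendering site, not taken from the patch, assuming only the props declared in the new files. ModelLabel takes the model size in bytes, and OpenAiKeyInput takes the optionally selected Model and renders nothing unless that model runs on the OpenAI engine.

import React from 'react'

import { Model } from '@janhq/core'

import ModelLabel from '@/containers/ModelLabel'
import OpenAiKeyInput from '@/containers/OpenAiKeyInput'

type Props = {
  model: Model // the model this row describes
  selectedModel?: Model // the model currently selected in the sidebar
}

// Hypothetical row component; element structure and class names are illustrative only.
const ModelRow: React.FC<Props> = ({ model, selectedModel }) => (
  <div className="flex flex-col gap-2">
    <span>{model.name}</span>
    <ModelLabel size={model.metadata.size} />
    <OpenAiKeyInput selectedModel={selectedModel} />
  </div>
)

export default ModelRow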