import { useCallback, useEffect, useState } from 'react'

import {
  InferenceEngine,
  Model,
  ModelRuntimeParams,
  ModelSettingParams,
} from '@janhq/core'
import {
  Button,
  Select,
  SelectContent,
  SelectGroup,
  SelectItem,
  SelectTrigger,
  SelectValue,
  Input,
  Tooltip,
  TooltipContent,
  TooltipPortal,
  TooltipTrigger,
  TooltipArrow,
  Badge,
} from '@janhq/uikit'

import { atom, useAtomValue, useSetAtom } from 'jotai'
import { MonitorIcon, InfoIcon } from 'lucide-react'
import { twMerge } from 'tailwind-merge'

import { MainViewState } from '@/constants/screens'

import { useActiveModel } from '@/hooks/useActiveModel'
import { useEngineSettings } from '@/hooks/useEngineSettings'
import { useMainViewState } from '@/hooks/useMainViewState'
import useRecommendedModel from '@/hooks/useRecommendedModel'

import { toGibibytes } from '@/utils/converter'

import { totalRamAtom, usedRamAtom } from '@/helpers/atoms/SystemBar.atom'
import {
  ModelParams,
  activeThreadAtom,
  getActiveThreadIdAtom,
  setThreadModelParamsAtom,
  threadStatesAtom,
} from '@/helpers/atoms/Thread.atom'

export const selectedModelAtom = atom<Model | undefined>(undefined)

export default function DropdownListSidebar() {
  const activeThreadId = useAtomValue(getActiveThreadIdAtom)
  const activeThread = useAtomValue(activeThreadAtom)
  const threadStates = useAtomValue(threadStatesAtom)
  const setSelectedModel = useSetAtom(selectedModelAtom)
  const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)

  const { activeModel } = useActiveModel()
  const [selected, setSelected] = useState<Model | undefined>()
  const { setMainViewState } = useMainViewState()
  const [openAISettings, setOpenAISettings] = useState<
    { api_key: string } | undefined
  >(undefined)
  const { readOpenAISettings, saveOpenAISettings } = useEngineSettings()

  const totalRam = useAtomValue(totalRamAtom)
  const usedRam = useAtomValue(usedRamAtom)

  useEffect(() => {
    readOpenAISettings().then((settings) => {
      setOpenAISettings(settings)
    })
  }, [])

  const { recommendedModel, downloadedModels } = useRecommendedModel()

  /**
   * Default value for max_tokens and ctx_len.
   * It's to avoid OOM issues, since a model can set a very large value for
   * these settings.
   */
  const defaultValue = (value?: number) => {
    if (value && value < 4096) return value
    return 4096
  }

  useEffect(() => {
    setSelected(recommendedModel)
    setSelectedModel(recommendedModel)

    if (activeThread) {
      const finishInit = threadStates[activeThread.id].isFinishInit ?? true
      if (finishInit) return
      const modelParams: ModelParams = {
        ...recommendedModel?.parameters,
        ...recommendedModel?.settings,
        // Use a bounded default for these settings instead of the model's maximum
        max_tokens: defaultValue(recommendedModel?.parameters.max_tokens),
        ctx_len: defaultValue(recommendedModel?.settings.ctx_len),
      }
      setThreadModelParams(activeThread.id, modelParams)
    }
  }, [
    recommendedModel,
    activeThread,
    setSelectedModel,
    setThreadModelParams,
    threadStates,
  ])

  const onValueSelected = useCallback(
    (modelId: string) => {
      const model = downloadedModels.find((m) => m.id === modelId)
      setSelected(model)
      setSelectedModel(model)

      if (activeThreadId) {
        const modelParams = {
          ...model?.parameters,
          ...model?.settings,
        }
        setThreadModelParams(activeThreadId, modelParams)
      }
    },
    [downloadedModels, activeThreadId, setSelectedModel, setThreadModelParams]
  )

  if (!activeThread) {
    return null
  }

  const getLabel = (size: number) => {
    const minimumRamModel = size * 1.25
    const availableRam = totalRam - usedRam + (activeModel?.metadata.size ?? 0)
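    // The 1.25x factor leaves headroom above the raw model size, and the RAM
    // held by the currently active model is counted as available again,
    // presumably because it is freed when the user switches models.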
    // Badge/Tooltip markup below is reconstructed from the imported uikit
    // components; exact props and styling are assumptions.
    if (minimumRamModel > totalRam) {
      return (
        <Badge>
          <span>Not enough RAM</span>
          <Tooltip>
            <TooltipTrigger>
              <InfoIcon size={16} />
            </TooltipTrigger>
            <TooltipPortal>
              <TooltipContent>
                <span>
                  {`This tag signals insufficient RAM for optimal model performance. It's dynamic and may change with your system's RAM availability.`}
                </span>
                <TooltipArrow />
              </TooltipContent>
            </TooltipPortal>
          </Tooltip>
        </Badge>
      )
    }
    if (minimumRamModel < availableRam) {
      return <Badge>Recommended</Badge>
    }
    if (minimumRamModel < totalRam && minimumRamModel > availableRam) {
      return (
        <Badge>
          <span>Slow on your device</span>
          <Tooltip>
            <TooltipTrigger>
              <InfoIcon size={16} />
            </TooltipTrigger>
            <TooltipPortal>
              <TooltipContent>
                <span>
                  This tag indicates that your current RAM performance may
                  affect model speed. It can change based on other active apps.
                  To improve, consider closing unnecessary applications to free
                  up RAM.
                </span>
                <TooltipArrow />
              </TooltipContent>
            </TooltipPortal>
          </Tooltip>
        </Badge>
      )
    }
  }

  // Dropdown markup reconstructed from the imported uikit components; class
  // names, copy, and MainViewState.Hub are assumptions.
  return (
    <>
      <Select value={selected?.id} onValueChange={onValueSelected}>
        <SelectTrigger className="w-full">
          <SelectValue placeholder="Select a model" />
        </SelectTrigger>
        <SelectContent>
          <SelectGroup>
            {downloadedModels.length === 0 ? (
              <div className="flex items-center space-x-2 px-4 py-2">
                <MonitorIcon size={16} />
                <span>No models downloaded yet.</span>
                <Button onClick={() => setMainViewState(MainViewState.Hub)}>
                  Explore The Hub
                </Button>
              </div>
            ) : (
              downloadedModels.map((model) => (
                <SelectItem
                  key={model.id}
                  value={model.id}
                  className={twMerge(
                    model.id === selected?.id && 'bg-secondary'
                  )}
                >
                  <span>{model.name}</span>
                  <span>{toGibibytes(model.metadata.size)}</span>
                  {getLabel(model.metadata.size)}
                </SelectItem>
              ))
            )}
          </SelectGroup>
        </SelectContent>
      </Select>

      {selected?.engine === InferenceEngine.openai && (
        // API key input reconstructed around the surviving onChange handler;
        // placeholder and defaultValue are assumptions.
        <Input
          placeholder="API key"
          defaultValue={openAISettings?.api_key}
          onChange={(e) => {
            saveOpenAISettings({ apiKey: e.target.value })
          }}
        />
      )}
    </>
  )
}
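
// Illustrative usage (not part of the original file): other components can
// observe the current selection through the exported `selectedModelAtom`, e.g.
//
//   const selectedModel = useAtomValue(selectedModelAtom)
//
// and gate engine-specific UI on `selectedModel?.engine`.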