import { useState, useMemo, useEffect, useCallback } from 'react'

import { InferenceEngine } from '@janhq/core'
import { Badge, Input, ScrollArea, Select, useClickOutside } from '@janhq/joi'
import { useAtom, useAtomValue, useSetAtom } from 'jotai'
import { ChevronDownIcon, DownloadCloudIcon, XIcon } from 'lucide-react'
import { twMerge } from 'tailwind-merge'

import ProgressCircle from '@/containers/Loader/ProgressCircle'
import ModelLabel from '@/containers/ModelLabel'
import SetupRemoteModel from '@/containers/SetupRemoteModel'

import useDownloadModel from '@/hooks/useDownloadModel'
import { modelDownloadStateAtom } from '@/hooks/useDownloadState'
import useRecommendedModel from '@/hooks/useRecommendedModel'
import useUpdateModelParameters from '@/hooks/useUpdateModelParameters'

import { formatDownloadPercentage, toGibibytes } from '@/utils/converter'

import { extensionManager } from '@/extension'
import { inActiveEngineProviderAtom } from '@/helpers/atoms/Extension.atom'
import {
  configuredModelsAtom,
  getDownloadingModelAtom,
  selectedModelAtom,
} from '@/helpers/atoms/Model.atom'
import {
  activeThreadAtom,
  setThreadModelParamsAtom,
} from '@/helpers/atoms/Thread.atom'

type Props = {
  chatInputMode?: boolean
  strictedThread?: boolean
  disabled?: boolean
}

const engineHasLogo = [
  InferenceEngine.anthropic,
  InferenceEngine.cohere,
  InferenceEngine.martian,
  InferenceEngine.mistral,
  InferenceEngine.openai,
]

const ModelDropdown = ({
  disabled,
  chatInputMode,
  strictedThread = true,
}: Props) => {
  const { downloadModel } = useDownloadModel()
  const [searchFilter, setSearchFilter] = useState('all')
  const [filterOptionsOpen, setFilterOptionsOpen] = useState(false)
  const [searchText, setSearchText] = useState('')
  const [open, setOpen] = useState(false)
  const activeThread = useAtomValue(activeThreadAtom)
  const downloadingModels = useAtomValue(getDownloadingModelAtom)
  const [toggle, setToggle] = useState<HTMLDivElement | null>(null)
  const [selectedModel, setSelectedModel] = useAtom(selectedModelAtom)
  const { recommendedModel, downloadedModels } = useRecommendedModel()
  const [dropdownOptions, setDropdownOptions] = useState<HTMLDivElement | null>(
    null
  )
  const downloadStates = useAtomValue(modelDownloadStateAtom)
  const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
  const { updateModelParameter } = useUpdateModelParameters()

  const configuredModels = useAtomValue(configuredModelsAtom)
  const featuredModel = configuredModels.filter((x) =>
    x.metadata.tags.includes('Featured')
  )

  useClickOutside(() => !filterOptionsOpen && setOpen(false), null, [
    dropdownOptions,
    toggle,
  ])

  const filteredDownloadedModels = useMemo(
    () =>
      downloadedModels
        .filter((e) =>
          e.name.toLowerCase().includes(searchText.toLowerCase().trim())
        )
        .filter((e) => {
          if (searchFilter === 'all') {
            return e.engine
          }
          if (searchFilter === 'local') {
            return (
              e.engine === InferenceEngine.nitro ||
              e.engine === InferenceEngine.nitro_tensorrt_llm
            )
          }
          if (searchFilter === 'remote') {
            return (
              e.engine !== InferenceEngine.nitro &&
              e.engine !== InferenceEngine.nitro_tensorrt_llm
            )
          }
        })
        .sort((a, b) => a.name.localeCompare(b.name)),
    [downloadedModels, searchText, searchFilter]
  )

  useEffect(() => {
    if (!activeThread) return
    let model = downloadedModels.find(
      (model) => model.id === activeThread.assistants[0].model.id
    )
    if (!model) {
      model = recommendedModel
    }
    setSelectedModel(model)
  }, [recommendedModel, activeThread, downloadedModels, setSelectedModel])

  const onClickModelItem = useCallback(
    async (modelId: string) => {
      const model = downloadedModels.find((m) => m.id === modelId)
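      // Select the model and close the dropdown, then sync its parameters
      // into the active thread below.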
      setSelectedModel(model)
      setOpen(false)

      if (activeThread) {
        // Default setting ctx_len for the model for a better onboarding experience
        // TODO: When Cortex support hardware instructions, we should remove this
        const overriddenSettings =
          model?.settings.ctx_len && model.settings.ctx_len > 2048
            ? { ctx_len: 2048 }
            : {}

        const modelParams = {
          ...model?.parameters,
          ...model?.settings,
          ...overriddenSettings,
        }

        // Update model parameter to the thread state
        setThreadModelParams(activeThread.id, modelParams)

        // Update model parameter to the thread file
        if (model)
          updateModelParameter(activeThread, {
            params: modelParams,
            modelId: model.id,
            engine: model.engine,
          })
      }
    },
    [
      downloadedModels,
      activeThread,
      setSelectedModel,
      setThreadModelParams,
      updateModelParameter,
    ]
  )

  const [extensionHasSettings, setExtensionHasSettings] = useState<
    { name?: string; setting: string; apiKey: string; provider: string }[]
  >([])

  const inActiveEngineProvider = useAtomValue(inActiveEngineProviderAtom)

  useEffect(() => {
    const getAllSettings = async () => {
      const extensionsMenu: {
        name?: string
        setting: string
        apiKey: string
        provider: string
      }[] = []
      const extensions = extensionManager.getAll()

      for (const extension of extensions) {
        if (typeof extension.getSettings === 'function') {
          const settings = await extension.getSettings()
          if (
            (settings && settings.length > 0) ||
            (await extension.installationState()) !== 'NotRequired'
          ) {
            extensionsMenu.push({
              name: extension.productName,
              setting: extension.name,
              apiKey:
                'apiKey' in extension && typeof extension.apiKey === 'string'
                  ? extension.apiKey
                  : '',
              provider:
                'provider' in extension &&
                typeof extension.provider === 'string'
                  ? extension.provider
                  : '',
            })
          }
        }
      }
      setExtensionHasSettings(extensionsMenu)
    }
    getAllSettings()
  }, [])

  const findByEngine = filteredDownloadedModels
    .filter((x) => !inActiveEngineProvider.includes(x.engine))
    .map((x) => x.engine)

  const groupByEngine = findByEngine.filter(function (item, index) {
    if (findByEngine.indexOf(item) === index)
      return item !== InferenceEngine.nitro
  })

  if (strictedThread && !activeThread) {
    return null
  }
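  // Render: a trigger (compact badge in chat-input mode, read-only input
  // otherwise) that toggles the dropdown panel below it.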
  return (
    <div className={twMerge('relative', disabled && 'pointer-events-none')}>
      <div ref={setToggle}>
        {chatInputMode ? (
          <Badge
            className="cursor-pointer"
            onClick={() => setOpen(!open)}
          >
            {selectedModel?.name}
          </Badge>
        ) : (
          <Input
            value={selectedModel?.name || ''}
            className="cursor-pointer"
            readOnly
            suffixIcon={<ChevronDownIcon size={14} />}
            onClick={() => setOpen(!open)}
          />
        )}
      </div>
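      {/* Dropdown panel: its element is registered via setDropdownOptions so
          useClickOutside treats clicks inside it as inside the dropdown */}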
      <div
        className={twMerge('absolute z-20 w-full', !open && 'hidden')}
        ref={setDropdownOptions}
      >
        <Input
          placeholder="Search"
          value={searchText}
          onChange={(e) => setSearchText(e.target.value)}
          suffixIcon={
            searchText.length > 0 && (
              <XIcon
                size={16}
                className="cursor-pointer"
                onClick={() => setSearchText('')}
              />
            )
          }
        />