feat: handle the thread case where the original model was deleted (#4280)

This commit is contained in:
Faisal Amir 2024-12-18 18:37:26 +08:00 committed by GitHub
parent 0cd0ff0443
commit 7d07e995ab
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 122 additions and 68 deletions

View File

@ -12,7 +12,7 @@ import {
useClickOutside,
} from '@janhq/joi'
import { useAtom, useAtomValue, useSetAtom } from 'jotai'
import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'
import {
ChevronDownIcon,
@ -37,6 +37,7 @@ import useUpdateModelParameters from '@/hooks/useUpdateModelParameters'
import { formatDownloadPercentage, toGibibytes } from '@/utils/converter'
import { manualRecommendationModel } from '@/utils/model'
import {
getLogoEngine,
getTitleByEngine,
@ -65,16 +66,21 @@ type Props = {
disabled?: boolean
}
export const modelDropdownStateAtom = atom(false)
const ModelDropdown = ({
disabled,
chatInputMode,
strictedThread = true,
}: Props) => {
const { downloadModel } = useDownloadModel()
const [modelDropdownState, setModelDropdownState] = useAtom(
modelDropdownStateAtom
)
const [searchFilter, setSearchFilter] = useState('local')
const [searchText, setSearchText] = useState('')
const [open, setOpen] = useState(false)
const [open, setOpen] = useState<boolean>(modelDropdownState)
const activeThread = useAtomValue(activeThreadAtom)
const activeAssistant = useAtomValue(activeAssistantAtom)
const downloadingModels = useAtomValue(getDownloadingModelAtom)
@ -84,22 +90,38 @@ const ModelDropdown = ({
const [dropdownOptions, setDropdownOptions] = useState<HTMLDivElement | null>(
null
)
const downloadStates = useAtomValue(modelDownloadStateAtom)
const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
const { updateModelParameter } = useUpdateModelParameters()
const searchInputRef = useRef<HTMLInputElement>(null)
const configuredModels = useAtomValue(configuredModelsAtom)
const featuredModel = configuredModels.filter((x) =>
x.metadata?.tags?.includes('Featured')
const featuredModel = configuredModels.filter(
(x) =>
manualRecommendationModel.includes(x.id) &&
x.metadata?.tags?.includes('Featured') &&
x.metadata?.size < 5000000000
)
const { updateThreadMetadata } = useCreateNewThread()
useClickOutside(() => setOpen(false), null, [dropdownOptions, toggle])
useClickOutside(() => handleChangeStateOpen(false), null, [
dropdownOptions,
toggle,
])
const [showEngineListModel, setShowEngineListModel] = useAtom(
showEngineListModelAtom
)
const handleChangeStateOpen = useCallback(
(state: boolean) => {
setOpen(state)
setModelDropdownState(state)
},
[setModelDropdownState]
)
const isModelSupportRagAndTools = useCallback((model: Model) => {
return (
model?.engine === InferenceEngine.openai ||
@ -145,6 +167,12 @@ const ModelDropdown = ({
[configuredModels, searchText, searchFilter, downloadedModels]
)
useEffect(() => {
if (modelDropdownState && chatInputMode) {
setOpen(modelDropdownState)
}
}, [chatInputMode, modelDropdownState])
useEffect(() => {
if (open && searchInputRef.current) {
searchInputRef.current.focus()
@ -157,7 +185,7 @@ const ModelDropdown = ({
let model = downloadedModels.find((model) => model.id === modelId)
if (!model) {
model = recommendedModel
model = undefined
}
setSelectedModel(model)
}, [
@ -343,14 +371,21 @@ const ModelDropdown = ({
'inline-block max-w-[200px] cursor-pointer overflow-hidden text-ellipsis whitespace-nowrap',
open && 'border border-transparent'
)}
onClick={() => setOpen(!open)}
onClick={() => handleChangeStateOpen(!open)}
>
<span>{selectedModel?.name}</span>
<span
className={twMerge(
!selectedModel && 'text-[hsla(var(--text-tertiary))]'
)}
>
{selectedModel?.name || 'Select Model'}
</span>
</Badge>
) : (
<Input
value={selectedModel?.name || ''}
className="cursor-pointer"
placeholder="Select Model"
disabled={disabled}
readOnly
suffixIcon={

View File

@ -22,7 +22,6 @@ import { toaster } from '@/containers/Toast'
import { isLocalEngine } from '@/utils/modelEngine'
import { useActiveModel } from './useActiveModel'
import useRecommendedModel from './useRecommendedModel'
import useSetActiveThread from './useSetActiveThread'
@ -71,8 +70,6 @@ export const useCreateNewThread = () => {
const experimentalEnabled = useAtomValue(experimentalFeatureEnabledAtom)
const setIsGeneratingResponse = useSetAtom(isGeneratingResponseAtom)
const { recommendedModel, downloadedModels } = useRecommendedModel()
const threads = useAtomValue(threadsAtom)
const { stopInference } = useActiveModel()
@ -84,7 +81,7 @@ export const useCreateNewThread = () => {
setIsGeneratingResponse(false)
stopInference()
const defaultModel = model ?? recommendedModel ?? downloadedModels[0]
const defaultModel = model
if (!model) {
// If we have a model, the user wants to create a new thread from the Model Hub. Allow them.

View File

@ -15,6 +15,7 @@ import {
import { extractInferenceParams, extractModelLoadParams } from '@janhq/core'
import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'
import { modelDropdownStateAtom } from '@/containers/ModelDropdown'
import {
currentPromptAtom,
editPromptAtom,
@ -73,6 +74,7 @@ export default function useSendChatMessage() {
const activeThreadRef = useRef<Thread | undefined>()
const activeAssistantRef = useRef<ThreadAssistantInfo | undefined>()
const setTokenSpeed = useSetAtom(tokenSpeedAtom)
const setModelDropdownState = useSetAtom(modelDropdownStateAtom)
const selectedModelRef = useRef<Model | undefined>()
@ -122,6 +124,11 @@ export default function useSendChatMessage() {
return
}
if (selectedModelRef.current?.id === undefined) {
setModelDropdownState(true)
return
}
if (engineParamsUpdate) setReloadModel(true)
setTokenSpeed(undefined)

View File

@ -27,6 +27,7 @@ import { modelDownloadStateAtom } from '@/hooks/useDownloadState'
import { useStarterScreen } from '@/hooks/useStarterScreen'
import { formatDownloadPercentage, toGibibytes } from '@/utils/converter'
import { manualRecommendationModel } from '@/utils/model'
import {
getLogoEngine,
getTitleByEngine,
@ -56,15 +57,16 @@ const OnDeviceStarterScreen = ({ isShowStarterScreen }: Props) => {
const configuredModels = useAtomValue(configuredModelsAtom)
const setMainViewState = useSetAtom(mainViewStateAtom)
const recommendModel = ['llama3.2-1b-instruct', 'llama3.2-3b-instruct']
const featuredModel = configuredModels.filter((x) => {
const manualRecommendModel = configuredModels.filter((x) =>
recommendModel.includes(x.id)
manualRecommendationModel.includes(x.id)
)
if (manualRecommendModel.length === 2) {
return x.id === recommendModel[0] || x.id === recommendModel[1]
return (
x.id === manualRecommendationModel[0] ||
x.id === manualRecommendationModel[1]
)
} else {
return (
x.metadata?.tags?.includes('Featured') && x.metadata?.size < 5000000000

View File

@ -23,6 +23,7 @@ import useSendChatMessage from '@/hooks/useSendChatMessage'
import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
import { selectedModelAtom } from '@/helpers/atoms/Model.atom'
import {
getActiveThreadIdAtom,
activeSettingInputBoxAtom,
@ -78,7 +79,7 @@ const RichTextEditor = ({
const messages = useAtomValue(getCurrentChatMessagesAtom)
const { sendChatMessage } = useSendChatMessage()
const { stopInference } = useActiveModel()
const selectedModel = useAtomValue(selectedModelAtom)
const largeContentThreshold = 1000
// The decorate function identifies code blocks and marks the ranges
@ -233,7 +234,9 @@ const RichTextEditor = ({
event.preventDefault()
if (messages[messages.length - 1]?.status !== MessageStatus.Pending) {
sendChatMessage(currentPrompt)
resetEditor()
if (selectedModel) {
resetEditor()
}
} else onStopInferenceClick()
}
},

View File

@ -90,6 +90,12 @@ const ChatInput = () => {
}
}, [activeThreadId])
useEffect(() => {
if (!selectedModel && !activeSettingInputBox) {
setActiveSettingInputBox(true)
}
}, [activeSettingInputBox, selectedModel, setActiveSettingInputBox])
const onStopInferenceClick = async () => {
stopInference()
}
@ -297,6 +303,7 @@ const ChatInput = () => {
</Button>
</div>
)}
{messages[messages.length - 1]?.status !== MessageStatus.Pending &&
!isGeneratingResponse &&
!isStreamingResponse ? (
@ -340,55 +347,53 @@ const ChatInput = () => {
</div>
</div>
{activeSettingInputBox && (
<div
className={twMerge(
'absolute bottom-[5px] left-[1px] flex w-[calc(100%-10px)] items-center justify-between rounded-b-lg bg-[hsla(var(--center-panel-bg))] p-3 pr-0',
!activeThread && 'bg-transparent',
stateModel.loading && 'bg-transparent'
)}
>
<div className="flex items-center gap-x-2">
<ModelDropdown chatInputMode />
<Badge
theme="secondary"
className={twMerge(
'flex cursor-pointer items-center gap-x-1',
activeTabThreadRightPanel === 'model' &&
'border border-transparent'
)}
variant={
activeTabThreadRightPanel === 'model' ? 'solid' : 'outline'
<div
className={twMerge(
'absolute bottom-[5px] left-[1px] flex w-[calc(100%-10px)] items-center justify-between rounded-b-lg bg-[hsla(var(--center-panel-bg))] p-3 pr-0',
!activeThread && 'bg-transparent',
!activeSettingInputBox && 'hidden',
stateModel.loading && 'bg-transparent'
)}
>
<div className="flex items-center gap-x-2">
<ModelDropdown chatInputMode />
<Badge
theme="secondary"
className={twMerge(
'flex cursor-pointer items-center gap-x-1',
activeTabThreadRightPanel === 'model' &&
'border border-transparent'
)}
variant={
activeTabThreadRightPanel === 'model' ? 'solid' : 'outline'
}
onClick={() => {
// TODO @faisal: should be refactored later for a better experience between the tab and the toggle button
if (showRightPanel && activeTabThreadRightPanel !== 'model') {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
onClick={() => {
// TODO @faisal: should be refactored later for a better experience between the tab and the toggle button
if (showRightPanel && activeTabThreadRightPanel !== 'model') {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
if (showRightPanel && activeTabThreadRightPanel === 'model') {
setShowRightPanel(false)
setActiveTabThreadRightPanel(undefined)
}
if (activeTabThreadRightPanel === undefined) {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
if (
!showRightPanel &&
activeTabThreadRightPanel !== 'model'
) {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
}}
>
<Settings2Icon
size={16}
className="flex-shrink-0 cursor-pointer text-[hsla(var(--text-secondary))]"
/>
</Badge>
</div>
if (showRightPanel && activeTabThreadRightPanel === 'model') {
setShowRightPanel(false)
setActiveTabThreadRightPanel(undefined)
}
if (activeTabThreadRightPanel === undefined) {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
if (!showRightPanel && activeTabThreadRightPanel !== 'model') {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
}}
>
<Settings2Icon
size={16}
className="flex-shrink-0 cursor-pointer text-[hsla(var(--text-secondary))]"
/>
</Badge>
</div>
{selectedModel && (
<Button
theme="icon"
onClick={() => setActiveSettingInputBox(false)}
@ -398,8 +403,8 @@ const ChatInput = () => {
className="cursor-pointer text-[hsla(var(--text-secondary))]"
/>
</Button>
</div>
)}
)}
</div>
</div>
<input

View File

@ -7,3 +7,8 @@
export const normalizeModelId = (downloadUrl: string): string => {
return downloadUrl.split('/').pop() ?? downloadUrl
}
export const manualRecommendationModel = [
'llama3.2-1b-instruct',
'llama3.2-3b-instruct',
]