feat: handle case thread when original model deleted (#4280)
parent 0cd0ff0443
commit 7d07e995ab
ModelDropdown (@/containers/ModelDropdown)

@@ -12,7 +12,7 @@ import {
   useClickOutside,
 } from '@janhq/joi'
 
-import { useAtom, useAtomValue, useSetAtom } from 'jotai'
+import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'
 
 import {
   ChevronDownIcon,
@@ -37,6 +37,7 @@ import useUpdateModelParameters from '@/hooks/useUpdateModelParameters'
 
 import { formatDownloadPercentage, toGibibytes } from '@/utils/converter'
 
+import { manualRecommendationModel } from '@/utils/model'
 import {
   getLogoEngine,
   getTitleByEngine,
@@ -65,16 +66,21 @@ type Props = {
   disabled?: boolean
 }
 
+export const modelDropdownStateAtom = atom(false)
+
 const ModelDropdown = ({
   disabled,
   chatInputMode,
   strictedThread = true,
 }: Props) => {
   const { downloadModel } = useDownloadModel()
+  const [modelDropdownState, setModelDropdownState] = useAtom(
+    modelDropdownStateAtom
+  )
 
   const [searchFilter, setSearchFilter] = useState('local')
   const [searchText, setSearchText] = useState('')
-  const [open, setOpen] = useState(false)
+  const [open, setOpen] = useState<boolean>(modelDropdownState)
   const activeThread = useAtomValue(activeThreadAtom)
   const activeAssistant = useAtomValue(activeAssistantAtom)
   const downloadingModels = useAtomValue(getDownloadingModelAtom)
@@ -84,22 +90,38 @@ const ModelDropdown = ({
   const [dropdownOptions, setDropdownOptions] = useState<HTMLDivElement | null>(
     null
   )
 
   const downloadStates = useAtomValue(modelDownloadStateAtom)
   const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
   const { updateModelParameter } = useUpdateModelParameters()
   const searchInputRef = useRef<HTMLInputElement>(null)
   const configuredModels = useAtomValue(configuredModelsAtom)
-  const featuredModel = configuredModels.filter((x) =>
-    x.metadata?.tags?.includes('Featured')
+  const featuredModel = configuredModels.filter(
+    (x) =>
+      manualRecommendationModel.includes(x.id) &&
+      x.metadata?.tags?.includes('Featured') &&
+      x.metadata?.size < 5000000000
   )
   const { updateThreadMetadata } = useCreateNewThread()
 
-  useClickOutside(() => setOpen(false), null, [dropdownOptions, toggle])
+  useClickOutside(() => handleChangeStateOpen(false), null, [
+    dropdownOptions,
+    toggle,
+  ])
 
   const [showEngineListModel, setShowEngineListModel] = useAtom(
     showEngineListModelAtom
   )
+
+  const handleChangeStateOpen = useCallback(
+    (state: boolean) => {
+      setOpen(state)
+      setModelDropdownState(state)
+    },
+    [setModelDropdownState]
+  )
+
   const isModelSupportRagAndTools = useCallback((model: Model) => {
     return (
       model?.engine === InferenceEngine.openai ||
@@ -145,6 +167,12 @@ const ModelDropdown = ({
     [configuredModels, searchText, searchFilter, downloadedModels]
   )
 
+  useEffect(() => {
+    if (modelDropdownState && chatInputMode) {
+      setOpen(modelDropdownState)
+    }
+  }, [chatInputMode, modelDropdownState])
+
   useEffect(() => {
     if (open && searchInputRef.current) {
       searchInputRef.current.focus()
@@ -157,7 +185,7 @@ const ModelDropdown = ({
 
     let model = downloadedModels.find((model) => model.id === modelId)
     if (!model) {
-      model = recommendedModel
+      model = undefined
     }
     setSelectedModel(model)
   }, [
@@ -343,14 +371,21 @@ const ModelDropdown = ({
                 'inline-block max-w-[200px] cursor-pointer overflow-hidden text-ellipsis whitespace-nowrap',
                 open && 'border border-transparent'
               )}
-              onClick={() => setOpen(!open)}
+              onClick={() => handleChangeStateOpen(!open)}
             >
-              <span>{selectedModel?.name}</span>
+              <span
+                className={twMerge(
+                  !selectedModel && 'text-[hsla(var(--text-tertiary))]'
+                )}
+              >
+                {selectedModel?.name || 'Select Model'}
+              </span>
             </Badge>
           ) : (
             <Input
               value={selectedModel?.name || ''}
               className="cursor-pointer"
+              placeholder="Select Model"
              disabled={disabled}
               readOnly
               suffixIcon={
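Note: the mechanism these hunks introduce is a module-level Jotai atom (modelDropdownStateAtom) holding the dropdown's open flag, so code outside the component — such as useSendChatMessage below — can open it without prop drilling or refs. A minimal standalone sketch of the same pattern (the store usage and names here are illustrative, not the app's actual wiring):

import { atom, createStore } from 'jotai'

// Module-level atom: anything with access to the store can toggle it.
export const dropdownOpenAtom = atom(false)

const store = createStore()

// A non-component caller (e.g. a hook that notices no model is selected)
// flips the flag; the component mirrors it into local `open` state via a
// useEffect, as the diff above does.
store.set(dropdownOpenAtom, true)
console.log(store.get(dropdownOpenAtom)) // true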
useCreateNewThread

@@ -22,7 +22,6 @@ import { toaster } from '@/containers/Toast'
 import { isLocalEngine } from '@/utils/modelEngine'
 
 import { useActiveModel } from './useActiveModel'
-import useRecommendedModel from './useRecommendedModel'
 
 import useSetActiveThread from './useSetActiveThread'
 
@@ -71,8 +70,6 @@ export const useCreateNewThread = () => {
   const experimentalEnabled = useAtomValue(experimentalFeatureEnabledAtom)
   const setIsGeneratingResponse = useSetAtom(isGeneratingResponseAtom)
 
-  const { recommendedModel, downloadedModels } = useRecommendedModel()
-
   const threads = useAtomValue(threadsAtom)
   const { stopInference } = useActiveModel()
 
@@ -84,7 +81,7 @@ export const useCreateNewThread = () => {
     setIsGeneratingResponse(false)
     stopInference()
 
-    const defaultModel = model ?? recommendedModel ?? downloadedModels[0]
+    const defaultModel = model
 
     if (!model) {
       // if we have model, which means user wants to create new thread from Model hub. Allow them.
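Note: with this change, a thread created without an explicit model no longer falls back to recommendedModel ?? downloadedModels[0] — a fallback that could silently point at a deleted model. A reduced sketch of the behavioral difference (types simplified to the relevant field):

type Model = { id: string }

// Before: a missing model was papered over by a recommendation fallback.
const pickBefore = (model?: Model, recommended?: Model, downloaded: Model[] = []) =>
  model ?? recommended ?? downloaded[0]

// After: only an explicitly passed model is used; undefined means the UI
// will prompt the user to select a model instead.
const pickAfter = (model?: Model) => model

console.log(pickBefore(undefined, { id: 'llama3.2-1b-instruct' })?.id) // 'llama3.2-1b-instruct'
console.log(pickAfter(undefined)) // undefined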
useSendChatMessage (@/hooks/useSendChatMessage)

@@ -15,6 +15,7 @@ import {
 import { extractInferenceParams, extractModelLoadParams } from '@janhq/core'
 import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'
 
+import { modelDropdownStateAtom } from '@/containers/ModelDropdown'
 import {
   currentPromptAtom,
   editPromptAtom,
@@ -73,6 +74,7 @@ export default function useSendChatMessage() {
   const activeThreadRef = useRef<Thread | undefined>()
   const activeAssistantRef = useRef<ThreadAssistantInfo | undefined>()
   const setTokenSpeed = useSetAtom(tokenSpeedAtom)
+  const setModelDropdownState = useSetAtom(modelDropdownStateAtom)
 
   const selectedModelRef = useRef<Model | undefined>()
 
@@ -122,6 +124,11 @@ export default function useSendChatMessage() {
       return
     }
 
+    if (selectedModelRef.current?.id === undefined) {
+      setModelDropdownState(true)
+      return
+    }
+
     if (engineParamsUpdate) setReloadModel(true)
     setTokenSpeed(undefined)
 
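Note: this guard is the user-facing half of the feature — sending with no model selected opens the model dropdown via the shared atom and aborts, instead of erroring against a deleted model. A standalone sketch of that control flow (store and names illustrative, following the jotai pattern shown earlier):

import { atom, createStore } from 'jotai'

const dropdownOpenAtom = atom(false)
const store = createStore()

// Simplified send: bail out and surface the dropdown when no model id exists.
const send = (prompt: string, selectedModelId?: string): boolean => {
  if (selectedModelId === undefined) {
    store.set(dropdownOpenAtom, true) // ask the user to pick a model
    return false                      // nothing was dispatched
  }
  console.log(`dispatching "${prompt}" to ${selectedModelId}`)
  return true
}

send('hello')                          // opens the dropdown, returns false
send('hello', 'llama3.2-1b-instruct') // dispatches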
OnDeviceStarterScreen

@@ -27,6 +27,7 @@ import { modelDownloadStateAtom } from '@/hooks/useDownloadState'
 import { useStarterScreen } from '@/hooks/useStarterScreen'
 
 import { formatDownloadPercentage, toGibibytes } from '@/utils/converter'
+import { manualRecommendationModel } from '@/utils/model'
 import {
   getLogoEngine,
   getTitleByEngine,
@@ -56,15 +57,16 @@ const OnDeviceStarterScreen = ({ isShowStarterScreen }: Props) => {
   const configuredModels = useAtomValue(configuredModelsAtom)
   const setMainViewState = useSetAtom(mainViewStateAtom)
 
-  const recommendModel = ['llama3.2-1b-instruct', 'llama3.2-3b-instruct']
-
   const featuredModel = configuredModels.filter((x) => {
     const manualRecommendModel = configuredModels.filter((x) =>
-      recommendModel.includes(x.id)
+      manualRecommendationModel.includes(x.id)
     )
 
     if (manualRecommendModel.length === 2) {
-      return x.id === recommendModel[0] || x.id === recommendModel[1]
+      return (
+        x.id === manualRecommendationModel[0] ||
+        x.id === manualRecommendationModel[1]
+      )
     } else {
       return (
         x.metadata?.tags?.includes('Featured') && x.metadata?.size < 5000000000
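Note: OnDeviceStarterScreen previously kept a local recommendModel list; it now shares manualRecommendationModel from @/utils/model, the same list ModelDropdown's featured filter uses. The selection rule — prefer the two manually recommended Llama 3.2 models when both exist in the catalog, otherwise fall back to models tagged 'Featured' under ~5 GB — as a self-contained sketch (model shape reduced to the fields the filter touches; the undefined-size guard is an addition for strictness):

type CatalogModel = {
  id: string
  metadata?: { tags?: string[]; size?: number }
}

const manualRecommendationModel = ['llama3.2-1b-instruct', 'llama3.2-3b-instruct']

const pickFeatured = (configured: CatalogModel[]): CatalogModel[] => {
  const manual = configured.filter((x) => manualRecommendationModel.includes(x.id))
  return configured.filter((x) =>
    manual.length === 2
      ? x.id === manualRecommendationModel[0] || x.id === manualRecommendationModel[1]
      : (x.metadata?.tags?.includes('Featured') ?? false) &&
        (x.metadata?.size ?? Infinity) < 5_000_000_000
  )
}

// With both recommended models present, only they are returned.
console.log(
  pickFeatured([
    { id: 'llama3.2-1b-instruct' },
    { id: 'llama3.2-3b-instruct' },
    { id: 'big-model', metadata: { tags: ['Featured'], size: 9e9 } },
  ]).map((m) => m.id)
) // ['llama3.2-1b-instruct', 'llama3.2-3b-instruct']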
RichTextEditor

@@ -23,6 +23,7 @@ import useSendChatMessage from '@/hooks/useSendChatMessage'
 
 import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
 
+import { selectedModelAtom } from '@/helpers/atoms/Model.atom'
 import {
   getActiveThreadIdAtom,
   activeSettingInputBoxAtom,
@@ -78,7 +79,7 @@ const RichTextEditor = ({
   const messages = useAtomValue(getCurrentChatMessagesAtom)
   const { sendChatMessage } = useSendChatMessage()
   const { stopInference } = useActiveModel()
-
+  const selectedModel = useAtomValue(selectedModelAtom)
   const largeContentThreshold = 1000
 
   // The decorate function identifies code blocks and marks the ranges
@@ -233,7 +234,9 @@ const RichTextEditor = ({
           event.preventDefault()
           if (messages[messages.length - 1]?.status !== MessageStatus.Pending) {
             sendChatMessage(currentPrompt)
+            if (selectedModel) {
               resetEditor()
+            }
           } else onStopInferenceClick()
         }
       },
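Note: gating resetEditor() behind selectedModel preserves the typed prompt when the send is intercepted: with no model selected, sendChatMessage now returns early (see the useSendChatMessage hunks) and merely opens the model dropdown, so clearing the editor would silently discard the user's text. Annotated restatement of the branch, with identifiers simplified (a sketch, not the exact source):

// Inside the editor's Enter-key handler:
if (lastMessageStatus !== MessageStatus.Pending) {
  sendChatMessage(currentPrompt) // may early-return if no model is selected
  if (selectedModel) {
    resetEditor() // clear the prompt only when the message actually went out
  }
} else {
  onStopInferenceClick()
}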
ChatInput

@@ -90,6 +90,12 @@ const ChatInput = () => {
     }
   }, [activeThreadId])
 
+  useEffect(() => {
+    if (!selectedModel && !activeSettingInputBox) {
+      setActiveSettingInputBox(true)
+    }
+  }, [activeSettingInputBox, selectedModel, setActiveSettingInputBox])
+
   const onStopInferenceClick = async () => {
     stopInference()
   }
@@ -297,6 +303,7 @@ const ChatInput = () => {
               </Button>
             </div>
           )}
+
          {messages[messages.length - 1]?.status !== MessageStatus.Pending &&
            !isGeneratingResponse &&
            !isStreamingResponse ? (
@@ -340,11 +347,11 @@ const ChatInput = () => {
            </div>
          </div>
 
-         {activeSettingInputBox && (
           <div
             className={twMerge(
               'absolute bottom-[5px] left-[1px] flex w-[calc(100%-10px)] items-center justify-between rounded-b-lg bg-[hsla(var(--center-panel-bg))] p-3 pr-0',
               !activeThread && 'bg-transparent',
+              !activeSettingInputBox && 'hidden',
               stateModel.loading && 'bg-transparent'
             )}
           >
@@ -374,10 +381,7 @@ const ChatInput = () => {
                     setShowRightPanel(true)
                     setActiveTabThreadRightPanel('model')
                   }
-                  if (
-                    !showRightPanel &&
-                    activeTabThreadRightPanel !== 'model'
-                  ) {
+                  if (!showRightPanel && activeTabThreadRightPanel !== 'model') {
                     setShowRightPanel(true)
                     setActiveTabThreadRightPanel('model')
                   }
@@ -389,6 +393,7 @@ const ChatInput = () => {
                 />
               </Badge>
             </div>
+            {selectedModel && (
             <Button
               theme="icon"
               onClick={() => setActiveSettingInputBox(false)}
@@ -398,9 +403,9 @@ const ChatInput = () => {
                 className="cursor-pointer text-[hsla(var(--text-secondary))]"
               />
             </Button>
-          </div>
            )}
          </div>
+         </div>
 
        <input
          type="file"
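Note: three coordinated ChatInput changes — an effect re-opens the model-settings input box whenever no model is selected; the box's container is now always mounted and merely hidden via a class, so its contents (including the model Badge) stay in the DOM; and the box's close Button is wrapped in {selectedModel && (...)}, so the box cannot be dismissed while no model is chosen. Sketch of the class toggle, relying on twMerge dropping falsy entries (as the diff does):

import { twMerge } from 'tailwind-merge'

// `false` entries are ignored by twMerge, so 'hidden' is emitted only
// when the settings box is inactive.
const boxClass = (active: boolean) =>
  twMerge('absolute bottom-[5px] p-3', !active && 'hidden')

console.log(boxClass(true))  // 'absolute bottom-[5px] p-3'
console.log(boxClass(false)) // 'absolute bottom-[5px] p-3 hidden'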
@/utils/model

@@ -7,3 +7,8 @@
 export const normalizeModelId = (downloadUrl: string): string => {
   return downloadUrl.split('/').pop() ?? downloadUrl
 }
+
+export const manualRecommendationModel = [
+  'llama3.2-1b-instruct',
+  'llama3.2-3b-instruct',
+]
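Note: manualRecommendationModel is a plain allow-list of model ids shared by ModelDropdown and OnDeviceStarterScreen, both of which query it with Array.prototype.includes. Usage sketch (the '@/' path alias is the app's, as seen in the imports above):

import { manualRecommendationModel } from '@/utils/model'

const isManuallyRecommended = (modelId: string): boolean =>
  manualRecommendationModel.includes(modelId)

console.log(isManuallyRecommended('llama3.2-1b-instruct')) // true
console.log(isManuallyRecommended('some-other-model'))     // false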