diff --git a/web/hooks/useSetActiveThread.ts b/web/hooks/useSetActiveThread.ts
index 3545d0d23..f5649ccaf 100644
--- a/web/hooks/useSetActiveThread.ts
+++ b/web/hooks/useSetActiveThread.ts
@@ -8,6 +8,8 @@ import {
 
 import { useAtomValue, useSetAtom } from 'jotai'
 
+import { loadModelErrorAtom } from './useActiveModel'
+
 import { extensionManager } from '@/extension'
 import { setConvoMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
 import {
@@ -24,6 +26,7 @@ export default function useSetActiveThread() {
   const setThreadMessage = useSetAtom(setConvoMessagesAtom)
   const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
   const setIsGeneratingResponse = useSetAtom(isGeneratingResponseAtom)
+  const setLoadModelError = useSetAtom(loadModelErrorAtom)
 
   const setActiveThread = async (thread: Thread) => {
     if (activeThreadId === thread.id) {
@@ -32,6 +35,7 @@ export default function useSetActiveThread() {
     }
 
     setIsGeneratingResponse(false)
+    setLoadModelError(undefined)
     events.emit(InferenceEvent.OnInferenceStopped, thread.id)
 
     // load the corresponding messages
diff --git a/web/screens/Chat/ChatBody/index.tsx b/web/screens/Chat/ChatBody/index.tsx
index 1ce6b591f..66f14d076 100644
--- a/web/screens/Chat/ChatBody/index.tsx
+++ b/web/screens/Chat/ChatBody/index.tsx
@@ -25,7 +25,6 @@ const ChatBody: React.FC = () => {
   const messages = useAtomValue(getCurrentChatMessagesAtom)
   const { downloadedModels } = useGetDownloadedModels()
   const { setMainViewState } = useMainViewState()
-  const loadModelError = useAtomValue(loadModelErrorAtom)
 
   if (downloadedModels.length === 0)
     return (
@@ -86,9 +85,8 @@ const ChatBody: React.FC = () => {
                 message.content.length > 0) && (
               )}
-              {!loadModelError &&
-                (message.status === MessageStatus.Error ||
-                  message.status === MessageStatus.Stopped) &&
+              {(message.status === MessageStatus.Error ||
+                message.status === MessageStatus.Stopped) &&
                 index === messages.length - 1 && (
               )}
diff --git a/web/screens/Chat/LoadModelErrorMessage/index.tsx b/web/screens/Chat/LoadModelErrorMessage/index.tsx
deleted file mode 100644
index d3c4a704d..000000000
--- a/web/screens/Chat/LoadModelErrorMessage/index.tsx
+++ /dev/null
@@ -1,48 +0,0 @@
-import { MessageStatus, ThreadMessage } from '@janhq/core'
-import { useAtomValue } from 'jotai'
-
-import { useActiveModel } from '@/hooks/useActiveModel'
-
-import { totalRamAtom } from '@/helpers/atoms/SystemBar.atom'
-
-const LoadModelErrorMessage = () => {
-  const { activeModel } = useActiveModel()
-  const availableRam = useAtomValue(totalRamAtom)
-
-  return (
-    <>
-      <div
-
-          {Number(activeModel?.metadata.size) > availableRam ? (
-            <>
-              Oops! Model size exceeds available RAM. Consider selecting a
-              smaller model or upgrading your RAM for smoother performance.
-
-          ) : (
-            <>
-              Apologies, something's amiss!
-
-              Jan's in beta. Find troubleshooting guides{' '}
-
-                here
-              {' '}
-              or reach out to us on{' '}
-
-                Discord
-              {' '}
-              for assistance.
-
-          )}
-
-
-
-  )
-}
-export default LoadModelErrorMessage
diff --git a/web/screens/Chat/index.tsx b/web/screens/Chat/index.tsx
index 1f7896604..e3eedb6c1 100644
--- a/web/screens/Chat/index.tsx
+++ b/web/screens/Chat/index.tsx
@@ -20,7 +20,7 @@ import { snackbar } from '@/containers/Toast'
 
 import { FeatureToggleContext } from '@/context/FeatureToggle'
 
-import { activeModelAtom, loadModelErrorAtom } from '@/hooks/useActiveModel'
+import { activeModelAtom } from '@/hooks/useActiveModel'
 import { queuedMessageAtom, reloadModelAtom } from '@/hooks/useSendChatMessage'
 
 import ChatBody from '@/screens/Chat/ChatBody'
@@ -28,7 +28,6 @@ import ChatBody from '@/screens/Chat/ChatBody'
 import ThreadList from '@/screens/Chat/ThreadList'
 
 import ChatInput from './ChatInput'
-import LoadModelErrorMessage from './LoadModelErrorMessage'
 import RequestDownloadModel from './RequestDownloadModel'
 import Sidebar from './Sidebar'
 
@@ -70,7 +69,6 @@ const ChatScreen: React.FC = () => {
   const activeModel = useAtomValue(activeModelAtom)
 
   const isGeneratingResponse = useAtomValue(isGeneratingResponseAtom)
-  const loadModelError = useAtomValue(loadModelErrorAtom)
 
   const { getRootProps, isDragReject } = useDropzone({
     noClick: true,
@@ -213,7 +211,6 @@ const ChatScreen: React.FC = () => {
             )}
           {activeModel && isGeneratingResponse && }
-          {loadModelError && }