* fix: update new api from cortex to support 0.5.0
* fix stop button for streaming
* fix stop inference for nonstreaming
* chore: remove umami prevent tracking call to vercel
* add warning modal when running more than 2 models concurrently
* fix: skip summarize if abort
* 0.5.0-3
* add inference error popup
* add back import local model
* fix: max token issue (#3225)
* format status
* fix migration missing instructions
* fix: wait for cortex process overlay should be on top (#3224)
* chore: update cortex.js
* Cortex 0.5.0-5
* add import model to my model screen
* fix: should migrate symlink models (#3226)
* fix import on windows (#3229)
* fix yarn lint
* fix: clean up port before start jan (#3232)

---------

Signed-off-by: James <namnh0122@gmail.com>
Co-authored-by: Van Pham <64197333+Van-QA@users.noreply.github.com>
Co-authored-by: Louis <louis@jan.ai>
41 lines
1.1 KiB
TypeScript
import { useMemo } from 'react'

import { useAtomValue } from 'jotai'

import SendMessageButton from './SendMessageButton'
import StopInferenceButton from './StopInferenceButton'

import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'

import { isGeneratingResponseAtom } from '@/helpers/atoms/Thread.atom'

type Props = {
  onStopInferenceClick: () => void
  onSendMessageClick: (message: string) => void
}

const ChatActionButton: React.FC<Props> = ({
  onStopInferenceClick,
  onSendMessageClick,
}) => {
  const messages = useAtomValue(getCurrentChatMessagesAtom)
  const isGeneratingResponse = useAtomValue(isGeneratingResponseAtom)

  // Show the stop button while a response is being generated, or while the
  // last message in the thread is still streaming in.
  const showStopButton = useMemo(() => {
    if (isGeneratingResponse) return true

    const lastMessage = messages[messages.length - 1]
    if (!lastMessage) return false
    if (lastMessage.status === 'in_progress') return true
    return false
  }, [isGeneratingResponse, messages])

  if (showStopButton) {
    return <StopInferenceButton onStopInferenceClick={onStopInferenceClick} />
  }

  return <SendMessageButton onSendMessageClick={onSendMessageClick} />
}

export default ChatActionButton
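For context, a minimal usage sketch of the component above, assuming a hypothetical parent ChatInputForm: the sendChatMessage and stopInference handlers are illustrative placeholders for the app's real send and abort logic, not names from the Jan codebase.

import { useState } from 'react'

import ChatActionButton from './ChatActionButton'

// Hypothetical parent form showing how the two callback props could be wired.
const ChatInputForm: React.FC = () => {
  const [draft, setDraft] = useState('')

  // Illustrative send handler; the message string is supplied by the button
  // when it invokes this callback, per the onSendMessageClick signature.
  const sendChatMessage = (message: string) => {
    console.log('send', message)
    setDraft('')
  }

  // Illustrative stop handler; in the real app this would abort the
  // in-flight completion request.
  const stopInference = () => {
    console.log('stop inference')
  }

  return (
    <div>
      <textarea value={draft} onChange={(e) => setDraft(e.target.value)} />
      <ChatActionButton
        onStopInferenceClick={stopInference}
        onSendMessageClick={sendChatMessage}
      />
    </div>
  )
}

export default ChatInputForm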