fix: send message button state reset on stop

Author: Louis
Date:   2024-12-26 19:06:24 +07:00
Commit: 5931dbede3 (parent 3cd4db0a92)
GPG Key ID: 44FA9F4D33C37DE2 (no known key found for this signature in database)
3 changed files with 34 additions and 4 deletions

Changed file: @/helpers/atoms/Thread.atom

@@ -173,6 +173,21 @@ export const updateThreadWaitingForResponseAtom = atom(
   }
 )
 
+/**
+ * Reset the thread waiting for response state
+ */
+export const resetThreadWaitingForResponseAtom = atom(null, (get, set) => {
+  const currentState = { ...get(threadStatesAtom) }
+  Object.keys(currentState).forEach((threadId) => {
+    currentState[threadId] = {
+      ...currentState[threadId],
+      waitingForResponse: false,
+      error: undefined,
+    }
+  })
+  set(threadStatesAtom, currentState)
+})
+
 /**
  * Update the thread last message
  */

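For reference, a standalone sketch of how the new write-only atom behaves, assuming jotai v2's createStore and a simplified thread-state shape; apart from the two atom names above, every identifier here is illustrative and not part of the commit:

import { atom, createStore } from 'jotai'

// Simplified stand-in for the real threadStatesAtom.
type ThreadState = { waitingForResponse: boolean; error?: Error }
const threadStatesAtom = atom<Record<string, ThreadState>>({})

// Write-only atom: clears waitingForResponse (and error) for every thread in one pass.
const resetThreadWaitingForResponseAtom = atom(null, (get, set) => {
  const currentState = { ...get(threadStatesAtom) }
  Object.keys(currentState).forEach((threadId) => {
    currentState[threadId] = {
      ...currentState[threadId],
      waitingForResponse: false,
      error: undefined,
    }
  })
  set(threadStatesAtom, currentState)
})

// Quick check outside React using a jotai store.
const store = createStore()
store.set(threadStatesAtom, { t1: { waitingForResponse: true } })
store.set(resetThreadWaitingForResponseAtom)
console.log(store.get(threadStatesAtom).t1.waitingForResponse) // false
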
Changed file: useActiveModel (hook)

@@ -10,6 +10,10 @@ import { LAST_USED_MODEL_ID } from './useRecommendedModel'
 import { vulkanEnabledAtom } from '@/helpers/atoms/AppConfig.atom'
 import { activeAssistantAtom } from '@/helpers/atoms/Assistant.atom'
 import { downloadedModelsAtom } from '@/helpers/atoms/Model.atom'
+import {
+  isGeneratingResponseAtom,
+  resetThreadWaitingForResponseAtom,
+} from '@/helpers/atoms/Thread.atom'
 
 export const activeModelAtom = atom<Model | undefined>(undefined)
 export const loadModelErrorAtom = atom<string | undefined>(undefined)
@@ -34,6 +38,10 @@ export function useActiveModel() {
   const pendingModelLoad = useRef(false)
   const isVulkanEnabled = useAtomValue(vulkanEnabledAtom)
   const activeAssistant = useAtomValue(activeAssistantAtom)
+  const setGeneratingResponse = useSetAtom(isGeneratingResponseAtom)
+  const resetThreadWaitingForResponseState = useSetAtom(
+    resetThreadWaitingForResponseAtom
+  )
 
   const downloadedModelsRef = useRef<Model[]>([])
 
@@ -139,6 +147,8 @@
         return
 
       const engine = EngineManager.instance().get(stoppingModel.engine)
+      setGeneratingResponse(false)
+      resetThreadWaitingForResponseState()
       return engine
         ?.unloadModel(stoppingModel)
         .catch((e) => console.error(e))
@@ -148,7 +158,14 @@
           pendingModelLoad.current = false
         })
     },
-    [activeModel, setStateModel, setActiveModel, stateModel]
+    [
+      activeModel,
+      setStateModel,
+      setActiveModel,
+      stateModel,
+      setGeneratingResponse,
+      resetThreadWaitingForResponseState,
+    ]
   )
 
   const stopInference = useCallback(async () => {

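For context, a minimal sketch of how this path is reached from the UI. It assumes the callback edited above is the one the hook returns as stopModel and that useActiveModel is imported from '@/hooks/useActiveModel'; neither detail is shown in the hunks, and StopGeneratingButton is purely illustrative:

import { useActiveModel } from '@/hooks/useActiveModel' // assumed path

// Illustrative component, not part of the commit.
const StopGeneratingButton = () => {
  const { stopModel } = useActiveModel() // assumes the hook exposes stopModel

  // With this change, stopping the model also flips isGeneratingResponseAtom to
  // false and resets waitingForResponse on every thread, so ChatInput (next file)
  // falls back to the regular send button instead of the pending/stop state.
  return <button onClick={() => stopModel()}>Stop generating</button>
}

export default StopGeneratingButton
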
Changed file: ChatInput (component)

@@ -302,9 +302,7 @@ const ChatInput = () => {
           </div>
         )}
-        {messages[messages.length - 1]?.status !== MessageStatus.Pending &&
-        !isGeneratingResponse &&
-        !isStreamingResponse ? (
+        {!isGeneratingResponse && !isStreamingResponse ? (
           <>
             {currentPrompt.length !== 0 && (
               <Button