From bdebbca9b5bc8b83f2cc730c259f0aae4bdac217 Mon Sep 17 00:00:00 2001
From: Louis
Date: Fri, 26 Jan 2024 12:50:58 +0700
Subject: [PATCH] fix: stop openai inference raises something amiss (#1799)

---
 extensions/inference-openai-extension/src/index.ts | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/extensions/inference-openai-extension/src/index.ts b/extensions/inference-openai-extension/src/index.ts
index 9abfc2c7d..0b53d7c21 100644
--- a/extensions/inference-openai-extension/src/index.ts
+++ b/extensions/inference-openai-extension/src/index.ts
@@ -146,7 +146,7 @@ export default class JanInferenceOpenAIExtension extends BaseExtension {
    */
   private static async handleMessageRequest(
     data: MessageRequest,
-    instance: JanInferenceOpenAIExtension
+    instance: JanInferenceOpenAIExtension,
   ) {
     if (data.model.engine !== "openai") {
       return;
     }
@@ -176,7 +176,7 @@ export default class JanInferenceOpenAIExtension extends BaseExtension {
         ...JanInferenceOpenAIExtension._currentModel,
         parameters: data.model.parameters,
       },
-      instance.controller
+      instance.controller,
     ).subscribe({
       next: (content) => {
         const messageContent: ThreadContent = {
@@ -197,7 +197,7 @@ export default class JanInferenceOpenAIExtension extends BaseExtension {
       },
       error: async (err) => {
         if (instance.isCancelled || message.content.length > 0) {
-          message.status = MessageStatus.Error;
+          message.status = MessageStatus.Stopped;
           events.emit(MessageEvent.OnMessageUpdate, message);
           return;
         }
@@ -209,7 +209,7 @@ export default class JanInferenceOpenAIExtension extends BaseExtension {
            },
          };
         message.content = [messageContent];
-        message.status = MessageStatus.Ready;
+        message.status = MessageStatus.Error;
         events.emit(MessageEvent.OnMessageUpdate, message);
       },
     });
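
For context, here is a minimal, self-contained TypeScript sketch of the status logic the last two hunks settle on. The enum members (Ready, Stopped, Error) and the cancelled/partial-content checks mirror identifiers that appear in the diff; the local InferenceState type and the resolveErrorStatus helper are illustrative stand-ins, not part of @janhq/core or the extension itself.

```ts
// Illustrative stand-ins for identifiers referenced in the hunks above
// (MessageStatus, instance.isCancelled, message.content.length > 0).
// The real enum lives in @janhq/core; its member values may differ.
enum MessageStatus {
  Ready = "ready",
  Stopped = "stopped",
  Error = "error",
}

interface InferenceState {
  isCancelled: boolean; // the user pressed "stop" (instance.isCancelled)
  hasPartialContent: boolean; // tokens already streamed (message.content.length > 0)
}

// Post-patch behaviour of the subscription's error callback:
// a stopped or partially streamed response is marked Stopped,
// anything else is a genuine failure and is marked Error (it was Ready before).
function resolveErrorStatus(state: InferenceState): MessageStatus {
  if (state.isCancelled || state.hasPartialContent) {
    return MessageStatus.Stopped;
  }
  return MessageStatus.Error;
}

// Example: cancelling mid-stream is no longer surfaced as an error.
console.log(resolveErrorStatus({ isCancelled: true, hasPartialContent: true })); // "stopped"
console.log(resolveErrorStatus({ isCancelled: false, hasPartialContent: false })); // "error"
```

In the extension this decision sits inside the error callback of the inference subscription: a user-initiated stop (or an error after some content has already streamed) is now reported as Stopped rather than Error, and a genuine failure is reported as Error rather than Ready.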