fixes: #247 - inference plugin should check nitro service available (#313)

* fix: #247 - inference plugin should check nitro service available

* fix: #247 check service status and emit error if any

* chore: error handling

* chore: typo

* fix: open conversation does not work when model is deleted

* chore: reload plugins in development mode without exiting the process

* chore: move model file check to inference plugin

* update package-lock.json

---------

Co-authored-by: Hien To <>
This commit is contained in:
Louis 2023-10-10 18:24:33 +07:00 committed by GitHub
parent 63d8b895f3
commit a57dfe743b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
21 changed files with 747 additions and 282 deletions

View File

@ -129,6 +129,7 @@ function updateFinishedDownloadAt(fileName: string, time: number) {
db.run(stmt, [time, fileName], (err: any) => { db.run(stmt, [time, fileName], (err: any) => {
if (err) { if (err) {
console.log(err); console.log(err);
res(undefined);
} else { } else {
console.log("Updated 1 row"); console.log("Updated 1 row");
res("Updated"); res("Updated");
@ -150,7 +151,11 @@ function getUnfinishedDownloadModels() {
const query = `SELECT * FROM models WHERE finish_download_at = -1 ORDER BY start_download_at DESC`; const query = `SELECT * FROM models WHERE finish_download_at = -1 ORDER BY start_download_at DESC`;
db.all(query, (err: Error, row: any) => { db.all(query, (err: Error, row: any) => {
res(row); if (row) {
res(row);
} else {
res([]);
}
}); });
db.close(); db.close();
}); });
@ -193,13 +198,10 @@ function getModelById(modelId: string) {
path.join(app.getPath("userData"), "jan.db") path.join(app.getPath("userData"), "jan.db")
); );
console.debug("Get model by id", modelId);
db.get( db.get(
`SELECT * FROM models WHERE id = ?`, `SELECT * FROM models WHERE id = ?`,
[modelId], [modelId],
(err: any, row: any) => { (err: any, row: any) => {
console.debug("Get model by id result", row);
if (row) { if (row) {
const product = { const product = {
id: row.id, id: row.id,
@ -223,6 +225,8 @@ function getModelById(modelId: string) {
downloadUrl: row.download_url, downloadUrl: row.download_url,
}; };
res(product); res(product);
} else {
res(undefined);
} }
} }
); );

View File

@ -9,14 +9,6 @@ const initModel = async (product) =>
} }
}); });
const dispose = async () =>
new Promise(async (resolve) => {
if (window.electronAPI) {
window.electronAPI
.invokePluginFunc(MODULE_PATH, "dispose")
.then((res) => resolve(res));
}
});
const inferenceUrl = () => "http://localhost:3928/llama/chat_completion"; const inferenceUrl = () => "http://localhost:3928/llama/chat_completion";
const stopModel = () => { const stopModel = () => {
@ -27,6 +19,5 @@ const stopModel = () => {
export function init({ register }) { export function init({ register }) {
register("initModel", "initModel", initModel); register("initModel", "initModel", initModel);
register("inferenceUrl", "inferenceUrl", inferenceUrl); register("inferenceUrl", "inferenceUrl", inferenceUrl);
register("dispose", "dispose", dispose);
register("stopModel", "stopModel", stopModel); register("stopModel", "stopModel", stopModel);
} }

View File

@ -1,102 +0,0 @@
const path = require("path");
const { app, dialog } = require("electron");
const { spawn } = require("child_process");
const fs = require("fs");
let subprocess = null;
/**
 * Launches the Nitro inference server for the given model product.
 *
 * Steps: resolve the model file name, kill any previous subprocess,
 * point Nitro's config.json at the model file, pick the platform
 * binary, and spawn it.
 *
 * @param product model record; expected to carry `fileName` (or the
 *        snake_case `file_name` fallback from the DB row).
 * @returns resolves once the subprocess has been spawned (it does NOT
 *          wait for the server to become ready).
 */
async function initModel(product) {
  // fileName fallback
  if (!product.fileName) {
    product.fileName = product.file_name;
  }
  // Without a file name there is nothing to load — tell the user and bail.
  if (!product.fileName) {
    await dialog.showMessageBox({
      message: "Selected model does not have file name..",
    });
    return;
  }
  // Only one Nitro instance may run at a time; tear down the old one first.
  if (subprocess) {
    console.error(
      "A subprocess is already running. Attempt to kill then reinit."
    );
    dispose();
  }
  let binaryFolder = path.join(__dirname, "nitro"); // Current directory by default
  // Read the existing config
  const configFilePath = path.join(binaryFolder, "config", "config.json");
  let config = {};
  if (fs.existsSync(configFilePath)) {
    const rawData = fs.readFileSync(configFilePath, "utf-8");
    config = JSON.parse(rawData);
  }
  // Update the llama_model_path
  if (!config.custom_config) {
    config.custom_config = {};
  }
  // Model files live in Electron's per-user data directory.
  const modelPath = path.join(app.getPath("userData"), product.fileName);
  config.custom_config.llama_model_path = modelPath;
  // Write the updated config back to the file
  fs.writeFileSync(configFilePath, JSON.stringify(config, null, 4));
  // Select the Nitro binary matching the current OS/architecture.
  let binaryName;
  if (process.platform === "win32") {
    binaryName = "nitro_windows_amd64.exe";
  } else if (process.platform === "darwin") { // Mac OS platform
    binaryName = process.arch === "arm64" ? "nitro_mac_arm64" : "nitro_mac_amd64";
  } else {
    // Linux
    binaryName = "nitro_linux_amd64_cuda"; // For other platforms
  }
  const binaryPath = path.join(binaryFolder, binaryName);
  // Execute the binary
  subprocess = spawn(binaryPath, [configFilePath], { cwd: binaryFolder });
  // Handle subprocess output
  subprocess.stdout.on("data", (data) => {
    console.log(`stdout: ${data}`);
  });
  subprocess.stderr.on("data", (data) => {
    console.error(`stderr: ${data}`);
  });
  // Clear the handle when the child exits so a new init can spawn cleanly.
  subprocess.on("close", (code) => {
    console.log(`child process exited with code ${code}`);
    subprocess = null;
  });
}
/**
 * Releases every resource this plugin holds.
 * Today that is only the Nitro subprocess; future registered
 * resources should be cleaned up here as well.
 */
function dispose() {
  killSubprocess();
}
/**
 * Terminates the tracked Nitro subprocess, if one exists, and clears
 * the module-level handle. Logs an error when nothing is running.
 */
function killSubprocess() {
  if (!subprocess) {
    console.error("No subprocess is currently running.");
    return;
  }
  subprocess.kill();
  subprocess = null;
  console.log("Subprocess terminated.");
}
// Public surface of this main-process module: model bootstrap plus
// the teardown helpers invoked by the plugin host.
module.exports = {
  initModel,
  killSubprocess,
  dispose,
};

View File

@ -0,0 +1,119 @@
const path = require("path");
const { app } = require("electron");
const { spawn } = require("child_process");
const fs = require("fs");
const tcpPortUsed = require("tcp-port-used");
const { killPortProcess } = require("kill-port-process");
let subprocess = null;
const PORT = 3928;
/**
 * Boots the Nitro inference server for the given model product.
 *
 * Pipeline: validate the model file name -> ensure the service port is
 * free (killing any stale process bound to it) -> write the model path
 * into Nitro's config -> spawn the platform binary -> wait until the
 * port is actually serving.
 *
 * @param product model record; must carry `fileName`.
 * @returns a promise resolving to `{}` on success or `{ error }` on
 *          failure (errors are reported, never thrown, so callers can
 *          surface them in the UI).
 */
const initModel = (product) => {
  return (
    // NOTE: the executor resolves a string (the file name), so the
    // promise is typed Promise<string>, not Promise<void>.
    new Promise<string>((resolve, reject) => {
      if (!product?.fileName) {
        // Return immediately — without this, a missing model would
        // still fall through and kill a healthy running subprocess.
        return reject(new Error("Model not found, please download again."));
      }
      if (subprocess) {
        console.error(
          "A subprocess is already running. Attempt to kill then reinit."
        );
        killSubprocess();
      }
      resolve(product.fileName);
    })
      // Kill port process if it is already in use
      .then((fileName) =>
        tcpPortUsed
          .waitUntilFree(PORT, 200, 3000)
          .catch(() => killPortProcess(PORT))
          .then(() => fileName)
      )
      // Spawn Nitro subprocess to load model
      .then((fileName) => {
        let binaryFolder = path.join(__dirname, "nitro"); // Current directory by default
        // Read the existing config
        const configFilePath = path.join(binaryFolder, "config", "config.json");
        let config: any = {};
        if (fs.existsSync(configFilePath)) {
          const rawData = fs.readFileSync(configFilePath, "utf-8");
          config = JSON.parse(rawData);
        }
        // Update the llama_model_path
        if (!config.custom_config) {
          config.custom_config = {};
        }
        // Use the validated file name carried through the chain.
        const modelPath = path.join(app.getPath("userData"), fileName);
        config.custom_config.llama_model_path = modelPath;
        // Write the updated config back to the file
        fs.writeFileSync(configFilePath, JSON.stringify(config, null, 4));
        // Select the Nitro binary matching the current OS/architecture.
        let binaryName;
        if (process.platform === "win32") {
          binaryName = "nitro_windows_amd64.exe";
        } else if (process.platform === "darwin") {
          // Mac OS platform
          binaryName =
            process.arch === "arm64" ? "nitro_mac_arm64" : "nitro_mac_amd64";
        } else {
          // Linux
          binaryName = "nitro_linux_amd64_cuda"; // For other platforms
        }
        const binaryPath = path.join(binaryFolder, binaryName);
        // Execute the binary
        subprocess = spawn(binaryPath, [configFilePath], { cwd: binaryFolder });
        // Handle subprocess output
        subprocess.stdout.on("data", (data) => {
          console.log(`stdout: ${data}`);
        });
        subprocess.stderr.on("data", (data) => {
          console.error(`stderr: ${data}`);
        });
        // Clear the handle when the child exits so a new init can spawn.
        subprocess.on("close", (code) => {
          console.log(`child process exited with code ${code}`);
          subprocess = null;
        });
      })
      // Consider the model loaded once the port is actually serving.
      .then(() => tcpPortUsed.waitUntilUsed(PORT, 300, 30000))
      .then(() => {
        return {};
      })
      // Convert any failure into a value the renderer can display.
      .catch((err) => {
        return { error: err };
      })
  );
};
/**
 * Tears down all plugin-held resources. Currently this delegates to
 * killSubprocess(); any future registered resources belong here too.
 */
function dispose() {
  killSubprocess();
}
/**
 * Stops the tracked Nitro subprocess and clears the handle. When no
 * subprocess is tracked, it still frees the service port in case an
 * orphaned process (e.g. from a previous app run) is bound to it.
 */
function killSubprocess() {
  if (!subprocess) {
    killPortProcess(PORT);
    console.error("No subprocess is currently running.");
    return;
  }
  subprocess.kill();
  subprocess = null;
  console.log("Subprocess terminated.");
}
// Public surface of this main-process module: model bootstrap plus
// the teardown helpers invoked by the plugin host.
module.exports = {
  initModel,
  killSubprocess,
  dispose,
};

File diff suppressed because it is too large Load Diff

View File

@ -10,23 +10,29 @@
"init" "init"
], ],
"scripts": { "scripts": {
"build": "webpack --config webpack.config.js", "build": "tsc -b . && webpack --config webpack.config.js",
"postinstall": "rimraf ./*.tgz && npm run build && cpx \"module.js\" \"dist\" && rimraf dist/nitro/* && cpx \"nitro/**\" \"dist/nitro\"", "postinstall": "rimraf ./*.tgz && npm run build && rimraf dist/nitro/* && cpx \"nitro/**\" \"dist/nitro\"",
"build:publish": "npm pack && cpx *.tgz ../../pre-install" "build:publish": "npm pack && cpx *.tgz ../../pre-install"
}, },
"exports": {
".": "./dist/index.js",
"./main": "./dist/module.js"
},
"devDependencies": { "devDependencies": {
"cpx": "^1.5.0", "cpx": "^1.5.0",
"rimraf": "^3.0.2", "rimraf": "^3.0.2",
"webpack": "^5.88.2", "webpack": "^5.88.2",
"webpack-cli": "^5.1.4" "webpack-cli": "^5.1.4"
}, },
"bundledDependencies": [
"electron-is-dev",
"node-llama-cpp"
],
"dependencies": { "dependencies": {
"electron-is-dev": "^2.0.0" "kill-port-process": "^3.2.0",
"tcp-port-used": "^1.0.2",
"ts-loader": "^9.5.0"
}, },
"bundledDependencies": [
"tcp-port-used",
"kill-port-process"
],
"engines": { "engines": {
"node": ">=18.0.0" "node": ">=18.0.0"
}, },

View File

@ -0,0 +1,22 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig to read more about this file */
/* Language and Environment */
"target": "es2016" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
/* Modules */
"module": "ES6" /* Specify what module code is generated. */,
// "rootDir": "./", /* Specify the root folder within your source files. */
// "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
// "baseUrl": "." /* Specify the base directory to resolve non-relative module names. */,
// "paths": {} /* Specify a set of entries that re-map imports to additional lookup locations. */,
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
// "resolveJsonModule": true, /* Enable importing .json files. */
"outDir": "./dist" /* Specify an output folder for all emitted files. */,
"esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */,
"forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
/* Type Checking */
"strict": false /* Enable all strict type-checking options. */,
"skipLibCheck": true /* Skip type checking all .d.ts files. */
}
}

View File

@ -0,0 +1,7 @@
// Ambient declaration: augments the global Window type so renderer
// code can reference the Electron preload bridge without casts.
export {};
declare global {
  interface Window {
    // Bridge injected by the Electron preload script; optional because
    // the app may also run in a plain browser context.
    electronAPI?: any | undefined;
  }
}

View File

@ -2,7 +2,7 @@ const path = require("path");
module.exports = { module.exports = {
experiments: { outputModule: true }, experiments: { outputModule: true },
entry: "./index.js", // Adjust the entry point to match your project's main file entry: "./index.ts", // Adjust the entry point to match your project's main file
mode: "production", mode: "production",
module: { module: {
rules: [ rules: [
@ -19,7 +19,7 @@ module.exports = {
library: { type: "module" }, // Specify ESM output format library: { type: "module" }, // Specify ESM output format
}, },
resolve: { resolve: {
extensions: [".js"], extensions: [".ts", ".js"],
}, },
// Add loaders and other configuration as needed for your project // Add loaders and other configuration as needed for your project
}; };

View File

@ -156,8 +156,23 @@ function handleIPCs() {
rmdir(fullPath, { recursive: true }, function (err) { rmdir(fullPath, { recursive: true }, function (err) {
if (err) console.log(err); if (err) console.log(err);
app.relaunch(); dispose(requiredModules);
app.exit();
// just relaunch if packaged, should launch manually in development mode
if (app.isPackaged) {
app.relaunch();
app.exit();
} else {
for (const modulePath in requiredModules) {
delete require.cache[
require.resolve(
join(app.getPath("userData"), "plugins", modulePath)
)
];
}
setupPlugins();
mainWindow?.reload();
}
}); });
}); });

View File

@ -9,6 +9,8 @@ import {
conversationStatesAtom, conversationStatesAtom,
getActiveConvoIdAtom, getActiveConvoIdAtom,
setActiveConvoIdAtom, setActiveConvoIdAtom,
updateConversationErrorAtom,
updateConversationWaitingForResponseAtom,
} from "@/_helpers/atoms/Conversation.atom"; } from "@/_helpers/atoms/Conversation.atom";
import { import {
setMainViewStateAtom, setMainViewStateAtom,
@ -33,6 +35,10 @@ const HistoryItem: React.FC<Props> = ({
const conversationStates = useAtomValue(conversationStatesAtom); const conversationStates = useAtomValue(conversationStatesAtom);
const activeConvoId = useAtomValue(getActiveConvoIdAtom); const activeConvoId = useAtomValue(getActiveConvoIdAtom);
const setActiveConvoId = useSetAtom(setActiveConvoIdAtom); const setActiveConvoId = useSetAtom(setActiveConvoIdAtom);
const updateConvWaiting = useSetAtom(
updateConversationWaitingForResponseAtom
);
const updateConvError = useSetAtom(updateConversationErrorAtom);
const isSelected = activeConvoId === conversation.id; const isSelected = activeConvoId === conversation.id;
const { initModel } = useInitModel(); const { initModel } = useInitModel();
@ -42,13 +48,16 @@ const HistoryItem: React.FC<Props> = ({
DataService.GET_MODEL_BY_ID, DataService.GET_MODEL_BY_ID,
conversation.model_id conversation.model_id
); );
if (!model) {
alert( if (conversation.id) updateConvWaiting(conversation.id, true);
`Model ${conversation.model_id} not found! Please re-download the model first.` initModel(model).then((res: any) => {
); if (conversation.id) updateConvWaiting(conversation.id, false);
} else {
initModel(model); if (res?.error && conversation.id) {
} updateConvError(conversation.id, res.error);
}
});
if (activeConvoId !== conversation.id) { if (activeConvoId !== conversation.id) {
setMainViewState(MainViewState.Conversation); setMainViewState(MainViewState.Conversation);
setActiveConvoId(conversation.id); setActiveConvoId(conversation.id);

View File

@ -9,14 +9,14 @@ import { Fragment } from "react";
import { PlusIcon } from "@heroicons/react/24/outline"; import { PlusIcon } from "@heroicons/react/24/outline";
import useCreateConversation from "@/_hooks/useCreateConversation"; import useCreateConversation from "@/_hooks/useCreateConversation";
import { currentProductAtom } from "@/_helpers/atoms/Model.atom"; import { currentProductAtom } from "@/_helpers/atoms/Model.atom";
import { showingTyping } from "@/_helpers/JotaiWrapper";
import LoadingIndicator from "../LoadingIndicator"; import LoadingIndicator from "../LoadingIndicator";
import { currentConvoStateAtom } from "@/_helpers/atoms/Conversation.atom";
const InputToolbar: React.FC = () => { const InputToolbar: React.FC = () => {
const showingAdvancedPrompt = useAtomValue(showingAdvancedPromptAtom); const showingAdvancedPrompt = useAtomValue(showingAdvancedPromptAtom);
const currentProduct = useAtomValue(currentProductAtom); const currentProduct = useAtomValue(currentProductAtom);
const { requestCreateConvo } = useCreateConversation(); const { requestCreateConvo } = useCreateConversation();
const isTyping = useAtomValue(showingTyping); const currentConvoState = useAtomValue(currentConvoStateAtom);
if (showingAdvancedPrompt) { if (showingAdvancedPrompt) {
return <div />; return <div />;
@ -34,12 +34,20 @@ const InputToolbar: React.FC = () => {
return ( return (
<Fragment> <Fragment>
<div className="flex justify-between gap-2 mr-3 my-2"> <div className="flex justify-between gap-2 mr-3 my-2">
<div className="h-6"> <div className="h-6 space-x-5">
{isTyping && ( {currentConvoState?.waitingForResponse === true && (
<div className="my-2" key="indicator"> <div className="ml-1 my-2" key="indicator">
<LoadingIndicator /> <LoadingIndicator />
</div> </div>
)}{" "} )}
{!currentConvoState?.waitingForResponse &&
currentConvoState?.error && (
<div className="flex flex-row justify-center">
<span className="mx-5 my-2 text-red-500 text-sm">
{currentConvoState?.error?.toString()}
</span>
</div>
)}
</div> </div>
{/* <SecondaryButton title="Regenerate" onClick={onRegenerateClick} /> */} {/* <SecondaryButton title="Regenerate" onClick={onRegenerateClick} /> */}

View File

@ -13,8 +13,6 @@ export default function JotaiWrapper({ children }: Props) {
export const currentPromptAtom = atom<string>(""); export const currentPromptAtom = atom<string>("");
export const showingTyping = atom<boolean>(false);
export const appDownloadProgress = atom<number>(-1); export const appDownloadProgress = atom<number>(-1);
export const searchingModelText = atom<string>(""); export const searchingModelText = atom<string>("");

View File

@ -55,6 +55,18 @@ export const updateConversationWaitingForResponseAtom = atom(
currentState[conversationId] = { currentState[conversationId] = {
...currentState[conversationId], ...currentState[conversationId],
waitingForResponse, waitingForResponse,
error: undefined,
};
set(conversationStatesAtom, currentState);
}
);
export const updateConversationErrorAtom = atom(
null,
(get, set, conversationId: string, error?: Error) => {
const currentState = { ...get(conversationStatesAtom) };
currentState[conversationId] = {
...currentState[conversationId],
error,
}; };
set(conversationStatesAtom, currentState); set(conversationStatesAtom, currentState);
} }

View File

@ -7,6 +7,8 @@ import {
userConversationsAtom, userConversationsAtom,
setActiveConvoIdAtom, setActiveConvoIdAtom,
addNewConversationStateAtom, addNewConversationStateAtom,
updateConversationWaitingForResponseAtom,
updateConversationErrorAtom,
} from "@/_helpers/atoms/Conversation.atom"; } from "@/_helpers/atoms/Conversation.atom";
import useInitModel from "./useInitModel"; import useInitModel from "./useInitModel";
@ -17,6 +19,10 @@ const useCreateConversation = () => {
); );
const setActiveConvoId = useSetAtom(setActiveConvoIdAtom); const setActiveConvoId = useSetAtom(setActiveConvoIdAtom);
const addNewConvoState = useSetAtom(addNewConversationStateAtom); const addNewConvoState = useSetAtom(addNewConversationStateAtom);
const updateConvWaiting = useSetAtom(
updateConversationWaitingForResponseAtom
);
const updateConvError = useSetAtom(updateConversationErrorAtom);
const requestCreateConvo = async (model: Product) => { const requestCreateConvo = async (model: Product) => {
const conversationName = model.name; const conversationName = model.name;
@ -27,7 +33,14 @@ const useCreateConversation = () => {
name: conversationName, name: conversationName,
}; };
const id = await executeSerial(DataService.CREATE_CONVERSATION, conv); const id = await executeSerial(DataService.CREATE_CONVERSATION, conv);
await initModel(model);
if (id) updateConvWaiting(id, true);
initModel(model).then((res: any) => {
if (id) updateConvWaiting(id, false);
if (res?.error) {
updateConvError(id, res.error);
}
});
const mappedConvo: Conversation = { const mappedConvo: Conversation = {
id, id,

View File

@ -1,6 +1,6 @@
import { Product } from "@/_models/Product"; import { Product } from "@/_models/Product";
import { executeSerial } from "@/_services/pluginService"; import { executeSerial } from "@/_services/pluginService";
import { InfereceService } from "../../shared/coreService"; import { InferenceService } from "../../shared/coreService";
import { useAtom } from "jotai"; import { useAtom } from "jotai";
import { currentProductAtom } from "@/_helpers/atoms/Model.atom"; import { currentProductAtom } from "@/_helpers/atoms/Model.atom";
@ -12,12 +12,14 @@ export default function useInitModel() {
console.debug(`Model ${model.id} is already init. Ignore..`); console.debug(`Model ${model.id} is already init. Ignore..`);
return; return;
} }
try { const res = await executeSerial(InferenceService.INIT_MODEL, model);
await executeSerial(InfereceService.INIT_MODEL, model); if (res?.error) {
console.debug(`Init model ${model.name} successfully!`); console.log("error occured: ", res);
return res;
} else {
console.log(`Init model successfully!`);
setActiveModel(model); setActiveModel(model);
} catch (err) { return {};
console.error(`Init model ${model.name} failed: ${err}`);
} }
}; };

View File

@ -1,7 +1,7 @@
import { currentPromptAtom, showingTyping } from "@/_helpers/JotaiWrapper"; import { currentPromptAtom } from "@/_helpers/JotaiWrapper";
import { useAtom, useAtomValue, useSetAtom } from "jotai"; import { useAtom, useAtomValue, useSetAtom } from "jotai";
import { selectAtom } from "jotai/utils"; import { selectAtom } from "jotai/utils";
import { DataService, InfereceService } from "../../shared/coreService"; import { DataService, InferenceService } from "../../shared/coreService";
import { import {
MessageSenderType, MessageSenderType,
RawMessage, RawMessage,
@ -18,6 +18,7 @@ import {
import { import {
currentConversationAtom, currentConversationAtom,
getActiveConvoIdAtom, getActiveConvoIdAtom,
updateConversationWaitingForResponseAtom,
} from "@/_helpers/atoms/Conversation.atom"; } from "@/_helpers/atoms/Conversation.atom";
export default function useSendChatMessage() { export default function useSendChatMessage() {
@ -26,6 +27,9 @@ export default function useSendChatMessage() {
const addNewMessage = useSetAtom(addNewMessageAtom); const addNewMessage = useSetAtom(addNewMessageAtom);
const updateMessage = useSetAtom(updateMessageAtom); const updateMessage = useSetAtom(updateMessageAtom);
const activeConversationId = useAtomValue(getActiveConvoIdAtom) ?? ""; const activeConversationId = useAtomValue(getActiveConvoIdAtom) ?? "";
const updateConvWaiting = useSetAtom(
updateConversationWaitingForResponseAtom
);
const chatMessagesHistory = useAtomValue( const chatMessagesHistory = useAtomValue(
selectAtom( selectAtom(
@ -34,10 +38,11 @@ export default function useSendChatMessage() {
) )
); );
const [currentPrompt, setCurrentPrompt] = useAtom(currentPromptAtom); const [currentPrompt, setCurrentPrompt] = useAtom(currentPromptAtom);
const [, setIsTyping] = useAtom(showingTyping);
const sendChatMessage = async () => { const sendChatMessage = async () => {
setIsTyping(true);
setCurrentPrompt(""); setCurrentPrompt("");
const conversationId = activeConversationId;
updateConvWaiting(conversationId, true);
const prompt = currentPrompt.trim(); const prompt = currentPrompt.trim();
const newMessage: RawMessage = { const newMessage: RawMessage = {
conversation_id: parseInt(currentConvo?.id ?? "0") ?? 0, conversation_id: parseInt(currentConvo?.id ?? "0") ?? 0,
@ -65,7 +70,7 @@ export default function useSendChatMessage() {
: "assistant", : "assistant",
}; };
}); });
const url = await executeSerial(InfereceService.INFERENCE_URL); const url = await executeSerial(InferenceService.INFERENCE_URL);
const response = await fetch(url, { const response = await fetch(url, {
method: "POST", method: "POST",
headers: { headers: {
@ -108,7 +113,7 @@ export default function useSendChatMessage() {
const lines = text.trim().split("\n"); const lines = text.trim().split("\n");
for (const line of lines) { for (const line of lines) {
if (line.startsWith("data: ") && !line.includes("data: [DONE]")) { if (line.startsWith("data: ") && !line.includes("data: [DONE]")) {
setIsTyping(false); updateConvWaiting(conversationId, false);
const data = JSON.parse(line.replace("data: ", "")); const data = JSON.parse(line.replace("data: ", ""));
answer += data.choices[0]?.delta?.content ?? ""; answer += data.choices[0]?.delta?.content ?? "";
if (answer.startsWith("assistant: ")) { if (answer.startsWith("assistant: ")) {
@ -139,7 +144,7 @@ export default function useSendChatMessage() {
.replace("T", " ") .replace("T", " ")
.replace(/\.\d+Z$/, ""), .replace(/\.\d+Z$/, ""),
}); });
setIsTyping(false); updateConvWaiting(conversationId, false);
}; };
return { return {
sendChatMessage, sendChatMessage,

View File

@ -1,5 +1,5 @@
import { executeSerial } from "@/_services/pluginService"; import { executeSerial } from "@/_services/pluginService";
import { DataService, InfereceService } from "../../shared/coreService"; import { DataService, InferenceService } from "../../shared/coreService";
import useInitModel from "./useInitModel"; import useInitModel from "./useInitModel";
import { useSetAtom } from "jotai"; import { useSetAtom } from "jotai";
import { currentProductAtom } from "@/_helpers/atoms/Model.atom"; import { currentProductAtom } from "@/_helpers/atoms/Model.atom";
@ -18,7 +18,7 @@ export default function useStartStopModel() {
}; };
const stopModel = async (modelId: string) => { const stopModel = async (modelId: string) => {
await executeSerial(InfereceService.STOP_MODEL, modelId); await executeSerial(InferenceService.STOP_MODEL, modelId);
setActiveModel(undefined); setActiveModel(undefined);
}; };

View File

@ -14,4 +14,5 @@ export interface Conversation {
export type ConversationState = { export type ConversationState = {
hasMore: boolean; hasMore: boolean;
waitingForResponse: boolean; waitingForResponse: boolean;
error?: Error;
}; };

View File

@ -6,7 +6,7 @@ import {
import { import {
CoreService, CoreService,
DataService, DataService,
InfereceService, InferenceService,
ModelManagementService, ModelManagementService,
} from "../../shared/coreService"; } from "../../shared/coreService";
@ -14,7 +14,7 @@ export const isCorePluginInstalled = () => {
if (!extensionPoints.get(DataService.GET_CONVERSATIONS)) { if (!extensionPoints.get(DataService.GET_CONVERSATIONS)) {
return false; return false;
} }
if (!extensionPoints.get(InfereceService.INIT_MODEL)) { if (!extensionPoints.get(InferenceService.INIT_MODEL)) {
return false; return false;
} }
if (!extensionPoints.get(ModelManagementService.GET_DOWNLOADED_MODELS)) { if (!extensionPoints.get(ModelManagementService.GET_DOWNLOADED_MODELS)) {
@ -33,7 +33,7 @@ export const setupBasePlugins = async () => {
if ( if (
!extensionPoints.get(DataService.GET_CONVERSATIONS) || !extensionPoints.get(DataService.GET_CONVERSATIONS) ||
!extensionPoints.get(InfereceService.INIT_MODEL) || !extensionPoints.get(InferenceService.INIT_MODEL) ||
!extensionPoints.get(ModelManagementService.GET_DOWNLOADED_MODELS) !extensionPoints.get(ModelManagementService.GET_DOWNLOADED_MODELS)
) { ) {
const installed = await plugins.install(basePlugins); const installed = await plugins.install(basePlugins);

View File

@ -1,7 +1,7 @@
export type CoreService = export type CoreService =
| DataService | DataService
| ModelService | ModelService
| InfereceService | InferenceService
| ModelManagementService | ModelManagementService
| SystemMonitoringService | SystemMonitoringService
| PreferenceService; | PreferenceService;
@ -27,7 +27,7 @@ export enum ModelService {
GET_MODELS = "getModels", GET_MODELS = "getModels",
} }
export enum InfereceService { export enum InferenceService {
INFERENCE_URL = "inferenceUrl", INFERENCE_URL = "inferenceUrl",
INIT_MODEL = "initModel", INIT_MODEL = "initModel",
STOP_MODEL = "stopModel", STOP_MODEL = "stopModel",