* fix: #247 - inference plugin should check nitro service available
* fix: #247 check service status and emit error if any
* chore: error handling
* chore: typo
* fix: open conversation does not work when model is deleted
* chore: reload plugins in development mode without exiting the process
* chore: move model file check to inference plugin
* update package-lock.json

---------

Co-authored-by: Hien To <>
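The first two fixes describe the inference plugin verifying that the nitro service is reachable before initializing a model, and returning an error object rather than throwing. A minimal sketch of that pattern follows; the health-check URL, the function name, and the error shape are assumptions for illustration, not the plugin's actual API:

// Hypothetical sketch of the availability check described above.
// The endpoint, port, and error shape are assumptions, not the real plugin code.
const NITRO_HEALTH_URL = "http://127.0.0.1:3928/healthz";

async function initModelOnPlugin(modelFile: string): Promise<{ error?: string }> {
  try {
    const res = await fetch(NITRO_HEALTH_URL);
    if (!res.ok) {
      return { error: "Nitro service is not available" };
    }
  } catch (e) {
    // Service unreachable: report the error instead of throwing,
    // so callers such as useInitModel can surface it via `res.error`.
    return { error: `Nitro service check failed: ${e}` };
  }
  // ...continue with the model file check and actual initialization...
  return {};
}

Callers then decide how to present the failure, which matches how the hook below inspects `res?.error` on the result of `executeSerial`.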
import { Product } from "@/_models/Product";
import { executeSerial } from "@/_services/pluginService";
import { InferenceService } from "../../shared/coreService";
import { useAtom } from "jotai";
import { currentProductAtom } from "@/_helpers/atoms/Model.atom";

export default function useInitModel() {
  const [activeModel, setActiveModel] = useAtom(currentProductAtom);

  const initModel = async (model: Product) => {
    // Skip re-initialization if the requested model is already active.
    if (activeModel && activeModel.id === model.id) {
      console.debug(`Model ${model.id} is already initialized. Ignoring..`);
      return;
    }

    // Delegate initialization to the inference plugin; it reports failures
    // (e.g. missing model file or unavailable service) via `res.error`.
    const res = await executeSerial(InferenceService.INIT_MODEL, model);
    if (res?.error) {
      console.log("Error occurred: ", res);
      return res;
    } else {
      console.log(`Model initialized successfully!`);
      setActiveModel(model);
      return {};
    }
  };

  return { initModel };
}
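Since the hook returns the plugin's error object instead of throwing, a caller checks the result and decides how to surface the failure. A usage sketch under assumed names; the import path and the wrapping hook are illustrative, not part of the original file:

// Hypothetical caller of useInitModel; the import path and helper names
// are assumptions for illustration only.
import { Product } from "@/_models/Product";
import useInitModel from "@/_hooks/useInitModel";

export function useStartModel() {
  const { initModel } = useInitModel();

  const startModel = async (model: Product) => {
    const res = await initModel(model);
    if (res?.error) {
      // The error emitted by the inference plugin is returned, not thrown,
      // so the UI can decide how to present it (toast, banner, etc.).
      console.error("Failed to initialize model:", res.error);
      return false;
    }
    return true;
  };

  return { startModel };
}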