Louis a57dfe743b
fixes: #247 - inference plugin should check nitro service available (#313)
* fix: #247 - inference plugin should check nitro service available

* fix: #247 check service status and emit error if any

* chore: error handling

* chore: typo

* fix: open conversation does not work when model is deleted

* chore: reload plugins in development mode without exiting the process

* chore: move model file check to inference plugin

* update package-lock.json

---------

Co-authored-by: Hien To <>
2023-10-10 18:24:33 +07:00
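The first commits in this change are about verifying that the Nitro service is actually reachable and that the model file still exists, emitting an error if either check fails. Below is a minimal sketch of what such checks could look like, assuming Nitro listens on localhost:3928 (the same port used by inferenceUrl in the file further down); the helper names NITRO_BASE_URL, checkNitroAvailable, and checkModelFile, the probe-by-fetch approach, and the fs check are illustrative and are not the code from this commit, which lives in the plugin's module.js.

import { existsSync } from "fs";

// Hypothetical helpers, not part of this commit's diff.
const NITRO_BASE_URL = "http://localhost:3928";

// Probe the Nitro HTTP server: any response means the service is up;
// a network error or a timeout means it is unavailable.
const checkNitroAvailable = async (timeoutMs = 2000): Promise<boolean> => {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  try {
    await fetch(NITRO_BASE_URL, { signal: controller.signal });
    return true;
  } catch {
    return false;
  } finally {
    clearTimeout(timer);
  }
};

// Verify the model file still exists before opening a conversation,
// so a deleted model surfaces as an error instead of a silent failure.
const checkModelFile = (modelPath: string): boolean => existsSync(modelPath);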

24 lines · 748 B · TypeScript

const MODULE_PATH = "inference-plugin/dist/module.js";

// Initialize the model through the plugin's Node module, invoked over the Electron bridge.
const initModel = async (product) =>
  new Promise(async (resolve) => {
    if (window.electronAPI) {
      window.electronAPI
        .invokePluginFunc(MODULE_PATH, "initModel", product)
        .then((res) => resolve(res));
    }
  });

// Endpoint the app uses for chat completion requests against the local Nitro server.
const inferenceUrl = () => "http://localhost:3928/llama/chat_completion";

// Stop inference by killing the plugin's subprocess.
const stopModel = () => {
  window.electronAPI.invokePluginFunc(MODULE_PATH, "killSubprocess");
};

// Register all the above functions and objects with the relevant extension points
export function init({ register }) {
  register("initModel", "initModel", initModel);
  register("inferenceUrl", "inferenceUrl", inferenceUrl);
  register("stopModel", "stopModel", stopModel);
}
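For orientation, here is a rough sketch of how these three registered pieces could be exercised end to end if the call were made from the same module; the OpenAI-style { messages } payload for the chat_completion endpoint and the direct access to the functions above are assumptions, not part of this file.

// Illustrative only: payload shape and direct calls are assumptions.
async function runChat(product: unknown, prompt: string) {
  // Start the subprocess and load the model before sending any request.
  await initModel(product);

  const response = await fetch(inferenceUrl(), {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ messages: [{ role: "user", content: prompt }] }),
  });
  const completion = await response.json();

  // Tear the subprocess down once the conversation is over.
  stopModel();
  return completion;
}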