* fix: #247 - inference plugin should check that the nitro service is available * fix: #247 check service status and emit an error if any * chore: error handling * chore: fix typo * fix: opening a conversation does not work when the model is deleted * chore: reload plugins in development mode without exiting the process * chore: move the model file check into the inference plugin * update package-lock.json --------- Co-authored-by: Hien To <>
24 lines
748 B
TypeScript
24 lines
748 B
TypeScript
// Module path handed to the Electron bridge (`invokePluginFunc`) so the host
// can locate this plugin's bundled inference module — presumably resolved
// relative to the plugin root by the plugin host; verify against the host.
const MODULE_PATH = "inference-plugin/dist/module.js";
|
|
|
|
/**
 * Ask the host process, through the Electron bridge, to initialise a model.
 *
 * Fixes two issues in the original:
 *  - `new Promise(async (resolve) => …)` wrapped an existing promise
 *    (explicit-construction anti-pattern with an async executor);
 *  - when `window.electronAPI` was missing (e.g. outside Electron), the
 *    returned promise never settled, hanging every caller.
 *
 * @param {*} product - Opaque payload forwarded to the plugin's `initModel`
 *   function — shape is defined by the inference module; not inspected here.
 * @returns {Promise<*>} Whatever the bridge call resolves to, or `undefined`
 *   when the Electron bridge is unavailable.
 */
const initModel = async (product) => {
  // No bridge (plain browser, tests): resolve instead of hanging forever.
  if (!window?.electronAPI) {
    return undefined;
  }
  return window.electronAPI.invokePluginFunc(MODULE_PATH, "initModel", product);
};
|
|
|
|
// URL of the local chat-completion endpoint exposed by the inference server.
const inferenceUrl = function () {
  return "http://localhost:3928/llama/chat_completion";
};
|
|
|
|
/**
 * Ask the host process to kill the inference subprocess.
 *
 * Fix: the original dereferenced `window.electronAPI` unconditionally and
 * threw a TypeError outside Electron; `initModel` already guards for the
 * missing bridge, so this now does the same via optional chaining.
 * Fire-and-forget: the bridge's return value is intentionally ignored.
 *
 * @returns {void}
 */
const stopModel = () => {
  // no-op when the Electron bridge is unavailable
  window?.electronAPI?.invokePluginFunc(MODULE_PATH, "killSubprocess");
};
|
|
|
|
// Register all the above functions and objects with the relevant extension points
|
|
/**
 * Plugin entry point: the host calls this with a `register` callback, and we
 * wire each of this plugin's functions into its extension point. For every
 * binding the extension-point name and the registration key are identical.
 *
 * @param {{ register: (point: string, key: string, fn: Function) => void }} api
 */
export function init({ register }) {
  const bindings = [
    ["initModel", initModel],
    ["inferenceUrl", inferenceUrl],
    ["stopModel", stopModel],
  ];
  for (const [point, fn] of bindings) {
    register(point, point, fn);
  }
}
|