fix: Update openai with read/write for engine.json

This commit is contained in:
hiro 2023-12-04 12:10:52 +07:00
parent 2c648caa5f
commit 486c5b8ca0

View File

@ -20,12 +20,18 @@ import {
executeOnMain, executeOnMain,
getUserSpace, getUserSpace,
fs, fs,
Model,
} from "@janhq/core"; } from "@janhq/core";
import { InferenceExtension } from "@janhq/core"; import { InferenceExtension } from "@janhq/core";
import { requestInference } from "./helpers/sse"; import { requestInference } from "./helpers/sse";
import { ulid } from "ulid"; import { ulid } from "ulid";
import { join } from "path"; import { join } from "path";
/**
 * Engine connection settings persisted to `engines/openai.json`
 * (see `_homeDir` / `_engineMetadataFileName` in the extension class).
 * Both fields are optional; defaults are written on first load.
 */
interface EngineSettings {
  /** Base URL of the OpenAI-compatible API, e.g. "https://api.openai.com/v1". */
  base_url?: string;
  /** API key; ships with the placeholder "sk-<your key here>" until the user edits the file. */
  api_key?: string;
}
/** /**
* A class that implements the InferenceExtension interface from the @janhq/core package. * A class that implements the InferenceExtension interface from the @janhq/core package.
* The class provides methods for initializing and stopping a model, and for making inference requests. * The class provides methods for initializing and stopping a model, and for making inference requests.
@ -35,6 +41,10 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
private static readonly _homeDir = 'engines' private static readonly _homeDir = 'engines'
private static readonly _engineMetadataFileName = 'openai.json' private static readonly _engineMetadataFileName = 'openai.json'
private _engineSettings: EngineSettings = {
"base_url": "https://api.openai.com/v1",
"api_key": "sk-<your key here>"
}
controller = new AbortController(); controller = new AbortController();
isCancelled = false; isCancelled = false;
/** /**
@ -52,9 +62,19 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
onLoad(): void { onLoad(): void {
fs.mkdir(JanInferenceOpenAIExtension._homeDir) fs.mkdir(JanInferenceOpenAIExtension._homeDir)
this.writeDefaultEngineSettings() this.writeDefaultEngineSettings()
// Events subscription
events.on(EventName.OnMessageSent, (data) => events.on(EventName.OnMessageSent, (data) =>
JanInferenceOpenAIExtension.handleMessageRequest(data, this) JanInferenceOpenAIExtension.handleMessageRequest(data, this)
); );
events.on(EventName.OnModelInit, (data: Model) => {
JanInferenceOpenAIExtension.handleModelInit(data);
});
events.on(EventName.OnModelStop, (data: Model) => {
JanInferenceOpenAIExtension.handleModelStop(data);
});
} }
/** /**
@ -71,31 +91,18 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
modelId: string, modelId: string,
settings?: ModelSettingParams settings?: ModelSettingParams
): Promise<void> { ): Promise<void> {
const userSpacePath = await getUserSpace(); return
const modelFullPath = join(userSpacePath, "models", modelId, modelId);
return executeOnMain(MODULE, "initModel", {
modelFullPath,
settings,
});
} }
/**
 * Synchronizes in-memory engine settings with `engines/openai.json`:
 * reads the file into `_engineSettings` when it exists, otherwise writes
 * the current defaults to disk. Failures are logged and swallowed.
 */
private async writeDefaultEngineSettings() {
  try {
    const settingsPath = join(
      JanInferenceOpenAIExtension._homeDir,
      JanInferenceOpenAIExtension._engineMetadataFileName
    )
    const fileExists = await fs.checkFileExists(settingsPath)
    if (fileExists) {
      // NOTE(review): parsed JSON is trusted as-is — assumes the file
      // matches EngineSettings; consider validating before assignment.
      this._engineSettings = JSON.parse(await fs.readFile(settingsPath))
    } else {
      await fs.writeFile(settingsPath, JSON.stringify(this._engineSettings, null, 2))
    }
  } catch (err) {
    console.error(err)
  }
}
@ -146,6 +153,22 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
}); });
} }
/**
 * Handles OnModelInit for models served by the OpenAI engine.
 * Remote OpenAI models need no local loading, so the model is reported
 * ready immediately via OnModelReady.
 *
 * Fix: the engine filter now runs BEFORE the success log — previously
 * "Model init success" was printed for every engine's init event, which
 * was misleading for models this extension does not handle.
 */
private static async handleModelInit(data: Model) {
  // Ignore init events for models served by other engines.
  if (data.engine !== 'openai') { return }

  console.log('Model init success', data)
  events.emit(EventName.OnModelReady, {modelId: data.id})
  // TODO(review): emit EventName.OnModelFail once a real failure path
  // (e.g. missing API key / unreachable base_url) is detectable here.
}
/**
 * Handles OnModelStop for OpenAI-engine models by re-emitting OnModelStop
 * with only the model id (no teardown is needed for a remote model).
 *
 * NOTE(review): this handler is subscribed to the same event it emits;
 * the re-emitted payload has no `engine` field, so the second invocation
 * returns at the guard below — confirm this self-trigger is intended.
 */
private static async handleModelStop(data: Model) {
  // Ignore stop events for models served by other engines.
  if (data.engine !== 'openai') { return }

  events.emit(EventName.OnModelStop, {modelId: data.id})
}
/** /**
* Handles a new message request by making an inference request and emitting events. * Handles a new message request by making an inference request and emitting events.
* Function registered in event manager, should be static to avoid binding issues. * Function registered in event manager, should be static to avoid binding issues.
@ -169,7 +192,6 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
object: "thread.message", object: "thread.message",
}; };
events.emit(EventName.OnMessageResponse, message); events.emit(EventName.OnMessageResponse, message);
console.log(JSON.stringify(data, null, 2));
instance.isCancelled = false; instance.isCancelled = false;
instance.controller = new AbortController(); instance.controller = new AbortController();