fix: Update code based on comments from @james

hiro 2023-12-07 15:13:04 +07:00
parent fbf8ff9d3d
commit 1177007ad6
10 changed files with 166 additions and 150 deletions

View File

@@ -25,7 +25,7 @@ const readFile: (path: string) => Promise<any> = (path) => global.core.api?.read
* @param {string} path
* @returns {boolean} A boolean indicating whether the path is a file.
*/
-const checkFileExists = (path: string): Promise<boolean> => global.core.api?.checkFileExists(path)
+const exists = (path: string): Promise<boolean> => global.core.api?.exists(path)
/**
* List the directory files
* @param {string} path - The path of the directory to list files.
@@ -75,7 +75,7 @@ export const fs = {
isDirectory,
writeFile,
readFile,
-checkFileExists,
+exists,
listFiles,
mkdir,
rmdir,
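
The first two hunks rename the `fs` facade's existence check from `checkFileExists` to `exists`; the `Promise<boolean>` contract is unchanged, so call sites only swap the name. A minimal migration sketch, assuming the facade is imported from `@janhq/core` (the `ensureFile` helper is hypothetical, not from the commit):

```ts
import { fs } from "@janhq/core"; // assumed import path for the facade above

// Hypothetical call site illustrating the rename.
async function ensureFile(path: string, fallback: string): Promise<void> {
  // Before this commit: await fs.checkFileExists(path)
  if (!(await fs.exists(path))) {
    await fs.writeFile(path, fallback);
  }
}
```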

View File

@@ -41,11 +41,8 @@ export type MessageRequest = {
/** Messages for constructing a chat completion request **/
messages?: ChatCompletionMessage[];
-/** Runtime parameters for constructing a chat completion request **/
-parameters?: ModelRuntimeParams;
/** Settings for constructing a chat completion request **/
-model?: ModelInfo
+model?: ModelInfo;
};
/**

View File

@@ -56,7 +56,7 @@ export function handleFsIPCs() {
* @param path - The path of the file to check.
* @returns A promise that resolves with a boolean indicating whether the file exists.
*/
-ipcMain.handle('checkFileExists', async (_event, path: string) => {
+ipcMain.handle('exists', async (_event, path: string) => {
return new Promise((resolve, reject) => {
const fullPath = join(userSpacePath, path)
fs.existsSync(fullPath) ? resolve(true) : resolve(false)

View File

@@ -31,7 +31,7 @@ export function fsInvokers() {
* Reads a file at the specified path.
* @param {string} path - The path of the file to read.
*/
-checkFileExists: (path: string) => ipcRenderer.invoke('checkFileExists', path),
+exists: (path: string) => ipcRenderer.invoke('exists', path),
/**
* Writes data to a file at the specified path.
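
The previous two files are the two halves of the same rename: the `ipcMain.handle` channel in the main process and the `ipcRenderer.invoke` call in the preload bridge must use the identical string, or the invocation rejects at runtime. A sketch of the pairing with the channel name hoisted into a shared constant (the constant and file layout are our addition for illustration; the commit itself uses bare string literals):

```ts
// shared.ts -- hypothetical shared constant to keep both processes in sync
export const EXISTS_CHANNEL = "exists";

// main process
import { ipcMain } from "electron";
import { existsSync } from "fs";
import { join } from "path";

const userSpacePath = "/path/to/userspace"; // assumption: resolved elsewhere in the app

ipcMain.handle(EXISTS_CHANNEL, async (_event, path: string) =>
  existsSync(join(userSpacePath, path))
);

// preload bridge
import { ipcRenderer } from "electron";

export const exists = (path: string): Promise<boolean> =>
  ipcRenderer.invoke(EXISTS_CHANNEL, path);
```

Since `ipcMain.handle` resolves with whatever the callback returns, the explicit `new Promise` wrapper in the handler hunk above is not strictly needed.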

View File

@@ -24,4 +24,3 @@ interface ModelOperationResponse {
error?: any;
modelFile?: string;
}

View File

@ -33,17 +33,17 @@ import { join } from "path";
* It also subscribes to events emitted by the @janhq/core package and handles new message requests.
*/
export default class JanInferenceNitroExtension implements InferenceExtension {
-private static readonly _homeDir = 'engines'
-private static readonly _engineMetadataFileName = 'nitro.json'
+private static readonly _homeDir = "engines";
+private static readonly _engineMetadataFileName = "nitro.json";
-static _currentModel: Model;
+private static _currentModel: Model;
-static _engineSettings: EngineSettings = {
-"ctx_len": 2048,
-"ngl": 100,
-"cont_batching": false,
-"embedding": false
-}
+private static _engineSettings: EngineSettings = {
+ctx_len: 2048,
+ngl: 100,
+cont_batching: false,
+embedding: false,
+};
controller = new AbortController();
isCancelled = false;
@@ -59,8 +59,8 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
* Subscribes to events emitted by the @janhq/core package.
*/
onLoad(): void {
-fs.mkdir(JanInferenceNitroExtension._homeDir)
-this.writeDefaultEngineSettings()
+fs.mkdir(JanInferenceNitroExtension._homeDir);
+this.writeDefaultEngineSettings();
// Events subscription
events.on(EventName.OnMessageSent, (data) =>
@@ -112,42 +112,51 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
private async writeDefaultEngineSettings() {
try {
-const engine_json = join(JanInferenceNitroExtension._homeDir, JanInferenceNitroExtension._engineMetadataFileName)
-if (await fs.checkFileExists(engine_json)) {
-JanInferenceNitroExtension._engineSettings = JSON.parse(await fs.readFile(engine_json))
-}
-else {
-await fs.writeFile(engine_json, JSON.stringify(JanInferenceNitroExtension._engineSettings, null, 2))
+const engineFile = join(
+JanInferenceNitroExtension._homeDir,
+JanInferenceNitroExtension._engineMetadataFileName
+);
+if (await fs.exists(engineFile)) {
+JanInferenceNitroExtension._engineSettings = JSON.parse(
+await fs.readFile(engineFile)
+);
+} else {
+await fs.writeFile(
+engineFile,
+JSON.stringify(JanInferenceNitroExtension._engineSettings, null, 2)
+);
}
} catch (err) {
-console.error(err)
+console.error(err);
}
}
private static async handleModelInit(model: Model) {
if (model.engine !== "nitro") { return }
if (model.engine !== "nitro") {
return;
}
const userSpacePath = await getUserSpace();
const modelFullPath = join(userSpacePath, "models", model.id, model.id);
-const nitro_init_result = await executeOnMain(MODULE, "initModel", {
+const nitroInitResult = await executeOnMain(MODULE, "initModel", {
modelFullPath: modelFullPath,
-model: model
+model: model,
});
-if (nitro_init_result.error === null) {
-events.emit(EventName.OnModelFail, model)
-}
-else{
+if (nitroInitResult.error === null) {
+events.emit(EventName.OnModelFail, model);
+} else {
JanInferenceNitroExtension._currentModel = model;
events.emit(EventName.OnModelReady, model);
}
}
private static async handleModelStop(model: Model) {
-if (model.engine !== 'nitro') { return }
-else {
-await executeOnMain(MODULE, "stopModel")
-events.emit(EventName.OnModelStopped, model)
+if (model.engine !== "nitro") {
+return;
+} else {
+await executeOnMain(MODULE, "stopModel");
+events.emit(EventName.OnModelStopped, model);
}
}
@@ -174,8 +183,7 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
data.messages ?? [],
JanInferenceNitroExtension._engineSettings,
JanInferenceNitroExtension._currentModel
-)
-.subscribe({
+).subscribe({
next: (_content) => {},
complete: async () => {
resolve(message);
@@ -197,7 +205,9 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
data: MessageRequest,
instance: JanInferenceNitroExtension
) {
-if (data.model.engine !== 'nitro') { return }
+if (data.model.engine !== "nitro") {
+return;
+}
const timestamp = Date.now();
const message: ThreadMessage = {
id: ulid(),
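
Worth noting on the `writeDefaultEngineSettings` hunk above: it is a read-or-create routine — load `engines/nitro.json` if it exists, otherwise seed it with the compiled-in defaults. The same shape recurs in the OpenAI extension below. A generic sketch of the pattern; the `loadOrSeedSettings` helper is hypothetical, assuming the `fs` facade from `@janhq/core`:

```ts
import { fs } from "@janhq/core"; // assumed import path
import { join } from "path";

// Hypothetical generic form of the read-or-create logic both extensions inline.
async function loadOrSeedSettings<T>(dir: string, file: string, defaults: T): Promise<T> {
  const path = join(dir, file);
  try {
    if (await fs.exists(path)) {
      // Settings file already present: trust it.
      return JSON.parse(await fs.readFile(path)) as T;
    }
    // First run: persist the defaults so users can edit them later.
    await fs.writeFile(path, JSON.stringify(defaults, null, 2));
  } catch (err) {
    console.error(err);
  }
  return defaults;
}
```

For the Nitro engine this would be called as `loadOrSeedSettings("engines", "nitro.json", { ctx_len: 2048, ngl: 100, cont_batching: false, embedding: false })`.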

View File

@@ -26,9 +26,9 @@ let currentModelFile = null;
*/
function stopModel(): Promise<ModelOperationResponse> {
return new Promise((resolve, reject) => {
-checkAndUnloadNitro()
-resolve({ error: undefined})
-})
+checkAndUnloadNitro();
+resolve({ error: undefined });
+});
}
/**
@@ -41,9 +41,8 @@ function stopModel(): Promise<ModelOperationResponse> {
function initModel(wrapper: any): Promise<ModelOperationResponse> {
currentModelFile = wrapper.modelFullPath;
if (wrapper.model.engine !== "nitro") {
return Promise.resolve({ error: "Not a nitro model" })
}
else {
return Promise.resolve({ error: "Not a nitro model" });
} else {
log.info("Started to load model " + wrapper.model.modelFullPath);
const settings = {
llama_model_path: currentModelFile,
@@ -148,13 +147,12 @@ async function checkAndUnloadNitro() {
// If inUse - try unload or kill process, otherwise do nothing
if (inUse) {
// Attempt to unload model
-return await fetch(NITRO_HTTP_UNLOAD_MODEL_URL, {
+return fetch(NITRO_HTTP_UNLOAD_MODEL_URL, {
method: "GET",
headers: {
"Content-Type": "application/json",
},
-})
-.catch((err) => {
+}).catch((err) => {
console.error(err);
// Fallback to kill the port
return killSubprocess();
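
Dropping the `await` in `return await fetch(...)` is behavior-preserving here: the rejection handler is the chained `.catch`, not an enclosing `try`/`catch`, so returning the promise directly changes nothing observable. The shape is a graceful-unload-then-kill fallback. A condensed sketch, where the URL constant and `killSubprocess` are stand-ins for the module's own definitions and `fetch` is assumed to be available globally (Node 18+ or a polyfill):

```ts
// Assumed endpoint; stand-in for the module's own constant.
const NITRO_HTTP_UNLOAD_MODEL_URL = "http://127.0.0.1:3928/inferences/llamacpp/unloadmodel";

// Stand-in: the real module terminates the Nitro child process / frees its port.
async function killSubprocess(): Promise<void> {}

function unloadOrKill(): Promise<unknown> {
  // Ask Nitro to unload the model over HTTP first...
  return fetch(NITRO_HTTP_UNLOAD_MODEL_URL, {
    method: "GET",
    headers: { "Content-Type": "application/json" },
  }).catch((err) => {
    console.error(err);
    // ...and only fall back to killing the process if the request fails.
    return killSubprocess();
  });
}
```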

View File

@@ -8,18 +8,18 @@ declare interface EngineSettings {
}
enum OpenAIChatCompletionModelName {
-'gpt-3.5-turbo-instruct' = 'gpt-3.5-turbo-instruct',
-'gpt-3.5-turbo-instruct-0914' = 'gpt-3.5-turbo-instruct-0914',
-'gpt-4-1106-preview' = 'gpt-4-1106-preview',
-'gpt-3.5-turbo-0613' = 'gpt-3.5-turbo-0613',
-'gpt-3.5-turbo-0301' = 'gpt-3.5-turbo-0301',
-'gpt-3.5-turbo' = 'gpt-3.5-turbo',
-'gpt-3.5-turbo-16k-0613' = 'gpt-3.5-turbo-16k-0613',
-'gpt-3.5-turbo-1106' = 'gpt-3.5-turbo-1106',
-'gpt-4-vision-preview' = 'gpt-4-vision-preview',
-'gpt-4' = 'gpt-4',
-'gpt-4-0314' = 'gpt-4-0314',
-'gpt-4-0613' = 'gpt-4-0613',
+"gpt-3.5-turbo-instruct" = "gpt-3.5-turbo-instruct",
+"gpt-3.5-turbo-instruct-0914" = "gpt-3.5-turbo-instruct-0914",
+"gpt-4-1106-preview" = "gpt-4-1106-preview",
+"gpt-3.5-turbo-0613" = "gpt-3.5-turbo-0613",
+"gpt-3.5-turbo-0301" = "gpt-3.5-turbo-0301",
+"gpt-3.5-turbo" = "gpt-3.5-turbo",
+"gpt-3.5-turbo-16k-0613" = "gpt-3.5-turbo-16k-0613",
+"gpt-3.5-turbo-1106" = "gpt-3.5-turbo-1106",
+"gpt-4-vision-preview" = "gpt-4-vision-preview",
+"gpt-4" = "gpt-4",
+"gpt-4-0314" = "gpt-4-0314",
+"gpt-4-0613" = "gpt-4-0613",
}
declare type OpenAIModel = Omit<Model, "id"> & {
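
Beyond the quote-style sweep, the `OpenAIModel` line beneath the enum shows the `Omit`-and-narrow idiom: strip a field from a base type and re-add it with a stricter type. A self-contained sketch of the idiom (the `Base`/`Narrowed`/`AllowedId` names are ours, not the codebase's):

```ts
// Generic illustration of Omit-and-narrow, as used by OpenAIModel above.
interface Base {
  id: string;
  name: string;
}

enum AllowedId {
  A = "a",
  B = "b",
}

// Same shape as Base, but id is restricted to the enum's members.
type Narrowed = Omit<Base, "id"> & { id: AllowedId };

const ok: Narrowed = { id: AllowedId.A, name: "example" };
```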

View File

@@ -31,14 +31,14 @@ import { EngineSettings, OpenAIModel } from "./@types/global";
* It also subscribes to events emitted by the @janhq/core package and handles new message requests.
*/
export default class JanInferenceOpenAIExtension implements InferenceExtension {
-private static readonly _homeDir = 'engines'
-private static readonly _engineMetadataFileName = 'openai.json'
+private static readonly _homeDir = "engines";
+private static readonly _engineMetadataFileName = "openai.json";
-static _currentModel: OpenAIModel;
+private static _currentModel: OpenAIModel;
-static _engineSettings: EngineSettings = {
-"base_url": "https://api.openai.com/v1",
-"api_key": "sk-<your key here>"
+private static _engineSettings: EngineSettings = {
+base_url: "https://api.openai.com/v1",
+api_key: "sk-<your key here>",
};
controller = new AbortController();
@@ -56,8 +56,8 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
* Subscribes to events emitted by the @janhq/core package.
*/
onLoad(): void {
-fs.mkdir(JanInferenceOpenAIExtension._homeDir)
-JanInferenceOpenAIExtension.writeDefaultEngineSettings()
+fs.mkdir(JanInferenceOpenAIExtension._homeDir);
+JanInferenceOpenAIExtension.writeDefaultEngineSettings();
// Events subscription
events.on(EventName.OnMessageSent, (data) =>
@@ -87,20 +87,27 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
modelId: string,
settings?: ModelSettingParams
): Promise<void> {
-return
+return;
}
static async writeDefaultEngineSettings() {
try {
-const engine_json = join(JanInferenceOpenAIExtension._homeDir, JanInferenceOpenAIExtension._engineMetadataFileName)
-if (await fs.checkFileExists(engine_json)) {
-JanInferenceOpenAIExtension._engineSettings = JSON.parse(await fs.readFile(engine_json))
-}
-else {
-await fs.writeFile(engine_json, JSON.stringify(JanInferenceOpenAIExtension._engineSettings, null, 2))
+const engineFile = join(
+JanInferenceOpenAIExtension._homeDir,
+JanInferenceOpenAIExtension._engineMetadataFileName
+);
+if (await fs.exists(engineFile)) {
+JanInferenceOpenAIExtension._engineSettings = JSON.parse(
+await fs.readFile(engineFile)
+);
+} else {
+await fs.writeFile(
+engineFile,
+JSON.stringify(JanInferenceOpenAIExtension._engineSettings, null, 2)
+);
}
} catch (err) {
-console.error(err)
+console.error(err);
}
}
/**
@@ -137,10 +144,11 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
};
return new Promise(async (resolve, reject) => {
-requestInference(data.messages ?? [],
+requestInference(
+data.messages ?? [],
JanInferenceOpenAIExtension._engineSettings,
-JanInferenceOpenAIExtension._currentModel)
-.subscribe({
+JanInferenceOpenAIExtension._currentModel
+).subscribe({
next: (_content) => {},
complete: async () => {
resolve(message);
@@ -153,19 +161,22 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
}
private static async handleModelInit(model: OpenAIModel) {
-if (model.engine !== 'openai') { return }
-else {
-JanInferenceOpenAIExtension._currentModel = model
-JanInferenceOpenAIExtension.writeDefaultEngineSettings()
+if (model.engine !== "openai") {
+return;
+} else {
+JanInferenceOpenAIExtension._currentModel = model;
+JanInferenceOpenAIExtension.writeDefaultEngineSettings();
// Todo: Check model list with API key
-events.emit(EventName.OnModelReady, model)
+events.emit(EventName.OnModelReady, model);
// events.emit(EventName.OnModelFail, model)
}
}
private static async handleModelStop(model: OpenAIModel) {
-if (model.engine !== 'openai') { return }
-events.emit(EventName.OnModelStopped, model)
+if (model.engine !== "openai") {
+return;
+}
+events.emit(EventName.OnModelStopped, model);
}
/**
@@ -178,7 +189,9 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
data: MessageRequest,
instance: JanInferenceOpenAIExtension
) {
-if (data.model.engine !== 'openai') { return }
+if (data.model.engine !== "openai") {
+return;
+}
const timestamp = Date.now();
const message: ThreadMessage = {
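
Both extensions resolve their outer Promise from inside an Observable subscription: `requestInference(...)` streams content chunks through `next`, and the Promise settles on `complete`. A reduced sketch of that bridge, with a stubbed stream standing in for the real `requestInference` (rxjs assumed, matching the `subscribe({ next, complete })` shape above):

```ts
import { Observable } from "rxjs"; // assumed; matches the subscribe shape above

// Stand-in for requestInference(messages, settings, model).
function fakeInference(): Observable<string> {
  return new Observable<string>((subscriber) => {
    ["Hel", "lo"].forEach((chunk) => subscriber.next(chunk));
    subscriber.complete();
  });
}

// Bridge: accumulate streamed chunks, settle the Promise when the stream ends.
function runRequest(): Promise<string> {
  return new Promise((resolve, reject) => {
    let text = "";
    fakeInference().subscribe({
      next: (content) => { text += content; },
      complete: () => resolve(text),
      error: (err) => reject(err),
    });
  });
}
```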

View File

@@ -231,7 +231,6 @@ export default function useSendChatMessage() {
await WaitForModelStarting(modelId)
setQueuedMessage(false)
}
-console.log('messageRequest', messageRequest)
events.emit(EventName.OnMessageSent, messageRequest)
}