fix: Update code based on comments from @james
This commit is contained in:
parent fbf8ff9d3d
commit 1177007ad6
@@ -25,7 +25,7 @@ const readFile: (path: string) => Promise<any> = (path) => global.core.api?.readFile(path)
  * @param {string} path
  * @returns {boolean} A boolean indicating whether the path is a file.
  */
-const checkFileExists = (path: string): Promise<boolean> => global.core.api?.checkFileExists(path)
+const exists = (path: string): Promise<boolean> => global.core.api?.exists(path)
 /**
  * List the directory files
  * @param {string} path - The path of the directory to list files.

@@ -75,7 +75,7 @@ export const fs = {
   isDirectory,
   writeFile,
   readFile,
-  checkFileExists,
+  exists,
   listFiles,
   mkdir,
   rmdir,
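
The rename is mechanical, but every caller has to follow it. A minimal sketch of a call site under the new name (the helper is illustrative and not part of the commit, assuming the core package exposes the fs object exactly as the hunks above define it):

// Hypothetical caller; `fs.exists` replaces the old `fs.checkFileExists`.
import { fs } from "@janhq/core";

async function readIfPresent(path: string): Promise<string | undefined> {
  if (await fs.exists(path)) {
    return fs.readFile(path);
  }
  return undefined;
}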

@@ -41,11 +41,8 @@ export type MessageRequest = {
   /** Messages for constructing a chat completion request **/
   messages?: ChatCompletionMessage[];
 
-  /** Runtime parameters for constructing a chat completion request **/
-  parameters?: ModelRuntimeParams;
-
   /** Settings for constructing a chat completion request **/
-  model?: ModelInfo
+  model?: ModelInfo;
 };
 
 /**
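
With `parameters` dropped, a request now carries only the message list and the model info. A sketch of the trimmed shape (values are placeholders; `messages` and `selectedModel` are assumed to be built elsewhere):

// Hypothetical construction under the new MessageRequest type.
const request: MessageRequest = {
  messages, // ChatCompletionMessage[]
  model: selectedModel, // ModelInfo
};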

@@ -56,7 +56,7 @@ export function handleFsIPCs() {
    * @param path - The path of the file to check.
    * @returns A promise that resolves with a boolean indicating whether the file exists.
    */
-  ipcMain.handle('checkFileExists', async (_event, path: string) => {
+  ipcMain.handle('exists', async (_event, path: string) => {
     return new Promise((resolve, reject) => {
       const fullPath = join(userSpacePath, path)
       fs.existsSync(fullPath) ? resolve(true) : resolve(false)

@@ -31,7 +31,7 @@ export function fsInvokers() {
    * Reads a file at the specified path.
    * @param {string} path - The path of the file to read.
    */
-  checkFileExists: (path: string) => ipcRenderer.invoke('checkFileExists', path),
+  exists: (path: string) => ipcRenderer.invoke('exists', path),
 
   /**
    * Writes data to a file at the specified path.
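
The rename has to stay consistent across three layers: the core `exists` wrapper, the `fsInvokers` preload entry, and the `ipcMain` handler. Collapsed into one sketch (module boundaries removed; the handler is slightly simplified, since `ipcMain.handle` already wraps a plain return value in a promise):

// Renderer/preload side: forward the call over IPC.
exists: (path: string) => ipcRenderer.invoke('exists', path),

// Main process side: resolve the path against the user-space root.
ipcMain.handle('exists', async (_event, path: string) => {
  const fullPath = join(userSpacePath, path)
  return fs.existsSync(fullPath)
})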

@@ -24,4 +24,3 @@ interface ModelOperationResponse {
   error?: any;
   modelFile?: string;
 }
-
@@ -33,17 +33,17 @@ import { join } from "path";
  * It also subscribes to events emitted by the @janhq/core package and handles new message requests.
  */
 export default class JanInferenceNitroExtension implements InferenceExtension {
-  private static readonly _homeDir = 'engines'
-  private static readonly _engineMetadataFileName = 'nitro.json'
+  private static readonly _homeDir = "engines";
+  private static readonly _engineMetadataFileName = "nitro.json";
 
-  static _currentModel: Model;
+  private static _currentModel: Model;
 
-  static _engineSettings: EngineSettings = {
-    "ctx_len": 2048,
-    "ngl": 100,
-    "cont_batching": false,
-    "embedding": false
-  }
+  private static _engineSettings: EngineSettings = {
+    ctx_len: 2048,
+    ngl: 100,
+    cont_batching: false,
+    embedding: false,
+  };
 
   controller = new AbortController();
   isCancelled = false;

@@ -59,8 +59,8 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
    * Subscribes to events emitted by the @janhq/core package.
    */
   onLoad(): void {
-    fs.mkdir(JanInferenceNitroExtension._homeDir)
-    this.writeDefaultEngineSettings()
+    fs.mkdir(JanInferenceNitroExtension._homeDir);
+    this.writeDefaultEngineSettings();
 
     // Events subscription
     events.on(EventName.OnMessageSent, (data) =>

@@ -112,42 +112,51 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
 
   private async writeDefaultEngineSettings() {
     try {
-      const engine_json = join(JanInferenceNitroExtension._homeDir, JanInferenceNitroExtension._engineMetadataFileName)
-      if (await fs.checkFileExists(engine_json)) {
-        JanInferenceNitroExtension._engineSettings = JSON.parse(await fs.readFile(engine_json))
-      }
-      else {
-        await fs.writeFile(engine_json, JSON.stringify(JanInferenceNitroExtension._engineSettings, null, 2))
+      const engineFile = join(
+        JanInferenceNitroExtension._homeDir,
+        JanInferenceNitroExtension._engineMetadataFileName
+      );
+      if (await fs.exists(engineFile)) {
+        JanInferenceNitroExtension._engineSettings = JSON.parse(
+          await fs.readFile(engineFile)
+        );
+      } else {
+        await fs.writeFile(
+          engineFile,
+          JSON.stringify(JanInferenceNitroExtension._engineSettings, null, 2)
+        );
       }
     } catch (err) {
-      console.error(err)
+      console.error(err);
     }
   }
 
   private static async handleModelInit(model: Model) {
-    if (model.engine !== "nitro") { return }
+    if (model.engine !== "nitro") {
+      return;
+    }
     const userSpacePath = await getUserSpace();
     const modelFullPath = join(userSpacePath, "models", model.id, model.id);
 
-    const nitro_init_result = await executeOnMain(MODULE, "initModel", {
+    const nitroInitResult = await executeOnMain(MODULE, "initModel", {
       modelFullPath: modelFullPath,
-      model: model
+      model: model,
     });
 
-    if (nitro_init_result.error === null) {
-      events.emit(EventName.OnModelFail, model)
-    }
-    else{
+    if (nitroInitResult.error === null) {
+      events.emit(EventName.OnModelFail, model);
+    } else {
       JanInferenceNitroExtension._currentModel = model;
       events.emit(EventName.OnModelReady, model);
     }
   }
 
   private static async handleModelStop(model: Model) {
-    if (model.engine !== 'nitro') { return }
-    else {
-      await executeOnMain(MODULE, "stopModel")
-      events.emit(EventName.OnModelStopped, model)
+    if (model.engine !== "nitro") {
+      return;
+    } else {
+      await executeOnMain(MODULE, "stopModel");
+      events.emit(EventName.OnModelStopped, model);
     }
   }
 
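
Both extensions now share the same load-or-create sequence for their engine settings file. Stripped of class names, it reduces to a sketch like this (the helper name and generic are illustrative, not part of the commit; `fs` is the @janhq/core wrapper used throughout):

// Load persisted settings if the file exists, otherwise persist defaults.
async function loadOrCreateSettings<T>(file: string, defaults: T): Promise<T> {
  try {
    if (await fs.exists(file)) {
      return JSON.parse(await fs.readFile(file));
    }
    await fs.writeFile(file, JSON.stringify(defaults, null, 2));
  } catch (err) {
    console.error(err);
  }
  return defaults;
}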

@@ -174,8 +183,7 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
         data.messages ?? [],
         JanInferenceNitroExtension._engineSettings,
         JanInferenceNitroExtension._currentModel
-      )
-        .subscribe({
+      ).subscribe({
         next: (_content) => {},
         complete: async () => {
           resolve(message);

@@ -197,7 +205,9 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
     data: MessageRequest,
     instance: JanInferenceNitroExtension
   ) {
-    if (data.model.engine !== 'nitro') { return }
+    if (data.model.engine !== "nitro") {
+      return;
+    }
     const timestamp = Date.now();
     const message: ThreadMessage = {
       id: ulid(),

@@ -26,9 +26,9 @@ let currentModelFile = null;
  */
 function stopModel(): Promise<ModelOperationResponse> {
   return new Promise((resolve, reject) => {
-    checkAndUnloadNitro()
-    resolve({ error: undefined})
-  })
+    checkAndUnloadNitro();
+    resolve({ error: undefined });
+  });
 }
 
 /**

@@ -41,9 +41,8 @@ function stopModel(): Promise<ModelOperationResponse> {
 function initModel(wrapper: any): Promise<ModelOperationResponse> {
   currentModelFile = wrapper.modelFullPath;
   if (wrapper.model.engine !== "nitro") {
-    return Promise.resolve({ error: "Not a nitro model" })
-  }
-  else {
+    return Promise.resolve({ error: "Not a nitro model" });
+  } else {
     log.info("Started to load model " + wrapper.model.modelFullPath);
     const settings = {
       llama_model_path: currentModelFile,

@@ -148,13 +147,12 @@ async function checkAndUnloadNitro() {
   // If inUse - try unload or kill process, otherwise do nothing
   if (inUse) {
     // Attempt to unload model
-    return await fetch(NITRO_HTTP_UNLOAD_MODEL_URL, {
+    return fetch(NITRO_HTTP_UNLOAD_MODEL_URL, {
       method: "GET",
       headers: {
         "Content-Type": "application/json",
       },
-    })
-      .catch((err) => {
+    }).catch((err) => {
       console.error(err);
       // Fallback to kill the port
       return killSubprocess();
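
One behavioral note on the `checkAndUnloadNitro` hunk: dropping `await` is safe here because the fetch promise, including its `.catch` fallback, is returned directly, so the caller still waits for the whole chain. A condensed illustration (both forms resolve identically for the caller; the second simply avoids an extra await/resume step):

async function unloadA() {
  return await fetch(NITRO_HTTP_UNLOAD_MODEL_URL).catch(() => killSubprocess());
}
async function unloadB() {
  return fetch(NITRO_HTTP_UNLOAD_MODEL_URL).catch(() => killSubprocess());
}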

@@ -8,18 +8,18 @@ declare interface EngineSettings {
 }
 
 enum OpenAIChatCompletionModelName {
-  'gpt-3.5-turbo-instruct' = 'gpt-3.5-turbo-instruct',
-  'gpt-3.5-turbo-instruct-0914' = 'gpt-3.5-turbo-instruct-0914',
-  'gpt-4-1106-preview' = 'gpt-4-1106-preview',
-  'gpt-3.5-turbo-0613' = 'gpt-3.5-turbo-0613',
-  'gpt-3.5-turbo-0301' = 'gpt-3.5-turbo-0301',
-  'gpt-3.5-turbo' = 'gpt-3.5-turbo',
-  'gpt-3.5-turbo-16k-0613' = 'gpt-3.5-turbo-16k-0613',
-  'gpt-3.5-turbo-1106' = 'gpt-3.5-turbo-1106',
-  'gpt-4-vision-preview' = 'gpt-4-vision-preview',
-  'gpt-4' = 'gpt-4',
-  'gpt-4-0314' = 'gpt-4-0314',
-  'gpt-4-0613' = 'gpt-4-0613',
+  "gpt-3.5-turbo-instruct" = "gpt-3.5-turbo-instruct",
+  "gpt-3.5-turbo-instruct-0914" = "gpt-3.5-turbo-instruct-0914",
+  "gpt-4-1106-preview" = "gpt-4-1106-preview",
+  "gpt-3.5-turbo-0613" = "gpt-3.5-turbo-0613",
+  "gpt-3.5-turbo-0301" = "gpt-3.5-turbo-0301",
+  "gpt-3.5-turbo" = "gpt-3.5-turbo",
+  "gpt-3.5-turbo-16k-0613" = "gpt-3.5-turbo-16k-0613",
+  "gpt-3.5-turbo-1106" = "gpt-3.5-turbo-1106",
+  "gpt-4-vision-preview" = "gpt-4-vision-preview",
+  "gpt-4" = "gpt-4",
+  "gpt-4-0314" = "gpt-4-0314",
+  "gpt-4-0613" = "gpt-4-0613",
 }
 
 declare type OpenAIModel = Omit<Model, "id"> & {

@@ -31,14 +31,14 @@ import { EngineSettings, OpenAIModel } from "./@types/global";
  * It also subscribes to events emitted by the @janhq/core package and handles new message requests.
  */
 export default class JanInferenceOpenAIExtension implements InferenceExtension {
-  private static readonly _homeDir = 'engines'
-  private static readonly _engineMetadataFileName = 'openai.json'
+  private static readonly _homeDir = "engines";
+  private static readonly _engineMetadataFileName = "openai.json";
 
-  static _currentModel: OpenAIModel;
+  private static _currentModel: OpenAIModel;
 
-  static _engineSettings: EngineSettings = {
-    "base_url": "https://api.openai.com/v1",
-    "api_key": "sk-<your key here>"
+  private static _engineSettings: EngineSettings = {
+    base_url: "https://api.openai.com/v1",
+    api_key: "sk-<your key here>",
   };
 
   controller = new AbortController();

@@ -56,8 +56,8 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
    * Subscribes to events emitted by the @janhq/core package.
    */
   onLoad(): void {
-    fs.mkdir(JanInferenceOpenAIExtension._homeDir)
-    JanInferenceOpenAIExtension.writeDefaultEngineSettings()
+    fs.mkdir(JanInferenceOpenAIExtension._homeDir);
+    JanInferenceOpenAIExtension.writeDefaultEngineSettings();
 
     // Events subscription
     events.on(EventName.OnMessageSent, (data) =>

@@ -87,20 +87,27 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
     modelId: string,
     settings?: ModelSettingParams
   ): Promise<void> {
-    return
+    return;
   }
 
   static async writeDefaultEngineSettings() {
     try {
-      const engine_json = join(JanInferenceOpenAIExtension._homeDir, JanInferenceOpenAIExtension._engineMetadataFileName)
-      if (await fs.checkFileExists(engine_json)) {
-        JanInferenceOpenAIExtension._engineSettings = JSON.parse(await fs.readFile(engine_json))
-      }
-      else {
-        await fs.writeFile(engine_json, JSON.stringify(JanInferenceOpenAIExtension._engineSettings, null, 2))
+      const engineFile = join(
+        JanInferenceOpenAIExtension._homeDir,
+        JanInferenceOpenAIExtension._engineMetadataFileName
+      );
+      if (await fs.exists(engineFile)) {
+        JanInferenceOpenAIExtension._engineSettings = JSON.parse(
+          await fs.readFile(engineFile)
+        );
+      } else {
+        await fs.writeFile(
+          engineFile,
+          JSON.stringify(JanInferenceOpenAIExtension._engineSettings, null, 2)
+        );
       }
     } catch (err) {
-      console.error(err)
+      console.error(err);
     }
   }
   /**

@@ -137,10 +144,11 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
     };
 
     return new Promise(async (resolve, reject) => {
-      requestInference(data.messages ?? [],
+      requestInference(
+        data.messages ?? [],
         JanInferenceOpenAIExtension._engineSettings,
-        JanInferenceOpenAIExtension._currentModel)
-        .subscribe({
+        JanInferenceOpenAIExtension._currentModel
+      ).subscribe({
         next: (_content) => {},
         complete: async () => {
           resolve(message);

@@ -153,19 +161,22 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
   }
 
   private static async handleModelInit(model: OpenAIModel) {
-    if (model.engine !== 'openai') { return }
-    else {
-      JanInferenceOpenAIExtension._currentModel = model
-      JanInferenceOpenAIExtension.writeDefaultEngineSettings()
+    if (model.engine !== "openai") {
+      return;
+    } else {
+      JanInferenceOpenAIExtension._currentModel = model;
+      JanInferenceOpenAIExtension.writeDefaultEngineSettings();
       // Todo: Check model list with API key
-      events.emit(EventName.OnModelReady, model)
+      events.emit(EventName.OnModelReady, model);
       // events.emit(EventName.OnModelFail, model)
     }
   }
 
   private static async handleModelStop(model: OpenAIModel) {
-    if (model.engine !== 'openai') { return }
-    events.emit(EventName.OnModelStopped, model)
+    if (model.engine !== "openai") {
+      return;
+    }
+    events.emit(EventName.OnModelStopped, model);
   }
 
 /**

@@ -178,7 +189,9 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
     data: MessageRequest,
     instance: JanInferenceOpenAIExtension
   ) {
-    if (data.model.engine !== 'openai') { return }
+    if (data.model.engine !== "openai") {
+      return;
+    }
 
     const timestamp = Date.now();
     const message: ThreadMessage = {

@@ -231,7 +231,6 @@ export default function useSendChatMessage() {
       await WaitForModelStarting(modelId)
       setQueuedMessage(false)
     }
-    console.log('messageRequest', messageRequest)
     events.emit(EventName.OnMessageSent, messageRequest)
   }
 