refactor: add app and nitro log - resolve dependencies issue (#1447)

* refactor: add app and nitro log - resolve dependencies issue

* fix: update guidance message on inference error

* chore: add timestamp to log files

* chore: add clear logs action
Louis 2024-01-10 16:43:28 +07:00 committed by GitHub
parent a3f14d50ff
commit 74ed081e4f
26 changed files with 356 additions and 267 deletions
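
Taken together, the changes below move file logging into a shared ~/jan/logs directory and prefix every entry with a source tag plus an ISO timestamp. A rough sketch of the resulting layout — the file paths and tags come from the diff, the sample lines themselves are illustrative:

// Illustrative only: log files after this change
// ~/jan/logs/app.log    – Electron app and Nitro extension messages
// ~/jan/logs/server.log – Jan API server messages
//
// 2024-01-10T09:43:28.000Z [APP]::Error: Error: something went wrong
// 2024-01-10T09:43:29.000Z [NITRO]::Debug: Nitro exited with code: 0
// 2024-01-10T09:43:30.000Z [API]::Debug: Starting JAN API server...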

View File

@ -53,6 +53,8 @@ export default [
'crypto',
'url',
'http',
'os',
'util'
],
watch: {
include: 'src/node/**',

View File

@ -12,6 +12,7 @@ export enum AppRoute {
baseName = 'baseName',
startServer = 'startServer',
stopServer = 'stopServer',
log = 'log'
}
export enum AppEvent {

View File

@ -77,13 +77,12 @@ const openExternalUrl: (url: string) => Promise<any> = (url) =>
const getResourcePath: () => Promise<string> = () => global.core.api?.getResourcePath()
/**
* Gets the file's stats.
* Logs a message to a file from browser processes.
*
* @param path - The path to the file.
* @returns {Promise<FileStat>} - A promise that resolves with the file's stats.
* @param message - Message to log.
*/
const fileStat: (path: string) => Promise<FileStat | undefined> = (path) =>
global.core.api?.fileStat(path)
const log: (message: string, fileName?: string) => void = (message, fileName) =>
global.core.api?.log(message, fileName)
/**
* Register extension point function type definition
@ -108,5 +107,6 @@ export {
joinPath,
openExternalUrl,
baseName,
fileStat,
log,
FileStat
}
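
The browser-side `log` added here just forwards over `global.core.api` to the main process, which appends the entry under ~/jan/logs. A minimal usage sketch from renderer or extension code — the messages are illustrative, not taken from the diff:

import { log } from '@janhq/core'

// Written to ~/jan/logs/app.log by default; the main process adds the timestamp
log('[APP]::Debug: model download started')

// An optional file name routes the entry to another file under the logs folder
log('[API]::Debug: request received', 'server.log')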

View File

@ -1,3 +1,5 @@
import { FileStat } from "./types"
/**
* Writes data to a file at the specified path.
* @returns {Promise<any>} A Promise that resolves when the file is written successfully.
@ -58,6 +60,17 @@ const syncFile: (src: string, dest: string) => Promise<any> = (src, dest) =>
*/
const copyFileSync = (...args: any[]) => global.core.api?.copyFileSync(...args)
/**
* Gets the file's stats.
*
* @param path - The path to the file.
* @returns {Promise<FileStat>} - A promise that resolves with the file's stats.
*/
const fileStat: (path: string) => Promise<FileStat | undefined> = (path) =>
global.core.api?.fileStat(path)
// TODO: Export `dummy` fs functions automatically
// Currently adding these manually
export const fs = {
@ -71,4 +84,5 @@ export const fs = {
appendFileSync,
copyFileSync,
syncFile,
fileStat
}
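
`fileStat` now hangs off the `fs` namespace instead of being exported directly from core, which is why the model extension further down switches to `fs.fileStat`. A short sketch of the new call site, assuming it runs inside an async function; the paths are illustrative:

import { fs, joinPath } from '@janhq/core'

// Inside an async function in an extension
const path = await joinPath(['file://models', 'my-model', 'model.gguf'])
const stats = await fs.fileStat(path)
if (stats && !stats.isDirectory) {
  console.debug(`binary size: ${stats.size}`)
}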

View File

@ -3,11 +3,6 @@ import { JanApiRouteConfiguration, RouteConfiguration } from './configuration'
import { join } from 'path'
import { ContentType, MessageStatus, Model, ThreadMessage } from './../../../index'
import fetch from 'node-fetch'
import { ulid } from 'ulid'
import request from 'request'
const progress = require('request-progress')
const os = require('os')
const path = join(os.homedir(), 'jan')
@ -209,6 +204,7 @@ export const createMessage = async (threadId: string, message: any) => {
const threadMessagesFileName = 'messages.jsonl'
try {
const { ulid } = require('ulid')
const msgId = ulid()
const createdAt = Date.now()
const threadMessage: ThreadMessage = {
@ -260,8 +256,10 @@ export const downloadModel = async (modelId: string) => {
// path to model binary
const modelBinaryPath = join(directoryPath, modelId)
const rq = request(model.source_url)
const request = require('request')
const rq = request(model.source_url)
const progress = require('request-progress')
progress(rq, {})
.on('progress', function (state: any) {
console.log('progress', JSON.stringify(state, null, 2))
@ -324,6 +322,7 @@ export const chatCompletions = async (request: any, reply: any) => {
}
console.debug(apiUrl)
console.debug(JSON.stringify(headers))
const fetch = require('node-fetch')
const response = await fetch(apiUrl, {
method: 'POST',
headers: headers,

View File

@ -1,11 +1,10 @@
import { DownloadRoute } from '../../../api'
import { join } from 'path'
import { userSpacePath, DownloadManager, HttpServer } from '../../index'
import { userSpacePath } from '../../extension/manager'
import { DownloadManager } from '../../download'
import { HttpServer } from '../HttpServer'
import { createWriteStream } from 'fs'
const request = require('request')
const progress = require('request-progress')
export const downloadRouter = async (app: HttpServer) => {
app.post(`/${DownloadRoute.downloadFile}`, async (req, res) => {
const body = JSON.parse(req.body as any)
@ -19,6 +18,9 @@ export const downloadRouter = async (app: HttpServer) => {
const localPath = normalizedArgs[1]
const fileName = localPath.split('/').pop() ?? ''
const request = require('request')
const progress = require('request-progress')
const rq = request(normalizedArgs[0])
progress(rq, {})
.on('progress', function (state: any) {

View File

@ -1,12 +1,10 @@
import { join, extname } from 'path'
import { ExtensionRoute } from '../../../api'
import {
userSpacePath,
ModuleManager,
getActiveExtensions,
installExtensions,
HttpServer,
} from '../../index'
import { ExtensionRoute } from '../../../api/index'
import { userSpacePath } from '../../extension/manager'
import { ModuleManager } from '../../module'
import { getActiveExtensions, installExtensions } from '../../extension/store'
import { HttpServer } from '../HttpServer'
import { readdirSync } from 'fs'
export const extensionRouter = async (app: HttpServer) => {

View File

@ -1,6 +1,7 @@
import { FileSystemRoute } from '../../../api'
import { join } from 'path'
import { HttpServer, userSpacePath } from '../../index'
import { HttpServer } from '../HttpServer'
import { userSpacePath } from '../../extension/manager'
export const fsRouter = async (app: HttpServer) => {
const moduleName = 'fs'

View File

@ -1,5 +1,9 @@
import { HttpServer } from '../HttpServer'
import { commonRouter, threadRouter, fsRouter, extensionRouter, downloadRouter } from './index'
import { commonRouter } from './common'
import { threadRouter } from './thread'
import { fsRouter } from './fs'
import { extensionRouter } from './extension'
import { downloadRouter } from './download'
export const v1Router = async (app: HttpServer) => {
// MARK: External Routes

View File

@ -1,4 +1,3 @@
import { Request } from "request";
/**
* Manages file downloads and network requests.
@ -18,7 +17,7 @@ export class DownloadManager {
* @param {string} fileName - The name of the file.
* @param {Request | undefined} request - The network request to set, or undefined to clear the request.
*/
setRequest(fileName: string, request: Request | undefined) {
setRequest(fileName: string, request: any | undefined) {
this.networkRequests[fileName] = request;
}
}

View File

@ -1,7 +1,5 @@
import { rmdirSync } from 'fs'
import { resolve, join } from 'path'
import { manifest, extract } from 'pacote'
import * as Arborist from '@npmcli/arborist'
import { ExtensionManager } from './manager'
/**
@ -41,6 +39,7 @@ export default class Extension {
* @param {Object} [options] Options provided to pacote when fetching the manifest.
*/
constructor(origin?: string, options = {}) {
const Arborist = require('@npmcli/arborist')
const defaultOpts = {
version: false,
fullMetadata: false,
@ -74,13 +73,15 @@ export default class Extension {
async getManifest() {
// Get the package's manifest (package.json object)
try {
const mnf = await manifest(this.specifier, this.installOptions)
// set the Package properties based on the it's manifest
this.name = mnf.name
this.version = mnf.version
this.main = mnf.main
this.description = mnf.description
await import('pacote').then((pacote) => {
return pacote.manifest(this.specifier, this.installOptions).then((mnf) => {
// set the Package properties based on its manifest
this.name = mnf.name
this.version = mnf.version
this.main = mnf.main
this.description = mnf.description
})
})
} catch (error) {
throw new Error(`Package ${this.origin} does not contain a valid manifest: ${error}`)
}
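
As with `request`, `request-progress`, `node-fetch`, and `ulid` in the routers above, `pacote` is now resolved lazily at the call site rather than at module scope, so the dependency only has to be present when the code path actually runs — which appears to be the "dependencies issue" this commit targets. The pattern, roughly:

// Module-scope import: resolved as soon as the file loads, even if never used
// import { manifest } from 'pacote'

// Hypothetical helper illustrating the lazy-load pattern: resolved only when called
async function getManifestLazily(specifier: string, options: object) {
  const pacote = await import('pacote')
  return pacote.manifest(specifier, options)
}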
@ -99,7 +100,8 @@ export default class Extension {
await this.getManifest()
// Install the package in a child folder of the given folder
await extract(
const pacote = await import('pacote')
await pacote.extract(
this.specifier,
join(ExtensionManager.instance.extensionsPath ?? '', this.name ?? ''),
this.installOptions,
@ -164,10 +166,13 @@ export default class Extension {
* @returns the latest available version if a new version is available or false if not.
*/
async isUpdateAvailable() {
if (this.origin) {
const mnf = await manifest(this.origin)
return mnf.version !== this.version ? mnf.version : false
}
return import('pacote').then((pacote) => {
if (this.origin) {
return pacote.manifest(this.origin).then((mnf) => {
return mnf.version !== this.version ? mnf.version : false
})
}
})
}
/**

View File

@ -1,7 +1,6 @@
import { join, resolve } from "path";
import { existsSync, mkdirSync, writeFileSync } from "fs";
import { init } from "./index";
import { homedir } from "os"
/**
* Manages extension installation and migration.
@ -20,22 +19,6 @@ export class ExtensionManager {
}
}
/**
* Sets up the extensions by initializing the `extensions` module with the `confirmInstall` and `extensionsPath` options.
* The `confirmInstall` function always returns `true` to allow extension installation.
* The `extensionsPath` option specifies the path to install extensions to.
*/
setupExtensions() {
init({
// Function to check from the main process that user wants to install a extension
confirmInstall: async (_extensions: string[]) => {
return true;
},
// Path to install extension to
extensionsPath: join(userSpacePath, "extensions"),
});
}
setExtensionsPath(extPath: string) {
// Create folder if it does not exist
let extDir;

View File

@ -3,16 +3,24 @@ import util from 'util'
import path from 'path'
import os from 'os'
const appDir = path.join(os.homedir(), 'jan')
export const logDir = path.join(os.homedir(), 'jan', 'logs')
export const logPath = path.join(appDir, 'app.log')
export const log = function (message: string, fileName: string = 'app.log') {
if (!fs.existsSync(logDir)) {
fs.mkdirSync(logDir, { recursive: true })
}
if (!message.startsWith('[')) {
message = `[APP]::${message}`
}
export const log = function (d: any) {
if (fs.existsSync(appDir)) {
var log_file = fs.createWriteStream(logPath, {
message = `${new Date().toISOString()} ${message}`
if (fs.existsSync(logDir)) {
var log_file = fs.createWriteStream(path.join(logDir, fileName), {
flags: 'a',
})
log_file.write(util.format(d) + '\n')
log_file.write(util.format(message) + '\n')
log_file.close()
console.debug(message)
}
}
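
A quick sketch of what the reworked logger does with a call — file names and messages are illustrative: the [APP]:: tag is added only when the message has no prefix of its own, the ISO timestamp always is, and everything ends up under ~/jan/logs:

import { log } from '@janhq/core/node'

log('model loaded')                                 // app.log: "<ISO time> [APP]::model loaded"
log('[NITRO]::Debug: port is free')                 // keeps its own prefix, still timestamped
log('[API]::Debug: Server stopped', 'server.log')   // appended to ~/jan/logs/server.log instead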

View File

@ -1,9 +1,9 @@
import { app, ipcMain, shell, nativeTheme } from 'electron'
import { app, ipcMain, shell } from 'electron'
import { join, basename } from 'path'
import { WindowManager } from './../managers/window'
import { getResourcePath, userSpacePath } from './../utils/path'
import { AppRoute } from '@janhq/core'
import { ExtensionManager, ModuleManager } from '@janhq/core/node'
import { ModuleManager, init, log } from '@janhq/core/node'
import { startServer, stopServer } from '@janhq/server'
export function handleAppIPCs() {
@ -59,7 +59,7 @@ export function handleAppIPCs() {
app.isPackaged ? join(getResourcePath(), 'docs', 'openapi') : undefined
)
)
/**
* Stop Jan API Server.
*/
@ -82,8 +82,22 @@ export function handleAppIPCs() {
require.resolve(join(userSpacePath, 'extensions', modulePath))
]
}
ExtensionManager.instance.setupExtensions()
init({
// Function to check from the main process that the user wants to install an extension
confirmInstall: async (_extensions: string[]) => {
return true
},
// Path to install extension to
extensionsPath: join(userSpacePath, 'extensions'),
})
WindowManager.instance.currentWindow?.reload()
}
})
/**
* Log message to log file.
*/
ipcMain.handle(AppRoute.log, async (_event, message, fileName) =>
log(message, fileName)
)
}

View File

@ -6,7 +6,7 @@ import { FileManagerRoute } from '@janhq/core'
import { userSpacePath, getResourcePath } from './../utils/path'
import fs from 'fs'
import { join } from 'path'
import { FileStat } from '@janhq/core/.'
import { FileStat } from '@janhq/core'
/**
* Handles file system extensions operations.

View File

@ -6,7 +6,7 @@ import { createUserSpace } from './utils/path'
* Managers
**/
import { WindowManager } from './managers/window'
import { ExtensionManager, ModuleManager } from '@janhq/core/node'
import { log, ModuleManager } from '@janhq/core/node'
/**
* IPC Handlers
@ -17,14 +17,19 @@ import { handleFileMangerIPCs } from './handlers/fileManager'
import { handleAppIPCs } from './handlers/app'
import { handleAppUpdates } from './handlers/update'
import { handleFsIPCs } from './handlers/fs'
/**
* Utils
**/
import { migrateExtensions } from './utils/migration'
import { cleanUpAndQuit } from './utils/clean'
import { setupExtensions } from './utils/extension'
app
.whenReady()
.then(createUserSpace)
.then(migrateExtensions)
.then(ExtensionManager.instance.setupExtensions)
.then(setupExtensions)
.then(setupMenu)
.then(handleIPCs)
.then(handleAppUpdates)
@ -93,5 +98,5 @@ function handleIPCs() {
*/
process.on('uncaughtException', function (err) {
// TODO: Write error to log file in #1447
console.error(err)
log(`Error: ${err}`)
})

View File

@ -0,0 +1,13 @@
import { init, userSpacePath } from '@janhq/core/node'
import path from 'path'
export const setupExtensions = () => {
init({
// Function to check from the main process that the user wants to install an extension
confirmInstall: async (_extensions: string[]) => {
return true
},
// Path to install extension to
extensionsPath: path.join(userSpacePath, 'extensions'),
})
}

View File

@ -50,6 +50,7 @@
"bundleDependencies": [
"tcp-port-used",
"fetch-retry",
"os-utils"
"os-utils",
"@janhq/core"
]
}

View File

@ -1,5 +1,6 @@
declare const MODULE: string;
declare const INFERENCE_URL: string;
declare const TROUBLESHOOTING_URL: string;
/**
* The parameters for the initModel function.

View File

@ -0,0 +1,138 @@
/**
* Default GPU settings
**/
const DEFALT_SETTINGS = {
notify: true,
run_mode: "cpu",
nvidia_driver: {
exist: false,
version: "",
},
cuda: {
exist: false,
version: "",
},
gpus: [],
gpu_highest_vram: "",
};
/**
* Validate NVIDIA driver and CUDA availability on Linux and Windows
*/
async function updateNvidiaDriverInfo(): Promise<void> {
exec(
"nvidia-smi --query-gpu=driver_version --format=csv,noheader",
(error, stdout) => {
let data;
try {
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
} catch (error) {
data = DEFALT_SETTINGS;
}
if (!error) {
const firstLine = stdout.split("\n")[0].trim();
data["nvidia_driver"].exist = true;
data["nvidia_driver"].version = firstLine;
} else {
data["nvidia_driver"].exist = false;
}
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
Promise.resolve();
}
);
}
function checkFileExistenceInPaths(file: string, paths: string[]): boolean {
return paths.some((p) => existsSync(path.join(p, file)));
}
function updateCudaExistence() {
let filesCuda12: string[];
let filesCuda11: string[];
let paths: string[];
let cudaVersion: string = "";
if (process.platform === "win32") {
filesCuda12 = ["cublas64_12.dll", "cudart64_12.dll", "cublasLt64_12.dll"];
filesCuda11 = ["cublas64_11.dll", "cudart64_11.dll", "cublasLt64_11.dll"];
paths = process.env.PATH ? process.env.PATH.split(path.delimiter) : [];
} else {
filesCuda12 = ["libcudart.so.12", "libcublas.so.12", "libcublasLt.so.12"];
filesCuda11 = ["libcudart.so.11.0", "libcublas.so.11", "libcublasLt.so.11"];
paths = process.env.LD_LIBRARY_PATH
? process.env.LD_LIBRARY_PATH.split(path.delimiter)
: [];
paths.push("/usr/lib/x86_64-linux-gnu/");
}
let cudaExists = filesCuda12.every(
(file) => existsSync(file) || checkFileExistenceInPaths(file, paths)
);
if (!cudaExists) {
cudaExists = filesCuda11.every(
(file) => existsSync(file) || checkFileExistenceInPaths(file, paths)
);
if (cudaExists) {
cudaVersion = "11";
}
} else {
cudaVersion = "12";
}
let data;
try {
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
} catch (error) {
data = DEFALT_SETTINGS;
}
data["cuda"].exist = cudaExists;
data["cuda"].version = cudaVersion;
if (cudaExists) {
data.run_mode = "gpu";
}
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
}
async function updateGpuInfo(): Promise<void> {
exec(
"nvidia-smi --query-gpu=index,memory.total --format=csv,noheader,nounits",
(error, stdout) => {
let data;
try {
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
} catch (error) {
data = DEFALT_SETTINGS;
}
if (!error) {
// Get GPU info and track the GPU with the highest VRAM
let highestVram = 0;
let highestVramId = "0";
let gpus = stdout
.trim()
.split("\n")
.map((line) => {
let [id, vram] = line.split(", ");
vram = vram.replace(/\r/g, "");
if (parseFloat(vram) > highestVram) {
highestVram = parseFloat(vram);
highestVramId = id;
}
return { id, vram };
});
data["gpus"] = gpus;
data["gpu_highest_vram"] = highestVramId;
} else {
data["gpus"] = [];
}
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
Promise.resolve();
}
);
}
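
For reference, a rough example of the settings.json these functions persist on a machine where one GPU and CUDA 12 are detected — the values are illustrative; only the shape follows DEFALT_SETTINGS and the update functions above:

// Illustrative contents of the persisted NVIDIA settings file
const exampleSettings = {
  notify: true,
  run_mode: "gpu",                   // flipped from "cpu" by updateCudaExistence()
  nvidia_driver: { exist: true, version: "545.29.06" },
  cuda: { exist: true, version: "12" },
  gpus: [{ id: "0", vram: "24576" }],
  gpu_highest_vram: "0",
};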

View File

@ -21,6 +21,7 @@ import {
Model,
joinPath,
InferenceExtension,
log,
} from "@janhq/core";
import { requestInference } from "./helpers/sse";
import { ulid } from "ulid";
@ -270,6 +271,7 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
}
message.status = MessageStatus.Error;
events.emit(EventName.OnMessageUpdate, message);
log(`[APP]::Error: ${err.message}`);
},
});
}

View File

@ -1,18 +1,17 @@
const fs = require("fs");
const fsPromises = fs.promises;
const path = require("path");
const { exec, spawn } = require("child_process");
const tcpPortUsed = require("tcp-port-used");
const fetchRetry = require("fetch-retry")(global.fetch);
const osUtils = require("os-utils");
const { readFileSync, writeFileSync, existsSync } = require("fs");
const { log } = require("@janhq/core/node");
// The PORT to use for the Nitro subprocess
const PORT = 3928;
const LOCAL_HOST = "127.0.0.1";
const NITRO_HTTP_SERVER_URL = `http://${LOCAL_HOST}:${PORT}`;
const NITRO_HTTP_LOAD_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/loadmodel`;
const NITRO_HTTP_UNLOAD_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/unloadModel`;
const NITRO_HTTP_VALIDATE_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/modelstatus`;
const NITRO_HTTP_KILL_URL = `${NITRO_HTTP_SERVER_URL}/processmanager/destroy`;
const SUPPORTED_MODEL_FORMAT = ".gguf";
@ -23,21 +22,6 @@ const NVIDIA_INFO_FILE = path.join(
"settings.json"
);
const DEFALT_SETTINGS = {
notify: true,
run_mode: "cpu",
nvidia_driver: {
exist: false,
version: "",
},
cuda: {
exist: false,
version: "",
},
gpus: [],
gpu_highest_vram: "",
};
// The subprocess instance for Nitro
let subprocess = undefined;
let currentModelFile: string = undefined;
@ -54,137 +38,6 @@ function stopModel(): Promise<void> {
return killSubprocess();
}
/**
* Validate nvidia and cuda for linux and windows
*/
async function updateNvidiaDriverInfo(): Promise<void> {
exec(
"nvidia-smi --query-gpu=driver_version --format=csv,noheader",
(error, stdout) => {
let data;
try {
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
} catch (error) {
data = DEFALT_SETTINGS;
}
if (!error) {
const firstLine = stdout.split("\n")[0].trim();
data["nvidia_driver"].exist = true;
data["nvidia_driver"].version = firstLine;
} else {
data["nvidia_driver"].exist = false;
}
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
Promise.resolve();
}
);
}
function isExists(file: string, paths: string[]): boolean {
return paths.some((p) => existsSync(path.join(p, file)));
}
function updateCudaExistence() {
let filesCuda12: string[];
let filesCuda11: string[];
let paths: string[];
let cudaVersion: string = "";
if (process.platform === "win32") {
filesCuda12 = ["cublas64_12.dll", "cudart64_12.dll", "cublasLt64_12.dll"];
filesCuda11 = ["cublas64_11.dll", "cudart64_11.dll", "cublasLt64_11.dll"];
paths = process.env.PATH ? process.env.PATH.split(path.delimiter) : [];
} else {
filesCuda12 = ["libcudart.so.12", "libcublas.so.12", "libcublasLt.so.12"];
filesCuda11 = ["libcudart.so.11.0", "libcublas.so.11", "libcublasLt.so.11"];
paths = process.env.LD_LIBRARY_PATH
? process.env.LD_LIBRARY_PATH.split(path.delimiter)
: [];
paths.push("/usr/lib/x86_64-linux-gnu/");
}
let cudaExists = filesCuda12.every(
(file) => existsSync(file) || isExists(file, paths)
);
if (!cudaExists) {
cudaExists = filesCuda11.every(
(file) => existsSync(file) || isExists(file, paths)
);
if (cudaExists) {
cudaVersion = "11";
}
} else {
cudaVersion = "12";
}
let data;
try {
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
} catch (error) {
data = DEFALT_SETTINGS;
}
data["cuda"].exist = cudaExists;
data["cuda"].version = cudaVersion;
if (cudaExists) {
data.run_mode = "gpu";
}
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
}
async function updateGpuInfo(): Promise<void> {
exec(
"nvidia-smi --query-gpu=index,memory.total --format=csv,noheader,nounits",
(error, stdout) => {
let data;
try {
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
} catch (error) {
data = DEFALT_SETTINGS;
}
if (!error) {
// Get GPU info and gpu has higher memory first
let highestVram = 0;
let highestVramId = "0";
let gpus = stdout
.trim()
.split("\n")
.map((line) => {
let [id, vram] = line.split(", ");
vram = vram.replace(/\r/g, "");
if (parseFloat(vram) > highestVram) {
highestVram = parseFloat(vram);
highestVramId = id;
}
return { id, vram };
});
data["gpus"] = gpus;
data["gpu_highest_vram"] = highestVramId;
} else {
data["gpus"] = [];
}
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
Promise.resolve();
}
);
}
async function updateNvidiaInfo() {
if (process.platform !== "darwin") {
await Promise.all([
updateNvidiaDriverInfo(),
updateCudaExistence(),
updateGpuInfo(),
]);
}
}
/**
* Initializes a Nitro subprocess to load a machine learning model.
* @param wrapper - The model wrapper.
@ -238,31 +91,28 @@ async function initModel(wrapper: any): Promise<ModelOperationResponse> {
async function loadModel(nitroResourceProbe: any | undefined) {
// Gather system information for CPU physical cores and memory
if (!nitroResourceProbe) nitroResourceProbe = await getResourcesInfo();
return (
killSubprocess()
.then(() => tcpPortUsed.waitUntilFree(PORT, 300, 5000))
// wait for 500ms to make sure the port is free for windows platform
.then(() => {
if (process.platform === "win32") {
return sleep(500);
} else {
return sleep(0);
}
})
.then(() => spawnNitroProcess(nitroResourceProbe))
.then(() => loadLLMModel(currentSettings))
.then(validateModelStatus)
.catch((err) => {
console.error("error: ", err);
// TODO: Broadcast error so app could display proper error message
return { error: err, currentModelFile };
})
);
}
// Add function sleep
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
return killSubprocess()
.then(() => tcpPortUsed.waitUntilFree(PORT, 300, 5000))
.then(() => {
/**
* There is a problem with the Windows process manager:
* we should wait a while to make sure the port is free and the subprocess is killed.
* The tested threshold is 500ms.
**/
if (process.platform === "win32") {
return new Promise((resolve) => setTimeout(resolve, 500));
} else {
return Promise.resolve();
}
})
.then(() => spawnNitroProcess(nitroResourceProbe))
.then(() => loadLLMModel(currentSettings))
.then(validateModelStatus)
.catch((err) => {
log(`[NITRO]::Error: ${err}`);
// TODO: Broadcast error so app could display proper error message
return { error: err, currentModelFile };
});
}
function promptTemplateConverter(promptTemplate) {
@ -312,6 +162,7 @@ function promptTemplateConverter(promptTemplate) {
* @returns A Promise that resolves when the model is loaded successfully, or rejects with an error message if the model is not found or fails to load.
*/
function loadLLMModel(settings): Promise<Response> {
log(`[NITRO]::Debug: Loading model with params ${settings}`);
return fetchRetry(NITRO_HTTP_LOAD_MODEL_URL, {
method: "POST",
headers: {
@ -320,6 +171,8 @@ function loadLLMModel(settings): Promise<Response> {
body: JSON.stringify(settings),
retries: 3,
retryDelay: 500,
}).catch((err) => {
log(`[NITRO]::Error: Load model failed with error ${err}`);
});
}
@ -360,7 +213,8 @@ async function validateModelStatus(): Promise<ModelOperationResponse> {
async function killSubprocess(): Promise<void> {
const controller = new AbortController();
setTimeout(() => controller.abort(), 5000);
console.debug("Start requesting to kill Nitro...");
log(`[NITRO]::Debug: Request to kill Nitro`);
return fetch(NITRO_HTTP_KILL_URL, {
method: "DELETE",
signal: controller.signal,
@ -371,20 +225,17 @@ async function killSubprocess(): Promise<void> {
})
.catch(() => {})
.then(() => tcpPortUsed.waitUntilFree(PORT, 300, 5000))
.then(() => console.debug("Nitro is killed"));
.then(() => log(`[NITRO]::Debug: Nitro process is terminated`));
}
/**
* Look for the Nitro binary and execute it
* Using child-process to spawn the process
* Should run exactly platform specified Nitro binary version
*/
/**
* Spawns a Nitro subprocess.
* @param nitroResourceProbe - The Nitro resource probe.
* @returns A promise that resolves when the Nitro subprocess is started.
*/
function spawnNitroProcess(nitroResourceProbe: any): Promise<any> {
console.debug("Starting Nitro subprocess...");
log(`[NITRO]::Debug: Spawning Nitro subprocess...`);
return new Promise(async (resolve, reject) => {
let binaryFolder = path.join(__dirname, "bin"); // Current directory by default
let cudaVisibleDevices = "";
@ -426,7 +277,7 @@ function spawnNitroProcess(nitroResourceProbe: any): Promise<any> {
const binaryPath = path.join(binaryFolder, binaryName);
// Execute the binary
subprocess = spawn(binaryPath, [1, LOCAL_HOST, PORT], {
subprocess = spawn(binaryPath, ["1", LOCAL_HOST, PORT.toString()], {
cwd: binaryFolder,
env: {
...process.env,
@ -436,16 +287,15 @@ function spawnNitroProcess(nitroResourceProbe: any): Promise<any> {
// Handle subprocess output
subprocess.stdout.on("data", (data) => {
console.debug(`stdout: ${data}`);
log(`[NITRO]::Debug: ${data}`);
});
subprocess.stderr.on("data", (data) => {
console.error("subprocess error:" + data.toString());
console.error(`stderr: ${data}`);
log(`[NITRO]::Error: ${data}`);
});
subprocess.on("close", (code) => {
console.debug(`child process exited with code ${code}`);
log(`[NITRO]::Debug: Nitro exited with code: ${code}`);
subprocess = null;
reject(`child process exited with code ${code}`);
});
@ -463,7 +313,7 @@ function spawnNitroProcess(nitroResourceProbe: any): Promise<any> {
function getResourcesInfo(): Promise<ResourcesInfo> {
return new Promise(async (resolve) => {
const cpu = await osUtils.cpuCount();
console.debug("cpu: ", cpu);
log(`[NITRO]::CPU information - ${cpu}`);
const response: ResourcesInfo = {
numCpuPhysicalCore: cpu,
memAvailable: 0,
@ -472,6 +322,23 @@ function getResourcesInfo(): Promise<ResourcesInfo> {
});
}
/**
* Retrieves GPU information and persists it to settings.json.
* Called when the extension is loaded to turn on GPU acceleration if supported.
*/
async function updateNvidiaInfo() {
if (process.platform !== "darwin") {
await Promise.all([
updateNvidiaDriverInfo(),
updateCudaExistence(),
updateGpuInfo(),
]);
}
}
/**
* Retrieve current nitro process
*/
const getCurrentNitroProcessInfo = (): Promise<any> => {
nitroProcessInfo = {
isRunning: subprocess != null,
@ -479,6 +346,11 @@ const getCurrentNitroProcessInfo = (): Promise<any> => {
return nitroProcessInfo;
};
/**
* Every module should have a dispose function
* This will be called when the extension is unloaded and should clean up any resources
* Also called when app is closed
*/
function dispose() {
// clean other registered resources here
killSubprocess();

View File

@ -22,6 +22,7 @@ module.exports = {
process.env.INFERENCE_URL ||
"http://127.0.0.1:3928/inferences/llamacpp/chat_completion"
),
TROUBLESHOOTING_URL: JSON.stringify("https://jan.ai/guides/troubleshooting")
}),
],
output: {

View File

@ -5,7 +5,6 @@ import {
abortDownload,
getResourcePath,
getUserSpace,
fileStat,
InferenceEngine,
joinPath,
ModelExtension,
@ -281,7 +280,7 @@ export default class JanModelExtension implements ModelExtension {
if (file.endsWith('.json')) continue
const path = await joinPath([JanModelExtension._homeDir, dirName, file])
const fileStats = await fileStat(path)
const fileStats = await fs.fileStat(path)
if (fileStats.isDirectory) continue
binaryFileSize = fileStats.size
binaryFileName = file

View File

@ -9,12 +9,13 @@ dotenv.config();
const JAN_API_HOST = process.env.JAN_API_HOST || "127.0.0.1";
const JAN_API_PORT = Number.parseInt(process.env.JAN_API_PORT || "1337");
const serverLogPath = path.join(os.homedir(), "jan", "server.log");
const serverLogPath = path.join(os.homedir(), "jan", "logs", "server.log");
let server: any | undefined = undefined;
export const startServer = async (schemaPath?: string, baseDir?: string) => {
try {
log(`[API]::Debug: Starting JAN API server...`, "server.log")
server = fastify({
logger: {
level: "info",
@ -62,17 +63,18 @@ export const startServer = async (schemaPath?: string, baseDir?: string) => {
host: JAN_API_HOST,
})
.then(() => {
log(`JAN API listening at: http://${JAN_API_HOST}:${JAN_API_PORT}`);
log(`[API]::Debug: JAN API listening at: http://${JAN_API_HOST}:${JAN_API_PORT}`);
});
} catch (e) {
log(e);
log(`[API]::Error: ${e}`);
}
};
export const stopServer = async () => {
try {
log(`[API]::Debug: Server stopped`, "server.log")
await server.close();
} catch (e) {
log(e);
log(`[API]::Error: ${e}`);
}
};

View File

@ -3,6 +3,7 @@
import { useContext, useEffect, useState } from 'react'
import { fs } from '@janhq/core'
import {
Switch,
Button,
@ -20,6 +21,7 @@ import ShortCut from '@/containers/Shortcut'
import { FeatureToggleContext } from '@/context/FeatureToggle'
import { useSettings } from '@/hooks/useSettings'
import { toaster } from '@/containers/Toast'
const serverEnabledAtom = atom<boolean>(false)
@ -37,6 +39,16 @@ const Advanced = () => {
})
}, [])
const clearLogs = async () => {
if (await fs.existsSync(`file://logs`)) {
await fs.rmdirSync(`file://logs`, { recursive: true })
}
toaster({
title: 'Logs cleared',
description: 'All logs have been cleared.',
})
}
return (
<div className="block w-full">
{/* CPU / GPU switching */}
@ -137,6 +149,19 @@ const Advanced = () => {
</Button>
</div>
)}
<div className="flex w-full items-start justify-between border-b border-border py-4 first:pt-0 last:border-none">
<div className="w-4/5 flex-shrink-0 space-y-1.5">
<div className="flex gap-x-2">
<h6 className="text-sm font-semibold capitalize">Clear logs</h6>
</div>
<p className="whitespace-pre-wrap leading-relaxed">
Clear all logs from Jan app.
</p>
</div>
<Button size="sm" themes="secondary" onClick={clearLogs}>
Clear
</Button>
</div>
<div className="flex w-full items-start justify-between border-b border-border py-4 first:pt-0 last:border-none">
<div className="w-4/5 flex-shrink-0 space-y-1.5">
<div className="flex gap-x-2">