refactor: add app and nitro log - resolve dependencies issue (#1447)

* refactor: add app and nitro log - resolve dependencies issue

* fix: update guidance message on inference error

* chore: add timestamp to log files

* chore: add clear logs action
This commit is contained in:
Louis 2024-01-10 16:43:28 +07:00 committed by GitHub
parent a3f14d50ff
commit 74ed081e4f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 356 additions and 267 deletions

View File

@ -53,6 +53,8 @@ export default [
'crypto', 'crypto',
'url', 'url',
'http', 'http',
'os',
'util'
], ],
watch: { watch: {
include: 'src/node/**', include: 'src/node/**',

View File

@ -12,6 +12,7 @@ export enum AppRoute {
baseName = 'baseName', baseName = 'baseName',
startServer = 'startServer', startServer = 'startServer',
stopServer = 'stopServer', stopServer = 'stopServer',
log = 'log'
} }
export enum AppEvent { export enum AppEvent {

View File

@ -77,13 +77,12 @@ const openExternalUrl: (url: string) => Promise<any> = (url) =>
const getResourcePath: () => Promise<string> = () => global.core.api?.getResourcePath() const getResourcePath: () => Promise<string> = () => global.core.api?.getResourcePath()
/** /**
* Gets the file's stats. * Log to file from browser processes.
* *
* @param path - The path to the file. * @param message - Message to log.
* @returns {Promise<FileStat>} - A promise that resolves with the file's stats.
*/ */
const fileStat: (path: string) => Promise<FileStat | undefined> = (path) => const log: (message: string, fileName?: string) => void = (message, fileName) =>
global.core.api?.fileStat(path) global.core.api?.log(message, fileName)
/** /**
* Register extension point function type definition * Register extension point function type definition
@ -108,5 +107,6 @@ export {
joinPath, joinPath,
openExternalUrl, openExternalUrl,
baseName, baseName,
fileStat, log,
FileStat
} }

View File

@ -1,3 +1,5 @@
import { FileStat } from "./types"
/** /**
* Writes data to a file at the specified path. * Writes data to a file at the specified path.
* @returns {Promise<any>} A Promise that resolves when the file is written successfully. * @returns {Promise<any>} A Promise that resolves when the file is written successfully.
@ -58,6 +60,17 @@ const syncFile: (src: string, dest: string) => Promise<any> = (src, dest) =>
*/ */
const copyFileSync = (...args: any[]) => global.core.api?.copyFileSync(...args) const copyFileSync = (...args: any[]) => global.core.api?.copyFileSync(...args)
/**
* Gets the file's stats.
*
* @param path - The path to the file.
* @returns {Promise<FileStat>} - A promise that resolves with the file's stats.
*/
const fileStat: (path: string) => Promise<FileStat | undefined> = (path) =>
global.core.api?.fileStat(path)
// TODO: Export `dummy` fs functions automatically // TODO: Export `dummy` fs functions automatically
// Currently adding these manually // Currently adding these manually
export const fs = { export const fs = {
@ -71,4 +84,5 @@ export const fs = {
appendFileSync, appendFileSync,
copyFileSync, copyFileSync,
syncFile, syncFile,
fileStat
} }

View File

@ -3,11 +3,6 @@ import { JanApiRouteConfiguration, RouteConfiguration } from './configuration'
import { join } from 'path' import { join } from 'path'
import { ContentType, MessageStatus, Model, ThreadMessage } from './../../../index' import { ContentType, MessageStatus, Model, ThreadMessage } from './../../../index'
import fetch from 'node-fetch'
import { ulid } from 'ulid'
import request from 'request'
const progress = require('request-progress')
const os = require('os') const os = require('os')
const path = join(os.homedir(), 'jan') const path = join(os.homedir(), 'jan')
@ -209,6 +204,7 @@ export const createMessage = async (threadId: string, message: any) => {
const threadMessagesFileName = 'messages.jsonl' const threadMessagesFileName = 'messages.jsonl'
try { try {
const { ulid } = require('ulid')
const msgId = ulid() const msgId = ulid()
const createdAt = Date.now() const createdAt = Date.now()
const threadMessage: ThreadMessage = { const threadMessage: ThreadMessage = {
@ -260,8 +256,10 @@ export const downloadModel = async (modelId: string) => {
// path to model binary // path to model binary
const modelBinaryPath = join(directoryPath, modelId) const modelBinaryPath = join(directoryPath, modelId)
const rq = request(model.source_url)
const request = require('request')
const rq = request(model.source_url)
const progress = require('request-progress')
progress(rq, {}) progress(rq, {})
.on('progress', function (state: any) { .on('progress', function (state: any) {
console.log('progress', JSON.stringify(state, null, 2)) console.log('progress', JSON.stringify(state, null, 2))
@ -324,6 +322,7 @@ export const chatCompletions = async (request: any, reply: any) => {
} }
console.debug(apiUrl) console.debug(apiUrl)
console.debug(JSON.stringify(headers)) console.debug(JSON.stringify(headers))
const fetch = require('node-fetch')
const response = await fetch(apiUrl, { const response = await fetch(apiUrl, {
method: 'POST', method: 'POST',
headers: headers, headers: headers,

View File

@ -1,11 +1,10 @@
import { DownloadRoute } from '../../../api' import { DownloadRoute } from '../../../api'
import { join } from 'path' import { join } from 'path'
import { userSpacePath, DownloadManager, HttpServer } from '../../index' import { userSpacePath } from '../../extension/manager'
import { DownloadManager } from '../../download'
import { HttpServer } from '../HttpServer'
import { createWriteStream } from 'fs' import { createWriteStream } from 'fs'
const request = require('request')
const progress = require('request-progress')
export const downloadRouter = async (app: HttpServer) => { export const downloadRouter = async (app: HttpServer) => {
app.post(`/${DownloadRoute.downloadFile}`, async (req, res) => { app.post(`/${DownloadRoute.downloadFile}`, async (req, res) => {
const body = JSON.parse(req.body as any) const body = JSON.parse(req.body as any)
@ -19,6 +18,9 @@ export const downloadRouter = async (app: HttpServer) => {
const localPath = normalizedArgs[1] const localPath = normalizedArgs[1]
const fileName = localPath.split('/').pop() ?? '' const fileName = localPath.split('/').pop() ?? ''
const request = require('request')
const progress = require('request-progress')
const rq = request(normalizedArgs[0]) const rq = request(normalizedArgs[0])
progress(rq, {}) progress(rq, {})
.on('progress', function (state: any) { .on('progress', function (state: any) {

View File

@ -1,12 +1,10 @@
import { join, extname } from 'path' import { join, extname } from 'path'
import { ExtensionRoute } from '../../../api' import { ExtensionRoute } from '../../../api/index'
import { import { userSpacePath } from '../../extension/manager'
userSpacePath, import { ModuleManager } from '../../module'
ModuleManager, import { getActiveExtensions, installExtensions } from '../../extension/store'
getActiveExtensions, import { HttpServer } from '../HttpServer'
installExtensions,
HttpServer,
} from '../../index'
import { readdirSync } from 'fs' import { readdirSync } from 'fs'
export const extensionRouter = async (app: HttpServer) => { export const extensionRouter = async (app: HttpServer) => {

View File

@ -1,6 +1,7 @@
import { FileSystemRoute } from '../../../api' import { FileSystemRoute } from '../../../api'
import { join } from 'path' import { join } from 'path'
import { HttpServer, userSpacePath } from '../../index' import { HttpServer } from '../HttpServer'
import { userSpacePath } from '../../extension/manager'
export const fsRouter = async (app: HttpServer) => { export const fsRouter = async (app: HttpServer) => {
const moduleName = 'fs' const moduleName = 'fs'

View File

@ -1,5 +1,9 @@
import { HttpServer } from '../HttpServer' import { HttpServer } from '../HttpServer'
import { commonRouter, threadRouter, fsRouter, extensionRouter, downloadRouter } from './index' import { commonRouter } from './common'
import { threadRouter } from './thread'
import { fsRouter } from './fs'
import { extensionRouter } from './extension'
import { downloadRouter } from './download'
export const v1Router = async (app: HttpServer) => { export const v1Router = async (app: HttpServer) => {
// MARK: External Routes // MARK: External Routes

View File

@ -1,4 +1,3 @@
import { Request } from "request";
/** /**
* Manages file downloads and network requests. * Manages file downloads and network requests.
@ -18,7 +17,7 @@ export class DownloadManager {
* @param {string} fileName - The name of the file. * @param {string} fileName - The name of the file.
* @param {Request | undefined} request - The network request to set, or undefined to clear the request. * @param {Request | undefined} request - The network request to set, or undefined to clear the request.
*/ */
setRequest(fileName: string, request: Request | undefined) { setRequest(fileName: string, request: any | undefined) {
this.networkRequests[fileName] = request; this.networkRequests[fileName] = request;
} }
} }

View File

@ -1,7 +1,5 @@
import { rmdirSync } from 'fs' import { rmdirSync } from 'fs'
import { resolve, join } from 'path' import { resolve, join } from 'path'
import { manifest, extract } from 'pacote'
import * as Arborist from '@npmcli/arborist'
import { ExtensionManager } from './manager' import { ExtensionManager } from './manager'
/** /**
@ -41,6 +39,7 @@ export default class Extension {
* @param {Object} [options] Options provided to pacote when fetching the manifest. * @param {Object} [options] Options provided to pacote when fetching the manifest.
*/ */
constructor(origin?: string, options = {}) { constructor(origin?: string, options = {}) {
const Arborist = require('@npmcli/arborist')
const defaultOpts = { const defaultOpts = {
version: false, version: false,
fullMetadata: false, fullMetadata: false,
@ -74,13 +73,15 @@ export default class Extension {
async getManifest() { async getManifest() {
// Get the package's manifest (package.json object) // Get the package's manifest (package.json object)
try { try {
const mnf = await manifest(this.specifier, this.installOptions) await import('pacote').then((pacote) => {
return pacote.manifest(this.specifier, this.installOptions).then((mnf) => {
// set the Package properties based on the it's manifest // set the Package properties based on the it's manifest
this.name = mnf.name this.name = mnf.name
this.version = mnf.version this.version = mnf.version
this.main = mnf.main this.main = mnf.main
this.description = mnf.description this.description = mnf.description
})
})
} catch (error) { } catch (error) {
throw new Error(`Package ${this.origin} does not contain a valid manifest: ${error}`) throw new Error(`Package ${this.origin} does not contain a valid manifest: ${error}`)
} }
@ -99,7 +100,8 @@ export default class Extension {
await this.getManifest() await this.getManifest()
// Install the package in a child folder of the given folder // Install the package in a child folder of the given folder
await extract( const pacote = await import('pacote')
await pacote.extract(
this.specifier, this.specifier,
join(ExtensionManager.instance.extensionsPath ?? '', this.name ?? ''), join(ExtensionManager.instance.extensionsPath ?? '', this.name ?? ''),
this.installOptions, this.installOptions,
@ -164,10 +166,13 @@ export default class Extension {
* @returns the latest available version if a new version is available or false if not. * @returns the latest available version if a new version is available or false if not.
*/ */
async isUpdateAvailable() { async isUpdateAvailable() {
if (this.origin) { return import('pacote').then((pacote) => {
const mnf = await manifest(this.origin) if (this.origin) {
return mnf.version !== this.version ? mnf.version : false return pacote.manifest(this.origin).then((mnf) => {
} return mnf.version !== this.version ? mnf.version : false
})
}
})
} }
/** /**

View File

@ -1,7 +1,6 @@
import { join, resolve } from "path"; import { join, resolve } from "path";
import { existsSync, mkdirSync, writeFileSync } from "fs"; import { existsSync, mkdirSync, writeFileSync } from "fs";
import { init } from "./index";
import { homedir } from "os" import { homedir } from "os"
/** /**
* Manages extension installation and migration. * Manages extension installation and migration.
@ -20,22 +19,6 @@ export class ExtensionManager {
} }
} }
/**
* Sets up the extensions by initializing the `extensions` module with the `confirmInstall` and `extensionsPath` options.
* The `confirmInstall` function always returns `true` to allow extension installation.
* The `extensionsPath` option specifies the path to install extensions to.
*/
setupExtensions() {
init({
// Function to check from the main process that user wants to install a extension
confirmInstall: async (_extensions: string[]) => {
return true;
},
// Path to install extension to
extensionsPath: join(userSpacePath, "extensions"),
});
}
setExtensionsPath(extPath: string) { setExtensionsPath(extPath: string) {
// Create folder if it does not exist // Create folder if it does not exist
let extDir; let extDir;

View File

@ -3,16 +3,24 @@ import util from 'util'
import path from 'path' import path from 'path'
import os from 'os' import os from 'os'
const appDir = path.join(os.homedir(), 'jan') export const logDir = path.join(os.homedir(), 'jan', 'logs')
export const logPath = path.join(appDir, 'app.log') export const log = function (message: string, fileName: string = 'app.log') {
if (!fs.existsSync(logDir)) {
fs.mkdirSync(logDir, { recursive: true })
}
if (!message.startsWith('[')) {
message = `[APP]::${message}`
}
export const log = function (d: any) { message = `${new Date().toISOString()} ${message}`
if (fs.existsSync(appDir)) {
var log_file = fs.createWriteStream(logPath, { if (fs.existsSync(logDir)) {
var log_file = fs.createWriteStream(path.join(logDir, fileName), {
flags: 'a', flags: 'a',
}) })
log_file.write(util.format(d) + '\n') log_file.write(util.format(message) + '\n')
log_file.close() log_file.close()
console.debug(message)
} }
} }

View File

@ -1,9 +1,9 @@
import { app, ipcMain, shell, nativeTheme } from 'electron' import { app, ipcMain, shell } from 'electron'
import { join, basename } from 'path' import { join, basename } from 'path'
import { WindowManager } from './../managers/window' import { WindowManager } from './../managers/window'
import { getResourcePath, userSpacePath } from './../utils/path' import { getResourcePath, userSpacePath } from './../utils/path'
import { AppRoute } from '@janhq/core' import { AppRoute } from '@janhq/core'
import { ExtensionManager, ModuleManager } from '@janhq/core/node' import { ModuleManager, init, log } from '@janhq/core/node'
import { startServer, stopServer } from '@janhq/server' import { startServer, stopServer } from '@janhq/server'
export function handleAppIPCs() { export function handleAppIPCs() {
@ -59,7 +59,7 @@ export function handleAppIPCs() {
app.isPackaged ? join(getResourcePath(), 'docs', 'openapi') : undefined app.isPackaged ? join(getResourcePath(), 'docs', 'openapi') : undefined
) )
) )
/** /**
* Stop Jan API Server. * Stop Jan API Server.
*/ */
@ -82,8 +82,22 @@ export function handleAppIPCs() {
require.resolve(join(userSpacePath, 'extensions', modulePath)) require.resolve(join(userSpacePath, 'extensions', modulePath))
] ]
} }
ExtensionManager.instance.setupExtensions() init({
// Function to check from the main process that user wants to install a extension
confirmInstall: async (_extensions: string[]) => {
return true
},
// Path to install extension to
extensionsPath: join(userSpacePath, 'extensions'),
})
WindowManager.instance.currentWindow?.reload() WindowManager.instance.currentWindow?.reload()
} }
}) })
/**
* Log message to log file.
*/
ipcMain.handle(AppRoute.log, async (_event, message, fileName) =>
log(message, fileName)
)
} }

View File

@ -6,7 +6,7 @@ import { FileManagerRoute } from '@janhq/core'
import { userSpacePath, getResourcePath } from './../utils/path' import { userSpacePath, getResourcePath } from './../utils/path'
import fs from 'fs' import fs from 'fs'
import { join } from 'path' import { join } from 'path'
import { FileStat } from '@janhq/core/.' import { FileStat } from '@janhq/core'
/** /**
* Handles file system extensions operations. * Handles file system extensions operations.

View File

@ -6,7 +6,7 @@ import { createUserSpace } from './utils/path'
* Managers * Managers
**/ **/
import { WindowManager } from './managers/window' import { WindowManager } from './managers/window'
import { ExtensionManager, ModuleManager } from '@janhq/core/node' import { log, ModuleManager } from '@janhq/core/node'
/** /**
* IPC Handlers * IPC Handlers
@ -17,14 +17,19 @@ import { handleFileMangerIPCs } from './handlers/fileManager'
import { handleAppIPCs } from './handlers/app' import { handleAppIPCs } from './handlers/app'
import { handleAppUpdates } from './handlers/update' import { handleAppUpdates } from './handlers/update'
import { handleFsIPCs } from './handlers/fs' import { handleFsIPCs } from './handlers/fs'
/**
* Utils
**/
import { migrateExtensions } from './utils/migration' import { migrateExtensions } from './utils/migration'
import { cleanUpAndQuit } from './utils/clean' import { cleanUpAndQuit } from './utils/clean'
import { setupExtensions } from './utils/extension'
app app
.whenReady() .whenReady()
.then(createUserSpace) .then(createUserSpace)
.then(migrateExtensions) .then(migrateExtensions)
.then(ExtensionManager.instance.setupExtensions) .then(setupExtensions)
.then(setupMenu) .then(setupMenu)
.then(handleIPCs) .then(handleIPCs)
.then(handleAppUpdates) .then(handleAppUpdates)
@ -93,5 +98,5 @@ function handleIPCs() {
*/ */
process.on('uncaughtException', function (err) { process.on('uncaughtException', function (err) {
// TODO: Write error to log file in #1447 // TODO: Write error to log file in #1447
console.error(err) log(`Error: ${err}`)
}) })

View File

@ -0,0 +1,13 @@
import { init, userSpacePath } from '@janhq/core/node'
import path from 'path'
/**
 * Initializes the core extension system.
 * Extensions are installed under `<userSpacePath>/extensions`, and the
 * install-confirmation hook unconditionally approves install requests.
 */
export const setupExtensions = () => {
  // Directory that extensions get installed into
  const extensionsPath = path.join(userSpacePath, 'extensions')

  init({
    // Main-process hook asking whether the user allows installing an extension —
    // always approved here
    confirmInstall: async (_extensions: string[]) => true,
    extensionsPath,
  })
}

View File

@ -50,6 +50,7 @@
"bundleDependencies": [ "bundleDependencies": [
"tcp-port-used", "tcp-port-used",
"fetch-retry", "fetch-retry",
"os-utils" "os-utils",
"@janhq/core"
] ]
} }

View File

@ -1,5 +1,6 @@
declare const MODULE: string; declare const MODULE: string;
declare const INFERENCE_URL: string; declare const INFERENCE_URL: string;
declare const TROUBLESHOOTING_URL: string;
/** /**
* The parameters for the initModel function. * The parameters for the initModel function.

View File

@ -0,0 +1,138 @@
/**
 * Default GPU settings.
 * Fallback accelerator configuration used whenever the on-disk settings file
 * (NVIDIA_INFO_FILE) is missing or unparseable.
 * NOTE(review): the identifier is misspelled ("DEFALT" vs "DEFAULT") — kept
 * as-is because sibling functions in this file reference it by this name.
 **/
const DEFALT_SETTINGS = {
  notify: true,
  // Inference execution mode; switched to "gpu" when a usable CUDA install is detected
  run_mode: "cpu",
  // NVIDIA driver presence/version as reported by `nvidia-smi`
  nvidia_driver: {
    exist: false,
    version: "",
  },
  // CUDA runtime library presence and major version ("11" or "12")
  cuda: {
    exist: false,
    version: "",
  },
  // Detected GPUs as { id, vram } entries
  gpus: [],
  // Index (as string) of the GPU with the most VRAM
  gpu_highest_vram: "",
};
/**
* Validate nvidia and cuda for linux and windows
*/
/**
 * Queries the installed NVIDIA driver version via `nvidia-smi` and records the
 * result (existence + version) into NVIDIA_INFO_FILE, falling back to
 * DEFALT_SETTINGS when the file is missing or unparseable.
 *
 * Fix: the original called `Promise.resolve()` inside the `exec` callback,
 * which is a no-op — the async function's promise settled immediately, so
 * callers awaiting it (e.g. via `Promise.all`) did not actually wait for the
 * probe to finish. The work is now wrapped in a `new Promise` that resolves
 * only after the settings file has been written.
 *
 * @returns A promise that resolves once the settings file has been updated.
 */
async function updateNvidiaDriverInfo(): Promise<void> {
  return new Promise((resolve) => {
    exec(
      "nvidia-smi --query-gpu=driver_version --format=csv,noheader",
      (error, stdout) => {
        let data;
        try {
          data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
        } catch (error) {
          // Settings file missing/corrupt — start from defaults
          data = DEFALT_SETTINGS;
        }

        if (!error) {
          // First line of nvidia-smi output is the driver version string
          const firstLine = stdout.split("\n")[0].trim();
          data["nvidia_driver"].exist = true;
          data["nvidia_driver"].version = firstLine;
        } else {
          // nvidia-smi unavailable or failed — no NVIDIA driver present
          data["nvidia_driver"].exist = false;
        }

        writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
        resolve();
      }
    );
  });
}
/**
 * Reports whether a file is present in at least one of the given directories.
 *
 * @param file - File name to look for.
 * @param paths - Candidate directories to search.
 * @returns true if `file` exists in any of `paths`, false otherwise.
 */
function checkFileExistenceInPaths(file: string, paths: string[]): boolean {
  for (const dir of paths) {
    if (existsSync(path.join(dir, file))) {
      return true;
    }
  }
  return false;
}
/**
 * Probes the system for CUDA 12 (preferred) or CUDA 11 runtime libraries and
 * records the outcome into NVIDIA_INFO_FILE. When CUDA is found, `run_mode`
 * is switched to "gpu".
 */
function updateCudaExistence() {
  const isWindows = process.platform === "win32";

  // Required runtime library names per CUDA major version, per platform
  const cuda12Files = isWindows
    ? ["cublas64_12.dll", "cudart64_12.dll", "cublasLt64_12.dll"]
    : ["libcudart.so.12", "libcublas.so.12", "libcublasLt.so.12"];
  const cuda11Files = isWindows
    ? ["cublas64_11.dll", "cudart64_11.dll", "cublasLt64_11.dll"]
    : ["libcudart.so.11.0", "libcublas.so.11", "libcublasLt.so.11"];

  // Directories to search: PATH on Windows, LD_LIBRARY_PATH (plus the Debian
  // multiarch lib dir) elsewhere
  let searchPaths: string[];
  if (isWindows) {
    searchPaths = process.env.PATH ? process.env.PATH.split(path.delimiter) : [];
  } else {
    searchPaths = process.env.LD_LIBRARY_PATH
      ? process.env.LD_LIBRARY_PATH.split(path.delimiter)
      : [];
    searchPaths.push("/usr/lib/x86_64-linux-gnu/");
  }

  // A CUDA version counts as present only when every library is found, either
  // as a direct path or inside one of the search directories
  const allPresent = (files: string[]) =>
    files.every(
      (file) => existsSync(file) || checkFileExistenceInPaths(file, searchPaths)
    );

  let cudaVersion: string = "";
  let cudaExists = allPresent(cuda12Files);
  if (cudaExists) {
    cudaVersion = "12";
  } else {
    cudaExists = allPresent(cuda11Files);
    if (cudaExists) {
      cudaVersion = "11";
    }
  }

  let data;
  try {
    data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
  } catch (error) {
    // Settings file missing/corrupt — start from defaults
    data = DEFALT_SETTINGS;
  }

  data["cuda"].exist = cudaExists;
  data["cuda"].version = cudaVersion;
  if (cudaExists) {
    data.run_mode = "gpu";
  }

  writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
}
/**
 * Queries GPU indices and total VRAM via `nvidia-smi`, records the GPU list
 * and the id of the GPU with the most VRAM into NVIDIA_INFO_FILE.
 *
 * Fix: the original called `Promise.resolve()` inside the `exec` callback,
 * which is a no-op — the async function's promise settled immediately, so
 * callers awaiting it (e.g. via `Promise.all`) did not actually wait for the
 * probe to finish. The work is now wrapped in a `new Promise` that resolves
 * only after the settings file has been written.
 *
 * @returns A promise that resolves once the settings file has been updated.
 */
async function updateGpuInfo(): Promise<void> {
  return new Promise((resolve) => {
    exec(
      "nvidia-smi --query-gpu=index,memory.total --format=csv,noheader,nounits",
      (error, stdout) => {
        let data;
        try {
          data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
        } catch (error) {
          // Settings file missing/corrupt — start from defaults
          data = DEFALT_SETTINGS;
        }

        if (!error) {
          // Collect GPU entries while tracking the one with the most VRAM
          let highestVram = 0;
          let highestVramId = "0";
          const gpus = stdout
            .trim()
            .split("\n")
            .map((line) => {
              let [id, vram] = line.split(", ");
              vram = vram.replace(/\r/g, ""); // strip Windows carriage returns
              if (parseFloat(vram) > highestVram) {
                highestVram = parseFloat(vram);
                highestVramId = id;
              }
              return { id, vram };
            });

          data["gpus"] = gpus;
          data["gpu_highest_vram"] = highestVramId;
        } else {
          // nvidia-smi unavailable or failed — no GPUs detected
          data["gpus"] = [];
        }

        writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
        resolve();
      }
    );
  });
}

View File

@ -21,6 +21,7 @@ import {
Model, Model,
joinPath, joinPath,
InferenceExtension, InferenceExtension,
log,
} from "@janhq/core"; } from "@janhq/core";
import { requestInference } from "./helpers/sse"; import { requestInference } from "./helpers/sse";
import { ulid } from "ulid"; import { ulid } from "ulid";
@ -270,6 +271,7 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
} }
message.status = MessageStatus.Error; message.status = MessageStatus.Error;
events.emit(EventName.OnMessageUpdate, message); events.emit(EventName.OnMessageUpdate, message);
log(`[APP]::Error: ${err.message}`);
}, },
}); });
} }

View File

@ -1,18 +1,17 @@
const fs = require("fs"); const fs = require("fs");
const fsPromises = fs.promises;
const path = require("path"); const path = require("path");
const { exec, spawn } = require("child_process"); const { exec, spawn } = require("child_process");
const tcpPortUsed = require("tcp-port-used"); const tcpPortUsed = require("tcp-port-used");
const fetchRetry = require("fetch-retry")(global.fetch); const fetchRetry = require("fetch-retry")(global.fetch);
const osUtils = require("os-utils"); const osUtils = require("os-utils");
const { readFileSync, writeFileSync, existsSync } = require("fs"); const { readFileSync, writeFileSync, existsSync } = require("fs");
const { log } = require("@janhq/core/node");
// The PORT to use for the Nitro subprocess // The PORT to use for the Nitro subprocess
const PORT = 3928; const PORT = 3928;
const LOCAL_HOST = "127.0.0.1"; const LOCAL_HOST = "127.0.0.1";
const NITRO_HTTP_SERVER_URL = `http://${LOCAL_HOST}:${PORT}`; const NITRO_HTTP_SERVER_URL = `http://${LOCAL_HOST}:${PORT}`;
const NITRO_HTTP_LOAD_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/loadmodel`; const NITRO_HTTP_LOAD_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/loadmodel`;
const NITRO_HTTP_UNLOAD_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/unloadModel`;
const NITRO_HTTP_VALIDATE_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/modelstatus`; const NITRO_HTTP_VALIDATE_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/modelstatus`;
const NITRO_HTTP_KILL_URL = `${NITRO_HTTP_SERVER_URL}/processmanager/destroy`; const NITRO_HTTP_KILL_URL = `${NITRO_HTTP_SERVER_URL}/processmanager/destroy`;
const SUPPORTED_MODEL_FORMAT = ".gguf"; const SUPPORTED_MODEL_FORMAT = ".gguf";
@ -23,21 +22,6 @@ const NVIDIA_INFO_FILE = path.join(
"settings.json" "settings.json"
); );
const DEFALT_SETTINGS = {
notify: true,
run_mode: "cpu",
nvidia_driver: {
exist: false,
version: "",
},
cuda: {
exist: false,
version: "",
},
gpus: [],
gpu_highest_vram: "",
};
// The subprocess instance for Nitro // The subprocess instance for Nitro
let subprocess = undefined; let subprocess = undefined;
let currentModelFile: string = undefined; let currentModelFile: string = undefined;
@ -54,137 +38,6 @@ function stopModel(): Promise<void> {
return killSubprocess(); return killSubprocess();
} }
/**
* Validate nvidia and cuda for linux and windows
*/
async function updateNvidiaDriverInfo(): Promise<void> {
exec(
"nvidia-smi --query-gpu=driver_version --format=csv,noheader",
(error, stdout) => {
let data;
try {
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
} catch (error) {
data = DEFALT_SETTINGS;
}
if (!error) {
const firstLine = stdout.split("\n")[0].trim();
data["nvidia_driver"].exist = true;
data["nvidia_driver"].version = firstLine;
} else {
data["nvidia_driver"].exist = false;
}
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
Promise.resolve();
}
);
}
function isExists(file: string, paths: string[]): boolean {
return paths.some((p) => existsSync(path.join(p, file)));
}
function updateCudaExistence() {
let filesCuda12: string[];
let filesCuda11: string[];
let paths: string[];
let cudaVersion: string = "";
if (process.platform === "win32") {
filesCuda12 = ["cublas64_12.dll", "cudart64_12.dll", "cublasLt64_12.dll"];
filesCuda11 = ["cublas64_11.dll", "cudart64_11.dll", "cublasLt64_11.dll"];
paths = process.env.PATH ? process.env.PATH.split(path.delimiter) : [];
} else {
filesCuda12 = ["libcudart.so.12", "libcublas.so.12", "libcublasLt.so.12"];
filesCuda11 = ["libcudart.so.11.0", "libcublas.so.11", "libcublasLt.so.11"];
paths = process.env.LD_LIBRARY_PATH
? process.env.LD_LIBRARY_PATH.split(path.delimiter)
: [];
paths.push("/usr/lib/x86_64-linux-gnu/");
}
let cudaExists = filesCuda12.every(
(file) => existsSync(file) || isExists(file, paths)
);
if (!cudaExists) {
cudaExists = filesCuda11.every(
(file) => existsSync(file) || isExists(file, paths)
);
if (cudaExists) {
cudaVersion = "11";
}
} else {
cudaVersion = "12";
}
let data;
try {
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
} catch (error) {
data = DEFALT_SETTINGS;
}
data["cuda"].exist = cudaExists;
data["cuda"].version = cudaVersion;
if (cudaExists) {
data.run_mode = "gpu";
}
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
}
async function updateGpuInfo(): Promise<void> {
exec(
"nvidia-smi --query-gpu=index,memory.total --format=csv,noheader,nounits",
(error, stdout) => {
let data;
try {
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
} catch (error) {
data = DEFALT_SETTINGS;
}
if (!error) {
// Get GPU info and gpu has higher memory first
let highestVram = 0;
let highestVramId = "0";
let gpus = stdout
.trim()
.split("\n")
.map((line) => {
let [id, vram] = line.split(", ");
vram = vram.replace(/\r/g, "");
if (parseFloat(vram) > highestVram) {
highestVram = parseFloat(vram);
highestVramId = id;
}
return { id, vram };
});
data["gpus"] = gpus;
data["gpu_highest_vram"] = highestVramId;
} else {
data["gpus"] = [];
}
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
Promise.resolve();
}
);
}
async function updateNvidiaInfo() {
if (process.platform !== "darwin") {
await Promise.all([
updateNvidiaDriverInfo(),
updateCudaExistence(),
updateGpuInfo(),
]);
}
}
/** /**
* Initializes a Nitro subprocess to load a machine learning model. * Initializes a Nitro subprocess to load a machine learning model.
* @param wrapper - The model wrapper. * @param wrapper - The model wrapper.
@ -238,31 +91,28 @@ async function initModel(wrapper: any): Promise<ModelOperationResponse> {
async function loadModel(nitroResourceProbe: any | undefined) { async function loadModel(nitroResourceProbe: any | undefined) {
// Gather system information for CPU physical cores and memory // Gather system information for CPU physical cores and memory
if (!nitroResourceProbe) nitroResourceProbe = await getResourcesInfo(); if (!nitroResourceProbe) nitroResourceProbe = await getResourcesInfo();
return ( return killSubprocess()
killSubprocess() .then(() => tcpPortUsed.waitUntilFree(PORT, 300, 5000))
.then(() => tcpPortUsed.waitUntilFree(PORT, 300, 5000)) .then(() => {
// wait for 500ms to make sure the port is free for windows platform /**
.then(() => { * There is a problem with Windows process manager
if (process.platform === "win32") { * Should wait for awhile to make sure the port is free and subprocess is killed
return sleep(500); * The tested threshold is 500ms
} else { **/
return sleep(0); if (process.platform === "win32") {
} return new Promise((resolve) => setTimeout(resolve, 500));
}) } else {
.then(() => spawnNitroProcess(nitroResourceProbe)) return Promise.resolve();
.then(() => loadLLMModel(currentSettings)) }
.then(validateModelStatus) })
.catch((err) => { .then(() => spawnNitroProcess(nitroResourceProbe))
console.error("error: ", err); .then(() => loadLLMModel(currentSettings))
// TODO: Broadcast error so app could display proper error message .then(validateModelStatus)
return { error: err, currentModelFile }; .catch((err) => {
}) log(`[NITRO]::Error: ${err}`);
); // TODO: Broadcast error so app could display proper error message
} return { error: err, currentModelFile };
});
// Add function sleep
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
} }
function promptTemplateConverter(promptTemplate) { function promptTemplateConverter(promptTemplate) {
@ -312,6 +162,7 @@ function promptTemplateConverter(promptTemplate) {
* @returns A Promise that resolves when the model is loaded successfully, or rejects with an error message if the model is not found or fails to load. * @returns A Promise that resolves when the model is loaded successfully, or rejects with an error message if the model is not found or fails to load.
*/ */
function loadLLMModel(settings): Promise<Response> { function loadLLMModel(settings): Promise<Response> {
log(`[NITRO]::Debug: Loading model with params ${settings}`);
return fetchRetry(NITRO_HTTP_LOAD_MODEL_URL, { return fetchRetry(NITRO_HTTP_LOAD_MODEL_URL, {
method: "POST", method: "POST",
headers: { headers: {
@ -320,6 +171,8 @@ function loadLLMModel(settings): Promise<Response> {
body: JSON.stringify(settings), body: JSON.stringify(settings),
retries: 3, retries: 3,
retryDelay: 500, retryDelay: 500,
}).catch((err) => {
log(`[NITRO]::Error: Load model failed with error ${err}`);
}); });
} }
@ -360,7 +213,8 @@ async function validateModelStatus(): Promise<ModelOperationResponse> {
async function killSubprocess(): Promise<void> { async function killSubprocess(): Promise<void> {
const controller = new AbortController(); const controller = new AbortController();
setTimeout(() => controller.abort(), 5000); setTimeout(() => controller.abort(), 5000);
console.debug("Start requesting to kill Nitro..."); log(`[NITRO]::Debug: Request to kill Nitro`);
return fetch(NITRO_HTTP_KILL_URL, { return fetch(NITRO_HTTP_KILL_URL, {
method: "DELETE", method: "DELETE",
signal: controller.signal, signal: controller.signal,
@ -371,20 +225,17 @@ async function killSubprocess(): Promise<void> {
}) })
.catch(() => {}) .catch(() => {})
.then(() => tcpPortUsed.waitUntilFree(PORT, 300, 5000)) .then(() => tcpPortUsed.waitUntilFree(PORT, 300, 5000))
.then(() => console.debug("Nitro is killed")); .then(() => log(`[NITRO]::Debug: Nitro process is terminated`));
} }
/**
* Look for the Nitro binary and execute it
* Using child-process to spawn the process
* Should run exactly platform specified Nitro binary version
*/
/** /**
* Spawns a Nitro subprocess. * Spawns a Nitro subprocess.
* @param nitroResourceProbe - The Nitro resource probe. * @param nitroResourceProbe - The Nitro resource probe.
* @returns A promise that resolves when the Nitro subprocess is started. * @returns A promise that resolves when the Nitro subprocess is started.
*/ */
function spawnNitroProcess(nitroResourceProbe: any): Promise<any> { function spawnNitroProcess(nitroResourceProbe: any): Promise<any> {
console.debug("Starting Nitro subprocess..."); log(`[NITRO]::Debug: Spawning Nitro subprocess...`);
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
let binaryFolder = path.join(__dirname, "bin"); // Current directory by default let binaryFolder = path.join(__dirname, "bin"); // Current directory by default
let cudaVisibleDevices = ""; let cudaVisibleDevices = "";
@ -426,7 +277,7 @@ function spawnNitroProcess(nitroResourceProbe: any): Promise<any> {
const binaryPath = path.join(binaryFolder, binaryName); const binaryPath = path.join(binaryFolder, binaryName);
// Execute the binary // Execute the binary
subprocess = spawn(binaryPath, [1, LOCAL_HOST, PORT], { subprocess = spawn(binaryPath, ["1", LOCAL_HOST, PORT.toString()], {
cwd: binaryFolder, cwd: binaryFolder,
env: { env: {
...process.env, ...process.env,
@ -436,16 +287,15 @@ function spawnNitroProcess(nitroResourceProbe: any): Promise<any> {
// Handle subprocess output // Handle subprocess output
subprocess.stdout.on("data", (data) => { subprocess.stdout.on("data", (data) => {
console.debug(`stdout: ${data}`); log(`[NITRO]::Debug: ${data}`);
}); });
subprocess.stderr.on("data", (data) => { subprocess.stderr.on("data", (data) => {
console.error("subprocess error:" + data.toString()); log(`[NITRO]::Error: ${data}`);
console.error(`stderr: ${data}`);
}); });
subprocess.on("close", (code) => { subprocess.on("close", (code) => {
console.debug(`child process exited with code ${code}`); log(`[NITRO]::Debug: Nitro exited with code: ${code}`);
subprocess = null; subprocess = null;
reject(`child process exited with code ${code}`); reject(`child process exited with code ${code}`);
}); });
@ -463,7 +313,7 @@ function spawnNitroProcess(nitroResourceProbe: any): Promise<any> {
function getResourcesInfo(): Promise<ResourcesInfo> { function getResourcesInfo(): Promise<ResourcesInfo> {
return new Promise(async (resolve) => { return new Promise(async (resolve) => {
const cpu = await osUtils.cpuCount(); const cpu = await osUtils.cpuCount();
console.debug("cpu: ", cpu); log(`[NITRO]::CPU informations - ${cpu}`);
const response: ResourcesInfo = { const response: ResourcesInfo = {
numCpuPhysicalCore: cpu, numCpuPhysicalCore: cpu,
memAvailable: 0, memAvailable: 0,
@ -472,6 +322,23 @@ function getResourcesInfo(): Promise<ResourcesInfo> {
}); });
} }
/**
* This will retrive GPU informations and persist settings.json
* Will be called when the extension is loaded to turn on GPU acceleration if supported
*/
async function updateNvidiaInfo() {
if (process.platform !== "darwin") {
await Promise.all([
updateNvidiaDriverInfo(),
updateCudaExistence(),
updateGpuInfo(),
]);
}
}
/**
* Retrieve current nitro process
*/
const getCurrentNitroProcessInfo = (): Promise<any> => { const getCurrentNitroProcessInfo = (): Promise<any> => {
nitroProcessInfo = { nitroProcessInfo = {
isRunning: subprocess != null, isRunning: subprocess != null,
@ -479,6 +346,11 @@ const getCurrentNitroProcessInfo = (): Promise<any> => {
return nitroProcessInfo; return nitroProcessInfo;
}; };
/**
* Every module should have a dispose function
* This will be called when the extension is unloaded and should clean up any resources
* Also called when app is closed
*/
function dispose() { function dispose() {
// clean other registered resources here // clean other registered resources here
killSubprocess(); killSubprocess();

View File

@ -22,6 +22,7 @@ module.exports = {
process.env.INFERENCE_URL || process.env.INFERENCE_URL ||
"http://127.0.0.1:3928/inferences/llamacpp/chat_completion" "http://127.0.0.1:3928/inferences/llamacpp/chat_completion"
), ),
TROUBLESHOOTING_URL: JSON.stringify("https://jan.ai/guides/troubleshooting")
}), }),
], ],
output: { output: {

View File

@ -5,7 +5,6 @@ import {
abortDownload, abortDownload,
getResourcePath, getResourcePath,
getUserSpace, getUserSpace,
fileStat,
InferenceEngine, InferenceEngine,
joinPath, joinPath,
ModelExtension, ModelExtension,
@ -281,7 +280,7 @@ export default class JanModelExtension implements ModelExtension {
if (file.endsWith('.json')) continue if (file.endsWith('.json')) continue
const path = await joinPath([JanModelExtension._homeDir, dirName, file]) const path = await joinPath([JanModelExtension._homeDir, dirName, file])
const fileStats = await fileStat(path) const fileStats = await fs.fileStat(path)
if (fileStats.isDirectory) continue if (fileStats.isDirectory) continue
binaryFileSize = fileStats.size binaryFileSize = fileStats.size
binaryFileName = file binaryFileName = file

View File

@ -9,12 +9,13 @@ dotenv.config();
const JAN_API_HOST = process.env.JAN_API_HOST || "127.0.0.1"; const JAN_API_HOST = process.env.JAN_API_HOST || "127.0.0.1";
const JAN_API_PORT = Number.parseInt(process.env.JAN_API_PORT || "1337"); const JAN_API_PORT = Number.parseInt(process.env.JAN_API_PORT || "1337");
const serverLogPath = path.join(os.homedir(), "jan", "server.log"); const serverLogPath = path.join(os.homedir(), "jan", "logs", "server.log");
let server: any | undefined = undefined; let server: any | undefined = undefined;
export const startServer = async (schemaPath?: string, baseDir?: string) => { export const startServer = async (schemaPath?: string, baseDir?: string) => {
try { try {
log(`[API]::Debug: Starting JAN API server...`, "server.log")
server = fastify({ server = fastify({
logger: { logger: {
level: "info", level: "info",
@ -62,17 +63,18 @@ export const startServer = async (schemaPath?: string, baseDir?: string) => {
host: JAN_API_HOST, host: JAN_API_HOST,
}) })
.then(() => { .then(() => {
log(`JAN API listening at: http://${JAN_API_HOST}:${JAN_API_PORT}`); log(`[API]::Debug: JAN API listening at: http://${JAN_API_HOST}:${JAN_API_PORT}`);
}); });
} catch (e) { } catch (e) {
log(e); log(`[API]::Error: ${e}`);
} }
}; };
export const stopServer = async () => { export const stopServer = async () => {
try { try {
log(`[API]::Debug: Server stopped`, "server.log")
await server.close(); await server.close();
} catch (e) { } catch (e) {
log(e); log(`[API]::Error: ${e}`);
} }
}; };

View File

@ -3,6 +3,7 @@
import { useContext, useEffect, useState } from 'react' import { useContext, useEffect, useState } from 'react'
import { fs } from '@janhq/core'
import { import {
Switch, Switch,
Button, Button,
@ -20,6 +21,7 @@ import ShortCut from '@/containers/Shortcut'
import { FeatureToggleContext } from '@/context/FeatureToggle' import { FeatureToggleContext } from '@/context/FeatureToggle'
import { useSettings } from '@/hooks/useSettings' import { useSettings } from '@/hooks/useSettings'
import { toaster } from '@/containers/Toast'
const serverEnabledAtom = atom<boolean>(false) const serverEnabledAtom = atom<boolean>(false)
@ -37,6 +39,16 @@ const Advanced = () => {
}) })
}, []) }, [])
const clearLogs = async () => {
if (await fs.existsSync(`file://logs`)) {
await fs.rmdirSync(`file://logs`, { recursive: true })
}
toaster({
title: 'Logs cleared',
description: 'All logs have been cleared.',
})
}
return ( return (
<div className="block w-full"> <div className="block w-full">
{/* CPU / GPU switching */} {/* CPU / GPU switching */}
@ -137,6 +149,19 @@ const Advanced = () => {
</Button> </Button>
</div> </div>
)} )}
<div className="flex w-full items-start justify-between border-b border-border py-4 first:pt-0 last:border-none">
<div className="w-4/5 flex-shrink-0 space-y-1.5">
<div className="flex gap-x-2">
<h6 className="text-sm font-semibold capitalize">Clear logs</h6>
</div>
<p className="whitespace-pre-wrap leading-relaxed">
Clear all logs from Jan app.
</p>
</div>
<Button size="sm" themes="secondary" onClick={clearLogs}>
Clear
</Button>
</div>
<div className="flex w-full items-start justify-between border-b border-border py-4 first:pt-0 last:border-none"> <div className="flex w-full items-start justify-between border-b border-border py-4 first:pt-0 last:border-none">
<div className="w-4/5 flex-shrink-0 space-y-1.5"> <div className="w-4/5 flex-shrink-0 space-y-1.5">
<div className="flex gap-x-2"> <div className="flex gap-x-2">