Merge branch 'main' into add/model-list

This commit is contained in:
Hoang Ha 2023-12-29 16:23:48 +07:00 committed by GitHub
commit d99fc186b9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 535 additions and 89 deletions

View File

@ -70,7 +70,7 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your compute
<tr style="text-align: center">
<td style="text-align:center"><b>Experimental (Nightly Build)</b></td>
<td style="text-align:center" colspan="4">
<a href='https://github.com/janhq/jan/actions/runs/7341513351'>
<a href='https://github.com/janhq/jan/actions/runs/7350847486'>
<b>Github action artifactory</b>
</a>
</td>
@ -81,16 +81,14 @@ Download the latest version of Jan at https://jan.ai/ or visit the **[GitHub Rel
## Demo
<p align="center">
<video src="https://github.com/janhq/jan/assets/89722390/43adfddc-7980-4ae6-b544-719f04660dd7">
</video>
</p>
![Demo](/demo.gif)
_Video: Jan v0.4.0 on Mac Air M2, 16GB Ventura_
_Realtime Video: Jan v0.4.3-nightly on a Mac M1, 16GB Sonoma 14_
## Quicklinks
#### Jan
- [Jan website](https://jan.ai/)
- [Jan Github](https://github.com/janhq/jan)
- [User Guides](https://jan.ai/docs)
@ -98,8 +96,10 @@ _Video: Jan v0.4.0 on Mac Air M2, 16GB Ventura_
- [API reference](https://jan.ai/api-reference/)
- [Specs](https://jan.ai/specs/)
#### Nitro:
Nitro is a high-efficiency C++ inference engine for edge computing, powering Jan. It is lightweight and embeddable, ideal for product integration.
#### Nitro
Nitro is a high-efficiency C++ inference engine for edge computing. It is lightweight and embeddable, and can be used on its own within your own projects.
- [Nitro Website](https://nitro.jan.ai)
- [Nitro Github](https://github.com/janhq/nitro)
- [Documentation](https://nitro.jan.ai/docs)
@ -118,21 +118,22 @@ To reset your installation:
```
This will remove all build artifacts and cached files:
- Delete Jan from your `/Applications` folder
- Clear Application cache in `/Users/$(whoami)/Library/Caches/jan`
2. Use the following commands to remove any dangling backend processes:
```sh
ps aux | grep nitro
```
```sh
ps aux | grep nitro
```
Look for processes like "nitro" and "nitro_arm_64," and kill them one by one with:
Look for processes like "nitro" and "nitro_arm_64," and kill them one by one with:
```sh
kill -9 <PID>
```
```sh
kill -9 <PID>
```
## Contributing
Contributions are welcome! Please read the [CONTRIBUTING.md](CONTRIBUTING.md) file
@ -176,19 +177,22 @@ This will build the app MacOS m1/m2 for production (with code signing already do
## Nightly Build
Nightly build is a process where the software is built automatically every night. This helps in detecting and fixing bugs early in the development cycle. The process for this project is defined in [`.github/workflows/jan-electron-build-nightly.yml`](.github/workflows/jan-electron-build-nightly.yml)
You can join our Discord server [here](https://discord.gg/FTk2MvZwJH) and go to channel [github-jan](https://discordapp.com/channels/1107178041848909847/1148534730359308298) to monitor the build process.
Our nightly build process for this project is defined in [`.github/workflows/jan-electron-build-nightly.yml`](.github/workflows/jan-electron-build-nightly.yml)
The nightly build is triggered at 2:00 AM UTC every day.
The nightly build can be downloaded from the url notified in the Discord channel. Please access the url from the browser and download the build artifacts from there.
Getting on Nightly:
1. Join our Discord server [here](https://discord.gg/FTk2MvZwJH) and go to channel [github-jan](https://discordapp.com/channels/1107178041848909847/1148534730359308298).
2. Download the build artifacts from the channel.
3. Subsequently, to get the latest nightly, just quit and restart the app.
4. Upon app restart, you will be automatically prompted to update to the latest nightly build.
## Manual Build
Manual build is a process where the software is built manually by the developers. This is usually done when a new feature is implemented or a bug is fixed. The process for this project is defined in [`.github/workflows/jan-electron-build-nightly.yml`](.github/workflows/jan-electron-build-nightly.yml)
Stable releases are triggered by manual builds. This is usually done for new features or a bug fixes.
It is similar to the nightly build process, except that it is triggered manually by the developers.
The process for this project is defined in [`.github/workflows/jan-electron-build-nightly.yml`](.github/workflows/jan-electron-build-nightly.yml)
## Acknowledgements

View File

@ -9,7 +9,8 @@ export enum AppRoute {
openAppDirectory = 'openAppDirectory',
openFileExplore = 'openFileExplorer',
relaunch = 'relaunch',
joinPath = 'joinPath'
joinPath = 'joinPath',
baseName = 'baseName',
}
export enum AppEvent {

View File

@ -51,6 +51,27 @@ const openFileExplorer: (path: string) => Promise<any> = (path) =>
*/
const joinPath: (paths: string[]) => Promise<string> = (paths) => global.core.api?.joinPath(paths)
/**
 * Retrieves the basename (final path segment) of the given path.
 * Delegates to the host API so separator handling matches the current OS.
 *
 * @param path - The path to extract the basename from.
 * @returns {Promise<string>} A promise that resolves with the basename.
 */
const baseName: (path: string) => Promise<string> = (path) => global.core.api?.baseName(path)
/**
 * Launches the given URL in the user's default web browser.
 *
 * @param {string} url - The URL to open.
 * @returns {Promise<any>} - Resolves once the URL has been handed off to the browser.
 */
const openExternalUrl: (url: string) => Promise<any> = (url) => {
  return global.core.api?.openExternalUrl(url)
}
/**
 * Looks up the application's resource directory via the host API.
 *
 * @returns {Promise<string>} - A promise that resolves with the resource path.
 */
const getResourcePath: () => Promise<string> = () => {
  return global.core.api?.getResourcePath()
}
/**
@ -74,4 +95,6 @@ export {
openFileExplorer,
getResourcePath,
joinPath,
openExternalUrl,
baseName,
}

View File

@ -1,6 +1,6 @@
import { AppRoute } from '../../../api'
import { HttpServer } from '../HttpServer'
import { join } from 'path'
import { basename, join } from 'path'
import {
chatCompletions,
deleteBuilder,
@ -36,7 +36,11 @@ export const commonRouter = async (app: HttpServer) => {
// App Routes
app.post(`/app/${AppRoute.joinPath}`, async (request: any, reply: any) => {
const args = JSON.parse(request.body) as any[]
console.debug('joinPath: ', ...args[0])
reply.send(JSON.stringify(join(...args[0])))
})
app.post(`/app/${AppRoute.baseName}`, async (request: any, reply: any) => {
const args = JSON.parse(request.body) as any[]
reply.send(JSON.stringify(basename(args[0])))
})
}

BIN
demo.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.9 MiB

View File

@ -1,13 +1,9 @@
import { app, ipcMain, shell, nativeTheme } from 'electron'
import { join } from 'path'
import { join, basename } from 'path'
import { WindowManager } from './../managers/window'
import { userSpacePath } from './../utils/path'
import { AppRoute } from '@janhq/core'
import { getResourcePath } from './../utils/path'
import {
ExtensionManager,
ModuleManager,
} from '@janhq/core/node'
import { ExtensionManager, ModuleManager } from '@janhq/core/node'
export function handleAppIPCs() {
/**
@ -53,6 +49,13 @@ export function handleAppIPCs() {
join(...paths)
)
/**
* Retrieve the basename from the given path, respecting the current OS.
*/
ipcMain.handle(AppRoute.baseName, async (_event, path: string) =>
basename(path)
)
/**
* Relaunches the app in production - reload window in development.
* @param _event - The IPC event object.

View File

@ -46,8 +46,11 @@ export function handleDownloaderIPCs() {
*/
ipcMain.handle(DownloadRoute.downloadFile, async (_event, url, fileName) => {
const userDataPath = join(app.getPath('home'), 'jan')
if (typeof fileName === 'string' && fileName.includes('file:/')) {
fileName = fileName.replace('file:/', '')
if (
typeof fileName === 'string' &&
(fileName.includes('file:/') || fileName.includes('file:\\'))
) {
fileName = fileName.replace('file:/', '').replace('file:\\', '')
}
const destination = resolve(userDataPath, fileName)
const rq = request(url)

View File

@ -2,8 +2,8 @@ import { ipcMain } from 'electron'
// @ts-ignore
import reflect from '@alumna/reflect'
import { FileManagerRoute, getResourcePath } from '@janhq/core'
import { userSpacePath } from './../utils/path'
import { FileManagerRoute } from '@janhq/core'
import { userSpacePath, getResourcePath } from './../utils/path'
/**
* Handles file system extensions operations.

View File

@ -13,8 +13,16 @@ export function handleFsIPCs() {
return import(moduleName).then((mdl) =>
mdl[route](
...args.map((arg) =>
typeof arg === 'string' && arg.includes('file:/')
? join(userSpacePath, arg.replace('file:/', ''))
typeof arg === 'string' &&
(arg.includes(`file:/`) || arg.includes(`file:\\`))
? join(
userSpacePath,
arg
.replace(`file://`, '')
.replace(`file:/`, '')
.replace(`file:\\\\`, '')
.replace(`file:\\`, '')
)
: arg
)
)

View File

@ -25,7 +25,7 @@ export default class JSONConversationalExtension
*/
async onLoad() {
if (!(await fs.existsSync(JSONConversationalExtension._homeDir)))
fs.mkdirSync(JSONConversationalExtension._homeDir)
await fs.mkdirSync(JSONConversationalExtension._homeDir)
console.debug('JSONConversationalExtension loaded')
}

View File

@ -32,7 +32,8 @@ import { join } from "path";
* It also subscribes to events emitted by the @janhq/core package and handles new message requests.
*/
export default class JanInferenceNitroExtension implements InferenceExtension {
private static readonly _homeDir = "engines";
private static readonly _homeDir = "file://engines";
private static readonly _settingsDir = "file://settings";
private static readonly _engineMetadataFileName = "nitro.json";
private static _currentModel: Model;
@ -58,9 +59,13 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
/**
* Subscribes to events emitted by the @janhq/core package.
*/
async onLoad() {
if (!(await fs.existsSync(JanInferenceNitroExtension._homeDir)))
fs.mkdirSync(JanInferenceNitroExtension._homeDir);
async onLoad(): Promise<void> {
if (!(await fs.existsSync(JanInferenceNitroExtension._homeDir))) {
await fs.mkdirSync(JanInferenceNitroExtension._homeDir).catch((err) => console.debug(err));
}
if (!(await fs.existsSync(JanInferenceNitroExtension._settingsDir)))
await fs.mkdirSync(JanInferenceNitroExtension._settingsDir);
this.writeDefaultEngineSettings();
// Events subscription
@ -79,6 +84,24 @@ export default class JanInferenceNitroExtension implements InferenceExtension {
events.on(EventName.OnInferenceStopped, () => {
JanInferenceNitroExtension.handleInferenceStopped(this);
});
// Attempt to fetch nvidia info
await executeOnMain(MODULE, "updateNvidiaInfo", {});
const gpuDriverConf = await fs.readFileSync(
join(JanInferenceNitroExtension._settingsDir, "settings.json")
);
if (gpuDriverConf.notify && gpuDriverConf.run_mode === "cpu") {
// Driver is fully installed, but not in use
if (gpuDriverConf.nvidia_driver?.exist && gpuDriverConf.cuda?.exist) {
events.emit("OnGPUCompatiblePrompt", {});
// Prompt user to switch
} else if (gpuDriverConf.nvidia_driver?.exist) {
// Prompt user to install cuda toolkit
events.emit("OnGPUDriverMissingPrompt", {});
}
}
Promise.resolve()
}
/**

View File

@ -1,9 +1,11 @@
const fs = require("fs");
const fsPromises = fs.promises;
const path = require("path");
const { spawn } = require("child_process");
const { exec, spawn } = require("child_process");
const tcpPortUsed = require("tcp-port-used");
const fetchRetry = require("fetch-retry")(global.fetch);
const si = require("systeminformation");
const { readFileSync, writeFileSync, existsSync } = require("fs");
// The PORT to use for the Nitro subprocess
const PORT = 3928;
@ -14,6 +16,27 @@ const NITRO_HTTP_UNLOAD_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacp
const NITRO_HTTP_VALIDATE_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/modelstatus`;
const NITRO_HTTP_KILL_URL = `${NITRO_HTTP_SERVER_URL}/processmanager/destroy`;
const SUPPORTED_MODEL_FORMAT = ".gguf";
const NVIDIA_INFO_FILE = path.join(
require("os").homedir(),
"jan",
"settings",
"settings.json"
);
const DEFALT_SETTINGS = {
"notify": true,
"run_mode": "cpu",
"nvidia_driver": {
"exist": false,
"version": ""
},
"cuda": {
"exist": false,
"version": ""
},
"gpus": [],
"gpu_highest_vram": ""
}
// The subprocess instance for Nitro
let subprocess = undefined;
@ -29,6 +52,125 @@ function stopModel(): Promise<void> {
return killSubprocess();
}
/**
 * Validate nvidia and cuda for linux and windows.
 *
 * Queries the installed NVIDIA driver version via `nvidia-smi` and records
 * the result (existence + version) in the settings file. Falls back to
 * DEFALT_SETTINGS when the settings file is missing or unparsable.
 */
async function updateNvidiaDriverInfo(): Promise<void> {
  // Wrap the callback-style exec in a promise so callers can actually await
  // completion. Previously the async function returned immediately and the
  // inner `Promise.resolve()` was a no-op, so `Promise.all` in
  // updateNvidiaInfo did not wait for the settings file to be written.
  return new Promise((resolve) => {
    exec(
      "nvidia-smi --query-gpu=driver_version --format=csv,noheader",
      (error, stdout) => {
        let data;
        try {
          data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf8"));
        } catch (error) {
          data = DEFALT_SETTINGS;
        }

        if (!error) {
          // csv,noheader output: first line is the driver version string.
          const firstLine = stdout.split("\n")[0].trim();
          data["nvidia_driver"].exist = true;
          data["nvidia_driver"].version = firstLine;
        } else {
          // nvidia-smi missing or failed: record that no driver is present.
          data["nvidia_driver"].exist = false;
        }

        writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
        resolve();
      }
    );
  });
}
/**
 * Returns true when `file` exists inside at least one of the given
 * directories.
 *
 * @param file - File name to look for.
 * @param paths - Candidate directories to search.
 */
function checkFileExistenceInPaths(file: string, paths: string[]): boolean {
  for (const dir of paths) {
    if (existsSync(path.join(dir, file))) {
      return true;
    }
  }
  return false;
}
/**
 * Detects whether the CUDA 12 runtime libraries are present and persists the
 * result to the settings file. When CUDA is found, run_mode is switched to
 * "gpu" so the nitro subprocess can be spawned with GPU acceleration.
 */
function updateCudaExistence() {
  const isWindows = process.platform === "win32";

  // Platform-specific CUDA 12 library names.
  const requiredLibs = isWindows
    ? ["cublas64_12.dll", "cudart64_12.dll", "cublasLt64_12.dll"]
    : ["libcudart.so.12", "libcublas.so.12", "libcublasLt.so.12"];

  // Library search paths: the platform loader path plus the bundled binaries.
  const envPaths = isWindows ? process.env.PATH : process.env.LD_LIBRARY_PATH;
  const searchPaths = envPaths ? envPaths.split(path.delimiter) : [];
  searchPaths.push(
    path.join(__dirname, "bin", isWindows ? "win-cuda" : "linux-cuda")
  );
  if (!isWindows) {
    searchPaths.push("/usr/lib/x86_64-linux-gnu/");
  }

  const cudaExists = requiredLibs.every(
    (lib) => existsSync(lib) || checkFileExistenceInPaths(lib, searchPaths)
  );

  // Load current settings (or the defaults when missing/corrupt), record the
  // detection result, and persist.
  let data;
  try {
    data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf8"));
  } catch (error) {
    data = DEFALT_SETTINGS;
  }

  data["cuda"].exist = cudaExists;
  if (cudaExists) {
    data.run_mode = "gpu";
  }
  writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
}
/**
 * Queries per-GPU memory via `nvidia-smi` and records the GPU list plus the
 * id of the GPU with the most VRAM in the settings file.
 */
async function updateGpuInfo(): Promise<void> {
  // Wrap the callback-style exec in a promise so the returned promise
  // resolves only after the settings file has been updated. Previously the
  // async function resolved immediately and the inner `Promise.resolve()`
  // was a no-op.
  return new Promise((resolve) => {
    exec(
      "nvidia-smi --query-gpu=index,memory.total --format=csv,noheader,nounits",
      (error, stdout) => {
        let data;
        try {
          data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf8"));
        } catch (error) {
          data = DEFALT_SETTINGS;
        }

        if (!error) {
          // Get GPU info and gpu has higher memory first
          let highestVram = 0;
          let highestVramId = "0";
          const gpus = stdout
            .trim()
            .split("\n")
            .map((line) => {
              let [id, vram] = line.split(", ");
              // Strip carriage returns left by Windows line endings.
              vram = vram.replace(/\r/g, "");
              if (parseFloat(vram) > highestVram) {
                highestVram = parseFloat(vram);
                highestVramId = id;
              }
              return { id, vram };
            });

          data["gpus"] = gpus;
          data["gpu_highest_vram"] = highestVramId;
        } else {
          // nvidia-smi unavailable: record an empty GPU list.
          data["gpus"] = [];
        }

        writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
        resolve();
      }
    );
  });
}
/**
 * Refreshes driver, CUDA and GPU information in the settings file.
 * Skipped entirely on macOS, where NVIDIA tooling is unavailable.
 */
async function updateNvidiaInfo() {
  if (process.platform === "darwin") {
    return;
  }
  await Promise.all([
    updateNvidiaDriverInfo(),
    updateCudaExistence(),
    updateGpuInfo(),
  ]);
}
/**
* Initializes a Nitro subprocess to load a machine learning model.
* @param wrapper - The model wrapper.
@ -222,14 +364,26 @@ async function killSubprocess(): Promise<void> {
* Using child-process to spawn the process
* Should run exactly platform specified Nitro binary version
*/
/**
* Spawns a Nitro subprocess.
* @param nitroResourceProbe - The Nitro resource probe.
* @returns A promise that resolves when the Nitro subprocess is started.
*/
function spawnNitroProcess(nitroResourceProbe: any): Promise<any> {
console.debug("Starting Nitro subprocess...");
return new Promise(async (resolve, reject) => {
let binaryFolder = path.join(__dirname, "bin"); // Current directory by default
let cudaVisibleDevices = "";
let binaryName;
if (process.platform === "win32") {
binaryName = "win-start.bat";
let nvida_info = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf8"));
if (nvida_info["run_mode"] === "cpu") {
binaryFolder = path.join(binaryFolder, "win-cpu");
} else {
binaryFolder = path.join(binaryFolder, "win-cuda");
cudaVisibleDevices = nvida_info["gpu_highest_vram"];
}
binaryName = "nitro.exe";
} else if (process.platform === "darwin") {
if (process.arch === "arm64") {
binaryFolder = path.join(binaryFolder, "mac-arm64");
@ -238,13 +392,24 @@ function spawnNitroProcess(nitroResourceProbe: any): Promise<any> {
}
binaryName = "nitro";
} else {
binaryName = "linux-start.sh";
let nvida_info = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf8"));
if (nvida_info["run_mode"] === "cpu") {
binaryFolder = path.join(binaryFolder, "win-cpu");
} else {
binaryFolder = path.join(binaryFolder, "win-cuda");
cudaVisibleDevices = nvida_info["gpu_highest_vram"];
}
binaryName = "nitro";
}
const binaryPath = path.join(binaryFolder, binaryName);
// Execute the binary
subprocess = spawn(binaryPath, [1, LOCAL_HOST, PORT], {
cwd: binaryFolder,
env: {
...process.env,
CUDA_VISIBLE_DEVICES: cudaVisibleDevices,
},
});
// Handle subprocess output
@ -296,4 +461,5 @@ module.exports = {
stopModel,
killSubprocess,
dispose,
updateNvidiaInfo,
};

View File

@ -53,9 +53,13 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
/**
* Subscribes to events emitted by the @janhq/core package.
*/
async onLoad() {
if (!(await fs.existsSync(JanInferenceOpenAIExtension._homeDir)))
fs.mkdirSync(JanInferenceOpenAIExtension._homeDir);
async onLoad(): Promise<void> {
if (!(await fs.existsSync(JanInferenceOpenAIExtension._homeDir))) {
await fs
.mkdirSync(JanInferenceOpenAIExtension._homeDir)
.catch((err) => console.debug(err));
}
JanInferenceOpenAIExtension.writeDefaultEngineSettings();
// Events subscription
@ -73,6 +77,7 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
events.on(EventName.OnInferenceStopped, () => {
JanInferenceOpenAIExtension.handleInferenceStopped(this);
});
Promise.resolve();
}
/**
@ -87,7 +92,7 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
JanInferenceOpenAIExtension._engineMetadataFileName
);
if (await fs.existsSync(engineFile)) {
const engine = await fs.readFileSync(engineFile, 'utf-8');
const engine = await fs.readFileSync(engineFile, "utf-8");
JanInferenceOpenAIExtension._engineSettings =
typeof engine === "object" ? engine : JSON.parse(engine);
} else {

View File

@ -34,7 +34,7 @@ import { EngineSettings } from "./@types/global";
export default class JanInferenceTritonTrtLLMExtension
implements InferenceExtension
{
private static readonly _homeDir = "engines";
private static readonly _homeDir = "file://engines";
private static readonly _engineMetadataFileName = "triton_trtllm.json";
static _currentModel: Model;

View File

@ -8,8 +8,8 @@ import {
InferenceEngine,
joinPath,
} from '@janhq/core'
import { basename } from 'path'
import { ModelExtension, Model } from '@janhq/core'
import { baseName } from '@janhq/core/.'
/**
* A extension for models
@ -34,7 +34,7 @@ export default class JanModelExtension implements ModelExtension {
* Called when the extension is loaded.
* @override
*/
onLoad(): void {
async onLoad() {
this.copyModelsToHomeDir()
}
@ -48,7 +48,7 @@ export default class JanModelExtension implements ModelExtension {
try {
// list all of the files under the home directory
if (fs.existsSync(JanModelExtension._homeDir)) {
if (await fs.existsSync(JanModelExtension._homeDir)) {
// ignore if the model is already downloaded
console.debug('Models already persisted.')
return
@ -62,7 +62,7 @@ export default class JanModelExtension implements ModelExtension {
const srcPath = await joinPath([resourePath, 'models'])
const userSpace = await getUserSpace()
const destPath = await joinPath([userSpace, JanModelExtension._homeDir])
const destPath = await joinPath([userSpace, 'models'])
await fs.syncFile(srcPath, destPath)
@ -98,7 +98,7 @@ export default class JanModelExtension implements ModelExtension {
// try to retrieve the download file name from the source url
// if it fails, use the model ID as the file name
const extractedFileName = basename(model.source_url)
const extractedFileName = await model.source_url.split('/').pop()
const fileName = extractedFileName
.toLowerCase()
.endsWith(JanModelExtension._supportedModelFormat)

View File

@ -18,7 +18,7 @@ export default class JanMonitoringExtension implements MonitoringExtension {
/**
* Called when the extension is loaded.
*/
onLoad(): void {}
async onLoad() {}
/**
* Called when the extension is unloaded.

View File

@ -0,0 +1,84 @@
import React from 'react'
import { openExternalUrl } from '@janhq/core'
import {
  ModalClose,
  ModalFooter,
  ModalContent,
  Modal,
  ModalTitle,
  ModalHeader,
  Button,
} from '@janhq/uikit'
import { useAtom } from 'jotai'
import { isShowNotificationAtom, useSettings } from '@/hooks/useSettings'

/**
 * Modal prompting the user to install the missing NVIDIA driver and/or CUDA
 * toolkit. Visibility is driven by `isShowNotificationAtom`, which is set by
 * the settings validation in the `useSettings` hook.
 */
const GPUDriverPrompt: React.FC = () => {
  const [showNotification, setShowNotification] = useAtom(
    isShowNotificationAtom
  )
  const { saveSettings } = useSettings()

  // Persist the inverse of the checkbox: ticking "Don't show again" saves
  // notify=false so the prompt is suppressed on future validation passes.
  const onDoNotShowAgainChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    const isChecked = !e.target.checked
    saveSettings({ notify: isChecked })
  }

  // Any open-state change from the modal (close button, overlay click)
  // hides the notification without persisting anything.
  const openChanged = () => {
    setShowNotification(false)
  }
  return (
    <div>
      <Modal open={showNotification} onOpenChange={openChanged}>
        <ModalContent>
          <ModalHeader>
            <ModalTitle>Missing Nvidia Driver and Cuda Toolkit</ModalTitle>
          </ModalHeader>
          <p>
            It seems like you are missing Nvidia Driver or Cuda Toolkit or both.
            Please follow the instructions on the{' '}
            <span
              className="cursor-pointer text-blue-600"
              onClick={() =>
                openExternalUrl('https://developer.nvidia.com/cuda-toolkit')
              }
            >
              NVidia Cuda Toolkit Installation Page
            </span>{' '}
            and the{' '}
            <span
              className="cursor-pointer text-blue-600"
              onClick={() =>
                openExternalUrl('https://www.nvidia.com/Download/index.aspx')
              }
            >
              Nvidia Driver Installation Page
            </span>
            .
          </p>
          <div className="flex items-center space-x-2">
            <input
              id="default-checkbox"
              type="checkbox"
              onChange={onDoNotShowAgainChange}
              className="h-4 w-4 rounded border-gray-300 bg-gray-100 text-blue-600 focus:ring-2 focus:ring-blue-500 dark:border-gray-600 dark:bg-gray-700 dark:ring-offset-gray-800 dark:focus:ring-blue-600"
            />
            <span>Don&apos;t show again</span>
          </div>
          <ModalFooter>
            <div className="flex gap-x-2">
              <ModalClose asChild>
                <Button themes="ghost">OK</Button>
              </ModalClose>
            </div>
          </ModalFooter>
        </ModalContent>
      </Modal>
    </div>
  )
}
export default GPUDriverPrompt

View File

@ -119,6 +119,8 @@ export default function EventHandler({ children }: { children: ReactNode }) {
}
}
}
function handleGpuCompatiblePrompt() {}
function handleGpuDriverMissingPrompt() {}
useEffect(() => {
if (window.core?.events) {
@ -127,6 +129,8 @@ export default function EventHandler({ children }: { children: ReactNode }) {
events.on(EventName.OnModelReady, handleModelReady)
events.on(EventName.OnModelFail, handleModelFail)
events.on(EventName.OnModelStopped, handleModelStopped)
events.on('OnGPUCompatiblePrompt', handleGpuCompatiblePrompt)
events.on('OnGPUDriverMissingPrompt', handleGpuDriverMissingPrompt)
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])

View File

@ -1,9 +1,8 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import { basename } from 'path'
import { PropsWithChildren, useEffect, useRef } from 'react'
import { baseName } from '@janhq/core'
import { useAtomValue, useSetAtom } from 'jotai'
import { useDownloadState } from '@/hooks/useDownloadState'
@ -37,10 +36,11 @@ export default function EventListenerWrapper({ children }: PropsWithChildren) {
useEffect(() => {
if (window && window.electronAPI) {
window.electronAPI.onFileDownloadUpdate(
(_event: string, state: any | undefined) => {
async (_event: string, state: any | undefined) => {
if (!state) return
const modelName = await baseName(state.fileName)
const model = modelsRef.current.find(
(model) => modelBinFileName(model) === basename(state.fileName)
(model) => modelBinFileName(model) === modelName
)
if (model)
setDownloadState({
@ -50,25 +50,31 @@ export default function EventListenerWrapper({ children }: PropsWithChildren) {
}
)
window.electronAPI.onFileDownloadError((_event: string, state: any) => {
console.error('Download error', state)
const model = modelsRef.current.find(
(model) => modelBinFileName(model) === basename(state.fileName)
)
if (model) setDownloadStateFailed(model.id)
})
window.electronAPI.onFileDownloadSuccess((_event: string, state: any) => {
if (state && state.fileName) {
window.electronAPI.onFileDownloadError(
async (_event: string, state: any) => {
console.error('Download error', state)
const modelName = await baseName(state.fileName)
const model = modelsRef.current.find(
(model) => modelBinFileName(model) === basename(state.fileName)
(model) => modelBinFileName(model) === modelName
)
if (model) {
setDownloadStateSuccess(model.id)
setDownloadedModels([...downloadedModelRef.current, model])
if (model) setDownloadStateFailed(model.id)
}
)
window.electronAPI.onFileDownloadSuccess(
async (_event: string, state: any) => {
if (state && state.fileName) {
const modelName = await baseName(state.fileName)
const model = modelsRef.current.find(
async (model) => modelBinFileName(model) === modelName
)
if (model) {
setDownloadStateSuccess(model.id)
setDownloadedModels([...downloadedModelRef.current, model])
}
}
}
})
)
window.electronAPI.onAppUpdateDownloadUpdate(
(_event: string, progress: any) => {

View File

@ -8,6 +8,7 @@ import { TooltipProvider } from '@janhq/uikit'
import { PostHogProvider } from 'posthog-js/react'
import GPUDriverPrompt from '@/containers/GPUDriverPromptModal'
import EventListenerWrapper from '@/containers/Providers/EventListener'
import JotaiWrapper from '@/containers/Providers/Jotai'
import ThemeWrapper from '@/containers/Providers/Theme'
@ -25,11 +26,11 @@ import { instance } from '@/utils/posthog'
import { extensionManager } from '@/extension'
const Providers = (props: PropsWithChildren) => {
const { children } = props
const [setupCore, setSetupCore] = useState(false)
const [activated, setActivated] = useState(false)
const { children } = props
async function setupExtensions() {
// Register all active extensions
await extensionManager.registerActive()
@ -74,6 +75,7 @@ const Providers = (props: PropsWithChildren) => {
<FeatureToggleWrapper>
<EventListenerWrapper>
<TooltipProvider delayDuration={0}>{children}</TooltipProvider>
<GPUDriverPrompt />
</EventListenerWrapper>
<Toaster position="top-right" />
</FeatureToggleWrapper>

View File

@ -2,7 +2,9 @@ import { fs, joinPath } from '@janhq/core'
export const useEngineSettings = () => {
const readOpenAISettings = async () => {
if (!fs.existsSync(await joinPath(['file://engines', 'openai.json'])))
if (
!(await fs.existsSync(await joinPath(['file://engines', 'openai.json'])))
)
return {}
const settings = await fs.readFileSync(
await joinPath(['file://engines', 'openai.json']),

67
web/hooks/useSettings.ts Normal file
View File

@ -0,0 +1,67 @@
import { useEffect, useState } from 'react'
import { fs, joinPath } from '@janhq/core'
import { atom, useAtom } from 'jotai'
export const isShowNotificationAtom = atom<boolean>(false)
/**
 * Hook exposing the persisted GPU/driver settings (settings.json in the
 * user space) plus helpers to read, validate and update them. On mount it
 * schedules a validation pass that may surface the GPU-driver notification
 * modal.
 */
export const useSettings = () => {
  // Tracks whether run_mode is currently 'gpu'.
  const [isGPUModeEnabled, setIsGPUModeEnabled] = useState(false)
  const [showNotification, setShowNotification] = useAtom(
    isShowNotificationAtom
  )

  useEffect(() => {
    // Delay validation so extensions have a chance to write settings.json.
    // Bug fix: the callback must *invoke* validateSettings — previously it
    // only referenced the function (`() => validateSettings`), so the
    // validation pass never ran.
    setTimeout(() => validateSettings(), 3000)
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [])

  /**
   * Reads settings and decides whether to surface the missing-driver
   * notification; also syncs the GPU-mode flag from run_mode.
   */
  const validateSettings = async () => {
    readSettings().then((settings) => {
      if (
        settings &&
        settings.notify &&
        ((settings.nvidia_driver?.exist && !settings.cuda?.exist) ||
          !settings.nvidia_driver?.exist)
      ) {
        setShowNotification(true)
      }

      // Check if run_mode is 'gpu' or 'cpu' and update state accordingly
      setIsGPUModeEnabled(settings?.run_mode === 'gpu')
    })
  }

  /**
   * Loads settings.json from the user space.
   * @returns the parsed settings object, {} when the file is absent, or
   *          undefined when the host API is not available.
   */
  const readSettings = async () => {
    if (!window?.core?.api) {
      return
    }
    const settingsFile = await joinPath(['file://settings', 'settings.json'])
    if (await fs.existsSync(settingsFile)) {
      const settings = await fs.readFileSync(settingsFile, 'utf-8')
      return typeof settings === 'object' ? settings : JSON.parse(settings)
    }
    return {}
  }

  /**
   * Persists partial updates (run mode and/or notification preference) back
   * to settings.json, preserving all other fields.
   */
  const saveSettings = async ({
    runMode,
    notify,
  }: {
    runMode?: string | undefined
    notify?: boolean | undefined
  }) => {
    const settingsFile = await joinPath(['file://settings', 'settings.json'])
    const settings = await readSettings()
    if (runMode != null) settings.run_mode = runMode
    if (notify != null) settings.notify = notify
    await fs.writeFileSync(settingsFile, JSON.stringify(settings))
  }

  return {
    showNotification,
    isGPUModeEnabled,
    readSettings,
    saveSettings,
    setShowNotification,
    validateSettings,
  }
}

View File

@ -1,4 +1,4 @@
import React from 'react'
import React, { useContext } from 'react'
import { getUserSpace, openFileExplorer, joinPath } from '@janhq/core'
@ -14,6 +14,8 @@ import DropdownListSidebar, {
selectedModelAtom,
} from '@/containers/DropdownListSidebar'
import { FeatureToggleContext } from '@/context/FeatureToggle'
import { useCreateNewThread } from '@/hooks/useCreateNewThread'
import { toSettingParams } from '@/utils/model_param'
@ -35,6 +37,7 @@ const Sidebar: React.FC = () => {
const selectedModel = useAtomValue(selectedModelAtom)
const { updateThreadMetadata } = useCreateNewThread()
const threadStates = useAtomValue(threadStatesAtom)
const { experimentalFeatureEnabed } = useContext(FeatureToggleContext)
const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom)
const modelSettingParams = toSettingParams(activeModelParams)
@ -197,7 +200,7 @@ const Sidebar: React.FC = () => {
</div>
</div>
</CardSidebar>
{Object.keys(modelSettingParams).length ? (
{experimentalFeatureEnabed && Object.keys(modelSettingParams).length ? (
<CardSidebar
title="Engine"
onRevealInFinderClick={onReviewInFinderClick}

View File

@ -1,17 +1,57 @@
'use client'
import { useContext } from 'react'
import { useContext, useEffect, useState } from 'react'
import { Switch, Button } from '@janhq/uikit'
import { FeatureToggleContext } from '@/context/FeatureToggle'
import { useSettings } from '@/hooks/useSettings'
const Advanced = () => {
const { experimentalFeatureEnabed, setExperimentalFeatureEnabled } =
useContext(FeatureToggleContext)
const [gpuEnabled, setGpuEnabled] = useState<boolean>(false)
const { readSettings, saveSettings, validateSettings, setShowNotification } =
useSettings()
useEffect(() => {
readSettings().then((settings) => {
setGpuEnabled(settings.run_mode === 'gpu')
})
}, [])
return (
<div className="block w-full">
{/* CPU / GPU switching */}
<div className="flex w-full items-start justify-between border-b border-border py-4 first:pt-0 last:border-none">
<div className="w-4/5 flex-shrink-0 space-y-1.5">
<div className="flex gap-x-2">
<h6 className="text-sm font-semibold capitalize">NVidia GPU</h6>
</div>
<p className="whitespace-pre-wrap leading-relaxed">
Enable GPU acceleration for NVidia GPUs.
</p>
</div>
<Switch
checked={gpuEnabled}
onCheckedChange={(e: boolean) => {
if (e === true) {
saveSettings({ runMode: 'gpu' })
setGpuEnabled(true)
setShowNotification(false)
setTimeout(() => {
validateSettings()
}, 300)
} else {
saveSettings({ runMode: 'cpu' })
setGpuEnabled(false)
}
}}
/>
</div>
{/* Experimental */}
<div className="flex w-full items-start justify-between border-b border-border py-4 first:pt-0 last:border-none">
<div className="w-4/5 flex-shrink-0 space-y-1.5">
<div className="flex gap-x-2">
@ -20,8 +60,7 @@ const Advanced = () => {
</h6>
</div>
<p className="whitespace-pre-wrap leading-relaxed">
Enable experimental features that may be unstable
tested.
Enable experimental features that may be unstable tested.
</p>
</div>
<Switch
@ -44,7 +83,8 @@ const Advanced = () => {
</h6>
</div>
<p className="whitespace-pre-wrap leading-relaxed">
Open the directory where your app data, like conversation history and model configurations, is located.
Open the directory where your app data, like conversation history
and model configurations, is located.
</p>
</div>
<Button

View File

@ -1,10 +1,8 @@
import { basename } from 'path'
import { Model } from '@janhq/core'
export const modelBinFileName = (model: Model) => {
const modelFormatExt = '.gguf'
const extractedFileName = basename(model.source_url) ?? model.id
const extractedFileName = model.source_url?.split('/').pop() ?? model.id
const fileName = extractedFileName.toLowerCase().endsWith(modelFormatExt)
? extractedFileName
: model.id