diff --git a/.github/workflows/jan-electron-linter-and-test.yml b/.github/workflows/jan-electron-linter-and-test.yml index 40085391f..05d320bbd 100644 --- a/.github/workflows/jan-electron-linter-and-test.yml +++ b/.github/workflows/jan-electron-linter-and-test.yml @@ -22,6 +22,7 @@ on: branches: - main - dev + - release/** paths: - "electron/**" - .github/workflows/jan-electron-linter-and-test.yml @@ -66,17 +67,51 @@ jobs: CSC_IDENTITY_AUTO_DISCOVERY: "false" test-on-windows: + if: github.event_name == 'push' strategy: + fail-fast: false matrix: antivirus-tools: ['mcafee', 'default-windows-security','bit-defender'] runs-on: windows-desktop-${{ matrix.antivirus-tools }} steps: - name: Clean workspace run: | - Remove-Item -Path .\* -Force -Recurse + Remove-Item -Path "\\?\$(Get-Location)\*" -Force -Recurse $path = "$Env:APPDATA\jan" if (Test-Path $path) { - Remove-Item $path -Recurse -Force + Remove-Item "\\?\$path" -Recurse -Force + } else { + Write-Output "Folder does not exist." + } + - name: Getting the repo + uses: actions/checkout@v3 + + - name: Installing node + uses: actions/setup-node@v1 + with: + node-version: 20 + + # Clean cache, continue on error + - name: "Cleanup cache" + shell: powershell + continue-on-error: true + run: | + make clean + + - name: Linter and test + shell: powershell + run: | + make test + test-on-windows-pr: + if: github.event_name == 'pull_request' + runs-on: windows-desktop-default-windows-security + steps: + - name: Clean workspace + run: | + Remove-Item -Path "\\?\$(Get-Location)\*" -Force -Recurse + $path = "$Env:APPDATA\jan" + if (Test-Path $path) { + Remove-Item "\\?\$path" -Recurse -Force } else { Write-Output "Folder does not exist." } diff --git a/Makefile b/Makefile index a45477b29..0a53a18ed 100644 --- a/Makefile +++ b/Makefile @@ -53,15 +53,17 @@ build: check-file-counts clean: ifeq ($(OS),Windows_NT) powershell -Command "Get-ChildItem -Path . 
-Include node_modules, .next, dist, build, out -Recurse -Directory | Remove-Item -Recurse -Force" + powershell -Command "Get-ChildItem -Path . -Include package-lock.json -Recurse -File | Remove-Item -Recurse -Force" powershell -Command "Remove-Item -Recurse -Force ./pre-install/*.tgz" powershell -Command "Remove-Item -Recurse -Force ./electron/pre-install/*.tgz" - rmdir /s /q "%USERPROFILE%\jan\extensions" + powershell -Command "if (Test-Path \"$($env:USERPROFILE)\jan\extensions\") { Remove-Item -Path \"$($env:USERPROFILE)\jan\extensions\" -Recurse -Force }" else ifeq ($(shell uname -s),Linux) find . -name "node_modules" -type d -prune -exec rm -rf '{}' + find . -name ".next" -type d -exec rm -rf '{}' + find . -name "dist" -type d -exec rm -rf '{}' + find . -name "build" -type d -exec rm -rf '{}' + find . -name "out" -type d -exec rm -rf '{}' + + find . -name "package-lock.json" -type f -exec rm -rf '{}' + rm -rf ./pre-install/*.tgz rm -rf ./electron/pre-install/*.tgz rm -rf "~/jan/extensions" @@ -72,6 +74,7 @@ else find . -name "dist" -type d -exec rm -rf '{}' + find . -name "build" -type d -exec rm -rf '{}' + find . -name "out" -type d -exec rm -rf '{}' + + find . 
-name "package-lock.json" -type f -exec rm -rf '{}' + rm -rf ./pre-install/*.tgz rm -rf ./electron/pre-install/*.tgz rm -rf ~/jan/extensions diff --git a/README.md b/README.md index 2ce60c655..1465fadeb 100644 --- a/README.md +++ b/README.md @@ -43,31 +43,31 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your compute Stable (Recommended) - + jan.exe - + Intel - + M1/M2 - + jan.deb - + jan.AppImage @@ -76,31 +76,31 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your compute Experimental (Nightly Build) - + jan.exe - + Intel - + M1/M2 - + jan.deb - + jan.AppImage @@ -327,6 +327,7 @@ Jan builds on top of other open-source projects: - [llama.cpp](https://github.com/ggerganov/llama.cpp) - [LangChain](https://github.com/langchain-ai) - [TensorRT](https://github.com/NVIDIA/TensorRT) +- [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) ## Contact diff --git a/core/package.json b/core/package.json index 2f4f6b576..9e4d8d69a 100644 --- a/core/package.json +++ b/core/package.json @@ -8,8 +8,8 @@ ], "homepage": "https://jan.ai", "license": "AGPL-3.0", - "main": "dist/core.umd.js", - "module": "dist/core.es5.js", + "main": "dist/core.es5.js", + "module": "dist/core.cjs.js", "typings": "dist/types/index.d.ts", "files": [ "dist", @@ -17,8 +17,7 @@ ], "author": "Jan ", "exports": { - ".": "./dist/core.umd.js", - "./sdk": "./dist/core.umd.js", + ".": "./dist/core.es5.js", "./node": "./dist/node/index.cjs.js" }, "typesVersions": { @@ -27,10 +26,6 @@ "./dist/core.es5.js.map", "./dist/types/index.d.ts" ], - "sdk": [ - "./dist/core.es5.js.map", - "./dist/types/index.d.ts" - ], "node": [ "./dist/node/index.cjs.js.map", "./dist/types/node/index.d.ts" @@ -38,15 +33,16 @@ } }, "scripts": { - "lint": "tslint --project tsconfig.json -t codeFrame 'src/**/*.ts' 'test/**/*.ts'", + "lint": "tslint --project tsconfig.json -t codeFrame 'src/**/*.ts' 'test/**/*.ts'", "test": "jest", "prebuild": "rimraf dist", "build": "tsc --module commonjs && 
rollup -c rollup.config.ts", "start": "rollup -c rollup.config.ts -w" }, "devDependencies": { + "@rollup/plugin-replace": "^5.0.5", "@types/jest": "^29.5.12", - "@types/node": "^12.0.2", + "@types/node": "^20.11.4", "eslint": "8.57.0", "eslint-plugin-jest": "^27.9.0", "jest": "^29.7.0", @@ -63,6 +59,6 @@ }, "dependencies": { "rxjs": "^7.8.1", - "ulid": "^2.3.0" + "ulidx": "^2.3.0" } } diff --git a/core/rollup.config.ts b/core/rollup.config.ts index 95305bf25..865e86d5c 100644 --- a/core/rollup.config.ts +++ b/core/rollup.config.ts @@ -3,17 +3,16 @@ import commonjs from 'rollup-plugin-commonjs' import sourceMaps from 'rollup-plugin-sourcemaps' import typescript from 'rollup-plugin-typescript2' import json from 'rollup-plugin-json' +import replace from '@rollup/plugin-replace' const pkg = require('./package.json') -const libraryName = 'core' - export default [ { input: `src/index.ts`, output: [ - { file: pkg.main, name: libraryName, format: 'umd', sourcemap: true }, - { file: pkg.module, format: 'es', sourcemap: true }, + // { file: pkg.main, name: libraryName, format: 'umd', sourcemap: true }, + { file: pkg.main, format: 'es', sourcemap: true }, ], // Indicate here external modules you don't wanna include in your bundle (i.e.: 'lodash') external: ['path'], @@ -30,7 +29,13 @@ export default [ // Allow node_modules resolution, so you can use 'external' to control // which external modules to include in the bundle // https://github.com/rollup/rollup-plugin-node-resolve#usage - resolve(), + replace({ + 'node:crypto': 'crypto', + 'delimiters': ['"', '"'], + }), + resolve({ + browser: true, + }), // Resolve source maps to the original source sourceMaps(), @@ -46,7 +51,7 @@ export default [ 'pacote', '@types/pacote', '@npmcli/arborist', - 'ulid', + 'ulidx', 'node-fetch', 'fs', 'request', @@ -64,7 +69,7 @@ export default [ // Allow json resolution json(), // Compile TypeScript files - typescript({ useTsconfigDeclarationDir: true, exclude: ['src/*.ts', 'src/extensions/**'] 
}), + typescript({ useTsconfigDeclarationDir: true }), // Allow bundling cjs modules (unlike webpack, rollup doesn't understand cjs) commonjs(), // Allow node_modules resolution, so you can use 'external' to control diff --git a/core/src/api/index.ts b/core/src/api/index.ts index f97593934..8e41da0d1 100644 --- a/core/src/api/index.ts +++ b/core/src/api/index.ts @@ -33,7 +33,7 @@ export enum AppRoute { stopServer = 'stopServer', log = 'log', logServer = 'logServer', - systemInformations = 'systemInformations', + systemInformation = 'systemInformation', showToast = 'showToast', } @@ -95,6 +95,8 @@ export enum FileManagerRoute { getUserHomePath = 'getUserHomePath', fileStat = 'fileStat', writeBlob = 'writeBlob', + mkdir = 'mkdir', + rm = 'rm', } export type ApiFunction = (...args: any[]) => any diff --git a/core/src/core.ts b/core/src/core.ts index b8cbd3162..32244e784 100644 --- a/core/src/core.ts +++ b/core/src/core.ts @@ -1,4 +1,4 @@ -import { DownloadRequest, FileStat, NetworkConfig } from './types' +import { DownloadRequest, FileStat, NetworkConfig, SystemInformation } from './types' /** * Execute a extension module function in main process @@ -13,7 +13,7 @@ const executeOnMain: (extension: string, method: string, ...args: any[]) => Prom extension, method, ...args -) => global.core?.api?.invokeExtensionFunc(extension, method, ...args) +) => globalThis.core?.api?.invokeExtensionFunc(extension, method, ...args) /** * Downloads a file from a URL and saves it to the local file system. @@ -26,7 +26,7 @@ const executeOnMain: (extension: string, method: string, ...args: any[]) => Prom const downloadFile: (downloadRequest: DownloadRequest, network?: NetworkConfig) => Promise = ( downloadRequest, network -) => global.core?.api?.downloadFile(downloadRequest, network) +) => globalThis.core?.api?.downloadFile(downloadRequest, network) /** * Aborts the download of a specific file. 
@@ -34,14 +34,14 @@ const downloadFile: (downloadRequest: DownloadRequest, network?: NetworkConfig) * @returns {Promise} A promise that resolves when the download has been aborted. */ const abortDownload: (fileName: string) => Promise = (fileName) => - global.core.api?.abortDownload(fileName) + globalThis.core.api?.abortDownload(fileName) /** * Gets Jan's data folder path. * * @returns {Promise} A Promise that resolves with Jan's data folder path. */ -const getJanDataFolderPath = (): Promise => global.core.api?.getJanDataFolderPath() +const getJanDataFolderPath = (): Promise => globalThis.core.api?.getJanDataFolderPath() /** * Opens the file explorer at a specific path. @@ -49,21 +49,22 @@ const getJanDataFolderPath = (): Promise => global.core.api?.getJanDataF * @returns {Promise} A promise that resolves when the file explorer is opened. */ const openFileExplorer: (path: string) => Promise = (path) => - global.core.api?.openFileExplorer(path) + globalThis.core.api?.openFileExplorer(path) /** * Joins multiple paths together. * @param paths - The paths to join. * @returns {Promise} A promise that resolves with the joined path. */ -const joinPath: (paths: string[]) => Promise = (paths) => global.core.api?.joinPath(paths) +const joinPath: (paths: string[]) => Promise = (paths) => + globalThis.core.api?.joinPath(paths) /** * Retrive the basename from an url. * @param path - The path to retrieve. * @returns {Promise} A promise that resolves with the basename. */ -const baseName: (paths: string) => Promise = (path) => global.core.api?.baseName(path) +const baseName: (paths: string) => Promise = (path) => globalThis.core.api?.baseName(path) /** * Opens an external URL in the default web browser. @@ -72,20 +73,20 @@ const baseName: (paths: string) => Promise = (path) => global.core.api?. * @returns {Promise} - A promise that resolves when the URL has been successfully opened. 
*/ const openExternalUrl: (url: string) => Promise = (url) => - global.core.api?.openExternalUrl(url) + globalThis.core.api?.openExternalUrl(url) /** * Gets the resource path of the application. * * @returns {Promise} - A promise that resolves with the resource path. */ -const getResourcePath: () => Promise = () => global.core.api?.getResourcePath() +const getResourcePath: () => Promise = () => globalThis.core.api?.getResourcePath() /** * Gets the user's home path. * @returns return user's home path */ -const getUserHomePath = (): Promise => global.core.api?.getUserHomePath() +const getUserHomePath = (): Promise => globalThis.core.api?.getUserHomePath() /** * Log to file from browser processes. @@ -93,7 +94,7 @@ const getUserHomePath = (): Promise => global.core.api?.getUserHomePath( * @param message - Message to log. */ const log: (message: string, fileName?: string) => void = (message, fileName) => - global.core.api?.log(message, fileName) + globalThis.core.api?.log(message, fileName) /** * Check whether the path is a subdirectory of another path. @@ -104,13 +105,14 @@ const log: (message: string, fileName?: string) => void = (message, fileName) => * @returns {Promise} - A promise that resolves with a boolean indicating whether the path is a subdirectory. */ const isSubdirectory: (from: string, to: string) => Promise = (from: string, to: string) => - global.core.api?.isSubdirectory(from, to) + globalThis.core.api?.isSubdirectory(from, to) /** * Get system information * @returns {Promise} - A promise that resolves with the system information. */ -const systemInformations: () => Promise = () => global.core.api?.systemInformations() +const systemInformation: () => Promise = () => + globalThis.core.api?.systemInformation() /** * Show toast message from browser processes. 
@@ -119,7 +121,7 @@ const systemInformations: () => Promise = () => global.core.api?.systemInfo * @returns */ const showToast: (title: string, message: string) => void = (title, message) => - global.core.api?.showToast(title, message) + globalThis.core.api?.showToast(title, message) /** * Register extension point function type definition */ @@ -146,7 +148,7 @@ export { log, isSubdirectory, getUserHomePath, - systemInformations, + systemInformation, showToast, FileStat, } diff --git a/core/src/events.ts b/core/src/events.ts index 700807b0c..da85f7e3b 100644 --- a/core/src/events.ts +++ b/core/src/events.ts @@ -5,7 +5,7 @@ * @param handler The handler function to call when the event is observed. */ const on: (eventName: string, handler: Function) => void = (eventName, handler) => { - global.core?.events?.on(eventName, handler) + globalThis.core?.events?.on(eventName, handler) } /** @@ -15,7 +15,7 @@ const on: (eventName: string, handler: Function) => void = (eventName, handler) * @param handler The handler function to call when the event is observed. */ const off: (eventName: string, handler: Function) => void = (eventName, handler) => { - global.core?.events?.off(eventName, handler) + globalThis.core?.events?.off(eventName, handler) } /** @@ -25,7 +25,7 @@ const off: (eventName: string, handler: Function) => void = (eventName, handler) * @param object The object to pass to the event callback. */ const emit: (eventName: string, object: any) => void = (eventName, object) => { - global.core?.events?.emit(eventName, object) + globalThis.core?.events?.emit(eventName, object) } export const events = { diff --git a/core/src/extension.ts b/core/src/extension.ts index 22accb4b4..973d4778a 100644 --- a/core/src/extension.ts +++ b/core/src/extension.ts @@ -19,6 +19,7 @@ export interface Compatibility { const ALL_INSTALLATION_STATE = [ 'NotRequired', // not required. 'Installed', // require and installed. Good to go. 
+ 'Updatable', // require and installed but need to be updated. 'NotInstalled', // require to be installed. 'Corrupted', // require but corrupted. Need to redownload. ] as const @@ -59,6 +60,13 @@ export abstract class BaseExtension implements ExtensionType { return undefined } + /** + * Determine if the extension is updatable. + */ + updatable(): boolean { + return false + } + /** * Determine if the prerequisites for the extension are installed. * diff --git a/core/src/extensions/ai-engines/AIEngine.ts b/core/src/extensions/ai-engines/AIEngine.ts index 608b5c193..c65c081fd 100644 --- a/core/src/extensions/ai-engines/AIEngine.ts +++ b/core/src/extensions/ai-engines/AIEngine.ts @@ -14,7 +14,9 @@ export abstract class AIEngine extends BaseExtension { // The model folder modelFolder: string = 'models' - abstract models(): Promise + models(): Promise { + return Promise.resolve([]) + } /** * On extension load, subscribe to events. diff --git a/core/src/extensions/ai-engines/LocalOAIEngine.ts b/core/src/extensions/ai-engines/LocalOAIEngine.ts index 79dbcbf5e..f6557cd8f 100644 --- a/core/src/extensions/ai-engines/LocalOAIEngine.ts +++ b/core/src/extensions/ai-engines/LocalOAIEngine.ts @@ -1,4 +1,4 @@ -import { executeOnMain, getJanDataFolderPath, joinPath } from '../../core' +import { executeOnMain, getJanDataFolderPath, joinPath, systemInformation } from '../../core' import { events } from '../../events' import { Model, ModelEvent } from '../../types' import { OAIEngine } from './OAIEngine' @@ -9,9 +9,9 @@ import { OAIEngine } from './OAIEngine' */ export abstract class LocalOAIEngine extends OAIEngine { // The inference engine + abstract nodeModule: string loadModelFunctionName: string = 'loadModel' unloadModelFunctionName: string = 'unloadModel' - isRunning: boolean = false /** * On extension load, subscribe to events. 
@@ -19,22 +19,27 @@ export abstract class LocalOAIEngine extends OAIEngine { onLoad() { super.onLoad() // These events are applicable to local inference providers - events.on(ModelEvent.OnModelInit, (model: Model) => this.onModelInit(model)) - events.on(ModelEvent.OnModelStop, (model: Model) => this.onModelStop(model)) + events.on(ModelEvent.OnModelInit, (model: Model) => this.loadModel(model)) + events.on(ModelEvent.OnModelStop, (model: Model) => this.unloadModel(model)) } /** * Load the model. */ - async onModelInit(model: Model) { + async loadModel(model: Model) { if (model.engine.toString() !== this.provider) return const modelFolder = await joinPath([await getJanDataFolderPath(), this.modelFolder, model.id]) - - const res = await executeOnMain(this.nodeModule, this.loadModelFunctionName, { - modelFolder, - model, - }) + const systemInfo = await systemInformation() + const res = await executeOnMain( + this.nodeModule, + this.loadModelFunctionName, + { + modelFolder, + model, + }, + systemInfo + ) if (res?.error) { events.emit(ModelEvent.OnModelFail, { @@ -45,16 +50,14 @@ export abstract class LocalOAIEngine extends OAIEngine { } else { this.loadedModel = model events.emit(ModelEvent.OnModelReady, model) - this.isRunning = true } } /** * Stops the model. 
*/ - onModelStop(model: Model) { - if (model.engine?.toString() !== this.provider) return - - this.isRunning = false + unloadModel(model: Model) { + if (model.engine && model.engine?.toString() !== this.provider) return + this.loadedModel = undefined executeOnMain(this.nodeModule, this.unloadModelFunctionName).then(() => { events.emit(ModelEvent.OnModelStopped, {}) diff --git a/core/src/extensions/ai-engines/OAIEngine.ts b/core/src/extensions/ai-engines/OAIEngine.ts index 3e583c9b9..5936005bb 100644 --- a/core/src/extensions/ai-engines/OAIEngine.ts +++ b/core/src/extensions/ai-engines/OAIEngine.ts @@ -1,5 +1,5 @@ import { requestInference } from './helpers/sse' -import { ulid } from 'ulid' +import { ulid } from 'ulidx' import { AIEngine } from './AIEngine' import { ChatCompletionRole, @@ -23,7 +23,6 @@ import { events } from '../../events' export abstract class OAIEngine extends AIEngine { // The inference engine abstract inferenceUrl: string - abstract nodeModule: string // Controller to handle stop requests controller = new AbortController() @@ -38,7 +37,7 @@ export abstract class OAIEngine extends AIEngine { onLoad() { super.onLoad() events.on(MessageEvent.OnMessageSent, (data: MessageRequest) => this.inference(data)) - events.on(InferenceEvent.OnInferenceStopped, () => this.onInferenceStopped()) + events.on(InferenceEvent.OnInferenceStopped, () => this.stopInference()) } /** @@ -78,7 +77,13 @@ export abstract class OAIEngine extends AIEngine { ...data.model, } - requestInference(this.inferenceUrl, data.messages ?? [], model, this.controller).subscribe({ + requestInference( + this.inferenceUrl, + data.messages ?? [], + model, + this.controller, + this.headers() + ).subscribe({ next: (content: any) => { const messageContent: ThreadContent = { type: ContentType.Text, @@ -109,8 +114,15 @@ export abstract class OAIEngine extends AIEngine { /** * Stops the inference. 
*/ - onInferenceStopped() { + stopInference() { this.isCancelled = true this.controller?.abort() } + + /** + * Headers for the inference request + */ + headers(): HeadersInit { + return {} + } } diff --git a/core/src/extensions/ai-engines/RemoteOAIEngine.ts b/core/src/extensions/ai-engines/RemoteOAIEngine.ts new file mode 100644 index 000000000..5e9804b23 --- /dev/null +++ b/core/src/extensions/ai-engines/RemoteOAIEngine.ts @@ -0,0 +1,46 @@ +import { events } from '../../events' +import { Model, ModelEvent } from '../../types' +import { OAIEngine } from './OAIEngine' + +/** + * Base OAI Remote Inference Provider + * Added the implementation of loading and unloading model (applicable to local inference providers) + */ +export abstract class RemoteOAIEngine extends OAIEngine { + // The inference engine + abstract apiKey: string + /** + * On extension load, subscribe to events. + */ + onLoad() { + super.onLoad() + // These events are applicable to local inference providers + events.on(ModelEvent.OnModelInit, (model: Model) => this.loadModel(model)) + events.on(ModelEvent.OnModelStop, (model: Model) => this.unloadModel(model)) + } + + /** + * Load the model. + */ + async loadModel(model: Model) { + if (model.engine.toString() !== this.provider) return + events.emit(ModelEvent.OnModelReady, model) + } + /** + * Stops the model. 
+ */ + unloadModel(model: Model) { + if (model.engine && model.engine.toString() !== this.provider) return + events.emit(ModelEvent.OnModelStopped, {}) + } + + /** + * Headers for the inference request + */ + override headers(): HeadersInit { + return { + 'Authorization': `Bearer ${this.apiKey}`, + 'api-key': `${this.apiKey}`, + } + } +} diff --git a/core/src/extensions/ai-engines/helpers/sse.ts b/core/src/extensions/ai-engines/helpers/sse.ts index 3d810d934..723d0dc13 100644 --- a/core/src/extensions/ai-engines/helpers/sse.ts +++ b/core/src/extensions/ai-engines/helpers/sse.ts @@ -12,7 +12,8 @@ export function requestInference( id: string parameters: ModelRuntimeParams }, - controller?: AbortController + controller?: AbortController, + headers?: HeadersInit ): Observable { return new Observable((subscriber) => { const requestBody = JSON.stringify({ @@ -27,6 +28,7 @@ export function requestInference( 'Content-Type': 'application/json', 'Access-Control-Allow-Origin': '*', 'Accept': model.parameters.stream ? 'text/event-stream' : 'application/json', + ...headers, }, body: requestBody, signal: controller?.signal, diff --git a/core/src/extensions/ai-engines/index.ts b/core/src/extensions/ai-engines/index.ts index f4da62a7c..fc341380a 100644 --- a/core/src/extensions/ai-engines/index.ts +++ b/core/src/extensions/ai-engines/index.ts @@ -1,3 +1,4 @@ export * from './AIEngine' export * from './OAIEngine' export * from './LocalOAIEngine' +export * from './RemoteOAIEngine' diff --git a/core/src/extensions/monitoring.ts b/core/src/extensions/monitoring.ts index 8d61580fc..2d75e0218 100644 --- a/core/src/extensions/monitoring.ts +++ b/core/src/extensions/monitoring.ts @@ -1,5 +1,5 @@ import { BaseExtension, ExtensionTypeEnum } from '../extension' -import { GpuSetting, MonitoringInterface } from '../index' +import { GpuSetting, MonitoringInterface, OperatingSystemInfo } from '../index' /** * Monitoring extension for system monitoring. 
@@ -16,4 +16,5 @@ export abstract class MonitoringExtension extends BaseExtension implements Monit abstract getGpuSetting(): Promise abstract getResourcesInfo(): Promise abstract getCurrentLoad(): Promise + abstract getOsInfo(): Promise } diff --git a/core/src/fs.ts b/core/src/fs.ts index 71538ae9c..2eb22f9d6 100644 --- a/core/src/fs.ts +++ b/core/src/fs.ts @@ -4,7 +4,7 @@ import { FileStat } from './types' * Writes data to a file at the specified path. * @returns {Promise} A Promise that resolves when the file is written successfully. */ -const writeFileSync = (...args: any[]) => global.core.api?.writeFileSync(...args) +const writeFileSync = (...args: any[]) => globalThis.core.api?.writeFileSync(...args) /** * Writes blob data to a file at the specified path. @@ -13,47 +13,52 @@ const writeFileSync = (...args: any[]) => global.core.api?.writeFileSync(...args * @returns */ const writeBlob: (path: string, data: string) => Promise = (path, data) => - global.core.api?.writeBlob(path, data) + globalThis.core.api?.writeBlob(path, data) /** * Reads the contents of a file at the specified path. * @returns {Promise} A Promise that resolves with the contents of the file. */ -const readFileSync = (...args: any[]) => global.core.api?.readFileSync(...args) +const readFileSync = (...args: any[]) => globalThis.core.api?.readFileSync(...args) /** * Check whether the file exists * @param {string} path * @returns {boolean} A boolean indicating whether the path is a file. */ -const existsSync = (...args: any[]) => global.core.api?.existsSync(...args) +const existsSync = (...args: any[]) => globalThis.core.api?.existsSync(...args) /** * List the directory files * @returns {Promise} A Promise that resolves with the contents of the directory. */ -const readdirSync = (...args: any[]) => global.core.api?.readdirSync(...args) +const readdirSync = (...args: any[]) => globalThis.core.api?.readdirSync(...args) /** * Creates a directory at the specified path. 
* @returns {Promise} A Promise that resolves when the directory is created successfully. */ -const mkdirSync = (...args: any[]) => global.core.api?.mkdirSync(...args) +const mkdirSync = (...args: any[]) => globalThis.core.api?.mkdirSync(...args) + +const mkdir = (...args: any[]) => globalThis.core.api?.mkdir(...args) /** * Removes a directory at the specified path. * @returns {Promise} A Promise that resolves when the directory is removed successfully. */ const rmdirSync = (...args: any[]) => - global.core.api?.rmdirSync(...args, { recursive: true, force: true }) + globalThis.core.api?.rmdirSync(...args, { recursive: true, force: true }) + +const rm = (path: string) => globalThis.core.api?.rm(path) + /** * Deletes a file from the local file system. * @param {string} path - The path of the file to delete. * @returns {Promise} A Promise that resolves when the file is deleted. */ -const unlinkSync = (...args: any[]) => global.core.api?.unlinkSync(...args) +const unlinkSync = (...args: any[]) => globalThis.core.api?.unlinkSync(...args) /** * Appends data to a file at the specified path. */ -const appendFileSync = (...args: any[]) => global.core.api?.appendFileSync(...args) +const appendFileSync = (...args: any[]) => globalThis.core.api?.appendFileSync(...args) /** * Synchronizes a file from a source path to a destination path. @@ -62,15 +67,15 @@ const appendFileSync = (...args: any[]) => global.core.api?.appendFileSync(...ar * @returns {Promise} - A promise that resolves when the file has been successfully synchronized. */ const syncFile: (src: string, dest: string) => Promise = (src, dest) => - global.core.api?.syncFile(src, dest) + globalThis.core.api?.syncFile(src, dest) /** * Copy file sync. 
*/ -const copyFileSync = (...args: any[]) => global.core.api?.copyFileSync(...args) +const copyFileSync = (...args: any[]) => globalThis.core.api?.copyFileSync(...args) const copyFile: (src: string, dest: string) => Promise = (src, dest) => - global.core.api?.copyFile(src, dest) + globalThis.core.api?.copyFile(src, dest) /** * Gets the file's stats. @@ -82,7 +87,7 @@ const copyFile: (src: string, dest: string) => Promise = (src, dest) => const fileStat: (path: string, outsideJanDataFolder?: boolean) => Promise = ( path, outsideJanDataFolder -) => global.core.api?.fileStat(path, outsideJanDataFolder) +) => globalThis.core.api?.fileStat(path, outsideJanDataFolder) // TODO: Export `dummy` fs functions automatically // Currently adding these manually @@ -92,7 +97,9 @@ export const fs = { existsSync, readdirSync, mkdirSync, + mkdir, rmdirSync, + rm, unlinkSync, appendFileSync, copyFileSync, diff --git a/core/src/node/api/processors/fsExt.ts b/core/src/node/api/processors/fsExt.ts index 4787da65b..7b08e24c9 100644 --- a/core/src/node/api/processors/fsExt.ts +++ b/core/src/node/api/processors/fsExt.ts @@ -88,4 +88,28 @@ export class FSExt implements Processor { }) }) } + + mkdir(path: string): Promise { + return new Promise((resolve, reject) => { + fs.mkdir(path, { recursive: true }, (err) => { + if (err) { + reject(err) + } else { + resolve() + } + }) + }) + } + + rm(path: string): Promise { + return new Promise((resolve, reject) => { + fs.rm(path, { recursive: true }, (err) => { + if (err) { + reject(err) + } else { + resolve() + } + }) + }) + } } diff --git a/core/src/node/api/restful/common.ts b/core/src/node/api/restful/common.ts index 433632989..c8061c34a 100644 --- a/core/src/node/api/restful/common.ts +++ b/core/src/node/api/restful/common.ts @@ -40,7 +40,7 @@ export const commonRouter = async (app: HttpServer) => { }) // Threads - app.post(`/threads/`, async (req, res) => createThread(req.body)) + app.post(`/threads`, async (req, res) => createThread(req.body)) 
app.get(`/threads/:threadId/messages`, async (req, res) => getMessages(req.params.threadId).then(normalizeData) diff --git a/core/src/node/api/restful/helper/builder.ts b/core/src/node/api/restful/helper/builder.ts index 6b9bbb3a8..1ef8d34a4 100644 --- a/core/src/node/api/restful/helper/builder.ts +++ b/core/src/node/api/restful/helper/builder.ts @@ -216,7 +216,7 @@ export const createMessage = async (threadId: string, message: any) => { const threadMessagesFileName = 'messages.jsonl' try { - const { ulid } = require('ulid') + const { ulid } = require('ulidx') const msgId = ulid() const createdAt = Date.now() const threadMessage: ThreadMessage = { diff --git a/core/src/node/extension/store.ts b/core/src/node/extension/store.ts index 93b1aeb2b..630756485 100644 --- a/core/src/node/extension/store.ts +++ b/core/src/node/extension/store.ts @@ -93,8 +93,7 @@ export function persistExtensions() { */ export async function installExtensions(extensions: any) { const installed: Extension[] = [] - for (const ext of extensions) { - // Set install options and activation based on input type + const installations = extensions.map((ext: any): Promise => { const isObject = typeof ext === 'object' const spec = isObject ? [ext.specifier, ext] : [ext] const activate = isObject ? 
ext.activate !== false : true @@ -102,15 +101,17 @@ export async function installExtensions(extensions: any) { // Install and possibly activate extension const extension = new Extension(...spec) if (!extension.origin) { - continue + return Promise.resolve() } - await extension._install() - if (activate) extension.setActive(true) + return extension._install().then(() => { + if (activate) extension.setActive(true) + // Add extension to store if needed + addExtension(extension) + installed.push(extension) + }) + }) - // Add extension to store if needed - addExtension(extension) - installed.push(extension) - } + await Promise.all(installations) // Return list of all installed extensions return installed diff --git a/core/src/node/helper/config.ts b/core/src/node/helper/config.ts index 06f2b03cd..b5ec2e029 100644 --- a/core/src/node/helper/config.ts +++ b/core/src/node/helper/config.ts @@ -82,26 +82,34 @@ export const getJanExtensionsPath = (): string => { */ export const physicalCpuCount = async (): Promise => { const platform = os.platform() - if (platform === 'linux') { - const output = await exec('lscpu -p | egrep -v "^#" | sort -u -t, -k 2,4 | wc -l') - return parseInt(output.trim(), 10) - } else if (platform === 'darwin') { - const output = await exec('sysctl -n hw.physicalcpu_max') - return parseInt(output.trim(), 10) - } else if (platform === 'win32') { - const output = await exec('WMIC CPU Get NumberOfCores') - return output - .split(os.EOL) - .map((line: string) => parseInt(line)) - .filter((value: number) => !isNaN(value)) - .reduce((sum: number, number: number) => sum + number, 1) - } else { - const cores = os.cpus().filter((cpu: any, index: number) => { - const hasHyperthreading = cpu.model.includes('Intel') - const isOdd = index % 2 === 1 - return !hasHyperthreading || isOdd - }) - return cores.length + try { + if (platform === 'linux') { + const output = await exec('lscpu -p | egrep -v "^#" | sort -u -t, -k 2,4 | wc -l') + return parseInt(output.trim(), 
10) + } else if (platform === 'darwin') { + const output = await exec('sysctl -n hw.physicalcpu_max') + return parseInt(output.trim(), 10) + } else if (platform === 'win32') { + const output = await exec('WMIC CPU Get NumberOfCores') + return output + .split(os.EOL) + .map((line: string) => parseInt(line)) + .filter((value: number) => !isNaN(value)) + .reduce((sum: number, number: number) => sum + number, 1) + } else { + const cores = os.cpus().filter((cpu: any, index: number) => { + const hasHyperthreading = cpu.model.includes('Intel') + const isOdd = index % 2 === 1 + return !hasHyperthreading || isOdd + }) + return cores.length + } + } catch (err) { + console.warn('Failed to get physical CPU count', err) + // Divide by 2 to get rid of hyper threading + const coreCount = Math.ceil(os.cpus().length / 2) + console.debug('Using node API to get physical CPU count:', coreCount) + return coreCount } } @@ -118,7 +126,7 @@ const exec = async (command: string): Promise => { } export const getEngineConfiguration = async (engineId: string) => { - if (engineId !== 'openai') { + if (engineId !== 'openai' && engineId !== 'groq') { return undefined } const directoryPath = join(getJanDataFolderPath(), 'engines') diff --git a/core/src/node/helper/resource.ts b/core/src/node/helper/resource.ts index c79a63688..faaaace05 100644 --- a/core/src/node/helper/resource.ts +++ b/core/src/node/helper/resource.ts @@ -1,6 +1,6 @@ import { SystemResourceInfo } from '../../types' import { physicalCpuCount } from './config' -import { log, logServer } from './log' +import { log } from './log' export const getSystemResourceInfo = async (): Promise => { const cpu = await physicalCpuCount() diff --git a/core/src/node/index.ts b/core/src/node/index.ts index 31f2f076e..02d921fd6 100644 --- a/core/src/node/index.ts +++ b/core/src/node/index.ts @@ -4,3 +4,5 @@ export * from './extension/manager' export * from './extension/store' export * from './api' export * from './helper' +export * from './../types' 
+export * from './../api' diff --git a/core/src/types/miscellaneous/systemResourceInfo.ts b/core/src/types/miscellaneous/systemResourceInfo.ts index f7dd4a82b..fb059b1ba 100644 --- a/core/src/types/miscellaneous/systemResourceInfo.ts +++ b/core/src/types/miscellaneous/systemResourceInfo.ts @@ -30,3 +30,27 @@ export type GpuSettingInfo = { name: string arch?: string } + +export type SystemInformation = { + gpuSetting: GpuSetting + osInfo?: OperatingSystemInfo +} + +export const SupportedPlatforms = ['win32', 'linux', 'darwin'] as const +export type SupportedPlatformTuple = typeof SupportedPlatforms +export type SupportedPlatform = SupportedPlatformTuple[number] + +export type OperatingSystemInfo = { + platform: SupportedPlatform | 'unknown' + arch: string + release: string + machine: string + version: string + totalMem: number + freeMem: number +} + +export type CpuCoreInfo = { + model: string + speed: number +} diff --git a/core/src/types/model/modelEntity.ts b/core/src/types/model/modelEntity.ts index 74568686b..d62a7c387 100644 --- a/core/src/types/model/modelEntity.ts +++ b/core/src/types/model/modelEntity.ts @@ -18,6 +18,7 @@ export type ModelInfo = { export enum InferenceEngine { nitro = 'nitro', openai = 'openai', + groq = 'groq', triton_trtllm = 'triton_trtllm', nitro_tensorrt_llm = 'nitro-tensorrt-llm', diff --git a/docs/blog/01-january-10-2024-bitdefender-false-positive-flag.mdx b/docs/blog/2024-01-10-bitdefender-false-positive-flag.mdx similarity index 96% rename from docs/blog/01-january-10-2024-bitdefender-false-positive-flag.mdx rename to docs/blog/2024-01-10-bitdefender-false-positive-flag.mdx index 3e3923337..4e6c9b542 100644 --- a/docs/blog/01-january-10-2024-bitdefender-false-positive-flag.mdx +++ b/docs/blog/2024-01-10-bitdefender-false-positive-flag.mdx @@ -1,10 +1,27 @@ --- -title: "Post Mortem: Bitdefender False Positive Flag" +title: 'Post Mortem: Bitdefender False Positive Flag' description: "10th January 2024, Jan's 0.4.4 Release on Windows 
triggered Bitdefender to incorrectly flag it as infected with Gen:Variant.Tedy.258323, leading to automatic quarantine warnings on users' computers." slug: /postmortems/january-10-2024-bitdefender-false-positive-flag tags: [Postmortem] -authors: [hientominh, Van-QA] -date: 2024-01-10 +keywords: + [ + postmortem, + bitdefender, + false positive, + antivirus, + jan, + nitro, + incident, + incident response, + supply chain security, + user communication, + documentation, + antivirus compatibility, + cross-platform testing, + proactive incident response, + user education, + lessons learned, + ] --- Following the recent incident related to Jan version 0.4.4 triggering Bitdefender on Windows with Gen:Variant.Tedy.258323 on January 10, 2024, we wanted to provide a comprehensive postmortem and outline the necessary follow-up actions. diff --git a/docs/blog/2024-03-19-TensorRT-LLM.md b/docs/blog/2024-03-19-TensorRT-LLM.md new file mode 100644 index 000000000..03afb2179 --- /dev/null +++ b/docs/blog/2024-03-19-TensorRT-LLM.md @@ -0,0 +1,145 @@ +--- +title: Benchmarking TensorRT-LLM vs. llama.cpp +description: Jan has added support for the TensorRT-LLM Inference Engine, as an alternative to llama.cpp. We provide a performance benchmark that shows the head-to-head comparison of the two Inference Engines and model formats, with TensorRT-LLM providing better performance but consuming significantly more VRAM and RAM. +tags: [Nvidia, TensorRT-LLM, llama.cpp, 3090, 4090, "inference engine"] +unlisted: true +--- + +Jan has added support for [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) as an alternative to the default [llama.cpp](https://github.com/ggerganov/llama.cpp) inference engine. TensorRT-LLM allows Nvidia GPU owners to run blazing fast LLM inference as a hardware-optimized LLM inference engine that compiles models to [run extremely fast on Nvidia GPUs](https://blogs.nvidia.com/blog/tensorrt-llm-windows-stable-diffusion-rtx/). 
+ +You can follow our [TensorRT-LLM Guide](/guides/providers/tensorrt-llm) to try it out today. We've also added a few TensorRT-LLM models to Jan's Model Hub for download: + +- Mistral 7b +- TinyLlama-1.1b +- TinyJensen-1.1b 😂 + +:::tip + +TensorRT-LLM support is available in [v0.4.9](https://github.com/janhq/jan/releases/tag/v0.4.9), but should be considered an experimental feature. + +Please report bugs on [Github](https://github.com/janhq/jan) or on our Discord's [#tensorrt-llm](https://discord.com/channels/1107178041848909847/1201832734704795688) channel. + +::: + +## Performance Benchmarks + + +We were really curious to see how TensorRT-LLM would perform vs. llama.cpp on consumer-grade GPUs. TensorRT-LLM has previously been shown by Nvidia to reach performance of up to [10,000 tokens/s](https://nvidia.github.io/TensorRT-LLM/blogs/H100vsA100.html) on datacenter-grade GPUs. As most of Jan's users are proud card carrying members of the [GPU Poor](https://www.semianalysis.com/p/google-gemini-eats-the-world-gemini#the-gpu-poor), we wanted to see how the two inference engines performed on the same hardware. + +:::info + +An interesting aside: Jan actually started out in June 2023 building on [FastTransformer](https://github.com/NVIDIA/FasterTransformer), the precursor library to TensorRT-LLM. TensorRT-LLM was released in September 2023, making it a very young library. We're excited to see its roadmap develop! + +::: + +### Test Setup + +We picked 3 hardware platforms to run the test on, based on Jan's userbase's self-reported common hardware platforms. 
+ +| NVIDIA GPU | VRAM Used (GB) | CUDA Cores | Tensor Cores | Memory Bus Width (bit) | Memory Bandwidth (GB/s) | +| ------------------------- | -------------- | ---------- | ------------ | ---------------------- | ----------------------- | +| RTX 4090 Desktop (Ada) | 24 | 16,384 | 512 | 384 | ~1000 | +| RTX 3090 Desktop (Ampere) | 24 | 10,496 | 328 | 384 | 935.8 | +| RTX 4060 Laptop (Ada) | 8 | 3,072 | 96 | 128 | 272 | + +:::warning[Low-spec Machines?] + +We didn't bother including low-spec machines: TensorRT-LLM is meant for performance, and simply doesn't work on lower grade Nvidia GPUs, or computers without GPUs. + +TensorRT-LLM provides blazing fast performance at the cost of [memory usage](https://nvidia.github.io/TensorRT-LLM/memory.html). This means that the performance improvements only show up in higher-range GPUs with larger VRAMs. + +We've found that [llama.cpp](https://github.com/ggerganov/llama.cpp) does an incredible job of democratizing inference to the [GPU Poor](https://www.semianalysis.com/p/google-gemini-eats-the-world-gemini#the-gpu-poor) with CPU-only or lower-range GPUs. Huge shout outs to the [llama.cpp maintainers](https://github.com/ggerganov/llama.cpp/graphs/contributors) and the [ggml.ai](https://ggml.ai/) team. + +::: + +We chose the popular Mistral 7b model to run on both GGUF and TensorRT-LLM, picking comparable quantizations. 
+ +#### llama.cpp Setup +- For llama.cpp, we used `Mistral-7b-q4_k_m` +- [ ] Fill in `ngl` params, GPU offload etc + +#### TensorRT-LLM Setup +- For TensorRT-LLM, we used `Mistral-7b-int4 AWQ` +- We ran TensorRT-LLM with `free_gpu_memory_fraction` to test it with the lowest VRAM consumption (performance may be affected) +- Note: We picked AWQ for TensorRT-LLM as a handicap as AWQ supposedly sacrifices performance for quality + +#### Experiment Setup +We ran the experiment using a standardized inference request in a sandboxed environment on the same machine: +- We ran tests 5 times for each inference engine, on a baremetal PC with no other applications open +- Each inference request was of `batch_size` 1 and `input_len` 2048, `output_len` 512 as a realistic test case +- CPU and Memory usage were obtained from.... Windows Task Manager 😱 +- GPU usage was obtained from `nvtop`, `htop`, and `nvidia-smi` + +## Results + +Our biggest takeaway: TensorRT-LLM is faster than llama.cpp on 4090s and 3090s with larger VRAMs. However, on smaller GPUs (e.g. Laptop 4060 GPUs), llama.cpp came out ahead. + +| | 4090 Desktop | 3090 Desktop | 4060 Laptop | +| ------------ | ------------ | ------------ | ----------- | +| TensorRT-LLM | ✅ 159t/s | ✅ 140.27t/s | ❌ 19t/s | +| llama.cpp | 101.3t/s | 90t/s | 22t/s | + +### RTX-4090 Desktop + +:::info[Hardware Details] + +- CPU: Intel 13th series +- GPU: NVIDIA GPU 4090 (Ada - sm 89) +- RAM: 32GB +- OS: Windows 11 Pro on Proxmox + +::: + +Nvidia's RTX-4090 is their top-of-the-line consumer GPU, and retails for [approximately $2,000](https://www.amazon.com/rtx-4090/s?k=rtx+4090). 
+ +#### Mistral-7b int4 + +| Metrics | GGUF (using GPU) | TensorRT-LLM | Difference | +| -------------------- | -------------------- | ------------ | -------------- | +| Throughput (token/s) | 101.3 | 159 | ✅ 57% faster | +| VRAM Used (GB) | 5.5 | 6.3 | 🤔 14% more | +| RAM Used (GB) | 0.54 | 0.42 | 🤯 20% less | +| Disk Size (GB) | 4.07 | 3.66 | 🤯 10% smaller | + + +### RTX-3090 Desktop + +:::info[Hardware Details] + +- CPU: Intel 13th series +- GPU: NVIDIA GPU 3090 (Ampere - sm 86) +- RAM: 64GB +- OS: Windows + +::: + +#### Mistral-7b int4 + +| Metrics | GGUF (using GPU) | TensorRT-LLM | Difference | +| -------------------- | -------------------- | ------------ | ------------ | +| Throughput (token/s) | 90 | ✅ 140.27 | ✅ 55% faster | +| VRAM Used (GB) | 6.0 | 6.8 | 🤔 13% more | +| RAM Used (GB) | 0.54 | 0.42 | 🤯 22% less | +| Disk Size (GB) | 4.07 | 3.66 | 🤯 10% less | + +### RTX-4060 Laptop + +- [ ] Dan to re-run perf tests and fill in details + +:::info[Hardware Details] + +- Manufacturer: Acer Nitro 16 Phenix +- CPU: Ryzen 7000 +- RAM: 16GB +- GPU: NVIDIA Laptop GPU 4060 (Ada) + +::: + +#### Mistral-7b int4 + +| Metrics | GGUF (using the GPU) | TensorRT-LLM | Difference | +| -------------------- | -------------------- | ------------ | ---------- | +| Throughput (token/s) | 22 | ❌ 19 | | +| VRAM Used (GB) | 2.1 | 7.7 | | +| RAM Used (GB) | 0.3 | 13.5 | | +| Disk Size (GB) | 4.07 | 4.07 | | \ No newline at end of file diff --git a/docs/docs/about/about.md b/docs/docs/about/about.md index a047ab910..1e755752c 100644 --- a/docs/docs/about/about.md +++ b/docs/docs/about/about.md @@ -4,14 +4,15 @@ slug: /about description: Jan is a desktop application that turns computers into thinking machines. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, about Jan, desktop application, thinking machine, diff --git a/docs/docs/about/assets/solar-punk.webp b/docs/docs/about/assets/solar-punk.webp new file mode 100644 index 000000000..20829fea4 Binary files /dev/null and b/docs/docs/about/assets/solar-punk.webp differ diff --git a/docs/docs/about/assets/vision-1.webp b/docs/docs/about/assets/vision-1.webp new file mode 100644 index 000000000..66e41b543 Binary files /dev/null and b/docs/docs/about/assets/vision-1.webp differ diff --git a/docs/docs/about/faq.md b/docs/docs/about/faq.md index 29832e211..12c08a244 100644 --- a/docs/docs/about/faq.md +++ b/docs/docs/about/faq.md @@ -1,3 +1,7 @@ +--- +title: Frequently Asked Questions (FAQ) - Jan +--- + # Frequently Asked Questions (FAQ) ## What is Jan? diff --git a/docs/docs/about/roadmap.md b/docs/docs/about/roadmap.md index 1c789d733..82b4fa34d 100644 --- a/docs/docs/about/roadmap.md +++ b/docs/docs/about/roadmap.md @@ -3,4 +3,4 @@ title: Roadmap --- - [ ] [Immediate Roadmap on Github](https://github.com/orgs/janhq/projects/5/views/16) -- [ ] [Longer-term Roadmap on Discord](https://discord.gg/Ey62mynnYr) \ No newline at end of file +- [ ] [Longer-term Roadmap on Discord](https://discord.gg/Ey62mynnYr) diff --git a/docs/docs/about/vision.md b/docs/docs/about/vision.md new file mode 100644 index 000000000..7c3ffac63 --- /dev/null +++ b/docs/docs/about/vision.md @@ -0,0 +1,74 @@ +--- +title: Jan's Vision +slug: /vision +description: Jan is a desktop application that turns computers into thinking machines. 
+keywords: + [ + Jan AI, + Jan, + ChatGPT alternative, + local AI, + private AI, + conversational AI, + no-subscription fee, + large language model, + about Jan, + desktop application, + thinking machine, + jan vision, + ] +--- + +## Jan's vision is to shape a future where humans and machines collaborate, continuing our legacy as toolmakers + +Throughout history, humanity has thrived by mastering tools, from [controlling fire](https://en.wikipedia.org/wiki/Control_of_fire_by_early_humans) to [inventing the wheel](https://en.wikipedia.org/wiki/Wheel). These leaps weren't just about survival, they were foundational to our civilization. + +Today, we stand on the brink of a new frontier with artificial intelligence. AI is not merely another tool, it represents a new form of collaboration between humans and machines - promising to enhance our creativity, augment our lives, and deepen our understanding of the world. + +![jan ai shapes the future](./assets/vision-1.webp) + +In the future we envision, AI will be as integral to our lives as fire and the wheel once were, with each individual having their own machines/robots. Mastering AI, like mastering fire, will require understanding its potential, respecting its power, and learning to control it for the betterment of humanity. + +### Inspired by Science Fiction, Grounded in Optimism + +Our vision is influenced by the harmonious coexistence of humans and machines in science fiction. From the helpful companionship of [C3PO](https://tr.wikipedia.org/wiki/C-3PO) and [Jarvis](https://en.wikipedia.org/wiki/J.A.R.V.I.S.) to the strategic alliances in [Halo](https://www.imdb.com/title/tt2934286/), these stories showcase a future where technology amplifies human potential. + +### Jan's Role in Shaping the Future + +Jan is our contribution to this future - a tool designed to augment human capabilities, not replace them. 
We are committed to developing AI that works for humanity, enhancing our creativity, productivity, and well-being. With Jan, we aim to empower individuals and communities to achieve more, together. + +Our vision is not just a dream, it's a blueprint for a future where technology and humanity harmonize to unlock unprecedented possibilities. + +## How we imagine the world in the future + +We are fundamentally optimistic about the future. Jan aligns with the [Solarpunk movement](https://en.wikipedia.org/wiki/Solarpunk), which envisions a world where technology and nature coexist and flourish together. We reject the notion of climate doomerism and instead, focus on the positive impact we can make with AI. + +![solarpunk and jan](./assets/solar-punk.webp) + +Imagine a world where every individual is empowered by their own robots, where machines are not just tools but partners in our journey. This is the future Jan is striving to create. + +Now, let's take a glimpse into this future through a day in the life of Emre, a reflection of how Jan's vision manifests in everyday life. + +## A Day in the Life of Emre in 2050 + +> In 2050, Emre wakes up to the gentle sound of birds chirping, a soothing alarm created by **his own AI robot, Jan**. As he gets ready for the day, **Jan has already prepared** his schedule, factoring in his preferences and the day's weather. +> +> At breakfast, Emre discusses his upcoming project with **Jan, who offers insights and suggestions**, enhancing Emre's creativity. As he heads to work, his self-driving car, **integrated with Jan**, takes the most scenic and efficient route, allowing Emre to enjoy a moment of tranquility. +> +> In the office, Emre collaborates with colleagues from around the globe in a virtual workspace. **Jan assists** by translating languages in real-time and organizing ideas, making collaboration seamless and productive. +> +> During lunch, Emre decides to explore a new hobby. 
**Jan quickly curates** a list of resources and connects Emre with a virtual mentor, making learning accessible and enjoyable. +> +> In the afternoon, Emre takes a break to connect with nature. His smart garden, **managed by Jan**, is thriving, blending technology with the natural world in perfect harmony. +> +> As the day winds down, Emre reflects on his accomplishments. **With Jan's help**, he's been able to focus on what truly matters, achieving a balance between work, personal growth, and well-being. +> +> In 2050, Jan is more than just a tool, it's an integral part of Emre's life, **augmenting his abilities** and enabling him to live a more fulfilling life. + +What a day, hah! + +--- + +Jan's vision commits to developing thinking machines that work alongside humans - learning, adapting, and contributing to a broader, smarter society. This journey isn't just about technology. It's about creating a future where humans and machines collaborate. + +Let's build the future together - join the journey! 
diff --git a/docs/docs/acknowledgements.md b/docs/docs/acknowledgements.md index c68c4ed86..0ec3176c0 100644 --- a/docs/docs/acknowledgements.md +++ b/docs/docs/acknowledgements.md @@ -4,14 +4,15 @@ description: Jan is a ChatGPT-alternative that runs on your own computer, with a slug: /acknowledgements keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, acknowledgements, third-party libraries, ] @@ -24,3 +25,4 @@ We would like to express our gratitude to the following third-party libraries th - [llama.cpp](https://github.com/ggerganov/llama.cpp/blob/master/LICENSE) - [LangChain.js](https://github.com/langchain-ai/langchainjs/blob/main/LICENSE) - [TensorRT](https://github.com/NVIDIA/TensorRT/blob/main/LICENSE) +- [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM/blob/main/LICENSE) diff --git a/docs/docs/community/community.mdx b/docs/docs/community/community.mdx index d4866490e..f4ce0da87 100644 --- a/docs/docs/community/community.mdx +++ b/docs/docs/community/community.mdx @@ -4,14 +4,20 @@ slug: /community description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, + about Jan, + desktop application, + thinking machine, + community, + socials, ] --- diff --git a/docs/docs/developer/01-overview/01-architecture.md b/docs/docs/developer/01-overview/01-architecture.md index 432b12537..09fffed69 100644 --- a/docs/docs/developer/01-overview/01-architecture.md +++ b/docs/docs/developer/01-overview/01-architecture.md @@ -4,14 +4,16 @@ slug: /developer/architecture description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, + architecture, ] --- diff --git a/docs/docs/developer/01-overview/02-file-based.md b/docs/docs/developer/01-overview/02-file-based.md index 653eba3f5..2b0c15a68 100644 --- a/docs/docs/developer/01-overview/02-file-based.md +++ b/docs/docs/developer/01-overview/02-file-based.md @@ -4,14 +4,16 @@ slug: /developer/file-based description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, + file based approach, ] --- diff --git a/docs/docs/developer/01-overview/03-user-interface.md b/docs/docs/developer/01-overview/03-user-interface.md index eb6eac89e..3454b2f61 100644 --- a/docs/docs/developer/01-overview/03-user-interface.md +++ b/docs/docs/developer/01-overview/03-user-interface.md @@ -4,14 +4,16 @@ slug: /developer/ui description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, + UI kit, ] --- diff --git a/docs/docs/developer/01-overview/04-install-and-prerequisites.md b/docs/docs/developer/01-overview/04-install-and-prerequisites.md index 9752f7b72..efd7ebe76 100644 --- a/docs/docs/developer/01-overview/04-install-and-prerequisites.md +++ b/docs/docs/developer/01-overview/04-install-and-prerequisites.md @@ -4,14 +4,15 @@ slug: /developer/prereq description: Guide to install and setup Jan for development. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, installation, prerequisites, developer setup, diff --git a/docs/docs/developer/01-overview/README.md b/docs/docs/developer/01-overview/README.md index 7bc3524de..b73c77aeb 100644 --- a/docs/docs/developer/01-overview/README.md +++ b/docs/docs/developer/01-overview/README.md @@ -4,14 +4,15 @@ slug: /developer description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/02-build-assistant/01-your-first-assistant.md b/docs/docs/developer/02-build-assistant/01-your-first-assistant.md index 16b80fc5e..f96dd2802 100644 --- a/docs/docs/developer/02-build-assistant/01-your-first-assistant.md +++ b/docs/docs/developer/02-build-assistant/01-your-first-assistant.md @@ -1,17 +1,18 @@ --- title: Your First Assistant -slug: /developer/build-assistant/your-first-assistant/ +slug: /developer/assistant/your-first-assistant/ description: A quick start on how to build an assistant. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, quick start, build assistant, ] @@ -20,4 +21,3 @@ keywords: :::caution This is currently under development. 
::: - diff --git a/docs/docs/developer/02-build-assistant/02-assistant-anatomy.md b/docs/docs/developer/02-build-assistant/02-assistant-anatomy.md index e6951a05b..124817372 100644 --- a/docs/docs/developer/02-build-assistant/02-assistant-anatomy.md +++ b/docs/docs/developer/02-build-assistant/02-assistant-anatomy.md @@ -1,17 +1,18 @@ --- title: Anatomy of an Assistant -slug: /developer/build-assistant/assistant-anatomy/ +slug: /developer/assistant/assistant-anatomy/ description: An overview of assistant.json keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, build assistant, assistant anatomy, ] diff --git a/docs/docs/developer/02-build-assistant/03-package-your-assistant.md b/docs/docs/developer/02-build-assistant/03-package-your-assistant.md index 12fa1510c..e18bcc5f6 100644 --- a/docs/docs/developer/02-build-assistant/03-package-your-assistant.md +++ b/docs/docs/developer/02-build-assistant/03-package-your-assistant.md @@ -1,17 +1,18 @@ --- title: Package your Assistant -slug: /developer/build-assistant/package-your-assistant/ +slug: /developer/assistant/package-your-assistant/ description: Package your assistant for sharing and publishing. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, quick start, build assistant, ] diff --git a/docs/docs/developer/02-build-assistant/README.mdx b/docs/docs/developer/02-build-assistant/README.mdx index 29cf8b63d..35944a8cc 100644 --- a/docs/docs/developer/02-build-assistant/README.mdx +++ b/docs/docs/developer/02-build-assistant/README.mdx @@ -1,17 +1,10 @@ --- title: Build an Assistant -slug: /developer/build-assistant +slug: /developer/assistant description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, + Jan, Rethink the Computer, local AI, privacy focus, free and open source, private and offline, conversational AI, no-subscription fee, large language models, build assistant, ] --- diff --git a/docs/docs/developer/03-build-engine/01-your-first-engine.md b/docs/docs/developer/03-build-engine/01-your-first-engine.md index 0670d63c4..2c69b2199 100644 --- a/docs/docs/developer/03-build-engine/01-your-first-engine.md +++ b/docs/docs/developer/03-build-engine/01-your-first-engine.md @@ -1,17 +1,18 @@ --- title: Your First Engine -slug: /developer/build-engine/your-first-engine/ +slug: /developer/engine/your-first-engine/ description: A quick start on how to build your first engine keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, quick start, build engine, ] @@ -21,4 +22,4 @@ keywords: This is currently under development. 
::: -A quickstart on how to integrate tensorrt llm \ No newline at end of file +A quickstart on how to integrate tensorrt llm diff --git a/docs/docs/developer/03-build-engine/02-engine-anatomy.md b/docs/docs/developer/03-build-engine/02-engine-anatomy.md index 2f8c69a04..5f45339eb 100644 --- a/docs/docs/developer/03-build-engine/02-engine-anatomy.md +++ b/docs/docs/developer/03-build-engine/02-engine-anatomy.md @@ -1,17 +1,18 @@ --- title: Anatomy of an Engine -slug: /developer/build-engine/engine-anatomy +slug: /developer/engine/engine-anatomy description: An overview of engine.json keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, build engine, engine anatomy, ] @@ -19,4 +20,4 @@ keywords: :::caution This is currently under development. -::: \ No newline at end of file +::: diff --git a/docs/docs/developer/03-build-engine/03-package-your-engine.md b/docs/docs/developer/03-build-engine/03-package-your-engine.md index 794e1abb2..a44807c68 100644 --- a/docs/docs/developer/03-build-engine/03-package-your-engine.md +++ b/docs/docs/developer/03-build-engine/03-package-your-engine.md @@ -1,17 +1,18 @@ --- title: Package your Engine -slug: /developer/build-engine/package-your-engine/ +slug: /developer/engine/package-your-engine/ description: Package your engine for sharing and publishing. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, build engine, engine anatomy, ] diff --git a/docs/docs/developer/03-build-engine/README.mdx b/docs/docs/developer/03-build-engine/README.mdx index a2521ff54..e4c5980e0 100644 --- a/docs/docs/developer/03-build-engine/README.mdx +++ b/docs/docs/developer/03-build-engine/README.mdx @@ -1,17 +1,18 @@ --- title: Build an Inference Engine -slug: /developer/build-engine/ +slug: /developer/engine/ description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, build assistant, ] --- diff --git a/docs/docs/developer/04-build-extension/01-your-first-extension.md b/docs/docs/developer/04-build-extension/01-your-first-extension.md index f89f34053..4dd413ca6 100644 --- a/docs/docs/developer/04-build-extension/01-your-first-extension.md +++ b/docs/docs/developer/04-build-extension/01-your-first-extension.md @@ -1,17 +1,18 @@ --- title: Your First Extension -slug: /developer/build-extension/your-first-extension/ +slug: /developer/extension/your-first-extension/ description: A quick start on how to build your first extension keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, quick start, build extension, ] @@ -76,13 +77,13 @@ There are a few things to keep in mind when writing your extension code: In `index.ts`, you will see that the extension function will 
return a `Promise`. ```typescript - import { core } from "@janhq/core"; + import { core } from '@janhq/core' function onStart(): Promise { - return core.invokePluginFunc(MODULE_PATH, "run", 0); + return core.invokePluginFunc(MODULE_PATH, 'run', 0) } ``` For more information about the Jan Extension Core module, see the [documentation](https://github.com/janhq/jan/blob/main/core/README.md). -Now, go ahead and start customizing your extension! Happy coding! \ No newline at end of file +Now, go ahead and start customizing your extension! Happy coding! diff --git a/docs/docs/developer/04-build-extension/02-extension-anatomy.md b/docs/docs/developer/04-build-extension/02-extension-anatomy.md index 7c3cd1911..b41f1f0f4 100644 --- a/docs/docs/developer/04-build-extension/02-extension-anatomy.md +++ b/docs/docs/developer/04-build-extension/02-extension-anatomy.md @@ -1,17 +1,18 @@ --- title: Anatomy of an Extension -slug: /developer/build-extension/extension-anatomy +slug: /developer/extension/extension-anatomy description: An overview of extensions.json keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, build extension, extension anatomy, ] diff --git a/docs/docs/developer/04-build-extension/03-package-your-extension.md b/docs/docs/developer/04-build-extension/03-package-your-extension.md index cf7ffc6ba..a4c894695 100644 --- a/docs/docs/developer/04-build-extension/03-package-your-extension.md +++ b/docs/docs/developer/04-build-extension/03-package-your-extension.md @@ -1,17 +1,18 @@ --- title: Package your Engine -slug: /developer/build-extension/package-your-extension/ +slug: /developer/extension/package-your-extension/ description: Package your extension for sharing and publishing. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, build extension, extension anatomy, ] diff --git a/docs/docs/developer/04-build-extension/README.mdx b/docs/docs/developer/04-build-extension/README.mdx index a981281e7..ce7fce5b2 100644 --- a/docs/docs/developer/04-build-extension/README.mdx +++ b/docs/docs/developer/04-build-extension/README.mdx @@ -1,17 +1,10 @@ --- title: Build an Extension -slug: /developer/build-extension/ +slug: /developer/extension/ description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, + Jan, Rethink the Computer, local AI, privacy focus, free and open source, private and offline, conversational AI, no-subscription fee, large language models, build extension, ] --- diff --git a/docs/docs/developer/05-framework/03-engineering/README.mdx b/docs/docs/developer/05-framework/03-engineering/README.mdx index c3337ab2e..18c3904a4 100644 --- a/docs/docs/developer/05-framework/03-engineering/README.mdx +++ b/docs/docs/developer/05-framework/03-engineering/README.mdx @@ -4,14 +4,15 @@ slug: /developer/engineering description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, spec, engineering, ] diff --git a/docs/docs/developer/05-framework/03-engineering/assistants.md b/docs/docs/developer/05-framework/03-engineering/assistants.md index fa9c593ab..2a2dc2681 100644 --- a/docs/docs/developer/05-framework/03-engineering/assistants.md +++ b/docs/docs/developer/05-framework/03-engineering/assistants.md @@ -1,16 +1,17 @@ --- -title: "Assistants" +title: 'Assistants' description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-engineering/chats.md b/docs/docs/developer/05-framework/03-engineering/chats.md index eb0ae287a..b13240e63 100644 --- a/docs/docs/developer/05-framework/03-engineering/chats.md +++ b/docs/docs/developer/05-framework/03-engineering/chats.md @@ -3,14 +3,15 @@ title: Chats description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-engineering/files.md b/docs/docs/developer/05-framework/03-engineering/files.md index 59ca27ec9..950d8729f 100644 --- a/docs/docs/developer/05-framework/03-engineering/files.md +++ b/docs/docs/developer/05-framework/03-engineering/files.md @@ -1,16 +1,17 @@ --- -title: "Files" +title: 'Files' description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-engineering/fine-tuning.md b/docs/docs/developer/05-framework/03-engineering/fine-tuning.md index 53ca2b206..dac7cf54a 100644 --- a/docs/docs/developer/05-framework/03-engineering/fine-tuning.md +++ b/docs/docs/developer/05-framework/03-engineering/fine-tuning.md @@ -1,16 +1,17 @@ --- -title: "Fine-tuning" +title: 'Fine-tuning' description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-engineering/messages.md b/docs/docs/developer/05-framework/03-engineering/messages.md index 8f2497002..37cd061c5 100644 --- a/docs/docs/developer/05-framework/03-engineering/messages.md +++ b/docs/docs/developer/05-framework/03-engineering/messages.md @@ -3,14 +3,15 @@ title: Messages description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-engineering/models.md b/docs/docs/developer/05-framework/03-engineering/models.md index 4e4c3c604..4895954d7 100644 --- a/docs/docs/developer/05-framework/03-engineering/models.md +++ b/docs/docs/developer/05-framework/03-engineering/models.md @@ -3,14 +3,15 @@ title: Models description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-engineering/prompts.md b/docs/docs/developer/05-framework/03-engineering/prompts.md index 22fc578af..5897309d1 100644 --- a/docs/docs/developer/05-framework/03-engineering/prompts.md +++ b/docs/docs/developer/05-framework/03-engineering/prompts.md @@ -3,14 +3,15 @@ title: Prompts description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-engineering/threads.md b/docs/docs/developer/05-framework/03-engineering/threads.md index a1cd2b4df..161b93948 100644 --- a/docs/docs/developer/05-framework/03-engineering/threads.md +++ b/docs/docs/developer/05-framework/03-engineering/threads.md @@ -3,14 +3,15 @@ title: Threads description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-product/README.mdx b/docs/docs/developer/05-framework/03-product/README.mdx index ca3a13b3a..5b3f35f5d 100644 --- a/docs/docs/developer/05-framework/03-product/README.mdx +++ b/docs/docs/developer/05-framework/03-product/README.mdx @@ -4,14 +4,15 @@ slug: /developer/product description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, spec, product, ] diff --git a/docs/docs/developer/05-framework/03-product/chat.md b/docs/docs/developer/05-framework/03-product/chat.md index b0dcce2d6..fdd87644b 100644 --- a/docs/docs/developer/05-framework/03-product/chat.md +++ b/docs/docs/developer/05-framework/03-product/chat.md @@ -3,14 +3,15 @@ title: Chat description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-product/hub.md b/docs/docs/developer/05-framework/03-product/hub.md index 7171f8378..ebbb24fd7 100644 --- a/docs/docs/developer/05-framework/03-product/hub.md +++ b/docs/docs/developer/05-framework/03-product/hub.md @@ -3,14 +3,15 @@ title: Hub description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-product/jan.md b/docs/docs/developer/05-framework/03-product/jan.md index 9e8973360..f42e4643a 100644 --- a/docs/docs/developer/05-framework/03-product/jan.md +++ b/docs/docs/developer/05-framework/03-product/jan.md @@ -3,14 +3,15 @@ title: Jan (The Default Assistant) description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-product/settings.md b/docs/docs/developer/05-framework/03-product/settings.md index 514139a00..327fa5c97 100644 --- a/docs/docs/developer/05-framework/03-product/settings.md +++ b/docs/docs/developer/05-framework/03-product/settings.md @@ -3,14 +3,15 @@ title: Settings description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/03-product/system-monitor.md b/docs/docs/developer/05-framework/03-product/system-monitor.md index 761d9a7bf..fc4a91751 100644 --- a/docs/docs/developer/05-framework/03-product/system-monitor.md +++ b/docs/docs/developer/05-framework/03-product/system-monitor.md @@ -3,14 +3,15 @@ title: System Monitor description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/developer/05-framework/README.md b/docs/docs/developer/05-framework/README.md index 770f5713a..c94ce9701 100644 --- a/docs/docs/developer/05-framework/README.md +++ b/docs/docs/developer/05-framework/README.md @@ -4,14 +4,15 @@ slug: /developer/framework/ description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/events/hcmc-oct23.md b/docs/docs/events/hcmc-oct23.md index 73898efcd..182153263 100644 --- a/docs/docs/events/hcmc-oct23.md +++ b/docs/docs/events/hcmc-oct23.md @@ -1,8 +1,9 @@ --- title: "Jan's AI Hacker House (Ho Chi Minh City)" -description: "24-27 Oct 2023, District 3, HCMC. AI-focused talks, workshops and social events. 
Hosted by Jan.ai" +description: '24-27 Oct 2023, District 3, HCMC. AI-focused talks, workshops and social events. Hosted by Jan.ai' slug: /events/hcmc-oct23 image: /img/hcmc-launch-party.png +keywords: [AI, Hacker House, Ho Chi Minh City, HCMC, Jan.ai] --- ![](/img/hcmc-launch-party.png) diff --git a/docs/docs/events/nvidia-llm-day-nov-23.md b/docs/docs/events/nvidia-llm-day-nov-23.md index d467dcb6e..a57776ef4 100644 --- a/docs/docs/events/nvidia-llm-day-nov-23.md +++ b/docs/docs/events/nvidia-llm-day-nov-23.md @@ -1,21 +1,20 @@ --- -title: "Nov 23: Nvidia GenAI Day" -description: Nvidia's LLM Day +title: 'Nov 23: Nvidia GenAI Day' +description: Nvidia's LLM Day --- ![](/img/nvidia-llm-day-header.png) ## Nvidia GenAI Innovation Day -Jan will be at Nvidia's GenAI Innovation Day in Nov '23, focusing on Enterprise use-cases of LLMs. +Jan will be at Nvidia's GenAI Innovation Day in Nov '23, focusing on Enterprise use-cases of LLMs. ### Location -- JW Marriott Hanoi Hotel +- JW Marriott Hanoi Hotel - 8:30am November 8th 2023 - Registration: [https://gmcgroup.com.vn/nvidia-genai-event/](https://gmcgroup.com.vn/nvidia-genai-event/) ### Programme ![](/img/nvidia-llm-day.png) - diff --git a/docs/docs/guides/advanced-settings/_category_.json b/docs/docs/guides/advanced-settings/_category_.json deleted file mode 100644 index 316758344..000000000 --- a/docs/docs/guides/advanced-settings/_category_.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "label": "Advanced Settings", - "position": 11, - "link": { - "type": "doc", - "id": "guides/advanced-settings/advanced-settings" - } -} \ No newline at end of file diff --git a/docs/docs/guides/advanced-settings/advanced-settings.mdx b/docs/docs/guides/advanced-settings/advanced-settings.mdx deleted file mode 100644 index ae3244cda..000000000 --- a/docs/docs/guides/advanced-settings/advanced-settings.mdx +++ /dev/null @@ -1,150 +0,0 @@ ---- -title: Advanced Settings -sidebar_position: 1 -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - 
local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - advanced-settings, - ] ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -This guide will show you how to use the advanced settings in Jan. - -## Access the Advanced Settings -To access the Jan's advanced settings, follow the steps below: -1. Navigate to the main dashboard. -2. Click the **gear icon (⚙️)** on the bottom left of your screen. -3. Under the **Settings screen**, click the **Advanced Settings**. -4. You can configure the following settings: - -| Feature | Description | -|---------------------------|-----------------------------------------------------------------------------------------------------------------------| -| **Keyboard Shortcuts** | Keyboard shortcuts speed up your workflow. For a quick overview of useful keyboard shortcuts, refer to the list [below](advanced-settings.mdx#keyboard-shortcuts). | -| **Experimental Mode** | Enables experimental features that may be unstable. | -| **GPU Acceleration** | Enables the boosting of your model performance by using your GPU devices for acceleration. | -| **Jan Data Folder** | Location for messages, model configurations, and user data. Changeable to a different location. | -| **HTTPS Proxy & Ignore SSL Certificate** | Use a proxy server for internet connections and ignore SSL certificates for self-signed certificates. Please check out the guide on how to set up your own HTTPS proxy server [here](http-proxy.mdx). | -| **Clear Logs** | Removes all logs from the Jan application. | -| **Reset To Factory Default** | Resets the application to its original state, deleting all data including model customizations and conversation history. | - - - -## Keyboard Shortcuts - -Here are some of the keyboard shortcuts that you can use in Jan. 
- - - -| Combination | Description | -| --------------- | -------------------------------------------------- | -| `⌘ E` | Show list your models | -| `⌘ K` | Show list navigation pages | -| `⌘ B` | Toggle collapsible left panel | -| `⌘ ,` | Navigate to setting page | -| `Enter` | Send a message | -| `Shift + Enter` | Insert new line in input box | -| `Arrow Up` | Navigate to the previous option (within the search dialog) | -| `Arrow Down` | Navigate to the next option (within the search dialog) | - - - -| Combination | Description | -| --------------- | -------------------------------------------------- | -| `Ctrl E` | Show list your models | -| `Ctrl K` | Show list navigation pages | -| `Ctrl B` | Toggle collapsible left panel | -| `Ctrl ,` | Navigate to setting page | -| `Enter` | Send a message | -| `Shift + Enter` | Insert new line in input box | -| `Arrow Up` | Navigate to the previous option (within the search dialog) | -| `Arrow Down` | Navigate to the next option (within the search dialog) | - - - -| Combination | Description | -| --------------- | -------------------------------------------------- | -| `Ctrl E` | Show list your models | -| `Ctrl K` | Show list navigation pages | -| `Ctrl B` | Toggle collapsible left panel | -| `Ctrl ,` | Navigate to setting page | -| `Enter` | Send a message | -| `Shift + Enter` | Insert new line in input box | -| `Arrow Up` | Navigate to the previous option (within the search dialog) | -| `Arrow Down` | Navigate to the next option (within the search dialog) | - - - -:::note -The keyboard shortcuts are customizable. -::: - -## Enable the Experimental Mode -To try out new fetures that are still in testing phase, follow the steps below: -1. Navigate to the main dashboard. -2. Click the **gear icon (⚙️)** on the bottom left of your screen. -3. Under the **Settings screen**, click the **Advanced Settings**. -4. On the **Experimental Mode** click the slider to enable. 
- -## Enable the GPU Acceleration -To enhance your model performance, follow the steps below: - -:::warning -Ensure that you have read the [troubleshooting guide](/docs/guides/common-error/not-using-gpu.mdx) here for further assistance. -::: -1. Navigate to the main dashboard. -2. Click the **gear icon (⚙️)** on the bottom left of your screen. -3. Under the **Settings screen**, click the **Advanced Settings**. -4. On the **GPU Acceleration** click the slider to enable. - -## Access the Jan Data Folder -To access the folder where messages, model configurations and user data are stored, follow the steps below: -1. Navigate to the main dashboard. -2. Click the **gear icon (⚙️)** on the bottom left of your screen. -3. Under the **Settings screen**, click the **Advanced Settings**. -4. On the **Jan Data Folder** click the **folder icon (📂)** to access the data or the **pencil icon (✏️)** to change the folder where you keep your data. - -## Enable the HTTPS Proxy -To enable the HTTPS Proxy feature, follow the steps below: -1. Make sure to set up your HTTPS Proxy. Check out this [guide](http-proxy.mdx) for instructions on how to do it. -2. Navigate to the main dashboard. -3. Click the **gear icon (⚙️)** on the bottom left of your screen. -4. Under the **Settings screen**, click the **Advanced Settings**. -5. On the **HTTPS Proxy** click the slider to enable. -6. Input your domain in the blank field. - -## Ignore SSL Certificate -To Allow self-signed or unverified certificates, follow the steps below: -1. Navigate to the main dashboard. -2. Click the **gear icon (⚙️)** on the bottom left of your screen. -3. Under the **Settings screen**, click the **Advanced Settings**. -4. On the **Ignore SSL Certificates** click the slider to enable. - -## Clear Logs -To clear all logs on your Jan app, follow the steps below: -:::warning -This feature clears all the data in your **Jan Data Folder**. -::: -1. Navigate to the main dashboard. -2. 
Click the **gear icon (⚙️)** on the bottom left of your screen. -3. Under the **Settings screen**, click the **Advanced Settings**. -4. On the **Clear Logs** click the the **Clear** button. - -## Reset To Factory Default -To reset the Jan app to its original state, follow the steps below: -:::danger[Remember!] -This irreversible action is only recommended if the application is corrupted. -::: -1. Navigate to the main dashboard. -2. Click the **gear icon (⚙️)** on the bottom left of your screen. -3. Under the **Settings screen**, click the **Advanced Settings**. -4. On the **Reset To Factory Default** click the the **Reset** button. \ No newline at end of file diff --git a/docs/docs/guides/advanced-settings/http-proxy.mdx b/docs/docs/guides/advanced-settings/http-proxy.mdx deleted file mode 100644 index 7b2de339c..000000000 --- a/docs/docs/guides/advanced-settings/http-proxy.mdx +++ /dev/null @@ -1,118 +0,0 @@ ---- -title: HTTPS Proxy -sidebar_position: 2 -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - advanced-settings, - https-proxy, - ] ---- - - -## Why HTTPS Proxy? - -HTTPS Proxy encrypts data between your browser and the internet, making it hard for outsiders to intercept or read. It also helps you to maintain your privacy and security while being able to bypass regional restrictions on internet. - -:::note - -- When configuring Jan using an HTTPS proxy, the speed of the downloading model may be affected due to the encryption and decryption process. It also depends on the networking of the cloud service provider. -- HTTPS Proxy does not affect the remote model usage. - -::: - -## Setting Up Your Own HTTPS Proxy Server -This guide provides a simple overview of setting up an HTTPS proxy server using **Squid**, a widely used open-source proxy software. - -:::note -Other software options are also available depending on your requirements. 
-::: - -### Step 1: Choosing a Server -1. Firstly, you need to choose a server to host your proxy server. -:::note -We recommend using a well-known cloud provider service like: -- Amazon AWS -- Google Cloud -- Microsoft Azure -- Digital Ocean -::: - -2. Ensure that your server has a public IP address and is accessible from the internet. - -### Step 2: Installing Squid -Instal **Squid** using the following command: -```bash -sudo apt-get update -sudo apt-get install squid -``` - -### Step 3: Configure Squid for HTTPS - -To enable HTTPS, you will need to configure Squid with SSL support. - -1. Squid requires an SSL certificate to be able to handle HTTPS traffic. You can generate a self-signed certificate or obtain one from a Certificate Authority (CA). For a self-signed certificate, you can use OpenSSL: - -```bash -openssl req -new -newkey rsa:2048 -days 365 -nodes -x509 -keyout squid-proxy.pem -out squid-proxy.pem -``` - -2. Edit the Squid configuration file `/etc/squid/squid.conf` to include the path to your SSL certificate and enable the HTTPS port: - -```bash -http_port 3128 ssl-bump cert=/path/to/your/squid-proxy.pem -ssl_bump server-first all -ssl_bump bump all -``` - -3. To intercept HTTPS traffic, Squid uses a process called SSL Bumping. This process allows Squid to decrypt and re-encrypt HTTPS traffic. To enable SSL Bumping, ensure the `ssl_bump` directives are configured correctly in your `squid.conf` file. - -### Step 4 (Optional): Configure ACLs and Authentication - -1. You can define rules to control who can access your proxy. This is done by editing the squid.conf file and defining ACLs: - -```bash -acl allowed_ips src "/etc/squid/allowed_ips.txt" -http_access allow allowed_ips -``` - -2. If you want to add an authentication layer, Squid supports several authentication schemes. 
Basic authentication setup might look like this: - -```bash -auth_param basic program /usr/lib/squid/basic_ncsa_auth /etc/squid/passwords -acl authenticated proxy_auth REQUIRED -http_access allow authenticated -``` - -### Step 5: Restart and Test Your Proxy - -1. After configuring, restart Squid to apply the changes: - -```bash -sudo systemctl restart squid -``` - -2. To test, configure your browser or another client to use the proxy server with its IP address and port (default is 3128). -3. Check if you can access the internet through your proxy. - -:::tip - -Tips for Secure Your Proxy: -- **Firewall rules**: Ensure that only intended users or IP addresses can connect to your proxy server. This can be achieved by setting up appropriate firewall rules. -- **Regular updates**: Keep your server and proxy software updated to ensure that you are protected against known vulnerabilities. -- **Monitoring and logging**: Monitor your proxy server for unusual activity and enable logging to keep track of the traffic passing through your proxy. - -::: - -## Setting Up Jan to Use Your HTTPS Proxy - -Once you have your HTTPS proxy server set up, you can configure Jan to use it. -1. Navigate to `Settings` > `Advanced Settings` and specify the HTTPS proxy (proxy auto-configuration and SOCKS not supported). -2. You can turn on the feature `Ignore SSL Certificates` if you are using a self-signed certificate. This feature allows self-signed or unverified certificates. 
\ No newline at end of file diff --git a/docs/docs/guides/assets/jan-ai-download.png b/docs/docs/guides/assets/jan-ai-download.png deleted file mode 100644 index b175e65f8..000000000 Binary files a/docs/docs/guides/assets/jan-ai-download.png and /dev/null differ diff --git a/docs/docs/guides/assets/jan-ai-quickstart.png b/docs/docs/guides/assets/jan-ai-quickstart.png deleted file mode 100644 index 8f410ccee..000000000 Binary files a/docs/docs/guides/assets/jan-ai-quickstart.png and /dev/null differ diff --git a/docs/docs/guides/common-error/assets/janOpenAppDirectory.gif b/docs/docs/guides/assets/janOpenAppDirectory.gif similarity index 100% rename from docs/docs/guides/common-error/assets/janOpenAppDirectory.gif rename to docs/docs/guides/assets/janOpenAppDirectory.gif diff --git a/docs/docs/guides/common-error/assets/janSwitchCPUtoGPU.gif b/docs/docs/guides/assets/janSwitchCPUtoGPU.gif similarity index 100% rename from docs/docs/guides/common-error/assets/janSwitchCPUtoGPU.gif rename to docs/docs/guides/assets/janSwitchCPUtoGPU.gif diff --git a/docs/docs/guides/assets/quick.png b/docs/docs/guides/assets/quick.png deleted file mode 100644 index f8ad257e8..000000000 Binary files a/docs/docs/guides/assets/quick.png and /dev/null differ diff --git a/docs/docs/guides/best-practices.mdx b/docs/docs/guides/best-practices.mdx deleted file mode 100644 index 9dabef8dc..000000000 --- a/docs/docs/guides/best-practices.mdx +++ /dev/null @@ -1,48 +0,0 @@ ---- -title: Best Practices -sidebar_position: 3 -description: Comprehensive set of best practices. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - acknowledgements, - third-party libraries, - ] ---- - -Jan is a versatile platform offering solutions for integrating AI locally across various platforms. 
This guide outlines best practices for developers, analysts, and AI enthusiasts to enhance their experience with Jan when adding AI locally to their computers. Implementing these practices will optimize the performance of AI models. - -## Follow the Quickstart Guide -The [quickstart guide](quickstart.mdx) is designed to facilitate a quick setup process. It provides a clear instruction and simple steps to get you up and running with Jan.ai quickly. Even, if you are inexperienced in AI, the quickstart can offer valuable insights and tips to help you get started quickly. - -## Setting up the Right Models -Jan offers a range of pre-configured AI models that are tailored to different tasks and industries. You should identify which on that aligns with your objectives. There are factors to be considered: -- Capabilities -- Accuracy -- Processing Speed - -:::note -- Some of these factors also depend on your hardware, please see Hardware Requirement. -- Choosing the right model is important to achieve the best performance. -::: - -## Setting up Jan -Ensure that you familiarize yourself with the Jan application. Jan offers advanced settings that you can adjust. These settings may influence how your AI behaves locally. Please see the [Advanced Settings](./advanced-settings/advanced-settings.mdx) article for a complete list of Jan's configurations and instructions on how to configure them. - -## Integrations -One of Jan's key features is its ability to integrate with many systems. Whether you are incorporating Jan.ai with any open-source LLM provider or other tools, it is important to understand the integration capabilities and limitations. - -## Mastering the Prompt Engineering -Prompt engineering is an important aspect when dealing with AI models to generate the desired outputs. Mastering this skill can significantly enhance the performance and the responses of the AI. 
Below are some tips that you can do for prompt engineering: -- Ask the model to adopt a persona -- Be specific and details get a more specific answers -- Provide examples or preference text or context at the beginning -- Use a clear and concise language -- Use certain keywords and phrases diff --git a/docs/docs/guides/common-error/broken-build.mdx b/docs/docs/guides/common-error/broken-build.mdx deleted file mode 100644 index 0e41d0b4d..000000000 --- a/docs/docs/guides/common-error/broken-build.mdx +++ /dev/null @@ -1,159 +0,0 @@ ---- -title: Broken Build -sidebar_position: 1 -hide_table_of_contents: true -description: A step-by-step guide to fix errors that prevent the project from compiling or running successfully. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - troubleshooting, - ] ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -This guide provides you steps to troubleshoot and to resolve the issue where your Jan is stuck in a broken build after installation. - - - - ### 1. Uninstall Jan - - Delete Jan from your `/Applications` folder. - - ### 2. Delete Application Data, Cache, and User Data - - ```zsh - # Step 1: Delete the application data - ## Newer versions - rm -rf ~/Library/Application\ Support/jan - ## Versions 0.2.0 and older - rm -rf ~/Library/Application\ Support/jan-electron - - # Step 2: Clear application cache - rm -rf ~/Library/Caches/jan* - - # Step 3: Remove all user data - rm -rf ~/jan - ``` - - ### 3. Additional Step for Versions Before 0.4.2 - - If you are using a version before `0.4.2`, you need to run the following commands: - - ```zsh - ps aux | grep nitro - # Looks for processes like `nitro` and `nitro_arm_64`, and kill them one by one by process ID - kill -9 - ``` - - ### 4. Download the Latest Version - - Download the latest version of Jan from our [homepage](https://jan.ai/). - - - - ### 1. 
Uninstall Jan - - To uninstall Jan on Windows, use the [Windows Control Panel](https://support.microsoft.com/en-us/windows/uninstall-or-remove-apps-and-programs-in-windows-4b55f974-2cc6-2d2b-d092-5905080eaf98). - - ### 2. Delete Application Data, Cache, and User Data - - ```sh - # You can delete the `/Jan` directory in Windows's AppData Directory by visiting the following path `%APPDATA%\Jan` - cd C:\Users\YOUR_USERNAME\AppData\Roaming - rm -r ./Jan - ``` - - ### 3. Additional Step for Versions Before 0.4.2 - - If you are using a version before `0.4.2`, you need to run the following commands: - - ```sh - # Find the process ID (PID) of the nitro process by filtering the list by process name - tasklist | findstr "nitro" - # Once you have the PID of the process you want to terminate, run the `taskkill` - taskkill /F /PID - ``` - - ### 4. Download the Latest Version - - Download the latest version of Jan from our [homepage](https://jan.ai/). - - - - - ### 1. Uninstall Jan - - - - - To uninstall Jan, you should use your package manager's uninstall or remove option. - - This will return your system to its state before the installation of Jan. - - This method can also reset all settings if you are experiencing any issues with Jan. - - - - - To uninstall Jan, run the following command.MDXContent - - ```sh - sudo apt-get remove jan - # where jan is the name of Jan package - ``` - - This will return your system to its state before the installation of Jan. - - This method can also be used to reset all settings if you are experiencing any issues with Jan. - - - - - To uninstall Jan, you can uninstall Jan by deleting the `.AppImage` file. - - If you wish to completely remove all user data associated with Jan after uninstallation, you can delete the user data at `~/jan`. - - This method can also reset all settings if you are experiencing any issues with Jan. - - - - - ### 2. 
Delete Application Data, Cache, and User Data - - ```sh - # You can delete the user data folders located at the following `~/jan` - rm -rf ~/jan - ``` - - ### 3. Additional Step for Versions Before 0.4.2 - - If you are using a version before `0.4.2`, you need to run the following commands: - - ```zsh - ps aux | grep nitro - # Looks for processes like `nitro` and `nitro_arm_64`, and kill them one by one by process ID - kill -9 - ``` - - ### 4. Download the Latest Version - - Download the latest version of Jan from our [homepage](https://jan.ai/). - - - -By following these steps, you can cleanly uninstall and reinstall Jan, ensuring a smooth and error-free experience with the latest version. - -:::note - -Before reinstalling Jan, ensure it's completely removed from all shared spaces if it's installed on multiple user accounts on your device. - -::: diff --git a/docs/docs/guides/common-error/not-using-gpu.mdx b/docs/docs/guides/common-error/not-using-gpu.mdx deleted file mode 100644 index a7dd788f8..000000000 --- a/docs/docs/guides/common-error/not-using-gpu.mdx +++ /dev/null @@ -1,161 +0,0 @@ ---- -title: Troubleshooting NVIDIA GPU -sidebar_position: 2 -description: A step-by-step guide to enable Jan to properly leverage NVIDIA GPU resources, avoiding performance issues. -keywords: [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - convZ - ersational AI, - no-subscription fee, - large language model, - troubleshooting, - using GPU, - ] ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - - -This guide provides steps to troubleshoot and resolve issues when the Jan app does not utilize the NVIDIA GPU on Windows and Linux systems. - -### 1. Ensure GPU Mode Requirements - - - - - #### NVIDIA Driver - - - Install an [NVIDIA Driver](https://www.nvidia.com/Download/index.aspx) supporting CUDA 11.7 or higher. 
- - Use the following command to verify the installation: - - ```sh - nvidia-smi - ``` - - #### CUDA Toolkit - - - Install a [CUDA toolkit](https://developer.nvidia.com/cuda-downloads) compatible with your NVIDIA driver. - - Use the following command to verify the installation: - - ```sh - nvcc --version - ``` - - - - - #### NVIDIA Driver - - - Install an [NVIDIA Driver](https://www.nvidia.com/Download/index.aspx) supporting CUDA 11.7 or higher. - - Use the following command to verify the installation: - - ```sh - nvidia-smi - ``` - - #### CUDA Toolkit - - - Install a [CUDA toolkit](https://developer.nvidia.com/cuda-downloads) compatible with your NVIDIA driver. - - Use the following command to verify the installation: - - ```sh - nvcc --version - ``` - #### Linux Specifics - - - Ensure that `gcc-11`, `g++-11`, `cpp-11`, or higher is installed. - - See [instructions](https://gcc.gnu.org/projects/cxx-status.html#cxx17) for Ubuntu installation. - - - **Post-Installation Actions**: Add CUDA libraries to `LD_LIBRARY_PATH`. - - Follow the [Post-installation Actions](https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html#post-installation-actions) instructions. - - - - -### 2. Switch to GPU Mode - -Jan defaults to CPU mode but automatically switches to GPU mode if your system supports it, selecting the GPU with the highest VRAM. Check this setting in `Settings` > `Advanced Settings`. - -#### Troubleshooting Tips - -If GPU mode isn't enabled by default: - -1. Confirm that you have installed an NVIDIA driver supporting CUDA 11.7 or higher. Refer to [CUDA compatibility](https://docs.nvidia.com/deploy/cuda-compatibility/index.html#binary-compatibility__table-toolkit-driver). -2. Ensure compatibility of the CUDA toolkit with your NVIDIA driver. Refer to [CUDA compatibility](https://docs.nvidia.com/deploy/cuda-compatibility/index.html#binary-compatibility__table-toolkit-driver). -3. For Linux, add CUDA's `.so` libraries to the `LD_LIBRARY_PATH`. 
For Windows, ensure that CUDA's `.dll` libraries are in the PATH. Refer to [Windows setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html#environment-setup). - -### 3. Check GPU Settings - -1. Navigate to `Settings` > `Advanced Settings` > `Jan Data Folder` to access GPU settings. -2. Open the `settings.json` file in the `settings` folder. Here's an example: - -```json title="~/jan/settings/settings.json" -{ - "notify": true, - "run_mode": "gpu", - "nvidia_driver": { - "exist": true, - "version": "531.18" - }, - "cuda": { - "exist": true, - "version": "12" - }, - "gpus": [ - { - "id": "0", - "vram": "12282" - }, - { - "id": "1", - "vram": "6144" - }, - { - "id": "2", - "vram": "6144" - } - ], - "gpu_highest_vram": "0" -} -``` -### 4. Restart Jan -Restart Jan application to make sure it works. - -#### Troubleshooting Tips - - - Ensure `nvidia_driver` and `cuda` fields indicate installed software. - - If `gpus` field is empty or lacks your GPU, check NVIDIA driver and CUDA toolkit installations. - - For further assistance, share the `settings.json` file. - -### Tested Configurations - -- **Windows 11 Pro 64-bit:** - - GPU: NVIDIA GeForce RTX 4070ti - - CUDA: 12.2 - - NVIDIA driver: 531.18 (Bare metal) - -- **Ubuntu 22.04 LTS:** - - GPU: NVIDIA GeForce RTX 4070ti - - CUDA: 12.2 - - NVIDIA driver: 545 (Bare metal) - -- **Ubuntu 20.04 LTS:** - - GPU: NVIDIA GeForce GTX 1660ti - - CUDA: 12.1 - - NVIDIA driver: 535 (Proxmox VM passthrough GPU) - -- **Ubuntu 18.04 LTS:** - - GPU: NVIDIA GeForce GTX 1660ti - - CUDA: 12.1 - - NVIDIA driver: 535 (Proxmox VM passthrough GPU) - -### Common Issues and Solutions - -1. If the issue persists, try installing the [Nightly version](https://jan.ai/install/nightly/). -2. Ensure your (V)RAM is accessible; some users with virtual RAM may require additional configuration. -3. Seek assistance in [Jan Discord](https://discord.gg/mY69SZaMaC). 
\ No newline at end of file diff --git a/docs/docs/guides/error-codes/how-to-get-error-logs.mdx b/docs/docs/guides/error-codes/how-to-get-error-logs.mdx deleted file mode 100644 index 045468e33..000000000 --- a/docs/docs/guides/error-codes/how-to-get-error-logs.mdx +++ /dev/null @@ -1,49 +0,0 @@ ---- -title: How to Get Error Logs -sidebar_position: 5 -description: A step-by-step guide to get the Jan app error logs. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - troubleshooting, - permission denied, - ] ---- - -To get the error logs of your Jan application, follow the steps below: -### Jan Application -1. Navigate to the main dashboard. -2. Click the **gear icon (⚙️)** on the bottom left of your screen. -3. Under the **Settings screen**, click the **Advanced Settings**. -4. On the **Jan Data Folder** click the **folder icon (📂)** to access the data. -5. Click the **logs** folder. - -### Jan UI -1. Open your Unix or Linux terminal. -2. Use the following commands to get the recent 50 lines of log files: -```bash -tail -n 50 ~/jan/logs/app.log - -``` - -### Jan API Server -1. Open your Unix or Linux terminal. -2. Use the following commands to get the recent 50 lines of log files: -```bash -tail -n 50 ~/jan/logs/server.log - -``` -:::warning -Ensure to redact any private or sensitive information when sharing logs or error details. -::: - -:::note -If you have any questions or are looking for support, please don't hesitate to contact us via our [Discord community](https://discord.gg/Dt7MxDyNNZ) or create a new issue in our [GitHub repository](https://github.com/janhq/jan/issues/new/choose). 
-::: \ No newline at end of file diff --git a/docs/docs/guides/error-codes/no-assistant-available.mdx b/docs/docs/guides/error-codes/no-assistant-available.mdx deleted file mode 100644 index 31d9a75e9..000000000 --- a/docs/docs/guides/error-codes/no-assistant-available.mdx +++ /dev/null @@ -1,31 +0,0 @@ ---- -title: No Assistant Available -sidebar_position: 7 -description: Troubleshooting steps to resolve issues no assistant available. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - troubleshooting, - no assistant available, - ] ---- - -When you encounter the following error message: -``` -No assistant available. -``` - -This issue arises when a new, unintentional file appears in `/jan/assistants`. - -It can be resolved through the following steps: - -1. Access the `/jan/assistants` directory using a file manager or terminal. - -2. Within `/jan/assistants`, this directory should only contain a folder named `jan`. Identify any file outside of this folder and remove it. \ No newline at end of file diff --git a/docs/docs/guides/error-codes/permission-denied.mdx b/docs/docs/guides/error-codes/permission-denied.mdx deleted file mode 100644 index 1d41d3b03..000000000 --- a/docs/docs/guides/error-codes/permission-denied.mdx +++ /dev/null @@ -1,39 +0,0 @@ ---- -title: Permission Denied -sidebar_position: 1 -description: A step-by-step guide to fix the issue when access is denied due to insufficient permissions. 
-keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - troubleshooting, - permission denied, - ] ---- - -When running Jan, you might encounter the following error message: - -``` -Uncaught (in promise) Error: Error invoking layout-480796bff433a3a3.js:538 remote method 'installExtension': -Error Package /Applications/Jan.app/Contents/Resources/app.asar.unpacked/pre-install/janhq-assistant-extension-1.0.0.tgz does not contain a valid manifest: -Error EACCES: permission denied, mkdtemp '/Users/username/.npm/_cacache/tmp/ueCMn4' -``` - -This error mainly caused by permission problem during installation. To resolve this issue, follow these steps: - -1. Open your terminal. - -2. Execute the following command to change ownership of the `~/.npm` directory to the current user: - -```sh -sudo chown -R $(whoami) ~/.npm -``` -:::note -This command ensures that the necessary permissions are granted for Jan installation, resolving the encountered error. -::: \ No newline at end of file diff --git a/docs/docs/guides/error-codes/something-amiss.mdx b/docs/docs/guides/error-codes/something-amiss.mdx deleted file mode 100644 index 0975754e3..000000000 --- a/docs/docs/guides/error-codes/something-amiss.mdx +++ /dev/null @@ -1,53 +0,0 @@ ---- -title: Something's Amiss -sidebar_position: 4 -description: A step-by-step guide to resolve an unspecified or general error. ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - - -When you start a chat with a model and encounter with a Something's Amiss error, here's how to resolve it: -1. Ensure your OS is up to date. -2. Choose a model smaller than 80% of your hardware's V/RAM. For example, on an 8GB machine, opt for models smaller than 6GB. -3. 
Install the latest [Nightly release](https://jan.ai/install/nightly/) or [clear the application cache](https://jan.ai/troubleshooting/stuck-on-broken-build/) when reinstalling Jan. -4. Confirm your V/RAM accessibility, particularly if using virtual RAM. -5. Nvidia GPU users should download [CUDA](https://developer.nvidia.com/cuda-downloads). -6. Linux users, ensure your system meets the requirements of gcc 11, g++ 11, cpp 11, or higher. Refer to this [link](https://jan.ai/guides/troubleshooting/gpu-not-used/#specific-requirements-for-linux) for details. -7. You might use the wrong port when you [check the app logs](https://jan.ai/troubleshooting/how-to-get-error-logs/) and encounter the Bind address failed at 127.0.0.1:3928 error. To check the port status, try use the `netstat` command, like the following: - - - - ```sh - netstat -an | grep 3928 - ``` - - - ```sh - netstat -ano | find "3928" - tasklist /fi "PID eq 3928" - ``` - - - ```sh - netstat -anpe | grep "3928" - ``` - - - -:::note - -`Netstat` displays the contents of various network-related data structures for active connections - -::: - -:::tip - -Jan uses the following ports: - -- Nitro: `3928` -- Jan API Server: `1337` -- Jan Documentation: `3001` - -::: \ No newline at end of file diff --git a/docs/docs/guides/error-codes/stuck-on-loading-model.mdx b/docs/docs/guides/error-codes/stuck-on-loading-model.mdx deleted file mode 100644 index 86a16b5fc..000000000 --- a/docs/docs/guides/error-codes/stuck-on-loading-model.mdx +++ /dev/null @@ -1,62 +0,0 @@ ---- -title: Stuck on Loading Model -sidebar_position: 8 -description: Troubleshooting steps to resolve issues related to the loading model. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - troubleshooting, - stuck on loading model, - ] ---- - -## 1. 
Issue: Model Loading Stuck Due To Missing Windows Management Instrumentation Command-line (WMIC) - -Encountering a stuck-on-loading model issue in Jan is caused by errors related to the `Windows Management Instrumentation Command-line (WMIC)` path not being included in the system's PATH environment variable. - -Error message: -``` -index.js:47 Uncaught (in promise) Error: Error invoking remote method 'invokeExtensionFunc': Error: Command failed: WMIC CPU Get NumberOfCores -``` - -It can be resolved through the following steps: - -1. **Open System Properties:** - - Press `Windows key + R`. - - Type `sysdm.cpl` and press `Enter`. - -2. **Access Environment Variables:** - - Go to the "Advanced" tab. - - Click the "Environment Variables" button. - -3. **Edit System PATH:** - - Under "System Variables" find and select `Path`. - - Click "Edit." - -4. **Add WMIC Path:** - - Click "New" and enter `C:\Windows\System32\Wbem`. - -5. **Save Changes:** - - Click "OK" to close and save your changes. - -6. **Verify Installation:** - - Restart any command prompts or terminals. - - Run `where wmic` to verify. Expected output: `C:\Windows\System32\wbem\WMIC.exe`. - - -## 2. Issue: Model Loading Stuck Due To CPU Without AVX - -Encountering an issue with models stuck on loading in Jan can be due to the use of older generation CPUs that do not support Advanced Vector Extensions (AVX). - -To check if your CPU supports AVX, visit the following link: [CPUs with AVX](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions#CPUs_with_AVX) - -:::warning [Please use this with caution] -As a workaround, consider using an [emulator](https://www.intel.com/content/www/us/en/developer/articles/tool/software-development-emulator.html) to simulate AVX support. 
-::: \ No newline at end of file diff --git a/docs/docs/guides/error-codes/thread-disappreance.mdx b/docs/docs/guides/error-codes/thread-disappreance.mdx deleted file mode 100644 index 06235df56..000000000 --- a/docs/docs/guides/error-codes/thread-disappreance.mdx +++ /dev/null @@ -1,26 +0,0 @@ ---- -title: Thread Disappearance -sidebar_position: 6 -description: Troubleshooting steps to resolve issues threads suddenly disappearance. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - troubleshooting, - thread disappearance, - ] ---- - -When you encounter the error of old threads suddenly disappear. This can happen when a new, unintentional file is created in `/jan/threads`. - -It can be resolved through the following steps: - -1. Go to `/jan/threads`. - -2. The `/jan/threads` directory contains many folders named with the prefix `jan_` followed by an ID (e.g., `jan_123`). Look for any file not conforming to this naming pattern and remove it. \ No newline at end of file diff --git a/docs/docs/guides/error-codes/undefined-issue.mdx b/docs/docs/guides/error-codes/undefined-issue.mdx deleted file mode 100644 index 223f686d1..000000000 --- a/docs/docs/guides/error-codes/undefined-issue.mdx +++ /dev/null @@ -1,26 +0,0 @@ ---- -title: Undefined Issue -sidebar_position: 3 -description: A step-by-step guide to resolve errors when a variable or object is not defined. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - troubleshooting, - undefined issue, - ] ---- - -Encountering an `undefined issue` in Jan is caused by errors related to the Nitro tool or other internal processes. It can be resolved through the following steps: - -1. Clearing the Jan folder and then reopen the application to determine if the problem persists -2. 
Manually run the nitro tool located at `~/jan/extensions/@janhq/inference-nitro-extensions/dist/bin/(your-os)/nitro` to check for error messages. -3. Address any nitro error messages that are identified and reassess the persistence of the issue. -4. Reopen Jan to determine if the problem has been resolved after addressing any identified errors. -5. If the issue persists, please share the [app logs](https://jan.ai/troubleshooting/how-to-get-error-logs/) via [Jan Discord](https://discord.gg/mY69SZaMaC) for further assistance and troubleshooting. \ No newline at end of file diff --git a/docs/docs/guides/error-codes/unexpected-token.mdx b/docs/docs/guides/error-codes/unexpected-token.mdx deleted file mode 100644 index 4a00e447d..000000000 --- a/docs/docs/guides/error-codes/unexpected-token.mdx +++ /dev/null @@ -1,24 +0,0 @@ ---- -title: Unexpected Token -sidebar_position: 2 -description: A step-by-step guide to correct syntax errors caused by invalid JSON in the code. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - troubleshooting, - unexpected token, - ] ---- - -Encountering the `Unexpected token` error when initiating a chat with OpenAI models mainly caused by either your OpenAI key or where you access your OpenAI from. This issue can be solved through the following steps: - -1. Obtain an OpenAI API key from [OpenAI's developer platform](https://platform.openai.com/) and integrate it into your application. - -2. Trying a VPN could potentially solve the issue, especially if it's related to region locking for accessing OpenAI services. By connecting through a VPN, you may bypass such restrictions and successfully initiate chats with OpenAI models. 
\ No newline at end of file diff --git a/docs/docs/guides/extensions/README.mdx b/docs/docs/guides/extensions/README.mdx deleted file mode 100644 index fd8185b14..000000000 --- a/docs/docs/guides/extensions/README.mdx +++ /dev/null @@ -1,22 +0,0 @@ ---- -title: Extensions -slug: /guides/extensions/ -sidebar_position: 5 -description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - build extension, - ] ---- - -import DocCardList from "@theme/DocCardList"; - - diff --git a/docs/docs/guides/extensions/assets/extension-setup.png b/docs/docs/guides/extensions/assets/extension-setup.png deleted file mode 100644 index 4f9ea63ee..000000000 Binary files a/docs/docs/guides/extensions/assets/extension-setup.png and /dev/null differ diff --git a/docs/docs/guides/extensions/assets/jan-ai-extensions.png b/docs/docs/guides/extensions/assets/jan-ai-extensions.png deleted file mode 100644 index 9d0a1dc6a..000000000 Binary files a/docs/docs/guides/extensions/assets/jan-ai-extensions.png and /dev/null differ diff --git a/docs/docs/guides/extensions/setup-ext.mdx b/docs/docs/guides/extensions/extensions.mdx similarity index 54% rename from docs/docs/guides/extensions/setup-ext.mdx rename to docs/docs/guides/extensions/extensions.mdx index c080283e9..63b71b390 100644 --- a/docs/docs/guides/extensions/setup-ext.mdx +++ b/docs/docs/guides/extensions/extensions.mdx @@ -1,22 +1,24 @@ --- -title: Extension Setup +title: What are Jan Extensions? +slug: /extensions +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. sidebar_position: 1 -description: Dive into the available extensions and configure them. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, - extension settings, + large language models, + Jan Extensions, + Extensions, ] --- - The current Jan Desktop Client has some default extensions built on top of this framework to enhance the user experience. In this guide, we will show you the list of default extensions and how to configure extension settings. ## Default Extensions @@ -25,34 +27,35 @@ You can find the default extensions in the `Settings` > `Extensions`. ## List of Default Extensions -| Extension Name | Version | Description | Source Code Link | -| -------------- | ------- | ----------- | ---------------- | -| Assistant Extension | `v1.0.0` | This extension enables assistants, including Jan, a default assistant that can call all downloaded models. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/assistant-extension ) | -| Conversational Extension | `v1.0.0` | This extension enables conversations and state persistence via your filesystem. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/conversational-extension) | -| Inference Nitro Extension | `v1.0.0` | This extension embeds Nitro, a lightweight (3 MB) inference engine in C++. See nitro.jan.ai. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/inference-nitro-extension) | -| Inference Openai Extension | `v1.0.0` | This extension enables OpenAI chat completion API calls. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/inference-openai-extension) | -| Inference Triton Trt Llm Extension | `v1.0.0` | This extension enables Nvidia's TensorRT-LLM as an inference engine option. 
| [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/inference-triton-trtllm-extension) | -| Model Extension | `v1.0.22` | Model Management Extension provides model exploration and seamless downloads. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/model-extension) | -| Monitoring Extension | `v1.0.9` | This extension offers system health and OS-level data. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/monitoring-extension) | +| Extension Name | Version | Description | Source Code Link | +| ---------------------------------- | --------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | +| Assistant Extension | `v1.0.0` | This extension enables assistants, including Jan, a default assistant that can call all downloaded models. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/assistant-extension) | +| Conversational Extension | `v1.0.0` | This extension enables conversations and state persistence via your filesystem. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/conversational-extension) | +| Inference Nitro Extension | `v1.0.0` | This extension embeds Nitro, a lightweight (3 MB) inference engine in C++. See nitro.jan.ai. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/inference-nitro-extension) | +| Inference Openai Extension | `v1.0.0` | This extension enables OpenAI chat completion API calls. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/inference-openai-extension) | +| Inference Triton Trt Llm Extension | `v1.0.0` | This extension enables Nvidia's TensorRT-LLM as an inference engine option. 
| [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/inference-triton-trtllm-extension) | +| Model Extension | `v1.0.22` | Model Management Extension provides model exploration and seamless downloads. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/model-extension) | +| Monitoring Extension | `v1.0.9` | This extension offers system health and OS-level data. | [Link to Source](https://github.com/janhq/jan/tree/dev/extensions/monitoring-extension) | ## Configure Extension Settings + To configure extension settings: + 1. Navigate to the `~/jan/extensions`. 2. Open the `extensions.json` file 3. Edit the file with options including: -| Option | Description | -|-----------------|-------------------------------------------------| -| `_active` | Enable/disable the extension. | -| `listeners` | Default listener setting. | -| `origin` | Extension file path. | -| `installOptions`| Version and metadata configuration. | -| `name` | Extension name. | -| `version` | Extension version. | -| `main` | Main file path. | -| `description` | Extension description. | -| `url` | Extension URL. | - +| Option | Description | +| ---------------- | ----------------------------------- | +| `_active` | Enable/disable the extension. | +| `listeners` | Default listener setting. | +| `origin` | Extension file path. | +| `installOptions` | Version and metadata configuration. | +| `name` | Extension name. | +| `version` | Extension version. | +| `main` | Main file path. | +| `description` | Extension description. | +| `url` | Extension URL. | ```json title="~/jan/extensions/extensions.json" { @@ -136,8 +139,25 @@ To configure extension settings: } ``` +## Import Custom Extension + +:::note +Currently, Jan only supports official extensions, which can be directly downloaded in Extension Settings. We plan to support 3rd party Extensions in the future. +::: + +For now you can always import a third party extension at your own risk by following the steps below: + +1. 
Navigate to **Settings** > **Extensions** > Click Select under **Manual Installation**. +2. Then, the ~/jan/extensions/extensions.json file will be updated automatically. + +:::caution + +You need to prepare the extension file in .tgz format to install the **non-default** extension. + +::: + :::info[Assistance and Support] If you have questions, please join our [Discord community](https://discord.gg/Dt7MxDyNNZ) for support, updates, and discussions. -::: \ No newline at end of file +::: diff --git a/docs/docs/guides/extensions/import-ext.mdx b/docs/docs/guides/extensions/import-ext.mdx deleted file mode 100644 index 28fa9045d..000000000 --- a/docs/docs/guides/extensions/import-ext.mdx +++ /dev/null @@ -1,36 +0,0 @@ ---- -title: Import Extensions -sidebar_position: 2 -description: A step-by-step guide on how to import extensions. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - import extensions, - ] ---- - - -Besides default extensions, you can import extensions into Jan by following the steps below: - -1. Navigate to **Settings** > **Extensions** > Click Select under **Manual Installation**. -2. Then, the ~/jan/extensions/extensions.json file will be updated automatically. - -:::caution - -You need to prepare the extension file in .tgz format to install the **non-default** extension. - -::: - - -:::info[Assistance and Support] - -If you have questions, please join our [Discord community](https://discord.gg/Dt7MxDyNNZ) for support, updates, and discussions. 
- -::: \ No newline at end of file diff --git a/docs/docs/guides/faq.mdx b/docs/docs/guides/faq.mdx deleted file mode 100644 index 7e3d7d13d..000000000 --- a/docs/docs/guides/faq.mdx +++ /dev/null @@ -1,99 +0,0 @@ ---- -title: FAQs -slug: /guides/faqs -sidebar_position: 12 -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - acknowledgements, - third-party libraries, - ] ---- - -## General Issues - -- **Why can't I download models like Pandora 11B Q4 and Solar Instruct 10.7B Q4?** - - These models might have been removed or taken down. Please check the [Pre-configured Models](models-list.mdx) for the latest updates on model availability. - -- **Why does Jan display "Apologies, something's amiss" when I try to run it?** - - This issue may arise if you're using an older Intel chip that does not fully support AVX instructions required for running AI models. Upgrading your hardware may resolve this issue. - -- **How can I use Jan in Russia?** - - To use Jan in Russia, a VPN or [HTTPS - Proxy](./advanced-settings/http-proxy.mdx) is recommended to bypass any regional restrictions that might be in place. - -- **I'm experiencing an error on startup from Nitro. What should I do?** - - If you encounter errors with Nitro, try switching the path to use the Nitro executable for the version 12-0. This adjustment can help resolve path-related issues. - -## Download and Installation Issues - -- **What does "Error occurred: Unexpected token" mean?** - - This error usually indicates a problem with your internet connection or that your access to certain resources is being blocked. Using a VPN or [HTTPS - Proxy](./advanced-settings/http-proxy.mdx) can help avoid these issues by providing a secure and unrestricted internet connection. 
- -- **Why aren't my downloads working?** - - If you're having trouble downloading directly through Jan, you might want to download the model separately and then import it into Jan. Detailed instructions are available on [here](install.mdx). - -- **Jan AI doesn't open on my Mac with an Intel processor. What can I do?** - - Granting the `.npm` folder permission for the user can resolve issues related to permissions on macOS, especially for users with Intel processors. - -- **What should I do if the model download freezes?** - - If a model download freezes, consider importing the models manually. You can find more detailed guidance on how to do this at [Manual Import](./models/import-models.mdx) article. - -- **I received a message that the model GPT4 does not exist or I do not have access. What should I do?** - - This message typically means you need to top up your credit with OpenAI or check your access permissions for the model. - -- **I can't download models from "Explore the Hub." What's the solution?** - - Uninstalling Jan, clearing the cache, and reinstalling it following the guide provided [here](install.mdx) may help. Also, consider downloading the `.gguf` model via a browser as an alternative approach. - -## Technical Issues and Solutions - -- **How can I download models with a socks5 proxy or import a local model file?** - - Nightly builds of Jan offer support for downloading models with socks5 proxies or importing local model files. - -- **My device shows no GPU usage and lacks a Settings folder. What should I do?** - - Using the nightly builds of Jan can address issues related to GPU usage and the absence of a Settings folder, as these builds contain the latest fixes and features. - -- **Why does Jan display a toast message saying a model is loaded when it is not actually loaded?** - - This issue can be resolved by downloading the `.gguf` file from Hugging Face and replacing it in the model folder. This ensures the correct model is loaded. 
- -- **How to enable CORS when running Nitro?** - - By default, CORS (Cross-Origin Resource Sharing) is disabled when running Nitro. Enabling CORS can be necessary for certain operations and integrations. Check the official documentation for instructions on how to enable CORS if your workflow requires it. - -## Compatibility and Support - -- **How to use GPU AMD for Jan?** - - Jan now supports AMD GPUs through Vulkan. This enhancement allows users with AMD graphics cards to leverage GPU acceleration, improving performance for AI model computations. - -- **Is Jan available for Android or iOS?** - - Jan is primarily focused on the Desktop app and does not currently offer mobile apps for Android or iOS. The development team is concentrating on enhancing the desktop experience. - -## Development and Features - -- **Does Jan support Safetensors?** - - At the moment, Jan only supports GGUF. However, there are plans to support `.safetensor` files in the future. - -- **I hope to customize the installation path of each model. Is that possible?** - - Yes you can customize the installation path. Please see [here](https://jan.ai/guides/advanced-settings/#access-the-jan-data-folder) for more information. - -## Troubleshooting - -- **What should I do if there's high CPU usage while Jan is idle?** - - If you notice high CPU usage while Jan is idle, consider using the nightly builds of Jan - -- **What does the error "Failed to fetch" mean, and how can I fix it?** - - The "Failed to fetch" error typically occurs due to network issues or restrictions. Using the nightly builds of Jan may help overcome these issues by providing updated fixes and features. - -- **What should I do if "Failed to fetch" occurs using MacBook Pro with Intel HD Graphics 4000 1536 MB?** - - Ensure that the model size is less than 90% of your available VRAM and that the VRAM is accessible to the app. Managing the resources effectively can help mitigate this issue. 
- -:::info[Assistance and Support] - -If you have questions, please join our [Discord community](https://discord.gg/Dt7MxDyNNZ) for support, updates, and discussions. - -::: \ No newline at end of file diff --git a/docs/docs/guides/get-started/asset/download.gif b/docs/docs/guides/get-started/asset/download.gif new file mode 100644 index 000000000..10cb4b466 Binary files /dev/null and b/docs/docs/guides/get-started/asset/download.gif differ diff --git a/docs/docs/guides/get-started/asset/gpt.gif b/docs/docs/guides/get-started/asset/gpt.gif new file mode 100644 index 000000000..3972e812f Binary files /dev/null and b/docs/docs/guides/get-started/asset/gpt.gif differ diff --git a/docs/docs/guides/get-started/asset/model.gif b/docs/docs/guides/get-started/asset/model.gif new file mode 100644 index 000000000..e5abdf757 Binary files /dev/null and b/docs/docs/guides/get-started/asset/model.gif differ diff --git a/docs/docs/guides/get-started/hardware-setup.mdx b/docs/docs/guides/get-started/hardware-setup.mdx new file mode 100644 index 000000000..7225708cf --- /dev/null +++ b/docs/docs/guides/get-started/hardware-setup.mdx @@ -0,0 +1,25 @@ +--- +title: Hardware Setup +slug: /guides/hardware +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +sidebar_position: 3 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + hardware requirements, + Nvidia, + AMD, + CPU, + GPU, + ] +--- + +Coming Soon diff --git a/docs/docs/guides/get-started/overview.mdx b/docs/docs/guides/get-started/overview.mdx new file mode 100644 index 000000000..c979d53b9 --- /dev/null +++ b/docs/docs/guides/get-started/overview.mdx @@ -0,0 +1,20 @@ +--- +title: Overview +slug: /guides +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
+sidebar_position: 1 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + ] +--- + +Coming Soon diff --git a/docs/docs/guides/get-started/quickstart.mdx b/docs/docs/guides/get-started/quickstart.mdx new file mode 100644 index 000000000..243e4a59e --- /dev/null +++ b/docs/docs/guides/get-started/quickstart.mdx @@ -0,0 +1,277 @@ +--- +title: Quickstart +slug: /guides/quickstart +description: Get started quickly with Jan, a ChatGPT-alternative that runs on your own computer, with a local API server. Learn how to install Jan and select an AI model to start chatting. +sidebar_position: 2 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + quickstart, + getting started, + using AI model, + installation, + ] +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; +import download from './asset/download.gif'; +import gpt from './asset/gpt.gif'; +import model from './asset/model.gif'; + +To get started quickly with Jan, follow the steps below: + +## Step 1: Get Jan Desktop + + + + + #### Pre-requisites + Before installing Jan, ensure : + - You have a Mac with an Apple Silicon Processor. + - Homebrew and its dependencies are installed for installing Jan with Homebrew package. + - Your macOS version is 10.15 or higher. + + #### Stable Releases + + To download stable releases, go to [Jan](https://jan.ai/) > select **Download for Mac**. + + The download should be available as a `.dmg`. + + #### Nightly Releases + + We provide the Nightly Release so that you can test new features and see what might be coming in a future stable release. Please be aware that there might be bugs! 
+ + You can download it from [Jan's Discord](https://discord.gg/FTk2MvZwJH) in the [`#nightly-builds`](https://discord.gg/q8szebnxZ7) channel. + + #### Experimental Model + + To enable the experimental mode, go to **Settings** > **Advanced Settings** and toggle the **Experimental Mode** + + #### Install with Homebrew + Install Jan with the following Homebrew command: + + ```brew + brew install --cask jan + ``` + +:::warning + +Homebrew package installation is currently limited to **Apple Silicon Macs**, with upcoming support for Windows and Linux. + +::: + + + + + #### Pre-requisites + Ensure that your system meets the following requirements: + - Windows 10 or higher is required to run Jan. + + To enable GPU support, you will need: + - NVIDIA GPU with CUDA Toolkit 11.7 or higher + - NVIDIA driver 470.63.01 or higher + + #### Stable Releases + + To download stable releases, go to [Jan](https://jan.ai/) > select **Download for Windows**. + + The download should be available as a `.exe` file. + + #### Nightly Releases + + We provide the Nightly Release so that you can test new features and see what might be coming in a future stable release. Please be aware that there might be bugs! + + You can download it from [Jan's Discord](https://discord.gg/FTk2MvZwJH) in the [`#nightly-builds`](https://discord.gg/q8szebnxZ7) channel. + + #### Experimental Model + + To enable the experimental mode, go to **Settings** > **Advanced Settings** and toggle the **Experimental Mode** + + #### Default Installation Directory + + By default, Jan is installed in the following directory: + + ```sh + # Default installation directory + C:\Users\{username}\AppData\Local\Programs\Jan + ``` + +:::warning + +If you are stuck in a broken build, go to the [Broken Build](/guides/common-error/broken-build) section of Common Errors. 
+ +::: + + + + + #### Pre-requisites + Ensure that your system meets the following requirements: + - glibc 2.27 or higher (check with `ldd --version`) + - gcc 11, g++ 11, cpp 11, or higher, refer to this link for more information. + + To enable GPU support, you will need: + - NVIDIA GPU with CUDA Toolkit 11.7 or higher + - NVIDIA driver 470.63.01 or higher + + #### Stable Releases + + To download stable releases, go to [Jan](https://jan.ai/) > select **Download for Linux**. + + The download should be available as a `.AppImage` file or a `.deb` file. + + #### Nightly Releases + + We provide the Nightly Release so that you can test new features and see what might be coming in a future stable release. Please be aware that there might be bugs! + + You can download it from [Jan's Discord](https://discord.gg/FTk2MvZwJH) in the [`#nightly-builds`](https://discord.gg/q8szebnxZ7) channel. + + #### Experimental Model + + To enable the experimental mode, go to **Settings** > **Advanced Settings** and toggle the **Experimental Mode** + + + + + To install Jan, you should use your package manager's install or `dpkg`. + + + + + To install Jan, run the following command: + + ```sh + # Install Jan using dpkg + sudo dpkg -i jan-linux-amd64-{version}.deb + + # Install Jan using apt-get + sudo apt-get install ./jan-linux-amd64-{version}.deb + # where jan-linux-amd64-{version}.deb is path to the Jan package + ``` + + + + + To install Jan, run the following commands: + + ```sh + # Install Jan using AppImage + chmod +x jan-linux-x86_64-{version}.AppImage + ./jan-linux-x86_64-{version}.AppImage + # where jan-linux-x86_64-{version}.AppImage is path to the Jan package + ``` + + + + +:::warning + +If you are stuck in a broken build, go to the [Broken Build](/guides/common-error/broken-build) section of Common Errors. + +::: + + + + + +## Step 2: Download a Model + +Jan provides a variety of local AI models tailored to different needs, ready for download. 
These models are installed and run directly on the user's device. + +1. Go to the **Hub**. +2. Select the models that you would like to install. To see a model's details, click the dropdown button. +3. Click the **Download** button. +
+ +
+ Download a Model +
+ +
+ +:::note + +Ensure you select the appropriate model size by balancing performance, cost, and resource considerations in line with your task's specific requirements and hardware specifications. +::: + +## Step 3: Connect to ChatGPT (Optional) + +Jan also provides access to remote models hosted on external servers, requiring an API key for connectivity. For example, to use the ChatGPT model with Jan, you must input your API key by following these steps: + +1. Go to the **Thread** tab. +2. Under the Model dropdown menu, select the ChatGPT model. +3. Fill in your ChatGPT API Key that you can get in your [OpenAI platform](https://platform.openai.com/account/api-keys). + +
+ +
+ Connect to ChatGPT +
+ +
+ +## Step 4: Chat with Models + +After downloading and configuring your model, you can immediately use it in the **Thread** tab. + +
+ +
+ Chat with a model +
+ +
+ +## Best Practices + +This section outlines best practices for developers, analysts, and AI enthusiasts to enhance their experience with Jan when adding AI locally to their computers. Implementing these practices will optimize the performance of AI models. + +### Follow the Quickstart Guide + +The quickstart guide above is designed to facilitate a quick setup process. It provides clear instructions and simple steps to get you up and running with Jan quickly, even if you are inexperienced in AI. + +### Select the Right Models + +Jan offers a range of pre-configured AI models that are suited for different purposes. You should identify which one aligns with your objectives. There are factors to be considered: + +- Capabilities +- Accuracy +- Processing Speed + +:::note + +- Some of these factors also depend on your hardware; please see the Hardware Requirements. +- Choosing the right model is important to achieve the best performance. + ::: + +### Setting up Jan + +Ensure that you familiarize yourself with the Jan application. Jan offers advanced settings that you can adjust. These settings may influence how your AI behaves locally. Please see the [Advanced Settings](/guides/advanced) article for a complete list of Jan's configurations and instructions on how to configure them. + +### Integrations + +Jan can work with many different systems and tools. Whether you are incorporating Jan with any open-source LLM provider or other tools, it is important to understand the integration capabilities and limitations. + +### Mastering Prompt Engineering + +Prompt engineering is an important aspect when dealing with AI models to generate the desired outputs. Mastering this skill can significantly enhance the performance and the responses of the AI. 
Below are some tips for prompt engineering: + +- Ask the model to adopt a persona +- Be specific and detailed to get more specific answers +- Provide examples or reference text or context at the beginning +- Use clear and concise language +- Use certain keywords and phrases + +## Pre-configured Models + +To see the full list of Jan's pre-configured models, please see our official GitHub [here](https://github.com/janhq/jan). diff --git a/docs/docs/guides/get-started/settingup-gpu.mdx b/docs/docs/guides/get-started/settingup-gpu.mdx new file mode 100644 index 000000000..cd8fb3556 --- /dev/null +++ b/docs/docs/guides/get-started/settingup-gpu.mdx @@ -0,0 +1,20 @@ +--- +title: Setting Up GPU +slug: /guides/hardware/gpu +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +sidebar_position: 1 +keywords: + [
    Jan,
    Rethink the Computer,
    local AI,
    privacy focus,
    free and open source,
    private and offline,
    conversational AI,
    no-subscription fee,
    large language models,
  ] +--- + +Coming Soon diff --git a/docs/docs/guides/inference/overview-inference.mdx b/docs/docs/guides/inference/overview-inference.mdx new file mode 100644 index 000000000..79e306a67 --- /dev/null +++ b/docs/docs/guides/inference/overview-inference.mdx @@ -0,0 +1,20 @@ +--- +title: Overview +slug: /guides/engines +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
+sidebar_position: 12 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + ] +--- + +Coming Soon diff --git a/docs/docs/guides/install.mdx b/docs/docs/guides/install.mdx deleted file mode 100644 index c8dcbf3c3..000000000 --- a/docs/docs/guides/install.mdx +++ /dev/null @@ -1,284 +0,0 @@ ---- -title: Installation -sidebar_position: 2 -hide_table_of_contents: true -description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - ] ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; -import installImageURL from './assets/jan-ai-download.png'; - - - - - ### Pre-requisites - Before installing Jan, ensure : - - You have a Mac with an Apple Silicon Processor. - - Homebrew and its dependencies are installed. (for Installing Jan with Homebrew Package) - - Your macOS version is 10.15 or higher. - - ### Stable Releases - - To download stable releases, go to [Jan.ai](https://jan.ai/) > select **Download for Mac**. - - The download should be available as a `.dmg`. - - ### Nightly Releases - - We provide the Nightly Release so that you can test new features and see what might be coming in a future stable release. Please be aware that there might be bugs! - - You can download it from [Jan's Discord](https://discord.gg/FTk2MvZwJH) in the [`#nightly-builds`](https://discord.gg/q8szebnxZ7) channel. 
- - ### Experimental Model - - To enable the experimental mode, go to **Settings** > **Advanced Settings** and toggle the **Experimental Mode** - - ### Install with Homebrew - Install Jan with the following Homebrew command: - - ```brew - brew install --cask jan - ``` - -:::warning - -Homebrew package installation is currently limited to **Apple Silicon Macs**, with upcoming support for Windows and Linux. - -::: - - - - - ### Pre-requisites - Ensure that your system meets the following requirements: - - Windows 10 or higher is required to run Jan. - - To enable GPU support, you will need: - - NVIDIA GPU with CUDA Toolkit 11.7 or higher - - NVIDIA driver 470.63.01 or higher - - ### Stable Releases - - To download stable releases, go to [Jan.ai](https://jan.ai/) > select **Download for Windows**. - - The download should be available as a `.exe` file. - - ### Nightly Releases - - We provide the Nightly Release so that you can test new features and see what might be coming in a future stable release. Please be aware that there might be bugs! - - You can download it from [Jan's Discord](https://discord.gg/FTk2MvZwJH) in the [`#nightly-builds`](https://discord.gg/q8szebnxZ7) channel. - - ### Experimental Model - - To enable the experimental mode, go to **Settings** > **Advanced Settings** and toggle the **Experimental Mode** - - ### Default Installation Directory - - By default, Jan is installed in the following directory: - - ```sh - # Default installation directory - C:\Users\{username}\AppData\Local\Programs\Jan - ``` - -:::warning - -If you are stuck in a broken build, go to the [Broken Build](/guides/common-error/broken-build) section of Common Errors. - -::: - - - - - ### Pre-requisites - Ensure that your system meets the following requirements: - - glibc 2.27 or higher (check with `ldd --version`) - - gcc 11, g++ 11, cpp 11, or higher, refer to this link for more information. 
- - To enable GPU support, you will need: - - NVIDIA GPU with CUDA Toolkit 11.7 or higher - - NVIDIA driver 470.63.01 or higher - - ### Stable Releases - - To download stable releases, go to [Jan.ai](https://jan.ai/) > select **Download for Linux**. - - The download should be available as a `.AppImage` file or a `.deb` file. - - ### Nightly Releases - - We provide the Nightly Release so that you can test new features and see what might be coming in a future stable release. Please be aware that there might be bugs! - - You can download it from [Jan's Discord](https://discord.gg/FTk2MvZwJH) in the [`#nightly-builds`](https://discord.gg/q8szebnxZ7) channel. - - ### Experimental Model - - To enable the experimental mode, go to **Settings** > **Advanced Settings** and toggle the **Experimental Mode** - - - - - To install Jan, you should use your package manager's install or `dpkg`. - - - - - To install Jan, run the following command: - - ```sh - # Install Jan using dpkg - sudo dpkg -i jan-linux-amd64-{version}.deb - - # Install Jan using apt-get - sudo apt-get install ./jan-linux-amd64-{version}.deb - # where jan-linux-amd64-{version}.deb is path to the Jan package - ``` - - - - - To install Jan, run the following commands: - - ```sh - # Install Jan using AppImage - chmod +x jan-linux-x86_64-{version}.AppImage - ./jan-linux-x86_64-{version}.AppImage - # where jan-linux-x86_64-{version}.AppImage is path to the Jan package - ``` - - - - -:::warning - -If you are stuck in a broken build, go to the [Broken Build](/guides/common-error/broken-build) section of Common Errors. - -::: - - - - ### Pre-requisites - Ensure that your system meets the following requirements: - - Linux or WSL2 Docker - - Latest Docker Engine and Docker Compose - - To enable GPU support, you will need: - - `nvidia-driver` - - `nvidia-docker2` - -:::note -- If you have not installed Docker, follow the instructions [here](https://docs.docker.com/engine/install/ubuntu/). 
-- If you have not installed the required file for GPU support, follow the instructions [here](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html). -::: - - ### Run Jan in Docker - You can run Jan in Docker with two methods: - 1. Run Jan in CPU mode - 2. Run Jan in GPU mode - - - - To run Jan in Docker CPU mode, by using the following code: - - ```bash - # cpu mode with default file system - docker compose --profile cpu-fs up -d - - # cpu mode with S3 file system - docker compose --profile cpu-s3fs up -d - ``` - - - - - To run Jan in Docker CPU mode, follow the steps below: - 1. Check CUDA compatibility with your NVIDIA driver by running nvidia-smi and check the CUDA version in the output as shown below: - ```sh - nvidia-smi - - # Output - +---------------------------------------------------------------------------------------+ - | NVIDIA-SMI 531.18 Driver Version: 531.18 CUDA Version: 12.1 | - |-----------------------------------------+----------------------+----------------------+ - | GPU Name TCC/WDDM | Bus-Id Disp.A | Volatile Uncorr. ECC | - | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. | - | | | MIG M. 
| - |=========================================+======================+======================| - | 0 NVIDIA GeForce RTX 4070 Ti WDDM | 00000000:01:00.0 On | N/A | - | 0% 44C P8 16W / 285W| 1481MiB / 12282MiB | 2% Default | - | | | N/A | - +-----------------------------------------+----------------------+----------------------+ - | 1 NVIDIA GeForce GTX 1660 Ti WDDM | 00000000:02:00.0 Off | N/A | - | 0% 49C P8 14W / 120W| 0MiB / 6144MiB | 0% Default | - | | | N/A | - +-----------------------------------------+----------------------+----------------------+ - | 2 NVIDIA GeForce GTX 1660 Ti WDDM | 00000000:05:00.0 Off | N/A | - | 29% 38C P8 11W / 120W| 0MiB / 6144MiB | 0% Default | - | | | N/A | - +-----------------------------------------+----------------------+----------------------+ - - +---------------------------------------------------------------------------------------+ - | Processes: | - | GPU GI CI PID Type Process name GPU Memory | - | ID ID Usage | - |=======================================================================================| - ``` - 2. Visit [NVIDIA NGC Catalog](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/cuda/tags) and find the smallest minor version of image tag that matches your CUDA version (e.g., 12.1 -> 12.1.0) - 3. Update the `Dockerfile.gpu` line number 5 with the latest minor version of the image tag from step 2 (e.g. change `FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 AS base` to `FROM nvidia/cuda:12.1.0-runtime-ubuntu22.04 AS base`) - 4. 
Run Jan in GPU mode by using the following command: - - ```bash - # GPU mode with default file system - docker compose --profile gpu-fs up -d - - # GPU mode with S3 file system - docker compose --profile gpu-s3fs up -d - ``` - - - - ### Docker Compose Profile and Environment - The available Docker Compose profile and the environment variables listed below: - - #### Docker Compose Profile - - | Profile | Description | - |-----------|-------------------------------------------| - | cpu-fs | Run Jan in CPU mode with default file system | - | cpu-s3fs | Run Jan in CPU mode with S3 file system | - | gpu-fs | Run Jan in GPU mode with default file system | - | gpu-s3fs | Run Jan in GPU mode with S3 file system | - - #### Environment Variables - - | Environment Variable | Description | - |--------------------------|------------------------------------------------------------| - | S3_BUCKET_NAME | S3 bucket name - leave blank for default file system | - | AWS_ACCESS_KEY_ID | AWS access key ID - leave blank for default file system | - | AWS_SECRET_ACCESS_KEY | AWS secret access key - leave blank for default file system| - | AWS_ENDPOINT | AWS endpoint URL - leave blank for default file system | - | AWS_REGION | AWS region - leave blank for default file system | - | API_BASE_URL | Jan Server URL, please modify it as your public ip address or domain name default http://localhost:1377 | - - -:::warning - -If you are stuck in a broken build, go to the [Broken Build](/guides/common-error/broken-build/) section of Common Errors. 
- -::: - - - \ No newline at end of file diff --git a/docs/docs/guides/installation/README.mdx b/docs/docs/guides/installation/README.mdx new file mode 100644 index 000000000..cd8ca4cc5 --- /dev/null +++ b/docs/docs/guides/installation/README.mdx @@ -0,0 +1,107 @@ +--- +title: Installation +sidebar_position: 4 +slug: /guides/install/ +hide_table_of_contents: true +description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + ] +--- + +## Jan Device Compatible + +Jan is compatible with macOS, Windows, and Linux, making it accessible for a wide range of users. This compatibility allows users to leverage Jan's AI tools effectively, regardless of their device or operating system. + +:::note +For detailed system requirements and setup instructions, refer to our [Hardware Setup](/guides/hardware/) guide. +::: + +import DocCardList from "@theme/DocCardList"; + + + +## Install Server-Side + +To install Jan from source, follow the steps below: + +### Pre-requisites + +Before proceeding with the installation of Jan from source, ensure that the following software versions are installed on your system: + +- Node.js version 20.0.0 or higher +- Yarn version 1.22.0 or higher + +### Install Jan Development Build + +1. Clone the Jan repository from GitHub by using the following command: + +```bash +git clone https://github.com/janhq/jan +git checkout DESIRED_BRANCH +cd jan +``` + +2. Install the required dependencies by using the following Yarn command: + +```bash +yarn install + +# Build core module +yarn build:core + +# Packing base plugins +yarn build:plugins + +# Packing uikit +yarn build:uikit +``` + +3. Run the development server. + +```bash +yarn dev +``` + +This will start the development server and open the desktop app. 
During this step, you may encounter notifications about installing base plugins. Simply click **OK** and **Next** to continue. + +### Install Jan Production Build + +1. Clone the Jan repository from GitHub by using the following command: + +```bash +git clone https://github.com/janhq/jan +cd jan +``` + +2. Install the required dependencies by using the following Yarn command: + +```bash +yarn install + +# Build core module +yarn build:core + +# Packing base plugins +yarn build:plugins + +# Packing uikit +yarn build:uikit +``` + +3. Run the production server. + +```bash +yarn +``` + +This completes the installation process for Jan from source. The production-ready app for macOS can be found in the dist folder. diff --git a/docs/docs/guides/installation/docker.mdx b/docs/docs/guides/installation/docker.mdx new file mode 100644 index 000000000..55557058b --- /dev/null +++ b/docs/docs/guides/installation/docker.mdx @@ -0,0 +1,137 @@ +--- +title: Install with Docker +sidebar_position: 4 +slug: /guides/install/server +hide_table_of_contents: true +description: A step-by-step guide to install Jan using Docker. +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + Install on Docker, + Docker, + Helm, + ] +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +### Pre-requisites + + Ensure that your system meets the following requirements: + - Linux or WSL2 Docker + - Latest Docker Engine and Docker Compose + + To enable GPU support, you will need: + - `nvidia-driver` + - `nvidia-docker2` + + +:::note + +- If you have not installed Docker, follow the instructions [here](https://docs.docker.com/engine/install/ubuntu/). +- If you have not installed the required file for GPU support, follow the instructions [here](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html). 
+ ::: + + ### Run Jan in Docker + You can run Jan in Docker with two methods: + 1. Run Jan in CPU mode + 2. Run Jan in GPU mode + + + + To run Jan in Docker CPU mode, by using the following code: + + ```bash + # cpu mode with default file system + docker compose --profile cpu-fs up -d + + # cpu mode with S3 file system + docker compose --profile cpu-s3fs up -d + ``` + + + + + To run Jan in Docker CPU mode, follow the steps below: + 1. Check CUDA compatibility with your NVIDIA driver by running nvidia-smi and check the CUDA version in the output as shown below: + ```sh + nvidia-smi + + # Output + +---------------------------------------------------------------------------------------+ + | NVIDIA-SMI 531.18 Driver Version: 531.18 CUDA Version: 12.1 | + |-----------------------------------------+----------------------+----------------------+ + | GPU Name TCC/WDDM | Bus-Id Disp.A | Volatile Uncorr. ECC | + | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. | + | | | MIG M. | + |=========================================+======================+======================| + | 0 NVIDIA GeForce RTX 4070 Ti WDDM | 00000000:01:00.0 On | N/A | + | 0% 44C P8 16W / 285W| 1481MiB / 12282MiB | 2% Default | + | | | N/A | + +-----------------------------------------+----------------------+----------------------+ + | 1 NVIDIA GeForce GTX 1660 Ti WDDM | 00000000:02:00.0 Off | N/A | + | 0% 49C P8 14W / 120W| 0MiB / 6144MiB | 0% Default | + | | | N/A | + +-----------------------------------------+----------------------+----------------------+ + | 2 NVIDIA GeForce GTX 1660 Ti WDDM | 00000000:05:00.0 Off | N/A | + | 29% 38C P8 11W / 120W| 0MiB / 6144MiB | 0% Default | + | | | N/A | + +-----------------------------------------+----------------------+----------------------+ + + +---------------------------------------------------------------------------------------+ + | Processes: | + | GPU GI CI PID Type Process name GPU Memory | + | ID ID Usage | + 
|=======================================================================================| + ``` + 2. Visit [NVIDIA NGC Catalog](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/cuda/tags) and find the smallest minor version of image tag that matches your CUDA version (e.g., 12.1 -> 12.1.0) + 3. Update the `Dockerfile.gpu` line number 5 with the latest minor version of the image tag from step 2 (e.g. change `FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 AS base` to `FROM nvidia/cuda:12.1.0-runtime-ubuntu22.04 AS base`) + 4. Run Jan in GPU mode by using the following command: + + ```bash + # GPU mode with default file system + docker compose --profile gpu-fs up -d + + # GPU mode with S3 file system + docker compose --profile gpu-s3fs up -d + ``` + + + + ### Docker Compose Profile and Environment + The available Docker Compose profile and the environment variables listed below: + + #### Docker Compose Profile + + | Profile | Description | + |-----------|-------------------------------------------| + | cpu-fs | Run Jan in CPU mode with default file system | + | cpu-s3fs | Run Jan in CPU mode with S3 file system | + | gpu-fs | Run Jan in GPU mode with default file system | + | gpu-s3fs | Run Jan in GPU mode with S3 file system | + + #### Environment Variables + + | Environment Variable | Description | + |--------------------------|------------------------------------------------------------| + | S3_BUCKET_NAME | S3 bucket name - leave blank for default file system | + | AWS_ACCESS_KEY_ID | AWS access key ID - leave blank for default file system | + | AWS_SECRET_ACCESS_KEY | AWS secret access key - leave blank for default file system| + | AWS_ENDPOINT | AWS endpoint URL - leave blank for default file system | + | AWS_REGION | AWS region - leave blank for default file system | + | API_BASE_URL | Jan Server URL, please modify it as your public ip address or domain name default http://localhost:1377 | + +:::warning + +If you are stuck in a broken build, go to the [Broken 
Build](/troubleshooting/#broken-build) section of Common Errors. + +::: diff --git a/docs/docs/guides/installation/linux.mdx b/docs/docs/guides/installation/linux.mdx new file mode 100644 index 000000000..2e03fd6e7 --- /dev/null +++ b/docs/docs/guides/installation/linux.mdx @@ -0,0 +1,23 @@ +--- +title: Install on Linux +sidebar_position: 3 +slug: /guides/install/linux +hide_table_of_contents: true +description: A step-by-step guide to install Jan on your Linux. +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + Install on Linux, + Linux, + ] +--- + +Coming soon diff --git a/docs/docs/guides/installation/mac.mdx b/docs/docs/guides/installation/mac.mdx new file mode 100644 index 000000000..519fef644 --- /dev/null +++ b/docs/docs/guides/installation/mac.mdx @@ -0,0 +1,24 @@ +--- +title: Install on Mac +sidebar_position: 1 +slug: /guides/install/mac +hide_table_of_contents: true +description: A step-by-step guide to install Jan on your Mac. +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + MacOs, + Install on Mac, + Apple devices, + ] +--- + +Coming soon diff --git a/docs/docs/guides/installation/windows.mdx b/docs/docs/guides/installation/windows.mdx new file mode 100644 index 000000000..58bd1597f --- /dev/null +++ b/docs/docs/guides/installation/windows.mdx @@ -0,0 +1,25 @@ +--- +title: Install on Windows +sidebar_position: 2 +slug: /guides/install/windows +hide_table_of_contents: true +description: A step-by-step guide to install Jan on your Windows. 
+keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + Windows 10, + Windows 11, + Install on Windows, + Microsoft devices, + ] +--- + +Coming soon diff --git a/docs/docs/guides/integration/discord.mdx b/docs/docs/guides/integration/discord.mdx deleted file mode 100644 index 5cf846883..000000000 --- a/docs/docs/guides/integration/discord.mdx +++ /dev/null @@ -1,58 +0,0 @@ ---- -title: Discord -sidebar_position: 5 -description: A step-by-step guide on how to integrate Jan with a Discord bot. ---- - -## How to Integrate Discord Bot with Jan - -Discord bot can enhances your discord server interactions. By integrating Jan with it, you can significantly boost responsiveness and user engaggement in your discord server. - -To integrate Jan with a Discord bot, follow the steps below: - -### Step 1: Clone the repository - -To make this integration successful, it is necessary to clone the discord bot's [repository](https://github.com/jakobdylanc/discord-llm-chatbot). - -### Step 2: Install the Required Libraries - -After cloning the repository, run the following command: - -```sh -pip install -r requirements.txt -``` - -### Step 3: Set the Environment -1. Create a copy of `.env.example`. -2. Change the name to `.env`. -3. Set the environment with the following options: - -| Setting | Instructions | -| ------- | ------------ | -| `DISCORD_BOT_TOKEN` | Generate a new Discord application at [discord.com/developers/applications](https://discord.com/developers/applications), obtain a token from the Bot tab, and enable MESSAGE CONTENT INTENT. | -| `LLM` | For [Jan](https://jan.ai/), set to `local/openai/(MODEL_NAME)`, where `(MODEL_NAME)` is your loaded model's name. | -| `CUSTOM_SYSTEM_PROMPT` | Adjust the bot's behavior as needed. | -| `CUSTOM_DISCORD_STATUS` | Set a custom message for the bot's Discord profile. 
(Max 128 characters) | -| `ALLOWED_CHANNEL_IDS` | Enter Discord channel IDs where the bot can send messages, separated by commas. Leave blank to allow all channels. | -| `ALLOWED_ROLE_IDS` | Enter Discord role IDs allowed to use the bot, separated by commas. Leave blank to allow everyone. Including at least one role also disables DMs. | -| `MAX_IMAGES` | Max number of image attachments allowed per message when using a vision model. (Default: `5`) | -| `MAX_MESSAGES` | Max messages allowed in a reply chain. (Default: `20`) | -| `LOCAL_SERVER_URL` | URL of your local API server for LLMs starting with `local/`. (Default: `http://localhost:5000/v1`) | -| `LOCAL_API_KEY` | API key for your local API server with LLMs starting with `local/`. Usually safe to leave blank. | - -### Step 4: Insert the Bot -Invite the bot to your Discord server using the following URL: - -``` -https://discord.com/api/oauth2/authorize?client_id=(CLIENT_ID)&permissions=412317273088&scope=bot -``` -:::note -Replace `CLIENT_ID` with your Discord application's client ID from the OAuth2 tab -::: -### Step 5: Run the bot - -Run the bot by using the following command in your command prompt: - -```sh -python llmcord.py -``` \ No newline at end of file diff --git a/docs/docs/guides/integration/README.mdx b/docs/docs/guides/integrations/README.mdx similarity index 64% rename from docs/docs/guides/integration/README.mdx rename to docs/docs/guides/integrations/README.mdx index 1fc20ed66..7ede1233b 100644 --- a/docs/docs/guides/integration/README.mdx +++ b/docs/docs/guides/integrations/README.mdx @@ -1,18 +1,19 @@ --- title: Integrations -slug: /guides/integrations/ -sidebar_position: 6 +slug: /integrations/ +sidebar_position: 1 description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, build extension, ] --- diff --git a/docs/docs/guides/integrations/crewai.mdx b/docs/docs/guides/integrations/crewai.mdx new file mode 100644 index 000000000..bda409059 --- /dev/null +++ b/docs/docs/guides/integrations/crewai.mdx @@ -0,0 +1,23 @@ +--- +title: CrewAI +sidebar_position: 19 +description: A step-by-step guide on how to integrate Jan with CrewAI. +slug: /integrations/crewai +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + Continue integration, + CrewAI integration, + CrewAI, + ] +--- + +Coming Soon diff --git a/docs/docs/guides/integrations/discord.mdx b/docs/docs/guides/integrations/discord.mdx new file mode 100644 index 000000000..8fd676f02 --- /dev/null +++ b/docs/docs/guides/integrations/discord.mdx @@ -0,0 +1,78 @@ +--- +title: Discord +slug: /integrations/discord +sidebar_position: 5 +description: A step-by-step guide on how to integrate Jan with a Discord bot. +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + Discord integration, + Discord, + bot, + ] +--- + +## Integrate Discord Bot with Jan + +Discord bot can enhance your discord server interactions. By integrating Jan with it, you can significantly boost responsiveness and user engagement in your discord server. + +To integrate Jan with a Discord bot, follow the steps below: + +### Step 1: Clone the repository + +To make this integration successful, it is necessary to clone the discord bot's [repository](https://github.com/jakobdylanc/discord-llm-chatbot).
+ +### Step 2: Install the Required Libraries + +After cloning the repository, run the following command: + +```sh +pip install -r requirements.txt +``` + +### Step 3: Set the Environment + +1. Create a copy of `.env.example`. +2. Change the name to `.env`. +3. Set the environment with the following options: + +| Setting | Instructions | +| ----------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `DISCORD_BOT_TOKEN` | Generate a new Discord application at [discord.com/developers/applications](https://discord.com/developers/applications), obtain a token from the Bot tab, and enable MESSAGE CONTENT INTENT. | +| `LLM` | For [Jan](https://jan.ai/), set to `local/openai/(MODEL_NAME)`, where `(MODEL_NAME)` is your loaded model's name. | +| `CUSTOM_SYSTEM_PROMPT` | Adjust the bot's behavior as needed. | +| `CUSTOM_DISCORD_STATUS` | Set a custom message for the bot's Discord profile. (Max 128 characters) | +| `ALLOWED_CHANNEL_IDS` | Enter Discord channel IDs where the bot can send messages, separated by commas. Leave blank to allow all channels. | +| `ALLOWED_ROLE_IDS` | Enter Discord role IDs allowed to use the bot, separated by commas. Leave blank to allow everyone. Including at least one role also disables DMs. | +| `MAX_IMAGES` | Max number of image attachments allowed per message when using a vision model. (Default: `5`) | +| `MAX_MESSAGES` | Max messages allowed in a reply chain. (Default: `20`) | +| `LOCAL_SERVER_URL` | URL of your local API server for LLMs starting with `local/`. (Default: `http://localhost:5000/v1`) | +| `LOCAL_API_KEY` | API key for your local API server with LLMs starting with `local/`. Usually safe to leave blank. 
| + +### Step 4: Insert the Bot + +Invite the bot to your Discord server using the following URL: + +``` +https://discord.com/api/oauth2/authorize?client_id=(CLIENT_ID)&permissions=412317273088&scope=bot +``` + +:::note +Replace `CLIENT_ID` with your Discord application's client ID from the OAuth2 tab +::: + +### Step 5: Run the bot + +Run the bot by using the following command in your command prompt: + +```sh +python llmcord.py +``` diff --git a/docs/docs/guides/integration/openinterpreter.mdx b/docs/docs/guides/integrations/interpreter.mdx similarity index 80% rename from docs/docs/guides/integration/openinterpreter.mdx rename to docs/docs/guides/integrations/interpreter.mdx index a844155f5..9acd0fa4b 100644 --- a/docs/docs/guides/integration/openinterpreter.mdx +++ b/docs/docs/guides/integrations/interpreter.mdx @@ -1,11 +1,25 @@ --- title: Open Interpreter +slug: /integrations/interpreter sidebar_position: 6 description: A step-by-step guide on how to integrate Jan with Open Interpreter. +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + Open Interpreter integration, + Open Interpreter, + ] --- - -## How to Integrate Open Interpreter with Jan +## Integrate Open Interpreter with Jan [Open Interpreter](https://github.com/KillianLucas/open-interpreter/) lets LLMs run code (Python, Javascript, Shell, and more) locally. You can chat with Open Interpreter through a ChatGPT-like interface in your terminal by running `interpreter` after installing. 
To integrate Open Interpreter with Jan, follow the steps below: @@ -43,4 +57,4 @@ Before using Open Interpreter, configure the model in `Settings` > `My Model` fo interpreter --api_base http://localhost:1337/v1 --model mistral-ins-7b-q4 ``` -> **Open Interpreter is now ready for use!** \ No newline at end of file +> **Open Interpreter is now ready for use!** diff --git a/docs/docs/guides/integrations/overview-integration.mdx b/docs/docs/guides/integrations/overview-integration.mdx new file mode 100644 index 000000000..344ebaa5f --- /dev/null +++ b/docs/docs/guides/integrations/overview-integration.mdx @@ -0,0 +1,20 @@ +--- +title: Overview +slug: /integrationss +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +sidebar_position: 1 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + ] +--- + +Coming Soon diff --git a/docs/docs/guides/integration/raycast.mdx b/docs/docs/guides/integrations/raycast.mdx similarity index 71% rename from docs/docs/guides/integration/raycast.mdx rename to docs/docs/guides/integrations/raycast.mdx index a626b0061..3611dcf00 100644 --- a/docs/docs/guides/integration/raycast.mdx +++ b/docs/docs/guides/integrations/raycast.mdx @@ -1,11 +1,26 @@ --- title: Raycast -sidebar_position: 4 +slug: /integrations/raycast +sidebar_position: 17 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + raycast integration, + Raycast, + ] description: A step-by-step guide on how to integrate Jan with Raycast. 
--- +## Integrate Raycast with Jan -## How to Integrate Raycast [Raycast](https://www.raycast.com/) is a productivity tool designed for macOS that enhances workflow efficiency by providing quick access to various tasks and functionalities through a keyboard-driven interface. To integrate Raycast with Jan, follow the steps below: ### Step 1: Download the TinyLlama Model @@ -24,4 +39,4 @@ npm i && npm run dev ### Step 3: Search for Nitro and Run the Model -Search for `Nitro` using the program and you can use the models from Jan in RayCast. \ No newline at end of file +Search for `Nitro` using the program and you can use the models from Jan in RayCast. diff --git a/docs/docs/guides/integration/openrouter.mdx b/docs/docs/guides/integrations/router.mdx similarity index 73% rename from docs/docs/guides/integration/openrouter.mdx rename to docs/docs/guides/integrations/router.mdx index 2189db0d9..42d1b1940 100644 --- a/docs/docs/guides/integration/openrouter.mdx +++ b/docs/docs/guides/integrations/router.mdx @@ -1,11 +1,25 @@ --- title: OpenRouter +slug: /integrations/openrouter sidebar_position: 2 description: A step-by-step guide on how to integrate Jan with OpenRouter. +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + OpenRouter integration, + OpenRouter, + ] --- - -## How to Integrate OpenRouter with Jan +## Integrate OpenRouter with Jan [OpenRouter](https://openrouter.ai/docs#quick-start) is a tool that gathers AI models. Developers can utilize its API to engage with diverse large language models, generative image models, and generative 3D object models. @@ -16,15 +30,16 @@ To connect Jan with OpenRouter for accessing remote Large Language Models (LLMs) 1. Find your API keys in the [OpenRouter API Key](https://openrouter.ai/keys). 2. Set the OpenRouter API key in `~/jan/engines/openai.json` file. 
-### Step 2: MModel Configuration +### Step 2: Model Configuration 1. Go to the directory `~/jan/models`. 2. Make a new folder called `openrouter-(modelname)`, like `openrouter-dolphin-mixtral-8x7b`. 3. Inside the folder, create a `model.json` file with the following settings: - - Set the `id` property to the model id obtained from OpenRouter. - - Set the `format` property to `api`. - - Set the `engine` property to `openai`. - - Ensure the `state` property is set to `ready`. + +- Set the `id` property to the model id obtained from OpenRouter. +- Set the `format` property to `api`. +- Set the `engine` property to `openai`. +- Ensure the `state` property is set to `ready`. ```json title="~/jan/models/openrouter-dolphin-mixtral-8x7b/model.json" { @@ -50,9 +65,10 @@ To connect Jan with OpenRouter for accessing remote Large Language Models (LLMs) ``` :::note -For more details regarding the `model.json` settings and parameters fields, please see [here](../models/integrate-remote.mdx#modeljson). +For more details regarding the `model.json` settings and parameters fields, please see [here](/guides/engines/remote-server/#modeljson). ::: - + ### Step 3 : Start the Model -1. Restart Jan and go to the **Hub**. -2. Find your model and click on the **Use** button. \ No newline at end of file + +1. Restart Jan and go to the **Hub**. +2. Find your model and click on the **Use** button. diff --git a/docs/docs/guides/integrations/unsloth.mdx b/docs/docs/guides/integrations/unsloth.mdx new file mode 100644 index 000000000..b99fa5ee7 --- /dev/null +++ b/docs/docs/guides/integrations/unsloth.mdx @@ -0,0 +1,22 @@ +--- +title: Unsloth +sidebar_position: 20 +slug: /integrations/unsloth +description: A step-by-step guide on how to integrate Jan with Unsloth. 
+keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + Continue integration, + Unsloth integration, + ] +--- + +Coming Soon diff --git a/docs/docs/guides/integration/vscode.mdx b/docs/docs/guides/integrations/vscode.mdx similarity index 82% rename from docs/docs/guides/integration/vscode.mdx rename to docs/docs/guides/integrations/vscode.mdx index 0bc112186..943ba7968 100644 --- a/docs/docs/guides/integration/vscode.mdx +++ b/docs/docs/guides/integrations/vscode.mdx @@ -1,17 +1,19 @@ --- -title: Continue -sidebar_position: 1 +title: Continue Integration +sidebar_position: 18 +slug: /integrations/continue description: A step-by-step guide on how to integrate Jan with Continue and VS Code. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, Continue integration, VSCode integration, ] @@ -20,7 +22,7 @@ keywords: import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; -## How to Integrate with Continue VS Code +## Integrate with Continue VS Code [Continue](https://continue.dev/docs/intro) is an open-source autopilot compatible with Visual Studio Code and JetBrains, offering the simplest method to code with any LLM (Local Language Model). @@ -41,6 +43,7 @@ To set up Continue for use with Jan's Local Server, you must activate the Jan AP 3. Press the **Start Server** button ### Step 3: Configure Continue to Use Jan's Local Server + 1. Go to the `~/.continue` directory. @@ -74,11 +77,12 @@ To set up Continue for use with Jan's Local Server, you must activate the Jan AP ] } ``` + 2. Ensure the file has the following configurations: - - Ensure `openai` is selected as the `provider`. 
- - Match the `model` with the one enabled in the Jan API Server. - - Set `apiBase` to `http://localhost:1337`. - - Leave the `apiKey` field to `EMPTY`. + - Ensure `openai` is selected as the `provider`. + - Match the `model` with the one enabled in the Jan API Server. + - Set `apiBase` to `http://localhost:1337`. + - Leave the `apiKey` field to `EMPTY`. ### Step 4: Ensure the Using Model Is Activated in Jan @@ -92,8 +96,7 @@ To set up Continue for use with Jan's Local Server, you must activate the Jan AP 1. Highlight a code snippet and press `Command + Shift + M` to open the Left Panel. 2. Select Jan at the bottom and ask a question about the code, for example, `Explain this code`. - ### 2. Editing the code with the help of a large language model +### 2. Editing the code with the help of a large language model 1. Select a code snippet and use `Command + Shift + L`. 2. Enter your editing request, such as `Add comments to this code`. - \ No newline at end of file diff --git a/docs/docs/guides/common-error/README.mdx b/docs/docs/guides/local-providers/README.mdx similarity index 59% rename from docs/docs/guides/common-error/README.mdx rename to docs/docs/guides/local-providers/README.mdx index f819eb72a..36dbae13e 100644 --- a/docs/docs/guides/common-error/README.mdx +++ b/docs/docs/guides/local-providers/README.mdx @@ -1,18 +1,19 @@ --- -title: Common Error -slug: /guides/common-error/ -sidebar_position: 8 +title: Local Engines +slug: /guides/engines/local +sidebar_position: 13 description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, build extension, ] --- diff --git a/docs/docs/guides/integration/assets/azure.png b/docs/docs/guides/local-providers/assets/azure.png similarity index 100% rename from docs/docs/guides/integration/assets/azure.png rename to docs/docs/guides/local-providers/assets/azure.png diff --git a/docs/docs/guides/integration/assets/cont.png b/docs/docs/guides/local-providers/assets/cont.png similarity index 100% rename from docs/docs/guides/integration/assets/cont.png rename to docs/docs/guides/local-providers/assets/cont.png diff --git a/docs/docs/guides/integration/assets/discordflow.png b/docs/docs/guides/local-providers/assets/discordflow.png similarity index 100% rename from docs/docs/guides/integration/assets/discordflow.png rename to docs/docs/guides/local-providers/assets/discordflow.png diff --git a/docs/docs/guides/providers/image.png b/docs/docs/guides/local-providers/assets/image.png similarity index 100% rename from docs/docs/guides/providers/image.png rename to docs/docs/guides/local-providers/assets/image.png diff --git a/docs/docs/guides/integration/assets/interpreter.png b/docs/docs/guides/local-providers/assets/interpreter.png similarity index 100% rename from docs/docs/guides/integration/assets/interpreter.png rename to docs/docs/guides/local-providers/assets/interpreter.png diff --git a/docs/docs/guides/integration/assets/jan-ai-continue-ask.png b/docs/docs/guides/local-providers/assets/jan-ai-continue-ask.png similarity index 100% rename from docs/docs/guides/integration/assets/jan-ai-continue-ask.png rename to docs/docs/guides/local-providers/assets/jan-ai-continue-ask.png diff --git a/docs/docs/guides/integration/assets/jan-ai-continue-comment.gif 
b/docs/docs/guides/local-providers/assets/jan-ai-continue-comment.gif similarity index 100% rename from docs/docs/guides/integration/assets/jan-ai-continue-comment.gif rename to docs/docs/guides/local-providers/assets/jan-ai-continue-comment.gif diff --git a/docs/docs/guides/integration/assets/jan-ai-discord-repo.png b/docs/docs/guides/local-providers/assets/jan-ai-discord-repo.png similarity index 100% rename from docs/docs/guides/integration/assets/jan-ai-discord-repo.png rename to docs/docs/guides/local-providers/assets/jan-ai-discord-repo.png diff --git a/docs/docs/guides/integration/assets/jan-ai-openrouter.gif b/docs/docs/guides/local-providers/assets/jan-ai-openrouter.gif similarity index 100% rename from docs/docs/guides/integration/assets/jan-ai-openrouter.gif rename to docs/docs/guides/local-providers/assets/jan-ai-openrouter.gif diff --git a/docs/docs/guides/integration/assets/lmstudio.png b/docs/docs/guides/local-providers/assets/lmstudio.png similarity index 100% rename from docs/docs/guides/integration/assets/lmstudio.png rename to docs/docs/guides/local-providers/assets/lmstudio.png diff --git a/docs/docs/guides/integration/assets/mistral.png b/docs/docs/guides/local-providers/assets/mistral.png similarity index 100% rename from docs/docs/guides/integration/assets/mistral.png rename to docs/docs/guides/local-providers/assets/mistral.png diff --git a/docs/docs/guides/integration/assets/ollama.png b/docs/docs/guides/local-providers/assets/ollama.png similarity index 100% rename from docs/docs/guides/integration/assets/ollama.png rename to docs/docs/guides/local-providers/assets/ollama.png diff --git a/docs/docs/guides/integration/assets/openrouter.png b/docs/docs/guides/local-providers/assets/openrouter.png similarity index 100% rename from docs/docs/guides/integration/assets/openrouter.png rename to docs/docs/guides/local-providers/assets/openrouter.png diff --git a/docs/docs/guides/integration/assets/raycast-image.png 
b/docs/docs/guides/local-providers/assets/raycast-image.png similarity index 100% rename from docs/docs/guides/integration/assets/raycast-image.png rename to docs/docs/guides/local-providers/assets/raycast-image.png diff --git a/docs/docs/guides/integration/assets/raycast.png b/docs/docs/guides/local-providers/assets/raycast.png similarity index 100% rename from docs/docs/guides/integration/assets/raycast.png rename to docs/docs/guides/local-providers/assets/raycast.png diff --git a/docs/docs/guides/integration/assets/vscode.png b/docs/docs/guides/local-providers/assets/vscode.png similarity index 100% rename from docs/docs/guides/integration/assets/vscode.png rename to docs/docs/guides/local-providers/assets/vscode.png diff --git a/docs/docs/guides/local-providers/llamacpp.mdx b/docs/docs/guides/local-providers/llamacpp.mdx new file mode 100644 index 000000000..ca6285c4a --- /dev/null +++ b/docs/docs/guides/local-providers/llamacpp.mdx @@ -0,0 +1,91 @@ +--- +title: LlamaCPP Extension +slug: /guides/engines/llamacpp +sidebar_position: 1 +description: A step-by-step guide on how to customize the LlamaCPP extension. +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + Llama CPP integration, + LlamaCPP Extension, + ] +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +## Overview + +[Nitro](https://github.com/janhq/nitro) is an inference server on top of [llama.cpp](https://github.com/ggerganov/llama.cpp). It provides an OpenAI-compatible API, queue, & scaling. + +## LlamaCPP Extension + +:::note +Nitro is the default AI engine downloaded with Jan. There is no additional setup needed. +::: + +In this guide, we'll walk you through the process of customizing your engine settings by configuring the `nitro.json` file + +1. 
Navigate to the `App Settings` > `Advanced` > `Open App Directory` > `~/jan/engine` folder. + + + + ```sh + cd ~/jan/engines + ``` + + + ```sh + C:/Users//jan/engines + ``` + + + ```sh + cd ~/jan/engines + ``` + + + +2. Modify the `nitro.json` file based on your needs. The default settings are shown below. + +```json title="~/jan/engines/nitro.json" +{ + "ctx_len": 2048, + "ngl": 100, + "cpu_threads": 1, + "cont_batching": false, + "embedding": false +} +``` + +The table below describes the parameters in the `nitro.json` file. + +| Parameter | Type | Description | +| --------------- | ----------- | ---------------------------------------------------------------------------------------------------------------------------------- | +| `ctx_len` | **Integer** | Typically set at `2048`, `ctx_len` provides ample context for model operations like `GPT-3.5`. (_Maximum_: `4096`, _Minimum_: `1`) | +| `ngl` | **Integer** | Defaulted at `100`, `ngl` determines GPU layer usage. | +| `cpu_threads` | **Integer** | Determines CPU inference threads, limited by hardware and OS. (_Maximum_ determined by system) | +| `cont_batching` | **Integer** | Controls continuous batching, enhancing throughput for LLM inference. | +| `embedding` | **Integer** | Enables embedding utilization for tasks like document-enhanced chat in RAG-based applications. | + +:::tip + +- By default, the value of `ngl` is set to 100, which indicates that it will offload all. If you wish to offload only 50% of the GPU, you can set `ngl` to 15 because most models on Mistral or Llama are around ~ 30 layers. +- To utilize the embedding feature, include the JSON parameter `"embedding": true`. It will enable Nitro to process inferences with embedding capabilities. Please refer to the [Embedding in the Nitro documentation](https://nitro.jan.ai/features/embed) for a more detailed explanation. 
+- To utilize the continuous batching feature for boosting throughput and minimizing latency in large language model (LLM) inference, include `cont_batching: true`. For details, please refer to the [Continuous Batching in the Nitro documentation](https://nitro.jan.ai/features/cont-batch). + +::: + +:::info[Assistance and Support] + +If you have questions, please join our [Discord community](https://discord.gg/Dt7MxDyNNZ) for support, updates, and discussions. + +::: diff --git a/docs/docs/guides/integration/lmstudio.mdx b/docs/docs/guides/local-providers/lmstudio.mdx similarity index 93% rename from docs/docs/guides/integration/lmstudio.mdx rename to docs/docs/guides/local-providers/lmstudio.mdx index 33e48f33a..db4234700 100644 --- a/docs/docs/guides/integration/lmstudio.mdx +++ b/docs/docs/guides/local-providers/lmstudio.mdx @@ -1,24 +1,27 @@ --- title: LM Studio +slug: /guides/engines/lmstudio sidebar_position: 8 description: A step-by-step guide on how to integrate Jan with LM Studio. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, LM Studio integration, ] --- -## How to Integrate LM Studio with Jan +## Integrate LM Studio with Jan [LM Studio](https://lmstudio.ai/) enables you to explore, download, and run local Large Language Models (LLMs). You can integrate Jan with LM Studio using two methods: + 1. Integrate the LM Studio server with Jan UI 2. Migrate your downloaded model from LM Studio to Jan. @@ -28,6 +31,7 @@ To integrate LM Studio with Jan follow the steps below: In this guide, we're going to show you how to connect Jan to [LM Studio](https://lmstudio.ai/) using the second method. We'll use the [Phi 2 - GGUF](https://huggingface.co/TheBloke/phi-2-GGUF) model from Hugging Face as our example. ::: + ### Step 1: Server Setup 1. 
Access the `Local Inference Server` within LM Studio. @@ -56,7 +60,7 @@ Replace `(port)` with your chosen port number. The default is 1234. - Set `format` to `api`. - Specify `engine` as `openai`. - Set `state` to `ready`. - + ```json title="~/jan/models/lmstudio-phi-2/model.json" { "sources": [ @@ -80,10 +84,10 @@ Replace `(port)` with your chosen port number. The default is 1234. "engine": "openai" } ``` -:::note -For more details regarding the `model.json` settings and parameters fields, please see [here](../models/integrate-remote.mdx#modeljson). -::: +:::note +For more details regarding the `model.json` settings and parameters fields, please see [here](/guides/engines/remote-server/#modeljson). +::: ### Step 3: Starting the Model @@ -107,7 +111,6 @@ For more details regarding the `model.json` settings and parameters fields, ple Starting from version 0.4.7, Jan enables direct import of LM Studio models using absolute file paths. - ### Step 1: Locating the Model Path 1. Access `My Models` in LM Studio and locate your model folder. @@ -170,4 +173,4 @@ For Windows users, ensure to include double backslashes in the URL property, suc ### Step 3: Starting the Model 1. Restart Jan and proceed to the **Hub**. -2. Locate your model and click **Use** to activate it. \ No newline at end of file +2. Locate your model and click **Use** to activate it. diff --git a/docs/docs/guides/integration/ollama.mdx b/docs/docs/guides/local-providers/ollama.mdx similarity index 79% rename from docs/docs/guides/integration/ollama.mdx rename to docs/docs/guides/local-providers/ollama.mdx index 6c55bc856..2f7a26227 100644 --- a/docs/docs/guides/integration/ollama.mdx +++ b/docs/docs/guides/local-providers/ollama.mdx @@ -1,24 +1,27 @@ --- title: Ollama -sidebar_position: 9 +slug: /guides/engines/ollama +sidebar_position: 4 description: A step-by-step guide on how to integrate Jan with Ollama. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, Ollama integration, ] --- -## How to Integrate Ollama with Jan +## Integrate Ollama with Jan Ollama provides you with largen language that you can run locally. There are two methods to integrate Ollama with Jan: + 1. Integrate Ollama server with Jan. 2. Migrate the downloaded model from Ollama to Jan. @@ -39,7 +42,6 @@ ollama run 3. According to the [Ollama documentation on OpenAI compatibility](https://github.com/ollama/ollama/blob/main/docs/openai.md), you can connect to the Ollama server using the web address `http://localhost:11434/v1/chat/completions`. To do this, change the `openai.json` file in the `~/jan/engines` folder to add the Ollama server's full web address: - ```json title="~/jan/engines/openai.json" { "full_url": "http://localhost:11434/v1/chat/completions" @@ -51,10 +53,11 @@ ollama run 1. Navigate to the `~/jan/models` folder. 2. Create a folder named `(ollam-modelname)`, for example, `lmstudio-phi-2`. 3. Create a `model.json` file inside the folder including the following configurations: - - Set the `id` property to the model name as Ollama model name. - - Set the `format` property to `api`. - - Set the `engine` property to `openai`. - - Set the `state` property to `ready`. + +- Set the `id` property to the model name as Ollama model name. +- Set the `format` property to `api`. +- Set the `engine` property to `openai`. +- Set the `state` property to `ready`. ```json title="~/jan/models/llama2/model.json" { @@ -79,10 +82,12 @@ ollama run "engine": "openai" } ``` + :::note -For more details regarding the `model.json` settings and parameters fields, please see [here](../models/integrate-remote.mdx#modeljson). 
+For more details regarding the `model.json` settings and parameters fields, please see [here](/guides/engines/remote-server/#modeljson). ::: ### Step 3: Start the Model -1. Restart Jan and navigate to the **Hub**. -2. Locate your model and click the **Use** button. \ No newline at end of file + +1. Restart Jan and navigate to the **Hub**. +2. Locate your model and click the **Use** button. diff --git a/docs/docs/guides/local-providers/tensorrt.mdx b/docs/docs/guides/local-providers/tensorrt.mdx new file mode 100644 index 000000000..46f4346c9 --- /dev/null +++ b/docs/docs/guides/local-providers/tensorrt.mdx @@ -0,0 +1,115 @@ +--- +title: TensorRT-LLM Extension +slug: /guides/engines/tensorrt-llm +sidebar_position: 2 +description: A step-by-step guide on how to customize the TensorRT-LLM extension. +keywords: + [
    Jan,
    Rethink the Computer,
    local AI,
    privacy focus,
    free and open source,
    private and offline,
    conversational AI,
    no-subscription fee,
    large language models,
    TensorRT-LLM Extension,
    TensorRT,
    tensorRT,
    extension,
  ] +--- + +## Overview + +Users with Nvidia GPUs can get **20-40% faster token speeds** compared to using the LlamaCPP engine on their laptop or desktops by using [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM). The greater implication is that you are running FP16, which is also more accurate than quantized models. + +## TensorRT-LLM Extension + +This guide walks you through how to install Jan's official [TensorRT-LLM Extension](https://github.com/janhq/nitro-tensorrt-llm). This extension uses [Nitro-TensorRT-LLM](https://github.com/janhq/nitro-tensorrt-llm) as the AI engine, instead of the default [Nitro-Llama-CPP](https://github.com/janhq/nitro). It includes an efficient C++ server to natively execute the [TRT-LLM C++ runtime](https://nvidia.github.io/TensorRT-LLM/gpt_runtime.html). It also comes with additional features and performance improvements like OpenAI compatibility, tokenizer improvements, and queues.
+ +:::warning + +- This feature is only available for Windows users. Linux is coming soon. + +- Additionally, we only prebuilt a few demo models. You can always build your desired models directly on your machine. For more information, please see [here](#build-your-own-tensorrt-models). + +::: + +### Pre-requisites + +- A Windows PC +- Nvidia GPU(s): Ada or Ampere series (i.e. RTX 4000s & 3000s). More will be supported soon. +- 3GB+ of disk space to download TRT-LLM artifacts and a Nitro binary +- Jan v0.4.9+ or Jan v0.4.8-321+ (nightly) +- Nvidia Driver v535+ (For installation guide, please see [here](/troubleshooting/#1-ensure-gpu-mode-requirements)) +- CUDA Toolkit v12.2+ (For installation guide, please see [here](/troubleshooting/#1-ensure-gpu-mode-requirements)) + +### Step 1: Install TensorRT-Extension + +1. Go to **Settings** > **Extensions**. +2. Click **Install** next to the TensorRT-LLM Extension. +3. Check that files are correctly downloaded. + +```sh +ls ~\jan\extensions\@janhq\tensorrt-llm-extension\dist\bin +# Your Extension Folder should now include `nitro.exe`, among other artifacts needed to run TRT-LLM +``` + +### Step 2: Download a Compatible Model + +TensorRT-LLM can only run models in `TensorRT` format. These models, aka "TensorRT Engines", are prebuilt specifically for each target OS+GPU architecture. + +We offer a handful of precompiled models for Ampere and Ada cards that you can immediately download and play with: + +1. Restart the application and go to the Hub. +2. Look for models with the `TensorRT-LLM` label in the recommended models list > Click **Download**. + +:::note +This step might take some time. 🙏 +::: + +![image](https://hackmd.io/_uploads/rJewrEgRp.png) + +3. Click use and start chatting! +4. You may need to allow Nitro in your network + +![alt text](./assets/image.png) + +:::warning +If you are on our nightly builds, you may have to reinstall the TensorRT-LLM extension each time you update the app.
We're working on better extension lifecycles - stay tuned. +::: + +### Step 3: Configure Settings + +You can customize the default parameters for how Jan runs TensorRT-LLM. + +:::info +coming soon +::: + +## Troubleshooting + +### Incompatible Extension vs Engine versions + +For now, the model versions are pinned to the extension versions. + +### Uninstall Extension + +To uninstall the extension, follow the steps below: + +1. Quit the app. +2. Go to **Settings** > **Extensions**. +3. Delete the entire Extensions folder. +4. Reopen the app; only the default extensions should be restored. + +### Install Nitro-TensorRT-LLM manually + +To manually build the artifacts needed to run the server and TensorRT-LLM, you can reference the source code. [Read here](https://github.com/janhq/nitro-tensorrt-llm?tab=readme-ov-file#quickstart). + +### Build your own TensorRT models + +:::info +coming soon +::: diff --git a/docs/docs/guides/models-list.mdx b/docs/docs/guides/models-list.mdx deleted file mode 100644 index cd7107a92..000000000 --- a/docs/docs/guides/models-list.mdx +++ /dev/null @@ -1,70 +0,0 @@ ---- -title: Pre-configured Models -sidebar_position: 3 ---- - -## Overview - -Jan provides various pre-configured AI models with different capabilities. Please see the following list for details. - -| Model | Description | -| ----- | ----------- | -| Mistral Instruct 7B Q4 | A model designed for a comprehensive understanding through training on extensive internet data | -| OpenHermes Neural 7B Q4 | A merged model using the TIES method.
It performs well in various benchmarks | -| Stealth 7B Q4 | This is a new experimental family designed to enhance Mathematical and Logical abilities | -| Trinity-v1.2 7B Q4 | An experimental model merge using the Slerp method | -| Openchat-3.5 7B Q4 | An open-source model that has a performance that surpasses that of ChatGPT-3.5 and Grok-1 across various benchmarks | -| Wizard Coder Python 13B Q5 | A Python coding model that demonstrates high proficiency in specific domains like coding and mathematics | -| OpenAI GPT 3.5 Turbo | The latest GPT-3.5 Turbo model with higher accuracy at responding in requested formats and a fix for a bug that caused a text encoding issue for non-English language function calls | -| OpenAI GPT 3.5 Turbo 16k 0613 | A Snapshot model of gpt-3.5-16k-turbo from June 13th 2023 | -| OpenAI GPT 4 | The latest GPT-4 model intended to reduce cases of “laziness” where the model doesn't complete a task | -| TinyLlama Chat 1.1B Q4 | A tiny model with only 1.1B. It's a good model for less powerful computers | -| Deepseek Coder 1.3B Q8 | A model that excelled in project-level code completion with advanced capabilities across multiple programming languages | -| Phi-2 3B Q8 | a 2.7B model, excelling in common sense and logical reasoning benchmarks, trained with synthetic texts and filtered websites | -| Llama 2 Chat 7B Q4 | A model that is specifically designed for a comprehensive understanding through training on extensive internet data | -| CodeNinja 7B Q4 | A model that is good for coding tasks and can handle various languages, including Python, C, C++, Rust, Java, JavaScript, and more | -| Noromaid 7B Q5 | A model designed for role-playing with human-like behavior. 
| -| Starling alpha 7B Q4 | An upgrade of Openchat 3.5 using RLAIF, is good at various benchmarks, especially with GPT-4 judging its performance | -| Yarn Mistral 7B Q4 | A language model for long context and supports a 128k token context window | -| LlaVa 1.5 7B Q5 K | A model can bring vision understanding to Jan | -| BakLlava 1 | A model can bring vision understanding to Jan | -| Solar Slerp 10.7B Q4 | A model that uses the Slerp merge method from SOLAR Instruct and Pandora-v1 | -| LlaVa 1.5 13B Q5 K | A model can bring vision understanding to Jan | -| Deepseek Coder 33B Q5 | A model that excelled in project-level code completion with advanced capabilities across multiple programming languages | -| Phind 34B Q5 | A multi-lingual model that is fine-tuned on 1.5B tokens of high-quality programming data, excels in various programming languages, and is designed to be steerable and user-friendly | -| Yi 34B Q5 | A specialized chat model is known for its diverse and creative responses and excels across various NLP tasks and benchmarks | -| Capybara 200k 34B Q5 | A long context length model that supports 200K tokens | -| Dolphin 8x7B Q4 | An uncensored model built on Mixtral-8x7b and it is good at programming tasks | -| Mixtral 8x7B Instruct Q4 | A pre-trained generative Sparse Mixture of Experts, which outperforms 70B models on most benchmarks | -| Tulu 2 70B Q4 | A strong model alternative to Llama 2 70b Chat to act as helpful assistants | -| Llama 2 Chat 70B Q4 | A model that is specifically designed for a comprehensive understanding through training on extensive internet data | - -:::note - -OpenAI GPT models require a subscription to use them further. To learn more, [click here](https://openai.com/pricing). 
- -::: - -## Model details - -| Model | Author | Model ID | Format | Size | -| ----- | ------ | -------- | ------ | ---- | -| Mistral Instruct 7B Q4 | MistralAI, The Bloke | `mistral-ins-7b-q4` | **GGUF** | 4.07GB | -| OpenHermes Neural 7B Q4 | Intel, Jan | `openhermes-neural-7b` | **GGUF** | 4.07GB | -| Stealth 7B Q4 | Jan | `stealth-v1.2-7b` | **GGUF** | 4.07GB | -| Trinity-v1.2 7B Q4 | Jan | `trinity-v1.2-7b` | **GGUF** | 4.07GB | -| Openchat-3.5 7B Q4 | Openchat | `openchat-3.5-7b` | **GGUF** | 4.07GB | -| Wizard Coder Python 13B Q5 | WizardLM, The Bloke | `wizardcoder-13b` | **GGUF** | 7.33GB | - | -| OpenAI GPT 3.5 Turbo | OpenAI | `gpt-3.5-turbo` | **GGUF** | - | -| OpenAI GPT 3.5 Turbo 16k 0613 | OpenAI | `gpt-3.5-turbo-16k-0613` | **GGUF** | - | -| OpenAI GPT 4 | OpenAI | `gpt-4` | **GGUF** | - | -| TinyLlama Chat 1.1B Q4 | TinyLlama | `tinyllama-1.1b` | **GGUF** | 638.01MB | -| Deepseek Coder 1.3B Q8 | Deepseek, The Bloke | `deepseek-coder-1.3b` | **GGUF** | 1.33GB | -| Phi-2 3B Q8 | Microsoft | `phi-2-3b` | **GGUF** | 2.76GB | -| Llama 2 Chat 7B Q4 | MetaAI, The Bloke | `llama2-chat-7b-q4` | **GGUF** | 3.80GB | -| CodeNinja 7B Q4 | Beowolx | `codeninja-1.0-7b` | **GGUF** | 4.07GB | -| Noromaid 7B Q5 | NeverSleep | `noromaid-7b` | **GGUF** | 4.07GB | -| Starling alpha 7B Q4 | Berkeley-nest, The Bloke | `starling-7b` | **GGUF** | 4.07GB | -| Yarn Mistral 7B Q4 | NousResearch, The Bloke | `yarn-mistral-7b` | **GGUF** | 4.07GB | -| LlaVa 1.5 7B Q5 K | Mys | `llava-1.5-7b-q5` | **GGUF** | 5.03GB | -| BakLlava 1 | Mys | `bakllava-1` | **GGUF** | 5.36GB | diff --git a/docs/docs/guides/models/README.mdx b/docs/docs/guides/models/README.mdx deleted file mode 100644 index 941eab3b6..000000000 --- a/docs/docs/guides/models/README.mdx +++ /dev/null @@ -1,22 +0,0 @@ ---- -title: Models Setup -slug: /guides/models-setup/ -sidebar_position: 5 -description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
-keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - build extension, - ] ---- - -import DocCardList from "@theme/DocCardList"; - - diff --git a/docs/docs/guides/models/assets/jan-model-hub.png b/docs/docs/guides/models/assets/jan-model-hub.png deleted file mode 100644 index db4624f2f..000000000 Binary files a/docs/docs/guides/models/assets/jan-model-hub.png and /dev/null differ diff --git a/docs/docs/guides/models/customize-engine.mdx b/docs/docs/guides/models/customize-engine.mdx deleted file mode 100644 index 2f54204a8..000000000 --- a/docs/docs/guides/models/customize-engine.mdx +++ /dev/null @@ -1,79 +0,0 @@ ---- -title: Customize Engine Settings -sidebar_position: 1 -description: A step-by-step guide to change your engine's settings. -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - import-models-manually, - customize-engine-settings, - ] ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - - -In this guide, we'll walk you through the process of customizing your engine settings by configuring the `nitro.json` file - -1. Navigate to the `App Settings` > `Advanced` > `Open App Directory` > `~/jan/engine` folder. - - - - ```sh - cd ~/jan/engines - ``` - - - ```sh - C:/Users//jan/engines - ``` - - - ```sh - cd ~/jan/engines - ``` - - - -2. Modify the `nitro.json` file based on your needs. The default settings are shown below. - -```json title="~/jan/engines/nitro.json" -{ - "ctx_len": 2048, - "ngl": 100, - "cpu_threads": 1, - "cont_batching": false, - "embedding": false -} -``` - -The table below describes the parameters in the `nitro.json` file. - -| Parameter | Type | Description | -| --------- | ---- | ----------- | -| `ctx_len` | **Integer** | Typically set at `2048`, `ctx_len` provides ample context for model operations like `GPT-3.5`. 
(*Maximum*: `4096`, *Minimum*: `1`) | -| `ngl` | **Integer** | Defaulted at `100`, `ngl` determines GPU layer usage. | -| `cpu_threads` | **Integer** | Determines CPU inference threads, limited by hardware and OS. (*Maximum* determined by system) | -| `cont_batching` | **Integer** | Controls continuous batching, enhancing throughput for LLM inference. | -| `embedding` | **Integer** | Enables embedding utilization for tasks like document-enhanced chat in RAG-based applications. | - -:::tip - - By default, the value of `ngl` is set to 100, which indicates that it will offload all. If you wish to offload only 50% of the GPU, you can set `ngl` to 15 because most models on Mistral or Llama are around ~ 30 layers. - - To utilize the embedding feature, include the JSON parameter `"embedding": true`. It will enable Nitro to process inferences with embedding capabilities. Please refer to the [Embedding in the Nitro documentation](https://nitro.jan.ai/features/embed) for a more detailed explanation. - - To utilize the continuous batching feature for boosting throughput and minimizing latency in large language model (LLM) inference, include `cont_batching: true`. For details, please refer to the [Continuous Batching in the Nitro documentation](https://nitro.jan.ai/features/cont-batch). - -::: - -:::info[Assistance and Support] - -If you have questions, please join our [Discord community](https://discord.gg/Dt7MxDyNNZ) for support, updates, and discussions. - -::: \ No newline at end of file diff --git a/docs/docs/guides/models/import-models.mdx b/docs/docs/guides/models/import-models.mdx deleted file mode 100644 index 9ed8953c7..000000000 --- a/docs/docs/guides/models/import-models.mdx +++ /dev/null @@ -1,257 +0,0 @@ ---- -title: Manual Import -sidebar_position: 3 -description: A step-by-step guide on how to perform manual import feature. 
-keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - import-models-manually, - absolute-filepath, - ] ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; -import janModel from './assets/jan-model-hub.png'; - - -This guide will show you how to perform manual import. In this guide, we are using a GGUF model from [HuggingFace](https://huggingface.co/) and our latest model, [Trinity](https://huggingface.co/janhq/trinity-v1-GGUF), as an example. - -## Newer versions - nightly versions and v0.4.8+ - -Starting with version 0.4.8, Jan has introduced the capability to import models using a UI drag-and-drop method. This allows you to import models directly into the Jan application UI by dragging the `.GGUF` file from your directory into the Jan application. - -### 1. Get the Model -Download the model from HuggingFace in the `.GGUF` format. - -### 2. Import the Model -1. Open your Jan application. -2. Click the **Import Model** button. -3. Open your downloaded model. -4. Drag the `.GGUF` file from your directory into the Jan **Import Model** window. - -### 3. Done! - -If your model doesn't show up in the **Model Selector** in conversations, **restart the app** or contact us via our [Discord community](https://discord.gg/Dt7MxDyNNZ). - -## Newer versions - nightly versions and v0.4.7+ - -Starting from version 0.4.7, Jan has introduced the capability to import models using an absolute file path. It allows you to import models from any directory on your computer. - -### 1. Get the Absolute Filepath of the Model - -After downloading the model from HuggingFace, get the absolute filepath of the model. - -### 2. Configure the Model JSON - -1. Navigate to the `~/jan/models` folder. -2. Create a folder named ``, for example, `tinyllama`. -3. 
Create a `model.json` file inside the folder, including the following configurations: - -- Ensure the `id` property matches the folder name you created. -- Ensure the `url` property is the direct binary download link ending in `.gguf`. Now, you can use the absolute filepath of the model file. -- Ensure the `engine` property is set to `nitro`. - -```json -{ - "sources": [ - { - "filename": "tinyllama.gguf", - // highlight-next-line - "url": "" - } - ], - "id": "tinyllama-1.1b", - "object": "model", - "name": "(Absolute Path) TinyLlama Chat 1.1B Q4", - "version": "1.0", - "description": "TinyLlama is a tiny model with only 1.1B. It's a good model for less powerful computers.", - "format": "gguf", - "settings": { - "ctx_len": 4096, - "prompt_template": "<|system|>\n{system_message}<|user|>\n{prompt}<|assistant|>", - "llama_model_path": "tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf" - }, - "parameters": { - "temperature": 0.7, - "top_p": 0.95, - "stream": true, - "max_tokens": 2048, - "stop": [], - "frequency_penalty": 0, - "presence_penalty": 0 - }, - "metadata": { - "author": "TinyLlama", - "tags": ["Tiny", "Foundation Model"], - "size": 669000000 - }, - "engine": "nitro" -} -``` - -:::warning - -- If you are using Windows, you need to use double backslashes in the url property, for example: `C:\\Users\\username\\filename.gguf`. - -::: - -### 3. Done! - -If your model doesn't show up in the **Model Selector** in conversations, **restart the app** or contact us via our [Discord community](https://discord.gg/Dt7MxDyNNZ). - -## Newer versions - nightly versions and v0.4.4+ - -### 1. Create a Model Folder - -1. Navigate to the `App Settings` > `Advanced` > `Open App Directory` > `~/jan/models` folder. - - - - ```sh - cd ~/jan/models - ``` - - - ```sh - C:/Users//jan/models - ``` - - - ```sh - cd ~/jan/models - ``` - - - -2. In the `models` folder, create a folder with the name of the model. - -```sh -mkdir trinity-v1-7b -``` - -### 2. 
Drag & Drop the Model - -Drag and drop your model binary into this folder, ensuring the `modelname.gguf` is the same name as the folder name, e.g. `models/modelname`. - -### 3. Done! - -If your model doesn't show up in the **Model Selector** in conversations, **restart the app** or contact us via our [Discord community](https://discord.gg/Dt7MxDyNNZ). - -## Older versions - before v0.44 - -### 1. Create a Model Folder - -1. Navigate to the `App Settings` > `Advanced` > `Open App Directory` > `~/jan/models` folder. - - - - ```sh - cd ~/jan/models - ``` - - - ```sh - C:/Users//jan/models - ``` - - - ```sh - cd ~/jan/models - ``` - - - -2. In the `models` folder, create a folder with the name of the model. - -```sh -mkdir trinity-v1-7b -``` - -### 2. Create a Model JSON - -Jan follows a folder-based, [standard model template](https://jan.ai/docs/engineering/models/) called a `model.json` to persist the model configurations on your local filesystem. - -This means that you can easily reconfigure your models, export them, and share your preferences transparently. - - - - ```sh - cd trinity-v1-7b - touch model.json - ``` - - - ```sh - cd trinity-v1-7b - echo {} > model.json - ``` - - - ```sh - cd trinity-v1-7b - touch model.json - ``` - - - -To update `model.json`: - - - Match `id` with folder name. - - Ensure GGUF filename matches `id`. - - Set `source.url` to direct download link ending in `.gguf`. In HuggingFace, you can find the direct links in the `Files and versions` tab. - - Verify that you are using the correct `prompt_template`. This is usually provided in the HuggingFace model's description page. 
- -```json title="model.json" -{ - "sources": [ - { - "filename": "trinity-v1.Q4_K_M.gguf", - "url": "https://huggingface.co/janhq/trinity-v1-GGUF/resolve/main/trinity-v1.Q4_K_M.gguf" - } - ], - "id": "trinity-v1-7b", - "object": "model", - "name": "Trinity-v1 7B Q4", - "version": "1.0", - "description": "Trinity is an experimental model merge of GreenNodeLM & LeoScorpius using the Slerp method. Recommended for daily assistance purposes.", - "format": "gguf", - "settings": { - "ctx_len": 4096, - "prompt_template": "{system_message}\n### Instruction:\n{prompt}\n### Response:", - "llama_model_path": "trinity-v1.Q4_K_M.gguf" - }, - "parameters": { - "max_tokens": 4096 - }, - "metadata": { - "author": "Jan", - "tags": ["7B", "Merged"], - "size": 4370000000 - }, - "engine": "nitro" -} -``` - -:::note -For more details regarding the `model.json` settings and parameters fields, please see [here](/docs/guides/models/integrate-remote.mdx#modeljson). -::: - -### 3. Download the Model - -1. Restart Jan and navigate to the Hub. -2. Locate your model. -3. Click **Download** button to download the model binary. - -:::info[Assistance and Support] - -If you have questions, please join our [Discord community](https://discord.gg/Dt7MxDyNNZ) for support, updates, and discussions. 
- -::: \ No newline at end of file diff --git a/docs/docs/guides/providers/README.mdx b/docs/docs/guides/providers/README.mdx deleted file mode 100644 index aa3bfea1f..000000000 --- a/docs/docs/guides/providers/README.mdx +++ /dev/null @@ -1,8 +0,0 @@ ---- -title: Inference Providers -slug: /guides/providers ---- - -import DocCardList from "@theme/DocCardList"; - - diff --git a/docs/docs/guides/providers/llama-cpp.md b/docs/docs/guides/providers/llama-cpp.md deleted file mode 100644 index d2b0daa2a..000000000 --- a/docs/docs/guides/providers/llama-cpp.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -title: llama.cpp -slug: /guides/providers/llama-cpp ---- - -## Overview - -[Nitro](https://github.com/janhq/nitro) is an inference server on top of [llama.cpp](https://github.com/ggerganov/llama.cpp). It provides an OpenAI-compatible API, queue, & scaling. - -Nitro is the default AI engine downloaded with Jan. There is no additional setup needed. \ No newline at end of file diff --git a/docs/docs/guides/providers/tensorrt-llm.md b/docs/docs/guides/providers/tensorrt-llm.md index 52da83b36..f73168873 100644 --- a/docs/docs/guides/providers/tensorrt-llm.md +++ b/docs/docs/guides/providers/tensorrt-llm.md @@ -1,73 +1,216 @@ --- title: TensorRT-LLM slug: /guides/providers/tensorrt-llm +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + TensorRT-LLM Extension, + TensorRT, + tensorRT, + extension, + ] --- -Users with Nvidia GPUs can get **20-40% faster\* token speeds** on their laptop or desktops by using [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM). The greater implication is that you are running FP16, which is also more accurate than quantized models. +:::info -This guide walks you through how to install Jan's official [TensorRT-LLM Extension](https://github.com/janhq/nitro-tensorrt-llm). 
This extension uses [Nitro-TensorRT-LLM](https://github.com/janhq/nitro-tensorrt-llm) as the AI engine, instead of the default [Nitro-Llama-CPP](https://github.com/janhq/nitro). It includes an efficient C++ server to natively execute the [TRT-LLM C++ runtime](https://nvidia.github.io/TensorRT-LLM/gpt_runtime.html). It also comes with additional feature and performance improvements like OpenAI compatibility, tokenizer improvements, and queues. +TensorRT-LLM support was launched in 0.4.9, and should be regarded as an Experimental feature. -*Compared to using LlamaCPP engine. +- Only Windows is supported for now. +- Please report bugs in our Discord's [#tensorrt-llm](https://discord.com/channels/1107178041848909847/1201832734704795688) channel. -:::warning -This feature is only available for Windows users. Linux is coming soon. +::: -Additionally, we only prebuilt a few demo models. You can always build your desired models directly on your machine. [Read here](#build-your-own-tensorrt-models). +Jan supports [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) as an alternate Inference Engine, for users who have Nvidia GPUs with large VRAM. TensorRT-LLM allows for blazing fast inference, but requires Nvidia GPUs with [larger VRAM](https://nvidia.github.io/TensorRT-LLM/memory.html). + +## What is TensorRT-LLM? + +[TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) is a hardware-optimized LLM inference engine for Nvidia GPUs, that compiles models to run extremely fast on Nvidia GPUs. + +- Mainly used on Nvidia's Datacenter-grade GPUs like the H100s [to produce 10,000 tok/s](https://nvidia.github.io/TensorRT-LLM/blogs/H100vsA100.html). +- Can be used on Nvidia's workstation (e.g. [A6000](https://www.nvidia.com/en-us/design-visualization/rtx-6000/)) and consumer-grade GPUs (e.g.
[RTX 4090](https://www.nvidia.com/en-us/geforce/graphics-cards/40-series/rtx-4090/)) + +:::tip[Benefits] + +- Our performance testing shows 20-40% faster token/s speeds on consumer-grade GPUs +- On datacenter-grade GPUs, TensorRT-LLM can go up to 10,000 tokens/s +- TensorRT-LLM is a relatively new library, that was [released in Sept 2023](https://github.com/NVIDIA/TensorRT-LLM/graphs/contributors). We anticipate performance and resource utilization improvements in the future. + +::: + +:::warning[Caveats] + +- TensorRT-LLM requires models to be compiled into GPU and OS-specific "Model Engines" (vs. GGUF's "convert once, run anywhere" approach) +- TensorRT-LLM Model Engines tend to utilize larger amount of VRAM and RAM in exchange for performance +- This usually means only people with top-of-the-line Nvidia GPUs can use TensorRT-LLM ::: ## Requirements -- A Windows PC +### Hardware + +- Windows PC - Nvidia GPU(s): Ada or Ampere series (i.e. RTX 4000s & 3000s). More will be supported soon. - 3GB+ of disk space to download TRT-LLM artifacts and a Nitro binary -- Jan v0.4.9+ or Jan v0.4.8-321+ (nightly) -- Nvidia Driver v535+ ([installation guide](https://jan.ai/guides/common-error/not-using-gpu/#1-ensure-gpu-mode-requirements)) -- CUDA Toolkit v12.2+ ([installation guide](https://jan.ai/guides/common-error/not-using-gpu/#1-ensure-gpu-mode-requirements)) -## Install TensorRT-Extension +**Compatible GPUs** + +| Architecture | Supported? | Consumer-grade | Workstation-grade | +| ------------ | ---------- | -------------- | ----------------- | +| Ada | ✅ | 4050 and above | RTX A2000 Ada | +| Ampere | ✅ | 3050 and above | A100 | +| Turing | ❌ | Not Supported | Not Supported | + +:::info + +Please ping us in Discord's [#tensorrt-llm](https://discord.com/channels/1107178041848909847/1201832734704795688) channel if you would like Turing support. 
+ +::: + +### Software + +- Jan v0.4.9+ or Jan v0.4.8-321+ (nightly) +- [Nvidia Driver v535+](https://jan.ai/guides/common-error/not-using-gpu/#1-ensure-gpu-mode-requirements) +- [CUDA Toolkit v12.2+](https://jan.ai/guides/common-error/not-using-gpu/#1-ensure-gpu-mode-requirements) + +## Getting Started + +### Install TensorRT-Extension 1. Go to Settings > Extensions -2. Click install next to the TensorRT-LLM Extension -3. Check that files are correctly downloaded +2. Install the TensorRT-LLM Extension + +:::info +You can check if files have been correctly downloaded: ```sh ls ~\jan\extensions\@janhq\tensorrt-llm-extension\dist\bin -# Your Extension Folder should now include `nitro.exe`, among other artifacts needed to run TRT-LLM +# Your Extension Folder should now include `nitro.exe`, among other `.dll` files needed to run TRT-LLM ``` -## Download a Compatible Model -TensorRT-LLM can only run models in `TensorRT` format. These models, aka "TensorRT Engines", are prebuilt specifically for each target OS+GPU architecture. - -We offer a handful of precompiled models for Ampere and Ada cards that you can immediately download and play with: - -1. Restart the application and go to the Hub -2. Look for models with the `TensorRT-LLM` label in the recommended models list. Click download. This step might take some time. 🙏 - -![image](https://hackmd.io/_uploads/rJewrEgRp.png) - -3. Click use and start chatting! -4. You may need to allow Nitro in your network - -![alt text](image.png) - -:::warning -If you are our nightly builds, you may have to reinstall the TensorRT-LLM extension each time you update the app. We're working on better extension lifecyles - stay tuned. ::: -## Configure Settings +### Download a TensorRT-LLM Model -You can customize the default parameters for how Jan runs TensorRT-LLM. 
+Jan's Hub has a few pre-compiled TensorRT-LLM models that you can download, which have a `TensorRT-LLM` label -:::info +- We automatically download the TensorRT-LLM Model Engine for your GPU architecture +- We have made a few 1.1b models available that can run even on Laptop GPUs with 8gb VRAM + +| Model | OS | Ada (40XX) | Ampere (30XX) | Description | +| ------------------- | ------- | ---------- | ------------- | --------------------------------------------------- | +| Llamacorn 1.1b | Windows | ✅ | ✅ | TinyLlama-1.1b, fine-tuned for usability | +| TinyJensen 1.1b | Windows | ✅ | ✅ | TinyLlama-1.1b, fine-tuned on Jensen Huang speeches | +| Mistral Instruct 7b | Windows | ✅ | ✅ | Mistral | + +### Importing Pre-built Models + +You can import a pre-built model, by creating a new folder in Jan's `/models` directory that includes: + +- TensorRT-LLM Engine files (e.g. `tokenizer`, `.engine`, etc) +- `model.json` that registers these files, and specifies `engine` as `nitro-tensorrt-llm` + +:::note[Sample model.json] + +Note the `engine` is `nitro-tensorrt-llm`: this won't work without it! 
+ +```js +{ + "sources": [ + { + "filename": "config.json", + "url": "https://delta.jan.ai/dist/models///tensorrt-llm-v0.7.1/TinyJensen-1.1B-Chat-fp16/config.json" + }, + { + "filename": "mistral_float16_tp1_rank0.engine", + "url": "https://delta.jan.ai/dist/models///tensorrt-llm-v0.7.1/TinyJensen-1.1B-Chat-fp16/mistral_float16_tp1_rank0.engine" + }, + { + "filename": "tokenizer.model", + "url": "https://delta.jan.ai/dist/models///tensorrt-llm-v0.7.1/TinyJensen-1.1B-Chat-fp16/tokenizer.model" + }, + { + "filename": "special_tokens_map.json", + "url": "https://delta.jan.ai/dist/models///tensorrt-llm-v0.7.1/TinyJensen-1.1B-Chat-fp16/special_tokens_map.json" + }, + { + "filename": "tokenizer.json", + "url": "https://delta.jan.ai/dist/models///tensorrt-llm-v0.7.1/TinyJensen-1.1B-Chat-fp16/tokenizer.json" + }, + { + "filename": "tokenizer_config.json", + "url": "https://delta.jan.ai/dist/models///tensorrt-llm-v0.7.1/TinyJensen-1.1B-Chat-fp16/tokenizer_config.json" + }, + { + "filename": "model.cache", + "url": "https://delta.jan.ai/dist/models///tensorrt-llm-v0.7.1/TinyJensen-1.1B-Chat-fp16/model.cache" + } + ], + "id": "tinyjensen-1.1b-chat-fp16", + "object": "model", + "name": "TinyJensen 1.1B Chat FP16", + "version": "1.0", + "description": "Do you want to chat with Jensen Huang? Here you are", + "format": "TensorRT-LLM", + "settings": { + "ctx_len": 2048, + "text_model": false + }, + "parameters": { + "max_tokens": 4096 + }, + "metadata": { + "author": "LLama", + "tags": [ + "TensorRT-LLM", + "1B", + "Finetuned" + ], + "size": 2151000000 + }, + "engine": "nitro-tensorrt-llm" +} +``` + +::: + +### Using a TensorRT-LLM Model + +You can just select and use a TensorRT-LLM model from Jan's Thread interface. + +- Jan will automatically start the TensorRT-LLM model engine in the background +- You may encounter a pop-up from Windows Security, asking for Nitro to allow public and private network access + +:::info[Why does Nitro need network access?]
+ +- This is because Jan runs TensorRT-LLM using the [Nitro Server](https://github.com/janhq/nitro-tensorrt-llm/) +- Jan makes network calls to the Nitro server running on your computer on a separate port + +::: + +### Configure Settings + +:::note coming soon ::: ## Troubleshooting -### Incompatible Extension vs Engine versions +## Extension Details -For now, the model versions are pinned to the extension versions. +Jan's TensorRT-LLM Extension is built on top of the open source [Nitro TensorRT-LLM Server](https://github.com/janhq/nitro-tensorrt-llm), a C++ inference server on top of TensorRT-LLM that provides an OpenAI-compatible API. + +### Manual Build + +To manually build the artifacts needed to run the server and TensorRT-LLM, you can reference the source code. [Read here](https://github.com/janhq/nitro-tensorrt-llm?tab=readme-ov-file#quickstart). ### Uninstall Extension @@ -76,11 +219,7 @@ For now, the model versions are pinned to the extension versions. 3. Delete the entire Extensions folder. 4. Reopen the app, only the default extensions should be restored. -### Install Nitro-TensorRT-LLM manually - -To manually build the artifacts needed to run the server and TensorRT-LLM, you can reference the source code. [Read here](https://github.com/janhq/nitro-tensorrt-llm?tab=readme-ov-file#quickstart). - -### Build your own TensorRT models +## Build your own TensorRT models :::info coming soon diff --git a/docs/docs/guides/quickstart.mdx b/docs/docs/guides/quickstart.mdx deleted file mode 100644 index 84612716a..000000000 --- a/docs/docs/guides/quickstart.mdx +++ /dev/null @@ -1,68 +0,0 @@ ---- -title: Quickstart -slug: /guides -description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
-sidebar_position: 1 -keywords: - [ - Jan AI, - Jan, - ChatGPT alternative, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - ] ---- - -import installImageURL from './assets/jan-ai-quickstart.png'; -import flow from './assets/quick.png'; - -# Quickstart - -{/* After finish installing, here are steps for using Jan - -## Run Jan - - - - 1. Search Jan in the Dock and run the program. - - - 1. Search Jan in the Start menu and run the program. - - - 1. Go to the Jan directory and run the program. - - - -2. After you run Jan, the program will take you to the Threads window, with list of threads and each thread is a chatting box between you and the AI model. - -3. Go to the **Hub** under the **Thread** section and select the AI model that you want to use. For more info, go to the [Using Models](category/using-models) section. - -4. A new thread will be added. You can use Jan in the thread with the AI model that you selected before. */} - -To get started quickly with Jan, follow the steps below: - -### Step 1: Install Jan - -Go to [Jan.ai](https://jan.ai/) > Select your operating system > Install the program. - -:::note -To learn more about system requirements for your operating system, go to [Installation guide](/guides/install). -::: - -### Step 2: Select AI Model - -Before using Jan, you need to select an AI model that based on your hardware capabilities and specifications. Each model has their purposes, capabilities, and different requirements. To select AI models: - -Go to the **Hub** > select the models that you would like to install. - -:::note -For more info, go to [list of supported models](/guides/models-list/). -::: - -### Step 3: Use the AI Model - -After you install the AI model, you use it immediately under **Thread** tab. 
diff --git a/docs/docs/guides/error-codes/README.mdx b/docs/docs/guides/remote-providers/README.mdx similarity index 59% rename from docs/docs/guides/error-codes/README.mdx rename to docs/docs/guides/remote-providers/README.mdx index 39fb37ac9..0e3f6b747 100644 --- a/docs/docs/guides/error-codes/README.mdx +++ b/docs/docs/guides/remote-providers/README.mdx @@ -1,18 +1,19 @@ --- -title: Error Codes -slug: /guides/error-codes/ -sidebar_position: 7 +title: Remote Engines +slug: /guides/engines/remote +sidebar_position: 14 description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, build extension, ] --- diff --git a/docs/docs/guides/remote-providers/assets/azure.png b/docs/docs/guides/remote-providers/assets/azure.png new file mode 100644 index 000000000..b5b9dc46a Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/azure.png differ diff --git a/docs/docs/guides/remote-providers/assets/cont.png b/docs/docs/guides/remote-providers/assets/cont.png new file mode 100644 index 000000000..4803a6a39 Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/cont.png differ diff --git a/docs/docs/guides/remote-providers/assets/discordflow.png b/docs/docs/guides/remote-providers/assets/discordflow.png new file mode 100644 index 000000000..904354942 Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/discordflow.png differ diff --git a/docs/docs/guides/remote-providers/assets/interpreter.png b/docs/docs/guides/remote-providers/assets/interpreter.png new file mode 100644 index 000000000..c735e33ca Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/interpreter.png differ diff --git 
a/docs/docs/guides/remote-providers/assets/jan-ai-continue-ask.png b/docs/docs/guides/remote-providers/assets/jan-ai-continue-ask.png new file mode 100644 index 000000000..5ccc431d5 Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/jan-ai-continue-ask.png differ diff --git a/docs/docs/guides/remote-providers/assets/jan-ai-continue-comment.gif b/docs/docs/guides/remote-providers/assets/jan-ai-continue-comment.gif new file mode 100644 index 000000000..d7b5a0ec7 Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/jan-ai-continue-comment.gif differ diff --git a/docs/docs/guides/remote-providers/assets/jan-ai-discord-repo.png b/docs/docs/guides/remote-providers/assets/jan-ai-discord-repo.png new file mode 100644 index 000000000..77ec70192 Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/jan-ai-discord-repo.png differ diff --git a/docs/docs/guides/remote-providers/assets/jan-ai-openrouter.gif b/docs/docs/guides/remote-providers/assets/jan-ai-openrouter.gif new file mode 100644 index 000000000..fa45ec182 Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/jan-ai-openrouter.gif differ diff --git a/docs/docs/guides/remote-providers/assets/lmstudio.png b/docs/docs/guides/remote-providers/assets/lmstudio.png new file mode 100644 index 000000000..bffd0a00d Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/lmstudio.png differ diff --git a/docs/docs/guides/remote-providers/assets/mistral.png b/docs/docs/guides/remote-providers/assets/mistral.png new file mode 100644 index 000000000..0efeaef83 Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/mistral.png differ diff --git a/docs/docs/guides/remote-providers/assets/ollama.png b/docs/docs/guides/remote-providers/assets/ollama.png new file mode 100644 index 000000000..02a3278bf Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/ollama.png differ diff --git 
a/docs/docs/guides/remote-providers/assets/openrouter.png b/docs/docs/guides/remote-providers/assets/openrouter.png new file mode 100644 index 000000000..5f051ee76 Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/openrouter.png differ diff --git a/docs/docs/guides/remote-providers/assets/raycast-image.png b/docs/docs/guides/remote-providers/assets/raycast-image.png new file mode 100644 index 000000000..c0af00060 Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/raycast-image.png differ diff --git a/docs/docs/guides/remote-providers/assets/raycast.png b/docs/docs/guides/remote-providers/assets/raycast.png new file mode 100644 index 000000000..454d81f4b Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/raycast.png differ diff --git a/docs/docs/guides/remote-providers/assets/vscode.png b/docs/docs/guides/remote-providers/assets/vscode.png new file mode 100644 index 000000000..f361e16ab Binary files /dev/null and b/docs/docs/guides/remote-providers/assets/vscode.png differ diff --git a/docs/docs/guides/remote-providers/claude.mdx b/docs/docs/guides/remote-providers/claude.mdx new file mode 100644 index 000000000..968a469fe --- /dev/null +++ b/docs/docs/guides/remote-providers/claude.mdx @@ -0,0 +1,22 @@ +--- +title: Claude +sidebar_position: 6 +slug: /guides/engines/claude +description: A step-by-step guide on how to integrate Jan with Claude. 
+keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + Claude integration, + claude, + ] +--- + +Coming Soon diff --git a/docs/docs/guides/integration/groq.mdx b/docs/docs/guides/remote-providers/groq.mdx similarity index 97% rename from docs/docs/guides/integration/groq.mdx rename to docs/docs/guides/remote-providers/groq.mdx index a57bf16dd..c9837bcfc 100644 --- a/docs/docs/guides/integration/groq.mdx +++ b/docs/docs/guides/remote-providers/groq.mdx @@ -1,7 +1,7 @@ --- title: Groq -sidebar_position: 10 -slug: /guides/integration/groq +sidebar_position: 5 +slug: /guides/engines/groq description: Learn how to integrate Groq API with Jan for enhanced functionality. keywords: [ diff --git a/docs/docs/guides/integration/mistral.mdx b/docs/docs/guides/remote-providers/mistral.mdx similarity index 78% rename from docs/docs/guides/integration/mistral.mdx rename to docs/docs/guides/remote-providers/mistral.mdx index a44e23205..e93a02183 100644 --- a/docs/docs/guides/integration/mistral.mdx +++ b/docs/docs/guides/remote-providers/mistral.mdx @@ -1,26 +1,29 @@ --- title: Mistral AI -sidebar_position: 7 +sidebar_position: 4 +slug: /guides/engines/mistral description: A step-by-step guide on how to integrate Jan with Mistral AI. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, Mistral integration, ] --- ## How to Integrate Mistral AI with Jan -[Mistral AI](https://docs.mistral.ai/) provides two ways to use their Large Language Models (LLM): +[Mistral AI](https://docs.mistral.ai/) provides two ways to use their Large Language Models (LLM): + 1. API -2. Open-source models on Hugging Face. +2. Open-source models on Hugging Face. 
To integrate Jan with Mistral AI, follow the steps below: @@ -45,10 +48,10 @@ This tutorial demonstrates integrating Mistral AI with Jan using the API. 1. Navigate to `~/jan/models`. 2. Create a folder named `mistral-(modelname)` (e.g., `mistral-tiny`). 3. Inside, create a `model.json` file with these settings: - - Set `id` to the Mistral AI model ID. - - Set `format` to `api`. - - Set `engine` to `openai`. - - Set `state` to `ready`. + - Set `id` to the Mistral AI model ID. + - Set `format` to `api`. + - Set `engine` to `openai`. + - Set `state` to `ready`. ```json title="~/jan/models/mistral-tiny/model.json" { @@ -72,15 +75,15 @@ This tutorial demonstrates integrating Mistral AI with Jan using the API. }, "engine": "openai" } - ``` :::note -- For more details regarding the `model.json` settings and parameters fields, please see [here](../models/integrate-remote.mdx#modeljson). + +- For more details regarding the `model.json` settings and parameters fields, please see [here](/guides/engines/remote-server/#modeljson). - Mistral AI offers various endpoints. Refer to their [endpoint documentation](https://docs.mistral.ai/platform/endpoints/) to select the one that fits your requirements. Here, we use the `mistral-tiny` model as an example. -::: + ::: ### Step 3: Start the Model -1. Restart Jan and navigate to the **Hub**. -2. Locate your model and click the **Use** button. \ No newline at end of file +1. Restart Jan and navigate to the **Hub**. +2. Locate your model and click the **Use** button. 
diff --git a/docs/docs/guides/integration/azure.mdx b/docs/docs/guides/remote-providers/openai.mdx similarity index 79% rename from docs/docs/guides/integration/azure.mdx rename to docs/docs/guides/remote-providers/openai.mdx index 6c344a199..f0ac032c9 100644 --- a/docs/docs/guides/integration/azure.mdx +++ b/docs/docs/guides/remote-providers/openai.mdx @@ -1,23 +1,25 @@ --- title: Azure OpenAI -sidebar_position: 3 +sidebar_position: 2 +slug: /guides/engines/openai description: A step-by-step guide on how to integrate Jan with Azure OpenAI. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, integration, Azure OpenAI Service, ] --- -## How to Integrate Azure OpenAI with Jan +## Integrate Azure OpenAI with Jan The [Azure OpenAI Service](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview?source=docs) offers robust APIs, making it simple for you to incorporate OpenAI's language models into your applications. You can integrate Azure OpenAI with Jan by following the steps below: @@ -38,13 +40,14 @@ The [Azure OpenAI Service](https://learn.microsoft.com/en-us/azure/ai-services/o ### Step 2: Model Configuration -1. Go to the `~/jan/models` directory. +1. Go to the `~/jan/models` directory. 2. Make a new folder called `(your-deployment-name)`, for example `gpt-35-hieu-jan`. 3. Create a `model.json` file inside the folder with the specified configurations: - - Match the `id` property with both the folder name and your deployment name. - - Set the `format` property as `api`. - - Choose `openai` for the `engine` property. - - Set the `state` property as `ready`. + +- Match the `id` property with both the folder name and your deployment name. +- Set the `format` property as `api`. +- Choose `openai` for the `engine` property. +- Set the `state` property as `ready`. 
```json title="~/jan/models/gpt-35-hieu-jan/model.json" { @@ -71,10 +74,10 @@ The [Azure OpenAI Service](https://learn.microsoft.com/en-us/azure/ai-services/o ``` :::note -For more details regarding the `model.json` settings and parameters fields, please see [here](../models/integrate-remote.mdx#modeljson). +For more details regarding the `model.json` settings and parameters fields, please see [here](/guides/engines/remote-server/#modeljson). ::: ### Step 3: Start the Model -1. Restart Jan and go to the Hub. -2. Find your model in Jan application and click on the Use button. \ No newline at end of file +1. Restart Jan and go to the Hub. +2. Find your model in Jan application and click on the Use button. diff --git a/docs/docs/guides/models/integrate-remote.mdx b/docs/docs/guides/remote-providers/remote-server-integration.mdx similarity index 80% rename from docs/docs/guides/models/integrate-remote.mdx rename to docs/docs/guides/remote-providers/remote-server-integration.mdx index af881f999..e96a5ba3d 100644 --- a/docs/docs/guides/models/integrate-remote.mdx +++ b/docs/docs/guides/remote-providers/remote-server-integration.mdx @@ -1,17 +1,19 @@ --- title: Remote Server Integration -sidebar_position: 2 +sidebar_position: 1 +slug: /guides/engines/remote-server description: A step-by-step guide on how to set up Jan to connect with any remote or local API server. keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, import-models-manually, remote server, OAI compatible, @@ -28,7 +30,6 @@ This guide will show you how to configure Jan as a client and point it to any re 2. In this folder, add a `model.json` file with Filename as `model.json`, `id` matching folder name, `Format` as `api`, `Engine` as `openai`, and `State` as `ready`. 
- ```json title="~/jan/models/gpt-3.5-turbo-16k/model.json" { "sources": [ @@ -55,28 +56,28 @@ This guide will show you how to configure Jan as a client and point it to any re ### `model.json` -The `model.json` file is used to set up your local models. +The `model.json` file is used to set up your local models. :::note -- If you've set up your model's configuration in `nitro.json`, please note that `model.json` can overwrite the settings. + +- If you've set up your model's configuration in `nitro.json`, please note that `model.json` can overwrite the settings. - When using OpenAI models like GPT-3.5 and GPT-4, you can use the default settings in `model.json` file. -::: + ::: There are two important fields in model.json that you need to setup: #### Settings + This is the field where to set your engine configurations, there are two imporant field that you need to define for your local models: -| Term | Description | -|-------------------|---------------------------------------------------------| -| `ctx_len` | Defined based on the model's context size. | +| Term | Description | +| ----------------- | --------------------------------------------------------------------- | +| `ctx_len` | Defined based on the model's context size. | | `prompt_template` | Defined based on the model's trained template (e.g., ChatML, Alpaca). | -To set up the `prompt_template` based on your model, follow the steps below: - 1. Visit [Hugging Face](https://huggingface.co/), an open-source machine learning platform. - 2. Find the current model that you're using (e.g., [Gemma 7b it](https://huggingface.co/google/gemma-7b-it)). - 3. Review the text and identify the template. +To set up the `prompt_template` based on your model, follow the steps below: 1. Visit [Hugging Face](https://huggingface.co/), an open-source machine learning platform. 2. Find the current model that you're using (e.g., [Gemma 7b it](https://huggingface.co/google/gemma-7b-it)). 3. Review the text and identify the template. 
#### Parameters + `parameters` is the adjustable settings that affect how your model operates or processes the data. The fields in `parameters` are typically general and can be the same across models. An example is provided below: @@ -91,12 +92,11 @@ The fields in `parameters` are typically general and can be the same across mode } ``` - :::tip - - You can find the list of available models in the [OpenAI Platform](https://platform.openai.com/docs/models/overview). - - The `id` property needs to match the model name in the list. - - For example, if you want to use the [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo), you must set the `id` property to `gpt-4-1106-preview`. +- You can find the list of available models in the [OpenAI Platform](https://platform.openai.com/docs/models/overview). +- The `id` property needs to match the model name in the list. + - For example, if you want to use the [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo), you must set the `id` property to `gpt-4-1106-preview`. ::: @@ -129,7 +129,7 @@ Currently, you can only connect to one OpenAI-compatible endpoint at a time. ### 1. Configure a Client Connection 1. Navigate to the `~/jan/engines` folder. -2. Modify the `openai.json file`. +2. Modify the `openai.json file`. :::note @@ -153,11 +153,11 @@ Please note that currently, the code that supports any OpenAI-compatible endpoin 1. In `~/jan/models`, create a folder named `mistral-ins-7b-q4`. 2. In this folder, add a `model.json` file with Filename as `model.json`, ensure the following configurations: - - `id` matching folder name. - - `Format` set to `api`. - - `Engine` set to `openai` - - `State` set to `ready`. +- `id` matching folder name. +- `Format` set to `api`. +- `Engine` set to `openai` +- `State` set to `ready`. 
```json title="~/jan/models/mistral-ins-7b-q4/model.json" { @@ -181,8 +181,8 @@ Please note that currently, the code that supports any OpenAI-compatible endpoin }, "engine": "openai" } - ``` + ### 3. Start the Model 1. Restart Jan and navigate to the **Hub**. @@ -192,4 +192,4 @@ Please note that currently, the code that supports any OpenAI-compatible endpoin If you have questions or want more preconfigured GGUF models, please join our [Discord community](https://discord.gg/Dt7MxDyNNZ) for support, updates, and discussions. -::: \ No newline at end of file +::: diff --git a/docs/docs/guides/troubleshooting.mdx b/docs/docs/guides/troubleshooting.mdx new file mode 100644 index 000000000..fda83bb49 --- /dev/null +++ b/docs/docs/guides/troubleshooting.mdx @@ -0,0 +1,458 @@ +--- +title: Troubleshooting +slug: /troubleshooting +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +sidebar_position: 21 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + troubleshooting, + error codes, + broken build, + something amiss, + unexpected token, + undefined issue, + permission denied, + ] +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +## Broken Build + +To resolve the issue where your Jan is stuck in a broken build after installation. + + + + #### 1. Uninstall Jan + + Delete Jan from your `/Applications` folder. + + #### 2. Delete Application Data, Cache, and User Data + + ```zsh + # Step 1: Delete the application data + ## Newer versions + rm -rf ~/Library/Application\ Support/jan + ## Versions 0.2.0 and older + rm -rf ~/Library/Application\ Support/jan-electron + + # Step 2: Clear application cache + rm -rf ~/Library/Caches/jan* + + # Step 3: Remove all user data + rm -rf ~/jan + ``` + + #### 3. 
Additional Step for Versions Before 0.4.2 + + If you are using a version before `0.4.2`, you need to run the following commands: + + ```zsh + ps aux | grep nitro + # Looks for processes like `nitro` and `nitro_arm_64`, and kill them one by one by process ID + kill -9 + ``` + + #### 4. Download the Latest Version + + Download the latest version of Jan from our [homepage](https://jan.ai/). + + + + #### 1. Uninstall Jan + + To uninstall Jan on Windows, use the [Windows Control Panel](https://support.microsoft.com/en-us/windows/uninstall-or-remove-apps-and-programs-in-windows-4b55f974-2cc6-2d2b-d092-5905080eaf98). + + #### 2. Delete Application Data, Cache, and User Data + + ```sh + # You can delete the `/Jan` directory in Windows's AppData Directory by visiting the following path `%APPDATA%\Jan` + cd C:\Users\%USERNAME%\AppData\Roaming + rmdir /S jan + ``` + + #### 3. Additional Step for Versions Before 0.4.2 + + If you are using a version before `0.4.2`, you need to run the following commands: + + ```sh + # Find the process ID (PID) of the nitro process by filtering the list by process name + tasklist | findstr "nitro" + # Once you have the PID of the process you want to terminate, run the `taskkill` + taskkill /F /PID + ``` + + #### 4. Download the Latest Version + + Download the latest version of Jan from our [homepage](https://jan.ai/). + + + + + #### 1. Uninstall Jan + + + + + To uninstall Jan, you should use your package manager's uninstall or remove option. + + This will return your system to its state before the installation of Jan. + + This method can also reset all settings if you are experiencing any issues with Jan. + + + + + To uninstall Jan, run the following command: + + ```sh + sudo apt-get remove jan + # where jan is the name of Jan package + ``` + + This will return your system to its state before the installation of Jan. + + This method can also be used to reset all settings if you are experiencing any issues with Jan. 
+ + + + + To uninstall Jan, you can uninstall Jan by deleting the `.AppImage` file. + + If you wish to completely remove all user data associated with Jan after uninstallation, you can delete the user data at `~/jan`. + + This method can also reset all settings if you are experiencing any issues with Jan. + + + + + #### 2. Delete Application Data, Cache, and User Data + + ```sh + # You can delete the user data folders located at the following `~/jan` + rm -rf ~/jan + ``` + + #### 3. Additional Step for Versions Before 0.4.2 + + If you are using a version before `0.4.2`, you need to run the following commands: + + ```zsh + ps aux | grep nitro + # Looks for processes like `nitro` and `nitro_arm_64`, and kill them one by one by process ID + kill -9 + ``` + + #### 4. Download the Latest Version + + Download the latest version of Jan from our [homepage](https://jan.ai/). + + + + +By following these steps, you can cleanly uninstall and reinstall Jan, ensuring a smooth and error-free experience with the latest version. + +:::note + +Before reinstalling Jan, ensure it's completely removed from all shared spaces if it's installed on multiple user accounts on your device. + +::: + +## Troubleshooting NVIDIA GPU + +To resolve issues when the Jan app does not utilize the NVIDIA GPU on Windows and Linux systems. + +#### 1. Ensure GPU Mode Requirements + + + + + ##### NVIDIA Driver + + - Install an [NVIDIA Driver](https://www.nvidia.com/Download/index.aspx) supporting CUDA 11.7 or higher. + - Use the following command to verify the installation: + + ```sh + nvidia-smi + ``` + + ##### CUDA Toolkit + + - Install a [CUDA toolkit](https://developer.nvidia.com/cuda-downloads) compatible with your NVIDIA driver. + - Use the following command to verify the installation: + + ```sh + nvcc --version + ``` + + + + + ##### NVIDIA Driver + + - Install an [NVIDIA Driver](https://www.nvidia.com/Download/index.aspx) supporting CUDA 11.7 or higher. 
+ - Use the following command to verify the installation: + + ```sh + nvidia-smi + ``` + + ##### CUDA Toolkit + + - Install a [CUDA toolkit](https://developer.nvidia.com/cuda-downloads) compatible with your NVIDIA driver. + - Use the following command to verify the installation: + + ```sh + nvcc --version + ``` + ##### Linux Specifics + + - Ensure that `gcc-11`, `g++-11`, `cpp-11`, or higher is installed. + - See [instructions](https://gcc.gnu.org/projects/cxx-status.html#cxx17) for Ubuntu installation. + + - **Post-Installation Actions**: Add CUDA libraries to `LD_LIBRARY_PATH`. + - Follow the [Post-installation Actions](https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html#post-installation-actions) instructions. + + + + +#### 2. Switch to GPU Mode + +Jan defaults to CPU mode but automatically switches to GPU mode if your system supports it, selecting the GPU with the highest VRAM. Check this setting in `Settings` > `Advanced Settings`. + +##### Troubleshooting Tips + +If GPU mode isn't enabled by default: + +1. Confirm that you have installed an NVIDIA driver supporting CUDA 11.7 or higher. Refer to [CUDA compatibility](https://docs.nvidia.com/deploy/cuda-compatibility/index.html#binary-compatibility__table-toolkit-driver). +2. Ensure compatibility of the CUDA toolkit with your NVIDIA driver. Refer to [CUDA compatibility](https://docs.nvidia.com/deploy/cuda-compatibility/index.html#binary-compatibility__table-toolkit-driver). +3. For Linux, add CUDA's `.so` libraries to the `LD_LIBRARY_PATH`. For Windows, ensure that CUDA's `.dll` libraries are in the PATH. Refer to [Windows setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html#environment-setup). + +#### 3. Check GPU Settings + +1. Navigate to `Settings` > `Advanced Settings` > `Jan Data Folder` to access GPU settings. +2. Open the `settings.json` file in the `settings` folder. 
Here's an example: + +```json title="~/jan/settings/settings.json" +{ + "notify": true, + "run_mode": "gpu", + "nvidia_driver": { + "exist": true, + "version": "531.18" + }, + "cuda": { + "exist": true, + "version": "12" + }, + "gpus": [ + { + "id": "0", + "vram": "12282" + }, + { + "id": "1", + "vram": "6144" + }, + { + "id": "2", + "vram": "6144" + } + ], + "gpu_highest_vram": "0" +} +``` + +#### 4. Restart Jan + +Restart Jan application to make sure it works. + +##### Troubleshooting Tips + +- Ensure `nvidia_driver` and `cuda` fields indicate installed software. +- If `gpus` field is empty or lacks your GPU, check NVIDIA driver and CUDA toolkit installations. +- For further assistance, share the `settings.json` file. + +#### Tested Configurations + +- **Windows 11 Pro 64-bit:** + + - GPU: NVIDIA GeForce RTX 4070ti + - CUDA: 12.2 + - NVIDIA driver: 531.18 (Bare metal) + +- **Ubuntu 22.04 LTS:** + + - GPU: NVIDIA GeForce RTX 4070ti + - CUDA: 12.2 + - NVIDIA driver: 545 (Bare metal) + +- **Ubuntu 20.04 LTS:** + + - GPU: NVIDIA GeForce GTX 1660ti + - CUDA: 12.1 + - NVIDIA driver: 535 (Proxmox VM passthrough GPU) + +- **Ubuntu 18.04 LTS:** + - GPU: NVIDIA GeForce GTX 1660ti + - CUDA: 12.1 + - NVIDIA driver: 535 (Proxmox VM passthrough GPU) + +#### Common Issues and Solutions + +1. If the issue persists, try installing the [Nightly version](/guides/quickstart/#nightly-releases). +2. Ensure your (V)RAM is accessible; some users with virtual RAM may require additional configuration. +3. Seek assistance in [Jan Discord](https://discord.gg/mY69SZaMaC). + +## How to Get Error Logs + +To get the error logs of your Jan application, follow the steps below: + +#### Jan Application + +1. Navigate to the main dashboard. +2. Click the **gear icon (⚙️)** on the bottom left of your screen. +3. Under the **Settings screen**, click the **Advanced Settings**. +4. On the **Jan Data Folder** click the **folder icon (📂)** to access the data. +5. Click the **logs** folder. 
+ +#### Jan UI + +1. Open your Unix or Linux terminal. +2. Use the following commands to get the recent 50 lines of log files: + +```bash +tail -n 50 ~/jan/logs/app.log + +``` + +#### Jan API Server + +1. Open your Unix or Linux terminal. +2. Use the following commands to get the recent 50 lines of log files: + +```bash +tail -n 50 ~/jan/logs/server.log + +``` + +:::warning +Ensure to redact any private or sensitive information when sharing logs or error details. +::: + +:::note +If you have any questions or are looking for support, please don't hesitate to contact us via our [Discord community](https://discord.gg/Dt7MxDyNNZ) or create a new issue in our [GitHub repository](https://github.com/janhq/jan/issues/new/choose). +::: + +## Permission Denied + +When running Jan, you might encounter the following error message: + +``` +Uncaught (in promise) Error: Error invoking layout-480796bff433a3a3.js:538 remote method 'installExtension': +Error Package /Applications/Jan.app/Contents/Resources/app.asar.unpacked/pre-install/janhq-assistant-extension-1.0.0.tgz does not contain a valid manifest: +Error EACCES: permission denied, mkdtemp '/Users/username/.npm/_cacache/tmp/ueCMn4' +``` + +This error mainly caused by permission problem during installation. To resolve this issue, follow these steps: + +1. Open your terminal. + +2. Execute the following command to change ownership of the `~/.npm` directory to the current user: + +```sh +sudo chown -R $(whoami) ~/.npm +``` + +:::note + +- This command ensures that the necessary permissions are granted for Jan installation, resolving the encountered error. +- If you have any questions or are looking for support, please don't hesitate to contact us via our [Discord community](https://discord.gg/Dt7MxDyNNZ) or create a new issue in our [GitHub repository](https://github.com/janhq/jan/issues/new/choose). 
+ ::: + +## Something's Amiss + +When you start a chat with a model and encounter with a Something's Amiss error, here's how to resolve it: + +1. Ensure your OS is up to date. +2. Choose a model smaller than 80% of your hardware's V/RAM. For example, on an 8GB machine, opt for models smaller than 6GB. +3. Install the latest [Nightly release](/guides/quickstart/#nightly-releases) or [clear the application cache](/troubleshooting/#broken-build) when reinstalling Jan. +4. Confirm your V/RAM accessibility, particularly if using virtual RAM. +5. Nvidia GPU users should download [CUDA](https://developer.nvidia.com/cuda-downloads). +6. Linux users, ensure your system meets the requirements of gcc 11, g++ 11, cpp 11, or higher. Refer to this [link](/troubleshooting/#troubleshooting-nvidia-gpu) for details. +7. You might use the wrong port when you [check the app logs](/troubleshooting/#how-to-get-error-logs) and encounter the Bind address failed at 127.0.0.1:3928 error. To check the port status, try use the `netstat` command, like the following: + + + + ```sh + netstat -an | grep 3928 + ``` + + + ```sh + netstat -ano | find "3928" + tasklist /fi "PID eq 3928" + ``` + + + ```sh + netstat -anpe | grep "3928" + ``` + + + +:::note + +`Netstat` displays the contents of various network-related data structures for active connections + +::: + +:::tip + +Jan uses the following ports: + +- Nitro: `3928` +- Jan API Server: `1337` +- Jan Documentation: `3001` + +::: + +:::note +If you have any questions or are looking for support, please don't hesitate to contact us via our [Discord community](https://discord.gg/Dt7MxDyNNZ) or create a new issue in our [GitHub repository](https://github.com/janhq/jan/issues/new/choose). +::: + +## Undefined Issue + +Encountering an `undefined issue` in Jan is caused by errors related to the Nitro tool or other internal processes. It can be resolved through the following steps: + +1. 
Clear the Jan folder and then reopen the application to determine if the problem persists. +2. Manually run the nitro tool located at `~/jan/extensions/@janhq/inference-nitro-extensions/dist/bin/(your-os)/nitro` to check for error messages. +3. Address any nitro error messages that are identified and reassess the persistence of the issue. +4. Reopen Jan to determine if the problem has been resolved after addressing any identified errors. +5. If the issue persists, please share the [app logs](/troubleshooting/#how-to-get-error-logs) via [Jan Discord](https://discord.gg/mY69SZaMaC) for further assistance and troubleshooting. + +:::note +If you have any questions or are looking for support, please don't hesitate to contact us via our [Discord community](https://discord.gg/Dt7MxDyNNZ) or create a new issue in our [GitHub repository](https://github.com/janhq/jan/issues/new/choose). +::: + +## Unexpected Token + +Encountering the `Unexpected token` error when initiating a chat with OpenAI models is mainly caused by either your OpenAI API key or the location from which you access OpenAI. This issue can be solved through the following steps: + +1. Obtain an OpenAI API key from [OpenAI's developer platform](https://platform.openai.com/) and integrate it into your application. + +2. Trying a VPN could potentially solve the issue, especially if it's related to region locking for accessing OpenAI services. By connecting through a VPN, you may bypass such restrictions and successfully initiate chats with OpenAI models. + +:::note +If you have any questions or are looking for support, please don't hesitate to contact us via our [Discord community](https://discord.gg/Dt7MxDyNNZ) or create a new issue in our [GitHub repository](https://github.com/janhq/jan/issues/new/choose). 
+::: diff --git a/docs/docs/guides/user-guides/advanced-settings.mdx b/docs/docs/guides/user-guides/advanced-settings.mdx new file mode 100644 index 000000000..9a26e952f --- /dev/null +++ b/docs/docs/guides/user-guides/advanced-settings.mdx @@ -0,0 +1,271 @@ +--- +title: Advanced Settings +slug: /guides/advanced +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +sidebar_position: 11 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + Advanced Settings, + HTTPS Proxy, + SSL, + settings, + Jan settings, + ] +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +This guide will show you how to use the advanced settings in Jan. + +## Access the Advanced Settings + +To access the Jan's advanced settings, follow the steps below: + +1. Navigate to the main dashboard. +2. Click the **gear icon (⚙️)** on the bottom left of your screen. +3. Under the **Settings screen**, click the **Advanced Settings**. +4. You can configure the following settings: + +| Feature | Description | +| ---------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| **Keyboard Shortcuts** | Keyboard shortcuts speed up your workflow. For a quick overview of useful keyboard shortcuts, refer to the list [below](advanced-settings.mdx#keyboard-shortcuts). | +| **Experimental Mode** | Enables experimental features that may be unstable. | +| **GPU Acceleration** | Enables the boosting of your model performance by using your GPU devices for acceleration. | +| **Jan Data Folder** | Location for messages, model configurations, and user data. Changeable to a different location. 
| +| **HTTPS Proxy & Ignore SSL Certificate** | Use a proxy server for internet connections and ignore SSL certificates for self-signed certificates. Please check out the guide on how to set up your own HTTPS proxy server [here](advanced-settings.mdx#https-proxy). | +| **Clear Logs** | Removes all logs from the Jan application. | +| **Reset To Factory Default** | Resets the application to its original state, deleting all data including model customizations and conversation history. | + +## Keyboard Shortcuts + +Here are some of the keyboard shortcuts that you can use in Jan. + + + +| Combination | Description | +| --------------- | -------------------------------------------------- | +| `⌘ E` | Show list your models | +| `⌘ K` | Show list navigation pages | +| `⌘ B` | Toggle collapsible left panel | +| `⌘ ,` | Navigate to setting page | +| `Enter` | Send a message | +| `Shift + Enter` | Insert new line in input box | +| `Arrow Up` | Navigate to the previous option (within the search dialog) | +| `Arrow Down` | Navigate to the next option (within the search dialog) | + + + + +| Combination | Description | +| --------------- | ---------------------------------------------------------- | +| `Ctrl E` | Show list your models | +| `Ctrl K` | Show list navigation pages | +| `Ctrl B` | Toggle collapsible left panel | +| `Ctrl ,` | Navigate to setting page | +| `Enter` | Send a message | +| `Shift + Enter` | Insert new line in input box | +| `Arrow Up` | Navigate to the previous option (within the search dialog) | +| `Arrow Down` | Navigate to the next option (within the search dialog) | + + + + +| Combination | Description | +| --------------- | ---------------------------------------------------------- | +| `Ctrl E` | Show list your models | +| `Ctrl K` | Show list navigation pages | +| `Ctrl B` | Toggle collapsible left panel | +| `Ctrl ,` | Navigate to setting page | +| `Enter` | Send a message | +| `Shift + Enter` | Insert new line in input box | +| `Arrow Up` | 
Navigate to the previous option (within the search dialog) | +| `Arrow Down` | Navigate to the next option (within the search dialog) | + + + + +:::note +The keyboard shortcuts are customizable. +::: + +## Enable the Experimental Mode + +To try out new features that are still in the testing phase, follow the steps below: + +1. Navigate to the main dashboard. +2. Click the **gear icon (⚙️)** on the bottom left of your screen. +3. Under the **Settings screen**, click the **Advanced Settings**. +4. On the **Experimental Mode** click the slider to enable. + +## Enable the GPU Acceleration + +To enhance your model performance, follow the steps below: + +:::warning +Ensure that you have read the [troubleshooting guide](/troubleshooting/#troubleshooting-nvidia-gpu) here for further assistance. +::: + +1. Navigate to the main dashboard. +2. Click the **gear icon (⚙️)** on the bottom left of your screen. +3. Under the **Settings screen**, click the **Advanced Settings**. +4. On the **GPU Acceleration** click the slider to enable. + +## Access the Jan Data Folder + +To access the folder where messages, model configurations and user data are stored, follow the steps below: + +1. Navigate to the main dashboard. +2. Click the **gear icon (⚙️)** on the bottom left of your screen. +3. Under the **Settings screen**, click the **Advanced Settings**. +4. On the **Jan Data Folder** click the **folder icon (📂)** to access the data or the **pencil icon (✏️)** to change the folder where you keep your data. + +## HTTPS Proxy + +HTTPS Proxy encrypts data between your browser and the internet, making it hard for outsiders to intercept or read. It also helps you to maintain your privacy and security while being able to bypass regional restrictions on the internet. + +:::note + +- When configuring Jan using an HTTPS proxy, the speed of the downloading model may be affected due to the encryption and decryption process. It also depends on the networking of the cloud service provider.
+- HTTPS Proxy does not affect the remote model usage. + +::: + +### Setting Up Your Own HTTPS Proxy Server + +This guide provides a simple overview of setting up an HTTPS proxy server using **Squid**, a widely used open-source proxy software. + +:::note +Other software options are also available depending on your requirements. +::: + +#### Step 1: Choosing a Server + +1. Firstly, you need to choose a server to host your proxy server. + :::note + We recommend using a well-known cloud provider service like: + +- Amazon AWS +- Google Cloud +- Microsoft Azure +- Digital Ocean + ::: + +2. Ensure that your server has a public IP address and is accessible from the internet. + +#### Step 2: Installing Squid + +Install **Squid** using the following command: + +```bash +sudo apt-get update +sudo apt-get install squid +``` + +#### Step 3: Configure Squid for HTTPS + +To enable HTTPS, you will need to configure Squid with SSL support. + +1. Squid requires an SSL certificate to be able to handle HTTPS traffic. You can generate a self-signed certificate or obtain one from a Certificate Authority (CA). For a self-signed certificate, you can use OpenSSL: + +```bash +openssl req -new -newkey rsa:2048 -days 365 -nodes -x509 -keyout squid-proxy.pem -out squid-proxy.pem +``` + +2. Edit the Squid configuration file `/etc/squid/squid.conf` to include the path to your SSL certificate and enable the HTTPS port: + +```bash +http_port 3128 ssl-bump cert=/path/to/your/squid-proxy.pem +ssl_bump server-first all +ssl_bump bump all +``` + +3. To intercept HTTPS traffic, Squid uses a process called SSL Bumping. This process allows Squid to decrypt and re-encrypt HTTPS traffic. To enable SSL Bumping, ensure the `ssl_bump` directives are configured correctly in your `squid.conf` file. + +#### Step 4 (Optional): Configure ACLs and Authentication + +1. You can define rules to control who can access your proxy.
This is done by editing the squid.conf file and defining ACLs: + +```bash +acl allowed_ips src "/etc/squid/allowed_ips.txt" +http_access allow allowed_ips +``` + +2. If you want to add an authentication layer, Squid supports several authentication schemes. Basic authentication setup might look like this: + +```bash +auth_param basic program /usr/lib/squid/basic_ncsa_auth /etc/squid/passwords +acl authenticated proxy_auth REQUIRED +http_access allow authenticated +``` + +#### Step 5: Restart and Test Your Proxy + +1. After configuring, restart Squid to apply the changes: + +```bash +sudo systemctl restart squid +``` + +2. To test, configure your browser or another client to use the proxy server with its IP address and port (default is 3128). +3. Check if you can access the internet through your proxy. + +:::tip + +Tips for Securing Your Proxy: + +- **Firewall rules**: Ensure that only intended users or IP addresses can connect to your proxy server. This can be achieved by setting up appropriate firewall rules. +- **Regular updates**: Keep your server and proxy software updated to ensure that you are protected against known vulnerabilities. +- **Monitoring and logging**: Monitor your proxy server for unusual activity and enable logging to keep track of the traffic passing through your proxy. + +::: + +### Setting Up Jan to Use Your HTTPS Proxy + +Once you have your HTTPS proxy server set up, you can configure Jan to use it. + +1. Navigate to **Settings** > **Advanced Settings**. +2. On the **HTTPS Proxy** click the slider to enable. +3. Input your domain in the blank field. + +## Ignore SSL Certificate + +To allow self-signed or unverified certificates, follow the steps below: + +1. Navigate to the main dashboard. +2. Click the **gear icon (⚙️)** on the bottom left of your screen. +3. Under the **Settings screen**, click the **Advanced Settings**. +4. On the **Ignore SSL Certificates** click the slider to enable.
+ +## Clear Logs + +To clear all logs on your Jan app, follow the steps below: +:::warning +This feature clears all the data in your **Jan Data Folder**. +::: + +1. Navigate to the main dashboard. +2. Click the **gear icon (⚙️)** on the bottom left of your screen. +3. Under the **Settings screen**, click the **Advanced Settings**. +4. On the **Clear Logs** click the **Clear** button. + +## Reset To Factory Default + +To reset the Jan app to its original state, follow the steps below: +:::danger[Remember!] +This irreversible action is only recommended if the application is corrupted. +::: + +1. Navigate to the main dashboard. +2. Click the **gear icon (⚙️)** on the bottom left of your screen. +3. Under the **Settings screen**, click the **Advanced Settings**. +4. On the **Reset To Factory Default** click the **Reset** button. diff --git a/docs/docs/guides/user-guides/jan-data-folder.mdx b/docs/docs/guides/user-guides/jan-data-folder.mdx new file mode 100644 index 000000000..b2bf14968 --- /dev/null +++ b/docs/docs/guides/user-guides/jan-data-folder.mdx @@ -0,0 +1,23 @@ +--- +title: Jan Data Folder +slug: /guides/data-folder +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server.
+sidebar_position: 6 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + data folder, + source folder, + Jan data, + ] +--- + +Coming Soon diff --git a/docs/docs/guides/start-server.mdx b/docs/docs/guides/user-guides/local-server.mdx similarity index 59% rename from docs/docs/guides/start-server.mdx rename to docs/docs/guides/user-guides/local-server.mdx index d293bc646..421981503 100644 --- a/docs/docs/guides/start-server.mdx +++ b/docs/docs/guides/user-guides/local-server.mdx @@ -1,28 +1,47 @@ --- -title: Local Server -sidebar_position: 4 +title: Local Server or API Endpoint +slug: /guides/local-api description: A step-by-step guide to start Jan Local Server. +sidebar_position: 10 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + local server, + start server, + api endpoint, + ] --- - Jan provides a built-in API server that can be used as a drop-in for OpenAI's API local replacement. This guide will walk you through on how to start the local server and use it to make request to the local server. ## Step 1: Set the Local Server + To start the local server, follow the steps below: + 1. Navigate to the Jan main menu dashboard. 2. Click the corresponding icon on the bottom left side of your screen. 3. Select the model you want to use under the Model Settings screen to set the LLM for your local server. 4. 
Configure the server settings as follows: -| Feature | Description | Default Setting | -|-------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------| -| Local Server Address | By default, Jan is only accessible on the same computer it's running on, using the address 127.0.0.1. You can change this to 0.0.0.0 to let other devices on your local network access it. However, this is less secure than just allowing access from the same computer. | `localhost (127.0.0.1)` | -| Port | Jan runs on port 1337 by default. The port can be changed to any other port number as needed. | `1337` | -| Cross-Origin Resource Sharing (CORS) | Manages resource access from external domains. Enabled for security by default but can be disabled if needed. | Enabled | -| Verbose Server Logs | Provides extensive details about server activities as the local server runs, displayed at the center of the screen. | Not specified (implied enabled) | +| Feature | Description | Default Setting | +| ------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------- | +| Local Server Address | By default, Jan is only accessible on the same computer it's running on, using the address 127.0.0.1. You can change this to 0.0.0.0 to let other devices on your local network access it. However, this is less secure than just allowing access from the same computer. | `localhost (127.0.0.1)` | +| Port | Jan runs on port 1337 by default. The port can be changed to any other port number as needed. | `1337` | +| Cross-Origin Resource Sharing (CORS) | Manages resource access from external domains. 
Enabled for security by default but can be disabled if needed. | Enabled | +| Verbose Server Logs | Provides extensive details about server activities as the local server runs, displayed at the center of the screen. | Not specified (implied enabled) | ## Step 2: Start and Use the Built-in API Server + Once you have set the server settings, you can start the server by following the steps below: + 1. Click the **Start Server** button on the top left of your screen. :::note @@ -35,6 +54,7 @@ When the server starts, you'll see a message like `Server listening at http://12 4. In this example, we will show you how it works using the `Chat` endpoint. 5. Click the **Try it out** button. 6. The Chat endpoint has the following `cURL request example` when running using a `tinyllama-1.1b` model local server: + ```json { "messages": [ @@ -60,7 +80,9 @@ When the server starts, you'll see a message like `Server listening at http://12 } ' ``` + 7. The endpoint returns the following `JSON response body`: + ```json { "choices": [ @@ -84,4 +106,4 @@ When the server starts, you'll see a message like `Server listening at http://12 "total_tokens": 533 } } -``` \ No newline at end of file +``` diff --git a/docs/docs/guides/user-guides/manage-assistants.mdx b/docs/docs/guides/user-guides/manage-assistants.mdx new file mode 100644 index 000000000..27330f187 --- /dev/null +++ b/docs/docs/guides/user-guides/manage-assistants.mdx @@ -0,0 +1,22 @@ +--- +title: Manage Assistants +slug: /guides/assistants +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
+sidebar_position: 8 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + manage assistants, + assistants, + ] +--- + +Coming Soon diff --git a/docs/docs/guides/user-guides/manage-models.mdx b/docs/docs/guides/user-guides/manage-models.mdx new file mode 100644 index 000000000..1fa2ed3f0 --- /dev/null +++ b/docs/docs/guides/user-guides/manage-models.mdx @@ -0,0 +1,24 @@ +--- +title: Manage Models +slug: /guides/models +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. +sidebar_position: 7 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + models, + remote models, + local models, + manage models, + ] +--- + +Coming Soon diff --git a/docs/docs/guides/thread.mdx b/docs/docs/guides/user-guides/manage-threads.mdx similarity index 81% rename from docs/docs/guides/thread.mdx rename to docs/docs/guides/user-guides/manage-threads.mdx index fdd8fb603..7344efb41 100644 --- a/docs/docs/guides/thread.mdx +++ b/docs/docs/guides/user-guides/manage-threads.mdx @@ -1,15 +1,28 @@ --- -title: Thread Management -sidebar_position: 3 -hide_table_of_contents: true +title: Manage Threads +slug: /guides/threads description: Manage your interaction with AI locally. +sidebar_position: 9 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + threads, + chat history, + thread history, + ] --- - -Jan provides a straightforward and private solution for managing your threads with AI on your own device. As you interact with AI using Jan, you'll accumulate a history of threads. 
+Jan provides a straightforward and private solution for managing your threads with AI on your own device. As you interact with AI using Jan, you'll accumulate a history of threads. Jan offers easy tools to organize, delete, or review your past threads with AI. This guide will show you how to keep your threads private and well-organized. - ### View Thread History To view your thread history, follow the steps below: 1. Navigate to the main dashboard. @@ -17,7 +30,7 @@ Jan offers easy tools to organize, delete, or review your past threads with AI. 3. To view a specific thread, simply choose the one you're interested in and then scroll up or down to explore the entire conversation. - ### Manage Thread via Jan Data Folder + ### Manage the Threads via Folder To manage your thread history and configurations, follow the steps below: 1. Navigate to the Thread that you want to manage via the list of threads on the left side of the dashboard. 2. Click on the **three dots (⋮)** in the Thread section. @@ -36,14 +49,12 @@ Jan offers easy tools to organize, delete, or review your past threads with AI. This will delete all messages in the thread while keeping the thread settings. ::: - ### Delete Threads History To delete a thread, follow the steps below: 1. Navigate to the Thread that you want to delete. 2. Click on the **three dots (⋮)** in the Thread section. 3. Select the **Delete Thread** button. - :::note This will delete all messages and the thread settings. ::: diff --git a/docs/docs/guides/user-guides/overview-guides.mdx b/docs/docs/guides/user-guides/overview-guides.mdx new file mode 100644 index 000000000..bb522ccf4 --- /dev/null +++ b/docs/docs/guides/user-guides/overview-guides.mdx @@ -0,0 +1,20 @@ +--- +title: Overview +slug: /guides/overview +description: Jan Docs | Jan is a ChatGPT-alternative that runs on your own computer, with a local API server.
+sidebar_position: 5 +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + ] +--- + +Coming Soon diff --git a/docs/docs/hardware/community.md b/docs/docs/hardware/community.md index e1825b24b..da142b7c3 100644 --- a/docs/docs/hardware/community.md +++ b/docs/docs/hardware/community.md @@ -1,12 +1,23 @@ --- title: Hardware Examples description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. -keywords: [Jan AI, Jan, ChatGPT alternative, local AI, private AI, conversational AI, no-subscription fee, large language model ] +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + ] --- ## Add your own example -Add your own examples to this page by creating a new file in the `docs/docs/hardware/examples` directory. +Add your own examples to this page by creating a new file in the `docs/docs/hardware/examples` directory. ```shell docs @@ -18,9 +29,10 @@ docs // highlight-next-line └── .md ``` + ### File and Title Convention -We use a specific naming convention for the file name. +We use a specific naming convention for the file name. ```shell # Filename @@ -52,4 +64,4 @@ You are allowed to include affiliate links in your example. ## Longer-Term -We will likely build a simple web app to make it easier to add your own examples, sort and retrieve. \ No newline at end of file +We will likely build a simple web app to make it easier to add your own examples, sort and retrieve. 
diff --git a/docs/docs/how-we-work.md b/docs/docs/how-we-work.md index e81099d18..5ab1cc15e 100644 --- a/docs/docs/how-we-work.md +++ b/docs/docs/how-we-work.md @@ -1,5 +1,22 @@ --- title: How We Work +slug: /how-we-work +description: How we work at Jan +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + build in public, + remote team, + how we work, + ] --- ### Open Source diff --git a/docs/docs/how-we-work/analytics/analytics.md b/docs/docs/how-we-work/analytics/analytics.md index 79e107a83..22957f01c 100644 --- a/docs/docs/how-we-work/analytics/analytics.md +++ b/docs/docs/how-we-work/analytics/analytics.md @@ -1,5 +1,20 @@ --- title: Analytics +slug: /how-we-work/analytics +description: Jan's Analytics philosophy and implementation +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + analytics, + ] --- Adhering to Jan's privacy preserving philosophy, our analytics philosophy is to get "barely-enough-to-function'. 
diff --git a/docs/docs/how-we-work/engineering/engineering.md b/docs/docs/how-we-work/engineering/engineering.md index 1db5c3912..63e797629 100644 --- a/docs/docs/how-we-work/engineering/engineering.md +++ b/docs/docs/how-we-work/engineering/engineering.md @@ -4,14 +4,15 @@ description: Jan is a ChatGPT-alternative that runs on your own computer, with a slug: /engineering keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/how-we-work/engineering/qa.mdx b/docs/docs/how-we-work/engineering/qa.mdx index f43caae4a..9957aaefa 100644 --- a/docs/docs/how-we-work/engineering/qa.mdx +++ b/docs/docs/how-we-work/engineering/qa.mdx @@ -4,14 +4,15 @@ description: Jan is a ChatGPT-alternative that runs on your own computer, with a slug: /engineering/qa keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/how-we-work/product-design/product-design.md b/docs/docs/how-we-work/product-design/product-design.md index a2016b6b8..30c1e5b21 100644 --- a/docs/docs/how-we-work/product-design/product-design.md +++ b/docs/docs/how-we-work/product-design/product-design.md @@ -1,5 +1,20 @@ --- title: Product & Design +slug: /how-we-work/product-design +description: How we work on product design +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + product design, + ] --- ## Roadmap @@ -8,4 +23,4 @@ title: Product & Design - Discord's #roadmap channel - Work with the community to turn conversations into Product 
Specs - Future System? - - Use Canny? \ No newline at end of file + - Use Canny? diff --git a/docs/docs/how-we-work/project-management/project-management.md b/docs/docs/how-we-work/project-management/project-management.md index 58af4a0d3..b1e76757e 100644 --- a/docs/docs/how-we-work/project-management/project-management.md +++ b/docs/docs/how-we-work/project-management/project-management.md @@ -1,5 +1,20 @@ --- title: Project Management +slug: /how-we-work/project-management +description: Project management at Jan +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + project management, + ] --- We use the [Jan Monorepo Project](https://github.com/orgs/janhq/projects/5) in Github to manage our roadmap and sprint Kanbans. @@ -58,7 +73,6 @@ We aim to always sprint on `tasks` that are a part of the [current roadmap](http - `Urgent bugs`: assign to an owner (or @engineers if you are not sure) && tag the current `sprint` & `milestone` - `All else`: assign the correct roadmap `label(s)` and owner (if any) - #### Request for help As a result, our feature prioritization can feel a bit black box at times. diff --git a/docs/docs/how-we-work/strategy/strategy.md b/docs/docs/how-we-work/strategy/strategy.md index 09d9b9fb4..001a7c8a2 100644 --- a/docs/docs/how-we-work/strategy/strategy.md +++ b/docs/docs/how-we-work/strategy/strategy.md @@ -3,6 +3,7 @@ title: Strategy --- We only have 2 planning parameters: + - 10 year vision - 2 week sprint - Quarterly OKRs @@ -46,7 +47,6 @@ Jan is a seamless user experience that runs on your personal computer, that glue - We run on top of a local folder of non-proprietary files, that anyone can tinker with (yes, even other apps!) 
- We provide open formats for packaging and distributing AI to run reproducibly across devices - ## Prerequisites - [Figma](https://figma.com) diff --git a/docs/docs/how-we-work/website-docs/website-docs.md b/docs/docs/how-we-work/website-docs/website-docs.md index 19fdc1676..007cd16d7 100644 --- a/docs/docs/how-we-work/website-docs/website-docs.md +++ b/docs/docs/how-we-work/website-docs/website-docs.md @@ -1,5 +1,21 @@ --- title: Website & Docs +slug: /how-we-work/website-docs/ +description: Information about the Jan website and documentation. +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + website, + documentation, + ] --- This website is built using [Docusaurus 3.0](https://docusaurus.io/), a modern static website generator. diff --git a/docs/docs/integrations/tensorrt.md b/docs/docs/integrations/tensorrt.md deleted file mode 100644 index 8a77d1436..000000000 --- a/docs/docs/integrations/tensorrt.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -title: TensorRT-LLM ---- - -## Quicklinks - -- Jan Framework [Extension Code](https://github.com/janhq/jan/tree/main/extensions/inference-triton-trtllm-extension) -- TensorRT [Source URL](https://github.com/NVIDIA/TensorRT-LLM) diff --git a/docs/docs/platforms/desktop.md b/docs/docs/platforms/desktop.md index fb4ea8389..1fed9274a 100644 --- a/docs/docs/platforms/desktop.md +++ b/docs/docs/platforms/desktop.md @@ -4,14 +4,15 @@ slug: /desktop description: Turn your computer into an AI PC keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/platforms/mobile.md b/docs/docs/platforms/mobile.md index 827544201..d502cb99a 100644 --- a/docs/docs/platforms/mobile.md +++ 
b/docs/docs/platforms/mobile.md @@ -4,13 +4,14 @@ slug: /mobile description: Jan Mobile allows you to bring your AI on the go keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] ---- \ No newline at end of file +--- diff --git a/docs/docs/pricing/pricing.md b/docs/docs/pricing/pricing.md index 233610468..958c021ee 100644 --- a/docs/docs/pricing/pricing.md +++ b/docs/docs/pricing/pricing.md @@ -1,6 +1,20 @@ --- title: Pricing slug: /pricing +description: Pricing for Jan +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + pricing, + ] --- | $0 | $1 | Enterprise | diff --git a/docs/docs/privacy/privacy.md b/docs/docs/privacy/privacy.md index 56e81f3a1..9bf408810 100644 --- a/docs/docs/privacy/privacy.md +++ b/docs/docs/privacy/privacy.md @@ -1,3 +1,7 @@ +--- +title: Privacy - Jan +--- + # Privacy Policy Jan is committed to protecting your privacy and ensuring that your personal information is handled in a safe and responsible way. This policy outlines how we collect, store, and use your personal information when you use our mobile application. diff --git a/docs/docs/releases/changelog/README.mdx b/docs/docs/releases/changelog/README.mdx index 09e6d8222..8cd181cd3 100644 --- a/docs/docs/releases/changelog/README.mdx +++ b/docs/docs/releases/changelog/README.mdx @@ -5,15 +5,16 @@ slug: /changelog description: Jan is a ChatGPT-alternative that runs on your own computer, with a local API server. 
keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, - build extension, + large language models, + changelog, ] --- diff --git a/docs/docs/releases/changelog/cache.json b/docs/docs/releases/changelog/cache.json index fff125158..13bb08d60 100644 --- a/docs/docs/releases/changelog/cache.json +++ b/docs/docs/releases/changelog/cache.json @@ -1,5 +1,568 @@ { "releases": [ + { + "url": "https://api.github.com/repos/janhq/jan/releases/147163406", + "assets_url": "https://api.github.com/repos/janhq/jan/releases/147163406/assets", + "upload_url": "https://uploads.github.com/repos/janhq/jan/releases/147163406/assets{?name,label}", + "html_url": "https://github.com/janhq/jan/releases/tag/v0.4.9", + "id": 147163406, + "author": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": 
"https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "node_id": "RE_kwDOKIBx0s4IxYkO", + "tag_name": "v0.4.9", + "target_commitish": "3a3bceb0c01bfe69d71156891f169e39bca7ebb7", + "name": "0.4.9", + "draft": false, + "prerelease": false, + "created_at": "2024-03-19T03:06:47Z", + "published_at": "2024-03-19T04:45:39Z", + "assets": [ + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157403858", + "id": 157403858, + "node_id": "RA_kwDOKIBx0s4JYcrS", + "name": "jan-linux-amd64-0.4.9.deb", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/octet-stream", + "state": "uploaded", + "size": 118201794, + "download_count": 94, + "created_at": "2024-03-19T04:08:03Z", + "updated_at": "2024-03-19T04:08:06Z", + "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.4.9/jan-linux-amd64-0.4.9.deb" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157403802", + "id": 157403802, + "node_id": "RA_kwDOKIBx0s4JYcqa", + "name": "jan-linux-x86_64-0.4.9.AppImage", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/octet-stream", + "state": "uploaded", + "size": 156697166, + "download_count": 98, + "created_at": "2024-03-19T04:06:51Z", + "updated_at": "2024-03-19T04:06:55Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/jan-linux-x86_64-0.4.9.AppImage" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157403986", + "id": 157403986, + "node_id": "RA_kwDOKIBx0s4JYctS", + "name": "jan-mac-arm64-0.4.9.dmg", + "label": "", + "uploader": { + "login": 
"github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/octet-stream", + "state": "uploaded", + "size": 132665337, + "download_count": 135, + "created_at": "2024-03-19T04:10:15Z", + "updated_at": "2024-03-19T04:10:26Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/jan-mac-arm64-0.4.9.dmg" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157403987", + "id": 157403987, + "node_id": "RA_kwDOKIBx0s4JYctT", + "name": "jan-mac-arm64-0.4.9.dmg.blockmap", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": 
"https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/octet-stream", + "state": "uploaded", + "size": 139167, + "download_count": 1, + "created_at": "2024-03-19T04:10:15Z", + "updated_at": "2024-03-19T04:10:16Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/jan-mac-arm64-0.4.9.dmg.blockmap" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157404007", + "id": 157404007, + "node_id": "RA_kwDOKIBx0s4JYctn", + "name": "jan-mac-arm64-0.4.9.zip", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": 
"https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/zip", + "state": "uploaded", + "size": 128089843, + "download_count": 222, + "created_at": "2024-03-19T04:10:32Z", + "updated_at": "2024-03-19T04:10:46Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/jan-mac-arm64-0.4.9.zip" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157404006", + "id": 157404006, + "node_id": "RA_kwDOKIBx0s4JYctm", + "name": "jan-mac-arm64-0.4.9.zip.blockmap", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + 
"events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/octet-stream", + "state": "uploaded", + "size": 134845, + "download_count": 1, + "created_at": "2024-03-19T04:10:31Z", + "updated_at": "2024-03-19T04:10:32Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/jan-mac-arm64-0.4.9.zip.blockmap" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157404722", + "id": 157404722, + "node_id": "RA_kwDOKIBx0s4JYc4y", + "name": "jan-mac-x64-0.4.9.dmg", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/octet-stream", + "state": "uploaded", + "size": 139245048, + 
"download_count": 39, + "created_at": "2024-03-19T04:17:33Z", + "updated_at": "2024-03-19T04:17:37Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/jan-mac-x64-0.4.9.dmg" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157404721", + "id": 157404721, + "node_id": "RA_kwDOKIBx0s4JYc4x", + "name": "jan-mac-x64-0.4.9.dmg.blockmap", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/octet-stream", + "state": "uploaded", + "size": 147376, + "download_count": 2, + "created_at": "2024-03-19T04:17:33Z", + "updated_at": "2024-03-19T04:17:33Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/jan-mac-x64-0.4.9.dmg.blockmap" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157404738", + "id": 157404738, + "node_id": 
"RA_kwDOKIBx0s4JYc5C", + "name": "jan-mac-x64-0.4.9.zip", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/zip", + "state": "uploaded", + "size": 134752189, + "download_count": 40, + "created_at": "2024-03-19T04:17:52Z", + "updated_at": "2024-03-19T04:17:56Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/jan-mac-x64-0.4.9.zip" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157404739", + "id": 157404739, + "node_id": "RA_kwDOKIBx0s4JYc5D", + "name": "jan-mac-x64-0.4.9.zip.blockmap", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + 
"html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/octet-stream", + "state": "uploaded", + "size": 141832, + "download_count": 2, + "created_at": "2024-03-19T04:17:52Z", + "updated_at": "2024-03-19T04:17:52Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/jan-mac-x64-0.4.9.zip.blockmap" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157404826", + "id": 157404826, + "node_id": "RA_kwDOKIBx0s4JYc6a", + "name": "jan-win-x64-0.4.9.exe", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + 
"starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/octet-stream", + "state": "uploaded", + "size": 129440528, + "download_count": 1078, + "created_at": "2024-03-19T04:18:43Z", + "updated_at": "2024-03-19T04:18:46Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/jan-win-x64-0.4.9.exe" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157404825", + "id": 157404825, + "node_id": "RA_kwDOKIBx0s4JYc6Z", + "name": "jan-win-x64-0.4.9.exe.blockmap", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": 
"https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "application/octet-stream", + "state": "uploaded", + "size": 136498, + "download_count": 570, + "created_at": "2024-03-19T04:18:43Z", + "updated_at": "2024-03-19T04:18:43Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/jan-win-x64-0.4.9.exe.blockmap" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157403860", + "id": 157403860, + "node_id": "RA_kwDOKIBx0s4JYcrU", + "name": "latest-linux.yml", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "text/yaml", + "state": 
"uploaded", + "size": 540, + "download_count": 321, + "created_at": "2024-03-19T04:08:06Z", + "updated_at": "2024-03-19T04:08:06Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/latest-linux.yml" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157404835", + "id": 157404835, + "node_id": "RA_kwDOKIBx0s4JYc6j", + "name": "latest-mac.yml", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "text/yaml", + "state": "uploaded", + "size": 842, + "download_count": 743, + "created_at": "2024-03-19T04:18:53Z", + "updated_at": "2024-03-19T04:18:53Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/latest-mac.yml" + }, + { + "url": "https://api.github.com/repos/janhq/jan/releases/assets/157404832", + "id": 157404832, + "node_id": "RA_kwDOKIBx0s4JYc6g", + 
"name": "latest.yml", + "label": "", + "uploader": { + "login": "github-actions[bot]", + "id": 41898282, + "node_id": "MDM6Qm90NDE4OTgyODI=", + "avatar_url": "https://avatars.githubusercontent.com/in/15368?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/github-actions%5Bbot%5D", + "html_url": "https://github.com/apps/github-actions", + "followers_url": "https://api.github.com/users/github-actions%5Bbot%5D/followers", + "following_url": "https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}", + "gists_url": "https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}", + "starred_url": "https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/github-actions%5Bbot%5D/subscriptions", + "organizations_url": "https://api.github.com/users/github-actions%5Bbot%5D/orgs", + "repos_url": "https://api.github.com/users/github-actions%5Bbot%5D/repos", + "events_url": "https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}", + "received_events_url": "https://api.github.com/users/github-actions%5Bbot%5D/received_events", + "type": "Bot", + "site_admin": false + }, + "content_type": "text/yaml", + "state": "uploaded", + "size": 339, + "download_count": 1890, + "created_at": "2024-03-19T04:18:46Z", + "updated_at": "2024-03-19T04:18:46Z", + "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.9/latest.yml" + } + ], + "tarball_url": "https://api.github.com/repos/janhq/jan/tarball/v0.4.9", + "zipball_url": "https://api.github.com/repos/janhq/jan/zipball/v0.4.9", + "body": "## Changes\r\n\r\n- Release/v0.4.9 @namchuai (#2421)\r\n- Release cut 0.4.9 @louis-jan (#2398)\r\n- Update models.json @louis-jan (#2382)\r\n- docs: add about/faq @eckartal (#2329)\r\n- Update docs 14th mar @dan-jan (#2362)\r\n- feat: Nitro-Tensorrt-LLM Extension @louis-jan (#2280)\r\n- Sync release 0.4.8 to dev @louis-jan (#2297)\r\n\r\n## 🚀 
Features\r\n\r\n- feat: quick ask support dark mode @urmauur (#2316)\r\n\r\n## 🐛 Fixes\r\n\r\n- fix: move tensorrt executable to engine @namchuai (#2400)\r\n- fix: unable to start web with quick ask @namchuai (#2402)\r\n- fix: fail-fast would cancel matrix jobs as soon as one job fails @louis-jan (#2401)\r\n- fix: clean working dir with long space support on Windows @louis-jan (#2399)\r\n- fix: line height typography blog and docs @urmauur (#2390)\r\n- fix: some regressions for tensorrt nightly build @namchuai (#2380)\r\n- fix: use model from model hub not load correct model in thread screen @namchuai (#2368)\r\n- fix: wrong engine handling @louis-jan (#2363)\r\n- fix: incompatible GPU error message @louis-jan (#2357)\r\n- fix: app does not recognize GPU first launch @louis-jan (#2350)\r\n- fix: disable rag \\& stream settings from tensorrt llm model.json @louis-jan (#2351)\r\n- fix: badge or progress tensorRtExtensionItem @urmauur (#2349)\r\n- fix: ts error when declar var in case @namchuai (#2348)\r\n- fix: some costmetic issues: badges corner, recommended for tensorrt models @namchuai (#2346)\r\n- Add icon file contain image size in file name to fix linux icon @hiento09 (#2344)\r\n- fix: put quick ask feature toggle under experimental feature @louis-jan (#2338)\r\n- fix: do not migrate extensions from quick ask window @louis-jan (#2336)\r\n- fix: existing changelog @hieu-jan (#2330)\r\n- fix: gate quick ask with feature toggle @louis-jan (#2331)\r\n- fix: quick app windows, tray and dock behaviors @louis-jan (#2327)\r\n- fix: jan app tray blocks app update @louis-jan (#2319)\r\n- fix: quick ask not show @louis-jan (#2315)\r\n- fix: quick ask blocks app update @louis-jan (#2310)\r\n- fix: message from quick ask not get the selected model @namchuai (#2307)\r\n- fix: replace robotjs by nutjs (#2295) @louis-jan (#2302)\r\n\r\n## 🧰 Maintenance\r\n\r\n- docs: Update sidebar and content for http proxy and import model @aindrajaya (#2328)\r\n- chore: temporary remove 
linux from tensorrt support @namchuai (#2386)\r\n- docs: Update broken-build.mdx @0xSage (#2385)\r\n- docs: sync updated content to main page @hieu-jan (#2384)\r\n- docs: fix broken link by redirecting to the right paths @aindrajaya (#2381)\r\n- docs: api reference 2.0 @aindrajaya (#2367)\r\n- docs: bump changelog v0.4.8 @hieu-jan (#2366)\r\n- docs: trt-llm extension guides @0xSage (#2353)\r\n- docs: enhance autogenerate changelog configuration @hieu-jan (#2289)\r\n- docs: update website-docs content @hieu-jan (#2287)\r\n- docs: update slogan @hieu-jan (#2282)\r\n- docs: sync updated content from dev to docs branch @hieu-jan (#2283)\r\n- docs: Fix install slug and fix navbar style in darkmode @aindrajaya (#2306)\r\n\r\n## Contributor\r\n\r\n@0xSage, @aindrajaya, @dan-jan, @eckartal, @hiento09, @hieu-jan, @jan-service-account, @louis-jan, @namchuai and @urmauur\r\n", + "reactions": { + "url": "https://api.github.com/repos/janhq/jan/releases/147163406/reactions", + "total_count": 1, + "+1": 1, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "mentions_count": 10 + }, { "url": "https://api.github.com/repos/janhq/jan/releases/145763492", "assets_url": "https://api.github.com/repos/janhq/jan/releases/145763492/assets", @@ -64,7 +627,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 110060688, - "download_count": 487, + "download_count": 852, "created_at": "2024-03-11T06:08:19Z", "updated_at": "2024-03-11T06:08:21Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/jan-linux-amd64-0.4.8.deb" @@ -98,7 +661,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 145793120, - "download_count": 355, + "download_count": 735, "created_at": "2024-03-11T06:07:03Z", "updated_at": "2024-03-11T06:07:06Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/jan-linux-x86_64-0.4.8.AppImage" @@ -132,7 +695,7 @@ 
"content_type": "application/octet-stream", "state": "uploaded", "size": 121575422, - "download_count": 666, + "download_count": 1258, "created_at": "2024-03-11T06:16:32Z", "updated_at": "2024-03-11T06:16:43Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/jan-mac-arm64-0.4.8.dmg" @@ -166,7 +729,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 128586, - "download_count": 2, + "download_count": 3, "created_at": "2024-03-11T06:16:32Z", "updated_at": "2024-03-11T06:16:33Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/jan-mac-arm64-0.4.8.dmg.blockmap" @@ -200,7 +763,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 117287741, - "download_count": 778, + "download_count": 1167, "created_at": "2024-03-11T06:16:48Z", "updated_at": "2024-03-11T06:17:07Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/jan-mac-arm64-0.4.8.zip" @@ -268,7 +831,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 128115024, - "download_count": 260, + "download_count": 478, "created_at": "2024-03-11T06:14:43Z", "updated_at": "2024-03-11T06:14:49Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/jan-mac-x64-0.4.8.dmg" @@ -302,7 +865,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 135139, - "download_count": 2, + "download_count": 3, "created_at": "2024-03-11T06:14:43Z", "updated_at": "2024-03-11T06:14:43Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/jan-mac-x64-0.4.8.dmg.blockmap" @@ -336,7 +899,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 123950755, - "download_count": 132, + "download_count": 201, "created_at": "2024-03-11T06:15:11Z", "updated_at": "2024-03-11T06:15:17Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/jan-mac-x64-0.4.8.zip" @@ -404,7 +967,7 @@ 
"content_type": "application/octet-stream", "state": "uploaded", "size": 119749864, - "download_count": 3852, + "download_count": 7280, "created_at": "2024-03-11T06:15:48Z", "updated_at": "2024-03-11T06:15:52Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/jan-win-x64-0.4.8.exe" @@ -438,7 +1001,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 127370, - "download_count": 1741, + "download_count": 3195, "created_at": "2024-03-11T06:15:48Z", "updated_at": "2024-03-11T06:15:48Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/jan-win-x64-0.4.8.exe.blockmap" @@ -472,7 +1035,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 540, - "download_count": 1385, + "download_count": 2719, "created_at": "2024-03-11T06:08:22Z", "updated_at": "2024-03-11T06:08:22Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/latest-linux.yml" @@ -506,7 +1069,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 842, - "download_count": 3208, + "download_count": 5959, "created_at": "2024-03-11T06:18:08Z", "updated_at": "2024-03-11T06:18:08Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/latest-mac.yml" @@ -540,7 +1103,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 339, - "download_count": 7760, + "download_count": 15868, "created_at": "2024-03-11T06:15:52Z", "updated_at": "2024-03-11T06:15:52Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.8/latest.yml" @@ -551,13 +1114,13 @@ "body": "## Changes\r\n\r\n- Release cut v0.4.8 @louis-jan (#2267)\r\n- Add modify notary team in CI @hiento09 (#2265)\r\n- Chore: Update new models to model hub @hahuyhoang411 (#2192)\r\n- Macos Notarize migrage to new Team ID @hiento09 (#2228)\r\n- docs: update API Reference assistants\\_id endpoint from DevDocs @avb-is-me (#2195)\r\n- docs: update API Reference assistants endpoint from DevDocs 
@avb-is-me (#2194)\r\n- docs: update API Reference threads endpoint from DevDocs @avb-is-me (#2182)\r\n- fix: wrong profile parameter in docker command @mooncool (#2159)\r\n- Sync release 0.4.7 to dev @louis-jan (#2151)\r\n- docs: add upstream acknowledgements @hieu-jan (#2136)\r\n- Sync dev branch to docs branch @hieu-jan (#2131)\r\n\r\n## 🚀 Features\r\n\r\n- feat: prompt user to download an update manually @louis-jan (#2261)\r\n- feat: Jan can see @hiro-v (#2069)\r\n- Revert feat: temporary remove dark mode @urmauur (#2221)\r\n- feat: add turborepo @louis-jan (#2220)\r\n- fix: change button import model on hub page @urmauur (#2178)\r\n- feat: temporary remove dark mode :( @urmauur (#2168)\r\n- feat: add import model feature @namchuai (#2104)\r\n- feat: restore docusaurus style @urmauur (#2152)\r\n- feat: add a simple way to convert Hugging Face model to GGUF @Helloyunho (#1972)\r\n\r\n## 🐛 Fixes\r\n\r\n- codesign script force sign @hiento09 (#2291)\r\n- fix: should not attach error messages to the completion request @louis-jan (#2258)\r\n- fix: image upload button and drag event are not enabled @louis-jan (#2248)\r\n- fix: error message being sent along with conversation when inference @namchuai (#2242)\r\n- fix: replaced user path from app log @namchuai (#2238)\r\n- fix: drag and drop support image format to support vision model @urmauur (#2237)\r\n- fix: re-configure changelog sections @hieu-jan (#2230)\r\n- fix: import from HuggingFace with random string is causing app crash @louis-jan (#2214)\r\n- fix: comment from QA regarding import model @namchuai (#2213)\r\n- fix: download model error does not reset state in model hub @namchuai (#2199)\r\n- fix: minor ui missing secondary background @urmauur (#2198)\r\n- docs: update docker command @hieu-jan (#2180)\r\n- fix: some bugs for import model @namchuai (#2181)\r\n- fix: change button import model on hub page @urmauur (#2178)\r\n- fix space between progress bar and title list of gpu @urmauur (#2177)\r\n- fix: 
disabled prompt user using dangerouslySetInnerHTML @urmauur (#2176)\r\n- fix: style list of gpus on system monitor @urmauur (#2172)\r\n- fix: system monitor expand overlap tooltip ribbon @urmauur (#2158)\r\n- Huggingface extension add codesign step for building on darwin @hiento09 (#2166)\r\n- Add run codesign for huggingface extension @hiento09 (#2163)\r\n- fix: system monitor ui @urmauur (#2135)\r\n\r\n## 🧰 Maintenance\r\n\r\n- chore: temporary remove convert model @namchuai (#2266)\r\n- docs: sync slug fix from dev branch to docs branch @hieu-jan (#2264)\r\n- docs: Update broken link and fix the slug @aindrajaya (#2260)\r\n- docs: Fix navbar issues. Keep stay when clicked other menu items from the sidebar @aindrajaya (#2253)\r\n- docs: sync docs hub fixes from dev to docs branch @hieu-jan (#2247)\r\n- docs: Update content for Hub page and Guides section @aindrajaya (#2245)\r\n- docs: Fix Dark Mode on the Hub page and Update the Navbar functionality @aindrajaya (#2243)\r\n- chore: sync dev branch to docs branch @hieu-jan (#2239)\r\n- Chore: add prefix latest for task clean r2 bucket @hiento09 (#2233)\r\n- fix: re-configure changelog sections @hieu-jan (#2230)\r\n- docs: add command run API server without frontend @hieu-jan (#2231)\r\n- docs: revamp entire Jan guides @hieu-jan (#2139)\r\n- chore: clean up some redundant code @namchuai (#2215)\r\n- docs: update API Reference chatCompletions from DevDocs @avb-is-me (#2171)\r\n- docs: update API Reference download model from DevDocs @avb-is-me (#2170)\r\n- docs: update API Reference model\\_id from DevDocs @avb-is-me (#2169)\r\n- docs: update API Reference listModel from DevDocs @avb-is-me (#2161)\r\n- docs: Update 08-antivirus-compatibility-testing.md @0xSage (#2186)\r\n- docs: adding new feature for v0.4.7 to release checklist @Van-QA (#2189)\r\n- docs: Update 01-integrate-continue.mdx @0xSage (#2187)\r\n- chore: bump nitro 0.3.14 @louis-jan (#2183)\r\n- docs: Sync dev branch to docs branch @hieu-jan (#2185)\r\n- 
docs: update docker command @hieu-jan (#2180)\r\n- docs: update wall of love @hieu-jan (#2179)\r\n- docs: add Jan newsletter @hieu-jan (#2174)\r\n- chore: make convert gguf as experimental feature @namchuai (#2156)\r\n- docs: update acknowledgements @hieu-jan (#2147)\r\n- feat: restore docusaurus style @urmauur (#2152)\r\n- docs: update run Jan in Docker mode @hieu-jan (#2150)\r\n- Docs pena team - Add Quickstart Docs @aindrajaya (#2138)\r\n- docs: hide incomplete pages @hieu-jan (#2127)\r\n\r\n## Contributor\r\n\r\n@0xSage, @Helloyunho, @Van-QA, @aindrajaya, @avb-is-me, @hahuyhoang411, @hiento09, @hieu-jan, @hiro-v, @jan-service-account, @louis-jan, @mooncool, @namchuai and @urmauur\r\n", "reactions": { "url": "https://api.github.com/repos/janhq/jan/releases/145763492/reactions", - "total_count": 5, + "total_count": 8, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, - "heart": 5, + "heart": 8, "rocket": 0, "eyes": 0 }, @@ -627,7 +1190,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 100168358, - "download_count": 1492, + "download_count": 1494, "created_at": "2024-02-26T02:39:48Z", "updated_at": "2024-02-26T02:39:51Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/jan-linux-amd64-0.4.7.deb" @@ -661,7 +1224,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 135683130, - "download_count": 1323, + "download_count": 1329, "created_at": "2024-02-26T02:38:38Z", "updated_at": "2024-02-26T02:38:42Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/jan-linux-x86_64-0.4.7.AppImage" @@ -695,7 +1258,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 116705772, - "download_count": 2655, + "download_count": 2658, "created_at": "2024-02-26T02:41:58Z", "updated_at": "2024-02-26T02:42:09Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/jan-mac-arm64-0.4.7.dmg" @@ -729,7 +1292,7 @@ 
"content_type": "application/octet-stream", "state": "uploaded", "size": 124328, - "download_count": 4, + "download_count": 5, "created_at": "2024-02-26T02:41:58Z", "updated_at": "2024-02-26T02:41:59Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/jan-mac-arm64-0.4.7.dmg.blockmap" @@ -763,7 +1326,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 112429002, - "download_count": 1568, + "download_count": 1569, "created_at": "2024-02-26T02:42:14Z", "updated_at": "2024-02-26T02:42:30Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/jan-mac-arm64-0.4.7.zip" @@ -797,7 +1360,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 117816, - "download_count": 4, + "download_count": 5, "created_at": "2024-02-26T02:42:14Z", "updated_at": "2024-02-26T02:42:15Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/jan-mac-arm64-0.4.7.zip.blockmap" @@ -865,7 +1428,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 130493, - "download_count": 3, + "download_count": 4, "created_at": "2024-02-26T02:45:43Z", "updated_at": "2024-02-26T02:45:43Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/jan-mac-x64-0.4.7.dmg.blockmap" @@ -899,7 +1462,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 119095882, - "download_count": 328, + "download_count": 329, "created_at": "2024-02-26T02:45:59Z", "updated_at": "2024-02-26T02:46:04Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/jan-mac-x64-0.4.7.zip" @@ -967,7 +1530,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 109668960, - "download_count": 14681, + "download_count": 14695, "created_at": "2024-02-26T02:48:10Z", "updated_at": "2024-02-26T02:48:12Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/jan-win-x64-0.4.7.exe" @@ -1001,7 +1564,7 @@ 
"content_type": "application/octet-stream", "state": "uploaded", "size": 116340, - "download_count": 5853, + "download_count": 6509, "created_at": "2024-02-26T02:48:10Z", "updated_at": "2024-02-26T02:48:10Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/jan-win-x64-0.4.7.exe.blockmap" @@ -1035,7 +1598,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 540, - "download_count": 4866, + "download_count": 4867, "created_at": "2024-02-26T02:39:52Z", "updated_at": "2024-02-26T02:39:52Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/latest-linux.yml" @@ -1069,7 +1632,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 842, - "download_count": 11436, + "download_count": 11437, "created_at": "2024-02-26T02:47:00Z", "updated_at": "2024-02-26T02:47:00Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.7/latest-mac.yml" @@ -1190,7 +1753,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 122112210, - "download_count": 2420, + "download_count": 2421, "created_at": "2024-02-05T08:58:35Z", "updated_at": "2024-02-05T08:58:37Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/jan-linux-amd64-0.4.6.deb" @@ -1224,7 +1787,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 161255742, - "download_count": 2193, + "download_count": 2197, "created_at": "2024-02-05T08:57:24Z", "updated_at": "2024-02-05T08:57:27Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/jan-linux-x86_64-0.4.6.AppImage" @@ -1292,7 +1855,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 157046, - "download_count": 14, + "download_count": 15, "created_at": "2024-02-05T09:12:39Z", "updated_at": "2024-02-05T09:12:40Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/jan-mac-arm64-0.4.6.dmg.blockmap" @@ -1326,7 +1889,7 @@ 
"content_type": "application/zip", "state": "uploaded", "size": 144942589, - "download_count": 1718, + "download_count": 1719, "created_at": "2024-02-05T09:12:56Z", "updated_at": "2024-02-05T09:13:09Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/jan-mac-arm64-0.4.6.zip" @@ -1360,7 +1923,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 151798, - "download_count": 9, + "download_count": 10, "created_at": "2024-02-05T09:12:56Z", "updated_at": "2024-02-05T09:12:57Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/jan-mac-arm64-0.4.6.zip.blockmap" @@ -1394,7 +1957,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 156182072, - "download_count": 2041, + "download_count": 2042, "created_at": "2024-02-05T09:07:20Z", "updated_at": "2024-02-05T09:07:25Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/jan-mac-x64-0.4.6.dmg" @@ -1428,7 +1991,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 164549, - "download_count": 13, + "download_count": 14, "created_at": "2024-02-05T09:07:20Z", "updated_at": "2024-02-05T09:07:20Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/jan-mac-x64-0.4.6.dmg.blockmap" @@ -1530,7 +2093,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 136684856, - "download_count": 29516, + "download_count": 29522, "created_at": "2024-02-05T09:05:31Z", "updated_at": "2024-02-05T09:05:36Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/jan-win-x64-0.4.6.exe" @@ -1564,7 +2127,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 145259, - "download_count": 8189, + "download_count": 8542, "created_at": "2024-02-05T09:05:31Z", "updated_at": "2024-02-05T09:05:31Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/jan-win-x64-0.4.6.exe.blockmap" 
@@ -1598,7 +2161,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 540, - "download_count": 8082, + "download_count": 8083, "created_at": "2024-02-05T08:58:38Z", "updated_at": "2024-02-05T08:58:38Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/latest-linux.yml" @@ -1632,7 +2195,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 842, - "download_count": 18034, + "download_count": 18035, "created_at": "2024-02-05T09:14:20Z", "updated_at": "2024-02-05T09:14:20Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/latest-mac.yml" @@ -1666,7 +2229,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 339, - "download_count": 61911, + "download_count": 61912, "created_at": "2024-02-05T09:05:37Z", "updated_at": "2024-02-05T09:05:37Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.6/latest.yml" @@ -1753,7 +2316,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 100526314, - "download_count": 1107, + "download_count": 1108, "created_at": "2024-01-29T04:42:56Z", "updated_at": "2024-01-29T04:42:59Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.5/jan-linux-amd64-0.4.5.deb" @@ -1855,7 +2418,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 133835, - "download_count": 3, + "download_count": 4, "created_at": "2024-01-29T05:04:02Z", "updated_at": "2024-01-29T05:04:02Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.5/jan-mac-arm64-0.4.5.dmg.blockmap" @@ -1889,7 +2452,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 122951194, - "download_count": 1062, + "download_count": 1063, "created_at": "2024-01-29T05:04:18Z", "updated_at": "2024-01-29T05:04:23Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.5/jan-mac-arm64-0.4.5.zip" @@ -1923,7 +2486,7 @@ "content_type": "application/octet-stream", 
"state": "uploaded", "size": 129367, - "download_count": 3, + "download_count": 4, "created_at": "2024-01-29T05:04:18Z", "updated_at": "2024-01-29T05:04:18Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.5/jan-mac-arm64-0.4.5.zip.blockmap" @@ -1957,7 +2520,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 134030913, - "download_count": 642, + "download_count": 643, "created_at": "2024-01-29T05:00:45Z", "updated_at": "2024-01-29T05:00:52Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.5/jan-mac-x64-0.4.5.dmg" @@ -2025,7 +2588,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 129562996, - "download_count": 189, + "download_count": 190, "created_at": "2024-01-29T05:01:35Z", "updated_at": "2024-01-29T05:01:41Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.5/jan-mac-x64-0.4.5.zip" @@ -2059,7 +2622,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 135459, - "download_count": 4, + "download_count": 5, "created_at": "2024-01-29T05:01:35Z", "updated_at": "2024-01-29T05:01:35Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.5/jan-mac-x64-0.4.5.zip.blockmap" @@ -2093,7 +2656,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 112164048, - "download_count": 9315, + "download_count": 9316, "created_at": "2024-01-29T04:51:58Z", "updated_at": "2024-01-29T04:52:00Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.5/jan-win-x64-0.4.5.exe" @@ -2127,7 +2690,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 119750, - "download_count": 5169, + "download_count": 5239, "created_at": "2024-01-29T04:51:58Z", "updated_at": "2024-01-29T04:51:58Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.5/jan-win-x64-0.4.5.exe.blockmap" @@ -2195,7 +2758,7 @@ "content_type": "text/yaml", 
"state": "uploaded", "size": 799, - "download_count": 6003, + "download_count": 6004, "created_at": "2024-01-29T05:04:24Z", "updated_at": "2024-01-29T05:04:24Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.5/latest-mac.yml" @@ -2229,7 +2792,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 339, - "download_count": 17195, + "download_count": 17196, "created_at": "2024-01-29T04:52:00Z", "updated_at": "2024-01-29T04:52:01Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.5/latest.yml" @@ -2316,7 +2879,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 100113418, - "download_count": 2704, + "download_count": 2705, "created_at": "2024-01-16T01:43:11Z", "updated_at": "2024-01-16T01:43:13Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/jan-linux-amd64-0.4.4.deb" @@ -2350,7 +2913,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 139077362, - "download_count": 2501, + "download_count": 2502, "created_at": "2024-01-16T01:41:56Z", "updated_at": "2024-01-16T01:41:59Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/jan-linux-x86_64-0.4.4.AppImage" @@ -2384,7 +2947,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 127211966, - "download_count": 4863, + "download_count": 4864, "created_at": "2024-01-16T01:52:32Z", "updated_at": "2024-01-16T01:52:37Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/jan-mac-arm64-0.4.4.dmg" @@ -2418,7 +2981,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 132898, - "download_count": 6, + "download_count": 7, "created_at": "2024-01-16T01:52:32Z", "updated_at": "2024-01-16T01:52:32Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/jan-mac-arm64-0.4.4.dmg.blockmap" @@ -2520,7 +3083,7 @@ "content_type": "application/octet-stream", "state": 
"uploaded", "size": 133785404, - "download_count": 1929, + "download_count": 1930, "created_at": "2024-01-16T01:49:55Z", "updated_at": "2024-01-16T01:50:00Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/jan-mac-x64-0.4.4.dmg" @@ -2554,7 +3117,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 140010, - "download_count": 5, + "download_count": 6, "created_at": "2024-01-16T01:49:55Z", "updated_at": "2024-01-16T01:49:55Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/jan-mac-x64-0.4.4.dmg.blockmap" @@ -2588,7 +3151,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 129349430, - "download_count": 274, + "download_count": 275, "created_at": "2024-01-16T01:48:29Z", "updated_at": "2024-01-16T01:48:35Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/jan-mac-x64-0.4.4.zip" @@ -2622,7 +3185,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 135543, - "download_count": 5, + "download_count": 6, "created_at": "2024-01-16T01:48:29Z", "updated_at": "2024-01-16T01:48:29Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/jan-mac-x64-0.4.4.zip.blockmap" @@ -2656,7 +3219,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 111766336, - "download_count": 23199, + "download_count": 23208, "created_at": "2024-01-16T01:49:06Z", "updated_at": "2024-01-16T01:49:10Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/jan-win-x64-0.4.4.exe" @@ -2690,7 +3253,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 118550, - "download_count": 7013, + "download_count": 7136, "created_at": "2024-01-16T01:49:11Z", "updated_at": "2024-01-16T01:49:11Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/jan-win-x64-0.4.4.exe.blockmap" @@ -2724,7 +3287,7 @@ "content_type": "text/yaml", "state": 
"uploaded", "size": 540, - "download_count": 6803, + "download_count": 6804, "created_at": "2024-01-16T01:43:13Z", "updated_at": "2024-01-16T01:43:13Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/latest-linux.yml" @@ -2758,7 +3321,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 799, - "download_count": 10714, + "download_count": 10715, "created_at": "2024-01-16T01:52:44Z", "updated_at": "2024-01-16T01:52:45Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/latest-mac.yml" @@ -2792,7 +3355,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 339, - "download_count": 25722, + "download_count": 25723, "created_at": "2024-01-16T01:49:12Z", "updated_at": "2024-01-16T01:49:12Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.4/latest.yml" @@ -2879,7 +3442,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 95840002, - "download_count": 5036, + "download_count": 5037, "created_at": "2023-12-21T14:11:45Z", "updated_at": "2023-12-21T14:11:49Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/jan-linux-amd64-0.4.3.deb" @@ -2913,7 +3476,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 121463938, - "download_count": 9215, + "download_count": 9217, "created_at": "2023-12-21T14:19:40Z", "updated_at": "2023-12-21T14:19:45Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/jan-mac-arm64-0.4.3.dmg" @@ -2981,7 +3544,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 117347980, - "download_count": 127, + "download_count": 128, "created_at": "2023-12-21T14:19:44Z", "updated_at": "2023-12-21T14:19:49Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/jan-mac-arm64-0.4.3.zip" @@ -3015,7 +3578,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 120628, - "download_count": 7, + 
"download_count": 8, "created_at": "2023-12-21T14:19:43Z", "updated_at": "2023-12-21T14:19:44Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/jan-mac-arm64-0.4.3.zip.blockmap" @@ -3049,7 +3612,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 128025547, - "download_count": 3360, + "download_count": 3363, "created_at": "2023-12-21T14:16:54Z", "updated_at": "2023-12-21T14:17:00Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/jan-mac-x64-0.4.3.dmg" @@ -3083,7 +3646,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 132736, - "download_count": 8, + "download_count": 9, "created_at": "2023-12-21T14:16:54Z", "updated_at": "2023-12-21T14:16:54Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/jan-mac-x64-0.4.3.dmg.blockmap" @@ -3117,7 +3680,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 123959766, - "download_count": 83, + "download_count": 84, "created_at": "2023-12-21T14:17:03Z", "updated_at": "2023-12-21T14:17:09Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/jan-mac-x64-0.4.3.zip" @@ -3151,7 +3714,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 128011, - "download_count": 8, + "download_count": 9, "created_at": "2023-12-21T14:17:03Z", "updated_at": "2023-12-21T14:17:03Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/jan-mac-x64-0.4.3.zip.blockmap" @@ -3185,7 +3748,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 105609992, - "download_count": 28147, + "download_count": 28157, "created_at": "2023-12-21T14:18:19Z", "updated_at": "2023-12-21T14:18:22Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/jan-win-x64-0.4.3.exe" @@ -3219,7 +3782,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 110786, - 
"download_count": 4082, + "download_count": 4150, "created_at": "2023-12-21T14:18:23Z", "updated_at": "2023-12-21T14:18:23Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/jan-win-x64-0.4.3.exe.blockmap" @@ -3253,7 +3816,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 346, - "download_count": 5273, + "download_count": 5274, "created_at": "2023-12-21T14:11:49Z", "updated_at": "2023-12-21T14:11:49Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/latest-linux.yml" @@ -3287,7 +3850,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 799, - "download_count": 15929, + "download_count": 15930, "created_at": "2023-12-21T14:19:49Z", "updated_at": "2023-12-21T14:19:50Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/latest-mac.yml" @@ -3321,7 +3884,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 339, - "download_count": 29750, + "download_count": 29751, "created_at": "2023-12-21T14:18:24Z", "updated_at": "2023-12-21T14:18:24Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.3/latest.yml" @@ -3408,7 +3971,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 95879008, - "download_count": 118, + "download_count": 119, "created_at": "2023-12-15T14:14:29Z", "updated_at": "2023-12-15T14:14:32Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.2/jan-linux-amd64-0.4.2.deb" @@ -3442,7 +4005,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 119581861, - "download_count": 138, + "download_count": 139, "created_at": "2023-12-15T14:27:06Z", "updated_at": "2023-12-15T14:27:12Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.2/jan-mac-arm64-0.4.2.dmg" @@ -3510,7 +4073,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 115488941, - "download_count": 13, + "download_count": 14, "created_at": 
"2023-12-15T14:28:07Z", "updated_at": "2023-12-15T14:28:13Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.2/jan-mac-arm64-0.4.2.zip" @@ -3544,7 +4107,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 120796, - "download_count": 1, + "download_count": 2, "created_at": "2023-12-15T14:28:07Z", "updated_at": "2023-12-15T14:28:08Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.2/jan-mac-arm64-0.4.2.zip.blockmap" @@ -3646,7 +4209,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 122100717, - "download_count": 5, + "download_count": 6, "created_at": "2023-12-15T14:22:45Z", "updated_at": "2023-12-15T14:22:50Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.2/jan-mac-x64-0.4.2.zip" @@ -3714,7 +4277,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 103875992, - "download_count": 373, + "download_count": 374, "created_at": "2023-12-15T14:19:37Z", "updated_at": "2023-12-15T14:19:41Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.2/jan-win-x64-0.4.2.exe" @@ -3748,7 +4311,7 @@ "content_type": "text/xml", "state": "uploaded", "size": 110511, - "download_count": 217, + "download_count": 221, "created_at": "2023-12-15T14:19:41Z", "updated_at": "2023-12-15T14:19:42Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.2/jan-win-x64-0.4.2.exe.blockmap" @@ -3782,7 +4345,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 346, - "download_count": 152, + "download_count": 153, "created_at": "2023-12-15T14:14:32Z", "updated_at": "2023-12-15T14:14:32Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.2/latest-linux.yml" @@ -3816,7 +4379,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 799, - "download_count": 315, + "download_count": 316, "created_at": "2023-12-15T14:28:14Z", "updated_at": 
"2023-12-15T14:28:14Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.2/latest-mac.yml" @@ -3850,7 +4413,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 339, - "download_count": 536, + "download_count": 537, "created_at": "2023-12-15T14:19:43Z", "updated_at": "2023-12-15T14:19:43Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.2/latest.yml" @@ -3925,7 +4488,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 93269080, - "download_count": 42, + "download_count": 43, "created_at": "2023-12-14T02:35:58Z", "updated_at": "2023-12-14T02:36:01Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.1/jan-linux-amd64-0.4.1.deb" @@ -3959,7 +4522,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 116601237, - "download_count": 33, + "download_count": 34, "created_at": "2023-12-14T02:44:08Z", "updated_at": "2023-12-14T02:44:13Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.1/jan-mac-arm64-0.4.1.dmg" @@ -3993,7 +4556,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 121458, - "download_count": 1, + "download_count": 2, "created_at": "2023-12-14T02:44:08Z", "updated_at": "2023-12-14T02:44:09Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.1/jan-mac-arm64-0.4.1.dmg.blockmap" @@ -4061,7 +4624,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 118119, - "download_count": 1, + "download_count": 2, "created_at": "2023-12-14T02:45:00Z", "updated_at": "2023-12-14T02:45:00Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.1/jan-mac-arm64-0.4.1.zip.blockmap" @@ -4197,7 +4760,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 124617, - "download_count": 1, + "download_count": 2, "created_at": "2023-12-14T02:42:31Z", "updated_at": "2023-12-14T02:42:31Z", 
"browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.1/jan-mac-x64-0.4.1.zip.blockmap" @@ -4231,7 +4794,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 100741136, - "download_count": 108, + "download_count": 109, "created_at": "2023-12-14T02:42:30Z", "updated_at": "2023-12-14T02:42:32Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.1/jan-win-x64-0.4.1.exe" @@ -4333,7 +4896,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 799, - "download_count": 99, + "download_count": 100, "created_at": "2023-12-14T02:45:04Z", "updated_at": "2023-12-14T02:45:04Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.1/latest-mac.yml" @@ -4367,7 +4930,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 339, - "download_count": 125, + "download_count": 126, "created_at": "2023-12-14T02:42:34Z", "updated_at": "2023-12-14T02:42:35Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.1/latest.yml" @@ -4748,7 +5311,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 89974264, - "download_count": 119, + "download_count": 120, "created_at": "2023-12-06T09:53:10Z", "updated_at": "2023-12-06T09:53:13Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.0/jan-win-x64-0.4.0.exe" @@ -4782,7 +5345,7 @@ "content_type": "text/xml", "state": "uploaded", "size": 94542, - "download_count": 17, + "download_count": 18, "created_at": "2023-12-06T09:53:14Z", "updated_at": "2023-12-06T09:53:14Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.4.0/jan-win-x64-0.4.0.exe.blockmap" @@ -4993,7 +5556,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 102358840, - "download_count": 34, + "download_count": 35, "created_at": "2023-11-28T14:43:02Z", "updated_at": "2023-11-28T14:43:07Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.3.3/jan-mac-arm64-0.3.3.dmg" @@ -5061,7 +5624,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 98581298, - "download_count": 11, + "download_count": 12, "created_at": "2023-11-28T14:43:53Z", "updated_at": "2023-11-28T14:43:57Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.3/jan-mac-arm64-0.3.3.zip" @@ -5095,7 +5658,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 104552, - "download_count": 2, + "download_count": 3, "created_at": "2023-11-28T14:43:53Z", "updated_at": "2023-11-28T14:43:53Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.3/jan-mac-arm64-0.3.3.zip.blockmap" @@ -5129,7 +5692,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 107582920, - "download_count": 17, + "download_count": 18, "created_at": "2023-11-28T14:40:08Z", "updated_at": "2023-11-28T14:40:15Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.3/jan-mac-x64-0.3.3.dmg" @@ -5197,7 +5760,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 103886222, - "download_count": 4, + "download_count": 5, "created_at": "2023-11-28T14:41:16Z", "updated_at": "2023-11-28T14:41:21Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.3/jan-mac-x64-0.3.3.zip" @@ -5265,7 +5828,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 81157168, - "download_count": 101, + "download_count": 102, "created_at": "2023-11-28T14:35:42Z", "updated_at": "2023-11-28T14:35:45Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.3/jan-win-x64-0.3.3.exe" @@ -5299,7 +5862,7 @@ "content_type": "text/xml", "state": "uploaded", "size": 85522, - "download_count": 16, + "download_count": 17, "created_at": "2023-11-28T14:35:46Z", "updated_at": "2023-11-28T14:35:46Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.3.3/jan-win-x64-0.3.3.exe.blockmap" @@ -5333,7 +5896,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 346, - "download_count": 31, + "download_count": 32, "created_at": "2023-11-28T14:33:58Z", "updated_at": "2023-11-28T14:33:58Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.3/latest-linux.yml" @@ -5401,7 +5964,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 338, - "download_count": 121, + "download_count": 122, "created_at": "2023-11-28T14:35:47Z", "updated_at": "2023-11-28T14:35:47Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.3/latest.yml" @@ -5476,7 +6039,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 81940296, - "download_count": 21, + "download_count": 22, "created_at": "2023-11-15T06:33:57Z", "updated_at": "2023-11-15T06:33:59Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.2/jan-linux-amd64-0.3.2.deb" @@ -5510,7 +6073,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 102275617, - "download_count": 27, + "download_count": 28, "created_at": "2023-11-15T06:38:49Z", "updated_at": "2023-11-15T06:38:54Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.2/jan-mac-arm64-0.3.2.dmg" @@ -5544,7 +6107,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 106299, - "download_count": 1, + "download_count": 2, "created_at": "2023-11-15T06:38:49Z", "updated_at": "2023-11-15T06:38:49Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.2/jan-mac-arm64-0.3.2.dmg.blockmap" @@ -5578,7 +6141,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 98501600, - "download_count": 9, + "download_count": 10, "created_at": "2023-11-15T06:39:01Z", "updated_at": "2023-11-15T06:40:11Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.3.2/jan-mac-arm64-0.3.2.zip" @@ -5612,7 +6175,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 104215, - "download_count": 1, + "download_count": 2, "created_at": "2023-11-15T06:39:00Z", "updated_at": "2023-11-15T06:39:01Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.2/jan-mac-arm64-0.3.2.zip.blockmap" @@ -5646,7 +6209,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 107523862, - "download_count": 8, + "download_count": 9, "created_at": "2023-11-15T06:36:29Z", "updated_at": "2023-11-15T06:36:33Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.2/jan-mac-x64-0.3.2.dmg" @@ -5680,7 +6243,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 113475, - "download_count": 1, + "download_count": 2, "created_at": "2023-11-15T06:36:29Z", "updated_at": "2023-11-15T06:36:29Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.2/jan-mac-x64-0.3.2.dmg.blockmap" @@ -5748,7 +6311,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 110226, - "download_count": 2, + "download_count": 3, "created_at": "2023-11-15T06:36:56Z", "updated_at": "2023-11-15T06:36:57Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.2/jan-mac-x64-0.3.2.zip.blockmap" @@ -5850,7 +6413,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 346, - "download_count": 9, + "download_count": 10, "created_at": "2023-11-15T06:33:59Z", "updated_at": "2023-11-15T06:34:00Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.2/latest-linux.yml" @@ -5884,7 +6447,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 798, - "download_count": 69, + "download_count": 70, "created_at": "2023-11-15T06:40:12Z", "updated_at": "2023-11-15T06:40:12Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.3.2/latest-mac.yml" @@ -5918,7 +6481,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 338, - "download_count": 54, + "download_count": 55, "created_at": "2023-11-15T06:35:21Z", "updated_at": "2023-11-15T06:35:22Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.2/latest.yml" @@ -6061,7 +6624,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 116917, - "download_count": 2, + "download_count": 3, "created_at": "2023-11-10T10:37:02Z", "updated_at": "2023-11-10T10:37:03Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.1/jan-mac-arm64-0.3.1.dmg.blockmap" @@ -6095,7 +6658,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 107422060, - "download_count": 11, + "download_count": 12, "created_at": "2023-11-10T10:37:15Z", "updated_at": "2023-11-10T10:37:22Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.1/jan-mac-arm64-0.3.1.zip" @@ -6129,7 +6692,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 114462, - "download_count": 2, + "download_count": 3, "created_at": "2023-11-10T10:37:15Z", "updated_at": "2023-11-10T10:37:15Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.1/jan-mac-arm64-0.3.1.zip.blockmap" @@ -6163,7 +6726,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 116468631, - "download_count": 7, + "download_count": 8, "created_at": "2023-11-10T10:34:56Z", "updated_at": "2023-11-10T10:35:02Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.1/jan-mac-x64-0.3.1.dmg" @@ -6197,7 +6760,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 121262, - "download_count": 2, + "download_count": 3, "created_at": "2023-11-10T10:34:56Z", "updated_at": "2023-11-10T10:34:57Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.3.1/jan-mac-x64-0.3.1.dmg.blockmap" @@ -6231,7 +6794,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 112726975, - "download_count": 5, + "download_count": 6, "created_at": "2023-11-10T10:35:07Z", "updated_at": "2023-11-10T10:35:12Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.1/jan-mac-x64-0.3.1.zip" @@ -6265,7 +6828,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 119995, - "download_count": 1, + "download_count": 2, "created_at": "2023-11-10T10:35:07Z", "updated_at": "2023-11-10T10:35:08Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.1/jan-mac-x64-0.3.1.zip.blockmap" @@ -6299,7 +6862,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 81029196, - "download_count": 93, + "download_count": 94, "created_at": "2023-11-10T10:30:51Z", "updated_at": "2023-11-10T10:30:56Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.1/jan-win-x64-0.3.1.exe" @@ -6333,7 +6896,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 85734, - "download_count": 8, + "download_count": 9, "created_at": "2023-11-10T10:30:51Z", "updated_at": "2023-11-10T10:30:51Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.1/jan-win-x64-0.3.1.exe.blockmap" @@ -6367,7 +6930,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 346, - "download_count": 12, + "download_count": 13, "created_at": "2023-11-10T10:32:33Z", "updated_at": "2023-11-10T10:32:33Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.1/latest-linux.yml" @@ -6401,7 +6964,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 799, - "download_count": 216, + "download_count": 217, "created_at": "2023-11-10T10:37:23Z", "updated_at": "2023-11-10T10:37:23Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.3.1/latest-mac.yml" @@ -6435,7 +6998,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 338, - "download_count": 68, + "download_count": 69, "created_at": "2023-11-10T10:30:56Z", "updated_at": "2023-11-10T10:30:57Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.1/latest.yml" @@ -6510,7 +7073,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 95795282, - "download_count": 14, + "download_count": 15, "created_at": "2023-10-27T08:26:42Z", "updated_at": "2023-10-27T08:26:45Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.0/jan-linux-amd64-0.3.0.deb" @@ -6544,7 +7107,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 122527047, - "download_count": 46, + "download_count": 47, "created_at": "2023-10-27T08:39:24Z", "updated_at": "2023-10-27T08:39:29Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.0/jan-mac-arm64-0.3.0.dmg" @@ -6578,7 +7141,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 130885, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-27T08:39:24Z", "updated_at": "2023-10-27T08:39:25Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.0/jan-mac-arm64-0.3.0.dmg.blockmap" @@ -6612,7 +7175,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 118596061, - "download_count": 14, + "download_count": 15, "created_at": "2023-10-27T08:39:50Z", "updated_at": "2023-10-27T08:39:55Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.0/jan-mac-arm64-0.3.0.zip" @@ -6646,7 +7209,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 124846, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-27T08:39:50Z", "updated_at": "2023-10-27T08:39:50Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.3.0/jan-mac-arm64-0.3.0.zip.blockmap" @@ -6680,7 +7243,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 127747523, - "download_count": 24, + "download_count": 25, "created_at": "2023-10-27T08:36:45Z", "updated_at": "2023-10-27T08:36:51Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.0/jan-mac-x64-0.3.0.dmg" @@ -6714,7 +7277,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 134480, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-27T08:36:45Z", "updated_at": "2023-10-27T08:36:46Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.0/jan-mac-x64-0.3.0.dmg.blockmap" @@ -6782,7 +7345,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 131343, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-27T08:35:56Z", "updated_at": "2023-10-27T08:35:56Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.0/jan-mac-x64-0.3.0.zip.blockmap" @@ -6816,7 +7379,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 98469954, - "download_count": 52, + "download_count": 53, "created_at": "2023-10-27T08:30:52Z", "updated_at": "2023-10-27T08:30:55Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.0/jan-win-x64-0.3.0.exe" @@ -6850,7 +7413,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 103165, - "download_count": 8, + "download_count": 9, "created_at": "2023-10-27T08:30:52Z", "updated_at": "2023-10-27T08:30:53Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.0/jan-win-x64-0.3.0.exe.blockmap" @@ -6884,7 +7447,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 346, - "download_count": 79, + "download_count": 80, "created_at": "2023-10-27T08:26:45Z", "updated_at": "2023-10-27T08:26:45Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.3.0/latest-linux.yml" @@ -6918,7 +7481,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 799, - "download_count": 305, + "download_count": 306, "created_at": "2023-10-27T08:39:55Z", "updated_at": "2023-10-27T08:39:55Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.0/latest-mac.yml" @@ -6952,7 +7515,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 338, - "download_count": 114, + "download_count": 115, "created_at": "2023-10-27T08:30:55Z", "updated_at": "2023-10-27T08:30:55Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.3.0/latest.yml" @@ -7027,7 +7590,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 95796132, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-27T07:52:29Z", "updated_at": "2023-10-27T07:52:32Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.3/jan-linux-amd64-0.2.3.deb" @@ -7061,7 +7624,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 122527352, - "download_count": 12, + "download_count": 13, "created_at": "2023-10-27T08:04:29Z", "updated_at": "2023-10-27T08:04:35Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.3/jan-mac-arm64-0.2.3.dmg" @@ -7163,7 +7726,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 125183, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-27T08:05:09Z", "updated_at": "2023-10-27T08:05:09Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.3/jan-mac-arm64-0.2.3.zip.blockmap" @@ -7265,7 +7828,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 123901593, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-27T08:01:37Z", "updated_at": "2023-10-27T08:01:43Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.2.3/jan-mac-x64-0.2.3.zip" @@ -7299,7 +7862,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 131642, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-27T08:01:37Z", "updated_at": "2023-10-27T08:01:37Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.3/jan-mac-x64-0.2.3.zip.blockmap" @@ -7333,7 +7896,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 98472254, - "download_count": 11, + "download_count": 12, "created_at": "2023-10-27T07:55:16Z", "updated_at": "2023-10-27T07:55:19Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.3/jan-win-x64-0.2.3.exe" @@ -7367,7 +7930,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 102940, - "download_count": 9, + "download_count": 10, "created_at": "2023-10-27T07:55:16Z", "updated_at": "2023-10-27T07:55:17Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.3/jan-win-x64-0.2.3.exe.blockmap" @@ -7401,7 +7964,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 346, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-27T07:52:33Z", "updated_at": "2023-10-27T07:52:33Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.3/latest-linux.yml" @@ -7544,7 +8107,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 95810320, - "download_count": 16, + "download_count": 17, "created_at": "2023-10-26T10:47:31Z", "updated_at": "2023-10-26T10:47:33Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.2/jan-linux-amd64-0.2.2.deb" @@ -7578,7 +8141,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 122505953, - "download_count": 15, + "download_count": 16, "created_at": "2023-10-26T10:55:21Z", "updated_at": "2023-10-26T10:55:27Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.2.2/jan-mac-arm64-0.2.2.dmg" @@ -7748,7 +8311,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 134230, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-26T10:52:32Z", "updated_at": "2023-10-26T10:52:33Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.2/jan-mac-x64-0.2.2.dmg.blockmap" @@ -7816,7 +8379,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 131354, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-26T10:53:03Z", "updated_at": "2023-10-26T10:53:04Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.2/jan-mac-x64-0.2.2.zip.blockmap" @@ -7850,7 +8413,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 98467481, - "download_count": 7, + "download_count": 8, "created_at": "2023-10-26T10:52:08Z", "updated_at": "2023-10-26T10:52:10Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.2/jan-win-x64-0.2.2.exe" @@ -7918,7 +8481,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 346, - "download_count": 32, + "download_count": 33, "created_at": "2023-10-26T10:47:33Z", "updated_at": "2023-10-26T10:47:34Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.2/latest-linux.yml" @@ -7952,7 +8515,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 799, - "download_count": 67, + "download_count": 68, "created_at": "2023-10-26T10:55:47Z", "updated_at": "2023-10-26T10:55:47Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.2/latest-mac.yml" @@ -8061,7 +8624,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 97793320, - "download_count": 13, + "download_count": 14, "created_at": "2023-10-25T09:02:35Z", "updated_at": "2023-10-25T09:02:38Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.2.1/jan-linux-amd64-0.2.1.deb" @@ -8095,7 +8658,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 120490638, - "download_count": 12, + "download_count": 13, "created_at": "2023-10-25T09:15:35Z", "updated_at": "2023-10-25T09:15:40Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.1/jan-mac-arm64-0.2.1.dmg" @@ -8163,7 +8726,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 116580462, - "download_count": 4, + "download_count": 5, "created_at": "2023-10-25T09:15:58Z", "updated_at": "2023-10-25T09:16:02Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.1/jan-mac-arm64-0.2.1.zip" @@ -8197,7 +8760,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 122252, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-25T09:15:58Z", "updated_at": "2023-10-25T09:15:58Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.1/jan-mac-arm64-0.2.1.zip.blockmap" @@ -8231,7 +8794,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 125739334, - "download_count": 4, + "download_count": 5, "created_at": "2023-10-25T09:13:07Z", "updated_at": "2023-10-25T09:13:14Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.1/jan-mac-x64-0.2.1.dmg" @@ -8265,7 +8828,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 132524, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-25T09:13:07Z", "updated_at": "2023-10-25T09:13:08Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.1/jan-mac-x64-0.2.1.dmg.blockmap" @@ -8299,7 +8862,7 @@ "content_type": "application/zip", "state": "uploaded", "size": 121885377, - "download_count": 3, + "download_count": 4, "created_at": "2023-10-25T09:11:35Z", "updated_at": "2023-10-25T09:11:40Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.2.1/jan-mac-x64-0.2.1.zip" @@ -8333,7 +8896,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 129631, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-25T09:11:35Z", "updated_at": "2023-10-25T09:11:36Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.1/jan-mac-x64-0.2.1.zip.blockmap" @@ -8367,7 +8930,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 100456828, - "download_count": 5, + "download_count": 6, "created_at": "2023-10-25T09:05:10Z", "updated_at": "2023-10-25T09:05:15Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.1/jan-win-x64-0.2.1.exe" @@ -8435,7 +8998,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 346, - "download_count": 72, + "download_count": 73, "created_at": "2023-10-25T09:02:38Z", "updated_at": "2023-10-25T09:02:38Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.1/latest-linux.yml" @@ -8469,7 +9032,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 799, - "download_count": 103, + "download_count": 104, "created_at": "2023-10-25T09:16:03Z", "updated_at": "2023-10-25T09:16:03Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.1/latest-mac.yml" @@ -8503,7 +9066,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 339, - "download_count": 71, + "download_count": 72, "created_at": "2023-10-25T09:05:15Z", "updated_at": "2023-10-25T09:05:15Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.1/latest.yml" @@ -8578,7 +9141,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 94500286, - "download_count": 14, + "download_count": 15, "created_at": "2023-10-13T10:35:34Z", "updated_at": "2023-10-13T10:35:36Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.0/jan-linux-amd64-0.2.0.deb" @@ 
-8612,7 +9175,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 117364184, - "download_count": 36, + "download_count": 37, "created_at": "2023-10-13T10:42:56Z", "updated_at": "2023-10-13T10:42:59Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.0/jan-mac-arm64-0.2.0.dmg" @@ -8646,7 +9209,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 121696, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-13T10:42:56Z", "updated_at": "2023-10-13T10:42:56Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.0/jan-mac-arm64-0.2.0.dmg.blockmap" @@ -8714,7 +9277,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 120020, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-13T10:43:10Z", "updated_at": "2023-10-13T10:43:10Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.0/jan-mac-arm64-0.2.0.zip.blockmap" @@ -8748,7 +9311,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 122602134, - "download_count": 9, + "download_count": 10, "created_at": "2023-10-13T10:40:05Z", "updated_at": "2023-10-13T10:40:08Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.0/jan-mac-x64-0.2.0.dmg" @@ -8782,7 +9345,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 129839, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-13T10:40:05Z", "updated_at": "2023-10-13T10:40:06Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.0/jan-mac-x64-0.2.0.dmg.blockmap" @@ -8850,7 +9413,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 126330, - "download_count": 2, + "download_count": 3, "created_at": "2023-10-13T10:40:10Z", "updated_at": "2023-10-13T10:40:10Z", "browser_download_url": 
"https://github.com/janhq/jan/releases/download/v0.2.0/jan-mac-x64-0.2.0.zip.blockmap" @@ -8884,7 +9447,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 97037738, - "download_count": 41, + "download_count": 42, "created_at": "2023-10-13T10:39:41Z", "updated_at": "2023-10-13T10:39:45Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.0/jan-win-x64-0.2.0.exe" @@ -8918,7 +9481,7 @@ "content_type": "application/octet-stream", "state": "uploaded", "size": 102058, - "download_count": 7, + "download_count": 8, "created_at": "2023-10-13T10:39:41Z", "updated_at": "2023-10-13T10:39:41Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.0/jan-win-x64-0.2.0.exe.blockmap" @@ -8952,7 +9515,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 346, - "download_count": 325, + "download_count": 326, "created_at": "2023-10-13T10:35:36Z", "updated_at": "2023-10-13T10:35:37Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.0/latest-linux.yml" @@ -8986,7 +9549,7 @@ "content_type": "text/yaml", "state": "uploaded", "size": 799, - "download_count": 395, + "download_count": 396, "created_at": "2023-10-13T10:43:13Z", "updated_at": "2023-10-13T10:43:13Z", "browser_download_url": "https://github.com/janhq/jan/releases/download/v0.2.0/latest-mac.yml" diff --git a/docs/docs/releases/changelog/changelog-v0.2.0.mdx b/docs/docs/releases/changelog/changelog-v0.2.0.mdx index 55a64bc48..5884db762 100644 --- a/docs/docs/releases/changelog/changelog-v0.2.0.mdx +++ b/docs/docs/releases/changelog/changelog-v0.2.0.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 17 +sidebar_position: 18 slug: /changelog/changelog-v0.2.0 --- # v0.2.0 diff --git a/docs/docs/releases/changelog/changelog-v0.2.1.mdx b/docs/docs/releases/changelog/changelog-v0.2.1.mdx index e4e8960f6..917aa43a3 100644 --- a/docs/docs/releases/changelog/changelog-v0.2.1.mdx +++ 
b/docs/docs/releases/changelog/changelog-v0.2.1.mdx @@ -1,13 +1,13 @@ ---- -sidebar_position: 16 -slug: /changelog/changelog-v0.2.1 ---- -# v0.2.1 - -For more details, [GitHub Issues](https://github.com/janhq/jan/releases/tag/v0.2.1) - -Highlighted Issue: [Issue #446: fix: model is started but the indicator is not stopped loading](https://github.com/janhq/jan/pull/446) - +--- +sidebar_position: 17 +slug: /changelog/changelog-v0.2.1 +--- +# v0.2.1 + +For more details, [GitHub Issues](https://github.com/janhq/jan/releases/tag/v0.2.1) + +Highlighted Issue: [Issue #446: fix: model is started but the indicator is not stopped loading](https://github.com/janhq/jan/pull/446) + ## Changes - fix: model is started but the indicator is not stopped loading @louis-jan (#446) @@ -90,4 +90,4 @@ Highlighted Issue: [Issue #446: fix: model is started but the indicator is not ## Contributor @0xSage, @dan-jan, @hiento09, @jan-service-account, @louis-jan, @nam-john-ho, @namchuai, @tikikun, @urmauur, @vuonghoainam and Hien To - + diff --git a/docs/docs/releases/changelog/changelog-v0.2.2.mdx b/docs/docs/releases/changelog/changelog-v0.2.2.mdx index 6546033cd..0beb0013b 100644 --- a/docs/docs/releases/changelog/changelog-v0.2.2.mdx +++ b/docs/docs/releases/changelog/changelog-v0.2.2.mdx @@ -1,13 +1,13 @@ ---- -sidebar_position: 15 -slug: /changelog/changelog-v0.2.2 ---- -# v0.2.2 - -For more details, [GitHub Issues](https://github.com/janhq/jan/releases/tag/v0.2.2) - -Highlighted Issue: [Issue #469: chore: plugin and app version dependency](https://github.com/janhq/jan/pull/469) - +--- +sidebar_position: 16 +slug: /changelog/changelog-v0.2.2 +--- +# v0.2.2 + +For more details, [GitHub Issues](https://github.com/janhq/jan/releases/tag/v0.2.2) + +Highlighted Issue: [Issue #469: chore: plugin and app version dependency](https://github.com/janhq/jan/pull/469) + ## Changes - chore: plugin and app version dependency @louis-jan (#469) @@ -40,4 +40,4 @@ Highlighted Issue: [Issue #469: chore: 
plugin and app version dependency](https ## Contributor @hiento09, @jan-service-account, @louis-jan, @namchuai, @urmauur and @vuonghoainam - + diff --git a/docs/docs/releases/changelog/changelog-v0.2.3.mdx b/docs/docs/releases/changelog/changelog-v0.2.3.mdx index e450bffc5..ba4c8fafd 100644 --- a/docs/docs/releases/changelog/changelog-v0.2.3.mdx +++ b/docs/docs/releases/changelog/changelog-v0.2.3.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 14 +sidebar_position: 15 slug: /changelog/changelog-v0.2.3 --- # v0.2.3 diff --git a/docs/docs/releases/changelog/changelog-v0.3.0.mdx b/docs/docs/releases/changelog/changelog-v0.3.0.mdx index 6ef6acb42..1e91edc8b 100644 --- a/docs/docs/releases/changelog/changelog-v0.3.0.mdx +++ b/docs/docs/releases/changelog/changelog-v0.3.0.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 13 +sidebar_position: 14 slug: /changelog/changelog-v0.3.0 --- # v0.3.0 diff --git a/docs/docs/releases/changelog/changelog-v0.3.1.mdx b/docs/docs/releases/changelog/changelog-v0.3.1.mdx index b83bc88a7..dedbae8e1 100644 --- a/docs/docs/releases/changelog/changelog-v0.3.1.mdx +++ b/docs/docs/releases/changelog/changelog-v0.3.1.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 12 +sidebar_position: 13 slug: /changelog/changelog-v0.3.1 --- # v0.3.1 diff --git a/docs/docs/releases/changelog/changelog-v0.3.2.mdx b/docs/docs/releases/changelog/changelog-v0.3.2.mdx index acc19cc1a..556085a6a 100644 --- a/docs/docs/releases/changelog/changelog-v0.3.2.mdx +++ b/docs/docs/releases/changelog/changelog-v0.3.2.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 11 +sidebar_position: 12 slug: /changelog/changelog-v0.3.2 --- # v0.3.2 diff --git a/docs/docs/releases/changelog/changelog-v0.3.3.mdx b/docs/docs/releases/changelog/changelog-v0.3.3.mdx index bdf4d1ec3..13fbeae01 100644 --- a/docs/docs/releases/changelog/changelog-v0.3.3.mdx +++ b/docs/docs/releases/changelog/changelog-v0.3.3.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 10 +sidebar_position: 11 slug: /changelog/changelog-v0.3.3 --- # 
v0.3.3 diff --git a/docs/docs/releases/changelog/changelog-v0.4.0.mdx b/docs/docs/releases/changelog/changelog-v0.4.0.mdx index c0225cc25..e8838e191 100644 --- a/docs/docs/releases/changelog/changelog-v0.4.0.mdx +++ b/docs/docs/releases/changelog/changelog-v0.4.0.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 9 +sidebar_position: 10 slug: /changelog/changelog-v0.4.0 --- # v0.4.0 diff --git a/docs/docs/releases/changelog/changelog-v0.4.1.mdx b/docs/docs/releases/changelog/changelog-v0.4.1.mdx index 9e0300a4b..37d35a63d 100644 --- a/docs/docs/releases/changelog/changelog-v0.4.1.mdx +++ b/docs/docs/releases/changelog/changelog-v0.4.1.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 8 +sidebar_position: 9 slug: /changelog/changelog-v0.4.1 --- # v0.4.1 diff --git a/docs/docs/releases/changelog/changelog-v0.4.2.mdx b/docs/docs/releases/changelog/changelog-v0.4.2.mdx index 7b2a1b81c..c2a6f7c0f 100644 --- a/docs/docs/releases/changelog/changelog-v0.4.2.mdx +++ b/docs/docs/releases/changelog/changelog-v0.4.2.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 7 +sidebar_position: 8 slug: /changelog/changelog-v0.4.2 --- # v0.4.2 diff --git a/docs/docs/releases/changelog/changelog-v0.4.3.mdx b/docs/docs/releases/changelog/changelog-v0.4.3.mdx index 5703dbb6e..7ba008286 100644 --- a/docs/docs/releases/changelog/changelog-v0.4.3.mdx +++ b/docs/docs/releases/changelog/changelog-v0.4.3.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 6 +sidebar_position: 7 slug: /changelog/changelog-v0.4.3 --- # v0.4.3 diff --git a/docs/docs/releases/changelog/changelog-v0.4.4.mdx b/docs/docs/releases/changelog/changelog-v0.4.4.mdx index e21359e67..348a48e7e 100644 --- a/docs/docs/releases/changelog/changelog-v0.4.4.mdx +++ b/docs/docs/releases/changelog/changelog-v0.4.4.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 5 +sidebar_position: 6 slug: /changelog/changelog-v0.4.4 --- # v0.4.4 diff --git a/docs/docs/releases/changelog/changelog-v0.4.5.mdx b/docs/docs/releases/changelog/changelog-v0.4.5.mdx index 370d37cc7..0a94313a5 
100644 --- a/docs/docs/releases/changelog/changelog-v0.4.5.mdx +++ b/docs/docs/releases/changelog/changelog-v0.4.5.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 4 +sidebar_position: 5 slug: /changelog/changelog-v0.4.5 --- # v0.4.5 diff --git a/docs/docs/releases/changelog/changelog-v0.4.6.mdx b/docs/docs/releases/changelog/changelog-v0.4.6.mdx index d836551e7..aece33420 100644 --- a/docs/docs/releases/changelog/changelog-v0.4.6.mdx +++ b/docs/docs/releases/changelog/changelog-v0.4.6.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 3 +sidebar_position: 4 slug: /changelog/changelog-v0.4.6 --- # v0.4.6 diff --git a/docs/docs/releases/changelog/changelog-v0.4.7.mdx b/docs/docs/releases/changelog/changelog-v0.4.7.mdx index b73ea828c..06db9832d 100644 --- a/docs/docs/releases/changelog/changelog-v0.4.7.mdx +++ b/docs/docs/releases/changelog/changelog-v0.4.7.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 2 +sidebar_position: 3 slug: /changelog/changelog-v0.4.7 --- # v0.4.7 diff --git a/docs/docs/releases/changelog/changelog-v0.4.8.mdx b/docs/docs/releases/changelog/changelog-v0.4.8.mdx index d5bb266fb..6aecf4293 100644 --- a/docs/docs/releases/changelog/changelog-v0.4.8.mdx +++ b/docs/docs/releases/changelog/changelog-v0.4.8.mdx @@ -1,5 +1,5 @@ --- -sidebar_position: 1 +sidebar_position: 2 slug: /changelog/changelog-v0.4.8 --- # v0.4.8 diff --git a/docs/docs/releases/changelog/changelog-v0.4.9.mdx b/docs/docs/releases/changelog/changelog-v0.4.9.mdx new file mode 100644 index 000000000..62211eac6 --- /dev/null +++ b/docs/docs/releases/changelog/changelog-v0.4.9.mdx @@ -0,0 +1,72 @@ +--- +sidebar_position: 1 +slug: /changelog/changelog-v0.4.9 +--- +# v0.4.9 + +For more details, [GitHub Issues](https://github.com/janhq/jan/releases/tag/v0.4.9) + +Highlighted Issue: [Issue #2421: Release/v0.4.9](https://github.com/janhq/jan/pull/2421) + +## Changes + +- Release/v0.4.9 @namchuai (#2421) +- Release cut 0.4.9 @louis-jan (#2398) +- Update models.json @louis-jan (#2382) +- docs: add about/faq 
@eckartal (#2329) +- Update docs 14th mar @dan-jan (#2362) +- feat: Nitro-Tensorrt-LLM Extension @louis-jan (#2280) +- Sync release 0.4.8 to dev @louis-jan (#2297) + +## 🚀 Features + +- feat: quick ask support dark mode @urmauur (#2316) + +## 🐛 Fixes + +- fix: move tensorrt executable to engine @namchuai (#2400) +- fix: unable to start web with quick ask @namchuai (#2402) +- fix: fail-fast would cancel matrix jobs as soon as one job fails @louis-jan (#2401) +- fix: clean working dir with long space support on Windows @louis-jan (#2399) +- fix: line height typography blog and docs @urmauur (#2390) +- fix: some regressions for tensorrt nightly build @namchuai (#2380) +- fix: use model from model hub not load correct model in thread screen @namchuai (#2368) +- fix: wrong engine handling @louis-jan (#2363) +- fix: incompatible GPU error message @louis-jan (#2357) +- fix: app does not recognize GPU first launch @louis-jan (#2350) +- fix: disable rag \& stream settings from tensorrt llm model.json @louis-jan (#2351) +- fix: badge or progress tensorRtExtensionItem @urmauur (#2349) +- fix: ts error when declar var in case @namchuai (#2348) +- fix: some costmetic issues: badges corner, recommended for tensorrt models @namchuai (#2346) +- Add icon file contain image size in file name to fix linux icon @hiento09 (#2344) +- fix: put quick ask feature toggle under experimental feature @louis-jan (#2338) +- fix: do not migrate extensions from quick ask window @louis-jan (#2336) +- fix: existing changelog @hieu-jan (#2330) +- fix: gate quick ask with feature toggle @louis-jan (#2331) +- fix: quick app windows, tray and dock behaviors @louis-jan (#2327) +- fix: jan app tray blocks app update @louis-jan (#2319) +- fix: quick ask not show @louis-jan (#2315) +- fix: quick ask blocks app update @louis-jan (#2310) +- fix: message from quick ask not get the selected model @namchuai (#2307) +- fix: replace robotjs by nutjs (#2295) @louis-jan (#2302) + +## 🧰 Maintenance + +- docs: Update 
sidebar and content for http proxy and import model @aindrajaya (#2328) +- chore: temporary remove linux from tensorrt support @namchuai (#2386) +- docs: Update broken-build.mdx @0xSage (#2385) +- docs: sync updated content to main page @hieu-jan (#2384) +- docs: fix broken link by redirecting to the right paths @aindrajaya (#2381) +- docs: api reference 2.0 @aindrajaya (#2367) +- docs: bump changelog v0.4.8 @hieu-jan (#2366) +- docs: trt-llm extension guides @0xSage (#2353) +- docs: enhance autogenerate changelog configuration @hieu-jan (#2289) +- docs: update website-docs content @hieu-jan (#2287) +- docs: update slogan @hieu-jan (#2282) +- docs: sync updated content from dev to docs branch @hieu-jan (#2283) +- docs: Fix install slug and fix navbar style in darkmode @aindrajaya (#2306) + +## Contributor + +@0xSage, @aindrajaya, @dan-jan, @eckartal, @hiento09, @hieu-jan, @jan-service-account, @louis-jan, @namchuai and @urmauur + diff --git a/docs/docs/server-suite/enterprise.md b/docs/docs/server-suite/enterprise.md index 565c14fde..292911485 100644 --- a/docs/docs/server-suite/enterprise.md +++ b/docs/docs/server-suite/enterprise.md @@ -4,14 +4,15 @@ slug: /enterprise description: Built for Enterprise Deployments keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language model, + large language models, ] --- diff --git a/docs/docs/server-suite/home-server.md b/docs/docs/server-suite/home-server.md index 97f3afbc7..4f6a375a9 100644 --- a/docs/docs/server-suite/home-server.md +++ b/docs/docs/server-suite/home-server.md @@ -4,14 +4,15 @@ slug: /home-server description: Built for Home Servers keywords: [ - Jan AI, Jan, - ChatGPT alternative, + Rethink the Computer, local AI, - private AI, + privacy focus, + free and open source, + private and offline, conversational AI, no-subscription fee, - large language 
model, + large language models, ] --- diff --git a/docs/docs/support/support.md b/docs/docs/support/support.md index 5a1ec2097..36895017d 100644 --- a/docs/docs/support/support.md +++ b/docs/docs/support/support.md @@ -1,3 +1,7 @@ +--- +title: Support - Jan +--- + # Support - Bugs & requests: file a GitHub ticket [here](https://github.com/janhq/jan/issues) diff --git a/docs/docs/team/team.md b/docs/docs/team/team.md index 7d5e07cfb..b18774dda 100644 --- a/docs/docs/team/team.md +++ b/docs/docs/team/team.md @@ -13,7 +13,6 @@ Jan is a startup with an open source business model. We believe in the need for - [Jan Desktop Client & Local server](https://jan.ai) (AGPLv3, built on Jan Framework) - [Nitro: run Local AI](https://github.com/janhq/nitro) (AGPLv3) - ### Bootstrapped Jan is currently a bootstrapped startup. @@ -25,4 +24,4 @@ We balance technical invention with the search for a sustainable business model. ## Our Team - Contributors -- Core Team \ No newline at end of file +- Core Team diff --git a/docs/docs/template/QA_script.md b/docs/docs/template/QA_script.md index 9c7eeaf18..de006c629 100644 --- a/docs/docs/template/QA_script.md +++ b/docs/docs/template/QA_script.md @@ -26,7 +26,6 @@ - [ ] :key::warning: Check that the uninstallation process removes the app successfully from the system. - [ ] Clean the Jan root directory and open the app to check if it creates all the necessary folders, especially models and extensions. - ## B. Overview ### 1. Shortcut key, memory usage / CPU usage @@ -71,10 +70,12 @@ - [ ] :key: Ensure that users switch between threads with different models, the app can handle it. ### 3. Model dropdown + - [ ] :key: Model list should highlight recommended based on user RAM - [ ] Model size should display (for both installed and imported models) ### 4. Users can click on a history thread + - [ ] Confirm that the chat window displays the entire conversation from the selected history thread without any missing messages. 
- [ ] :key: Check the performance and accuracy of the history feature when dealing with a large number of threads. - [ ] Validate that historical threads reflect the exact state of the chat at that time, including settings. @@ -82,12 +83,12 @@ - [ ] Confirm that changing the title of the thread updates correctly. ### 5. Users can config instructions for the assistant. + - [ ] Test if the instructions set by the user are being followed by the assistant in subsequent conversations. - [ ] :key: Validate that changes to instructions are updated in real time and do not require a restart of the application or session. - [ ] :key: Check for the ability to reset instructions to default or clear them completely. - [ ] :key: RAG - Users can import documents and the system should process queries about the uploaded file, providing accurate and appropriate responses in the conversation thread. - ## D. Hub ### 1. Users can discover recommended models (Jan ships with a few preconfigured model.json files) @@ -117,13 +118,14 @@ ### 5. Users can use the model as they want -- [ ] :key: Check `start` / `stop` / `delete` button response exactly what it does. +- [ ] :key: Check `start` / `stop` / `delete` button response exactly what it does. - [ ] Check if starting another model stops the other model entirely. - [x] :rocket: Check the `Explore models` navigate correctly to the model panel. - [ ] :key: Check when deleting a model it will delete all the files on the user's computer. - [ ] :warning:The recommended tags should present right for the user's hardware. ### 6. Users can Integrate With a Remote Server + - [ ] :key: Import openAI GPT model https://jan.ai/guides/using-models/integrate-with-remote-server/ and the model displayed in Hub / Thread dropdown - [ ] Users can use the remote model properly @@ -184,9 +186,10 @@ ## G. Local API server ### 1. 
Local Server Usage with Server Options + - [ ] :key: Explore API Reference: Swagger API for sending/receiving requests - - [ ] Use default server option - - [ ] Configure and use custom server options + - [ ] Use default server option + - [ ] Configure and use custom server options - [ ] Test starting/stopping the local API server with different Model/Model settings - [ ] Server logs captured with correct Server Options provided - [ ] Verify functionality of Open logs/Clear feature diff --git a/docs/docs/wall-of-love.md b/docs/docs/wall-of-love.md index f6bfe79d8..2dda05770 100644 --- a/docs/docs/wall-of-love.md +++ b/docs/docs/wall-of-love.md @@ -1,5 +1,20 @@ --- title: Wall of Love ❤️ +slug: /wall-of-love +description: Check out what our amazing users are saying about Jan! +keywords: + [ + Jan, + Rethink the Computer, + local AI, + privacy focus, + free and open source, + private and offline, + conversational AI, + no-subscription fee, + large language models, + wall of love, + ] --- ## Twitter diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 4e7e91baf..7b74337fb 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -7,7 +7,7 @@ const path = require('path') /** @type {import('@docusaurus/types').Config} */ const config = { - title: 'Jan', + title: 'Jan | Rethink the Computer', tagline: 'Run your own AI', favicon: 'img/favicon.ico', @@ -67,43 +67,39 @@ const config = { redirects: [ { from: '/troubleshooting/failed-to-fetch', - to: '/guides/error-codes/something-amiss/', + to: '/troubleshooting/', }, { from: '/guides/troubleshooting/gpu-not-used/', - to: '/guides/common-error/not-using-gpu/', + to: '/troubleshooting/', }, { from: '/guides/troubleshooting/', - to: '/guides/error-codes/', + to: '/troubleshooting/', }, { from: '/troubleshooting/stuck-on-broken-build/', - to: '/guides/common-error/broken-build/', - }, - { - from: '/guides/troubleshooting/', - to: '/guides/error-codes/', + to: '/troubleshooting/', }, { from: 
'/troubleshooting/somethings-amiss/', - to: '/guides/error-codes/something-amiss/', + to: '/troubleshooting/', }, { from: '/troubleshooting/how-to-get-error-logs/', - to: '/guides/error-codes/how-to-get-error-logs/', + to: '/troubleshooting/', }, { from: '/troubleshooting/permission-denied/', - to: '/guides/error-codes/permission-denied/', + to: '/troubleshooting/', }, { from: '/troubleshooting/unexpected-token/', - to: '/guides/error-codes/unexpected-token/', + to: '/troubleshooting/', }, { from: '/troubleshooting/undefined-issue/', - to: '/guides/error-codes/undefined-issue/', + to: '/troubleshooting/', }, { from: '/install/', @@ -111,11 +107,343 @@ const config = { }, { from: '/guides/using-models/', - to: '/guides/models-setup/', + to: '/guides/models/', }, { from: '/guides/using-extensions/', - to: '/guides/extensions/', + to: '/extensions/', + }, + { + from: '/integrations/tensorrt', + to: '/guides/providers/tensorrt-llm', + }, + { + from: '/install/mac/', + to: '/guides/install/mac/', + }, + { + from: '/guides/using-models/integrate-with-remote-server/', + to: '/guides/engines/remote-server/', + }, + { + from: '/guides/chatting/manage-history/', + to: '/guides/threads/', + }, + { + from: '/guides/using-server/', + to: '/guides/local-api/', + }, + { + from: '/guides/using-models/customize-engine-settings/', + to: '/guides/engines/llamacpp/', + }, + { + from: '/guides/integrations/openrouter/', + to: '/integrations/openrouter/', + }, + { + from: '/docs/integrations/', + to: '/integrations/', + }, + { + from: '/docs/product/chat/', + to: '/developer/framework/product/chat/', + }, + { + from: '/install/windows/', + to: '/guides/install/windows/', + }, + { + from: '/api/overview/', + to: '/api-reference/', + }, + { + from: '/install/linux/', + to: '/guides/install/linux/', + }, + { + from: '/install/from-source/', + to: '/guides/install/#install-server-side', + }, + { + from: '/troubleshooting/gpu-not-used/', + to: '/troubleshooting/#troubleshooting-nvidia-gpu', 
+ }, + { + from: '/guides/using-server/server/', + to: '/guides/local-api/#step-2-start-and-use-the-built-in-api-server', + }, + { + from: '/docs/integrations/openrouter/', + to: '/integrations/openrouter/', + }, + { + from: '/docs/integrations/ollama/', + to: '/guides/engines/ollama/', + }, + { + from: '/guides/using-models/install-from-hub/', + to: '/guides/models/', + }, + { + from: '/guides/integrations/continue/', + to: '/integrations/continue/', + }, + { + from: '/docs/engineering/assistants/', + to: '/developer/framework/engineering/assistants/', + }, + { + from: '/guides/install/hardware/', + to: '/guides/hardware/', + }, + { + from: '/docs/engineering/files/', + to: '/developer/framework/engineering/files/', + }, + { + from: '/features/acceleration/', + to: '/guides/advanced/#enable-the-gpu-acceleration', + }, + { + from: '/docs/extension-guides/', + to: '/extensions/', + }, + { + from: '/specs/settings/', + to: '/developer/framework/product/settings/', + }, + { + from: '/guides/using-models/import-models-using-absolute-filepath/', + to: '/guides/models/', + }, + { + from: '/install/docker/', + to: '/guides/install/server/', + }, + { + from: '/guides/using-models/import-manually/', + to: '/guides/models/', + }, + { + from: '/v1/models', + to: '/guides/models/', + }, + { + from: '/docs/team/contributor-program/', + to: '/team/contributor-program/', + }, + { + from: '/guides/installation/hardware/', + to: '/guides/hardware/', + }, + { + from: '/guides/chatting/start-thread/', + to: '/guides/threads/', + }, + { + from: '/api/files/', + to: '/developer/framework/engineering/files/#file-api', + }, + { + from: '/specs/threads/', + to: '/developer/framework/engineering/threads/', + }, + { + from: '/guides/using-models/customize-models/', + to: '/guides/models/', + }, + { + from: '/docs/modules/models/', + to: '/guides/models/', + }, + { + from: '/developer/build-extension/package-your-assistant/', + to: '/developer/extension/package-your-extension/', + }, + { + 
from: '/getting-started/install/linux/', + to: '/guides/install/linux/', + }, + { + from: '/features/extensions', + to: '/extensions/', + }, + { + from: '/specs/chats/', + to: '/developer/framework/engineering/chats/', + }, + { + from: '/specs/engine/', + to: '/developer/framework/engineering/engine/', + }, + { + from: '/docs/extension-capabilities/', + to: '/extensions/', + }, + { + from: '/docs/get-started/use-local-server/', + to: '/guides/local-api/', + }, + { + from: '/guides/how-jan-works/', + to: '/guides/', + }, + { + from: '/guides/windows/', + to: '/guides/install/windows/', + }, + { + from: '/specs/', + to: '/developer/framework/', + }, + { + from: '/docs/get-started/build-extension/', + to: '/developer/extension/', + }, + { + from: '/specs/files/', + to: '/developer/framework/engineering/files/', + }, + { + from: '/guides/using-models/package-models/', + to: '/guides/models/', + }, + { + from: '/install/overview/', + to: '/guides/install/', + }, + { + from: '/docs/get-started/extension-anatomy/', + to: '/developer/extension/extension-anatomy/', + }, + { + from: '/docs/get-started/', + to: '/guides/', + }, + { + from: '/guides/mac/', + to: '/guides/install/mac/', + }, + { + from: '/specs/fine-tuning/', + to: '/developer/framework/engineering/fine-tuning/', + }, + { + from: '/guides/server/', + to: '/guides/local-api/', + }, + { + from: '/specs/file-based/', + to: '/developer/file-based/', + }, + { + from: '/developers/', + to: '/developer/', + }, + { + from: '/api/', + to: '/api-reference/', + }, + { + from: '/products/desktop', + to: '/desktop/', + }, + { + from: '/developers/plugins/azure-openai', + to: '/guides/engines/openai/', + }, + { + from: '/getting-started/install/mac', + to: '/guides/install/mac/', + }, + { + from: '/guides/fine-tuning/what-models-can-be-fine-tuned', + to: '/developer/framework/engineering/fine-tuning/', + }, + { + from: '/guides/linux/', + to: '/guides/install/linux/', + }, + { + from: '/docs/specs/threads', + to: 
'/developer/framework/engineering/threads/', + }, + { + from: '/docs/api-reference/models/list', + to: '/api-reference#tag/models/get/models', + }, + { + from: '/docs/api-reference/threads', + to: '/api-reference/#tag/chat/post/chat/completions', + }, + { + from: '/getting-started/troubleshooting', + to: '/troubleshooting/', + }, + { + from: '/getting-started/install/windows', + to: '/guides/install/windows/', + }, + { + from: '/docs/api-reference/messages', + to: '/api-reference#tag/messages/get/threads/{thread_id}/messages', + }, + { + from: '/docs/modules/chats', + to: '/developer/framework/engineering/chats/', + }, + { + from: '/docs/specs/chats', + to: '/developer/framework/engineering/chats/', + }, + { + from: '/docs/api-reference/assistants', + to: '/api-reference/#tag/assistants/get/assistants', + }, + { + from: '/docs/modules/files', + to: '/developer/framework/engineering/files/', + }, + { + from: '/features/ai-models', + to: '/guides/models/', + }, + { + from: '/docs/specs/models', + to: '/developer/framework/engineering/models/', + }, + { + from: '/docs/models/overview', + to: '/developer/framework/engineering/models/', + }, + { + from: '/docs/api-reference/models', + to: '/api-reference#tag/models/get/models', + }, + { + from: '/docs/guides/fine-tuning', + to: '/developer/framework/engineering/fine-tuning/', + }, + { + from: '/docs/specs/files', + to: '/developer/framework/engineering/files/', + }, + { + from: '/docs/modules/threads', + to: '/developer/framework/engineering/threads/', + }, + { + from: '/hardware/examples/3090x1-@dan-jan', + to: '/guides/hardware/', + }, + { + from: '/chat', + to: '/guides/threads/', + }, + { + from: '/docs/modules/assistants', + to: '/developer/assistant/', }, ], }, @@ -222,23 +550,21 @@ const config = { metadata: [ { name: 'description', - content: - 'Jan runs 100% offline on your computer, utilizes open-source AI models, prioritizes privacy, and is highly customizable.', + content: `Jan turns your computer into an AI 
machine by running LLMs locally on your computer. It's a privacy-focus, local-first, open-source solution.`, }, { name: 'keywords', content: - 'Jan AI, Jan, ChatGPT alternative, local AI, private AI, conversational AI, no-subscription fee, large language model ', + 'Jan, Rethink the Computer, local AI, privacy focus, free and open source, private and offline, conversational AI, no-subscription fee, large language models', }, { name: 'robots', content: 'index, follow' }, { property: 'og:title', - content: 'Jan | Open-source ChatGPT Alternative', + content: 'Jan | Rethink the Computer', }, { property: 'og:description', - content: - 'Jan runs 100% offline on your computer, utilizes open-source AI models, prioritizes privacy, and is highly customizable.', + content: `Jan turns your computer into an AI machine by running LLMs locally on your computer. It's a privacy-focus, local-first, open-source solution.`, }, { property: 'og:image', @@ -249,12 +575,11 @@ const config = { { property: 'twitter:site', content: '@janframework' }, { property: 'twitter:title', - content: 'Jan | Open-source ChatGPT Alternative', + content: 'Jan | Rethink the Computer', }, { property: 'twitter:description', - content: - 'Jan runs 100% offline on your computer, utilizes open-source AI models, prioritizes privacy, and is highly customizable.', + content: `Jan turns your computer into an AI machine by running LLMs locally on your computer. 
It's a privacy-focus, local-first, open-source solution.`, }, { property: 'twitter:image', @@ -278,15 +603,10 @@ const config = { }, innerHTML: JSON.stringify({ '@context': 'https://schema.org/', - '@type': 'localAI', - 'name': 'Jan', - 'description': - 'Jan runs 100% offline on your computer, utilizes open-source AI models, prioritizes privacy, and is highly customizable.', - 'keywords': - 'Jan AI, Jan, ChatGPT alternative, local AI, private AI, conversational AI, no-subscription fee, large language model ', - 'applicationCategory': 'BusinessApplication', - 'operatingSystem': 'Multiple', - 'url': 'https://jan.ai/', + '@type': 'Organization', + name: 'Jan', + url: 'https://jan.ai/', + logo: 'https://jan.ai/img/og-image.png', }), }, ], @@ -338,10 +658,15 @@ const config = { position: 'left', label: 'Ecosystem', }, + { + to: 'download', + position: 'left', + label: 'Download', + }, // { // type: "docSidebar", // sidebarId: "pricingSidebar", - // positionL: "left", + // positionl: "left", // label: "Pricing", // }, // Navbar right diff --git a/docs/package.json b/docs/package.json index 95b1444df..9c8d805dc 100644 --- a/docs/package.json +++ b/docs/package.json @@ -38,9 +38,12 @@ "postcss": "^8.4.30", "posthog-docusaurus": "^2.0.0", "prism-react-renderer": "^1.3.5", + "lucide-react": "^0.291.0", "react": "^18.2.0", "react-dom": "^18.2.0", + "react-hook-form": "^7.47.0", "react-icons": "^4.11.0", + "react-tweet": "^3.2.0", "redocusaurus": "^2.0.0", "sass": "^1.69.3", "tailwind-merge": "^2.1.0", @@ -48,7 +51,7 @@ }, "devDependencies": { "@docusaurus/module-type-aliases": "^3.0.0", - "dotenv": "^16.3.1", + "dotenv": "^16.4.5", "tailwindcss-animate": "^1.0.7" }, "browserslist": { diff --git a/docs/sidebars.js b/docs/sidebars.js index ad09d670a..f9775c645 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -20,6 +20,7 @@ const sidebars = { link: { type: "doc", id: "about/about" }, items: [ //"about/roadmap", + // temporary unpublish "about/vision", 
"community/community", ], }, @@ -157,12 +158,6 @@ const sidebars = { ], }, ], - // guidesSidebar: [ - // { - // type: "autogenerated", - // dirName: "guides", - // }, - // ], guidesSidebar: [ { type: "category", @@ -170,99 +165,125 @@ const sidebars = { collapsible: false, className: "head_Menu", items: [ - "guides/quickstart", - "guides/install", - "guides/start-server", - "guides/models-list" + "guides/get-started/overview", + "guides/get-started/quickstart", + + { + type: "category", + label: "Hardware Setup", + className: "head_SubMenu", + link: { + type: 'doc', + id: "guides/get-started/hardware-setup", + }, + items: [ + "guides/get-started/settingup-gpu", + ] + }, + { + type: "category", + label: "Installation", + className: "head_SubMenu", + link: { + type: 'doc', + id: "guides/installation/README", + }, + items: [ + "guides/installation/docker", + "guides/installation/linux", + "guides/installation/mac", + "guides/installation/windows" + ] + }, ] }, { type: "category", - label: "Guides", + label: "User Guides", collapsible: false, className: "head_Menu", items: [ - "guides/best-practices", - "guides/thread", + "guides/user-guides/overview-guides", + "guides/user-guides/jan-data-folder", + "guides/user-guides/manage-models", + "guides/user-guides/manage-assistants", + "guides/user-guides/manage-threads", + "guides/user-guides/local-server", + "guides/user-guides/advanced-settings" ] }, { type: "category", - label: "Advanced Features", + label: "Inference Engines", collapsible: false, className: "head_Menu", items: [ + "guides/inference/overview-inference", { type: "category", - label: "Advanced Settings", + label: "Local Engines", className: "head_SubMenu", link: { type: 'doc', - id: "guides/advanced-settings/advanced-settings", + id: "guides/local-providers/README", }, items: [ - "guides/advanced-settings/http-proxy", + "guides/local-providers/llamacpp", + "guides/local-providers/lmstudio", + "guides/local-providers/ollama", + 
"guides/local-providers/tensorrt", ] }, { type: "category", - label: "Advanced Model Setup", + label: "Remote Engines", className: "head_SubMenu", link: { type: 'doc', - id: "guides/models/README", + id: "guides/remote-providers/README", }, items: [ - "guides/models/customize-engine", - "guides/models/import-models", - "guides/models/integrate-remote", - ] - }, - { - type: "category", - label: "Inference Providers", - className: "head_SubMenu", - link: { - type: 'doc', - id: "guides/providers/README", - }, - items: [ - "guides/providers/llama-cpp", - "guides/providers/tensorrt-llm", - ] - }, - { - type: "category", - label: "Extensions", - className: "head_SubMenu", - link: { - type: 'doc', - id: "guides/extensions/README", - }, - items: [ - "guides/extensions/import-ext", - "guides/extensions/setup-ext", + "guides/remote-providers/claude", + "guides/remote-providers/groq", + "guides/remote-providers/mistral", + "guides/remote-providers/openai", + "guides/remote-providers/remote-server-integration" ] }, + ] + }, + { + type: "category", + label: "Extensions", + collapsible: false, + className: "head_Menu", + items: [ + "guides/extensions/extensions", + ] + }, + { + type: "category", + label: "Integrations", + collapsible: false, + className: "head_Menu", + items: [ + // "guides/integrations/overview-integration", { type: "category", label: "Integrations", className: "head_SubMenu", link: { type: 'doc', - id: "guides/integration/README", + id: "guides/integrations/README", }, items: [ - "guides/integration/azure", - "guides/integration/discord", - "guides/integration/groq", - "guides/integration/lmstudio", - "guides/integration/mistral", - "guides/integration/ollama", - "guides/integration/openinterpreter", - "guides/integration/openrouter", - "guides/integration/raycast", - "guides/integration/vscode", + "guides/integrations/crewai", + "guides/integrations/discord", + "guides/integrations/interpreter", + "guides/integrations/raycast", + "guides/integrations/router", 
+ "guides/integrations/unsloth", + "guides/integrations/vscode" ] }, ] @@ -273,38 +294,128 @@ const sidebars = { collapsible: false, className: "head_Menu", items: [ - { - type: "category", - label: "Error Codes", - className: "head_SubMenu", - link: { - type: 'doc', - id: "guides/error-codes/README", - }, - items: [ - "guides/error-codes/how-to-get-error-logs", - "guides/error-codes/permission-denied", - "guides/error-codes/something-amiss", - "guides/error-codes/undefined-issue", - "guides/error-codes/unexpected-token", - ] - }, - { - type: "category", - label: "Common Error", - className: "head_SubMenu", - link: { - type: 'doc', - id: "guides/common-error/README", - }, - items: [ - "guides/common-error/broken-build", - "guides/common-error/not-using-gpu", - ] - }, - "guides/faq" + "guides/troubleshooting", ] }, + // { + // type: "category", + // label: "Advanced Features", + // collapsible: false, + // className: "head_Menu", + // items: [ + // { + // type: "category", + // label: "Advanced Settings", + // className: "head_SubMenu", + // link: { + // type: 'doc', + // id: "guides/advanced-settings/advanced-settings", + // }, + // items: [ + // "guides/advanced-settings/http-proxy", + // ] + // }, + // { + // type: "category", + // label: "Advanced Model Setup", + // className: "head_SubMenu", + // link: { + // type: 'doc', + // id: "guides/models/README", + // }, + // items: [ + // "guides/models/customize-engine", + // "guides/models/import-models", + // "guides/models/integrate-remote", + // ] + // }, + // { + // type: "category", + // label: "Inference Providers", + // className: "head_SubMenu", + // link: { + // type: 'doc', + // id: "guides/providers/README", + // }, + // items: [ + // "guides/providers/llama-cpp", + // "guides/providers/tensorrt-llm", + // ] + // }, + // { + // type: "category", + // label: "Extensions", + // className: "head_SubMenu", + // link: { + // type: 'doc', + // id: "guides/extensions/README", + // }, + // items: [ + // 
"guides/extensions/import-ext", + // "guides/extensions/setup-ext", + // ] + // }, + // { + // type: "category", + // label: "Integrations", + // className: "head_SubMenu", + // link: { + // type: 'doc', + // id: "guides/integration/README", + // }, + // items: [ + // "guides/integration/azure", + // "guides/integration/discord", + // "guides/integration/groq", + // "guides/integration/lmstudio", + // "guides/integration/mistral", + // "guides/integration/ollama", + // "guides/integration/openinterpreter", + // "guides/integration/openrouter", + // "guides/integration/raycast", + // "guides/integration/vscode", + // ] + // }, + // ] + // }, + // { + // type: "category", + // label: "Troubleshooting", + // collapsible: false, + // className: "head_Menu", + // items: [ + // { + // type: "category", + // label: "Error Codes", + // className: "head_SubMenu", + // link: { + // type: 'doc', + // id: "guides/error-codes/README", + // }, + // items: [ + // "guides/error-codes/how-to-get-error-logs", + // "guides/error-codes/permission-denied", + // "guides/error-codes/something-amiss", + // "guides/error-codes/undefined-issue", + // "guides/error-codes/unexpected-token", + // ] + // }, + // { + // type: "category", + // label: "Common Error", + // className: "head_SubMenu", + // link: { + // type: 'doc', + // id: "guides/common-error/README", + // }, + // items: [ + // "guides/common-error/broken-build", + // "guides/common-error/not-using-gpu", + // ] + // }, + // "guides/faq" + // ] + // }, ], developerSidebar: [ { diff --git a/docs/src/components/HomepagePrimaryFeatures/index.js b/docs/src/components/HomepagePrimaryFeatures/index.js index a9762c4e5..40e715f10 100644 --- a/docs/src/components/HomepagePrimaryFeatures/index.js +++ b/docs/src/components/HomepagePrimaryFeatures/index.js @@ -6,25 +6,25 @@ export default function HomepagePrimaryFeatures() {

Installation

-

Install Jan across multiple platforms.

+

Install Jan across multiple platforms.

- {"Card + {"Card
-
-

Models

-

Discover the pre-configured AI models available for use.

+
+

API Reference

+

Interact with our API locally.

- {"Card + {"Card
diff --git a/docs/src/components/HomepageSecondaryFeatures/index.js b/docs/src/components/HomepageSecondaryFeatures/index.js index 37bac0a13..c48e50cb2 100644 --- a/docs/src/components/HomepageSecondaryFeatures/index.js +++ b/docs/src/components/HomepageSecondaryFeatures/index.js @@ -9,19 +9,19 @@ export default function HomepageSecondaryFeatures() { imgSrc="/img/homepage-new/bg-rocket.png" title="Quickstart" description="Get started quickly with our Quickstart guide, offering simple steps for a smooth setup." - href="/guides/" + href="/guides/quickstart" /> diff --git a/docs/src/components/HomepageTerinaryFeatures/index.js b/docs/src/components/HomepageTerinaryFeatures/index.js index ec959332e..bd4588e96 100644 --- a/docs/src/components/HomepageTerinaryFeatures/index.js +++ b/docs/src/components/HomepageTerinaryFeatures/index.js @@ -12,7 +12,7 @@ export default function HomepageTerinaryFeatures() {

Easily kick off your journey with Jan by installing your AI locally.

@@ -32,24 +32,24 @@ export default function HomepageTerinaryFeatures() {
{"Icon"} -
Settings
+
User Guides
-

Discover how to manage Jan and configure your installed AI.

+

Explore our comprehensive guide on configuring and using the Jan application.

@@ -58,24 +58,24 @@ export default function HomepageTerinaryFeatures() {
{"Icon"} -
Features
+
Inference
-

Explore key features designed to enhance your experience with Jan.

+

Learn how to build and integrate Jan with local and remote inference providers.

@@ -87,21 +87,21 @@ export default function HomepageTerinaryFeatures() {
Troubleshooting
-

Find solutions to common issues, including error codes, and FAQs.

+

Find solutions to common issues, including error codes.

diff --git a/docs/src/containers/Banner/index.js b/docs/src/containers/Banner/index.js index 07622c63d..3538ab7a9 100644 --- a/docs/src/containers/Banner/index.js +++ b/docs/src/containers/Banner/index.js @@ -1,32 +1,43 @@ -import React from "react"; +import React from 'react' -import { useAppStars } from "@site/src/hooks/useAppStars"; -import { useAppRelease } from "@site/src/hooks/useAppRelease"; +import { useAppStars } from '@site/src/hooks/useAppStars' +import { useAppRelease } from '@site/src/hooks/useAppRelease' -import { AiOutlineGithub, AiOutlineTwitter } from "react-icons/ai"; -import { BiLogoDiscordAlt } from "react-icons/bi"; +import { AiOutlineGithub, AiOutlineTwitter } from 'react-icons/ai' +import { BiLogoDiscordAlt } from 'react-icons/bi' +import { FaLinkedin } from 'react-icons/fa' const socials = [ { icon: , - href: "https://twitter.com/janframework", + href: 'https://twitter.com/janframework', }, { icon: , - href: "https://discord.com/invite/FTk2MvZwJH", + href: 'https://discord.com/invite/FTk2MvZwJH', }, { icon: , - href: "https://github.com/janhq/jan", + href: 'https://github.com/janhq/jan', }, -]; + { + icon: , + href: 'https://www.linkedin.com/company/janframework/', + }, +] export default function AnnoncementBanner() { - const { stargazers } = useAppStars(); - const { release } = useAppRelease(); + const { stargazers } = useAppStars() + const { release } = useAppRelease() return ( -
+
{social.icon} - ); + ) })}
- ); + ) } diff --git a/docs/src/containers/DownloadApp/index.js b/docs/src/containers/DownloadApp/index.js index d1b586698..e3e5b6d42 100644 --- a/docs/src/containers/DownloadApp/index.js +++ b/docs/src/containers/DownloadApp/index.js @@ -1,135 +1,161 @@ -import React, { useState, useEffect } from "react"; -import axios from "axios"; -import { FaWindows, FaApple, FaLinux } from "react-icons/fa"; -import { twMerge } from "tailwind-merge"; +import React, { useState, useEffect } from 'react' +import axios from 'axios' +import { FaWindows, FaApple, FaLinux } from 'react-icons/fa' +import { twMerge } from 'tailwind-merge' +import { DownloadIcon } from 'lucide-react' const systemsTemplate = [ { - name: "Mac M1, M2, M3", + name: 'Mac M1, M2, M3', + label: 'Apple Silicon', logo: FaApple, - fileFormat: "{appname}-mac-arm64-{tag}.dmg", - comingSoon: false, + fileFormat: '{appname}-mac-arm64-{tag}.dmg', }, { - name: "Mac (Intel)", + name: 'Mac (Intel)', + label: 'Apple Intel', logo: FaApple, - fileFormat: "{appname}-mac-x64-{tag}.dmg", - comingSoon: false, + fileFormat: '{appname}-mac-x64-{tag}.dmg', }, { - name: "Windows", + name: 'Windows', + label: 'Standard (64-bit)', logo: FaWindows, - fileFormat: "{appname}-win-x64-{tag}.exe", + fileFormat: '{appname}-win-x64-{tag}.exe', }, { - name: "Linux (AppImage)", + name: 'Linux (AppImage)', + label: 'AppImage', logo: FaLinux, - fileFormat: "{appname}-linux-x86_64-{tag}.AppImage", + fileFormat: '{appname}-linux-x86_64-{tag}.AppImage', }, { - name: "Linux (deb)", + name: 'Linux (deb)', + label: 'Deb', logo: FaLinux, - fileFormat: "{appname}-linux-amd64-{tag}.deb", + fileFormat: '{appname}-linux-amd64-{tag}.deb', }, -]; +] + +const groupTemnplate = [ + { label: 'MacOS', name: 'mac', logo: FaApple }, + { label: 'Windows', name: 'windows', logo: FaWindows }, + { label: 'Linux', name: 'linux', logo: FaLinux }, +] export default function DownloadApp() { - const [systems, setSystems] = useState(systemsTemplate); + const [systems, 
setSystems] = useState(systemsTemplate) const getLatestReleaseInfo = async (repoOwner, repoName) => { - const url = `https://api.github.com/repos/${repoOwner}/${repoName}/releases/latest`; + const url = `https://api.github.com/repos/${repoOwner}/${repoName}/releases/latest` try { - const response = await axios.get(url); - return response.data; + const response = await axios.get(url) + return response.data } catch (error) { - console.error(error); - return null; + console.error(error) + return null } - }; + } const extractAppName = (fileName) => { // Extract appname using a regex that matches the provided file formats - const regex = /^(.*?)-(?:mac|win|linux)-(?:arm64|x64|amd64|x86_64)-.*$/; - const match = fileName.match(regex); - return match ? match[1] : null; - }; + const regex = /^(.*?)-(?:mac|win|linux)-(?:arm64|x64|amd64|x86_64)-.*$/ + const match = fileName.match(regex) + return match ? match[1] : null + } useEffect(() => { const updateDownloadLinks = async () => { try { - const releaseInfo = await getLatestReleaseInfo("janhq", "jan"); + const releaseInfo = await getLatestReleaseInfo('janhq', 'jan') // Extract appname from the first asset name - const firstAssetName = releaseInfo.assets[0].name; - const appname = extractAppName(firstAssetName); + const firstAssetName = releaseInfo.assets[0].name + const appname = extractAppName(firstAssetName) if (!appname) { console.error( - "Failed to extract appname from file name:", + 'Failed to extract appname from file name:', firstAssetName - ); + ) - return; + return } // Remove 'v' at the start of the tag_name - const tag = releaseInfo.tag_name.startsWith("v") + const tag = releaseInfo.tag_name.startsWith('v') ? 
releaseInfo.tag_name.substring(1) - : releaseInfo.tag_name; + : releaseInfo.tag_name const updatedSystems = systems.map((system) => { const downloadUrl = system.fileFormat - .replace("{appname}", appname) - .replace("{tag}", tag); + .replace('{appname}', appname) + .replace('{tag}', tag) return { ...system, href: `https://github.com/janhq/jan/releases/download/${releaseInfo.tag_name}/${downloadUrl}`, - }; - }); + } + }) - setSystems(updatedSystems); + setSystems(updatedSystems) } catch (error) { - console.error("Failed to update download links:", error); + console.error('Failed to update download links:', error) } - }; + } - updateDownloadLinks(); - }, []); + updateDownloadLinks() + }, []) + + const renderDownloadLink = (group) => { + return ( + <> + {systems + .filter((x) => x.name.toLowerCase().includes(group)) + .map((system, i) => ( + + ))} + + ) + } return ( -
-
- - Download for PC - -
- 🚧 - Warning: - - Jan is in the process of being built. Expect bugs! - -
-
-
- {systems.map((system, i) => ( - - - {system.name} - {system.comingSoon && ( - - Coming Soon - - )} - - ))} +
+
+ {groupTemnplate.map((item, i) => { + return ( +
+
+
+
+ +
+
{item.label}
+
+
+ {renderDownloadLink(item.name)} +
+
+
+ ) + })}
- ); + ) } diff --git a/docs/src/containers/Elements/dropdown.js b/docs/src/containers/Elements/dropdown.js index 91115c811..00176fdf2 100644 --- a/docs/src/containers/Elements/dropdown.js +++ b/docs/src/containers/Elements/dropdown.js @@ -1,134 +1,134 @@ -import React, { useState, useEffect } from "react"; -import { Fragment } from "react"; -import { Menu, Transition } from "@headlessui/react"; -import { ChevronDownIcon } from "@heroicons/react/20/solid"; -import axios from "axios"; -import { FaWindows, FaApple, FaLinux } from "react-icons/fa"; +import React, { useState, useEffect } from 'react' +import { Fragment } from 'react' +import { Menu, Transition } from '@headlessui/react' +import { ChevronDownIcon } from '@heroicons/react/20/solid' +import axios from 'axios' +import { FaWindows, FaApple, FaLinux } from 'react-icons/fa' const systemsTemplate = [ { - name: "Download for Mac (M1/M2/M3)", + name: 'Download for Mac (M1/M2/M3)', logo: FaApple, - fileFormat: "{appname}-mac-arm64-{tag}.dmg", + fileFormat: '{appname}-mac-arm64-{tag}.dmg', }, { - name: "Download for Mac (Intel)", + name: 'Download for Mac (Intel)', logo: FaApple, - fileFormat: "{appname}-mac-x64-{tag}.dmg", + fileFormat: '{appname}-mac-x64-{tag}.dmg', }, { - name: "Download for Windows", + name: 'Download for Windows', logo: FaWindows, - fileFormat: "{appname}-win-x64-{tag}.exe", + fileFormat: '{appname}-win-x64-{tag}.exe', }, { - name: "Download for Linux (AppImage)", + name: 'Download for Linux (AppImage)', logo: FaLinux, - fileFormat: "{appname}-linux-x86_64-{tag}.AppImage", + fileFormat: '{appname}-linux-x86_64-{tag}.AppImage', }, { - name: "Download for Linux (deb)", + name: 'Download for Linux (deb)', logo: FaLinux, - fileFormat: "{appname}-linux-amd64-{tag}.deb", - } -]; + fileFormat: '{appname}-linux-amd64-{tag}.deb', + }, +] function classNames(...classes) { - return classes.filter(Boolean).join(" "); + return classes.filter(Boolean).join(' ') } export default function Dropdown() { - 
const [systems, setSystems] = useState(systemsTemplate); - const [defaultSystem, setDefaultSystem] = useState(systems[0]); + const [systems, setSystems] = useState(systemsTemplate) + const [defaultSystem, setDefaultSystem] = useState(systems[0]) const getLatestReleaseInfo = async (repoOwner, repoName) => { - const url = `https://api.github.com/repos/${repoOwner}/${repoName}/releases/latest`; + const url = `https://api.github.com/repos/${repoOwner}/${repoName}/releases/latest` try { - const response = await axios.get(url); - return response.data; + const response = await axios.get(url) + return response.data } catch (error) { - console.error(error); - return null; + console.error(error) + return null } - }; + } const extractAppName = (fileName) => { // Extract appname using a regex that matches the provided file formats - const regex = /^(.*?)-(?:mac|win|linux)-(?:arm64|x64|x86_64|amd64)-.*$/; - const match = fileName.match(regex); - return match ? match[1] : null; - }; + const regex = /^(.*?)-(?:mac|win|linux)-(?:arm64|x64|x86_64|amd64)-.*$/ + const match = fileName.match(regex) + return match ? 
match[1] : null + } const changeDefaultSystem = async (systems) => { - const userAgent = navigator.userAgent; + const userAgent = navigator.userAgent - if (userAgent.includes("Windows")) { + if (userAgent.includes('Windows')) { // windows user - setDefaultSystem(systems[2]); - } else if (userAgent.includes("Linux")) { + setDefaultSystem(systems[2]) + } else if (userAgent.includes('Linux')) { // linux user - setDefaultSystem(systems[3]); - } else if (userAgent.includes("Mac OS")) { - setDefaultSystem(systems[0]); + setDefaultSystem(systems[3]) + } else if (userAgent.includes('Mac OS')) { + setDefaultSystem(systems[0]) } else { - setDefaultSystem(systems[1]); + setDefaultSystem(systems[1]) } - }; + } useEffect(() => { const updateDownloadLinks = async () => { try { - const releaseInfo = await getLatestReleaseInfo("janhq", "jan"); + const releaseInfo = await getLatestReleaseInfo('janhq', 'jan') // Extract appname from the first asset name - const firstAssetName = releaseInfo.assets[0].name; - const appname = extractAppName(firstAssetName); + const firstAssetName = releaseInfo.assets[0].name + const appname = extractAppName(firstAssetName) if (!appname) { console.error( - "Failed to extract appname from file name:", + 'Failed to extract appname from file name:', firstAssetName - ); - changeDefaultSystem(systems); - return; + ) + changeDefaultSystem(systems) + return } // Remove 'v' at the start of the tag_name - const tag = releaseInfo.tag_name.startsWith("v") + const tag = releaseInfo.tag_name.startsWith('v') ? 
releaseInfo.tag_name.substring(1) - : releaseInfo.tag_name; + : releaseInfo.tag_name const updatedSystems = systems.map((system) => { const downloadUrl = system.fileFormat - .replace("{appname}", appname) - .replace("{tag}", tag); + .replace('{appname}', appname) + .replace('{tag}', tag) return { ...system, href: `https://github.com/janhq/jan/releases/download/${releaseInfo.tag_name}/${downloadUrl}`, - }; - }); + } + }) - setSystems(updatedSystems); - changeDefaultSystem(updatedSystems); + setSystems(updatedSystems) + changeDefaultSystem(updatedSystems) } catch (error) { - console.error("Failed to update download links:", error); + console.error('Failed to update download links:', error) } - }; + } - updateDownloadLinks(); - }, []); + updateDownloadLinks() + }, []) return (
{defaultSystem.name} - + Open OS options @@ -141,7 +141,7 @@ export default function Dropdown() { leaveFrom="transform opacity-100 scale-100" leaveTo="transform opacity-0 scale-95" > - +
{systems.map((system) => ( {({ active }) => ( - + {system.name} @@ -171,5 +171,5 @@ export default function Dropdown() {
- ); + ) } diff --git a/docs/src/containers/Footer/index.js b/docs/src/containers/Footer/index.js index 3e62f579a..a33caa2c8 100644 --- a/docs/src/containers/Footer/index.js +++ b/docs/src/containers/Footer/index.js @@ -1,134 +1,213 @@ -import React from "react"; +import React, { useState } from 'react' -import { AiOutlineGithub, AiOutlineTwitter } from "react-icons/ai"; -import { BiLogoDiscordAlt, BiLogoLinkedin } from "react-icons/bi"; +import useDocusaurusContext from '@docusaurus/useDocusaurusContext' +import { AiOutlineGithub, AiOutlineTwitter } from 'react-icons/ai' +import { BiLogoDiscordAlt, BiLogoLinkedin } from 'react-icons/bi' +import { useForm } from 'react-hook-form' const socials = [ { - icon: , - href: "https://twitter.com/janframework", + icon: ( + + ), + href: 'https://twitter.com/janframework', }, { - icon: , - href: "https://discord.com/invite/FTk2MvZwJH", + icon: ( + + ), + href: 'https://discord.com/invite/FTk2MvZwJH', }, { - icon: , - href: "https://github.com/janhq/jan", + icon: ( + + ), + href: 'https://github.com/janhq/jan', }, { - icon: , - href: "https://www.linkedin.com/company/janframework/", - } -]; + icon: ( + + ), + href: 'https://www.linkedin.com/company/janframework/', + }, +] const menus = [ { - name: "For Developers", + name: 'Product', child: [ { - menu: "Documentation", - path: "/developer", + menu: 'Download', + path: '/download', }, { - menu: "Hardware", - path: "/hardware", + menu: 'Documentation', + path: '/developer', }, { - menu: "API Reference", - path: "/api-reference", - }, - { - menu: "Changelog", - path: "https://github.com/janhq/jan/releases", + menu: 'Changelog', + path: 'https://github.com/janhq/jan/releases', external: true, }, ], }, { - name: "Community", + name: 'For Developers', child: [ { - menu: "Github", - path: "https://github.com/janhq/jan", - external: true, + menu: 'Guides', + path: '/guides', }, { - menu: "Discord", - path: "https://discord.gg/FTk2MvZwJH", - external: true, + menu: 'Developer', + path: 
'/developer', }, { - menu: "Twitter", - path: "https://twitter.com/janframework", - external: true, + menu: 'API Reference', + path: '/api-reference', }, - { - menu: "LinkedIn", - path: "https://www.linkedin.com/company/janframework/", - external: true, - } ], }, { - name: "Company", + name: 'Community', child: [ { - menu: "About", - path: "/about", - }, - { - menu: "Blog", - path: "/blog", - }, - { - menu: "Careers", - path: "https://janai.bamboohr.com/careers", + menu: 'Github', + path: 'https://github.com/janhq/jan', external: true, }, { - menu: "Newsletter", - path: "/community#newsletter", - } + menu: 'Discord', + path: 'https://discord.gg/FTk2MvZwJH', + external: true, + }, + { + menu: 'Twitter', + path: 'https://twitter.com/janframework', + external: true, + }, + { + menu: 'LinkedIn', + path: 'https://www.linkedin.com/company/janframework/', + external: true, + }, ], }, -]; + { + name: 'Company', + child: [ + { + menu: 'About', + path: '/about', + }, + { + menu: 'Blog', + path: '/blog', + }, + { + menu: 'Careers', + path: 'https://janai.bamboohr.com/careers', + external: true, + }, + { + menu: 'Newsletter', + path: '/community#newsletter', + }, + ], + }, +] -const getCurrentYear = new Date().getFullYear(); +const getCurrentYear = new Date().getFullYear() export default function Footer() { + const { register, handleSubmit, reset } = useForm({ + defaultValues: { + email: '', + }, + }) + + const { + siteConfig: { customFields }, + } = useDocusaurusContext() + + const [formMessage, setFormMessage] = useState('') + + const onSubmit = (data) => { + const { email } = data + const options = { + method: 'POST', + + body: JSON.stringify({ + updateEnabled: false, + email, + listIds: [13], + }), + } + + if (email) { + fetch('https://brevo.jan.ai/', options) + .then((response) => response.json()) + .then((response) => { + if (response.id) { + setFormMessage('You have successfully joined our newsletter') + } else { + setFormMessage(response.message) + } + reset() + 
setTimeout(() => { + setFormMessage('') + }, 5000) + }) + .catch((err) => console.error(err)) + } + } + return ( -