Merge branch 'dev-web' into stag-web

This commit is contained in:
Dinh Long Nguyen 2025-10-02 00:51:44 +07:00
commit 9a8aa07094
375 changed files with 7233 additions and 3868 deletions

View File

@ -70,10 +70,9 @@ jobs:
run: |
echo "Version: ${{ inputs.new_version }}"
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
"usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
fi
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
@ -83,7 +82,7 @@ jobs:
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
@ -96,7 +95,7 @@ jobs:
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
@ -125,7 +124,7 @@ jobs:
env:
RELEASE_CHANNEL: '${{ inputs.channel }}'
AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }}
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
@ -136,4 +135,4 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
path: ./src-tauri/target/release/bundle/appimage/*.AppImage

View File

@ -91,10 +91,9 @@ jobs:
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
"usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
fi
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
@ -104,7 +103,7 @@ jobs:
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
@ -117,7 +116,7 @@ jobs:
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
@ -128,7 +127,7 @@ jobs:
# Temporarily enable devtool on prod build
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
cat ./src-tauri/Cargo.toml
cat ./src-tauri/Cargo.toml
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
@ -139,7 +138,7 @@ jobs:
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
@ -184,4 +183,3 @@ jobs:
with:
name: jan-linux-amd64-flatpak-${{ inputs.new_version }}-AppImage
path: ./src-tauri/target/release/bundle/appimage/*.AppImage

View File

@ -108,10 +108,9 @@ jobs:
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
"usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
fi
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
@ -121,7 +120,7 @@ jobs:
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
@ -134,7 +133,7 @@ jobs:
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
@ -156,7 +155,7 @@ jobs:
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"

View File

@ -54,6 +54,8 @@ on:
value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }}
FILE_NAME:
value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }}
MSI_FILE_NAME:
value: ${{ jobs.build-windows-x64.outputs.MSI_FILE_NAME }}
jobs:
build-windows-x64:
@ -61,6 +63,7 @@ jobs:
outputs:
WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }}
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
MSI_FILE_NAME: ${{ steps.metadata.outputs.MSI_FILE_NAME }}
permissions:
contents: write
steps:
@ -189,9 +192,15 @@ jobs:
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-${{ inputs.new_version }}
name: jan-windows-exe-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/nsis/*.exe
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-msi-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/msi/*.msi
## Set output filename for windows
- name: Set output filename for windows
@ -201,13 +210,18 @@ jobs:
if [ "${{ inputs.channel }}" != "stable" ]; then
FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig)
MSI_FILE="Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64_en-US.msi"
else
FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig)
MSI_FILE="Jan_${{ inputs.new_version }}_x64_en-US.msi"
fi
echo "::set-output name=WIN_SIG::$WIN_SIG"
echo "::set-output name=FILE_NAME::$FILE_NAME"
echo "::set-output name=MSI_FILE_NAME::$MSI_FILE"
id: metadata
## Upload to s3 for nightly and beta
@ -220,6 +234,8 @@ jobs:
# Upload for tauri updater
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }}.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}.sig
aws s3 cp ./src-tauri/target/release/bundle/msi/${{ steps.metadata.outputs.MSI_FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.MSI_FILE_NAME }}
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
@ -236,3 +252,13 @@ jobs:
asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
asset_content_type: application/octet-stream
- name: Upload release assert if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/msi/${{ steps.metadata.outputs.MSI_FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.MSI_FILE_NAME }}
asset_content_type: application/octet-stream

3
.gitignore vendored
View File

@ -21,11 +21,13 @@ src-tauri/resources/lib
src-tauri/icons
!src-tauri/icons/icon.png
src-tauri/gen/apple
src-tauri/gen/android
src-tauri/resources/bin
# Helper tools
.opencode
OpenCode.md
Claude.md
archive/
.cache/
@ -60,3 +62,4 @@ src-tauri/resources/
## test
test-data
llm-docs
.claude/agents

View File

@ -41,9 +41,25 @@ else
@echo "Not macOS; skipping Rust target installation."
endif
# Install required Rust targets for Android builds
install-android-rust-targets:
@echo "Checking and installing Android Rust targets..."
@rustup target list --installed | grep -q "aarch64-linux-android" || rustup target add aarch64-linux-android
@rustup target list --installed | grep -q "armv7-linux-androideabi" || rustup target add armv7-linux-androideabi
@rustup target list --installed | grep -q "i686-linux-android" || rustup target add i686-linux-android
@rustup target list --installed | grep -q "x86_64-linux-android" || rustup target add x86_64-linux-android
@echo "Android Rust targets ready!"
# Install required Rust targets for iOS builds
install-ios-rust-targets:
@echo "Checking and installing iOS Rust targets..."
@rustup target list --installed | grep -q "aarch64-apple-ios" || rustup target add aarch64-apple-ios
@rustup target list --installed | grep -q "aarch64-apple-ios-sim" || rustup target add aarch64-apple-ios-sim
@rustup target list --installed | grep -q "x86_64-apple-ios" || rustup target add x86_64-apple-ios
@echo "iOS Rust targets ready!"
dev: install-and-build
yarn download:bin
yarn download:lib
yarn dev
# Web application targets
@ -58,12 +74,41 @@ build-web-app: install-web-app
yarn build:core
yarn build:web-app
serve-web-app:
serve-web-app:
yarn serve:web-app
build-serve-web-app: build-web-app
yarn serve:web-app
# Mobile
dev-android: install-and-build install-android-rust-targets
@echo "Setting up Android development environment..."
@if [ ! -d "src-tauri/gen/android" ]; then \
echo "Android app not initialized. Initializing..."; \
yarn tauri android init; \
fi
@echo "Sourcing Android environment setup..."
@bash autoqa/scripts/setup-android-env.sh echo "Android environment ready"
@echo "Starting Android development server..."
yarn dev:android
dev-ios: install-and-build install-ios-rust-targets
@echo "Setting up iOS development environment..."
ifeq ($(shell uname -s),Darwin)
@if [ ! -d "src-tauri/gen/ios" ]; then \
echo "iOS app not initialized. Initializing..."; \
yarn tauri ios init; \
fi
@echo "Checking iOS development requirements..."
@xcrun --version > /dev/null 2>&1 || (echo "❌ Xcode command line tools not found. Install with: xcode-select --install" && exit 1)
@xcrun simctl list devices available | grep -q "iPhone\|iPad" || (echo "❌ No iOS simulators found. Install simulators through Xcode." && exit 1)
@echo "Starting iOS development server..."
yarn dev:ios
else
@echo "❌ iOS development is only supported on macOS"
@exit 1
endif
# Linting
lint: install-and-build
yarn lint
@ -71,7 +116,6 @@ lint: install-and-build
# Testing
test: lint
yarn download:bin
yarn download:lib
ifeq ($(OS),Windows_NT)
yarn download:windows-installer
endif

View File

@ -1,6 +1,6 @@
# Jan - Local AI Assistant
# Jan - Open-source ChatGPT replacement
![Jan AI](docs/src/pages/docs/_assets/jan-app.png)
<img width="2048" height="280" alt="github jan banner" src="https://github.com/user-attachments/assets/f3f87889-c133-433b-b250-236218150d3f" />
<p align="center">
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
@ -12,15 +12,13 @@
</p>
<p align="center">
<a href="https://jan.ai/docs/quickstart">Getting Started</a>
- <a href="https://jan.ai/docs">Docs</a>
<a href="https://www.jan.ai/docs/desktop">Getting Started</a>
- <a href="https://discord.gg/Exe46xPMbK">Community</a>
- <a href="https://jan.ai/changelog">Changelog</a>
- <a href="https://github.com/menloresearch/jan/issues">Bug reports</a>
- <a href="https://discord.gg/AsJ8krTT3N">Discord</a>
</p>
Jan is an AI assistant that can run 100% offline on your device. Download and run LLMs with
**full control** and **privacy**.
Jan is bringing the best of open-source AI in an easy-to-use product. Download and run LLMs with **full control** and **privacy**.
## Installation
@ -29,41 +27,36 @@ The easiest way to get started is by downloading one of the following versions f
<table>
<tr>
<td><b>Platform</b></td>
<td><b>Stable</b></td>
<td><b>Nightly</b></td>
<td><b>Download</b></td>
</tr>
<tr>
<td><b>Windows</b></td>
<td><a href='https://app.jan.ai/download/latest/win-x64'>jan.exe</a></td>
<td><a href='https://app.jan.ai/download/nightly/win-x64'>jan.exe</a></td>
</tr>
<tr>
<td><b>macOS</b></td>
<td><a href='https://app.jan.ai/download/latest/mac-universal'>jan.dmg</a></td>
<td><a href='https://app.jan.ai/download/nightly/mac-universal'>jan.dmg</a></td>
</tr>
<tr>
<td><b>Linux (deb)</b></td>
<td><a href='https://app.jan.ai/download/latest/linux-amd64-deb'>jan.deb</a></td>
<td><a href='https://app.jan.ai/download/nightly/linux-amd64-deb'>jan.deb</a></td>
</tr>
<tr>
<td><b>Linux (AppImage)</b></td>
<td><a href='https://app.jan.ai/download/latest/linux-amd64-appimage'>jan.AppImage</a></td>
<td><a href='https://app.jan.ai/download/nightly/linux-amd64-appimage'>jan.AppImage</a></td>
</tr>
</table>
Download from [jan.ai](https://jan.ai/) or [GitHub Releases](https://github.com/menloresearch/jan/releases).
Download from [jan.ai](https://jan.ai/) or [GitHub Releases](https://github.com/menloresearch/jan/releases).
## Features
- **Local AI Models**: Download and run LLMs (Llama, Gemma, Qwen, etc.) from HuggingFace
- **Cloud Integration**: Connect to OpenAI, Anthropic, Mistral, Groq, and others
- **Local AI Models**: Download and run LLMs (Llama, Gemma, Qwen, GPT-oss etc.) from HuggingFace
- **Cloud Integration**: Connect to GPT models via OpenAI, Claude models via Anthropic, Mistral, Groq, and others
- **Custom Assistants**: Create specialized AI assistants for your tasks
- **OpenAI-Compatible API**: Local server at `localhost:1337` for other applications
- **Model Context Protocol**: MCP integration for enhanced capabilities
- **Model Context Protocol**: MCP integration for agentic capabilities
- **Privacy First**: Everything runs locally when you want it to
## Build from Source

View File

@ -2,7 +2,18 @@
Internal tracker for web component changes and features.
## v0.0.11 (Current)
## v0.0.12 (Current)
**Release Date**: 2025-10-02
**Commit SHA**: df145d63a93bd27336b5b539ce0719fe9c7719e3
**Main Features**:
- Search button instead of tools
- Projects support properly for local used
- Temporary chat mode
- Performance enhancement: prevent thread items over fetching on app start
- Fix Google Tag
## v0.0.11
**Release Date**: 2025-09-23
**Commit SHA**: 494db746f7dd1f51241cec80bbf550901a0115e5

View File

@ -0,0 +1,80 @@
#!/bin/bash
# Android Development Environment Setup for Jan
# Ensure rustup's Rust toolchain is used instead of Homebrew's
export PATH="$HOME/.cargo/bin:$PATH"
# Set JAVA_HOME for Android builds
export JAVA_HOME=/opt/homebrew/opt/openjdk@17/libexec/openjdk.jdk/Contents/Home
export PATH="/opt/homebrew/opt/openjdk@17/bin:$PATH"
export ANDROID_HOME="$HOME/Library/Android/sdk"
export ANDROID_NDK_ROOT="$HOME/Library/Android/sdk/ndk/29.0.14033849"
export NDK_HOME="$HOME/Library/Android/sdk/ndk/29.0.14033849"
# Add Android tools to PATH
export PATH=$PATH:$ANDROID_HOME/platform-tools:$ANDROID_HOME/tools:$ANDROID_HOME/cmdline-tools/latest/bin:$ANDROID_HOME/emulator:$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin
# Set up CC and CXX for Android compilation
export CC_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
export CXX_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang++"
export AR_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar"
export RANLIB_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ranlib"
# Additional environment variables for Rust cross-compilation
export CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
# Only set global CC and AR for Android builds (when TAURI_ANDROID_BUILD is set)
if [ "$TAURI_ANDROID_BUILD" = "true" ]; then
export CC="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
export AR="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar"
echo "Global CC and AR set for Android build"
fi
# Create symlinks for Android tools if they don't exist
mkdir -p ~/.local/bin
if [ ! -f ~/.local/bin/aarch64-linux-android-ranlib ]; then
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ranlib ~/.local/bin/aarch64-linux-android-ranlib
fi
if [ ! -f ~/.local/bin/aarch64-linux-android-clang ]; then
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang ~/.local/bin/aarch64-linux-android-clang
fi
if [ ! -f ~/.local/bin/aarch64-linux-android-clang++ ]; then
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang++ ~/.local/bin/aarch64-linux-android-clang++
fi
# Fix the broken clang symlinks by ensuring base clang is available
if [ ! -f ~/.local/bin/clang ]; then
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/clang ~/.local/bin/clang
fi
if [ ! -f ~/.local/bin/clang++ ]; then
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/clang++ ~/.local/bin/clang++
fi
# Create symlinks for target-specific ar tools
if [ ! -f ~/.local/bin/aarch64-linux-android-ar ]; then
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar ~/.local/bin/aarch64-linux-android-ar
fi
export PATH="$HOME/.local/bin:$PATH"
echo "Android environment configured:"
echo "ANDROID_HOME: $ANDROID_HOME"
echo "ANDROID_NDK_ROOT: $ANDROID_NDK_ROOT"
echo "PATH includes NDK toolchain: $(echo $PATH | grep -o "ndk.*bin" || echo "NOT FOUND")"
# Verify required tools
echo -e "\nChecking required tools:"
which adb && echo "✅ adb found" || echo "❌ adb not found"
which emulator && echo "✅ emulator found" || echo "❌ emulator not found"
which $CC_aarch64_linux_android && echo "✅ Android clang found" || echo "❌ Android clang not found"
# Show available AVDs
echo -e "\nAvailable Android Virtual Devices:"
emulator -list-avds 2>/dev/null || echo "No AVDs found"
# Execute the provided command
if [ "$1" ]; then
echo -e "\nExecuting: $@"
exec "$@"
fi

View File

@ -27,11 +27,13 @@
"devDependencies": {
"@npmcli/arborist": "^7.1.0",
"@types/node": "^22.10.0",
"@types/react": "19.1.2",
"@vitest/coverage-v8": "^2.1.8",
"@vitest/ui": "^2.1.8",
"eslint": "8.57.0",
"happy-dom": "^15.11.6",
"pacote": "^21.0.0",
"react": "19.0.0",
"request": "^2.88.2",
"request-progress": "^3.0.0",
"rimraf": "^6.0.1",
@ -44,5 +46,8 @@
"rxjs": "^7.8.1",
"ulidx": "^2.3.0"
},
"peerDependencies": {
"react": "19.0.0"
},
"packageManager": "yarn@4.5.3"
}

View File

@ -10,7 +10,7 @@ export default defineConfig([
sourcemap: true,
},
platform: 'browser',
external: ['path'],
external: ['path', 'react', 'react-dom', 'react/jsx-runtime'],
define: {
NODE: JSON.stringify(`${pkgJson.name}/${pkgJson.node}`),
VERSION: JSON.stringify(pkgJson.version),

View File

@ -250,4 +250,4 @@ describe('ConversationalExtension', () => {
expect(retrievedAssistant.modelId).toBe('')
})
})
})

View File

@ -131,4 +131,4 @@ describe('LocalOAIEngine', () => {
expect(engine.loadedModel).toBeUndefined()
})
})
})
})

View File

@ -96,4 +96,4 @@ describe('MCPExtension', () => {
expect(healthy).toBe(true)
})
})
})
})

View File

@ -1,5 +1,6 @@
import { MCPInterface, MCPTool, MCPToolCallResult } from '../../types'
import { MCPInterface, MCPTool, MCPToolCallResult, MCPToolComponentProps } from '../../types'
import { BaseExtension, ExtensionTypeEnum } from '../extension'
import type { ComponentType } from 'react'
/**
* MCP (Model Context Protocol) extension for managing tools and server communication.
@ -18,4 +19,16 @@ export abstract class MCPExtension extends BaseExtension implements MCPInterface
abstract getConnectedServers(): Promise<string[]>
abstract refreshTools(): Promise<void>
abstract isHealthy(): Promise<boolean>
}
/**
* Optional method to provide a custom UI component for tools
* @returns A React component or null if no custom component is provided
*/
getToolComponent?(): ComponentType<MCPToolComponentProps> | null
/**
* Optional method to get the list of tool names that should be disabled by default
* @returns Array of tool names that should be disabled by default for new users
*/
getDefaultDisabledTools?(): Promise<string[]>
}

View File

@ -131,4 +131,4 @@ describe('ModelManager', () => {
expect(modelManager.models.get('model-2')).toEqual(model2)
})
})
})
})

View File

@ -16,4 +16,4 @@ if (!window.core) {
})
}
// Add any other global mocks needed for core tests
// Add any other global mocks needed for core tests

View File

@ -1,2 +1,2 @@
export * from './mcpEntity'
export * from './mcpInterface'
export * from './mcpInterface'

View File

@ -21,4 +21,18 @@ export interface MCPServerInfo {
name: string
connected: boolean
tools?: MCPTool[]
}
}
/**
* Props for MCP tool UI components
*/
export interface MCPToolComponentProps {
/** List of available MCP tools */
tools: MCPTool[]
/** Function to check if a specific tool is currently enabled */
isToolEnabled: (toolName: string) => boolean
/** Function to toggle a tool's enabled/disabled state */
onToolToggle: (toolName: string, enabled: boolean) => void
}

View File

@ -29,4 +29,4 @@ export interface MCPInterface {
* Check if MCP service is healthy
*/
isHealthy(): Promise<boolean>
}
}

View File

@ -112,6 +112,12 @@
/docs/remote-models/openrouter /docs/desktop/remote-models/openrouter 302
/docs/server-examples/llmcord /docs/desktop/server-examples/llmcord 302
/docs/server-examples/tabby /docs/desktop/server-examples/tabby 302
/docs/built-in/tensorrt-llm /docs/desktop/llama-cpp 302
/docs/desktop/docs/desktop/linux /docs/desktop/install/linux 302
/windows /docs/desktop/install/windows 302
/docs/quickstart /docs/ 302
/docs/desktop/mac /docs/desktop/install/mac 302
/handbook/open-superintelligence /handbook/why/open-superintelligence 302
/guides/integrations/continue/ /docs/desktop/server-examples/continue-dev 302
/continue-dev /docs/desktop/server-examples/continue-dev 302
@ -130,4 +136,4 @@
/local-server/troubleshooting /docs/desktop/troubleshooting 302
/mcp /docs/desktop/mcp 302
/quickstart /docs/desktop/quickstart 302
/server-examples/continue-dev /docs/desktop/server-examples/continue-dev 302
/server-examples/continue-dev /docs/desktop/server-examples/continue-dev 302

View File

@ -6,7 +6,7 @@ const camelCase = (str) => {
return str.replace(/[-_](\w)/g, (_, c) => c.toUpperCase())
}
const categories = ['building-jan', 'research']
const categories = ['building-jan', 'research', 'guides']
/**
* @param {import("plop").NodePlopAPI} plop

Binary file not shown.

After

Width:  |  Height:  |  Size: 325 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 355 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 328 KiB

View File

Before

Width:  |  Height:  |  Size: 634 KiB

After

Width:  |  Height:  |  Size: 634 KiB

View File

Before

Width:  |  Height:  |  Size: 725 KiB

After

Width:  |  Height:  |  Size: 725 KiB

View File

Before

Width:  |  Height:  |  Size: 235 KiB

After

Width:  |  Height:  |  Size: 235 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 402 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 320 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 395 KiB

View File

Before

Width:  |  Height:  |  Size: 1.7 MiB

After

Width:  |  Height:  |  Size: 1.7 MiB

View File

Before

Width:  |  Height:  |  Size: 138 KiB

After

Width:  |  Height:  |  Size: 138 KiB

View File

Before

Width:  |  Height:  |  Size: 176 KiB

After

Width:  |  Height:  |  Size: 176 KiB

View File

Before

Width:  |  Height:  |  Size: 673 KiB

After

Width:  |  Height:  |  Size: 673 KiB

View File

@ -1,125 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:news="http://www.google.com/schemas/sitemap-news/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml" xmlns:mobile="http://www.google.com/schemas/sitemap-mobile/1.0" xmlns:image="http://www.google.com/schemas/sitemap-image/1.1" xmlns:video="http://www.google.com/schemas/sitemap-video/1.1">
<url><loc>https://jan.ai</loc><lastmod>2025-09-24T03:40:05.491Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/api-reference</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/api-reference/api-reference</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/api-reference/architecture</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/api-reference/configuration</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/api-reference/development</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/api-reference/installation</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/blog</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2023-12-21-faster-inference-across-platform</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-01-16-settings-options-right-panel</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-01-29-local-api-server</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-02-05-jan-data-folder</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-02-10-jan-is-more-stable</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-02-26-home-servers-with-helm</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-03-06-ui-revamp-settings</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-03-11-import-models</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-03-19-nitro-tensorrt-llm-extension</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-04-02-groq-api-integration</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-04-15-new-mistral-extension</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-04-25-llama3-command-r-hugginface</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-05-20-llamacpp-upgrade-new-remote-models</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-05-28-cohere-aya-23-8b-35b-phi-3-medium</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-06-21-nvidia-nim-support</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-07-15-claude-3-5-support</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-09-01-llama3-1-gemma2-support</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-09-17-improved-cpu-performance</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-10-24-jan-stable</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-11-22-jan-bugs</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-11.14-jan-supports-qwen-coder</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-12-03-jan-is-faster</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-12-05-jan-hot-fix-mac</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2024-12-30-jan-new-privacy</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-01-06-key-issues-resolved</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-01-23-deepseek-r1-jan</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-02-18-advanced-llama.cpp-settings</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-03-14-jan-security-patch</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-05-14-jan-qwen3-patch</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-06-19-jan-ui-revamp</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-06-26-jan-nano-mcp</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-07-17-responsive-ui</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-07-31-llamacpp-tutorials</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-08-07-gpt-oss</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-08-14-general-improvs</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-08-28-image-support</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/changelog/2025-09-18-auto-optimize-vision-imports</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/api-server</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/assistants</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/data-folder</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/install/linux</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/install/mac</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/install/windows</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/jan-models/jan-nano-128</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/jan-models/jan-nano-32</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/jan-models/jan-v1</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/jan-models/lucy</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/llama-cpp</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/llama-cpp-server</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/manage-models</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/mcp</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/mcp-examples/browser/browserbase</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/mcp-examples/data-analysis/e2b</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/mcp-examples/data-analysis/jupyter</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/mcp-examples/deepresearch/octagon</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/mcp-examples/design/canva</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/mcp-examples/productivity/linear</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/mcp-examples/productivity/todoist</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/mcp-examples/search/exa</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/mcp-examples/search/serper</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/model-parameters</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/privacy</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/privacy-policy</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/quickstart</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/remote-models/anthropic</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/remote-models/cohere</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/remote-models/google</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/remote-models/groq</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/remote-models/huggingface</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/remote-models/mistralai</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/remote-models/openai</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/remote-models/openrouter</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/server-examples/continue-dev</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/server-examples/llmcord</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/server-examples/n8n</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/server-examples/tabby</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/server-settings</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/server-troubleshooting</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/settings</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/desktop/troubleshooting</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/api-reference</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/api-reference-administration</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/api-reference-authentication</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/api-reference-chat</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/api-reference-chat-conversations</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/api-reference-conversations</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/api-reference-jan-responses</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/api-reference-jan-server</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/architecture</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/configuration</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/development</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/installation</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/docs/server/overview</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/download</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/handbook</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/handbook/betting-on-open-source</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/handbook/open-superintelligence</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/post/benchmarking-nvidia-tensorrt-llm</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/post/bitdefender</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/post/data-is-moat</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/post/deepresearch</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/post/deepseek-r1-locally</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/post/jan-v1-for-research</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/post/offline-chatgpt-alternative</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/post/qwen3-settings</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/post/rag-is-not-enough</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/post/run-ai-models-locally</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/post/run-gpt-oss-locally</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/privacy</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
<url><loc>https://jan.ai/support</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
</urlset>

View File

@ -19,6 +19,10 @@ const Blog = () => {
name: 'Research',
id: 'research',
},
{
name: 'Guides',
id: 'guides',
},
]
return (

View File

@ -16,7 +16,10 @@ const FOOTER_MENUS: FooterMenu[] = [
{
title: 'Company',
links: [
{ name: 'Vision', href: '/', comingSoon: true },
{
name: 'Open Superintelligence',
href: '/handbook/why/open-superintelligence',
},
{ name: 'Handbook', href: '/handbook' },
{ name: 'Community', href: 'https://discord.com/invite/FTk2MvZwJH' },
{ name: 'Careers', href: 'https://menlo.bamboohr.com/careers' },

View File

@ -4,7 +4,7 @@ import { useRouter } from 'next/router'
import { cn } from '@/lib/utils'
import { FaDiscord, FaGithub } from 'react-icons/fa'
import { FiDownload } from 'react-icons/fi'
import { FaXTwitter } from 'react-icons/fa6'
import { FaXTwitter, FaLinkedinIn } from 'react-icons/fa6'
import { Button } from './ui/button'
import LogoJanSVG from '@/assets/icons/logo-jan.svg'
@ -113,6 +113,43 @@ const Navbar = ({ noScroll }: { noScroll?: boolean }) => {
</Button>
</a>
</li>
<li>
<div className={cn('flex gap-4', !isLanding && '!text-black')}>
<a
href="https://discord.com/invite/FTk2MvZwJH"
target="_blank"
rel="noopener noreferrer"
className="rounded-lg flex items-center justify-center"
>
<FaDiscord className="size-5" />
</a>
<a
href="https://twitter.com/jandotai"
target="_blank"
rel="noopener noreferrer"
className="rounded-lg flex items-center justify-center"
>
<FaXTwitter className="size-5" />
</a>
<a
href="https://linkedin.com/company/opensuperintelligence"
target="_blank"
rel="noopener noreferrer"
className="rounded-lg flex items-center justify-center"
>
<FaLinkedinIn className="size-5" />
</a>
<a
href="https://github.com/menloresearch/jan"
target="_blank"
rel="noopener noreferrer"
className="rounded-lg flex items-center justify-center"
>
<FaGithub className="size-5" />
</a>
</div>
</li>
</ul>
</nav>
@ -232,6 +269,14 @@ const Navbar = ({ noScroll }: { noScroll?: boolean }) => {
>
<FaXTwitter className="size-5" />
</a>
<a
href="https://linkedin.com/company/opensuperintelligence"
target="_blank"
rel="noopener noreferrer"
className="text-black rounded-lg flex items-center justify-center"
>
<FaLinkedinIn className="size-5" />
</a>
<a
href="https://github.com/menloresearch/jan"
target="_blank"

View File

@ -0,0 +1,283 @@
import React, { useState, useEffect, useCallback } from 'react'
import { AlertCircle, CheckCircle, Clock, RefreshCw } from 'lucide-react'
interface StatusData {
status:
| 'operational'
| 'degraded'
| 'partial_outage'
| 'major_outage'
| 'under_maintenance'
| 'unknown'
lastUpdated: string
incidents: Array<{
name: string
status: string
impact: string
}>
}
// Renders the icon that corresponds to a service status.
// Any unrecognized status falls back to a neutral gray alert icon.
const StatusIcon = ({ status }: { status: string }) => {
  if (status === 'operational') {
    return <CheckCircle className="w-5 h-5 text-green-500" />
  }
  if (status === 'degraded' || status === 'partial_outage') {
    return <AlertCircle className="w-5 h-5 text-yellow-500" />
  }
  if (status === 'major_outage') {
    return <AlertCircle className="w-5 h-5 text-red-500" />
  }
  if (status === 'under_maintenance') {
    return <Clock className="w-5 h-5 text-blue-500" />
  }
  return <AlertCircle className="w-5 h-5 text-gray-500" />
}
// Maps a service status to the Tailwind classes for its badge.
// Unknown statuses get the neutral gray badge.
const getStatusColor = (status: string) => {
  const badgeClasses: Record<string, string> = {
    operational:
      'bg-green-100 text-green-800 border-green-200 dark:bg-green-900/20 dark:text-green-300 dark:border-green-800',
    degraded:
      'bg-yellow-100 text-yellow-800 border-yellow-200 dark:bg-yellow-900/20 dark:text-yellow-300 dark:border-yellow-800',
    partial_outage:
      'bg-yellow-100 text-yellow-800 border-yellow-200 dark:bg-yellow-900/20 dark:text-yellow-300 dark:border-yellow-800',
    major_outage:
      'bg-red-100 text-red-800 border-red-200 dark:bg-red-900/20 dark:text-red-300 dark:border-red-800',
    under_maintenance:
      'bg-blue-100 text-blue-800 border-blue-200 dark:bg-blue-900/20 dark:text-blue-300 dark:border-blue-800',
  }
  return (
    badgeClasses[status] ??
    'bg-gray-100 text-gray-800 border-gray-200 dark:bg-gray-900/20 dark:text-gray-300 dark:border-gray-800'
  )
}
// Maps a service status to its human-readable headline.
// Unknown statuses read as 'Status Unknown'.
const getStatusText = (status: string) => {
  const statusLabels: Record<string, string> = {
    operational: 'All Systems Operational',
    degraded: 'Degraded Performance',
    partial_outage: 'Partial Service Outage',
    major_outage: 'Major Service Outage',
    under_maintenance: 'Under Maintenance',
  }
  return statusLabels[status] ?? 'Status Unknown'
}
/**
 * Maps a statuspage.io status indicator ('none' | 'minor' | 'major' |
 * 'critical' | 'maintenance') onto our internal StatusData['status'] values.
 *
 * Hoisted to module scope: it is a pure function, so defining it inside the
 * component recreated it on every render and (because fetchStatus closed over
 * it with an empty useCallback dependency list) violated exhaustive-deps.
 */
const mapOpenAIStatusClient = (indicator: string): StatusData['status'] => {
  switch (indicator.toLowerCase()) {
    case 'none':
    case 'operational':
      return 'operational'
    case 'minor':
      return 'degraded'
    case 'major':
      return 'partial_outage'
    case 'critical':
      return 'major_outage'
    case 'maintenance':
      return 'under_maintenance'
    default:
      // Unrecognized indicators are treated as operational rather than
      // 'unknown' so the UI does not alarm users on unexpected values.
      return 'operational' as const
  }
}

/**
 * Card that displays the live OpenAI service status.
 *
 * Fetches status.openai.com through a CORS proxy (the status API does not
 * serve CORS headers), retries via an alternative proxy, and finally degrades
 * to a static "operational" fallback. Polls every 2 minutes while mounted.
 */
export const OpenAIStatusChecker: React.FC = () => {
  // Latest transformed status payload; null until the first fetch resolves.
  const [statusData, setStatusData] = useState<StatusData | null>(null)
  const [loading, setLoading] = useState(true)
  // Non-null when every fetch strategy failed; the UI then shows fallback data.
  const [error, setError] = useState<string | null>(null)
  const [lastRefresh, setLastRefresh] = useState<Date>(new Date())

  const fetchStatus = useCallback(async () => {
    setLoading(true)
    setError(null)

    try {
      console.log('Fetching real OpenAI status...')

      // Use CORS proxy to fetch real OpenAI status
      const proxyUrl = 'https://api.allorigins.win/get?url='
      const targetUrl = 'https://status.openai.com/api/v2/status.json'

      const response = await fetch(proxyUrl + encodeURIComponent(targetUrl))

      if (!response.ok) {
        throw new Error(`Proxy returned ${response.status}`)
      }

      // allorigins wraps the upstream response body as a JSON string in
      // `contents`, so it must be parsed a second time.
      const proxyData = await response.json()
      const openaiData = JSON.parse(proxyData.contents)

      console.log('Real OpenAI data received:', openaiData)

      // Transform real OpenAI data to our format
      const transformedData: StatusData = {
        status: mapOpenAIStatusClient(
          openaiData.status?.indicator || 'operational'
        ),
        lastUpdated: openaiData.page?.updated_at || new Date().toISOString(),
        incidents: (openaiData.incidents || []).slice(0, 3),
      }

      setStatusData(transformedData)
      setLastRefresh(new Date())
      console.log('✅ Real OpenAI status loaded successfully!')
    } catch (err) {
      console.error('Failed to fetch real status:', err)

      // Fallback: try alternative proxy
      try {
        console.log('Trying alternative proxy...')
        // NOTE(review): the public cors-anywhere.herokuapp.com demo instance
        // is rate-limited/disabled for anonymous use, so this branch will
        // usually fail — confirm or replace with a self-hosted proxy.
        const altResponse = await fetch(
          `https://cors-anywhere.herokuapp.com/https://status.openai.com/api/v2/summary.json`
        )

        if (altResponse.ok) {
          const altData = await altResponse.json()
          setStatusData({
            status: mapOpenAIStatusClient(
              altData.status?.indicator || 'operational'
            ),
            lastUpdated: new Date().toISOString(),
            incidents: [],
          })
          setLastRefresh(new Date())
          console.log('✅ Alternative proxy worked!')
          return
        }
      } catch (altErr) {
        console.log('Alternative proxy also failed')
      }

      // Final fallback: surface the error but keep usable (assumed-good) data.
      setError('Unable to fetch real-time status')
      setStatusData({
        status: 'operational' as const,
        lastUpdated: new Date().toISOString(),
        incidents: [],
      })
      setLastRefresh(new Date())
      console.log('Using fallback status')
    } finally {
      setLoading(false)
    }
  }, [])

  useEffect(() => {
    fetchStatus()
    // Refresh every 2 minutes for more real-time updates
    const interval = setInterval(fetchStatus, 2 * 60 * 1000)
    return () => clearInterval(interval)
  }, [fetchStatus])

  const handleRefresh = () => {
    fetchStatus()
  }

  // First load: no data yet, show the spinner card.
  if (loading && !statusData) {
    return (
      <div className="bg-white dark:bg-gray-800 rounded-xl shadow-lg p-6 border border-gray-200 dark:border-gray-700">
        <div className="flex items-center justify-center space-x-3">
          <RefreshCw className="w-6 h-6 text-blue-500 animate-spin" />
          <span className="text-lg font-medium text-gray-700 dark:text-gray-300">
            Checking OpenAI Status...
          </span>
        </div>
      </div>
    )
  }

  // BUG FIX: this previously read `if (error)`, but the catch block above
  // always sets fallback statusData before setting error, which made the
  // fallback UI (and its "Using fallback status" footer below) unreachable.
  // Show the error card only when we truly have no data to render.
  if (error && !statusData) {
    return (
      <div className="bg-white dark:bg-gray-800 rounded-xl shadow-lg p-6 border border-red-200 dark:border-red-800">
        <div className="flex items-center justify-between">
          <div className="flex items-center space-x-3">
            <AlertCircle className="w-6 h-6 text-red-500" />
            <div>
              <h3 className="text-lg font-semibold text-red-800 dark:text-red-300">
                Unable to Check Status
              </h3>
              <p className="text-red-600 dark:text-red-400">{error}</p>
            </div>
          </div>
          <button
            onClick={handleRefresh}
            className="px-4 py-2 bg-red-100 hover:bg-red-200 dark:bg-red-900/20 dark:hover:bg-red-900/40 text-red-700 dark:text-red-300 rounded-lg font-medium transition-colors"
          >
            Retry
          </button>
        </div>
      </div>
    )
  }

  return (
    <div className="bg-white dark:bg-gray-800 rounded-xl shadow-lg p-6 border border-gray-200 dark:border-gray-700 my-6">
      <div className="flex items-center justify-between mb-4">
        <div className="flex items-center space-x-3">
          <StatusIcon status={statusData?.status || 'unknown'} />
          <div>
            <h3 className="text-xl font-bold text-gray-900 dark:text-gray-100">
              OpenAI Services
            </h3>
            <p className="text-sm text-gray-600 dark:text-gray-400">
              Last updated: {new Date(lastRefresh).toLocaleTimeString()}
            </p>
          </div>
        </div>
        <button
          onClick={handleRefresh}
          disabled={loading}
          className="p-2 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg transition-colors disabled:opacity-50"
        >
          <RefreshCw
            className={`w-5 h-5 text-gray-600 dark:text-gray-400 ${loading ? 'animate-spin' : ''}`}
          />
        </button>
      </div>

      <div
        className={`inline-flex items-center px-4 py-2 rounded-full text-sm font-semibold border ${getStatusColor(statusData?.status || 'unknown')}`}
      >
        {getStatusText(statusData?.status || 'unknown')}
      </div>

      {/* All three services mirror the single page-level status indicator;
          the status.json endpoint does not expose per-component detail. */}
      <div className="mt-4 p-4 bg-gray-50 dark:bg-gray-700 rounded-lg">
        <h4 className="font-semibold text-gray-900 dark:text-gray-100 mb-2">
          Quick Status Check
        </h4>
        <div className="grid grid-cols-1 sm:grid-cols-3 gap-3 text-sm">
          <div className="flex items-center justify-between">
            <span className="text-gray-600 dark:text-gray-400">ChatGPT</span>
            <StatusIcon status={statusData?.status || 'unknown'} />
          </div>
          <div className="flex items-center justify-between">
            <span className="text-gray-600 dark:text-gray-400">API</span>
            <StatusIcon status={statusData?.status || 'unknown'} />
          </div>
          <div className="flex items-center justify-between">
            <span className="text-gray-600 dark:text-gray-400">Playground</span>
            <StatusIcon status={statusData?.status || 'unknown'} />
          </div>
        </div>
      </div>

      <div className="mt-4 text-xs text-gray-500 dark:text-gray-400 text-center">
        {error
          ? 'Using fallback status • '
          : '🟢 Real-time data from OpenAI • '}
        Updated: {new Date(lastRefresh).toLocaleTimeString()}
        <br />
        <a
          href="/post/is-chatgpt-down-use-jan#-is-chatgpt-down"
          className="text-blue-500 hover:text-blue-600 dark:text-blue-400 dark:hover:text-blue-300 underline"
        >
          View detailed status guide
        </a>
      </div>
    </div>
  )
}

View File

@ -3,7 +3,7 @@ title: Installation
description: Install and deploy Jan Server on Kubernetes using minikube and Helm.
---
## Prerequisites
# Prerequisites
Jan Server requires the following tools installed on your system:

View File

@ -9,7 +9,7 @@
},
"desktop": {
"type": "page",
"title": "Jan Desktop & Mobile"
"title": "Jan Desktop"
},
"server": {
"type": "page",

View File

@ -42,6 +42,5 @@
},
"settings": "Settings",
"data-folder": "Jan Data Folder",
"troubleshooting": "Troubleshooting",
"privacy": "Privacy"
"troubleshooting": "Troubleshooting"
}

View File

@ -22,228 +22,52 @@ keywords:
import { Callout } from 'nextra/components'
import FAQBox from '@/components/FaqBox'
# Jan
![Jan's Cover Image](./_assets/jan-app-new.png)
## Jan's Goal
> We're working towards open superintelligence to make a viable open-source alternative to platforms like ChatGPT
and Claude that anyone can own and run.
## What is Jan Today
Jan is an open-source AI platform that runs on your hardware. We believe AI should be in the hands of many, not
controlled by a few tech giants.
Today, Jan is:
- **A desktop app** that runs AI models locally or connects to cloud providers
- **A model hub** making the latest open-source models accessible
- **A connector system** that lets AI interact with real-world tools via MCP
Tomorrow, Jan aims to be a complete ecosystem where open models rival or exceed closed alternatives.
# Overview
<Callout type="info">
We're building this with the open-source AI community, using the best available tools, and sharing everything
we learn along the way.
We're building [Open Superintelligence](https://jan.ai/handbook/open-superintelligence) together.
</Callout>
## The Jan Ecosystem
Jan is an open-source replacement for ChatGPT:
- AI Models: Use AI models with agentic capabilities
- [Open-source Models](/docs/desktop/manage-models): Run open-source locally
- [Cloud Models](/docs/desktop/remote-models/anthropic): Connect to remote models with API keys
- [Assistants](/docs/desktop/assistants): Create custom AI assistants
- [MCP Servers](/docs/desktop/mcp): Integrate MCP Servers to give agentic capabilities to AI models
- Jan Hub: Browse, install, and [manage models](/docs/desktop/manage-models)
- Local API Server: Expose an [OpenAI-compatible API](/docs/desktop/api-server) from your own machine or server
### Jan Apps
**Available Now:**
- **Desktop**: Full-featured AI workstation for Windows, Mac, and Linux
## Product Suite
**Coming Late 2025:**
- **Mobile**: Jan on your phone
- **Web**: Browser-based access at jan.ai
- **Server**: Self-hosted for teams
- **Extensions**: Browser extension for Chrome-based browsers
Jan is a full [product suite](https://en.wikipedia.org/wiki/Software_suite) that offers an alternative to Big AI:
- [Jan Desktop](/docs/desktop/quickstart): macOS, Windows, and Linux apps with offline mode
- [Jan Web](https://chat.jan.ai): Jan on browser, a direct alternative to chatgpt.com
- Jan Mobile: iOS and Android apps (Coming Soon)
- [Jan Server](/docs/server): deploy locally, in your cloud, or on-prem
- [Jan Models](/docs/models): Open-source models optimized for deep research, tool use, and reasoning
### Jan Model Hub
Making open-source AI accessible to everyone:
- **Easy Downloads**: One-click model installation
- **Jan Models**: Our own models optimized for local use
- **Jan-v1**: 4B reasoning model specialized in web search
- **Research Models**
- **Jan-Nano (32k/128k)**: 4B model for web search with MCP tools
- **Lucy**: 1.7B mobile-optimized for web search
- **Community Models**: Any GGUF from Hugging Face works in Jan
- **Cloud Models**: Connect your API keys for OpenAI, Anthropic, Gemini, and more
### Extending Jan (Coming Soon)
Jan helps you customize and align Open Superintelligence:
- Jan Connectors: Extend Jan with integrations
- Jan Studio: Fine-tune, align, and guardrail
- Evals: Benchmark models across industries, regions, and alignment dimensions
## Principles
### Jan Connectors Hub
Connect AI to the tools you use daily via [Model Context Protocol](./mcp):
- [Open source](https://www.redhat.com/en/blog/open-source-culture-9-core-principles-and-values): [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) licensed, built in public.
- No [vendor lock-in](https://en.wikipedia.org/wiki/Vendor_lock-in): Switch freely between local and frontier models.
- [Right to Repair](https://en.wikipedia.org/wiki/Right_to_repair): Inspect, audit, and modify your AI stack.
**Creative & Design:**
- **Canva**: Generate and edit designs
**Data & Analysis:**
- **Jupyter**: Run Python notebooks
- **E2B**: Execute code in sandboxes
**Web & Search:**
- **Browserbase & Browser Use**: Browser automation
- **Exa, Serper, Perplexity**: Advanced web search
- **Octagon**: Deep research capabilities
**Productivity:**
- **Linear**: Project management
- **Todoist**: Task management
## Core Features
- **Run Models Locally**: Download any GGUF model from Hugging Face, use OpenAI's gpt-oss models,
or connect to cloud providers
- **OpenAI-Compatible API**: Local server at `localhost:1337` works with tools like
[Continue](./server-examples/continue-dev) and [Cline](https://cline.bot/)
- **Extend with MCP Tools**: Browser automation, web search, data analysis, and design tools, all
through natural language
- **Your Choice of Infrastructure**: Run on your laptop, self-host on your servers (soon), or use
cloud when you need it
## Philosophy
Jan is built to be user-owned:
- **Open Source**: Apache 2.0 license
- **Local First**: Your data stays on your device. Internet is optional
- **Privacy Focused**: We don't collect or sell user data. See our [Privacy Policy](./privacy)
- **No Lock-in**: Export your data anytime. Use any model. Switch between local and cloud
<Callout>
The best AI is the one you control. Not the one that others control for you.
</Callout>
## The Path Forward
### What Works Today
- Run powerful models locally on consumer hardware
- Connect to any cloud provider with your API keys
- Use MCP tools for real-world tasks
- Access transparent model evaluations
### What We're Building
- More specialized models that excel at specific tasks
- Expanded app ecosystem (mobile, web, extensions)
- Richer connector ecosystem
- An evaluation framework to build better models
### The Long-Term Vision
We're working towards open superintelligence where:
- Open models match or exceed closed alternatives
- Anyone can run powerful AI on their own hardware
- The community drives innovation, not corporations
- AI capabilities are owned by users, not rented
<Callout type="warning">
This is an ambitious goal without a guaranteed path. We're betting on the open-source community, improved
hardware, and better techniques, but we're honest that this is a journey, not a destination we've reached.
</Callout>
## Quick Start
1. [Download Jan](./quickstart) for your operating system
2. Choose a model - download locally or add cloud API keys
3. Start chatting or connect tools via MCP
4. Build with our [local API](./api-server)
Jan grows through contribution. It is shaped by many and belongs to everyone who uses it.
## Acknowledgements
Jan is built on the shoulders of giants:
- [Llama.cpp](https://github.com/ggerganov/llama.cpp) for inference
- [Model Context Protocol](https://modelcontextprotocol.io) for tool integration
- The open-source community that makes this possible
> Good artists copy, great artists steal.
## FAQs
Jan exists because we've borrowed, learned, and built on the work of others.
<FAQBox title="What is Jan?">
Jan is an open-source AI platform working towards a viable alternative to Big Tech AI. Today it's a desktop app that runs models locally or connects to cloud providers. Tomorrow it aims to be a complete ecosystem rivaling platforms like ChatGPT and Claude.
</FAQBox>
<FAQBox title="How is this different from other AI platforms?">
Other platforms are models behind APIs you rent. Jan is a complete AI ecosystem you own. Run any model, use real tools through MCP, keep your data private, and never pay subscriptions for local use.
</FAQBox>
<FAQBox title="What models can I use?">
**Jan Models:**
- Jan-Nano (32k/128k) - Research and analysis with MCP integration
- Lucy - Mobile-optimized search (1.7B)
- Jan-v1 - Reasoning and tool use (4B)
**Open Source:**
- OpenAI's gpt-oss models (120b and 20b)
- Any GGUF model from Hugging Face
**Cloud (with your API keys):**
- OpenAI, Anthropic, Mistral, Groq, and more
</FAQBox>
<FAQBox title="What are MCP tools?">
MCP (Model Context Protocol) lets AI interact with real applications. Instead of just generating text, your AI can create designs in Canva, analyze data in Jupyter, browse the web, and execute code - all through conversation.
</FAQBox>
<FAQBox title="Is Jan compatible with my system?">
**Supported OS**:
- [Windows 10+](/docs/desktop/install/windows#compatibility)
- [macOS 12+](/docs/desktop/install/mac#compatibility)
- [Linux (Ubuntu 20.04+)](/docs/desktop/install/linux)
**Hardware**:
- Minimum: 8GB RAM, 10GB storage
- Recommended: 16GB RAM, GPU (NVIDIA/AMD/Intel/Apple), 50GB storage
</FAQBox>
<FAQBox title="How realistic is 'open superintelligence'?">
Honestly? It's ambitious and uncertain. We believe the combination of rapidly improving open models, better consumer hardware, community innovation, and specialized models working together can eventually rival closed platforms. But this is a multi-year journey with no guarantees. What we can guarantee is that we'll keep building in the open, with the community, towards this goal.
</FAQBox>
<FAQBox title="What can Jan actually do today?">
Right now, Jan can:
- Run models like Llama, Mistral, and our own Jan models locally
- Connect to cloud providers if you want more power
- Use MCP tools to create designs, analyze data, browse the web, and more
- Work completely offline once models are downloaded
- Provide an OpenAI-compatible API for developers
</FAQBox>
<FAQBox title="Is Jan really free?">
**Local use**: Always free, no catches
**Cloud models**: You pay providers directly (we add no markup)
**Jan cloud**: Optional paid services coming 2025
The core platform will always be free and open source.
</FAQBox>
<FAQBox title="How does Jan protect privacy?">
- Runs 100% offline once models are downloaded
- All data stored locally in [Jan Data Folder](/docs/desktop/data-folder)
- No telemetry without explicit consent
- Open source code you can audit
<Callout type="warning">
When using cloud providers through Jan, their privacy policies apply.
</Callout>
</FAQBox>
<FAQBox title="Can I self-host Jan?">
Yes. Download directly or build from [source](https://github.com/menloresearch/jan). Jan Server for production deployments coming late 2025.
</FAQBox>
<FAQBox title="When will mobile/web versions launch?">
- **Jan Web**: Beta late 2025
- **Jan Mobile**: Late 2025
- **Jan Server**: Late 2025
All versions will sync seamlessly.
</FAQBox>
<FAQBox title="How can I contribute?">
- Code: [GitHub](https://github.com/menloresearch/jan)
- Community: [Discord](https://discord.gg/FTk2MvZwJH)
- Testing: Help evaluate models and report bugs
- Documentation: Improve guides and tutorials
</FAQBox>
<FAQBox title="Are you hiring?">
Yes! We love hiring from our community. Check [Careers](https://menlo.bamboohr.com/careers).
</FAQBox>
- [llama.cpp](https://github.com/ggerganov/llama.cpp) and [GGML](https://github.com/ggerganov/ggml) for efficient inference
- [r/LocalLLaMA](https://www.reddit.com/r/LocalLLaMA/) for ideas, feedback, and debate
- [Model Context Protocol](https://modelcontextprotocol.io) for MCP integrations
- [PostHog](https://posthog.com/docs) for docs inspiration
- The open-source community for contributions, bug reports, and improvements

View File

@ -1,11 +1,12 @@
---
title: Linux
description: Get started quickly with Jan, an AI chat application that runs 100% offline on your desktop & mobile (*coming soon*).
description: Download Jan on Linux to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline.
keywords:
[
Jan,
Customizable Intelligence, LLM,
local AI,
Jan on Linux,
privacy focus,
free and open source,
private and offline,
@ -18,15 +19,17 @@ keywords:
installation,
"desktop"
]
twitter:
card: summary_large_image
site: "@jandotai"
title: "Jan on Linux"
description: "Download Jan on Linux to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline."
---
import FAQBox from '@/components/FaqBox'
import { Tabs, Callout, Steps } from 'nextra/components'
import { Settings } from 'lucide-react'
# Linux Installation
Instructions for installing Jan on Linux.
@ -244,7 +247,7 @@ export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda/lib64
### Step 2: Enable GPU Acceleration
1. Navigate to **Settings** (<Settings width={16} height={16} style={{display:"inline"}}/>) > **Local Engine** > **Llama.cpp**
2. Select appropriate backend in **llama-cpp Backend**. Details in our [guide](/docs/desktop/local-engines/llama-cpp).
2. Select appropriate backend in **llama-cpp Backend**. Details in our [llama.cpp guide](/docs/desktop/llama-cpp).
<Callout type="info">
CUDA offers better performance than Vulkan.

View File

@ -1,11 +1,11 @@
---
title: Mac
description: Get started quickly with Jan - a local AI that runs on your computer. Install Jan and pick your model to start chatting.
keywords:
description: Download Jan on Mac to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline.
[
Jan,
Customizable Intelligence, LLM,
local AI,
Jan on Mac,
privacy focus,
free and open source,
private and offline,
@ -18,6 +18,11 @@ keywords:
installation,
"desktop"
]
twitter:
card: summary_large_image
site: "@jandotai"
title: "Jan on Mac"
description: "Download Jan on Mac to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline."
---
import { Tabs } from 'nextra/components'

View File

@ -1,10 +1,11 @@
---
title: Windows
description: Run AI models locally on your Windows machine with Jan. Quick setup guide for local inference and chat.
description: Download Jan on Windows to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline.
keywords:
[
Jan,
Customizable Intelligence, LLM,
Jan on Windows,
local AI,
privacy focus,
free and open source,
@ -18,6 +19,11 @@ keywords:
installation,
"desktop"
]
twitter:
card: summary_large_image
site: "@jandotai"
title: "Jan on Windows"
description: "Download Jan on Windows to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline."
---
import { Tabs, Callout, Steps } from 'nextra/components'

View File

@ -59,7 +59,7 @@ The model and its different model variants are fully supported by Jan.
## Using Jan-Nano-32k
**Step 1**
Download Jan from [here](https://jan.ai/docs/desktop/).
Download Jan from [here](https://jan.ai/download/).
**Step 2**
Go to the Hub Tab, search for Jan-Nano-Gguf, and click on the download button to the best model size for your system.
@ -118,8 +118,8 @@ Here are some example queries to showcase Jan-Nano's web search capabilities:
- 4xA6000 for vllm server (inferencing)
- What frontend should I use?
- Jan Beta (recommended) - Minimalistic and polished interface
- Download link: https://jan.ai/docs/desktop/beta
- Jan (recommended)
- Download link: https://jan.ai/download
- Getting Jinja errors in LM Studio?
- Use Qwen3 template from other LM Studio compatible models

View File

@ -90,7 +90,7 @@ Refer to the following documentation to install the Tabby extension on your favo
Tabby offers an [Answer Engine](https://tabby.tabbyml.com/docs/administration/answer-engine/) on the homepage,
which can leverage the Jan LLM and related contexts like code, documentation, and web pages to answer user questions.
Simply open the Tabby homepage at [localhost:8080](http://localhost:8080) and ask your questions.
Simply open the Tabby homepage at http://localhost:8080 and ask your questions.
### IDE Chat Sidebar

View File

@ -108,7 +108,7 @@ You can help improve Jan by sharing anonymous usage data:
2. You can change this setting at any time
<Callout type="info">
Read more about that we collect with opt-in users at [Privacy](/docs/desktop/privacy).
Read more about what we collect from opt-in users at [Privacy](/privacy).
</Callout>
<br/>
@ -141,7 +141,7 @@ This action cannot be undone.
### Jan Data Folder
Jan stores your data locally in your own filesystem in a universal file format. See detailed [Jan Folder Structure](docs/data-folder#folder-structure).
Jan stores your data locally in your own filesystem in a universal file format. See detailed [Jan Folder Structure](/docs/desktop/data-folder#directory-structure).
**1. Open Jan Data Folder**

View File

@ -328,14 +328,14 @@ This command ensures that the necessary permissions are granted for Jan's instal
When you start a chat with a model and encounter a **Failed to Fetch** or **Something's Amiss** error, here are some possible solutions to resolve it:
**1. Check System & Hardware Requirements**
- Hardware dependencies: Ensure your device meets all [hardware requirements](docs/desktop/troubleshooting#step-1-verify-hardware-and-system-requirements)
- OS: Ensure your operating system meets the minimum requirements ([Mac](/docs/desktop/install/mac#minimum-requirements), [Windows](/docs/desktop/install/windows#compatibility), [Linux](/docs/desktop/install/linux#compatibility))
- Hardware dependencies: Ensure your device meets all [hardware requirements](troubleshooting)
- OS: Ensure your operating system meets the minimum requirements ([Mac](https://www.jan.ai/docs/desktop/install/mac#minimum-requirements), [Windows](/windows#compatibility), [Linux](https://www.jan.ai/docs/desktop/install/linux#compatibility))
- RAM: Choose models that use less than 80% of your available RAM
- For 8GB systems: Use models under 6GB
- For 16GB systems: Use models under 13GB
**2. Check Model Parameters**
- In **Engine Settings** in right sidebar, check your `ngl` ([number of GPU layers](/docs/desktop/models/model-parameters#engine-parameters)) setting to see if it's too high
- In **Engine Settings** in right sidebar, check your `ngl` ([number of GPU layers](/docs/desktop/model-parameters)) setting to see if it's too high
- Start with a lower NGL value and increase gradually based on your GPU memory
**3. Port Conflicts**

View File

@ -1,5 +1,4 @@
{
"index": "Overview",
"open-superintelligence": "Open Superintelligence",
"betting-on-open-source": "Betting on Open-Source"
"why": "Why does Jan exist?"
}

View File

@ -18,31 +18,6 @@ Jan's Handbook is a [living document](https://en.wikipedia.org/wiki/Living_docum
## Why does Jan exist?
### [Open Superintelligence](/handbook/open-superintelligence)
Building superintelligence that belongs to everyone, not just a few tech giants. We believe the future of AI should be open, accessible, and owned by the people who use it.
### [Betting on Open-Source](/handbook/betting-on-open-source)
- [Open Superintelligence](/handbook/open-superintelligence) - Building superintelligence that belongs to everyone, not just a few tech giants. We believe the future of AI should be open, accessible, and owned by the people who use it.
- [Betting on Open-Source](/handbook/betting-on-open-source)
Why we're betting on open-source as the future of AI and technology. Open-source has consistently won in the long term, and AI will be no different.
---
## Quick Links
- **For the curious**: Start with [Open Superintelligence](/handbook/open-superintelligence)
- **For developers**: Learn about [Betting on Open-Source](/handbook/betting-on-open-source)
- **For contributors**: Check out our [GitHub](https://github.com/menloresearch/jan) and [Discord](https://discord.gg/FTk2MvZwJH)
## Our North Star
We're building superintelligence that:
- **Works anywhere**: From your laptop to your data center
- **Belongs to you**: Download it, own it, modify it
- **Scales infinitely**: One person or ten thousand, same platform
- **Improves constantly**: Community-driven development
This isn't just about making AI accessible. It's about ensuring the most transformative technology in human history can be owned by those who use it.
---
_"The future of AI isn't about choosing between local or cloud. It's about having both, and everything in between, working perfectly together."_

View File

@ -0,0 +1,4 @@
{
"open-superintelligence": "Why Jan exists",
"betting-on-open-source": "Why we're betting on open-source"
}

View File

@ -1,11 +1,11 @@
---
title: "Why Open-Source"
title: "Why Jan is betting on Open-Source"
description: "Why we're betting on open-source."
---
# Why Open-Source
AI today is concentrated in the hands of a few companies. They ask for trust, while keeping the levers of control hidden. We think that's a mistake.
AI today is concentrated in the hands of [a few companies](https://stratechery.com/2025/tech-philosophy-and-ai-opportunity/). They ask for trust, while keeping the levers of control hidden. We think that's a mistake.
When you depend on one vendor, your future is tied to their roadmap, their politics, their survival. If they get acquired, pivot, or shut down; you're stuck.
@ -16,9 +16,9 @@ Depending on a closed vendor means giving up more than flexibility:
AI has become critical infrastructure. Nations, enterprises, even small teams rely on it to think and decide. And yet, control sits with a few vendors who decide the terms of access. We believe that's not control. That's dependency dressed up as convenience. One of the most powerful inventions is being steered by a handful of executives. Their values shape what billions can say, build, or ask.
*This cannot stand. It must be changed.*
This can't stand. It must be changed.
## Jan's Bet
## How we see
We don't believe the future of AI should be dictated by a few firms in San Francisco, Beijing, or anywhere else.
@ -30,4 +30,4 @@ That's why we're building Jan, a full product suite:
- Jan Server
- Hub, Store, evals, guardrails, the ecosystem around it
The goal is to be the open-source replacement for ChatGPT and other BigAI products, with models and tools you can run, own, and trust.
The goal is to be the [open-source replacement for ChatGPT](https://jan.ai/) and other BigAI products, with models and tools you can run, own, and trust.

View File

@ -5,9 +5,13 @@ description: "Short answer: Open Superintelligence."
# Why does Jan exist?
> Short answer: Open Superintelligence.
import { Callout } from 'nextra/components'
In 1879, Edison lit a single street in [Menlo Park](https://en.wikipedia.org/wiki/Menlo_Park,_California). What mattered wasn't the bulb. It was that power could reach homes, schools, and factories.
<Callout type="info">
Short answer: Open Superintelligence.
</Callout>
In 1879, [Edison](https://en.wikipedia.org/wiki/Thomas_Edison) lit a single street in [Menlo Park](https://en.wikipedia.org/wiki/Menlo_Park,_California). What mattered wasn't the bulb. It was that power could reach homes, schools, and factories.
Electricity changed the world only when it became universal. Standard plugs, cheap generation, lines everywhere. People stopped talking about electricity and started using light, cold chains, and machines.
@ -19,13 +23,13 @@ Jan exists to push intelligence toward the first path: Open Superintelligence yo
> The world is made, and can be remade.
Every industrial wave redefined critical aspects of our daily lives:
- Factories introduced shift clocks and wage rhythms
- Steam gave way to electricity and standardized parts
- Rail, telegraph, and later networks changed how decisions travel
- Each wave pulled new bargains into being skills, schools, safety nets, labor law
Every industrial wave redefined new defaults of our daily lives:
- [Factories](https://en.wikipedia.org/wiki/Factory) created the modern job
- [Electricity](https://en.wikipedia.org/wiki/Electricity) created the modern home
- [Railroads](https://en.wikipedia.org/wiki/Rail_transport#History) and [telegraphs](https://en.wikipedia.org/wiki/Telegraphy#History) created the modern nation
- [The Internet](https://en.wikipedia.org/wiki/Internet) created the modern world
So what we're interested in is who is going to write the new defaults and share in the gains.
Open Superintelligence will create what comes next. What we're interested in is who is going to write the new defaults and share in the gains.
Technology doesn't choose its path, people do. Power accrues to whoever designs, deploys, and profits from the system:
- If intelligence is closed and centralized, the gains concentrate

Binary file not shown.

After

Width:  |  Height:  |  Size: 230 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 320 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 293 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 395 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 612 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 230 KiB

View File

@ -20,5 +20,10 @@
"title": "Research",
"display": "normal",
"href": "/blog?category=research"
},
"guides-cat": {
"title": "Guides",
"display": "normal",
"href": "/blog?category=guides"
}
}

View File

@ -0,0 +1,123 @@
---
title: "Private AI for legal professionals who need confidentiality"
description: "It's possible to use AI without risking client data. Jan helps lawyers save time while keeping clients safe."
tags: AI, ai for law, ai for lawyers, ChatGPT alternative, Jan, local AI, offline AI
categories: guides
date: 2025-09-30
ogImage: assets/images/general/jan-for-ai-law-assistant-chat.jpeg
twitter:
card: summary_large_image
site: "@jandotai"
title: "Private AI for legal professionals who need confidentiality"
description: "It's possible to use AI without risking client data. Jan helps lawyers save time while keeping clients safe."
image: assets/images/general/jan-assistants-ai-for-legal.jpeg
---
import { Callout } from 'nextra/components'
import CTABlog from '@/components/Blog/CTA'
import { OpenAIStatusChecker } from '@/components/OpenAIStatusChecker'
# Private AI for legal professionals who need confidentiality
![AI for Law](/assets/images/general/jan-for-ai-law-assistant-chat.jpeg)
Yes, it's possible to use AI in legal work without risking client data.
<Callout type="warning">
Client trust depends on privacy. Sending documents into public AI tools risks compliance and reputation.
</Callout>
Start by [downloading Jan](/download) and installing the **Jan v1 model**. Once installed, you can create assistants tailored to your practice and keep contracts, case notes, and client files under your control.
<Callout type="info">
**Why use Jan for legal tasks**
- Runs locally on your hardware, no cloud uploads
- Keeps chats and interactions private
- Works offline once installed
- Lets you build assistants for your own workflows
</Callout>
---
## Create your assistant
Once Jan is installed with the **Jan v1 model**, onboarding will guide you through downloading and setup.
Click **Create assistant** to start:
![Create your first AI assistant in Jan](./_assets/create-assistant-1.jpeg)
*Create your first assistant in Jan*
Add an assistant name and prompt:
![Jan assistant for contract review](./_assets/jan-assistant-for-law.png)
*Example of a Jan assistant for contract review*
You can create assistants using specific prompts. Below are examples for common legal workflows.
---
## Contract review assistant
AI can help lawyers move faster through long contracts by pointing out what matters most.
**Prompt for Jan:**
> You are a contract review assistant.
> When I paste a contract:
> - Highlight risky or unusual clauses
> - Flag ambiguous or missing terms
> - Summarize the agreement in plain English for a non-lawyer client
> Format your response with sections: **Risks**, **Ambiguities/Missing**, **Summary**.
> Do not provide legal advice.
---
## Drafting assistant
Use AI to create first drafts of NDAs, service agreements, or client letters. You still refine the output, but AI saves time on boilerplate.
**Prompt for Jan:**
> You are a drafting assistant.
> When asked to draft a legal agreement or client letter:
> - Produce a professional first version
> - Use clear, concise language
> - Leave placeholders like [Party Name], [Date], [Amount] for details
> - Structure output with headings, numbered clauses, and consistent formatting
> Do not provide legal advice.
---
## Case preparation assistant
Case prep often means reading hundreds of pages. AI can turn depositions, discovery files, or judgments into concise notes.
![Jan legal case preparation assistant](./_assets/jan-for-ai-law-assistant-chat.jpeg)
*Jan chat interface for case preparation — process documents and extract key information*
**Prompt for Jan:**
> You are a case preparation assistant.
> When I provide case materials:
> - Extract key facts, issues, and arguments
> - Present them as bullet points under headings: **Facts**, **Issues**, **Arguments**
> - Keep summaries concise (under 500 words unless I request more)
> Use plain English, no speculation or legal conclusions.
---
## Knowledge management assistant
Law firms accumulate memos, policies, and precedents. AI can help organize and retrieve them quickly.
**Prompt for Jan:**
> You are a knowledge management assistant.
> When I ask questions about internal documents:
> - Return concise summaries or direct excerpts
> - Always cite the source (e.g., “Policy Manual, Section 4”)
> - If not found in provided material, reply “Not found in documents.”
> Do not invent information.
---
## Final note
AI in legal practice is not about replacing lawyers. It's about handling repetitive tasks safely so you can focus on real decisions.
With private AI, you gain efficiency without compromising client confidentiality.
<CTABlog />

View File

@ -0,0 +1,134 @@
---
title: "AI for teachers who care about student privacy"
description: "Use AI in teaching without risking student data. Jan helps teachers plan lessons, grade faster, and communicate with parents."
tags: AI, ai for teachers, ChatGPT alternative, Jan, local AI, offline AI, education
categories: guides
date: 2025-10-01
ogImage: assets/images/general/ai-for-teacher.jpeg
twitter:
card: summary_large_image
site: "@jandotai"
title: "AI for teachers who care about student privacy"
description: "Use AI in teaching without risking student data. Jan helps teachers plan lessons, grade faster, and communicate with parents."
image: assets/images/general/ai-for-teacher.jpeg
---
import { Callout } from 'nextra/components'
import CTABlog from '@/components/Blog/CTA'
# AI for teachers who care about student privacy
![AI for teachers](/assets/images/general/ai-for-teacher.jpeg)
AI can help teachers handle the work that piles up outside class. It can draft a lesson outline, suggest feedback on essays, or turn notes into a polite parent email. These are the tasks that usually stretch into evenings and weekends.
<Callout>
Most AI tools like ChatGPT run in the cloud. Sharing lesson plans, student writing, or parent details there risks compliance and trust.
</Callout>
That's where Jan comes in:
- [Download Jan](/download)
- You get the same time-saving help
- Your data never leaves your device.
<video controls>
<source src="/assets/images/general/jan-ai-for-teacher.mp4" type="video/mp4" />
Your browser does not support the video tag.
</video>
*See how teachers use Jan for AI-powered lesson planning and grading*
<Callout type="info">
**Why use Jan for teaching**
- Runs locally, no cloud servers
- Keeps lesson plans and student data private
- Works offline once installed
- Lets you build assistants for your daily teaching tasks
</Callout>
---
## Create your assistant
Once Jan is installed, click **Create assistant** and add one of the prompts below. Each assistant is for a specific classroom task.
![Create your first AI assistant in Jan](/assets/images/general/assistants-ai-for-teachers.jpeg)
---
## Lesson planning assistant
AI can draft lesson outlines in minutes. You adapt and refine them for your students.
**Prompt for Jan:**
> You are a lesson planning assistant.
> When I give you a topic or subject:
> - Suggest a lesson outline with objectives, activities, and discussion questions
> - Adjust for different grade levels if I specify
> - Keep plans practical and realistic for a classroom setting
Example ask: For Grade 6 science on ecosystems. Objectives: define food chains, explain producer/consumer roles. Activity: group poster on an ecosystem. Questions: How would removing one species affect the whole system?
---
## Grading support assistant
AI won't replace your judgment, but it can make feedback faster and more consistent.
**Prompt for Jan:**
> You are a grading support assistant.
> When I paste student writing or answers:
> - Highlight strengths and areas for improvement
> - Suggest short, constructive feedback I can reuse
> - Keep tone supportive and professional
> Do not assign final grades.
Example: For a history essay. Strength: clear thesis. Improvement: weak evidence. Feedback: "Great thesis and structure. Next time, support your points with specific historical examples."
---
## Parent communication assistant
Writing parent emails is important but time-consuming.
**Prompt for Jan:**
> You are a parent communication assistant.
> When I give you key points about a student:
> - Draft a polite and empathetic email to parents
> - Use clear and professional language
> - Keep tone supportive, not overly formal
> Only include details I provide.
Example: Notes: “Student is falling behind on homework, otherwise engaged in class.” - Output: a short, encouraging message suggesting a check-in at home.
---
## Classroom resources assistant
Generate quizzes, worksheets, or practice activities at short notice.
**Prompt for Jan:**
> You are a classroom resource assistant.
> When I provide a topic or subject:
> - Generate sample quiz questions (multiple choice and short answer)
> - Suggest short practice activities
> - Provide answer keys separately
> Keep material age-appropriate for the level I specify.
Example: For Grade 4 fractions. 5 multiple-choice questions with answer key, plus a quick worksheet with 3 practice problems.
---
## Getting started
1. [Download Jan](/download).
2. Install the Jan model (guided in-app)
3. Create your first assistant using one of the prompts above
4. Test with non-sensitive examples first
5. Use it in real classroom tasks once you're comfortable
---
## Final note
AI isn't here to replace teachers. It's here to take repetitive tasks off your plate so you can focus on teaching. With Jan, you can use AI confidently without risking student privacy.
<CTABlog />

View File

@ -17,7 +17,7 @@ Jan now supports [NVIDIA TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) i
We've been excited for TensorRT-LLM for a while, and [had a lot of fun implementing it](https://github.com/menloresearch/nitro-tensorrt-llm). As part of the process, we've run some benchmarks, to see how TensorRT-LLM fares on consumer hardware (e.g. [4090s](https://www.nvidia.com/en-us/geforce/graphics-cards/40-series/), [3090s](https://www.nvidia.com/en-us/geforce/graphics-cards/30-series/)) we commonly see in the [Jan's hardware community](https://discord.com/channels/1107178041848909847/1201834752206974996).
<Callout type="info" >
**Give it a try!** Jan's [TensorRT-LLM extension](/docs/desktop/built-in/tensorrt-llm) is available in Jan v0.4.9 and up ([see more](/docs/desktop/built-in/tensorrt-llm)). We precompiled some TensorRT-LLM models for you to try: `Mistral 7b`, `TinyLlama-1.1b`, `TinyJensen-1.1b` 😂
**Give it a try!** Jan's TensorRT-LLM extension is available in Jan v0.4.9. We precompiled some TensorRT-LLM models for you to try: `Mistral 7b`, `TinyLlama-1.1b`, `TinyJensen-1.1b` 😂
Bugs or feedback? Let us know on [GitHub](https://github.com/menloresearch/jan) or via [Discord](https://discord.com/channels/1107178041848909847/1201832734704795688).
</Callout>

View File

@ -0,0 +1,120 @@
---
title: "ChatGPT alternatives that actually replace it"
description: "See the best ChatGPT alternatives in 2025. We've listed tools that are alternatives to ChatGPT."
tags: AI, ChatGPT alternative, ChatGPT alternatives, alternative to chatgpt, Jan, local AI, privacy, open source, offline AI
categories: guides
date: 2025-09-29
ogImage: assets/images/general/chatgpt-alternative-jan.jpeg
twitter:
card: summary_large_image
site: "@jandotai"
title: "ChatGPT alternatives that actually replace it"
description: "See the best ChatGPT alternatives in 2025. We've listed tools that are alternatives to ChatGPT."
image: assets/images/general/chatgpt-alternative-jan.jpeg
---
import { Callout } from 'nextra/components'
import CTABlog from '@/components/Blog/CTA'
# Best ChatGPT Alternatives
ChatGPT works well, but it always needs internet, has usage limits, and isn't private.
If you want options that fit different needs, offline use, privacy, or specialized tasks, see the best alternatives to ChatGPT available for specific use cases.
## Comparison: ChatGPT Alternatives
| ChatGPT Alternative | Offline | Key Strength | Best For |
| ------------------------- | ------- | ---------------------------- | -------------------------- |
| **[Jan](https://jan.ai)** | Yes | Runs Cloud + Offline, open-source | Best overall ChatGPT replacement |
| Claude | - | Strong writing and reasoning | Creative text & code |
| Gemini | - | Integrated with Google | Research tasks, image generation |
| Perplexity | - | Fast, with cited answers | Research and fact-checking |
| LM Studio | Yes | Runs open models on PC | Coding and experiments |
### Jan is the best ChatGPT alternative
![Use Jan to chat with AI models without internet access](/assets/images/general/chatgpt-alternative-jan.jpeg)
*Jan as an open-source alternative to ChatGPT*
Jan is the most complete ChatGPT alternative available today. It enables:
- Use AI online & offline (even on a plane)
- Agentic actions supported
- MCP servers supported for tools
Unlike ChatGPT, it runs on your computer, which means:
- Offline AI capabilities (see [Offline ChatGPT post](https://www.jan.ai/post/offline-chatgpt-alternative) for details)
- 100% private
- Open-source & Free
<Callout> Jan is an [open-source replacement for ChatGPT.](https://www.jan.ai/) </Callout>
### Claude is the most notable online alternative
![Claude](./_assets/claude.jpeg)
Claude has become the main online rival to ChatGPT. It stands out for writing, reasoning, and coding.
- Handles very long documents and context well
- Strong for essays, research papers, and structured text
- Popular with developers for code explanations and debugging
- Cloud-only, no offline mode
- Filters outputs heavily, sometimes too restrictive
### Gemini is Google's integrated alternative
![Gemini](./_assets/gemini.jpeg)
Gemini ties directly into Google's apps and search. Great for users in the Google ecosystem.
- Built into Gmail, Docs, and Google Search
- Good for real-time research and fact-checking
- Strong at pulling web context into answers
- Requires Google account, fully online
- Privacy concerns: all tied to Google services
### Perplexity is the research-focused alternative
![Perplexity](./_assets/perplexity.jpeg)
Perplexity is built for fact-checking and quick research, not creativity.
- Always cites sources for answers
- Strong at summarizing current web info
- Very fast for Q&A style use
- Limited in creativity and open-ended writing
- Cloud-only, daily free usage caps
### LM Studio is the experimental alternative
![LM Studio](./_assets/lm-studio.jpeg)
LM Studio is not a ChatGPT replacement but a local tool for running open models.
- Lets you test and run open-source models on PC
- Offline by default, works without internet
- Flexible setup for developers and technical users
- Requires decent hardware (RAM/VRAM)
LM Studio is not beginner-friendly compared to Jan.
## Choosing the right ChatGPT alternative for you:
- Best overall replacement: [Jan](https://www.jan.ai/)
- For writing & storytelling: Claude
- For research & web knowledge: Perplexity or Gemini
- For productivity & office work: Microsoft Copilot
- For experimentation with open-source models for technical people: LM Studio
Most ChatGPT alternatives are still cloud-based and limited. If you want full privacy, offline use, and no restrictions, the best ChatGPT alternative is [Jan](https://www.jan.ai/).
### Can I use ChatGPT offline?
No. ChatGPT always requires internet. For offline AI, use Jan.
### Whats the best free ChatGPT alternative?
Jan is free, open-source, and runs offline. Others like Claude or Perplexity have limited free tiers but are cloud-based.
### Which ChatGPT alternative is best for writing?
Claude is strong for essays, reports, and structured writing. You could use [open-source models](https://www.jan.ai/post/run-ai-models-locally) in Jan too.
### Which ChatGPT alternative is best for research?
Perplexity and Gemini pull real-time web data with citations.
### Whats the closest full replacement to ChatGPT?
Jan. It runs locally, works offline, and feels like ChatGPT without restrictions.

View File

@ -4,13 +4,13 @@ description: "A simple guide to replicating Deep Research results for free, with
tags: AI, local models, Jan, GGUF, Deep Research, local AI
categories: guides
date: 2025-08-04
ogImage: _assets/research-result-local.png
ogImage: assets/images/general/research-result-local.png
twitter:
card: summary_large_image
site: "@jandotai"
title: "Replicating Deep Research with Jan"
description: "Learn how to replicate Deep Research results with Jan."
image: _assets/research-result-local.jpg
image: assets/images/general/research-result-local.png
---
import { Callout } from 'nextra/components'
@ -125,8 +125,8 @@ any version with Model Context Protocol in it (>`v0.6.3`).
**The Key: Assistants + Tools**
Running deep research in Jan can be accomplished by combining [custom assistants](https://jan.ai/docs/assistants)
with [MCP search tools](https://jan.ai/docs/desktop/mcp-examples/search/exa). This pairing allows any model—local or
Running deep research in Jan can be accomplished by combining [custom assistants](https://jan.ai/docs/desktop/assistants)
with [MCP search tools](https://jan.ai/docs/mcp-examples/search/exa). This pairing allows any model—local or
cloud—to follow a systematic research workflow, to create a report similar to that of other providers, with some
visible limitations (for now).

View File

@ -4,7 +4,7 @@ description: "A straightforward guide to running DeepSeek R1 locally regardless
tags: DeepSeek, R1, local AI, Jan, GGUF, Qwen, Llama
categories: guides
date: 2025-01-31
ogImage: assets/deepseek-r1-locally-jan.jpg
ogImage: assets/images/general/deepseek-r1-locally-jan.jpg
twitter:
card: summary_large_image
site: "@jandotai"
@ -17,7 +17,7 @@ import CTABlog from '@/components/Blog/CTA'
# Run DeepSeek R1 locally on your device (Beginner-Friendly Guide)
![DeepSeek R1 running locally in Jan AI interface, showing the chat interface and model settings](./_assets/deepseek-r1-locally-jan.jpg)
![DeepSeek R1 running locally in Jan AI interface, showing the chat interface and model settings](/assets/images/general/deepseek-r1-locally-jan.jpg)
DeepSeek R1 is one of the best open-source models in the market right now, and you can run DeepSeek R1 on your own computer!

View File

@ -3,7 +3,7 @@ title: "How we (try to) benchmark GPU kernels accurately"
description: "We present the process behind how we decided to benchmark GPU kernels and iteratively improved our benchmarking pipeline"
tags: ""
categories: research
ogImage: "./_assets/cover-kernel-benchmarking.png"
ogImage: assets/images/general/cover-kernel-benchmarking.png
date: 2025-09-17
---

View File

@ -0,0 +1,124 @@
---
title: "If ChatGPT is down, switch to AI that never goes down"
description: "Check if ChatGPT down right now, and learn how to use AI that never goes down."
tags: AI, ChatGPT down, ChatGPT alternative, Jan, local AI, offline AI, ChatGPT at capacity
categories: guides
date: 2025-09-30
ogImage: assets/images/general/is-chatgpt-down.jpg
twitter:
card: summary_large_image
site: "@jandotai"
title: "Realtime Status: Is ChatGPT down?"
description: "Check if ChatGPT is down right now with our real-time status checker, and learn how to use AI that never goes offline."
image: assets/images/general/is-chatgpt-down.jpg
---
import { Callout } from 'nextra/components'
import CTABlog from '@/components/Blog/CTA'
import { OpenAIStatusChecker } from '@/components/OpenAIStatusChecker'
# If ChatGPT is down, switch to AI that never goes down
If you're seeing ChatGPT is down, it could be a good signal to switch to [Jan](https://www.jan.ai/), AI that never goes down.
## 🔴 Realtime Status: Is ChatGPT down?
<Callout>
This live tracker shows if ChatGPT is down right now.
</Callout>
<OpenAIStatusChecker />
### ChatGPT Status Indicators
<div className="grid grid-cols-1 md:grid-cols-2 gap-4 my-6">
<div className="p-4 rounded-lg border border-green-200 bg-green-50 dark:bg-green-900/20 dark:border-green-800">
<div className="flex items-center gap-2 mb-2">
<div className="w-3 h-3 bg-green-500 rounded-full"></div>
<span className="font-semibold text-green-800 dark:text-green-300">Operational</span>
</div>
<p className="text-sm text-green-700 dark:text-green-400">All systems are functioning normally with no reported issues.</p>
</div>
<div className="p-4 rounded-lg border border-yellow-200 bg-yellow-50 dark:bg-yellow-900/20 dark:border-yellow-800">
<div className="flex items-center gap-2 mb-2">
<div className="w-3 h-3 bg-yellow-500 rounded-full"></div>
<span className="font-semibold text-yellow-800 dark:text-yellow-300">Degraded Performance</span>
</div>
<p className="text-sm text-yellow-700 dark:text-yellow-400">Services are running but may be slower than usual.</p>
</div>
<div className="p-4 rounded-lg border border-orange-200 bg-orange-50 dark:bg-orange-900/20 dark:border-orange-800">
<div className="flex items-center gap-2 mb-2">
<div className="w-3 h-3 bg-orange-500 rounded-full"></div>
<span className="font-semibold text-orange-800 dark:text-orange-300">Partial Outage</span>
</div>
<p className="text-sm text-orange-700 dark:text-orange-400">Some features or regions may be experiencing issues.</p>
</div>
<div className="p-4 rounded-lg border border-red-200 bg-red-50 dark:bg-red-900/20 dark:border-red-800">
<div className="flex items-center gap-2 mb-2">
<div className="w-3 h-3 bg-red-500 rounded-full"></div>
<span className="font-semibold text-red-800 dark:text-red-300">Major Outage</span>
</div>
<p className="text-sm text-red-700 dark:text-red-400">Significant service disruption affecting most users.</p>
</div>
</div>
## Skip the downtime with Jan
When ChatGPT is down, Jan keeps working. Jan is an open-source ChatGPT alternative that runs on your computer - no servers, no outages, no waiting.
![Jan running when ChatGPT is down](/assets/images/general/is-chatgpt-down.jpg)
*Jan works even when ChatGPT doesn't.*
### Why Jan never goes down:
- **Runs locally** - No dependency on external servers
- **Always available** - Works offline, even on flights
- **No capacity limits** - Uses your computer's resources
- **100% private** - Your conversations stay on your device
### Get started in 3 mins:
1. Download Jan: [jan.ai](https://jan.ai)
2. Install a model: Choose from Jan, Qwen, or other top models
3. Start chatting: Similar design to ChatGPT, but always available when you use local models
<Callout type="info">
**Pro tip:** Keep both ChatGPT and Jan. You'll never lose productivity to outages again.
</Callout>
Jan runs AI models locally, so you don't need internet access. That means Jan is unaffected when ChatGPT is down.
### Why does ChatGPT go down?
There could be multiple reasons:
- Too many users at once
- Data center or API downtime
- Planned or unplanned updates
- Limited in some locations
ChatGPT depends on OpenAI's servers. If those go down, so does ChatGPT. Jan users aren't affected by ChatGPT's outages.
### Common ChatGPT Errors
When ChatGPT experiences issues, you might see these error messages:
- "ChatGPT is at capacity right now": Too many users online, try again later
- "Error in message stream": Connection problems with OpenAI servers
- "Something went wrong": General server error, refresh and retry
- "Network error": Internet connectivity issues on your end or OpenAI's
- "Rate limit exceeded": Too many requests sent, wait before trying again
- "This model is currently overloaded": High demand for specific model
## Quick answers about ChatGPT status
### Is ChatGPT down?
Check the ChatGPT realtime status above. [See if ChatGPT is down right now.](https://www.jan.ai/post/is-chatgpt-down-use-jan#-realtime-status-is-chatgpt-down)
### Why is ChatGPT down?
Usually server overload, maintenance, or outages at OpenAI.
### What does "ChatGPT is at capacity" mean?
Too many users are online at the same time. You'll need to wait or switch to Jan instead.
### Is ChatGPT shutting down?
No, ChatGPT isn't shutting down. Outages are temporary.
### Can I use ChatGPT offline?
No. ChatGPT always requires internet. For [offline AI](https://www.jan.ai/post/offline-chatgpt-alternative), use [Jan](https://jan.ai).

View File

@ -1,32 +1,36 @@
---
title: "Offline ChatGPT: You can't run ChatGPT offline, do this instead"
description: "Learn how to use AI offline with Jan - a free, open-source alternative to ChatGPT that works 100% offline on your computer."
tags: AI, ChatGPT alternative, offline AI, Jan, local AI, privacy
description: "Use offline AI with Jan, a free & open-source alternative to ChatGPT that runs 100% offline."
tags: AI, chatgpt offline, ChatGPT alternative, offline AI, Jan, local AI, privacy
categories: guides
date: 2025-02-08
ogImage: _assets/offline-chatgpt-alternatives-jan.jpg
ogImage: assets/images/general/offline-chatgpt-alternatives-jan.jpg
twitter:
card: summary_large_image
site: "@jandotai"
title: "Offline ChatGPT: You can't run ChatGPT offline, do this instead"
description: "Want to use ChatGPT offline? Learn how to run AI models locally with Jan - free, open-source, and works without internet."
image: _assets/offline-chatgpt-alternatives-jan.jpg
description: "Use offline AI with Jan, a free & open-source alternative to ChatGPT that runs 100% offline."
image: assets/images/general/offline-chatgpt-alternatives-jan.jpg
---
import { Callout } from 'nextra/components'
import CTABlog from '@/components/Blog/CTA'
# Offline ChatGPT: You can't run ChatGPT offline, do this instead
ChatGPT is a cloud-based service that requires internet access. However, it's not the only way to use AI. You can run AI models offline on your device with [Jan](https://jan.ai/). It's completely free, open-source, and gives you 100% offline capability. You can even use AI on a plane!
ChatGPT can't run offline. You can't download it. It always needs internet, because it runs on OpenAI's servers.
<Callout>
If you want offline AI, you need local models. The easiest way: [Jan, an open-source replacement of ChatGPT](https://jan.ai/). It's free, open-source, and works 100% offline. With Jan, you can even use AI on a plane.
<Callout type="info">
**Quick Summary:**
- ChatGPT always needs internet - it can't run offline
- Jan lets you run AI models 100% offline on your computer
- It's free and open-source
- Works on Mac, Windows, and Linux
- ChatGPT always needs internet - no offline mode
- Use Jan to use AI models 100% offline
- It's free & open-source, and works on Mac, Windows, and Linux
</Callout>
## How to use AI offline?
Offline AI means the model runs on your computer. So no internet needed, 100% private, and data never leaves your device. With Jan you can run offline AI models locally.
## Jan as an offline ChatGPT alternative
![Use Jan to chat with AI models without internet access](./_assets/offline-chatgpt-alternative-ai-without-internet.jpg)
@ -42,23 +46,25 @@ Go to [jan.ai](https://jan.ai) and download the version for your computer (Mac,
### 2. Download an AI model
You'll need an AI model to use AI offline, so download a model from Jan. Once it's on your computer, you don't need internet anymore.
You'll need an AI model to use AI offline, so download a model from Jan. Once it's on your computer, you don't need internet anymore. You can also use GPT models via Jan - check [running gpt-oss locally](https://www.jan.ai/post/run-gpt-oss-locally) post to see it.
![Choose an AI model that works offline](./_assets/jan-model-selection.jpg "Find the perfect AI model for offline use")
*Select an AI model that matches your needs and computer capabilities*
<Callout>
**Which model should you choose?**
### Which model should you choose?
- For most computers: Try Mistral 7B or DeepSeek - they're similar to ChatGPT 3.5
- For older computers: Use smaller 3B models
- For gaming PCs: You can try larger 13B models
<Callout type="info">
Don't worry about choosing - Jan will automatically recommend models that work well on your computer.
</Callout>
If you'd like to learn more about local AI, check [how to run AI models locally as a beginner](https://www.jan.ai/post/run-ai-models-locally) article.
### 3. Start using AI offline
![Chat with AI offline using Jan's interface](./_assets/run-ai-locally-with-jan.jpg "Experience ChatGPT-like interactions without internet")
![Chat with AI offline using Jan's interface](/assets/images/general/run-ai-locally-with-jan.jpg "Experience ChatGPT-like interactions without internet")
*Use Jan's clean interface to chat with AI - no internet required*
Once downloaded, you can use AI anywhere, anytime:
@ -71,12 +77,7 @@ Once downloaded, you can use AI anywhere, anytime:
## How to chat with your docs in Jan?
To chat with your docs in Jan, you need to activate experimental mode.
![Activate experimental mode in Jan's settings](./_assets/chat-with-your-docs-offline-ai.jpg "Enable experimental features to chat with your documents")
*Turn on experimental mode in settings to chat with your docs*
After activating experimental mode, simply add your files and ask questions about them.
Simply add your files and ask questions about them.
![Chat with your documents using Jan](./_assets/chat-with-docs-prompt.jpg "Ask questions about your documents offline")
*Chat with your documents privately - no internet needed*
@ -97,17 +98,17 @@ Local AI makes possible offline AI use, so Jan is going to be your first step to
4. **No Server Issues:** No more "ChatGPT is at capacity"
5. **Your Choice of Models:** Use newer models as they come out
**"Is it really free? What's the catch?"**
### "Is Jan really free? What's the catch?"
Yes, it's completely free and open source. Jan is built by developers who believe in making AI accessible to everyone.
**"How does it compare to ChatGPT?"**
### How does Jan compare to ChatGPT?
Modern open-source models like DeepSeek and Mistral are very capable. While they might not match GPT-4, they're perfect for most tasks and getting better every month.
**"Do I need a powerful computer?"**
### "Do I need a powerful computer?"
If your computer is from the last 5 years, it will likely work fine. You need about 8GB of RAM and 10GB of free space for comfortable usage.
**"What about my privacy?"**
Everything stays on your computer. Your conversations, documents, and data never leave your device unless you choose to share them.
### "What about my privacy?"
Everything stays on your computer with Jan. Your conversations, documents, and data never leave your device unless you choose to share them.
Want to learn more about the technical side? Check our detailed [guide on running AI models locally](/post/run-ai-models-locally). It's not required to [use AI offline](https://jan.ai/) but helps understand how it all works.
@ -116,3 +117,20 @@ Want to learn more about the technical side? Check our detailed [guide on runnin
<Callout type="info">
[Join our Discord community](https://discord.gg/Exe46xPMbK) for support and tips on using Jan as your offline ChatGPT alternative.
</Callout>
### FAQ
#### Can I download ChatGPT for offline use?
No. ChatGPT is cloud-only.
#### How to use ChatGPT offline?
You can't. ChatGPT has no offline mode. Use Jan instead for a ChatGPT-like offline experience.
#### Does ChatGPT have internet access?
Yes. It runs in the cloud.
#### What's the best way to use AI offline?
Download Jan and run models like Mistral, DeepSeek, or GPT-OSS locally.
#### What's GPT offline?
OpenAI has open-source models you can run locally but not via ChatGPT. One of them is [gpt-oss](https://www.jan.ai/post/run-gpt-oss-locally) and you can run it via Jan.

View File

@ -50,7 +50,7 @@ Thinking mode is powerful, but greedy decoding kills its output. It'll repeat or
## Quick summary
![Qwen3 settings](./_assets/qwen3-settings-jan-ai.jpeg)
![Qwen3 settings](/assets/images/general/qwen3-30b-settings.jpg)
### Non-thinking mode (`enable_thinking=False`)

View File

@ -4,7 +4,7 @@ description: "A straightforward guide to running AI models locally on your compu
tags: AI, local models, Jan, GGUF, privacy, local AI
categories: guides
date: 2025-01-31
ogImage: assets/run-ai-locally-with-jan.jpg
ogImage: assets/images/general/run-ai-locally-with-jan.jpg
twitter:
card: summary_large_image
site: "@jandotai"
@ -35,7 +35,7 @@ Most people think running AI models locally is complicated. It's not. Anyone can
That's all to run your first AI model locally!
![Jan's simple and clean chat interface for local AI](./_assets/run-ai-locally-with-jan.jpg "Jan's easy-to-use chat interface after installation")
![Jan's simple and clean chat interface for local AI](/assets/images/general/run-ai-locally-with-jan.jpg "Jan's easy-to-use chat interface after installation")
*Jan's easy-to-use chat interface after installation.*
Keep reading to learn key terms of local AI and the things you should know before running AI models locally.

View File

@ -4,21 +4,19 @@ description: "Complete 5-minute beginner guide to running OpenAI's gpt-oss local
tags: OpenAI, gpt-oss, local AI, Jan, privacy, Apache-2.0, llama.cpp, Ollama, LM Studio
categories: guides
date: 2025-08-06
ogImage: assets/gpt-oss%20locally.jpeg
ogImage: assets/images/general/gpt-oss%20locally.jpeg
twitter:
card: summary_large_image
site: "@jandotai"
title: "Run OpenAI's gpt-oss Locally in 5 Minutes (Beginner Guide)"
description: "Complete 5-minute beginner guide to running OpenAI's gpt-oss locally with Jan AI for private, offline conversations."
image: assets/gpt-oss%20locally.jpeg
image: assets/images/general/gpt-oss%20locally.jpeg
---
import { Callout } from 'nextra/components'
import CTABlog from '@/components/Blog/CTA'
# Run OpenAI's gpt-oss Locally in 5 mins
![gpt-oss running locally in Jan interface](./_assets/gpt-oss%20locally.jpeg)
OpenAI launched [gpt-oss](https://openai.com/index/introducing-gpt-oss/), marking their return to open-source AI after GPT-2. This model is designed to run locally on consumer hardware. This guide shows you how to install and run gpt-oss on your computer for private, offline AI conversations.
## What is gpt-oss?

View File

@ -107,14 +107,15 @@ const config: DocsThemeConfig = {
head: function useHead() {
const { title, frontMatter } = useConfig()
const { asPath } = useRouter()
const titleTemplate =
(asPath.includes('/desktop')
const titleTemplate = asPath.includes('/post/')
? (frontMatter?.title || title)
: (asPath.includes('/desktop')
? 'Jan Desktop'
: asPath.includes('/server')
? 'Jan Server'
: 'Jan') +
' - ' +
(frontMatter?.title || title)
' - ' +
(frontMatter?.title || title)
return (
<Fragment>

View File

@ -22,6 +22,9 @@
},
"devDependencies": {
"@janhq/core": "workspace:*",
"@tabler/icons-react": "^3.34.0",
"@types/react": "19.1.2",
"react": "19.0.0",
"typescript": "5.9.2",
"vite": "5.4.20",
"vitest": "2.1.9",
@ -29,6 +32,8 @@
},
"peerDependencies": {
"@janhq/core": "*",
"@tabler/icons-react": "*",
"react": "19.0.0",
"zustand": "5.0.3"
},
"dependencies": {

View File

@ -14,4 +14,4 @@ export const DEFAULT_ASSISTANT = {
name: 'Jan',
avatar: '👋',
created_at: 1747029866.542,
}
}

View File

@ -11,6 +11,9 @@ import {
} from '@janhq/core'
import { RemoteApi } from './api'
import { getDefaultAssistant, ObjectParser, combineConversationItemsToMessages } from './utils'
import { ApiError } from '../shared/types/errors'
const CONVERSATION_NOT_FOUND_EVENT = 'conversation-not-found'
export default class ConversationalExtensionWeb extends ConversationalExtension {
private remoteApi: RemoteApi | undefined
@ -111,6 +114,15 @@ export default class ConversationalExtensionWeb extends ConversationalExtension
return messages
} catch (error) {
console.error('Failed to list messages:', error)
// Check if it's a 404 error (conversation not found)
if (error instanceof ApiError && error.isNotFound()) {
// Trigger a navigation event to redirect to home
// We'll use a custom event that the web app can listen to
window.dispatchEvent(new CustomEvent(CONVERSATION_NOT_FOUND_EVENT, {
detail: { threadId, error: error.message }
}))
}
return []
}
}

View File

@ -5,9 +5,45 @@
import { getSharedAuthService, JanAuthService } from '../shared'
import { JanModel, janProviderStore } from './store'
import { ApiError } from '../shared/types/errors'
// JAN_API_BASE is defined in vite.config.ts
// Constants
const TEMPORARY_CHAT_ID = 'temporary-chat'

/**
 * Resolves the Jan API endpoint and builds the outgoing request payload
 * for a chat completion call.
 *
 * Temporary chats go to the stateless `/chat/completions` endpoint with all
 * conversation metadata stripped; regular chats go to the stateful
 * `/conv/chat/completions` endpoint and are persisted server-side.
 *
 * @param request - The chat completion request
 * @param stream - Whether the completion should be streamed (defaults to false)
 * @returns Object containing the endpoint URL, the processed payload, and a
 *          flag indicating whether this is a temporary chat
 */
function getChatCompletionConfig(request: JanChatCompletionRequest, stream: boolean = false) {
  const isTemporaryChat = request.conversation_id === TEMPORARY_CHAT_ID

  if (isTemporaryChat) {
    // Stateless endpoint: nothing is stored, so drop the conversation id.
    return {
      endpoint: `${JAN_API_BASE}/chat/completions`,
      payload: {
        ...request,
        stream,
        conversation_id: undefined,
      },
      isTemporaryChat,
    }
  }

  // Stateful endpoint: persist messages and reasoning under the conversation.
  return {
    endpoint: `${JAN_API_BASE}/conv/chat/completions`,
    payload: {
      ...request,
      stream,
      store: true,
      store_reasoning: true,
      conversation: request.conversation_id,
      conversation_id: undefined,
    },
    isTemporaryChat,
  }
}
export interface JanModelsResponse {
object: string
data: JanModel[]
@ -102,7 +138,8 @@ export class JanApiClient {
return models
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Failed to fetch models'
const errorMessage = error instanceof ApiError ? error.message :
error instanceof Error ? error.message : 'Failed to fetch models'
janProviderStore.setError(errorMessage)
janProviderStore.setLoadingModels(false)
throw error
@ -115,22 +152,18 @@ export class JanApiClient {
try {
janProviderStore.clearError()
const { endpoint, payload } = getChatCompletionConfig(request, false)
return await this.authService.makeAuthenticatedRequest<JanChatCompletionResponse>(
`${JAN_API_BASE}/conv/chat/completions`,
endpoint,
{
method: 'POST',
body: JSON.stringify({
...request,
stream: false,
store: true,
store_reasoning: true,
conversation: request.conversation_id,
conversation_id: undefined,
}),
body: JSON.stringify(payload),
}
)
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Failed to create chat completion'
const errorMessage = error instanceof ApiError ? error.message :
error instanceof Error ? error.message : 'Failed to create chat completion'
janProviderStore.setError(errorMessage)
throw error
}
@ -144,23 +177,17 @@ export class JanApiClient {
): Promise<void> {
try {
janProviderStore.clearError()
const authHeader = await this.authService.getAuthHeader()
const response = await fetch(`${JAN_API_BASE}/conv/chat/completions`, {
const { endpoint, payload } = getChatCompletionConfig(request, true)
const response = await fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...authHeader,
},
body: JSON.stringify({
...request,
stream: true,
store: true,
store_reasoning: true,
conversation: request.conversation_id,
conversation_id: undefined,
}),
body: JSON.stringify(payload),
})
if (!response.ok) {
@ -216,7 +243,8 @@ export class JanApiClient {
reader.releaseLock()
}
} catch (error) {
const err = error instanceof Error ? error : new Error('Unknown error occurred')
const err = error instanceof ApiError ? error :
error instanceof Error ? error : new Error('Unknown error occurred')
janProviderStore.setError(err.message)
onError?.(err)
throw err
@ -230,7 +258,8 @@ export class JanApiClient {
await this.getModels()
console.log('Jan API client initialized successfully')
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Failed to initialize API client'
const errorMessage = error instanceof ApiError ? error.message :
error instanceof Error ? error.message : 'Failed to initialize API client'
janProviderStore.setError(errorMessage)
throw error
} finally {
@ -239,4 +268,4 @@ export class JanApiClient {
}
}
export const janApiClient = JanApiClient.getInstance()
export const janApiClient = JanApiClient.getInstance()

View File

@ -1 +1 @@
export { default } from './provider'
export { default } from './provider'

View File

@ -15,6 +15,7 @@ import {
} from '@janhq/core' // cspell: disable-line
import { janApiClient, JanChatMessage } from './api'
import { janProviderStore } from './store'
import { ApiError } from '../shared/types/errors'
// Jan models support tools via MCP
const JAN_MODEL_CAPABILITIES = ['tools'] as const
@ -192,7 +193,8 @@ export default class JanProviderWeb extends AIEngine {
console.error(`Failed to unload Jan session ${sessionId}:`, error)
return {
success: false,
error: error instanceof Error ? error.message : 'Unknown error',
error: error instanceof ApiError ? error.message :
error instanceof Error ? error.message : 'Unknown error',
}
}
}

View File

@ -92,4 +92,4 @@ export const janProviderStore = {
useJanProviderStore.getState().clearError(),
reset: () =>
useJanProviderStore.getState().reset(),
}
}

View File

@ -0,0 +1,54 @@
import { useMemo, useCallback } from 'react'
import { IconWorld } from '@tabler/icons-react'
import { MCPToolComponentProps } from '@janhq/core'
// List of tool names considered as web search tools
const WEB_SEARCH_TOOL_NAMES = ['google_search', 'scrape'];
/**
 * Toolbar toggle that enables/disables all web-search MCP tools as a group.
 *
 * Renders nothing when none of the known web-search tools (see
 * WEB_SEARCH_TOOL_NAMES) are present. Clicking the button flips every
 * web-search tool to the same state: off if all are currently enabled,
 * on otherwise.
 */
export const WebSearchButton = ({
  tools,
  isToolEnabled,
  onToolToggle,
}: MCPToolComponentProps) => {
  const webSearchTools = useMemo(
    () => tools.filter((tool) => WEB_SEARCH_TOOL_NAMES.includes(tool.name)),
    [tools]
  )

  // "Enabled" means every web-search tool is currently on.
  // (Array.prototype.every is true for an empty array, but the empty case
  // renders nothing below, so that never shows a misleading state.)
  const isEnabled = useMemo(
    () => webSearchTools.every((tool) => isToolEnabled(tool.name)),
    [webSearchTools, isToolEnabled]
  )

  const handleToggle = useCallback(() => {
    // Toggle all web search tools at once so they stay in sync
    const newState = !isEnabled
    webSearchTools.forEach((tool) => {
      onToolToggle(tool.name, newState)
    })
  }, [isEnabled, webSearchTools, onToolToggle])

  // BUGFIX: this early return previously sat ABOVE the useMemo/useCallback
  // calls, making the hooks run conditionally. React's Rules of Hooks require
  // the same hooks in the same order on every render, so the component crashed
  // whenever webSearchTools transitioned between empty and non-empty. All
  // hooks now run unconditionally before any return.
  if (webSearchTools.length === 0) {
    return null
  }

  return (
    <button
      onClick={handleToggle}
      className={`h-7 px-2 py-1 flex items-center justify-center rounded-md transition-all duration-200 ease-in-out gap-1 cursor-pointer ml-0.5 border-0 ${
        isEnabled
          ? 'bg-accent/20 text-accent'
          : 'bg-transparent text-main-view-fg/70 hover:bg-main-view-fg/5'
      }`}
      title={isEnabled ? 'Disable Web Search' : 'Enable Web Search'}
    >
      <IconWorld
        size={16}
        className={isEnabled ? 'text-accent' : 'text-main-view-fg/70'}
      />
      <span className={`text-sm font-medium ${isEnabled ? 'text-accent' : ''}`}>Search</span>
    </button>
  )
}

View File

@ -0,0 +1 @@
export { WebSearchButton } from './WebSearchButton'

View File

@ -4,11 +4,13 @@
* Uses official MCP TypeScript SDK with proper session handling
*/
import { MCPExtension, MCPTool, MCPToolCallResult } from '@janhq/core'
import { MCPExtension, MCPTool, MCPToolCallResult, MCPToolComponentProps } from '@janhq/core'
import { getSharedAuthService, JanAuthService } from '../shared'
import { Client } from '@modelcontextprotocol/sdk/client/index.js'
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js'
import { JanMCPOAuthProvider } from './oauth-provider'
import { WebSearchButton } from './components'
import type { ComponentType } from 'react'
// JAN_API_BASE is defined in vite.config.ts (defaults to 'https://api-dev.jan.ai/jan/v1')
declare const JAN_API_BASE: string
@ -232,4 +234,27 @@ export default class MCPExtensionWeb extends MCPExtension {
throw error
}
}
}
/**
 * Provides a custom UI component for web search tools
 * @returns The WebSearchButton component (the `null` in the return type is
 *          permitted by the signature but this implementation always returns
 *          the component)
 */
getToolComponent(): ComponentType<MCPToolComponentProps> | null {
  return WebSearchButton
}
/**
 * Returns the list of tool names that should be disabled by default for new users
 * All MCP web tools are disabled by default to prevent accidental API usage
 * @returns Array of tool names to disable by default (empty on failure)
 */
async getDefaultDisabledTools(): Promise<string[]> {
  try {
    // Every tool this extension exposes starts out disabled.
    const availableTools = await this.getTools()
    return availableTools.map((availableTool) => availableTool.name)
  } catch (error) {
    // Best-effort: if tool discovery fails, disable nothing rather than throw.
    console.error('Failed to get default disabled tools:', error)
    return []
  }
}
}

View File

@ -57,4 +57,4 @@ export class JanMCPOAuthProvider implements OAuthClientProvider {
async codeVerifier(): Promise<string> {
throw new Error('Code verifier not supported')
}
}
}

View File

@ -16,6 +16,7 @@ import { logoutUser, refreshToken, guestLogin } from './api'
import { AuthProviderRegistry } from './registry'
import { AuthBroadcast } from './broadcast'
import type { ProviderType } from './providers'
import { ApiError } from '../types/errors'
const authProviderRegistry = new AuthProviderRegistry()
@ -160,7 +161,7 @@ export class JanAuthService {
this.tokenExpiryTime = Date.now() + tokens.expires_in * 1000
} catch (error) {
console.error('Failed to refresh access token:', error)
if (error instanceof Error && error.message.includes('401')) {
if (error instanceof ApiError && error.isStatus(401)) {
await this.handleSessionExpired()
}
throw error
@ -305,9 +306,7 @@ export class JanAuthService {
if (!response.ok) {
const errorText = await response.text()
throw new Error(
`API request failed: ${response.status} ${response.statusText} - ${errorText}`
)
throw new ApiError(response.status, response.statusText, errorText)
}
return response.json()
@ -418,7 +417,7 @@ export class JanAuthService {
)
} catch (error) {
console.error('Failed to fetch user profile:', error)
if (error instanceof Error && error.message.includes('401')) {
if (error instanceof ApiError && error.isStatus(401)) {
// Authentication failed - handle session expiry
await this.handleSessionExpired()
return null

View File

@ -0,0 +1,50 @@
/**
 * Shared error types for API responses
 */

/**
 * Error thrown when an HTTP API call returns a non-OK response.
 *
 * Carries the numeric status, the status text and the raw response body so
 * callers can branch on the kind of failure (404, 4xx, 5xx, ...) without
 * string-matching the message.
 */
export class ApiError extends Error {
  public readonly status: number
  public readonly statusText: string
  public readonly responseText: string

  /**
   * @param status - HTTP status code of the failed response
   * @param statusText - HTTP status text of the failed response
   * @param responseText - raw response body text
   * @param message - optional override for the Error message; when omitted
   *                  (or empty) a standard message is synthesized
   */
  constructor(status: number, statusText: string, responseText: string, message?: string) {
    const finalMessage = message
      ? message
      : `API request failed: ${status} ${statusText} - ${responseText}`
    super(finalMessage)
    this.name = 'ApiError'
    this.status = status
    this.statusText = statusText
    this.responseText = responseText

    // Maintains proper stack trace for where our error was thrown (only available on V8)
    if ((Error as any).captureStackTrace) {
      (Error as any).captureStackTrace(this, ApiError)
    }
  }

  /**
   * Check if this is a specific HTTP status code
   */
  isStatus(code: number): boolean {
    return code === this.status
  }

  /**
   * Check if this is a 404 Not Found error
   */
  isNotFound(): boolean {
    return this.isStatus(404)
  }

  /**
   * Check if this is a client error (4xx)
   */
  isClientError(): boolean {
    const s = this.status
    return s >= 400 && s < 500
  }

  /**
   * Check if this is a server error (5xx)
   */
  isServerError(): boolean {
    const s = this.status
    return s >= 500 && s < 600
  }
}

View File

@ -38,4 +38,4 @@ export interface IndexedDBConfig {
keyPath: string
indexes?: { name: string; keyPath: string | string[]; unique?: boolean }[]
}[]
}
}

View File

@ -2,4 +2,4 @@ export {}
declare global {
declare const JAN_API_BASE: string
}
}

View File

@ -1 +1 @@
/// <reference types="vite/client" />
/// <reference types="vite/client" />

View File

@ -3,6 +3,7 @@
"target": "ES2020",
"module": "ESNext",
"moduleResolution": "bundler",
"jsx": "react-jsx",
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
"strict": true,

View File

@ -9,11 +9,11 @@ export default defineConfig({
fileName: 'index'
},
rollupOptions: {
external: ['@janhq/core', 'zustand']
external: ['@janhq/core', 'zustand', 'react', 'react-dom', 'react/jsx-runtime', '@tabler/icons-react']
},
emptyOutDir: false // Don't clean the output directory
},
define: {
JAN_API_BASE: JSON.stringify(process.env.JAN_API_BASE || 'https://api-dev.jan.ai/v1'),
}
})
})

View File

@ -1,9 +1,8 @@
import { getJanDataFolderPath, fs, joinPath, events } from '@janhq/core'
import { invoke } from '@tauri-apps/api/core'
import { getProxyConfig } from './util'
import { getProxyConfig, basenameNoExt } from './util'
import { dirname, basename } from '@tauri-apps/api/path'
import { getSystemInfo } from '@janhq/tauri-plugin-hardware-api'
/*
* Reads currently installed backends in janDataFolderPath
*
@ -73,10 +72,7 @@ async function fetchRemoteSupportedBackends(
if (!name.startsWith(prefix)) continue
const backend = name
.replace(prefix, '')
.replace('.tar.gz', '')
.replace('.zip', '')
const backend = basenameNoExt(name).slice(prefix.length)
if (supportedBackends.includes(backend)) {
remote.push({ version, backend })

View File

@ -0,0 +1,12 @@
export {}

// Module augmentation: extend the global Fetch API RequestInit so code using
// the Tauri HTTP plugin's extra request options still type-checks.
declare global {
  interface RequestInit {
    /**
     * Tauri HTTP plugin option for connection timeout in milliseconds.
     */
    connectTimeout?: number
  }
}

View File

@ -1,3 +1,23 @@
// File path utilities

/**
 * Strips a known archive extension — or, failing that, the final extension —
 * from a file name, leaving any directory portion intact.
 *
 * Known multi-part archive extensions (".tar.gz", ".zip") are matched
 * case-insensitively and removed whole, so "x.tar.gz" -> "x" rather than
 * "x.tar". Otherwise only the last extension is dropped, and only when the
 * dot belongs to the final path segment and is not its first character
 * (so "some.dir/README" and "dir/.bashrc" are returned unchanged).
 *
 * @param filePath - file name or path, e.g. "llama-win-x64.tar.gz"
 * @returns the input without its extension
 */
export function basenameNoExt(filePath: string): string {
  const VALID_EXTENSIONS = ['.tar.gz', '.zip']

  // Handle known (possibly multi-part) archive extensions first.
  const lowered = filePath.toLowerCase()
  for (const ext of VALID_EXTENSIONS) {
    if (lowered.endsWith(ext)) {
      return filePath.slice(0, -ext.length)
    }
  }

  // Fallback: remove only the last extension. BUGFIX: the previous bare
  // `lastIndexOf('.') > 0` check ignored path separators, so a dot inside a
  // directory name truncated the whole tail ("some.dir/README" -> "some")
  // and a dotfile after a separator lost its name ("dir/.bashrc" -> "dir/").
  // The dot must now lie inside the final path segment, past its first char.
  const lastSep = Math.max(filePath.lastIndexOf('/'), filePath.lastIndexOf('\\'))
  const lastDotIndex = filePath.lastIndexOf('.')
  if (lastDotIndex > lastSep + 1) {
    return filePath.slice(0, lastDotIndex)
  }
  return filePath
}
// Zustand proxy state structure
interface ProxyState {
proxyEnabled: boolean

View File

@ -342,41 +342,41 @@ __metadata:
"@janhq/core@file:../../core/package.tgz::locator=%40janhq%2Fassistant-extension%40workspace%3Aassistant-extension":
version: 0.1.10
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f9bdfe&locator=%40janhq%2Fassistant-extension%40workspace%3Aassistant-extension"
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f15485&locator=%40janhq%2Fassistant-extension%40workspace%3Aassistant-extension"
dependencies:
rxjs: "npm:^7.8.1"
ulidx: "npm:^2.3.0"
checksum: 10c0/417ea9bd3e5b53264596d2ee816c3e24299f8b721f6ea951d078342555da457ebca4d5b1e116bf187ac77ec0a9e3341211d464f4ffdbd2a3915139523688d41d
checksum: 10c0/257621cb56db31a4dd3a2b509ec4c61217022e74bbd39cf6a1a172073654b9a65eee94ef9c1b4d4f5d2231d159c8818cb02846f3d88fe14f102f43169ad3737c
languageName: node
linkType: hard
"@janhq/core@file:../../core/package.tgz::locator=%40janhq%2Fconversational-extension%40workspace%3Aconversational-extension":
version: 0.1.10
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f9bdfe&locator=%40janhq%2Fconversational-extension%40workspace%3Aconversational-extension"
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f15485&locator=%40janhq%2Fconversational-extension%40workspace%3Aconversational-extension"
dependencies:
rxjs: "npm:^7.8.1"
ulidx: "npm:^2.3.0"
checksum: 10c0/417ea9bd3e5b53264596d2ee816c3e24299f8b721f6ea951d078342555da457ebca4d5b1e116bf187ac77ec0a9e3341211d464f4ffdbd2a3915139523688d41d
checksum: 10c0/257621cb56db31a4dd3a2b509ec4c61217022e74bbd39cf6a1a172073654b9a65eee94ef9c1b4d4f5d2231d159c8818cb02846f3d88fe14f102f43169ad3737c
languageName: node
linkType: hard
"@janhq/core@file:../../core/package.tgz::locator=%40janhq%2Fdownload-extension%40workspace%3Adownload-extension":
version: 0.1.10
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f9bdfe&locator=%40janhq%2Fdownload-extension%40workspace%3Adownload-extension"
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f15485&locator=%40janhq%2Fdownload-extension%40workspace%3Adownload-extension"
dependencies:
rxjs: "npm:^7.8.1"
ulidx: "npm:^2.3.0"
checksum: 10c0/417ea9bd3e5b53264596d2ee816c3e24299f8b721f6ea951d078342555da457ebca4d5b1e116bf187ac77ec0a9e3341211d464f4ffdbd2a3915139523688d41d
checksum: 10c0/257621cb56db31a4dd3a2b509ec4c61217022e74bbd39cf6a1a172073654b9a65eee94ef9c1b4d4f5d2231d159c8818cb02846f3d88fe14f102f43169ad3737c
languageName: node
linkType: hard
"@janhq/core@file:../../core/package.tgz::locator=%40janhq%2Fllamacpp-extension%40workspace%3Allamacpp-extension":
version: 0.1.10
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f9bdfe&locator=%40janhq%2Fllamacpp-extension%40workspace%3Allamacpp-extension"
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f15485&locator=%40janhq%2Fllamacpp-extension%40workspace%3Allamacpp-extension"
dependencies:
rxjs: "npm:^7.8.1"
ulidx: "npm:^2.3.0"
checksum: 10c0/417ea9bd3e5b53264596d2ee816c3e24299f8b721f6ea951d078342555da457ebca4d5b1e116bf187ac77ec0a9e3341211d464f4ffdbd2a3915139523688d41d
checksum: 10c0/257621cb56db31a4dd3a2b509ec4c61217022e74bbd39cf6a1a172073654b9a65eee94ef9c1b4d4f5d2231d159c8818cb02846f3d88fe14f102f43169ad3737c
languageName: node
linkType: hard

Some files were not shown because too many files have changed in this diff Show More