Merge branch 'dev' into feat/encrypt-api-key
@@ -70,10 +70,9 @@ jobs:
run: |
echo "Version: ${{ inputs.new_version }}"
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
"usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
fi
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
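For reference, a minimal local sketch of what this jq version bump does (the JSON file and version number below are illustrative, not the project's real tauri.conf.json):

echo '{"version":"0.0.0","bundle":{"createUpdaterArtifacts":true}}' > /tmp/demo.conf.json
jq --arg version "0.7.1" '.version = $version | .bundle.createUpdaterArtifacts = false' /tmp/demo.conf.json
# prints the same object with .version set to "0.7.1" and .bundle.createUpdaterArtifacts set to false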
@ -83,7 +82,7 @@ jobs:
|
||||
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
|
||||
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||
|
||||
|
||||
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
|
||||
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||
|
||||
@ -96,7 +95,7 @@ jobs:
|
||||
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
|
||||
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
|
||||
|
||||
|
||||
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
|
||||
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
|
||||
@ -125,7 +124,7 @@ jobs:
|
||||
env:
|
||||
RELEASE_CHANNEL: '${{ inputs.channel }}'
|
||||
AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }}
|
||||
|
||||
|
||||
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
@@ -136,4 +135,4 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
|
||||
|
||||
@ -91,10 +91,9 @@ jobs:
|
||||
echo "Version: ${{ inputs.new_version }}"
|
||||
# Update tauri.conf.json
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
|
||||
"usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
|
||||
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
|
||||
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
|
||||
fi
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||
@ -104,7 +103,7 @@ jobs:
|
||||
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
|
||||
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||
|
||||
|
||||
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
|
||||
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||
|
||||
@ -117,7 +116,7 @@ jobs:
|
||||
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
|
||||
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
|
||||
|
||||
|
||||
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
|
||||
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
|
||||
@ -128,7 +127,7 @@ jobs:
|
||||
|
||||
# Temporarily enable devtool on prod build
|
||||
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||
cat ./src-tauri/Cargo.toml
|
||||
cat ./src-tauri/Cargo.toml
|
||||
|
||||
# Change app name for beta and nightly builds
|
||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||
@ -139,7 +138,7 @@ jobs:
|
||||
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||
|
||||
cat ./src-tauri/tauri.conf.json
|
||||
|
||||
|
||||
# Update Cargo.toml
|
||||
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||
@ -184,4 +183,3 @@ jobs:
|
||||
with:
|
||||
name: jan-linux-amd64-flatpak-${{ inputs.new_version }}-AppImage
|
||||
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
|
||||
|
||||
|
||||
@ -108,10 +108,9 @@ jobs:
|
||||
echo "Version: ${{ inputs.new_version }}"
|
||||
# Update tauri.conf.json
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
|
||||
"usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
|
||||
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
|
||||
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
|
||||
fi
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||
@ -121,7 +120,7 @@ jobs:
|
||||
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
|
||||
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||
|
||||
|
||||
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
|
||||
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||
|
||||
@ -134,7 +133,7 @@ jobs:
|
||||
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
|
||||
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
|
||||
|
||||
|
||||
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
|
||||
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
|
||||
@ -156,7 +155,7 @@ jobs:
|
||||
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||
|
||||
cat ./src-tauri/tauri.conf.json
|
||||
|
||||
|
||||
# Update Cargo.toml
|
||||
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||
|
||||
@@ -49,6 +49,8 @@ jobs:
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
jq '.bundle.windows.signCommand = "echo External build - skipping signature: %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||
@ -80,6 +82,36 @@ jobs:
|
||||
echo "---------./src-tauri/Cargo.toml---------"
|
||||
cat ./src-tauri/Cargo.toml
|
||||
|
||||
generate_build_version() {
### Example
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
### input 0.5.6-1213 output will be 0.5.6 and 0.5.6.1213
local new_version="$1"
local base_version
local t_value
# Check if it has a "-"
if [[ "$new_version" == *-* ]]; then
base_version="${new_version%%-*}" # part before -
suffix="${new_version#*-}" # part after -
# Check if it is rcX-beta
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
t_value="${BASH_REMATCH[1]}"
else
t_value="$suffix"
fi
else
base_version="$new_version"
t_value="0"
fi
# Export two values
new_base_version="$base_version"
new_build_version="${base_version}.${t_value}"
}
generate_build_version ${{ inputs.new_version }}
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template

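As a quick sanity check outside CI, the function above can be exercised with the documented examples (a sketch; it assumes generate_build_version has been pasted into the current shell):

for v in 0.5.6 0.5.6-rc2-beta 0.5.6-1213; do
  generate_build_version "$v"
  echo "$v -> base=$new_base_version build=$new_build_version"
done
# 0.5.6          -> base=0.5.6 build=0.5.6.0
# 0.5.6-rc2-beta -> base=0.5.6 build=0.5.6.2
# 0.5.6-1213     -> base=0.5.6 build=0.5.6.1213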
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||
@ -103,7 +135,14 @@ jobs:
|
||||
chmod +x .github/scripts/rename-workspace.sh
|
||||
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||
cat ./package.json
|
||||
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
else
|
||||
sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
fi
|
||||
echo "---------nsis.template---------"
|
||||
cat ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
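For context, a small sketch of what these sed placeholder substitutions do to the NSIS template (the template line and values below are made up for illustration, not the real file):

printf 'Name "jan_productname jan_version (build jan_build)"\n' > /tmp/demo.nsi
sed -i "s/jan_productname/Jan-nightly/g; s/jan_version/0.5.6/g; s/jan_build/0.5.6.1213/g" /tmp/demo.nsi
cat /tmp/demo.nsi
# Name "Jan-nightly 0.5.6 (build 0.5.6.1213)"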
- name: Build app
|
||||
shell: bash
|
||||
run: |
|
||||
|
||||
@ -54,6 +54,8 @@ on:
|
||||
value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }}
|
||||
FILE_NAME:
|
||||
value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }}
|
||||
MSI_FILE_NAME:
|
||||
value: ${{ jobs.build-windows-x64.outputs.MSI_FILE_NAME }}
|
||||
|
||||
jobs:
|
||||
build-windows-x64:
|
||||
@ -61,6 +63,7 @@ jobs:
|
||||
outputs:
|
||||
WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }}
|
||||
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
|
||||
MSI_FILE_NAME: ${{ steps.metadata.outputs.MSI_FILE_NAME }}
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
@ -95,9 +98,15 @@ jobs:
|
||||
# Update tauri.conf.json
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||
jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
|
||||
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||
mv /tmp/package.json web-app/package.json
|
||||
|
||||
# Add sign commands to tauri.windows.conf.json
|
||||
jq '.bundle.windows.signCommand = "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
|
||||
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
|
||||
|
||||
# Update tauri plugin versions
|
||||
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
|
||||
@ -124,9 +133,35 @@ jobs:
|
||||
echo "---------./src-tauri/Cargo.toml---------"
|
||||
cat ./src-tauri/Cargo.toml
|
||||
|
||||
# Add sign commands to tauri.windows.conf.json
|
||||
jq '.bundle.windows.signCommand = "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
|
||||
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
|
||||
generate_build_version() {
### Example
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
### input 0.5.6-1213 output will be 0.5.6 and 0.5.6.1213
local new_version="$1"
|
||||
local base_version
|
||||
local t_value
|
||||
# Check if it has a "-"
|
||||
if [[ "$new_version" == *-* ]]; then
|
||||
base_version="${new_version%%-*}" # part before -
|
||||
suffix="${new_version#*-}" # part after -
|
||||
# Check if it is rcX-beta
|
||||
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
|
||||
t_value="${BASH_REMATCH[1]}"
|
||||
else
|
||||
t_value="$suffix"
|
||||
fi
|
||||
else
|
||||
base_version="$new_version"
|
||||
t_value="0"
|
||||
fi
|
||||
# Export two values
|
||||
new_base_version="$base_version"
|
||||
new_build_version="${base_version}.${t_value}"
|
||||
}
|
||||
generate_build_version ${{ inputs.new_version }}
|
||||
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
|
||||
echo "---------tauri.windows.conf.json---------"
|
||||
cat ./src-tauri/tauri.windows.conf.json
|
||||
@ -160,7 +195,14 @@ jobs:
|
||||
chmod +x .github/scripts/rename-workspace.sh
|
||||
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||
cat ./package.json
|
||||
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
else
|
||||
sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
fi
|
||||
echo "---------nsis.template---------"
|
||||
cat ./src-tauri/tauri.bundle.windows.nsis.template
|
||||
|
||||
- name: Install AzureSignTool
|
||||
run: |
|
||||
@@ -189,9 +231,15 @@ jobs:
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-${{ inputs.new_version }}
name: jan-windows-exe-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/nsis/*.exe
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-msi-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/msi/*.msi

## Set output filename for windows
- name: Set output filename for windows
@@ -201,13 +249,18 @@ jobs:
if [ "${{ inputs.channel }}" != "stable" ]; then
FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig)

MSI_FILE="Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64_en-US.msi"
else
FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig)

MSI_FILE="Jan_${{ inputs.new_version }}_x64_en-US.msi"
fi

echo "::set-output name=WIN_SIG::$WIN_SIG"
echo "::set-output name=FILE_NAME::$FILE_NAME"
echo "::set-output name=MSI_FILE_NAME::$MSI_FILE"
id: metadata
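Worth noting: ::set-output is deprecated in GitHub Actions in favor of writing to $GITHUB_OUTPUT; an equivalent sketch using the same variables as above would be:

{
  echo "WIN_SIG=$WIN_SIG"
  echo "FILE_NAME=$FILE_NAME"
  echo "MSI_FILE_NAME=$MSI_FILE"
} >> "$GITHUB_OUTPUT"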
|
||||
|
||||
## Upload to s3 for nightly and beta
|
||||
|
||||
.gitignore (vendored)
@ -21,11 +21,13 @@ src-tauri/resources/lib
|
||||
src-tauri/icons
|
||||
!src-tauri/icons/icon.png
|
||||
src-tauri/gen/apple
|
||||
src-tauri/gen/android
|
||||
src-tauri/resources/bin
|
||||
|
||||
# Helper tools
|
||||
.opencode
|
||||
OpenCode.md
|
||||
Claude.md
|
||||
archive/
|
||||
.cache/
|
||||
|
||||
@ -60,3 +62,4 @@ src-tauri/resources/
|
||||
## test
|
||||
test-data
|
||||
llm-docs
|
||||
.claude/agents
|
||||
|
||||
Makefile
@ -41,9 +41,25 @@ else
|
||||
@echo "Not macOS; skipping Rust target installation."
|
||||
endif
|
||||
|
||||
# Install required Rust targets for Android builds
install-android-rust-targets:
@echo "Checking and installing Android Rust targets..."
@rustup target list --installed | grep -q "aarch64-linux-android" || rustup target add aarch64-linux-android
@rustup target list --installed | grep -q "armv7-linux-androideabi" || rustup target add armv7-linux-androideabi
@rustup target list --installed | grep -q "i686-linux-android" || rustup target add i686-linux-android
@rustup target list --installed | grep -q "x86_64-linux-android" || rustup target add x86_64-linux-android
@echo "Android Rust targets ready!"

# Install required Rust targets for iOS builds
install-ios-rust-targets:
@echo "Checking and installing iOS Rust targets..."
@rustup target list --installed | grep -q "aarch64-apple-ios" || rustup target add aarch64-apple-ios
@rustup target list --installed | grep -q "aarch64-apple-ios-sim" || rustup target add aarch64-apple-ios-sim
@rustup target list --installed | grep -q "x86_64-apple-ios" || rustup target add x86_64-apple-ios
@echo "iOS Rust targets ready!"

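Outside of make, the same check these targets perform can be run by hand (a sketch; assumes rustup is installed and on PATH):

rustup target list --installed | grep -E 'android|apple-ios'
# add anything missing manually, e.g.:
rustup target add aarch64-linux-android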
dev: install-and-build
|
||||
yarn download:bin
|
||||
yarn download:lib
|
||||
yarn dev
|
||||
|
||||
# Web application targets
|
||||
@ -58,12 +74,41 @@ build-web-app: install-web-app
|
||||
yarn build:core
|
||||
yarn build:web-app
|
||||
|
||||
serve-web-app:
|
||||
serve-web-app:
|
||||
yarn serve:web-app
|
||||
|
||||
build-serve-web-app: build-web-app
|
||||
yarn serve:web-app
|
||||
|
||||
# Mobile
|
||||
dev-android: install-and-build install-android-rust-targets
|
||||
@echo "Setting up Android development environment..."
|
||||
@if [ ! -d "src-tauri/gen/android" ]; then \
|
||||
echo "Android app not initialized. Initializing..."; \
|
||||
yarn tauri android init; \
|
||||
fi
|
||||
@echo "Sourcing Android environment setup..."
|
||||
@bash autoqa/scripts/setup-android-env.sh echo "Android environment ready"
|
||||
@echo "Starting Android development server..."
|
||||
yarn dev:android
|
||||
|
||||
dev-ios: install-and-build install-ios-rust-targets
|
||||
@echo "Setting up iOS development environment..."
|
||||
ifeq ($(shell uname -s),Darwin)
|
||||
@if [ ! -d "src-tauri/gen/ios" ]; then \
|
||||
echo "iOS app not initialized. Initializing..."; \
|
||||
yarn tauri ios init; \
|
||||
fi
|
||||
@echo "Checking iOS development requirements..."
|
||||
@xcrun --version > /dev/null 2>&1 || (echo "❌ Xcode command line tools not found. Install with: xcode-select --install" && exit 1)
|
||||
@xcrun simctl list devices available | grep -q "iPhone\|iPad" || (echo "❌ No iOS simulators found. Install simulators through Xcode." && exit 1)
|
||||
@echo "Starting iOS development server..."
|
||||
yarn dev:ios
|
||||
else
|
||||
@echo "❌ iOS development is only supported on macOS"
|
||||
@exit 1
|
||||
endif
|
||||
|
||||
# Linting
|
||||
lint: install-and-build
|
||||
yarn lint
|
||||
@ -71,9 +116,7 @@ lint: install-and-build
|
||||
# Testing
|
||||
test: lint
|
||||
yarn download:bin
|
||||
yarn download:lib
|
||||
ifeq ($(OS),Windows_NT)
|
||||
yarn download:windows-installer
|
||||
endif
|
||||
yarn test
|
||||
yarn copy:assets:tauri
|
||||
|
||||
autoqa/scripts/setup-android-env.sh (new executable file)
@ -0,0 +1,80 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Android Development Environment Setup for Jan
|
||||
|
||||
# Ensure rustup's Rust toolchain is used instead of Homebrew's
|
||||
export PATH="$HOME/.cargo/bin:$PATH"
|
||||
|
||||
# Set JAVA_HOME for Android builds
|
||||
export JAVA_HOME=/opt/homebrew/opt/openjdk@17/libexec/openjdk.jdk/Contents/Home
|
||||
export PATH="/opt/homebrew/opt/openjdk@17/bin:$PATH"
|
||||
|
||||
export ANDROID_HOME="$HOME/Library/Android/sdk"
|
||||
export ANDROID_NDK_ROOT="$HOME/Library/Android/sdk/ndk/29.0.14033849"
|
||||
export NDK_HOME="$HOME/Library/Android/sdk/ndk/29.0.14033849"
|
||||
|
||||
# Add Android tools to PATH
|
||||
export PATH=$PATH:$ANDROID_HOME/platform-tools:$ANDROID_HOME/tools:$ANDROID_HOME/cmdline-tools/latest/bin:$ANDROID_HOME/emulator:$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin
|
||||
|
||||
# Set up CC and CXX for Android compilation
|
||||
export CC_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
|
||||
export CXX_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang++"
|
||||
export AR_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar"
|
||||
export RANLIB_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ranlib"
|
||||
|
||||
# Additional environment variables for Rust cross-compilation
|
||||
export CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
|
||||
|
||||
# Only set global CC and AR for Android builds (when IS_ANDROID is set)
|
||||
if [ "$IS_ANDROID" = "true" ]; then
|
||||
export CC="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
|
||||
export AR="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar"
|
||||
echo "Global CC and AR set for Android build"
|
||||
fi
|
||||
|
||||
# Create symlinks for Android tools if they don't exist
|
||||
mkdir -p ~/.local/bin
|
||||
if [ ! -f ~/.local/bin/aarch64-linux-android-ranlib ]; then
|
||||
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ranlib ~/.local/bin/aarch64-linux-android-ranlib
|
||||
fi
|
||||
if [ ! -f ~/.local/bin/aarch64-linux-android-clang ]; then
|
||||
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang ~/.local/bin/aarch64-linux-android-clang
|
||||
fi
|
||||
if [ ! -f ~/.local/bin/aarch64-linux-android-clang++ ]; then
|
||||
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang++ ~/.local/bin/aarch64-linux-android-clang++
|
||||
fi
|
||||
|
||||
# Fix the broken clang symlinks by ensuring base clang is available
|
||||
if [ ! -f ~/.local/bin/clang ]; then
|
||||
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/clang ~/.local/bin/clang
|
||||
fi
|
||||
if [ ! -f ~/.local/bin/clang++ ]; then
|
||||
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/clang++ ~/.local/bin/clang++
|
||||
fi
|
||||
|
||||
# Create symlinks for target-specific ar tools
|
||||
if [ ! -f ~/.local/bin/aarch64-linux-android-ar ]; then
|
||||
ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar ~/.local/bin/aarch64-linux-android-ar
|
||||
fi
|
||||
export PATH="$HOME/.local/bin:$PATH"
|
||||
|
||||
echo "Android environment configured:"
|
||||
echo "ANDROID_HOME: $ANDROID_HOME"
|
||||
echo "ANDROID_NDK_ROOT: $ANDROID_NDK_ROOT"
|
||||
echo "PATH includes NDK toolchain: $(echo $PATH | grep -o "ndk.*bin" || echo "NOT FOUND")"
|
||||
|
||||
# Verify required tools
|
||||
echo -e "\nChecking required tools:"
|
||||
which adb && echo "✅ adb found" || echo "❌ adb not found"
|
||||
which emulator && echo "✅ emulator found" || echo "❌ emulator not found"
|
||||
which $CC_aarch64_linux_android && echo "✅ Android clang found" || echo "❌ Android clang not found"
|
||||
|
||||
# Show available AVDs
|
||||
echo -e "\nAvailable Android Virtual Devices:"
|
||||
emulator -list-avds 2>/dev/null || echo "No AVDs found"
|
||||
|
||||
# Execute the provided command
|
||||
if [ "$1" ]; then
|
||||
echo -e "\nExecuting: $@"
|
||||
exec "$@"
|
||||
fi
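A usage sketch for this helper (hypothetical invocations; the script exports the NDK toolchain variables and then execs whatever command follows it, which is how the Makefile's dev-android target uses it):

# one-off Android cross-compile with the toolchain configured
IS_ANDROID=true bash autoqa/scripts/setup-android-env.sh cargo build --target aarch64-linux-android

# or source it to keep the exports in the current shell
source autoqa/scripts/setup-android-env.sh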
|
||||
@ -27,11 +27,13 @@
|
||||
"devDependencies": {
|
||||
"@npmcli/arborist": "^7.1.0",
|
||||
"@types/node": "^22.10.0",
|
||||
"@types/react": "19.1.2",
|
||||
"@vitest/coverage-v8": "^2.1.8",
|
||||
"@vitest/ui": "^2.1.8",
|
||||
"eslint": "8.57.0",
|
||||
"happy-dom": "^15.11.6",
|
||||
"pacote": "^21.0.0",
|
||||
"react": "19.0.0",
|
||||
"request": "^2.88.2",
|
||||
"request-progress": "^3.0.0",
|
||||
"rimraf": "^6.0.1",
|
||||
@ -44,5 +46,8 @@
|
||||
"rxjs": "^7.8.1",
|
||||
"ulidx": "^2.3.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "19.0.0"
|
||||
},
|
||||
"packageManager": "yarn@4.5.3"
|
||||
}
|
||||
|
||||
@ -10,7 +10,7 @@ export default defineConfig([
|
||||
sourcemap: true,
|
||||
},
|
||||
platform: 'browser',
|
||||
external: ['path'],
|
||||
external: ['path', 'react', 'react-dom', 'react/jsx-runtime'],
|
||||
define: {
|
||||
NODE: JSON.stringify(`${pkgJson.name}/${pkgJson.node}`),
|
||||
VERSION: JSON.stringify(pkgJson.version),
|
||||
|
||||
@ -250,4 +250,4 @@ describe('ConversationalExtension', () => {
|
||||
|
||||
expect(retrievedAssistant.modelId).toBe('')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -131,4 +131,4 @@ describe('LocalOAIEngine', () => {
|
||||
expect(engine.loadedModel).toBeUndefined()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -96,4 +96,4 @@ describe('MCPExtension', () => {
|
||||
expect(healthy).toBe(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
import { MCPInterface, MCPTool, MCPToolCallResult } from '../../types'
|
||||
import { MCPInterface, MCPTool, MCPToolCallResult, MCPToolComponentProps } from '../../types'
|
||||
import { BaseExtension, ExtensionTypeEnum } from '../extension'
|
||||
import type { ComponentType } from 'react'
|
||||
|
||||
/**
|
||||
* MCP (Model Context Protocol) extension for managing tools and server communication.
|
||||
@ -18,4 +19,16 @@ export abstract class MCPExtension extends BaseExtension implements MCPInterface
|
||||
abstract getConnectedServers(): Promise<string[]>
|
||||
abstract refreshTools(): Promise<void>
|
||||
abstract isHealthy(): Promise<boolean>
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional method to provide a custom UI component for tools
|
||||
* @returns A React component or null if no custom component is provided
|
||||
*/
|
||||
getToolComponent?(): ComponentType<MCPToolComponentProps> | null
|
||||
|
||||
/**
|
||||
* Optional method to get the list of tool names that should be disabled by default
|
||||
* @returns Array of tool names that should be disabled by default for new users
|
||||
*/
|
||||
getDefaultDisabledTools?(): Promise<string[]>
|
||||
}
|
||||
|
||||
@ -131,4 +131,4 @@ describe('ModelManager', () => {
|
||||
expect(modelManager.models.get('model-2')).toEqual(model2)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -16,4 +16,4 @@ if (!window.core) {
|
||||
})
|
||||
}
|
||||
|
||||
// Add any other global mocks needed for core tests
|
||||
// Add any other global mocks needed for core tests
|
||||
|
||||
@ -1,2 +1,2 @@
|
||||
export * from './mcpEntity'
|
||||
export * from './mcpInterface'
|
||||
export * from './mcpInterface'
|
||||
|
||||
@ -21,4 +21,18 @@ export interface MCPServerInfo {
|
||||
name: string
|
||||
connected: boolean
|
||||
tools?: MCPTool[]
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Props for MCP tool UI components
|
||||
*/
|
||||
export interface MCPToolComponentProps {
|
||||
/** List of available MCP tools */
|
||||
tools: MCPTool[]
|
||||
|
||||
/** Function to check if a specific tool is currently enabled */
|
||||
isToolEnabled: (toolName: string) => boolean
|
||||
|
||||
/** Function to toggle a tool's enabled/disabled state */
|
||||
onToolToggle: (toolName: string, enabled: boolean) => void
|
||||
}
|
||||
|
||||
@ -29,4 +29,4 @@ export interface MCPInterface {
|
||||
* Check if MCP service is healthy
|
||||
*/
|
||||
isHealthy(): Promise<boolean>
|
||||
}
|
||||
}
|
||||
|
||||
@ -112,6 +112,12 @@
|
||||
/docs/remote-models/openrouter /docs/desktop/remote-models/openrouter 302
|
||||
/docs/server-examples/llmcord /docs/desktop/server-examples/llmcord 302
|
||||
/docs/server-examples/tabby /docs/desktop/server-examples/tabby 302
|
||||
/docs/built-in/tensorrt-llm /docs/desktop/llama-cpp 302
|
||||
/docs/desktop/docs/desktop/linux /docs/desktop/install/linux 302
|
||||
/windows /docs/desktop/install/windows 302
|
||||
/docs/quickstart /docs/ 302
|
||||
/docs/desktop/mac /docs/desktop/install/mac 302
|
||||
/handbook/open-superintelligence /handbook/why/open-superintelligence 302
|
||||
|
||||
/guides/integrations/continue/ /docs/desktop/server-examples/continue-dev 302
|
||||
/continue-dev /docs/desktop/server-examples/continue-dev 302
|
||||
@ -130,4 +136,4 @@
|
||||
/local-server/troubleshooting /docs/desktop/troubleshooting 302
|
||||
/mcp /docs/desktop/mcp 302
|
||||
/quickstart /docs/desktop/quickstart 302
|
||||
/server-examples/continue-dev /docs/desktop/server-examples/continue-dev 302
|
||||
/server-examples/continue-dev /docs/desktop/server-examples/continue-dev 302
|
||||
|
||||
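Once deployed, any of these redirect rules can be spot-checked with curl (a sketch using one of the rules above; assumes the site is live at jan.ai):

curl -sI https://jan.ai/docs/quickstart | grep -i '^location'
# expected: a Location header pointing at /docs/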
@ -6,7 +6,7 @@ const camelCase = (str) => {
|
||||
return str.replace(/[-_](\w)/g, (_, c) => c.toUpperCase())
|
||||
}
|
||||
|
||||
const categories = ['building-jan', 'research']
|
||||
const categories = ['building-jan', 'research', 'guides']
|
||||
|
||||
/**
|
||||
* @param {import("plop").NodePlopAPI} plop
|
||||
|
||||
New binary files (images/media added or moved in this branch):
docs/public/assets/images/changelog/jan-release-v0.7.0.jpeg
docs/public/assets/images/general/ai-for-teacher.jpeg
docs/public/assets/images/general/chatgpt-alternative-jan.jpeg
docs/public/assets/images/general/is-chatgpt-down.jpg
docs/public/assets/images/general/jan-ai-for-teacher.mp4
@ -1,125 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:news="http://www.google.com/schemas/sitemap-news/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml" xmlns:mobile="http://www.google.com/schemas/sitemap-mobile/1.0" xmlns:image="http://www.google.com/schemas/sitemap-image/1.1" xmlns:video="http://www.google.com/schemas/sitemap-video/1.1">
|
||||
<url><loc>https://jan.ai</loc><lastmod>2025-09-24T03:40:05.491Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/api-reference</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/api-reference/api-reference</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/api-reference/architecture</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/api-reference/configuration</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/api-reference/development</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/api-reference/installation</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/blog</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2023-12-21-faster-inference-across-platform</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-01-16-settings-options-right-panel</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-01-29-local-api-server</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-02-05-jan-data-folder</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-02-10-jan-is-more-stable</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-02-26-home-servers-with-helm</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-03-06-ui-revamp-settings</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-03-11-import-models</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-03-19-nitro-tensorrt-llm-extension</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-04-02-groq-api-integration</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-04-15-new-mistral-extension</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-04-25-llama3-command-r-hugginface</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-05-20-llamacpp-upgrade-new-remote-models</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-05-28-cohere-aya-23-8b-35b-phi-3-medium</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-06-21-nvidia-nim-support</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-07-15-claude-3-5-support</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-09-01-llama3-1-gemma2-support</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-09-17-improved-cpu-performance</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-10-24-jan-stable</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-11-22-jan-bugs</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-11.14-jan-supports-qwen-coder</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-12-03-jan-is-faster</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-12-05-jan-hot-fix-mac</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2024-12-30-jan-new-privacy</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-01-06-key-issues-resolved</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-01-23-deepseek-r1-jan</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-02-18-advanced-llama.cpp-settings</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-03-14-jan-security-patch</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-05-14-jan-qwen3-patch</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-06-19-jan-ui-revamp</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-06-26-jan-nano-mcp</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-07-17-responsive-ui</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-07-31-llamacpp-tutorials</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-08-07-gpt-oss</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-08-14-general-improvs</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-08-28-image-support</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/changelog/2025-09-18-auto-optimize-vision-imports</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/api-server</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/assistants</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/data-folder</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/install/linux</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/install/mac</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/install/windows</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/jan-models/jan-nano-128</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/jan-models/jan-nano-32</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/jan-models/jan-v1</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/jan-models/lucy</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/llama-cpp</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/llama-cpp-server</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/manage-models</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/mcp</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/mcp-examples/browser/browserbase</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/mcp-examples/data-analysis/e2b</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/mcp-examples/data-analysis/jupyter</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/mcp-examples/deepresearch/octagon</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/mcp-examples/design/canva</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/mcp-examples/productivity/linear</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/mcp-examples/productivity/todoist</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/mcp-examples/search/exa</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/mcp-examples/search/serper</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/model-parameters</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/privacy</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/privacy-policy</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/quickstart</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/remote-models/anthropic</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/remote-models/cohere</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/remote-models/google</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/remote-models/groq</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/remote-models/huggingface</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/remote-models/mistralai</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/remote-models/openai</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/remote-models/openrouter</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/server-examples/continue-dev</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/server-examples/llmcord</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/server-examples/n8n</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/server-examples/tabby</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/server-settings</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/server-troubleshooting</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/settings</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/desktop/troubleshooting</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/api-reference</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/api-reference-administration</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/api-reference-authentication</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/api-reference-chat</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/api-reference-chat-conversations</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/api-reference-conversations</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/api-reference-jan-responses</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/api-reference-jan-server</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/architecture</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/configuration</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/development</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/installation</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/docs/server/overview</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/download</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/handbook</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/handbook/betting-on-open-source</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/handbook/open-superintelligence</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/post/benchmarking-nvidia-tensorrt-llm</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/post/bitdefender</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/post/data-is-moat</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/post/deepresearch</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/post/deepseek-r1-locally</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/post/jan-v1-for-research</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/post/offline-chatgpt-alternative</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/post/qwen3-settings</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/post/rag-is-not-enough</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/post/run-ai-models-locally</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/post/run-gpt-oss-locally</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/privacy</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
<url><loc>https://jan.ai/support</loc><lastmod>2025-09-24T03:40:05.492Z</lastmod><changefreq>daily</changefreq><priority>1</priority></url>
|
||||
</urlset>
|
||||
@ -19,6 +19,10 @@ const Blog = () => {
|
||||
name: 'Research',
|
||||
id: 'research',
|
||||
},
|
||||
{
|
||||
name: 'Guides',
|
||||
id: 'guides',
|
||||
},
|
||||
]
|
||||
|
||||
return (
|
||||
|
||||
@ -16,7 +16,10 @@ const FOOTER_MENUS: FooterMenu[] = [
|
||||
{
|
||||
title: 'Company',
|
||||
links: [
|
||||
{ name: 'Vision', href: '/', comingSoon: true },
|
||||
{
|
||||
name: 'Open Superintelligence',
|
||||
href: '/handbook/why/open-superintelligence',
|
||||
},
|
||||
{ name: 'Handbook', href: '/handbook' },
|
||||
{ name: 'Community', href: 'https://discord.com/invite/FTk2MvZwJH' },
|
||||
{ name: 'Careers', href: 'https://menlo.bamboohr.com/careers' },
|
||||
|
||||
@ -4,7 +4,7 @@ import { useRouter } from 'next/router'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { FaDiscord, FaGithub } from 'react-icons/fa'
|
||||
import { FiDownload } from 'react-icons/fi'
|
||||
import { FaXTwitter } from 'react-icons/fa6'
|
||||
import { FaXTwitter, FaLinkedinIn } from 'react-icons/fa6'
|
||||
import { Button } from './ui/button'
|
||||
import LogoJanSVG from '@/assets/icons/logo-jan.svg'
|
||||
|
||||
@ -113,6 +113,43 @@ const Navbar = ({ noScroll }: { noScroll?: boolean }) => {
|
||||
</Button>
|
||||
</a>
|
||||
</li>
|
||||
|
||||
<li>
|
||||
<div className={cn('flex gap-4', !isLanding && '!text-black')}>
|
||||
<a
|
||||
href="https://discord.com/invite/FTk2MvZwJH"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="rounded-lg flex items-center justify-center"
|
||||
>
|
||||
<FaDiscord className="size-5" />
|
||||
</a>
|
||||
<a
|
||||
href="https://twitter.com/jandotai"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="rounded-lg flex items-center justify-center"
|
||||
>
|
||||
<FaXTwitter className="size-5" />
|
||||
</a>
|
||||
<a
|
||||
href="https://linkedin.com/company/opensuperintelligence"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="rounded-lg flex items-center justify-center"
|
||||
>
|
||||
<FaLinkedinIn className="size-5" />
|
||||
</a>
|
||||
<a
|
||||
href="https://github.com/menloresearch/jan"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="rounded-lg flex items-center justify-center"
|
||||
>
|
||||
<FaGithub className="size-5" />
|
||||
</a>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
|
||||
@ -232,6 +269,14 @@ const Navbar = ({ noScroll }: { noScroll?: boolean }) => {
|
||||
>
|
||||
<FaXTwitter className="size-5" />
|
||||
</a>
|
||||
<a
|
||||
href="https://linkedin.com/company/opensuperintelligence"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-black rounded-lg flex items-center justify-center"
|
||||
>
|
||||
<FaLinkedinIn className="size-5" />
|
||||
</a>
|
||||
<a
|
||||
href="https://github.com/menloresearch/jan"
|
||||
target="_blank"
|
||||
|
||||
docs/src/components/OpenAIStatusChecker.tsx (new file)
@ -0,0 +1,283 @@
|
||||
import React, { useState, useEffect, useCallback } from 'react'
|
||||
import { AlertCircle, CheckCircle, Clock, RefreshCw } from 'lucide-react'
|
||||
|
||||
interface StatusData {
|
||||
status:
|
||||
| 'operational'
|
||||
| 'degraded'
|
||||
| 'partial_outage'
|
||||
| 'major_outage'
|
||||
| 'under_maintenance'
|
||||
| 'unknown'
|
||||
lastUpdated: string
|
||||
incidents: Array<{
|
||||
name: string
|
||||
status: string
|
||||
impact: string
|
||||
}>
|
||||
}
|
||||
|
||||
const StatusIcon = ({ status }: { status: string }) => {
|
||||
switch (status) {
|
||||
case 'operational':
|
||||
return <CheckCircle className="w-5 h-5 text-green-500" />
|
||||
case 'degraded':
|
||||
case 'partial_outage':
|
||||
return <AlertCircle className="w-5 h-5 text-yellow-500" />
|
||||
case 'major_outage':
|
||||
return <AlertCircle className="w-5 h-5 text-red-500" />
|
||||
case 'under_maintenance':
|
||||
return <Clock className="w-5 h-5 text-blue-500" />
|
||||
default:
|
||||
return <AlertCircle className="w-5 h-5 text-gray-500" />
|
||||
}
|
||||
}
|
||||
|
||||
const getStatusColor = (status: string) => {
|
||||
switch (status) {
|
||||
case 'operational':
|
||||
return 'bg-green-100 text-green-800 border-green-200 dark:bg-green-900/20 dark:text-green-300 dark:border-green-800'
|
||||
case 'degraded':
|
||||
case 'partial_outage':
|
||||
return 'bg-yellow-100 text-yellow-800 border-yellow-200 dark:bg-yellow-900/20 dark:text-yellow-300 dark:border-yellow-800'
|
||||
case 'major_outage':
|
||||
return 'bg-red-100 text-red-800 border-red-200 dark:bg-red-900/20 dark:text-red-300 dark:border-red-800'
|
||||
case 'under_maintenance':
|
||||
return 'bg-blue-100 text-blue-800 border-blue-200 dark:bg-blue-900/20 dark:text-blue-300 dark:border-blue-800'
|
||||
default:
|
||||
return 'bg-gray-100 text-gray-800 border-gray-200 dark:bg-gray-900/20 dark:text-gray-300 dark:border-gray-800'
|
||||
}
|
||||
}
|
||||
|
||||
const getStatusText = (status: string) => {
|
||||
switch (status) {
|
||||
case 'operational':
|
||||
return 'All Systems Operational'
|
||||
case 'degraded':
|
||||
return 'Degraded Performance'
|
||||
case 'partial_outage':
|
||||
return 'Partial Service Outage'
|
||||
case 'major_outage':
|
||||
return 'Major Service Outage'
|
||||
case 'under_maintenance':
|
||||
return 'Under Maintenance'
|
||||
default:
|
||||
return 'Status Unknown'
|
||||
}
|
||||
}
|
||||
|
||||
export const OpenAIStatusChecker: React.FC = () => {
|
||||
const [statusData, setStatusData] = useState<StatusData | null>(null)
|
||||
const [loading, setLoading] = useState(true)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [lastRefresh, setLastRefresh] = useState<Date>(new Date())
|
||||
|
||||
const fetchStatus = useCallback(async () => {
|
||||
setLoading(true)
|
||||
setError(null)
|
||||
|
||||
try {
|
||||
console.log('Fetching real OpenAI status...')
|
||||
|
||||
// Use CORS proxy to fetch real OpenAI status
|
||||
const proxyUrl = 'https://api.allorigins.win/get?url='
|
||||
const targetUrl = 'https://status.openai.com/api/v2/status.json'
|
||||
|
||||
const response = await fetch(proxyUrl + encodeURIComponent(targetUrl))
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Proxy returned ${response.status}`)
|
||||
}
|
||||
|
||||
const proxyData = await response.json()
|
||||
const openaiData = JSON.parse(proxyData.contents)
|
||||
|
||||
console.log('Real OpenAI data received:', openaiData)
|
||||
|
||||
// Transform real OpenAI data to our format
|
||||
const transformedData: StatusData = {
|
||||
status: mapOpenAIStatusClient(
|
||||
openaiData.status?.indicator || 'operational'
|
||||
),
|
||||
lastUpdated: openaiData.page?.updated_at || new Date().toISOString(),
|
||||
incidents: (openaiData.incidents || []).slice(0, 3),
|
||||
}
|
||||
|
||||
setStatusData(transformedData)
|
||||
setLastRefresh(new Date())
|
||||
console.log('✅ Real OpenAI status loaded successfully!')
|
||||
} catch (err) {
|
||||
console.error('Failed to fetch real status:', err)
|
||||
|
||||
// Fallback: try alternative proxy
|
||||
try {
|
||||
console.log('Trying alternative proxy...')
|
||||
const altResponse = await fetch(
|
||||
`https://cors-anywhere.herokuapp.com/https://status.openai.com/api/v2/summary.json`
|
||||
)
|
||||
|
||||
if (altResponse.ok) {
|
||||
const altData = await altResponse.json()
|
||||
setStatusData({
|
||||
status: mapOpenAIStatusClient(
|
||||
altData.status?.indicator || 'operational'
|
||||
),
|
||||
lastUpdated: new Date().toISOString(),
|
||||
incidents: [],
|
||||
})
|
||||
setLastRefresh(new Date())
|
||||
console.log('✅ Alternative proxy worked!')
|
||||
return
|
||||
}
|
||||
} catch (altErr) {
|
||||
console.log('Alternative proxy also failed')
|
||||
}
|
||||
|
||||
// Final fallback
|
||||
setError('Unable to fetch real-time status')
|
||||
setStatusData({
|
||||
status: 'operational' as const,
|
||||
lastUpdated: new Date().toISOString(),
|
||||
incidents: [],
|
||||
})
|
||||
setLastRefresh(new Date())
|
||||
console.log('Using fallback status')
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
}, [])
|
||||
|
||||
// Client-side status mapping function
|
||||
const mapOpenAIStatusClient = (indicator: string): StatusData['status'] => {
|
||||
switch (indicator.toLowerCase()) {
|
||||
case 'none':
|
||||
case 'operational':
|
||||
return 'operational'
|
||||
case 'minor':
|
||||
return 'degraded'
|
||||
case 'major':
|
||||
return 'partial_outage'
|
||||
case 'critical':
|
||||
return 'major_outage'
|
||||
case 'maintenance':
|
||||
return 'under_maintenance'
|
||||
default:
|
||||
return 'operational' as const // Default to operational
|
||||
}
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
fetchStatus()
|
||||
// Refresh every 2 minutes for more real-time updates
|
||||
const interval = setInterval(fetchStatus, 2 * 60 * 1000)
|
||||
return () => clearInterval(interval)
|
||||
}, [fetchStatus])
|
||||
|
||||
const handleRefresh = () => {
|
||||
fetchStatus()
|
||||
}
|
||||
|
||||
if (loading && !statusData) {
|
||||
return (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl shadow-lg p-6 border border-gray-200 dark:border-gray-700">
|
||||
<div className="flex items-center justify-center space-x-3">
|
||||
<RefreshCw className="w-6 h-6 text-blue-500 animate-spin" />
|
||||
<span className="text-lg font-medium text-gray-700 dark:text-gray-300">
|
||||
Checking OpenAI Status...
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl shadow-lg p-6 border border-red-200 dark:border-red-800">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center space-x-3">
|
||||
<AlertCircle className="w-6 h-6 text-red-500" />
|
||||
<div>
|
||||
<h3 className="text-lg font-semibold text-red-800 dark:text-red-300">
|
||||
Unable to Check Status
|
||||
</h3>
|
||||
<p className="text-red-600 dark:text-red-400">{error}</p>
|
||||
</div>
|
||||
</div>
|
||||
<button
|
||||
onClick={handleRefresh}
|
||||
className="px-4 py-2 bg-red-100 hover:bg-red-200 dark:bg-red-900/20 dark:hover:bg-red-900/40 text-red-700 dark:text-red-300 rounded-lg font-medium transition-colors"
|
||||
>
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-xl shadow-lg p-6 border border-gray-200 dark:border-gray-700 my-6">
|
||||
<div className="flex items-center justify-between mb-4">
|
||||
<div className="flex items-center space-x-3">
|
||||
<StatusIcon status={statusData?.status || 'unknown'} />
|
||||
<div>
|
||||
<h3 className="text-xl font-bold text-gray-900 dark:text-gray-100">
|
||||
OpenAI Services
|
||||
</h3>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-400">
|
||||
Last updated: {new Date(lastRefresh).toLocaleTimeString()}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<button
|
||||
onClick={handleRefresh}
|
||||
disabled={loading}
|
||||
className="p-2 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg transition-colors disabled:opacity-50"
|
||||
>
|
||||
<RefreshCw
|
||||
className={`w-5 h-5 text-gray-600 dark:text-gray-400 ${loading ? 'animate-spin' : ''}`}
|
||||
/>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className={`inline-flex items-center px-4 py-2 rounded-full text-sm font-semibold border ${getStatusColor(statusData?.status || 'unknown')}`}
|
||||
>
|
||||
{getStatusText(statusData?.status || 'unknown')}
|
||||
</div>
|
||||
|
||||
<div className="mt-4 p-4 bg-gray-50 dark:bg-gray-700 rounded-lg">
|
||||
<h4 className="font-semibold text-gray-900 dark:text-gray-100 mb-2">
|
||||
Quick Status Check
|
||||
</h4>
|
||||
<div className="grid grid-cols-1 sm:grid-cols-3 gap-3 text-sm">
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-gray-600 dark:text-gray-400">ChatGPT</span>
|
||||
<StatusIcon status={statusData?.status || 'unknown'} />
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-gray-600 dark:text-gray-400">API</span>
|
||||
<StatusIcon status={statusData?.status || 'unknown'} />
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-gray-600 dark:text-gray-400">Playground</span>
|
||||
<StatusIcon status={statusData?.status || 'unknown'} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="mt-4 text-xs text-gray-500 dark:text-gray-400 text-center">
|
||||
{error
|
||||
? 'Using fallback status • '
|
||||
: '🟢 Real-time data from OpenAI • '}
|
||||
Updated: {new Date(lastRefresh).toLocaleTimeString()}
|
||||
<br />
|
||||
<a
|
||||
href="/post/is-chatgpt-down-use-jan#-is-chatgpt-down"
|
||||
className="text-blue-500 hover:text-blue-600 dark:text-blue-400 dark:hover:text-blue-300 underline"
|
||||
>
|
||||
View detailed status guide
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@ -3,7 +3,7 @@ title: Installation
|
||||
description: Install and deploy Jan Server on Kubernetes using minikube and Helm.
|
||||
---
|
||||
|
||||
## Prerequisites
|
||||
# Prerequisites
|
||||
|
||||
Jan Server requires the following tools installed on your system:
|
||||
|
||||
|
||||
28
docs/src/pages/changelog/2025-10-02-jan-projects.mdx
Normal file
@ -0,0 +1,28 @@
|
||||
---
|
||||
title: "Jan v0.7.0: Jan Projects"
|
||||
version: 0.7.0
|
||||
description: "Jan v0.7.0 introduces Projects, model renaming, llama.cpp auto-tuning, model stats, and Azure support."
|
||||
date: 2025-10-02
|
||||
ogImage: "/assets/images/changelog/jan-release-v0.7.0.jpeg"
|
||||
---
|
||||
|
||||
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
|
||||
import { Callout } from 'nextra/components'
|
||||
|
||||
<ChangelogHeader title="Jan v0.7.0" date="2025-10-02" ogImage="/assets/images/changelog/jan-release-v0.7.0.jpeg" />
|
||||
|
||||
## Jan v0.7.0: Jan Projects
|
||||
|
||||
Jan v0.7.0 is live! This release focuses on helping you organize your workspace and better understand how models run.
|
||||
|
||||
### What’s new
|
||||
- **Projects**: Group related chats under one project for a cleaner workflow.
|
||||
- **Rename models**: Give your models custom names for easier identification.
|
||||
- **Model context stats**: See context usage when a model runs.
|
||||
- **Auto-loaded cloud models**: Cloud model names now appear automatically.
|
||||
|
||||
---
|
||||
|
||||
Update your Jan or [download the latest version](https://jan.ai/).
|
||||
|
||||
For the complete list of changes, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.7.0).
|
||||
@ -0,0 +1,26 @@
|
||||
---
|
||||
title: "Jan v0.7.1: Fixes Windows Version Revert & OpenRouter Models"
|
||||
version: 0.7.1
|
||||
description: "Jan v0.7.1 focuses on bug fixes, including a windows version revert and improvements to OpenRouter models."
|
||||
date: 2025-10-03
|
||||
---
|
||||
|
||||
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
|
||||
import { Callout } from 'nextra/components'
|
||||
|
||||
<ChangelogHeader title="Jan v0.7.1" date="2025-10-03" />
|
||||
|
||||
### Bug Fixes: Windows Version Revert & OpenRouter Models
|
||||
|
||||
#### Quick fixes:
|
||||
- Jan no longer reverts to an older version on load
|
||||
- OpenRouter can now add models again
|
||||
- Anthropic requests now include the headers needed to fetch models
|
||||
|
||||
---
|
||||
|
||||
Update your Jan or [download the latest version](https://jan.ai/).
|
||||
|
||||
For the complete list of changes, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.7.1).
|
||||
|
||||
|
||||
@ -1,11 +1,12 @@
|
||||
---
|
||||
title: Linux
|
||||
description: Install Jan to run AI models locally on Linux. Works offline with GPU acceleration on Ubuntu, Debian, and other distributions.
|
||||
description: Download Jan on Linux to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline.
|
||||
keywords:
|
||||
[
|
||||
Jan,
|
||||
Customizable Intelligence, LLM,
|
||||
local AI,
|
||||
Jan on Linux,
|
||||
privacy focus,
|
||||
free and open source,
|
||||
private and offline,
|
||||
@ -18,15 +19,17 @@ keywords:
|
||||
installation,
|
||||
"desktop"
|
||||
]
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
title: "Jan on Linux"
|
||||
description: "Download Jan on Linux to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline."
|
||||
---
|
||||
|
||||
|
||||
import FAQBox from '@/components/FaqBox'
|
||||
import { Tabs, Callout, Steps } from 'nextra/components'
|
||||
import { Settings } from 'lucide-react'
|
||||
|
||||
|
||||
|
||||
# Linux Installation
|
||||
Instructions for installing Jan on Linux.
|
||||
|
||||
|
||||
@ -1,11 +1,11 @@
|
||||
---
|
||||
title: Mac
|
||||
description: Get started quickly with Jan - a local AI that runs on your computer. Install Jan and pick your model to start chatting.
|
||||
keywords:
|
||||
description: Download Jan on Mac to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline.
|
||||
[
|
||||
Jan,
|
||||
Customizable Intelligence, LLM,
|
||||
local AI,
|
||||
Jan on Mac,
|
||||
privacy focus,
|
||||
free and open source,
|
||||
private and offline,
|
||||
@ -18,6 +18,11 @@ keywords:
|
||||
installation,
|
||||
"desktop"
|
||||
]
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
title: "Jan on Mac"
|
||||
description: "Download Jan on Mac to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline."
|
||||
---
|
||||
|
||||
import { Tabs } from 'nextra/components'
|
||||
|
||||
@ -1,10 +1,11 @@
|
||||
---
|
||||
title: Windows
|
||||
description: Install Jan to run AI models locally on Windows. Works offline with GPU acceleration on Windows 10 and 11.
|
||||
description: Download Jan on Windows to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline.
|
||||
keywords:
|
||||
[
|
||||
Jan,
|
||||
Customizable Intelligence, LLM,
|
||||
Jan on Windows,
|
||||
local AI,
|
||||
privacy focus,
|
||||
free and open source,
|
||||
@ -18,6 +19,11 @@ keywords:
|
||||
installation,
|
||||
"desktop"
|
||||
]
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
title: "Jan on Windows"
|
||||
description: "Download Jan on Windows to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline."
|
||||
---
|
||||
|
||||
import { Tabs, Callout, Steps } from 'nextra/components'
|
||||
|
||||
@ -90,7 +90,7 @@ Refer to the following documentation to install the Tabby extension on your favo
|
||||
Tabby offers an [Answer Engine](https://tabby.tabbyml.com/docs/administration/answer-engine/) on the homepage,
|
||||
which can leverage the Jan LLM and related contexts like code, documentation, and web pages to answer user questions.
|
||||
|
||||
Simply open the Tabby homepage at [localhost:8080](http://localhost:8080) and ask your questions.
|
||||
Simply open the Tabby homepage at http://localhost:8080 and ask your questions.
|
||||
|
||||
### IDE Chat Sidebar
|
||||
|
||||
|
||||
@ -329,7 +329,7 @@ When you start a chat with a model and encounter a **Failed to Fetch** or **Some
|
||||
|
||||
**1. Check System & Hardware Requirements**
|
||||
- Hardware dependencies: Ensure your device meets all [hardware requirements](troubleshooting)
|
||||
- OS: Ensure your operating system meets the minimum requirements ([Mac](https://www.jan.ai/docs/desktop/install/mac#minimum-requirements), [Windows](/windows#compatibility), [Linux](docs/desktop/linux#compatibility))
|
||||
- OS: Ensure your operating system meets the minimum requirements ([Mac](https://www.jan.ai/docs/desktop/install/mac#minimum-requirements), [Windows](/windows#compatibility), [Linux](https://www.jan.ai/docs/desktop/install/linux#compatibility))
|
||||
- RAM: Choose models that use less than 80% of your available RAM
|
||||
- For 8GB systems: Use models under 6GB
|
||||
- For 16GB systems: Use models under 13GB
|
||||
|
||||
BIN
docs/src/pages/post/_assets/claude.jpeg
Normal file
|
After Width: | Height: | Size: 230 KiB |
BIN
docs/src/pages/post/_assets/create-assistant-1.jpeg
Normal file
|
After Width: | Height: | Size: 320 KiB |
BIN
docs/src/pages/post/_assets/gemini.jpeg
Normal file
|
After Width: | Height: | Size: 293 KiB |
BIN
docs/src/pages/post/_assets/jan-assistant-for-law.png
Normal file
|
After Width: | Height: | Size: 1.4 MiB |
BIN
docs/src/pages/post/_assets/jan-for-ai-law-assistant-chat.jpeg
Normal file
|
After Width: | Height: | Size: 395 KiB |
BIN
docs/src/pages/post/_assets/lm-studio.jpeg
Normal file
|
After Width: | Height: | Size: 612 KiB |
BIN
docs/src/pages/post/_assets/perplexity.jpeg
Normal file
|
After Width: | Height: | Size: 230 KiB |
@ -20,5 +20,10 @@
|
||||
"title": "Research",
|
||||
"display": "normal",
|
||||
"href": "/blog?category=research"
|
||||
},
|
||||
"guides-cat": {
|
||||
"title": "Guides",
|
||||
"display": "normal",
|
||||
"href": "/blog?category=guides"
|
||||
}
|
||||
}
|
||||
|
||||
123
docs/src/pages/post/ai-for-law.mdx
Normal file
@ -0,0 +1,123 @@
|
||||
---
|
||||
title: "Private AI for legal professionals who need confidentiality"
|
||||
description: "It's possible to use AI without risking client data. Jan helps lawyers save time while keeping clients safe."
|
||||
tags: AI, ai for law, ai for lawyers, ChatGPT alternative, Jan, local AI, offline AI
|
||||
categories: guides
|
||||
date: 2025-09-30
|
||||
ogImage: assets/images/general/jan-for-ai-law-assistant-chat.jpeg
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
title: "Private AI for legal professionals who need confidentiality"
|
||||
description: "It's possible to use AI without risking client data. Jan helps lawyers save time while keeping clients safe."
|
||||
image: assets/images/general/jan-assistants-ai-for-legal.jpeg
|
||||
---
|
||||
import { Callout } from 'nextra/components'
|
||||
import CTABlog from '@/components/Blog/CTA'
|
||||
import { OpenAIStatusChecker } from '@/components/OpenAIStatusChecker'
|
||||
|
||||
# Private AI for legal professionals who need confidentiality
|
||||
|
||||

|
||||
|
||||
Yes, it's possible to use AI in legal work without risking client data.
|
||||
|
||||
<Callout type="warning">
|
||||
Client trust depends on privacy. Sending documents into public AI tools risks compliance and reputation.
|
||||
</Callout>
|
||||
|
||||
Start by [downloading Jan](/download) and installing the **Jan v1 model**. Once installed, you can create assistants tailored to your practice and keep contracts, case notes, and client files under your control.
|
||||
|
||||
<Callout type="info">
|
||||
**Why use Jan for legal tasks**
|
||||
- Runs locally on your hardware, no cloud uploads
|
||||
- Keeps chats and interactions private
|
||||
- Works offline once installed
|
||||
- Lets you build assistants for your own workflows
|
||||
</Callout>
|
||||
|
||||
---
|
||||
|
||||
## Create your assistant
|
||||
|
||||
Once Jan is installed with the **Jan v1 model**, onboarding will guide you through downloading and setup.
|
||||
|
||||
Click **Create assistant** to start:
|
||||

|
||||
*Create your first assistant in Jan*
|
||||
|
||||
Add an assistant name and prompt:
|
||||

|
||||
*Example of a Jan assistant for contract review*
|
||||
|
||||
You can create assistants using specific prompts. Below are examples for common legal workflows.
|
||||
|
||||
---
|
||||
|
||||
## Contract review assistant
|
||||
|
||||
AI can help lawyers move faster through long contracts by pointing out what matters most.
|
||||
|
||||
**Prompt for Jan:**
|
||||
> You are a contract review assistant.
|
||||
> When I paste a contract:
|
||||
> - Highlight risky or unusual clauses
|
||||
> - Flag ambiguous or missing terms
|
||||
> - Summarize the agreement in plain English for a non-lawyer client
|
||||
> Format your response with sections: **Risks**, **Ambiguities/Missing**, **Summary**.
|
||||
> Do not provide legal advice.
|
||||
|
||||
---
|
||||
|
||||
## Drafting assistant
|
||||
|
||||
Use AI to create first drafts of NDAs, service agreements, or client letters. You still refine the output, but AI saves time on boilerplate.
|
||||
|
||||
**Prompt for Jan:**
|
||||
> You are a drafting assistant.
|
||||
> When asked to draft a legal agreement or client letter:
|
||||
> - Produce a professional first version
|
||||
> - Use clear, concise language
|
||||
> - Leave placeholders like [Party Name], [Date], [Amount] for details
|
||||
> - Structure output with headings, numbered clauses, and consistent formatting
|
||||
> Do not provide legal advice.
|
||||
|
||||
---
|
||||
|
||||
## Case preparation assistant
|
||||
|
||||
Case prep often means reading hundreds of pages. AI can turn depositions, discovery files, or judgments into concise notes.
|
||||
|
||||

|
||||
*Jan chat interface for case preparation — process documents and extract key information*
|
||||
|
||||
**Prompt for Jan:**
|
||||
> You are a case preparation assistant.
|
||||
> When I provide case materials:
|
||||
> - Extract key facts, issues, and arguments
|
||||
> - Present them as bullet points under headings: **Facts**, **Issues**, **Arguments**
|
||||
> - Keep summaries concise (under 500 words unless I request more)
|
||||
> Use plain English, no speculation or legal conclusions.
|
||||
|
||||
---
|
||||
|
||||
## Knowledge management assistant
|
||||
|
||||
Law firms accumulate memos, policies, and precedents. AI can help organize and retrieve them quickly.
|
||||
|
||||
**Prompt for Jan:**
|
||||
> You are a knowledge management assistant.
|
||||
> When I ask questions about internal documents:
|
||||
> - Return concise summaries or direct excerpts
|
||||
> - Always cite the source (e.g., “Policy Manual, Section 4”)
|
||||
> - If not found in provided material, reply “Not found in documents.”
|
||||
> Do not invent information.
|
||||
|
||||
---
|
||||
|
||||
## Final note
|
||||
|
||||
AI in legal practice is not about replacing lawyers. It’s about handling repetitive tasks safely so you can focus on real decisions.
|
||||
With private AI, you gain efficiency without compromising client confidentiality.
|
||||
|
||||
<CTABlog />
|
||||
134
docs/src/pages/post/ai-for-teachers.mdx
Normal file
@ -0,0 +1,134 @@
|
||||
---
|
||||
title: "AI for teachers who care about student privacy"
|
||||
description: "Use AI in teaching without risking student data. Jan helps teachers plan lessons, grade faster, and communicate with parents."
|
||||
tags: AI, ai for teachers, ChatGPT alternative, Jan, local AI, offline AI, education
|
||||
categories: guides
|
||||
date: 2025-10-01
|
||||
ogImage: assets/images/general/ai-for-teacher.jpeg
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
title: "AI for teachers who care about student privacy"
|
||||
description: "Use AI in teaching without risking student data. Jan helps teachers plan lessons, grade faster, and communicate with parents."
|
||||
image: assets/images/general/ai-for-teacher.jpeg
|
||||
---
|
||||
import { Callout } from 'nextra/components'
|
||||
import CTABlog from '@/components/Blog/CTA'
|
||||
|
||||
# AI for teachers who care about student privacy
|
||||
|
||||

|
||||
|
||||
AI can help teachers handle the work that piles up outside class. It can draft a lesson outline, suggest feedback on essays, or turn notes into a polite parent email. These are the tasks that usually stretch into evenings and weekends.
|
||||
|
||||
<Callout>
|
||||
Most AI tools like ChatGPT run in the cloud. Sharing lesson plans, student writing, or parent details there risks compliance and trust.
|
||||
</Callout>
|
||||
|
||||
That's where Jan comes in:
|
||||
- [Download Jan](/download)
|
||||
- You get the same time-saving help
|
||||
- Your data never leaves your device.
|
||||
|
||||
<video controls>
|
||||
<source src="/assets/images/general/jan-ai-for-teacher.mp4" type="video/mp4" />
|
||||
Your browser does not support the video tag.
|
||||
</video>
|
||||
*See how teachers use Jan for AI-powered lesson planning and grading*
|
||||
|
||||
<Callout type="info">
|
||||
**Why use Jan for teaching**
|
||||
- Runs locally, no cloud servers
|
||||
- Keeps lesson plans and student data private
|
||||
- Works offline once installed
|
||||
- Lets you build assistants for your daily teaching tasks
|
||||
</Callout>
|
||||
|
||||
---
|
||||
|
||||
## Create your assistant
|
||||
|
||||
Once Jan is installed, click **Create assistant** and add one of the prompts below. Each assistant is for a specific classroom task.
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
## Lesson planning assistant
|
||||
|
||||
AI can draft lesson outlines in minutes. You adapt and refine them for your students.
|
||||
|
||||
**Prompt for Jan:**
|
||||
> You are a lesson planning assistant.
|
||||
> When I give you a topic or subject:
|
||||
> - Suggest a lesson outline with objectives, activities, and discussion questions
|
||||
> - Adjust for different grade levels if I specify
|
||||
> - Keep plans practical and realistic for a classroom setting
|
||||
|
||||
Example ask: For Grade 6 science on ecosystems. Objectives: define food chains, explain producer/consumer roles. Activity: group poster on an ecosystem. Questions: How would removing one species affect the whole system?
|
||||
|
||||
---
|
||||
|
||||
## Grading support assistant
|
||||
|
||||
AI won't replace your judgment, but it can make feedback faster and more consistent.
|
||||
|
||||
**Prompt for Jan:**
|
||||
> You are a grading support assistant.
|
||||
> When I paste student writing or answers:
|
||||
> - Highlight strengths and areas for improvement
|
||||
> - Suggest short, constructive feedback I can reuse
|
||||
> - Keep tone supportive and professional
|
||||
> Do not assign final grades.
|
||||
|
||||
Example: For a history essay. Strength: clear thesis. Improvement: weak evidence. Feedback: "Great thesis and structure. Next time, support your points with specific historical examples."
|
||||
|
||||
---
|
||||
|
||||
## Parent communication assistant
|
||||
|
||||
Writing parent emails is important but time-consuming.
|
||||
|
||||
**Prompt for Jan:**
|
||||
> You are a parent communication assistant.
|
||||
> When I give you key points about a student:
|
||||
> - Draft a polite and empathetic email to parents
|
||||
> - Use clear and professional language
|
||||
> - Keep tone supportive, not overly formal
|
||||
> Only include details I provide.
|
||||
|
||||
Example: Notes: “Student is falling behind on homework, otherwise engaged in class.” Output: a short, encouraging message suggesting a check-in at home.
|
||||
|
||||
---
|
||||
|
||||
## Classroom resources assistant
|
||||
|
||||
Generate quizzes, worksheets, or practice activities at short notice.
|
||||
|
||||
**Prompt for Jan:**
|
||||
> You are a classroom resource assistant.
|
||||
> When I provide a topic or subject:
|
||||
> - Generate sample quiz questions (multiple choice and short answer)
|
||||
> - Suggest short practice activities
|
||||
> - Provide answer keys separately
|
||||
> Keep material age-appropriate for the level I specify.
|
||||
|
||||
Example: For Grade 4 fractions. 5 multiple-choice questions with answer key, plus a quick worksheet with 3 practice problems.
|
||||
|
||||
---
|
||||
|
||||
## Getting started
|
||||
|
||||
1. [Download Jan](/download).
|
||||
2. Install the Jan model (guided in-app)
|
||||
3. Create your first assistant using one of the prompts above
|
||||
4. Test with non-sensitive examples first
|
||||
5. Use it in real classroom tasks once you're comfortable
|
||||
|
||||
---
|
||||
|
||||
## Final note
|
||||
|
||||
AI isn't here to replace teachers. It's here to take repetitive tasks off your plate so you can focus on teaching. With Jan, you can use AI confidently without risking student privacy.
|
||||
|
||||
<CTABlog />
|
||||
@ -17,7 +17,7 @@ Jan now supports [NVIDIA TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) i
|
||||
We've been excited for TensorRT-LLM for a while, and [had a lot of fun implementing it](https://github.com/menloresearch/nitro-tensorrt-llm). As part of the process, we've run some benchmarks, to see how TensorRT-LLM fares on consumer hardware (e.g. [4090s](https://www.nvidia.com/en-us/geforce/graphics-cards/40-series/), [3090s](https://www.nvidia.com/en-us/geforce/graphics-cards/30-series/)) we commonly see in the [Jan's hardware community](https://discord.com/channels/1107178041848909847/1201834752206974996).
|
||||
|
||||
<Callout type="info" >
|
||||
**Give it a try!** Jan's TensorRT-LLM extension is available in Jan v0.4.9 and up ([see more](/docs/built-in/tensorrt-llm)). We precompiled some TensorRT-LLM models for you to try: `Mistral 7b`, `TinyLlama-1.1b`, `TinyJensen-1.1b` 😂
|
||||
**Give it a try!** Jan's TensorRT-LLM extension is available in Jan v0.4.9. We precompiled some TensorRT-LLM models for you to try: `Mistral 7b`, `TinyLlama-1.1b`, `TinyJensen-1.1b` 😂
|
||||
|
||||
Bugs or feedback? Let us know on [GitHub](https://github.com/menloresearch/jan) or via [Discord](https://discord.com/channels/1107178041848909847/1201832734704795688).
|
||||
</Callout>
|
||||
|
||||
120
docs/src/pages/post/chatgpt-alternatives.mdx
Normal file
@ -0,0 +1,120 @@
|
||||
---
|
||||
title: "ChatGPT alternatives that actually replace it"
|
||||
description: "See the best ChatGPT alternatives in 2025. We've listed tools that are alternatives to ChatGPT."
|
||||
tags: AI, ChatGPT alternative, ChatGPT alternatives, alternative to chatgpt, Jan, local AI, privacy, open source, offline AI
|
||||
categories: guides
|
||||
date: 2025-09-29
|
||||
ogImage: assets/images/general/chatgpt-alternative-jan.jpeg
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
title: "ChatGPT alternatives that actually replace it."
|
||||
description: "See the best ChatGPT alternatives in 2025. We've listed tools that are alternatives to ChatGPT."
|
||||
image: assets/images/general/chatgpt-alternative-jan.jpeg
|
||||
---
|
||||
import { Callout } from 'nextra/components'
|
||||
import CTABlog from '@/components/Blog/CTA'
|
||||
|
||||
# Best ChatGPT Alternatives
|
||||
|
||||
ChatGPT works well, but it always needs internet, has usage limits, and isn't private.
|
||||
|
||||
If you want something different (offline use, privacy, or specialized tasks), here are the best ChatGPT alternatives for each use case.
|
||||
|
||||
## Comparison: ChatGPT Alternatives
|
||||
|
||||
| ChatGPT Alternative | Offline | Key Strength | Best For |
|
||||
| ------------------------- | ------- | ---------------------------- | -------------------------- |
|
||||
| **[Jan](https://jan.ai)** | Yes | Runs Cloud + Offline, open-source | Best overall ChatGPT replacement |
|
||||
| Claude | - | Strong writing and reasoning | Creative text & code |
|
||||
| Gemini | - | Integrated with Google | Research tasks, image generation |
|
||||
| Perplexity | - | Fast, with cited answers | Research and fact-checking |
|
||||
| LM Studio | Yes | Runs open models on PC | Coding and experiments |
|
||||
|
||||
### Jan is the best ChatGPT alternative
|
||||
|
||||

|
||||
*Jan as an open-source alternative to ChatGPT*
|
||||
|
||||
Jan is the most complete ChatGPT alternative available today. It enables:
|
||||
- Using AI online and offline (even on a plane)
|
||||
- Agentic actions
|
||||
- MCP servers for tool use
|
||||
|
||||
Unlike ChatGPT, it runs on your computer, which means:
|
||||
- Offline AI capabilities (see [Offline ChatGPT post](https://www.jan.ai/post/offline-chatgpt-alternative) for details)
|
||||
- 100% private
|
||||
- Open-source & Free
|
||||
|
||||
<Callout> Jan is an [open-source replacement for ChatGPT.](https://www.jan.ai/) </Callout>
|
||||
|
||||
### Claude is the most notable online alternative
|
||||

|
||||
|
||||
Claude has become the main online rival to ChatGPT. It stands out for writing, reasoning, and coding.
|
||||
|
||||
- Handles very long documents and context well
|
||||
- Strong for essays, research papers, and structured text
|
||||
- Popular with developers for code explanations and debugging
|
||||
- Cloud-only, no offline mode
|
||||
- Filters outputs heavily, sometimes too restrictive
|
||||
|
||||
### Gemini is Google's integrated alternative
|
||||

|
||||
|
||||
Gemini ties directly into Google’s apps and search. Great for users in the Google ecosystem.
|
||||
|
||||
- Built into Gmail, Docs, and Google Search
|
||||
- Good for real-time research and fact-checking
|
||||
- Strong at pulling web context into answers
|
||||
- Requires Google account, fully online
|
||||
- Privacy concerns: all tied to Google services
|
||||
|
||||
### Perplexity is the research-focused alternative
|
||||

|
||||
|
||||
Perplexity is built for fact-checking and quick research, not creativity.
|
||||
|
||||
- Always cites sources for answers
|
||||
- Strong at summarizing current web info
|
||||
- Very fast for Q&A style use
|
||||
- Limited in creativity and open-ended writing
|
||||
- Cloud-only, daily free usage caps
|
||||
|
||||
### LM Studio is the experimental alternative
|
||||
|
||||

|
||||
|
||||
LM Studio is not a ChatGPT replacement but a local tool for running open models.
|
||||
|
||||
- Lets you test and run open-source models on PC
|
||||
- Offline by default, works without internet
|
||||
- Flexible setup for developers and technical users
|
||||
- Requires decent hardware (RAM/VRAM)
|
||||
|
||||
LM Studio is not beginner-friendly compared to Jan.
|
||||
|
||||
## Choosing the right ChatGPT alternative for you:
|
||||
|
||||
- Best overall replacement: [Jan](https://www.jan.ai/)
|
||||
- For writing & storytelling: Claude
|
||||
- For research & web knowledge: Perplexity or Gemini
|
||||
- For productivity & office work: Microsoft Copilot
|
||||
- For technical users experimenting with open-source models: LM Studio
|
||||
|
||||
Most ChatGPT alternatives are still cloud-based and limited. If you want full privacy, offline use, and no restrictions, the best ChatGPT alternative is [Jan](https://www.jan.ai/).
|
||||
|
||||
### Can I use ChatGPT offline?
|
||||
No. ChatGPT always requires internet. For offline AI, use Jan.
|
||||
|
||||
### What’s the best free ChatGPT alternative?
|
||||
Jan is free, open-source, and runs offline. Others like Claude or Perplexity have limited free tiers but are cloud-based.
|
||||
|
||||
### Which ChatGPT alternative is best for writing?
|
||||
Claude is strong for essays, reports, and structured writing. You could use [open-source models](https://www.jan.ai/post/run-ai-models-locally) in Jan too.
|
||||
|
||||
### Which ChatGPT alternative is best for research?
|
||||
Perplexity and Gemini pull real-time web data with citations.
|
||||
|
||||
### What’s the closest full replacement to ChatGPT?
|
||||
Jan. It runs locally, works offline, and feels like ChatGPT without restrictions.
|
||||
@ -4,13 +4,13 @@ description: "A simple guide to replicating Deep Research results for free, with
|
||||
tags: AI, local models, Jan, GGUF, Deep Research, local AI
|
||||
categories: guides
|
||||
date: 2025-08-04
|
||||
ogImage: _assets/research-result-local.png
|
||||
ogImage: assets/images/general/research-result-local.png
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
title: "Replicating Deep Research with Jan"
|
||||
description: "Learn how to replicate Deep Research results with Jan."
|
||||
image: _assets/research-result-local.jpg
|
||||
image: assets/images/general/research-result-local.png
|
||||
---
|
||||
|
||||
import { Callout } from 'nextra/components'
|
||||
|
||||
@ -4,7 +4,7 @@ description: "A straightforward guide to running DeepSeek R1 locally regardless
|
||||
tags: DeepSeek, R1, local AI, Jan, GGUF, Qwen, Llama
|
||||
categories: guides
|
||||
date: 2025-01-31
|
||||
ogImage: assets/deepseek-r1-locally-jan.jpg
|
||||
ogImage: assets/images/general/deepseek-r1-locally-jan.jpg
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
@ -17,7 +17,7 @@ import CTABlog from '@/components/Blog/CTA'
|
||||
|
||||
# Run DeepSeek R1 locally on your device (Beginner-Friendly Guide)
|
||||
|
||||

|
||||

|
||||
|
||||
DeepSeek R1 is one of the best open-source models in the market right now, and you can run DeepSeek R1 on your own computer!
|
||||
|
||||
|
||||
@ -3,7 +3,7 @@ title: "How we (try to) benchmark GPU kernels accurately"
|
||||
description: "We present the process behind how we decided to benchmark GPU kernels and iteratively improved our benchmarking pipeline"
|
||||
tags: ""
|
||||
categories: research
|
||||
ogImage: "./_assets/cover-kernel-benchmarking.png"
|
||||
ogImage: assets/images/general/cover-kernel-benchmarking.png
|
||||
date: 2025-09-17
|
||||
---
|
||||
|
||||
|
||||
124
docs/src/pages/post/is-chatgpt-down-use-jan.mdx
Normal file
@ -0,0 +1,124 @@
|
||||
---
|
||||
title: "If ChatGPT is down, switch to AI that never goes down"
|
||||
description: "Check if ChatGPT down right now, and learn how to use AI that never goes down."
|
||||
tags: AI, ChatGPT down, ChatGPT alternative, Jan, local AI, offline AI, ChatGPT at capacity
|
||||
categories: guides
|
||||
date: 2025-09-30
|
||||
ogImage: assets/images/general/is-chatgpt-down.jpg
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
title: "Realtime Status: Is ChatGPT down?"
|
||||
description: "Check if ChatGPT is down right now with our real-time status checker, and learn how to use AI that never goes offline."
|
||||
image: assets/images/general/is-chatgpt-down.jpg
|
||||
---
|
||||
import { Callout } from 'nextra/components'
|
||||
import CTABlog from '@/components/Blog/CTA'
|
||||
import { OpenAIStatusChecker } from '@/components/OpenAIStatusChecker'
|
||||
|
||||
# If ChatGPT is down, switch to AI that never goes down
|
||||
|
||||
If you're seeing that ChatGPT is down, it's a good moment to switch to [Jan](https://www.jan.ai/), an AI that never goes down.
|
||||
|
||||
## 🔴 Realtime Status: Is ChatGPT down?
|
||||
<Callout>
|
||||
This live tracker shows if ChatGPT is down right now.
|
||||
</Callout>
|
||||
<OpenAIStatusChecker />
|
||||
|
||||
### ChatGPT Status Indicators
|
||||
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4 my-6">
|
||||
<div className="p-4 rounded-lg border border-green-200 bg-green-50 dark:bg-green-900/20 dark:border-green-800">
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<div className="w-3 h-3 bg-green-500 rounded-full"></div>
|
||||
<span className="font-semibold text-green-800 dark:text-green-300">Operational</span>
|
||||
</div>
|
||||
<p className="text-sm text-green-700 dark:text-green-400">All systems are functioning normally with no reported issues.</p>
|
||||
</div>
|
||||
|
||||
<div className="p-4 rounded-lg border border-yellow-200 bg-yellow-50 dark:bg-yellow-900/20 dark:border-yellow-800">
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<div className="w-3 h-3 bg-yellow-500 rounded-full"></div>
|
||||
<span className="font-semibold text-yellow-800 dark:text-yellow-300">Degraded Performance</span>
|
||||
</div>
|
||||
<p className="text-sm text-yellow-700 dark:text-yellow-400">Services are running but may be slower than usual.</p>
|
||||
</div>
|
||||
|
||||
<div className="p-4 rounded-lg border border-orange-200 bg-orange-50 dark:bg-orange-900/20 dark:border-orange-800">
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<div className="w-3 h-3 bg-orange-500 rounded-full"></div>
|
||||
<span className="font-semibold text-orange-800 dark:text-orange-300">Partial Outage</span>
|
||||
</div>
|
||||
<p className="text-sm text-orange-700 dark:text-orange-400">Some features or regions may be experiencing issues.</p>
|
||||
</div>
|
||||
|
||||
<div className="p-4 rounded-lg border border-red-200 bg-red-50 dark:bg-red-900/20 dark:border-red-800">
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<div className="w-3 h-3 bg-red-500 rounded-full"></div>
|
||||
<span className="font-semibold text-red-800 dark:text-red-300">Major Outage</span>
|
||||
</div>
|
||||
<p className="text-sm text-red-700 dark:text-red-400">Significant service disruption affecting most users.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
## Skip the downtime with Jan
|
||||
|
||||
When ChatGPT is down, Jan keeps working. Jan is an open-source ChatGPT alternative that runs on your computer - no servers, no outages, no waiting.
|
||||
|
||||

|
||||
*Jan works even when ChatGPT doesn't.*
|
||||
|
||||
### Why Jan never goes down:
|
||||
- **Runs locally** - No dependency on external servers
|
||||
- **Always available** - Works offline, even on flights
|
||||
- **No capacity limits** - Uses your computer's resources
|
||||
- **100% private** - Your conversations stay on your device
|
||||
|
||||
### Get started in 3 mins:
|
||||
1. Download Jan: [jan.ai](https://jan.ai)
|
||||
2. Install a model: Choose from Jan, Qwen, or other top models
|
||||
3. Start chatting: the interface is similar to ChatGPT's, and with local models it's always available
|
||||
|
||||
<Callout type="info">
|
||||
**Pro tip:** Keep both ChatGPT and Jan. You'll never lose productivity to outages again.
|
||||
</Callout>
|
||||
|
||||
Jan runs AI models locally, so you don't need internet access. That means Jan is unaffected when ChatGPT is down.
|
||||
|
||||
### Why does ChatGPT go down?
|
||||
There could be multiple reasons:
|
||||
- Too many users at once
|
||||
- Data center or API downtime
|
||||
- Planned or unplanned updates
|
||||
- Limited availability in some locations
|
||||
|
||||
ChatGPT depends on OpenAI’s servers. If those go down, so does ChatGPT. Jan users aren't affected by ChatGPT outages.
|
||||
|
||||
### Common ChatGPT Errors
|
||||
|
||||
When ChatGPT experiences issues, you might see these error messages:
|
||||
|
||||
- "ChatGPT is at capacity right now": Too many users online, try again later
|
||||
- "Error in message stream": Connection problems with OpenAI servers
|
||||
- "Something went wrong": General server error, refresh and retry
|
||||
- "Network error": Internet connectivity issues on your end or OpenAI's
|
||||
- "Rate limit exceeded": Too many requests sent, wait before trying again
|
||||
- "This model is currently overloaded": High demand for specific model
|
||||
|
||||
## Quick answers about ChatGPT status
|
||||
|
||||
### Is ChatGPT down?
|
||||
Check the ChatGPT realtime status above. [See if ChatGPT is down right now.](/post/is-chatgpt-down-use-jan#-realtime-status-is-chatgpt-down)
|
||||
|
||||
### Why is ChatGPT down?
|
||||
Usually server overload, maintenance, or outages at OpenAI.
|
||||
|
||||
### What does "ChatGPT is at capacity" mean?
|
||||
Too many users are online at the same time. You’ll need to wait or switch to Jan instead.
|
||||
|
||||
### Is ChatGPT shutting down?
|
||||
No, ChatGPT isn't shutting down. Outages are temporary.
|
||||
|
||||
### Can I use ChatGPT offline?
|
||||
No. ChatGPT always requires internet. For [offline AI](https://www.jan.ai/post/offline-chatgpt-alternative), use [Jan](https://jan.ai).
|
||||
@ -1,32 +1,36 @@
|
||||
---
|
||||
title: "Offline ChatGPT: You can't run ChatGPT offline, do this instead"
|
||||
description: "Learn how to use AI offline with Jan - a free, open-source alternative to ChatGPT that works 100% offline on your computer."
|
||||
tags: AI, ChatGPT alternative, offline AI, Jan, local AI, privacy
|
||||
description: "Use offline AI with Jan, a free & open-source alternative to ChatGPT that runs 100% offline."
|
||||
tags: AI, chatgpt offline, ChatGPT alternative, offline AI, Jan, local AI, privacy
|
||||
categories: guides
|
||||
date: 2025-02-08
|
||||
ogImage: _assets/offline-chatgpt-alternatives-jan.jpg
|
||||
ogImage: assets/images/general/offline-chatgpt-alternatives-jan.jpg
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
title: "Offline ChatGPT: You can't run ChatGPT offline, do this instead"
|
||||
description: "Want to use ChatGPT offline? Learn how to run AI models locally with Jan - free, open-source, and works without internet."
|
||||
image: _assets/offline-chatgpt-alternatives-jan.jpg
|
||||
description: "Use offline AI with Jan, a free & open-source alternative to ChatGPT that runs 100% offline."
|
||||
image: assets/images/general/offline-chatgpt-alternatives-jan.jpg
|
||||
---
|
||||
import { Callout } from 'nextra/components'
|
||||
import CTABlog from '@/components/Blog/CTA'
|
||||
|
||||
# Offline ChatGPT: You can't run ChatGPT offline, do this instead
|
||||
|
||||
ChatGPT is a cloud-based service that requires internet access. However, it's not the only way to use AI. You can run AI models offline on your device with [Jan](https://jan.ai/). It's completely free, open-source, and gives you 100% offline capability. You can even use AI on a plane!
|
||||
ChatGPT can't run offline. You can't download it, and it always needs internet, because it runs on OpenAI's servers.
|
||||
|
||||
<Callout>
|
||||
If you want offline AI, you need local models. The easiest way: [Jan, an open-source replacement for ChatGPT](https://jan.ai/). It's free, open-source, and works 100% offline. With Jan, you can even use AI on a plane.
|
||||
|
||||
<Callout type="info">
|
||||
**Quick Summary:**
|
||||
- ChatGPT always needs internet - it can't run offline
|
||||
- Jan lets you run AI models 100% offline on your computer
|
||||
- It's free and open-source
|
||||
- Works on Mac, Windows, and Linux
|
||||
- ChatGPT always needs internet - no offline mode
|
||||
- Use Jan to use AI models 100% offline
|
||||
- It's free & open-source, and works on Mac, Windows, and Linux
|
||||
</Callout>
|
||||
|
||||
## How to use AI offline?
|
||||
Offline AI means the model runs on your computer: no internet needed, 100% private, and your data never leaves your device. With Jan, you can run these models locally.
|
||||
|
||||
## Jan as an offline ChatGPT alternative
|
||||
|
||||

|
||||
@ -42,23 +46,25 @@ Go to [jan.ai](https://jan.ai) and download the version for your computer (Mac,
|
||||
|
||||
### 2. Download an AI model
|
||||
|
||||
You'll need an AI model to use AI offline, so download a model from Jan. Once it's on your computer, you don't need internet anymore.
|
||||
You'll need an AI model to use AI offline, so download a model from Jan. Once it's on your computer, you don't need internet anymore. You can also use GPT models via Jan - see the [running gpt-oss locally](https://www.jan.ai/post/run-gpt-oss-locally) post for how.
|
||||
|
||||

|
||||
*Select an AI model that matches your needs and computer capabilities*
|
||||
|
||||
<Callout>
|
||||
**Which model should you choose?**
|
||||
### Which model should you choose?
|
||||
- For most computers: Try Mistral 7B or DeepSeek - they're similar to ChatGPT 3.5
|
||||
- For older computers: Use smaller 3B models
|
||||
- For gaming PCs: You can try larger 13B models
|
||||
|
||||
<Callout type="info">
|
||||
Don't worry about choosing - Jan will automatically recommend models that work well on your computer.
|
||||
</Callout>
|
||||
|
||||
If you'd like to learn more about local AI, check [how to run AI models locally as a beginner](https://www.jan.ai/post/run-ai-models-locally) article.
|
||||
|
||||
### 3. Start using AI offline
|
||||
|
||||

|
||||

|
||||
*Use Jan's clean interface to chat with AI - no internet required*
|
||||
|
||||
Once downloaded, you can use AI anywhere, anytime:
|
||||
@ -71,12 +77,7 @@ Once downloaded, you can use AI anywhere, anytime:
|
||||
|
||||
## How to chat with your docs in Jan?
|
||||
|
||||
To chat with your docs in Jan, you need to activate experimental mode.
|
||||
|
||||

|
||||
*Turn on experimental mode in settings to chat with your docs*
|
||||
|
||||
After activating experimental mode, simply add your files and ask questions about them.
|
||||
Simply add your files and ask questions about them.
|
||||
|
||||

|
||||
*Chat with your documents privately - no internet needed*
|
||||
@ -97,17 +98,17 @@ Local AI makes possible offline AI use, so Jan is going to be your first step to
|
||||
4. **No Server Issues:** No more "ChatGPT is at capacity"
|
||||
5. **Your Choice of Models:** Use newer models as they come out
|
||||
|
||||
**"Is it really free? What's the catch?"**
|
||||
### "Is Jan really free? What's the catch?"
|
||||
Yes, it's completely free and open source. Jan is built by developers who believe in making AI accessible to everyone.
|
||||
|
||||
**"How does it compare to ChatGPT?"**
|
||||
### "How does Jan compare to ChatGPT?"
|
||||
Modern open-source models like DeepSeek and Mistral are very capable. While they might not match GPT-4, they're perfect for most tasks and getting better every month.
|
||||
|
||||
**"Do I need a powerful computer?"**
|
||||
### "Do I need a powerful computer?"
|
||||
If your computer is from the last 5 years, it will likely work fine. You need about 8GB of RAM and 10GB of free space for comfortable usage.
|
||||
|
||||
**"What about my privacy?"**
|
||||
Everything stays on your computer. Your conversations, documents, and data never leave your device unless you choose to share them.
|
||||
### "What about my privacy?"
|
||||
Everything stays on your computer with Jan. Your conversations, documents, and data never leave your device unless you choose to share them.
|
||||
|
||||
Want to learn more about the technical side? Check our detailed [guide on running AI models locally](/post/run-ai-models-locally). It's not required to [use AI offline](https://jan.ai/) but helps understand how it all works.
|
||||
|
||||
@ -116,3 +117,20 @@ Want to learn more about the technical side? Check our detailed [guide on runnin
|
||||
<Callout type="info">
|
||||
[Join our Discord community](https://discord.gg/Exe46xPMbK) for support and tips on using Jan as your offline ChatGPT alternative.
|
||||
</Callout>
|
||||
|
||||
### FAQ
|
||||
|
||||
#### Can I download ChatGPT for offline use?
|
||||
No. ChatGPT is cloud-only.
|
||||
|
||||
#### How to use ChatGPT offline?
|
||||
You can't. ChatGPT has no offline mode. Use Jan instead for a ChatGPT-like offline experience.
|
||||
|
||||
#### Does ChatGPT have internet access?
|
||||
Yes. It runs in the cloud.
|
||||
|
||||
#### What's the best way to use AI offline?
|
||||
Download Jan and run models like Mistral, DeepSeek, or GPT-OSS locally.
|
||||
|
||||
#### What's GPT offline?
|
||||
OpenAI has open-source models you can run locally but not via ChatGPT. One of them is [gpt-oss](https://www.jan.ai/post/run-gpt-oss-locally) and you can run it via Jan.
|
||||
@ -50,7 +50,7 @@ Thinking mode is powerful, but greedy decoding kills its output. It'll repeat or
|
||||
|
||||
## Quick summary
|
||||
|
||||

|
||||

|
||||
|
||||
### Non-thinking mode (`enable_thinking=False`)
|
||||
|
||||
|
||||
@ -4,7 +4,7 @@ description: "A straightforward guide to running AI models locally on your compu
|
||||
tags: AI, local models, Jan, GGUF, privacy, local AI
|
||||
categories: guides
|
||||
date: 2025-01-31
|
||||
ogImage: assets/run-ai-locally-with-jan.jpg
|
||||
ogImage: assets/images/general/run-ai-locally-with-jan.jpg
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
@ -35,7 +35,7 @@ Most people think running AI models locally is complicated. It's not. Anyone can
|
||||
|
||||
That's all to run your first AI model locally!
|
||||
|
||||

|
||||

|
||||
*Jan's easy-to-use chat interface after installation.*
|
||||
|
||||
Keep reading to learn key terms of local AI and the things you should know before running AI models locally.
|
||||
|
||||
@ -4,21 +4,19 @@ description: "Complete 5-minute beginner guide to running OpenAI's gpt-oss local
|
||||
tags: OpenAI, gpt-oss, local AI, Jan, privacy, Apache-2.0, llama.cpp, Ollama, LM Studio
|
||||
categories: guides
|
||||
date: 2025-08-06
|
||||
ogImage: assets/gpt-oss%20locally.jpeg
|
||||
ogImage: assets/images/general/gpt-oss locally.jpeg
|
||||
twitter:
|
||||
card: summary_large_image
|
||||
site: "@jandotai"
|
||||
title: "Run OpenAI's gpt-oss Locally in 5 Minutes (Beginner Guide)"
|
||||
description: "Complete 5-minute beginner guide to running OpenAI's gpt-oss locally with Jan AI for private, offline conversations."
|
||||
image: assets/gpt-oss%20locally.jpeg
|
||||
image: assets/images/general/gpt-oss locally.jpeg
|
||||
---
|
||||
import { Callout } from 'nextra/components'
|
||||
import CTABlog from '@/components/Blog/CTA'
|
||||
|
||||
# Run OpenAI's gpt-oss Locally in 5 mins
|
||||
|
||||

|
||||
|
||||
OpenAI launched [gpt-oss](https://openai.com/index/introducing-gpt-oss/), marking their return to open-source AI after GPT-2. This model is designed to run locally on consumer hardware. This guide shows you how to install and run gpt-oss on your computer for private, offline AI conversations.
|
||||
|
||||
## What is gpt-oss?
|
||||
|
||||
@ -107,14 +107,15 @@ const config: DocsThemeConfig = {
|
||||
head: function useHead() {
|
||||
const { title, frontMatter } = useConfig()
|
||||
const { asPath } = useRouter()
|
||||
const titleTemplate =
|
||||
(asPath.includes('/desktop')
|
||||
const titleTemplate = asPath.includes('/post/')
|
||||
? (frontMatter?.title || title)
|
||||
: (asPath.includes('/desktop')
|
||||
? 'Jan Desktop'
|
||||
: asPath.includes('/server')
|
||||
? 'Jan Server'
|
||||
: 'Jan') +
|
||||
' - ' +
|
||||
(frontMatter?.title || title)
|
||||
' - ' +
|
||||
(frontMatter?.title || title)
|
||||
|
||||
return (
|
||||
<Fragment>
|
||||
|
||||
@ -22,6 +22,9 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@janhq/core": "workspace:*",
|
||||
"@tabler/icons-react": "^3.34.0",
|
||||
"@types/react": "19.1.2",
|
||||
"react": "19.0.0",
|
||||
"typescript": "5.9.2",
|
||||
"vite": "5.4.20",
|
||||
"vitest": "2.1.9",
|
||||
@ -29,6 +32,8 @@
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@janhq/core": "*",
|
||||
"@tabler/icons-react": "*",
|
||||
"react": "19.0.0",
|
||||
"zustand": "5.0.3"
|
||||
},
|
||||
"dependencies": {
|
||||
|
||||
@ -14,4 +14,4 @@ export const DEFAULT_ASSISTANT = {
|
||||
name: 'Jan',
|
||||
avatar: '👋',
|
||||
created_at: 1747029866.542,
|
||||
}
|
||||
}
|
||||
|
||||
@ -11,6 +11,9 @@ import {
|
||||
} from '@janhq/core'
|
||||
import { RemoteApi } from './api'
|
||||
import { getDefaultAssistant, ObjectParser, combineConversationItemsToMessages } from './utils'
|
||||
import { ApiError } from '../shared/types/errors'
|
||||
|
||||
const CONVERSATION_NOT_FOUND_EVENT = 'conversation-not-found'
|
||||
|
||||
export default class ConversationalExtensionWeb extends ConversationalExtension {
|
||||
private remoteApi: RemoteApi | undefined
|
||||
@ -111,6 +114,15 @@ export default class ConversationalExtensionWeb extends ConversationalExtension
|
||||
return messages
|
||||
} catch (error) {
|
||||
console.error('Failed to list messages:', error)
|
||||
// Check if it's a 404 error (conversation not found)
|
||||
if (error instanceof ApiError && error.isNotFound()) {
|
||||
// Trigger a navigation event to redirect to home
|
||||
// We'll use a custom event that the web app can listen to
|
||||
window.dispatchEvent(new CustomEvent(CONVERSATION_NOT_FOUND_EVENT, {
|
||||
detail: { threadId, error: error.message }
|
||||
}))
|
||||
}
|
||||
|
||||
return []
|
||||
}
|
||||
}
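On the consumer side, nothing in this diff shows who listens for the `conversation-not-found` event. A minimal sketch of a listener the web app could register (hypothetical; the redirect target and teardown are assumptions, not part of this change):

const CONVERSATION_NOT_FOUND_EVENT = 'conversation-not-found'

function handleConversationNotFound(event: Event) {
  // detail shape matches what ConversationalExtensionWeb dispatches above
  const { threadId, error } = (event as CustomEvent<{ threadId: string; error: string }>).detail
  console.warn(`Conversation ${threadId} not found, redirecting home:`, error)
  window.location.assign('/') // assumption: a router-based redirect would work just as well
}

window.addEventListener(CONVERSATION_NOT_FOUND_EVENT, handleConversationNotFound)
// and on teardown:
// window.removeEventListener(CONVERSATION_NOT_FOUND_EVENT, handleConversationNotFound)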
|
||||
|
||||
@ -5,9 +5,45 @@

import { getSharedAuthService, JanAuthService } from '../shared'
import { JanModel, janProviderStore } from './store'
import { ApiError } from '../shared/types/errors'

// JAN_API_BASE is defined in vite.config.ts

// Constants
const TEMPORARY_CHAT_ID = 'temporary-chat'

/**
* Determines the appropriate API endpoint and request payload based on chat type
* @param request - The chat completion request
* @returns Object containing endpoint URL and processed request payload
*/
function getChatCompletionConfig(request: JanChatCompletionRequest, stream: boolean = false) {
const isTemporaryChat = request.conversation_id === TEMPORARY_CHAT_ID

// For temporary chats, use the stateless /chat/completions endpoint
// For regular conversations, use the stateful /conv/chat/completions endpoint
const endpoint = isTemporaryChat
? `${JAN_API_BASE}/chat/completions`
: `${JAN_API_BASE}/conv/chat/completions`

const payload = {
...request,
stream,
...(isTemporaryChat ? {
// For temporary chat: don't store anything, remove conversation metadata
conversation_id: undefined,
} : {
// For regular chat: store everything, use conversation metadata
store: true,
store_reasoning: true,
conversation: request.conversation_id,
conversation_id: undefined,
})
}

return { endpoint, payload, isTemporaryChat }
}

export interface JanModelsResponse {
object: string
data: JanModel[]
@ -102,7 +138,8 @@ export class JanApiClient {

return models
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Failed to fetch models'
const errorMessage = error instanceof ApiError ? error.message :
error instanceof Error ? error.message : 'Failed to fetch models'
janProviderStore.setError(errorMessage)
janProviderStore.setLoadingModels(false)
throw error

@ -115,22 +152,18 @@ export class JanApiClient {
try {
janProviderStore.clearError()

const { endpoint, payload } = getChatCompletionConfig(request, false)

return await this.authService.makeAuthenticatedRequest<JanChatCompletionResponse>(
`${JAN_API_BASE}/conv/chat/completions`,
endpoint,
{
method: 'POST',
body: JSON.stringify({
...request,
stream: false,
store: true,
store_reasoning: true,
conversation: request.conversation_id,
conversation_id: undefined,
}),
body: JSON.stringify(payload),
}
)
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Failed to create chat completion'
const errorMessage = error instanceof ApiError ? error.message :
error instanceof Error ? error.message : 'Failed to create chat completion'
janProviderStore.setError(errorMessage)
throw error
}

@ -144,23 +177,17 @@ export class JanApiClient {
): Promise<void> {
try {
janProviderStore.clearError()

const authHeader = await this.authService.getAuthHeader()

const response = await fetch(`${JAN_API_BASE}/conv/chat/completions`, {
const { endpoint, payload } = getChatCompletionConfig(request, true)

const response = await fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...authHeader,
},
body: JSON.stringify({
...request,
stream: true,
store: true,
store_reasoning: true,
conversation: request.conversation_id,
conversation_id: undefined,
}),
body: JSON.stringify(payload),
})

if (!response.ok) {

@ -216,7 +243,8 @@ export class JanApiClient {
reader.releaseLock()
}
} catch (error) {
const err = error instanceof Error ? error : new Error('Unknown error occurred')
const err = error instanceof ApiError ? error :
error instanceof Error ? error : new Error('Unknown error occurred')
janProviderStore.setError(err.message)
onError?.(err)
throw err

@ -230,7 +258,8 @@ export class JanApiClient {
await this.getModels()
console.log('Jan API client initialized successfully')
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Failed to initialize API client'
const errorMessage = error instanceof ApiError ? error.message :
error instanceof Error ? error.message : 'Failed to initialize API client'
janProviderStore.setError(errorMessage)
throw error
} finally {

@ -239,4 +268,4 @@ export class JanApiClient {
}
}

export const janApiClient = JanApiClient.getInstance()
export const janApiClient = JanApiClient.getInstance()
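Note (illustrative, not part of the diff): the getChatCompletionConfig helper added above routes temporary chats to the stateless completions endpoint and everything else to the stateful conversation endpoint. A minimal sketch of the expected behaviour, assuming a JanChatCompletionRequest carries model and messages fields alongside conversation_id:

// Hedged sketch only - the 'jan-v1' model name and request shape are assumptions, not confirmed by this PR.
const temp = getChatCompletionConfig({ model: 'jan-v1', messages: [], conversation_id: 'temporary-chat' }, true)
// temp.endpoint === `${JAN_API_BASE}/chat/completions`; temp.payload.conversation_id is undefined and nothing is stored

const regular = getChatCompletionConfig({ model: 'jan-v1', messages: [], conversation_id: 'conv_123' }, false)
// regular.endpoint === `${JAN_API_BASE}/conv/chat/completions`
// regular.payload includes stream: false, store: true, store_reasoning: true, conversation: 'conv_123'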
@ -1 +1 @@
export { default } from './provider'
export { default } from './provider'
@ -15,6 +15,7 @@ import {
} from '@janhq/core' // cspell: disable-line
import { janApiClient, JanChatMessage } from './api'
import { janProviderStore } from './store'
import { ApiError } from '../shared/types/errors'

// Jan models support tools via MCP
const JAN_MODEL_CAPABILITIES = ['tools'] as const

@ -192,7 +193,8 @@ export default class JanProviderWeb extends AIEngine {
console.error(`Failed to unload Jan session ${sessionId}:`, error)
return {
success: false,
error: error instanceof Error ? error.message : 'Unknown error',
error: error instanceof ApiError ? error.message :
error instanceof Error ? error.message : 'Unknown error',
}
}
}

@ -92,4 +92,4 @@ export const janProviderStore = {
useJanProviderStore.getState().clearError(),
reset: () =>
useJanProviderStore.getState().reset(),
}
}
extensions-web/src/mcp-web/components/WebSearchButton.tsx
@ -0,0 +1,54 @@
import { useMemo, useCallback } from 'react'
import { IconWorld } from '@tabler/icons-react'
import { MCPToolComponentProps } from '@janhq/core'

// List of tool names considered as web search tools
const WEB_SEARCH_TOOL_NAMES = ['google_search', 'scrape'];

export const WebSearchButton = ({
tools,
isToolEnabled,
onToolToggle,
}: MCPToolComponentProps) => {
const webSearchTools = useMemo(
() => tools.filter((tool) => WEB_SEARCH_TOOL_NAMES.includes(tool.name)),
[tools]
)

// Early return if no web search tools available
if (webSearchTools.length === 0) {
return null
}

// Check if all web search tools are enabled
const isEnabled = useMemo(
() => webSearchTools.every((tool) => isToolEnabled(tool.name)),
[webSearchTools, isToolEnabled]
)

const handleToggle = useCallback(() => {
// Toggle all web search tools at once
const newState = !isEnabled
webSearchTools.forEach((tool) => {
onToolToggle(tool.name, newState)
})
}, [isEnabled, webSearchTools, onToolToggle])

return (
<button
onClick={handleToggle}
className={`h-7 px-2 py-1 flex items-center justify-center rounded-md transition-all duration-200 ease-in-out gap-1 cursor-pointer ml-0.5 border-0 ${
isEnabled
? 'bg-accent/20 text-accent'
: 'bg-transparent text-main-view-fg/70 hover:bg-main-view-fg/5'
}`}
title={isEnabled ? 'Disable Web Search' : 'Enable Web Search'}
>
<IconWorld
size={16}
className={isEnabled ? 'text-accent' : 'text-main-view-fg/70'}
/>
<span className={`text-sm font-medium ${isEnabled ? 'text-accent' : ''}`}>Search</span>
</button>
)
}

extensions-web/src/mcp-web/components/index.ts
@ -0,0 +1 @@
export { WebSearchButton } from './WebSearchButton'
@ -4,11 +4,13 @@
* Uses official MCP TypeScript SDK with proper session handling
*/

import { MCPExtension, MCPTool, MCPToolCallResult } from '@janhq/core'
import { MCPExtension, MCPTool, MCPToolCallResult, MCPToolComponentProps } from '@janhq/core'
import { getSharedAuthService, JanAuthService } from '../shared'
import { Client } from '@modelcontextprotocol/sdk/client/index.js'
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js'
import { JanMCPOAuthProvider } from './oauth-provider'
import { WebSearchButton } from './components'
import type { ComponentType } from 'react'

// JAN_API_BASE is defined in vite.config.ts (defaults to 'https://api-dev.jan.ai/jan/v1')
declare const JAN_API_BASE: string

@ -232,4 +234,27 @@ export default class MCPExtensionWeb extends MCPExtension {
throw error
}
}
}

/**
* Provides a custom UI component for web search tools
* @returns The WebSearchButton component
*/
getToolComponent(): ComponentType<MCPToolComponentProps> | null {
return WebSearchButton
}

/**
* Returns the list of tool names that should be disabled by default for new users
* All MCP web tools are disabled by default to prevent accidental API usage
* @returns Array of tool names to disable by default
*/
async getDefaultDisabledTools(): Promise<string[]> {
try {
const tools = await this.getTools()
return tools.map(tool => tool.name)
} catch (error) {
console.error('Failed to get default disabled tools:', error)
return []
}
}
}
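Note (illustrative, not part of the diff): a host app could consume the two new MCPExtensionWeb methods roughly as below; the mcpExtension instance and the rendering wiring are assumptions, since the web-app side is not shown in this PR.

// Hypothetical consumer sketch - the real web-app wiring is outside this diff.
const ToolButton = mcpExtension.getToolComponent()                   // WebSearchButton, or null when no custom UI exists
const defaultDisabled = await mcpExtension.getDefaultDisabledTools() // every tool name, so web tools start disabled
// if (ToolButton) render <ToolButton tools={tools} isToolEnabled={isToolEnabled} onToolToggle={onToolToggle} />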
@ -57,4 +57,4 @@ export class JanMCPOAuthProvider implements OAuthClientProvider {
async codeVerifier(): Promise<string> {
throw new Error('Code verifier not supported')
}
}
}
@ -16,6 +16,7 @@ import { logoutUser, refreshToken, guestLogin } from './api'
import { AuthProviderRegistry } from './registry'
import { AuthBroadcast } from './broadcast'
import type { ProviderType } from './providers'
import { ApiError } from '../types/errors'

const authProviderRegistry = new AuthProviderRegistry()

@ -160,7 +161,7 @@ export class JanAuthService {
this.tokenExpiryTime = Date.now() + tokens.expires_in * 1000
} catch (error) {
console.error('Failed to refresh access token:', error)
if (error instanceof Error && error.message.includes('401')) {
if (error instanceof ApiError && error.isStatus(401)) {
await this.handleSessionExpired()
}
throw error

@ -305,9 +306,7 @@ export class JanAuthService {

if (!response.ok) {
const errorText = await response.text()
throw new Error(
`API request failed: ${response.status} ${response.statusText} - ${errorText}`
)
throw new ApiError(response.status, response.statusText, errorText)
}

return response.json()

@ -418,7 +417,7 @@ export class JanAuthService {
)
} catch (error) {
console.error('Failed to fetch user profile:', error)
if (error instanceof Error && error.message.includes('401')) {
if (error instanceof ApiError && error.isStatus(401)) {
// Authentication failed - handle session expiry
await this.handleSessionExpired()
return null
extensions-web/src/shared/types/errors.ts
@ -0,0 +1,50 @@
/**
* Shared error types for API responses
*/

export class ApiError extends Error {
public readonly status: number
public readonly statusText: string
public readonly responseText: string

constructor(status: number, statusText: string, responseText: string, message?: string) {
super(message || `API request failed: ${status} ${statusText} - ${responseText}`)
this.name = 'ApiError'
this.status = status
this.statusText = statusText
this.responseText = responseText

// Maintains proper stack trace for where our error was thrown (only available on V8)
if ((Error as any).captureStackTrace) {
(Error as any).captureStackTrace(this, ApiError)
}
}

/**
* Check if this is a specific HTTP status code
*/
isStatus(code: number): boolean {
return this.status === code
}

/**
* Check if this is a 404 Not Found error
*/
isNotFound(): boolean {
return this.status === 404
}

/**
* Check if this is a client error (4xx)
*/
isClientError(): boolean {
return this.status >= 400 && this.status < 500
}

/**
* Check if this is a server error (5xx)
*/
isServerError(): boolean {
return this.status >= 500 && this.status < 600
}
}
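Note (illustrative, not part of the diff): call sites elsewhere in this PR use the new ApiError to branch on HTTP status instead of string-matching error messages, roughly like this hedged sketch:

// Hedged sketch of the intended call-site pattern.
try {
  await janApiClient.getModels()
} catch (error) {
  if (error instanceof ApiError && error.isNotFound()) {
    // 404 - the resource is gone; e.g. redirect instead of surfacing a raw error string
  } else if (error instanceof ApiError && error.isServerError()) {
    // 5xx - transient failure; surface a retryable error
  }
}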
@ -38,4 +38,4 @@ export interface IndexedDBConfig {
keyPath: string
indexes?: { name: string; keyPath: string | string[]; unique?: boolean }[]
}[]
}
}
extensions-web/src/types/global.d.ts
@ -2,4 +2,4 @@ export {}

declare global {
declare const JAN_API_BASE: string
}
}

extensions-web/src/vite-env.d.ts
@ -1 +1 @@
/// <reference types="vite/client" />
/// <reference types="vite/client" />
@ -3,6 +3,7 @@
"target": "ES2020",
"module": "ESNext",
"moduleResolution": "bundler",
"jsx": "react-jsx",
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
"strict": true,
@ -9,11 +9,11 @@ export default defineConfig({
fileName: 'index'
},
rollupOptions: {
external: ['@janhq/core', 'zustand']
external: ['@janhq/core', 'zustand', 'react', 'react-dom', 'react/jsx-runtime', '@tabler/icons-react']
},
emptyOutDir: false // Don't clean the output directory
},
define: {
JAN_API_BASE: JSON.stringify(process.env.JAN_API_BASE || 'https://api-dev.jan.ai/v1'),
}
})
})
@ -1,9 +1,8 @@
import { getJanDataFolderPath, fs, joinPath, events } from '@janhq/core'
import { invoke } from '@tauri-apps/api/core'
import { getProxyConfig } from './util'
import { getProxyConfig, basenameNoExt } from './util'
import { dirname, basename } from '@tauri-apps/api/path'
import { getSystemInfo } from '@janhq/tauri-plugin-hardware-api'

/*
* Reads currently installed backends in janDataFolderPath
*

@ -73,10 +72,7 @@ async function fetchRemoteSupportedBackends(

if (!name.startsWith(prefix)) continue

const backend = name
.replace(prefix, '')
.replace('.tar.gz', '')
.replace('.zip', '')
const backend = basenameNoExt(name).slice(prefix.length)

if (supportedBackends.includes(backend)) {
remote.push({ version, backend })
@ -39,7 +39,6 @@ import { getProxyConfig } from './util'
import { basename } from '@tauri-apps/api/path'
import {
readGgufMetadata,
estimateKVCacheSize,
getModelSize,
isModelSupported,
planModelLoadInternal,

@ -58,6 +57,8 @@ type LlamacppConfig = {
chat_template: string
n_gpu_layers: number
offload_mmproj: boolean
cpu_moe: boolean
n_cpu_moe: number
override_tensor_buffer_t: string
ctx_size: number
threads: number

@ -332,12 +333,14 @@ export default class llamacpp_extension extends AIEngine {
)
// Clear the invalid stored preference
this.clearStoredBackendType()
bestAvailableBackendString =
await this.determineBestBackend(version_backends)
bestAvailableBackendString = await this.determineBestBackend(
version_backends
)
}
} else {
bestAvailableBackendString =
await this.determineBestBackend(version_backends)
bestAvailableBackendString = await this.determineBestBackend(
version_backends
)
}

let settings = structuredClone(SETTINGS)

@ -1581,6 +1584,10 @@ export default class llamacpp_extension extends AIEngine {
])
args.push('--jinja')
args.push('-m', modelPath)
if (cfg.cpu_moe) args.push('--cpu-moe')
if (cfg.n_cpu_moe && cfg.n_cpu_moe > 0) {
args.push('--n-cpu-moe', String(cfg.n_cpu_moe))
}
// For overriding tensor buffer type, useful where
// massive MOE models can be made faster by keeping attention on the GPU
// and offloading the expert FFNs to the CPU.

@ -2151,7 +2158,12 @@ export default class llamacpp_extension extends AIEngine {
if (mmprojPath && !this.isAbsolutePath(mmprojPath))
mmprojPath = await joinPath([await getJanDataFolderPath(), path])
try {
const result = await planModelLoadInternal(path, this.memoryMode, mmprojPath, requestedCtx)
const result = await planModelLoadInternal(
path,
this.memoryMode,
mmprojPath,
requestedCtx
)
return result
} catch (e) {
throw new Error(String(e))

@ -2279,12 +2291,18 @@ export default class llamacpp_extension extends AIEngine {
}

// Calculate text tokens
const messages = JSON.stringify({ messages: opts.messages })
// Use chat_template_kwargs from opts if provided, otherwise default to disable enable_thinking
const tokenizeRequest = {
messages: opts.messages,
chat_template_kwargs: opts.chat_template_kwargs || {
enable_thinking: false,
},
}

let parseResponse = await fetch(`${baseUrl}/apply-template`, {
method: 'POST',
headers: headers,
body: messages,
body: JSON.stringify(tokenizeRequest),
})

if (!parseResponse.ok) {
extensions/llamacpp-extension/src/type.d.ts
@ -0,0 +1,12 @@
export {}

declare global {
interface RequestInit {
/**
* Tauri HTTP plugin option for connection timeout in milliseconds.
*/
connectTimeout?: number
}
}
@ -1,3 +1,23 @@
// File path utilities
export function basenameNoExt(filePath: string): string {
const VALID_EXTENSIONS = [".tar.gz", ".zip"];

// handle VALID extensions first
for (const ext of VALID_EXTENSIONS) {
if (filePath.toLowerCase().endsWith(ext)) {
return filePath.slice(0, -ext.length);
}
}

// fallback: remove only the last extension
const lastDotIndex = filePath.lastIndexOf('.');
if (lastDotIndex > 0) {
return filePath.slice(0, lastDotIndex);
}

return filePath;
}

// Zustand proxy state structure
interface ProxyState {
proxyEnabled: boolean
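Note (illustrative, not part of the diff): expected behaviour of the new basenameNoExt helper; the archive names below are made-up examples, not files shipped by this PR.

// Hedged sketch of expected results for backend archive names.
basenameNoExt('llama-b4567-bin-linux-vulkan-x64.tar.gz') // -> 'llama-b4567-bin-linux-vulkan-x64' (multi-part extension handled first)
basenameNoExt('llama-b4567-bin-win-avx2-x64.zip')        // -> 'llama-b4567-bin-win-avx2-x64'
basenameNoExt('model.gguf')                              // -> 'model' (falls back to stripping the last extension)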
@ -342,41 +342,41 @@ __metadata:

"@janhq/core@file:../../core/package.tgz::locator=%40janhq%2Fassistant-extension%40workspace%3Aassistant-extension":
version: 0.1.10
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f9bdfe&locator=%40janhq%2Fassistant-extension%40workspace%3Aassistant-extension"
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f15485&locator=%40janhq%2Fassistant-extension%40workspace%3Aassistant-extension"
dependencies:
rxjs: "npm:^7.8.1"
ulidx: "npm:^2.3.0"
checksum: 10c0/417ea9bd3e5b53264596d2ee816c3e24299f8b721f6ea951d078342555da457ebca4d5b1e116bf187ac77ec0a9e3341211d464f4ffdbd2a3915139523688d41d
checksum: 10c0/257621cb56db31a4dd3a2b509ec4c61217022e74bbd39cf6a1a172073654b9a65eee94ef9c1b4d4f5d2231d159c8818cb02846f3d88fe14f102f43169ad3737c
languageName: node
linkType: hard

"@janhq/core@file:../../core/package.tgz::locator=%40janhq%2Fconversational-extension%40workspace%3Aconversational-extension":
version: 0.1.10
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f9bdfe&locator=%40janhq%2Fconversational-extension%40workspace%3Aconversational-extension"
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f15485&locator=%40janhq%2Fconversational-extension%40workspace%3Aconversational-extension"
dependencies:
rxjs: "npm:^7.8.1"
ulidx: "npm:^2.3.0"
checksum: 10c0/417ea9bd3e5b53264596d2ee816c3e24299f8b721f6ea951d078342555da457ebca4d5b1e116bf187ac77ec0a9e3341211d464f4ffdbd2a3915139523688d41d
checksum: 10c0/257621cb56db31a4dd3a2b509ec4c61217022e74bbd39cf6a1a172073654b9a65eee94ef9c1b4d4f5d2231d159c8818cb02846f3d88fe14f102f43169ad3737c
languageName: node
linkType: hard

"@janhq/core@file:../../core/package.tgz::locator=%40janhq%2Fdownload-extension%40workspace%3Adownload-extension":
version: 0.1.10
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f9bdfe&locator=%40janhq%2Fdownload-extension%40workspace%3Adownload-extension"
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f15485&locator=%40janhq%2Fdownload-extension%40workspace%3Adownload-extension"
dependencies:
rxjs: "npm:^7.8.1"
ulidx: "npm:^2.3.0"
checksum: 10c0/417ea9bd3e5b53264596d2ee816c3e24299f8b721f6ea951d078342555da457ebca4d5b1e116bf187ac77ec0a9e3341211d464f4ffdbd2a3915139523688d41d
checksum: 10c0/257621cb56db31a4dd3a2b509ec4c61217022e74bbd39cf6a1a172073654b9a65eee94ef9c1b4d4f5d2231d159c8818cb02846f3d88fe14f102f43169ad3737c
languageName: node
linkType: hard

"@janhq/core@file:../../core/package.tgz::locator=%40janhq%2Fllamacpp-extension%40workspace%3Allamacpp-extension":
version: 0.1.10
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f9bdfe&locator=%40janhq%2Fllamacpp-extension%40workspace%3Allamacpp-extension"
resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f15485&locator=%40janhq%2Fllamacpp-extension%40workspace%3Allamacpp-extension"
dependencies:
rxjs: "npm:^7.8.1"
ulidx: "npm:^2.3.0"
checksum: 10c0/417ea9bd3e5b53264596d2ee816c3e24299f8b721f6ea951d078342555da457ebca4d5b1e116bf187ac77ec0a9e3341211d464f4ffdbd2a3915139523688d41d
checksum: 10c0/257621cb56db31a4dd3a2b509ec4c61217022e74bbd39cf6a1a172073654b9a65eee94ef9c1b4d4f5d2231d159c8818cb02846f3d88fe14f102f43169ad3737c
languageName: node
linkType: hard
package.json
@ -12,6 +12,8 @@
"scripts": {
"lint": "yarn workspace @janhq/web-app lint",
"dev": "yarn dev:tauri",
"ios": "yarn tauri ios dev",
"android": "yarn tauri android dev",
"build": "yarn build:web && yarn build:tauri",
"test": "vitest run",
"test:watch": "vitest",

@ -24,12 +26,17 @@
"serve:web-app": "yarn workspace @janhq/web-app serve:web",
"build:serve:web-app": "yarn build:web-app && yarn serve:web-app",
"dev:tauri": "yarn build:icon && yarn copy:assets:tauri && cross-env IS_CLEAN=true tauri dev",
"dev:ios": "yarn copy:assets:mobile && RUSTC_WRAPPER= cross-env IS_IOS=true yarn tauri ios dev --features mobile",
"dev:android": "yarn copy:assets:mobile && cross-env IS_ANDROID=true yarn tauri android dev --features mobile",
"build:android": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_CLEAN=true yarn tauri android build -- --no-default-features --features mobile",
"build:ios": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_IOS=true yarn tauri ios build -- --no-default-features --features mobile",
"build:ios:device": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_IOS=true yarn tauri ios build -- --no-default-features --features mobile --export-method debugging",
"copy:assets:tauri": "cpx \"pre-install/*.tgz\" \"src-tauri/resources/pre-install/\" && cpx \"LICENSE\" \"src-tauri/resources/\"",
"copy:assets:mobile": "cpx \"pre-install/*.tgz\" \"src-tauri/resources/pre-install/\" && cpx \"LICENSE\" \"src-tauri/resources/\"",
"download:lib": "node ./scripts/download-lib.mjs",
"download:bin": "node ./scripts/download-bin.mjs",
"download:windows-installer": "node ./scripts/download-win-installer-deps.mjs",
"build:tauri:win32": "yarn download:bin && yarn download:lib && yarn download:windows-installer && yarn tauri build",
"build:tauri:linux": "yarn download:bin && yarn download:lib && NO_STRIP=1 ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build && ./src-tauri/build-utils/buildAppImage.sh",
"build:tauri:win32": "yarn download:bin && yarn tauri build",
"build:tauri:linux": "yarn download:bin && NO_STRIP=1 ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build && ./src-tauri/build-utils/buildAppImage.sh",
"build:tauri:darwin": "yarn download:bin && yarn tauri build --target universal-apple-darwin",
"build:tauri": "yarn build:icon && yarn copy:assets:tauri && run-script-os",
"build:tauri:plugin:api": "cd src-tauri/plugins && yarn install && yarn workspaces foreach -Apt run build",

@ -58,7 +65,9 @@
"hoistingLimits": "workspaces"
},
"resolutions": {
"yallist": "4.0.0"
"yallist": "4.0.0",
"@types/react": "19.1.2",
"@types/react-dom": "19.1.2"
},
"packageManager": "yarn@4.5.3"
}
@ -1,4 +1,3 @@
console.log('Script is running')
// scripts/download.js
import https from 'https'
import fs, { copyFile, mkdirSync } from 'fs'

@ -69,7 +68,10 @@ function getPlatformArch() {
arch === 'arm64' ? 'aarch64-apple-darwin' : 'x86_64-apple-darwin'
} else if (platform === 'linux') {
bunPlatform = arch === 'arm64' ? 'linux-aarch64' : 'linux-x64'
uvPlatform = arch === 'arm64' ? 'aarch64-unknown-linux-gnu' : 'x86_64-unknown-linux-gnu'
uvPlatform =
arch === 'arm64'
? 'aarch64-unknown-linux-gnu'
: 'x86_64-unknown-linux-gnu'
} else if (platform === 'win32') {
bunPlatform = 'windows-x64' // Bun has limited Windows support
uvPlatform = 'x86_64-pc-windows-msvc'

@ -81,6 +83,10 @@ function getPlatformArch() {
}

async function main() {
if (process.env.SKIP_BINARIES) {
console.log('Skipping binaries download.')
process.exit(0)
}
console.log('Starting main function')
const platform = os.platform()
const { bunPlatform, uvPlatform } = getPlatformArch()

@ -100,13 +106,11 @@ async function main() {
}

// Adjust these URLs based on latest releases
const bunVersion = '1.2.10' // Example Bun version
const bunUrl = `https://github.com/oven-sh/bun/releases/download/bun-v${bunVersion}/bun-${bunPlatform}.zip`
const bunUrl = `https://github.com/oven-sh/bun/releases/latest/download/bun-${bunPlatform}.zip`

const uvVersion = '0.6.17' // Example UV version
let uvUrl = `https://github.com/astral-sh/uv/releases/download/${uvVersion}/uv-${uvPlatform}.tar.gz`
let uvUrl = `https://github.com/astral-sh/uv/releases/latest/download/uv-${uvPlatform}.tar.gz`
if (platform === 'win32') {
uvUrl = `https://github.com/astral-sh/uv/releases/download/${uvVersion}/uv-${uvPlatform}.zip`
uvUrl = `https://github.com/astral-sh/uv/releases/latest/download/uv-${uvPlatform}.zip`
}

console.log(`Downloading Bun for ${bunPlatform}...`)
@ -124,29 +128,45 @@ async function main() {
if (err) {
console.log('Add execution permission failed!', err)
}
});
})
if (platform === 'darwin') {
copyFile(path.join(binDir, 'bun'), path.join(binDir, 'bun-x86_64-apple-darwin'), (err) => {
if (err) {
console.log("Error Found:", err);
}
})
copyFile(path.join(binDir, 'bun'), path.join(binDir, 'bun-aarch64-apple-darwin'), (err) => {
if (err) {
console.log("Error Found:", err);
}
})
copyFile(path.join(binDir, 'bun'), path.join(binDir, 'bun-universal-apple-darwin'), (err) => {
copyFile(
path.join(binDir, 'bun'),
path.join(binDir, 'bun-x86_64-apple-darwin'),
(err) => {
if (err) {
console.log("Error Found:", err);
console.log('Error Found:', err)
}
})
} else if (platform === 'linux') {
copyFile(path.join(binDir, 'bun'), path.join(binDir, 'bun-x86_64-unknown-linux-gnu'), (err) => {
if (err) {
console.log("Error Found:", err);
}
})
)
copyFile(
path.join(binDir, 'bun'),
path.join(binDir, 'bun-aarch64-apple-darwin'),
(err) => {
if (err) {
console.log('Error Found:', err)
}
}
)
copyFile(
path.join(binDir, 'bun'),
path.join(binDir, 'bun-universal-apple-darwin'),
(err) => {
if (err) {
console.log('Error Found:', err)
}
}
)
} else if (platform === 'linux') {
copyFile(
path.join(binDir, 'bun'),
path.join(binDir, 'bun-x86_64-unknown-linux-gnu'),
(err) => {
if (err) {
console.log('Error Found:', err)
}
}
)
}
} catch (err) {
// Expect EEXIST error

@ -157,11 +177,15 @@ async function main() {
path.join(binDir)
)
if (platform === 'win32') {
copyFile(path.join(binDir, 'bun.exe'), path.join(binDir, 'bun-x86_64-pc-windows-msvc.exe'), (err) => {
if (err) {
console.log("Error Found:", err);
copyFile(
path.join(binDir, 'bun.exe'),
path.join(binDir, 'bun-x86_64-pc-windows-msvc.exe'),
(err) => {
if (err) {
console.log('Error Found:', err)
}
}
})
)
}
} catch (err) {
// Expect EEXIST error

@ -176,52 +200,66 @@ async function main() {
await decompress(uvPath, tempBinDir)
}
try {
copySync(
path.join(tempBinDir, `uv-${uvPlatform}`, 'uv'),
path.join(binDir)
)
copySync(path.join(tempBinDir, `uv-${uvPlatform}`, 'uv'), path.join(binDir))
fs.chmod(path.join(binDir, 'uv'), 0o755, (err) => {
if (err) {
console.log('Add execution permission failed!', err)
}
});
})
if (platform === 'darwin') {
copyFile(path.join(binDir, 'uv'), path.join(binDir, 'uv-x86_64-apple-darwin'), (err) => {
if (err) {
console.log("Error Found:", err);
copyFile(
path.join(binDir, 'uv'),
path.join(binDir, 'uv-x86_64-apple-darwin'),
(err) => {
if (err) {
console.log('Error Found:', err)
}
}
})
copyFile(path.join(binDir, 'uv'), path.join(binDir, 'uv-aarch64-apple-darwin'), (err) => {
if (err) {
console.log("Error Found:", err);
)
copyFile(
path.join(binDir, 'uv'),
path.join(binDir, 'uv-aarch64-apple-darwin'),
(err) => {
if (err) {
console.log('Error Found:', err)
}
}
})
copyFile(path.join(binDir, 'uv'), path.join(binDir, 'uv-universal-apple-darwin'), (err) => {
if (err) {
console.log("Error Found:", err);
)
copyFile(
path.join(binDir, 'uv'),
path.join(binDir, 'uv-universal-apple-darwin'),
(err) => {
if (err) {
console.log('Error Found:', err)
}
}
})
)
} else if (platform === 'linux') {
copyFile(path.join(binDir, 'uv'), path.join(binDir, 'uv-x86_64-unknown-linux-gnu'), (err) => {
if (err) {
console.log("Error Found:", err);
copyFile(
path.join(binDir, 'uv'),
path.join(binDir, 'uv-x86_64-unknown-linux-gnu'),
(err) => {
if (err) {
console.log('Error Found:', err)
}
}
})
)
}
} catch (err) {
// Expect EEXIST error
}
try {
copySync(
path.join(tempBinDir, 'uv.exe'),
path.join(binDir)
)
copySync(path.join(tempBinDir, 'uv.exe'), path.join(binDir))
if (platform === 'win32') {
copyFile(path.join(binDir, 'uv.exe'), path.join(binDir, 'uv-x86_64-pc-windows-msvc.exe'), (err) => {
if (err) {
console.log("Error Found:", err);
copyFile(
path.join(binDir, 'uv.exe'),
path.join(binDir, 'uv-x86_64-pc-windows-msvc.exe'),
(err) => {
if (err) {
console.log('Error Found:', err)
}
}
})
)
}
} catch (err) {
// Expect EEXIST error
@ -1,86 +0,0 @@
console.log('Script is running')
// scripts/download-lib.mjs
import https from 'https'
import fs, { mkdirSync } from 'fs'
import os from 'os'
import path from 'path'
import { copySync } from 'cpx'

function download(url, dest) {
return new Promise((resolve, reject) => {
console.log(`Downloading ${url} to ${dest}`)
const file = fs.createWriteStream(dest)
https
.get(url, (response) => {
console.log(`Response status code: ${response.statusCode}`)
if (
response.statusCode >= 300 &&
response.statusCode < 400 &&
response.headers.location
) {
// Handle redirect
const redirectURL = response.headers.location
console.log(`Redirecting to ${redirectURL}`)
download(redirectURL, dest).then(resolve, reject) // Recursive call
return
} else if (response.statusCode !== 200) {
reject(`Failed to get '${url}' (${response.statusCode})`)
return
}
response.pipe(file)
file.on('finish', () => {
file.close(resolve)
})
})
.on('error', (err) => {
fs.unlink(dest, () => reject(err.message))
})
})
}

async function main() {
console.log('Starting main function')
const platform = os.platform() // 'darwin', 'linux', 'win32'
const arch = os.arch() // 'x64', 'arm64', etc.

if (arch != 'x64') return

let filename
if (platform == 'linux')
filename = 'libvulkan.so'
else if (platform == 'win32')
filename = 'vulkan-1.dll'
else
return

const url = `https://catalog.jan.ai/${filename}`

const libDir = 'src-tauri/resources/lib'
const tempDir = 'scripts/dist'

try {
mkdirSync('scripts/dist')
} catch (err) {
// Expect EEXIST error if the directory already exists
}

console.log(`Downloading libvulkan...`)
const savePath = path.join(tempDir, filename)
if (!fs.existsSync(savePath)) {
await download(url, savePath)
}

// copy to tauri resources
try {
copySync(savePath, libDir)
} catch (err) {
// Expect EEXIST error
}

console.log('Downloads completed.')
}

main().catch((err) => {
console.error('Error:', err)
process.exit(1)
})
@ -1,83 +0,0 @@
console.log('Downloading Windows installer dependencies...')
// scripts/download-win-installer-deps.mjs
import https from 'https'
import fs, { mkdirSync } from 'fs'
import os from 'os'
import path from 'path'
import { copySync } from 'cpx'

function download(url, dest) {
return new Promise((resolve, reject) => {
console.log(`Downloading ${url} to ${dest}`)
const file = fs.createWriteStream(dest)
https
.get(url, (response) => {
console.log(`Response status code: ${response.statusCode}`)
if (
response.statusCode >= 300 &&
response.statusCode < 400 &&
response.headers.location
) {
// Handle redirect
const redirectURL = response.headers.location
console.log(`Redirecting to ${redirectURL}`)
download(redirectURL, dest).then(resolve, reject) // Recursive call
return
} else if (response.statusCode !== 200) {
reject(`Failed to get '${url}' (${response.statusCode})`)
return
}
response.pipe(file)
file.on('finish', () => {
file.close(resolve)
})
})
.on('error', (err) => {
fs.unlink(dest, () => reject(err.message))
})
})
}

async function main() {
console.log('Starting Windows installer dependencies download')
const platform = os.platform() // 'darwin', 'linux', 'win32'
const arch = os.arch() // 'x64', 'arm64', etc.

if (arch != 'x64') return

const libDir = 'src-tauri/resources/lib'
const tempDir = 'scripts/dist'

try {
mkdirSync('scripts/dist')
} catch (err) {
// Expect EEXIST error if the directory already exists
}

// Download VC++ Redistributable 17
if (platform == 'win32') {
const vcFilename = 'vc_redist.x64.exe'
const vcUrl = 'https://aka.ms/vs/17/release/vc_redist.x64.exe'

console.log(`Downloading VC++ Redistributable...`)
const vcSavePath = path.join(tempDir, vcFilename)
if (!fs.existsSync(vcSavePath)) {
await download(vcUrl, vcSavePath)
}

// copy to tauri resources
try {
copySync(vcSavePath, libDir)
} catch (err) {
// Expect EEXIST error
}
}

console.log('Windows installer dependencies downloads completed.')
}

main().catch((err) => {
console.error('Error:', err)
process.exit(1)
})
@ -3,3 +3,20 @@
# see https://github.com/tauri-apps/tauri/pull/4383#issuecomment-1212221864
__TAURI_WORKSPACE__ = "true"
ENABLE_SYSTEM_TRAY_ICON = "false"

[target.aarch64-linux-android]
linker = "aarch64-linux-android21-clang"
ar = "llvm-ar"
rustflags = ["-C", "link-arg=-fuse-ld=lld"]

[target.armv7-linux-androideabi]
linker = "armv7a-linux-androideabi21-clang"
ar = "llvm-ar"

[target.x86_64-linux-android]
linker = "x86_64-linux-android21-clang"
ar = "llvm-ar"

[target.i686-linux-android]
linker = "i686-linux-android21-clang"
ar = "llvm-ar"
src-tauri/.gitignore
@ -2,6 +2,7 @@
# will have compiled files and executables
/target/
/gen/schemas
/gen/android
binaries
!binaries/download.sh
!binaries/download.bat