diff --git a/.github/workflows/template-tauri-build-linux-x64-external.yml b/.github/workflows/template-tauri-build-linux-x64-external.yml index a88c48267..5c39e17c7 100644 --- a/.github/workflows/template-tauri-build-linux-x64-external.yml +++ b/.github/workflows/template-tauri-build-linux-x64-external.yml @@ -70,10 +70,9 @@ jobs: run: | echo "Version: ${{ inputs.new_version }}" jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json - mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json if [ "${{ inputs.channel }}" != "stable" ]; then - jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun", - "usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json + jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json fi jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json @@ -83,7 +82,7 @@ jobs: jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json - + echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------" cat ./src-tauri/plugins/tauri-plugin-hardware/package.json @@ -96,7 +95,7 @@ jobs: ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}" echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------" cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml - + ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}" echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------" cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml @@ -125,7 +124,7 @@ jobs: env: RELEASE_CHANNEL: '${{ inputs.channel }}' AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }} - + - name: Upload Artifact uses: actions/upload-artifact@v4 with: @@ -136,4 +135,4 @@ jobs: uses: actions/upload-artifact@v4 with: name: jan-linux-amd64-${{ inputs.new_version }}-AppImage - path: ./src-tauri/target/release/bundle/appimage/*.AppImage \ No newline at end of file + path: ./src-tauri/target/release/bundle/appimage/*.AppImage diff --git a/.github/workflows/template-tauri-build-linux-x64-flatpak.yml b/.github/workflows/template-tauri-build-linux-x64-flatpak.yml index 15d4827f7..d63fae3e7 100644 --- a/.github/workflows/template-tauri-build-linux-x64-flatpak.yml +++ b/.github/workflows/template-tauri-build-linux-x64-flatpak.yml @@ -91,10 +91,9 @@ jobs: echo "Version: ${{ inputs.new_version }}" # Update tauri.conf.json jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json - mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json if [ "${{ inputs.channel }}" != "stable" ]; then - jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun", - "usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json + 
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json fi jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json @@ -104,7 +103,7 @@ jobs: jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json - + echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------" cat ./src-tauri/plugins/tauri-plugin-hardware/package.json @@ -117,7 +116,7 @@ jobs: ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}" echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------" cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml - + ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}" echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------" cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml @@ -128,7 +127,7 @@ jobs: # Temporarily enable devtool on prod build ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools" - cat ./src-tauri/Cargo.toml + cat ./src-tauri/Cargo.toml # Change app name for beta and nightly builds if [ "${{ inputs.channel }}" != "stable" ]; then @@ -139,7 +138,7 @@ jobs: .github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }} cat ./src-tauri/tauri.conf.json - + # Update Cargo.toml ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}" ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools" @@ -184,4 +183,3 @@ jobs: with: name: jan-linux-amd64-flatpak-${{ inputs.new_version }}-AppImage path: ./src-tauri/target/release/bundle/appimage/*.AppImage - diff --git a/.github/workflows/template-tauri-build-linux-x64.yml b/.github/workflows/template-tauri-build-linux-x64.yml index bd9b38369..487571595 100644 --- a/.github/workflows/template-tauri-build-linux-x64.yml +++ b/.github/workflows/template-tauri-build-linux-x64.yml @@ -108,10 +108,9 @@ jobs: echo "Version: ${{ inputs.new_version }}" # Update tauri.conf.json jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json - mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json if [ "${{ inputs.channel }}" != "stable" ]; then - jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun", - "usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json + jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json fi jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json @@ -121,7 +120,7 @@ jobs: jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json - + echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------" cat 
./src-tauri/plugins/tauri-plugin-hardware/package.json @@ -134,7 +133,7 @@ jobs: ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}" echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------" cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml - + ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}" echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------" cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml @@ -156,7 +155,7 @@ jobs: .github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }} cat ./src-tauri/tauri.conf.json - + # Update Cargo.toml ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}" ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools" diff --git a/.github/workflows/template-tauri-build-windows-x64-external.yml b/.github/workflows/template-tauri-build-windows-x64-external.yml index 16895de65..dbd5fd7eb 100644 --- a/.github/workflows/template-tauri-build-windows-x64-external.yml +++ b/.github/workflows/template-tauri-build-windows-x64-external.yml @@ -49,6 +49,8 @@ jobs: # Update tauri.conf.json jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json + mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json jq '.bundle.windows.signCommand = "echo External build - skipping signature: %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json @@ -80,6 +82,36 @@ jobs: echo "---------./src-tauri/Cargo.toml---------" cat ./src-tauri/Cargo.toml + generate_build_version() { + ### Example + ### input 0.5.6 output will be 0.5.6 and 0.5.6.0 + ### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2 + ### input 0.5.6-1213 output will be 0.5.6 and 0.5.6.1213 + local new_version="$1" + local base_version + local t_value + # Check if it has a "-" + if [[ "$new_version" == *-* ]]; then + base_version="${new_version%%-*}" # part before - + suffix="${new_version#*-}" # part after - + # Check if it is rcX-beta + if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then + t_value="${BASH_REMATCH[1]}" + else + t_value="$suffix" + fi + else + base_version="$new_version" + t_value="0" + fi + # Export two values + new_base_version="$base_version" + new_build_version="${base_version}.${t_value}" + } + generate_build_version ${{ inputs.new_version }} + sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template + if [ "${{ inputs.channel }}" != "stable" ]; then jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json @@ -103,7 +135,14 @@ jobs: chmod +x .github/scripts/rename-workspace.sh .github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }} cat ./package.json + sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" 
./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template + else + sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template fi + echo "---------nsis.template---------" + cat ./src-tauri/tauri.bundle.windows.nsis.template - name: Build app shell: bash run: | diff --git a/.github/workflows/template-tauri-build-windows-x64.yml b/.github/workflows/template-tauri-build-windows-x64.yml index 643fef5ac..edf8d88aa 100644 --- a/.github/workflows/template-tauri-build-windows-x64.yml +++ b/.github/workflows/template-tauri-build-windows-x64.yml @@ -54,6 +54,8 @@ on: value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }} FILE_NAME: value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }} + MSI_FILE_NAME: + value: ${{ jobs.build-windows-x64.outputs.MSI_FILE_NAME }} jobs: build-windows-x64: @@ -61,6 +63,7 @@ jobs: outputs: WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }} FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }} + MSI_FILE_NAME: ${{ steps.metadata.outputs.MSI_FILE_NAME }} permissions: contents: write steps: @@ -95,9 +98,15 @@ jobs: # Update tauri.conf.json jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json + mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json mv /tmp/package.json web-app/package.json + # Add sign commands to tauri.windows.conf.json + jq '.bundle.windows.signCommand = "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json + mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json + # Update tauri plugin versions jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json @@ -124,9 +133,35 @@ jobs: echo "---------./src-tauri/Cargo.toml---------" cat ./src-tauri/Cargo.toml - # Add sign commands to tauri.windows.conf.json - jq '.bundle.windows.signCommand = "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json - mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json + generate_build_version() { + ### Example + ### input 0.5.6 output will be 0.5.6 and 0.5.6.0 + ### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2 + ### input 0.5.6-1213 output will be 0.5.6 and and 0.5.6.1213 + local new_version="$1" + local base_version + local t_value + # Check if it has a "-" + if [[ "$new_version" == *-* ]]; then + base_version="${new_version%%-*}" # part before - + suffix="${new_version#*-}" # part after - + # Check if it is rcX-beta + if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then + t_value="${BASH_REMATCH[1]}" + else + t_value="$suffix" + fi + else + base_version="$new_version" + t_value="0" + fi + # Export two values + new_base_version="$base_version" + new_build_version="${base_version}.${t_value}" + } + generate_build_version ${{ inputs.new_version }} + sed -i "s/jan_version/$new_base_version/g" 
./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template echo "---------tauri.windows.conf.json---------" cat ./src-tauri/tauri.windows.conf.json @@ -160,7 +195,14 @@ jobs: chmod +x .github/scripts/rename-workspace.sh .github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }} cat ./package.json + sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template + else + sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template fi + echo "---------nsis.template---------" + cat ./src-tauri/tauri.bundle.windows.nsis.template - name: Install AzureSignTool run: | @@ -189,9 +231,15 @@ jobs: - name: Upload Artifact uses: actions/upload-artifact@v4 with: - name: jan-windows-${{ inputs.new_version }} + name: jan-windows-exe-${{ inputs.new_version }} path: | ./src-tauri/target/release/bundle/nsis/*.exe + - name: Upload Artifact + uses: actions/upload-artifact@v4 + with: + name: jan-windows-msi-${{ inputs.new_version }} + path: | + ./src-tauri/target/release/bundle/msi/*.msi ## Set output filename for windows - name: Set output filename for windows @@ -201,13 +249,18 @@ jobs: if [ "${{ inputs.channel }}" != "stable" ]; then FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig) + + MSI_FILE="Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64_en-US.msi" else FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig) + + MSI_FILE="Jan_${{ inputs.new_version }}_x64_en-US.msi" fi echo "::set-output name=WIN_SIG::$WIN_SIG" echo "::set-output name=FILE_NAME::$FILE_NAME" + echo "::set-output name=MSI_FILE_NAME::$MSI_FILE" id: metadata ## Upload to s3 for nightly and beta diff --git a/.gitignore b/.gitignore index 6b51867ef..e78486abd 100644 --- a/.gitignore +++ b/.gitignore @@ -21,11 +21,13 @@ src-tauri/resources/lib src-tauri/icons !src-tauri/icons/icon.png src-tauri/gen/apple +src-tauri/gen/android src-tauri/resources/bin # Helper tools .opencode OpenCode.md +Claude.md archive/ .cache/ @@ -60,3 +62,4 @@ src-tauri/resources/ ## test test-data llm-docs +.claude/agents diff --git a/Makefile b/Makefile index 9a03ddaad..a3fa63665 100644 --- a/Makefile +++ b/Makefile @@ -41,9 +41,25 @@ else @echo "Not macOS; skipping Rust target installation." endif +# Install required Rust targets for Android builds +install-android-rust-targets: + @echo "Checking and installing Android Rust targets..." + @rustup target list --installed | grep -q "aarch64-linux-android" || rustup target add aarch64-linux-android + @rustup target list --installed | grep -q "armv7-linux-androideabi" || rustup target add armv7-linux-androideabi + @rustup target list --installed | grep -q "i686-linux-android" || rustup target add i686-linux-android + @rustup target list --installed | grep -q "x86_64-linux-android" || rustup target add x86_64-linux-android + @echo "Android Rust targets ready!" + +# Install required Rust targets for iOS builds +install-ios-rust-targets: + @echo "Checking and installing iOS Rust targets..." 
+ @rustup target list --installed | grep -q "aarch64-apple-ios" || rustup target add aarch64-apple-ios + @rustup target list --installed | grep -q "aarch64-apple-ios-sim" || rustup target add aarch64-apple-ios-sim + @rustup target list --installed | grep -q "x86_64-apple-ios" || rustup target add x86_64-apple-ios + @echo "iOS Rust targets ready!" + dev: install-and-build yarn download:bin - yarn download:lib yarn dev # Web application targets @@ -58,12 +74,41 @@ build-web-app: install-web-app yarn build:core yarn build:web-app -serve-web-app: +serve-web-app: yarn serve:web-app build-serve-web-app: build-web-app yarn serve:web-app +# Mobile +dev-android: install-and-build install-android-rust-targets + @echo "Setting up Android development environment..." + @if [ ! -d "src-tauri/gen/android" ]; then \ + echo "Android app not initialized. Initializing..."; \ + yarn tauri android init; \ + fi + @echo "Sourcing Android environment setup..." + @bash autoqa/scripts/setup-android-env.sh echo "Android environment ready" + @echo "Starting Android development server..." + yarn dev:android + +dev-ios: install-and-build install-ios-rust-targets + @echo "Setting up iOS development environment..." +ifeq ($(shell uname -s),Darwin) + @if [ ! -d "src-tauri/gen/ios" ]; then \ + echo "iOS app not initialized. Initializing..."; \ + yarn tauri ios init; \ + fi + @echo "Checking iOS development requirements..." + @xcrun --version > /dev/null 2>&1 || (echo "❌ Xcode command line tools not found. Install with: xcode-select --install" && exit 1) + @xcrun simctl list devices available | grep -q "iPhone\|iPad" || (echo "❌ No iOS simulators found. Install simulators through Xcode." && exit 1) + @echo "Starting iOS development server..." + yarn dev:ios +else + @echo "❌ iOS development is only supported on macOS" + @exit 1 +endif + # Linting lint: install-and-build yarn lint @@ -71,9 +116,7 @@ lint: install-and-build # Testing test: lint yarn download:bin - yarn download:lib ifeq ($(OS),Windows_NT) - yarn download:windows-installer endif yarn test yarn copy:assets:tauri diff --git a/autoqa/scripts/setup-android-env.sh b/autoqa/scripts/setup-android-env.sh new file mode 100755 index 000000000..2cf18ae8f --- /dev/null +++ b/autoqa/scripts/setup-android-env.sh @@ -0,0 +1,80 @@ +#!/bin/bash + +# Android Development Environment Setup for Jan + +# Ensure rustup's Rust toolchain is used instead of Homebrew's +export PATH="$HOME/.cargo/bin:$PATH" + +# Set JAVA_HOME for Android builds +export JAVA_HOME=/opt/homebrew/opt/openjdk@17/libexec/openjdk.jdk/Contents/Home +export PATH="/opt/homebrew/opt/openjdk@17/bin:$PATH" + +export ANDROID_HOME="$HOME/Library/Android/sdk" +export ANDROID_NDK_ROOT="$HOME/Library/Android/sdk/ndk/29.0.14033849" +export NDK_HOME="$HOME/Library/Android/sdk/ndk/29.0.14033849" + +# Add Android tools to PATH +export PATH=$PATH:$ANDROID_HOME/platform-tools:$ANDROID_HOME/tools:$ANDROID_HOME/cmdline-tools/latest/bin:$ANDROID_HOME/emulator:$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin + +# Set up CC and CXX for Android compilation +export CC_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang" +export CXX_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang++" +export AR_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar" +export RANLIB_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ranlib" + +# Additional environment variables for 
Rust cross-compilation +export CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang" + +# Only set global CC and AR for Android builds (when IS_ANDROID is set) +if [ "$IS_ANDROID" = "true" ]; then + export CC="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang" + export AR="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar" + echo "Global CC and AR set for Android build" +fi + +# Create symlinks for Android tools if they don't exist +mkdir -p ~/.local/bin +if [ ! -f ~/.local/bin/aarch64-linux-android-ranlib ]; then + ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ranlib ~/.local/bin/aarch64-linux-android-ranlib +fi +if [ ! -f ~/.local/bin/aarch64-linux-android-clang ]; then + ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang ~/.local/bin/aarch64-linux-android-clang +fi +if [ ! -f ~/.local/bin/aarch64-linux-android-clang++ ]; then + ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang++ ~/.local/bin/aarch64-linux-android-clang++ +fi + +# Fix the broken clang symlinks by ensuring base clang is available +if [ ! -f ~/.local/bin/clang ]; then + ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/clang ~/.local/bin/clang +fi +if [ ! -f ~/.local/bin/clang++ ]; then + ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/clang++ ~/.local/bin/clang++ +fi + +# Create symlinks for target-specific ar tools +if [ ! -f ~/.local/bin/aarch64-linux-android-ar ]; then + ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar ~/.local/bin/aarch64-linux-android-ar +fi +export PATH="$HOME/.local/bin:$PATH" + +echo "Android environment configured:" +echo "ANDROID_HOME: $ANDROID_HOME" +echo "ANDROID_NDK_ROOT: $ANDROID_NDK_ROOT" +echo "PATH includes NDK toolchain: $(echo $PATH | grep -o "ndk.*bin" || echo "NOT FOUND")" + +# Verify required tools +echo -e "\nChecking required tools:" +which adb && echo "✅ adb found" || echo "❌ adb not found" +which emulator && echo "✅ emulator found" || echo "❌ emulator not found" +which $CC_aarch64_linux_android && echo "✅ Android clang found" || echo "❌ Android clang not found" + +# Show available AVDs +echo -e "\nAvailable Android Virtual Devices:" +emulator -list-avds 2>/dev/null || echo "No AVDs found" + +# Execute the provided command +if [ "$1" ]; then + echo -e "\nExecuting: $@" + exec "$@" +fi \ No newline at end of file diff --git a/core/package.json b/core/package.json index eec56a733..203eaf293 100644 --- a/core/package.json +++ b/core/package.json @@ -27,11 +27,13 @@ "devDependencies": { "@npmcli/arborist": "^7.1.0", "@types/node": "^22.10.0", + "@types/react": "19.1.2", "@vitest/coverage-v8": "^2.1.8", "@vitest/ui": "^2.1.8", "eslint": "8.57.0", "happy-dom": "^15.11.6", "pacote": "^21.0.0", + "react": "19.0.0", "request": "^2.88.2", "request-progress": "^3.0.0", "rimraf": "^6.0.1", @@ -44,5 +46,8 @@ "rxjs": "^7.8.1", "ulidx": "^2.3.0" }, + "peerDependencies": { + "react": "19.0.0" + }, "packageManager": "yarn@4.5.3" } diff --git a/core/rolldown.config.mjs b/core/rolldown.config.mjs index fd3329ee0..fbb2bd351 100644 --- a/core/rolldown.config.mjs +++ b/core/rolldown.config.mjs @@ -10,7 +10,7 @@ export default defineConfig([ sourcemap: true, }, platform: 'browser', - external: ['path'], + external: ['path', 'react', 'react-dom', 'react/jsx-runtime'], define: { NODE: JSON.stringify(`${pkgJson.name}/${pkgJson.node}`), VERSION: 
JSON.stringify(pkgJson.version), diff --git a/core/src/browser/extensions/conversational.test.ts b/core/src/browser/extensions/conversational.test.ts index c08468905..44d1e9b4f 100644 --- a/core/src/browser/extensions/conversational.test.ts +++ b/core/src/browser/extensions/conversational.test.ts @@ -250,4 +250,4 @@ describe('ConversationalExtension', () => { expect(retrievedAssistant.modelId).toBe('') }) -}) \ No newline at end of file +}) diff --git a/core/src/browser/extensions/engines/LocalOAIEngine.test.ts b/core/src/browser/extensions/engines/LocalOAIEngine.test.ts index 5f2563d56..3523c3ce6 100644 --- a/core/src/browser/extensions/engines/LocalOAIEngine.test.ts +++ b/core/src/browser/extensions/engines/LocalOAIEngine.test.ts @@ -131,4 +131,4 @@ describe('LocalOAIEngine', () => { expect(engine.loadedModel).toBeUndefined() }) }) -}) \ No newline at end of file +}) diff --git a/core/src/browser/extensions/mcp.test.ts b/core/src/browser/extensions/mcp.test.ts index ece971809..8ba3f200a 100644 --- a/core/src/browser/extensions/mcp.test.ts +++ b/core/src/browser/extensions/mcp.test.ts @@ -96,4 +96,4 @@ describe('MCPExtension', () => { expect(healthy).toBe(true) }) }) -}) \ No newline at end of file +}) diff --git a/core/src/browser/extensions/mcp.ts b/core/src/browser/extensions/mcp.ts index 7f30a5428..74a008d40 100644 --- a/core/src/browser/extensions/mcp.ts +++ b/core/src/browser/extensions/mcp.ts @@ -1,5 +1,6 @@ -import { MCPInterface, MCPTool, MCPToolCallResult } from '../../types' +import { MCPInterface, MCPTool, MCPToolCallResult, MCPToolComponentProps } from '../../types' import { BaseExtension, ExtensionTypeEnum } from '../extension' +import type { ComponentType } from 'react' /** * MCP (Model Context Protocol) extension for managing tools and server communication. 
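The hunk that follows adds two optional hooks to `MCPExtension` (`getToolComponent` and `getDefaultDisabledTools`) alongside the new `MCPToolComponentProps` type. Below is a minimal sketch of how a caller might consume them; it assumes the types are re-exported from `@janhq/core` and that `getToolComponent` returns a `ComponentType<MCPToolComponentProps>`, and the `resolveToolUI` helper is illustrative rather than code from this patch:

```ts
// Sketch only: consuming the optional MCP hooks introduced in this PR.
// Assumptions: '@janhq/core' re-exports these types; the ComponentType generic
// matches the props interface added in mcpEntity.ts.
import type { ComponentType } from 'react'
import type { MCPExtension, MCPToolComponentProps } from '@janhq/core'

// Hypothetical helper: probe the optional hooks and fall back gracefully,
// so extensions that do not implement them keep working unchanged.
async function resolveToolUI(extension: MCPExtension): Promise<{
  ToolComponent: ComponentType<MCPToolComponentProps> | null
  disabledByDefault: string[]
}> {
  // Custom settings UI for this server's tools, if the extension provides one.
  const ToolComponent = extension.getToolComponent?.() ?? null
  // Tool names that should start disabled for new users, if declared.
  const disabledByDefault = (await extension.getDefaultDisabledTools?.()) ?? []
  return { ToolComponent, disabledByDefault }
}
```

Because both hooks are declared optional, existing MCP extensions continue to compile and run without changes.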
@@ -18,4 +19,16 @@ export abstract class MCPExtension extends BaseExtension implements MCPInterface abstract getConnectedServers(): Promise abstract refreshTools(): Promise abstract isHealthy(): Promise -} \ No newline at end of file + + /** + * Optional method to provide a custom UI component for tools + * @returns A React component or null if no custom component is provided + */ + getToolComponent?(): ComponentType | null + + /** + * Optional method to get the list of tool names that should be disabled by default + * @returns Array of tool names that should be disabled by default for new users + */ + getDefaultDisabledTools?(): Promise +} diff --git a/core/src/browser/models/manager.test.ts b/core/src/browser/models/manager.test.ts index 90626b22e..0e4e728cf 100644 --- a/core/src/browser/models/manager.test.ts +++ b/core/src/browser/models/manager.test.ts @@ -131,4 +131,4 @@ describe('ModelManager', () => { expect(modelManager.models.get('model-2')).toEqual(model2) }) }) -}) \ No newline at end of file +}) diff --git a/core/src/test/setup.ts b/core/src/test/setup.ts index c597a3748..6f3d766bf 100644 --- a/core/src/test/setup.ts +++ b/core/src/test/setup.ts @@ -16,4 +16,4 @@ if (!window.core) { }) } -// Add any other global mocks needed for core tests \ No newline at end of file +// Add any other global mocks needed for core tests diff --git a/core/src/types/mcp/index.ts b/core/src/types/mcp/index.ts index 4ffd501fc..bd809d790 100644 --- a/core/src/types/mcp/index.ts +++ b/core/src/types/mcp/index.ts @@ -1,2 +1,2 @@ export * from './mcpEntity' -export * from './mcpInterface' \ No newline at end of file +export * from './mcpInterface' diff --git a/core/src/types/mcp/mcpEntity.ts b/core/src/types/mcp/mcpEntity.ts index a2259e52e..6c7e0c598 100644 --- a/core/src/types/mcp/mcpEntity.ts +++ b/core/src/types/mcp/mcpEntity.ts @@ -21,4 +21,18 @@ export interface MCPServerInfo { name: string connected: boolean tools?: MCPTool[] -} \ No newline at end of file +} + +/** + * Props for MCP tool UI components + */ +export interface MCPToolComponentProps { + /** List of available MCP tools */ + tools: MCPTool[] + + /** Function to check if a specific tool is currently enabled */ + isToolEnabled: (toolName: string) => boolean + + /** Function to toggle a tool's enabled/disabled state */ + onToolToggle: (toolName: string, enabled: boolean) => void +} diff --git a/core/src/types/mcp/mcpInterface.ts b/core/src/types/mcp/mcpInterface.ts index 15152a83b..a656e9f66 100644 --- a/core/src/types/mcp/mcpInterface.ts +++ b/core/src/types/mcp/mcpInterface.ts @@ -29,4 +29,4 @@ export interface MCPInterface { * Check if MCP service is healthy */ isHealthy(): Promise -} \ No newline at end of file +} diff --git a/docs/_redirects b/docs/_redirects index 17342a020..b72c65400 100644 --- a/docs/_redirects +++ b/docs/_redirects @@ -112,6 +112,12 @@ /docs/remote-models/openrouter /docs/desktop/remote-models/openrouter 302 /docs/server-examples/llmcord /docs/desktop/server-examples/llmcord 302 /docs/server-examples/tabby /docs/desktop/server-examples/tabby 302 +/docs/built-in/tensorrt-llm /docs/desktop/llama-cpp 302 +/docs/desktop/docs/desktop/linux /docs/desktop/install/linux 302 +/windows /docs/desktop/install/windows 302 +/docs/quickstart /docs/ 302 +/docs/desktop/mac /docs/desktop/install/mac 302 +/handbook/open-superintelligence /handbook/why/open-superintelligence 302 /guides/integrations/continue/ /docs/desktop/server-examples/continue-dev 302 /continue-dev /docs/desktop/server-examples/continue-dev 302 @@ -130,4 
+136,4 @@ /local-server/troubleshooting /docs/desktop/troubleshooting 302 /mcp /docs/desktop/mcp 302 /quickstart /docs/desktop/quickstart 302 -/server-examples/continue-dev /docs/desktop/server-examples/continue-dev 302 \ No newline at end of file +/server-examples/continue-dev /docs/desktop/server-examples/continue-dev 302 diff --git a/docs/plopfile.js b/docs/plopfile.js index a31caa889..0c384af85 100644 --- a/docs/plopfile.js +++ b/docs/plopfile.js @@ -6,7 +6,7 @@ const camelCase = (str) => { return str.replace(/[-_](\w)/g, (_, c) => c.toUpperCase()) } -const categories = ['building-jan', 'research'] +const categories = ['building-jan', 'research', 'guides'] /** * @param {import("plop").NodePlopAPI} plop diff --git a/docs/public/assets/images/changelog/jan-release-v0.7.0.jpeg b/docs/public/assets/images/changelog/jan-release-v0.7.0.jpeg new file mode 100644 index 000000000..cb0d4a3a9 Binary files /dev/null and b/docs/public/assets/images/changelog/jan-release-v0.7.0.jpeg differ diff --git a/docs/public/assets/images/general/ai-for-teacher.jpeg b/docs/public/assets/images/general/ai-for-teacher.jpeg new file mode 100644 index 000000000..c600389b3 Binary files /dev/null and b/docs/public/assets/images/general/ai-for-teacher.jpeg differ diff --git a/docs/public/assets/images/general/assistants-ai-for-teachers.jpeg b/docs/public/assets/images/general/assistants-ai-for-teachers.jpeg new file mode 100644 index 000000000..b9aedf135 Binary files /dev/null and b/docs/public/assets/images/general/assistants-ai-for-teachers.jpeg differ diff --git a/docs/public/assets/images/general/chatgpt-alternative-jan.jpeg b/docs/public/assets/images/general/chatgpt-alternative-jan.jpeg new file mode 100644 index 000000000..c4e18913a Binary files /dev/null and b/docs/public/assets/images/general/chatgpt-alternative-jan.jpeg differ diff --git a/docs/src/pages/post/_assets/cover-kernel-benchmarking.png b/docs/public/assets/images/general/cover-kernel-benchmarking.png similarity index 100% rename from docs/src/pages/post/_assets/cover-kernel-benchmarking.png rename to docs/public/assets/images/general/cover-kernel-benchmarking.png diff --git a/docs/src/pages/post/_assets/deepseek-r1-locally-jan.jpg b/docs/public/assets/images/general/deepseek-r1-locally-jan.jpg similarity index 100% rename from docs/src/pages/post/_assets/deepseek-r1-locally-jan.jpg rename to docs/public/assets/images/general/deepseek-r1-locally-jan.jpg diff --git a/docs/src/pages/post/_assets/gpt-oss locally.jpeg b/docs/public/assets/images/general/gpt-oss locally.jpeg similarity index 100% rename from docs/src/pages/post/_assets/gpt-oss locally.jpeg rename to docs/public/assets/images/general/gpt-oss locally.jpeg diff --git a/docs/public/assets/images/general/is-chatgpt-down.jpg b/docs/public/assets/images/general/is-chatgpt-down.jpg new file mode 100644 index 000000000..2a515d344 Binary files /dev/null and b/docs/public/assets/images/general/is-chatgpt-down.jpg differ diff --git a/docs/public/assets/images/general/jan-ai-for-teacher.mp4 b/docs/public/assets/images/general/jan-ai-for-teacher.mp4 new file mode 100644 index 000000000..3930e6e26 Binary files /dev/null and b/docs/public/assets/images/general/jan-ai-for-teacher.mp4 differ diff --git a/docs/public/assets/images/general/jan-assistants-ai-for-legal.jpeg b/docs/public/assets/images/general/jan-assistants-ai-for-legal.jpeg new file mode 100644 index 000000000..b49de6380 Binary files /dev/null and b/docs/public/assets/images/general/jan-assistants-ai-for-legal.jpeg differ diff --git 
a/docs/public/assets/images/general/jan-for-ai-law-assistant-chat.jpeg b/docs/public/assets/images/general/jan-for-ai-law-assistant-chat.jpeg new file mode 100644 index 000000000..5501b5e70 Binary files /dev/null and b/docs/public/assets/images/general/jan-for-ai-law-assistant-chat.jpeg differ diff --git a/docs/src/pages/post/_assets/offline-chatgpt-alternatives-jan.jpg b/docs/public/assets/images/general/offline-chatgpt-alternatives-jan.jpg similarity index 100% rename from docs/src/pages/post/_assets/offline-chatgpt-alternatives-jan.jpg rename to docs/public/assets/images/general/offline-chatgpt-alternatives-jan.jpg diff --git a/docs/src/pages/post/_assets/qwen3-settings-jan-ai.jpeg b/docs/public/assets/images/general/qwen3-30b-settings.jpg similarity index 100% rename from docs/src/pages/post/_assets/qwen3-settings-jan-ai.jpeg rename to docs/public/assets/images/general/qwen3-30b-settings.jpg diff --git a/docs/src/pages/post/_assets/research-result-local.png b/docs/public/assets/images/general/research-result-local.png similarity index 100% rename from docs/src/pages/post/_assets/research-result-local.png rename to docs/public/assets/images/general/research-result-local.png diff --git a/docs/src/pages/post/_assets/run-ai-locally-with-jan.jpg b/docs/public/assets/images/general/run-ai-locally-with-jan.jpg similarity index 100% rename from docs/src/pages/post/_assets/run-ai-locally-with-jan.jpg rename to docs/public/assets/images/general/run-ai-locally-with-jan.jpg diff --git a/docs/public/sitemap-0.xml b/docs/public/sitemap-0.xml deleted file mode 100644 index 131222295..000000000 --- a/docs/public/sitemap-0.xml +++ /dev/null @@ -1,125 +0,0 @@ - - -https://jan.ai2025-09-24T03:40:05.491Zdaily1 -https://jan.ai/api-reference2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/api-reference/api-reference2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/api-reference/architecture2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/api-reference/configuration2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/api-reference/development2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/api-reference/installation2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/blog2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2023-12-21-faster-inference-across-platform2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-01-16-settings-options-right-panel2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-01-29-local-api-server2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-02-05-jan-data-folder2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-02-10-jan-is-more-stable2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-02-26-home-servers-with-helm2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-03-06-ui-revamp-settings2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-03-11-import-models2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-03-19-nitro-tensorrt-llm-extension2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-04-02-groq-api-integration2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-04-15-new-mistral-extension2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-04-25-llama3-command-r-hugginface2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-05-20-llamacpp-upgrade-new-remote-models2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-05-28-cohere-aya-23-8b-35b-phi-3-medium2025-09-24T03:40:05.492Zdaily1 
-https://jan.ai/changelog/2024-06-21-nvidia-nim-support2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-07-15-claude-3-5-support2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-09-01-llama3-1-gemma2-support2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-09-17-improved-cpu-performance2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-10-24-jan-stable2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-11-22-jan-bugs2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-11.14-jan-supports-qwen-coder2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-12-03-jan-is-faster2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-12-05-jan-hot-fix-mac2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2024-12-30-jan-new-privacy2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-01-06-key-issues-resolved2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-01-23-deepseek-r1-jan2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-02-18-advanced-llama.cpp-settings2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-03-14-jan-security-patch2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-05-14-jan-qwen3-patch2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-06-19-jan-ui-revamp2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-06-26-jan-nano-mcp2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-07-17-responsive-ui2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-07-31-llamacpp-tutorials2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-08-07-gpt-oss2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-08-14-general-improvs2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-08-28-image-support2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/changelog/2025-09-18-auto-optimize-vision-imports2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/api-server2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/assistants2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/data-folder2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/install/linux2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/install/mac2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/install/windows2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/jan-models/jan-nano-1282025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/jan-models/jan-nano-322025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/jan-models/jan-v12025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/jan-models/lucy2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/llama-cpp2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/llama-cpp-server2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/manage-models2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/mcp2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/mcp-examples/browser/browserbase2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/mcp-examples/data-analysis/e2b2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/mcp-examples/data-analysis/jupyter2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/mcp-examples/deepresearch/octagon2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/mcp-examples/design/canva2025-09-24T03:40:05.492Zdaily1 
-https://jan.ai/docs/desktop/mcp-examples/productivity/linear2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/mcp-examples/productivity/todoist2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/mcp-examples/search/exa2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/mcp-examples/search/serper2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/model-parameters2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/privacy2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/privacy-policy2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/quickstart2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/remote-models/anthropic2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/remote-models/cohere2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/remote-models/google2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/remote-models/groq2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/remote-models/huggingface2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/remote-models/mistralai2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/remote-models/openai2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/remote-models/openrouter2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/server-examples/continue-dev2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/server-examples/llmcord2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/server-examples/n8n2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/server-examples/tabby2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/server-settings2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/server-troubleshooting2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/settings2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/desktop/troubleshooting2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/api-reference2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/api-reference-administration2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/api-reference-authentication2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/api-reference-chat2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/api-reference-chat-conversations2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/api-reference-conversations2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/api-reference-jan-responses2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/api-reference-jan-server2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/architecture2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/configuration2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/development2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/installation2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/docs/server/overview2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/download2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/handbook2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/handbook/betting-on-open-source2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/handbook/open-superintelligence2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/post/benchmarking-nvidia-tensorrt-llm2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/post/bitdefender2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/post/data-is-moat2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/post/deepresearch2025-09-24T03:40:05.492Zdaily1 
-https://jan.ai/post/deepseek-r1-locally2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/post/jan-v1-for-research2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/post/offline-chatgpt-alternative2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/post/qwen3-settings2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/post/rag-is-not-enough2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/post/run-ai-models-locally2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/post/run-gpt-oss-locally2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/privacy2025-09-24T03:40:05.492Zdaily1 -https://jan.ai/support2025-09-24T03:40:05.492Zdaily1 - \ No newline at end of file diff --git a/docs/src/components/Blog/index.tsx b/docs/src/components/Blog/index.tsx index ca10cf408..16899df2c 100644 --- a/docs/src/components/Blog/index.tsx +++ b/docs/src/components/Blog/index.tsx @@ -19,6 +19,10 @@ const Blog = () => { name: 'Research', id: 'research', }, + { + name: 'Guides', + id: 'guides', + }, ] return ( diff --git a/docs/src/components/FooterMenu/index.tsx b/docs/src/components/FooterMenu/index.tsx index 68e1e6e78..317def75f 100644 --- a/docs/src/components/FooterMenu/index.tsx +++ b/docs/src/components/FooterMenu/index.tsx @@ -16,7 +16,10 @@ const FOOTER_MENUS: FooterMenu[] = [ { title: 'Company', links: [ - { name: 'Vision', href: '/', comingSoon: true }, + { + name: 'Open Superintelligence', + href: '/handbook/why/open-superintelligence', + }, { name: 'Handbook', href: '/handbook' }, { name: 'Community', href: 'https://discord.com/invite/FTk2MvZwJH' }, { name: 'Careers', href: 'https://menlo.bamboohr.com/careers' }, diff --git a/docs/src/components/Navbar.tsx b/docs/src/components/Navbar.tsx index 51044e9c7..b12fcaa73 100644 --- a/docs/src/components/Navbar.tsx +++ b/docs/src/components/Navbar.tsx @@ -4,7 +4,7 @@ import { useRouter } from 'next/router' import { cn } from '@/lib/utils' import { FaDiscord, FaGithub } from 'react-icons/fa' import { FiDownload } from 'react-icons/fi' -import { FaXTwitter } from 'react-icons/fa6' +import { FaXTwitter, FaLinkedinIn } from 'react-icons/fa6' import { Button } from './ui/button' import LogoJanSVG from '@/assets/icons/logo-jan.svg' @@ -113,6 +113,43 @@ const Navbar = ({ noScroll }: { noScroll?: boolean }) => { + +
  • + +
  • @@ -232,6 +269,14 @@ const Navbar = ({ noScroll }: { noScroll?: boolean }) => { > + + + +} + +const StatusIcon = ({ status }: { status: string }) => { + switch (status) { + case 'operational': + return + case 'degraded': + case 'partial_outage': + return + case 'major_outage': + return + case 'under_maintenance': + return + default: + return + } +} + +const getStatusColor = (status: string) => { + switch (status) { + case 'operational': + return 'bg-green-100 text-green-800 border-green-200 dark:bg-green-900/20 dark:text-green-300 dark:border-green-800' + case 'degraded': + case 'partial_outage': + return 'bg-yellow-100 text-yellow-800 border-yellow-200 dark:bg-yellow-900/20 dark:text-yellow-300 dark:border-yellow-800' + case 'major_outage': + return 'bg-red-100 text-red-800 border-red-200 dark:bg-red-900/20 dark:text-red-300 dark:border-red-800' + case 'under_maintenance': + return 'bg-blue-100 text-blue-800 border-blue-200 dark:bg-blue-900/20 dark:text-blue-300 dark:border-blue-800' + default: + return 'bg-gray-100 text-gray-800 border-gray-200 dark:bg-gray-900/20 dark:text-gray-300 dark:border-gray-800' + } +} + +const getStatusText = (status: string) => { + switch (status) { + case 'operational': + return 'All Systems Operational' + case 'degraded': + return 'Degraded Performance' + case 'partial_outage': + return 'Partial Service Outage' + case 'major_outage': + return 'Major Service Outage' + case 'under_maintenance': + return 'Under Maintenance' + default: + return 'Status Unknown' + } +} + +export const OpenAIStatusChecker: React.FC = () => { + const [statusData, setStatusData] = useState(null) + const [loading, setLoading] = useState(true) + const [error, setError] = useState(null) + const [lastRefresh, setLastRefresh] = useState(new Date()) + + const fetchStatus = useCallback(async () => { + setLoading(true) + setError(null) + + try { + console.log('Fetching real OpenAI status...') + + // Use CORS proxy to fetch real OpenAI status + const proxyUrl = 'https://api.allorigins.win/get?url=' + const targetUrl = 'https://status.openai.com/api/v2/status.json' + + const response = await fetch(proxyUrl + encodeURIComponent(targetUrl)) + + if (!response.ok) { + throw new Error(`Proxy returned ${response.status}`) + } + + const proxyData = await response.json() + const openaiData = JSON.parse(proxyData.contents) + + console.log('Real OpenAI data received:', openaiData) + + // Transform real OpenAI data to our format + const transformedData: StatusData = { + status: mapOpenAIStatusClient( + openaiData.status?.indicator || 'operational' + ), + lastUpdated: openaiData.page?.updated_at || new Date().toISOString(), + incidents: (openaiData.incidents || []).slice(0, 3), + } + + setStatusData(transformedData) + setLastRefresh(new Date()) + console.log('✅ Real OpenAI status loaded successfully!') + } catch (err) { + console.error('Failed to fetch real status:', err) + + // Fallback: try alternative proxy + try { + console.log('Trying alternative proxy...') + const altResponse = await fetch( + `https://cors-anywhere.herokuapp.com/https://status.openai.com/api/v2/summary.json` + ) + + if (altResponse.ok) { + const altData = await altResponse.json() + setStatusData({ + status: mapOpenAIStatusClient( + altData.status?.indicator || 'operational' + ), + lastUpdated: new Date().toISOString(), + incidents: [], + }) + setLastRefresh(new Date()) + console.log('✅ Alternative proxy worked!') + return + } + } catch (altErr) { + console.log('Alternative proxy also failed') + } + + // Final fallback + 
setError('Unable to fetch real-time status') + setStatusData({ + status: 'operational' as const, + lastUpdated: new Date().toISOString(), + incidents: [], + }) + setLastRefresh(new Date()) + console.log('Using fallback status') + } finally { + setLoading(false) + } + }, []) + + // Client-side status mapping function + const mapOpenAIStatusClient = (indicator: string): StatusData['status'] => { + switch (indicator.toLowerCase()) { + case 'none': + case 'operational': + return 'operational' + case 'minor': + return 'degraded' + case 'major': + return 'partial_outage' + case 'critical': + return 'major_outage' + case 'maintenance': + return 'under_maintenance' + default: + return 'operational' as const // Default to operational + } + } + + useEffect(() => { + fetchStatus() + // Refresh every 2 minutes for more real-time updates + const interval = setInterval(fetchStatus, 2 * 60 * 1000) + return () => clearInterval(interval) + }, [fetchStatus]) + + const handleRefresh = () => { + fetchStatus() + } + + if (loading && !statusData) { + return ( +
    +
    + + + Checking OpenAI Status... + +
    +
    + ) + } + + if (error) { + return ( +
    +
    +
    + +
    +

    + Unable to Check Status +

    +

    {error}

    +
    +
    + +
    +
    + ) + } + + return ( +
    + ) +} diff --git a/docs/src/pages/api-reference/installation.mdx b/docs/src/pages/api-reference/installation.mdx index de0609a08..266962089 100644 --- a/docs/src/pages/api-reference/installation.mdx +++ b/docs/src/pages/api-reference/installation.mdx @@ -3,7 +3,7 @@ title: Installation description: Install and deploy Jan Server on Kubernetes using minikube and Helm. --- -## Prerequisites +# Prerequisites Jan Server requires the following tools installed on your system: diff --git a/docs/src/pages/changelog/2025-10-02-jan-projects.mdx b/docs/src/pages/changelog/2025-10-02-jan-projects.mdx new file mode 100644 index 000000000..851e26403 --- /dev/null +++ b/docs/src/pages/changelog/2025-10-02-jan-projects.mdx @@ -0,0 +1,28 @@ +--- +title: "Jan v0.7.0: Jan Projects" +version: 0.7.0 +description: "Jan v0.7.0 introduces Projects, model renaming, llama.cpp auto-tuning, model stats, and Azure support." +date: 2025-10-02 +ogImage: "/assets/images/changelog/jan-release-v0.7.0.jpeg" +--- + +import ChangelogHeader from "@/components/Changelog/ChangelogHeader" +import { Callout } from 'nextra/components' + + + +## Jan v0.7.0: Jan Projects + +Jan v0.7.0 is live! This release focuses on helping you organize your workspace and better understand how models run. + +### What’s new +- **Projects**: Group related chats under one project for a cleaner workflow. +- **Rename models**: Give your models custom names for easier identification. +- **Model context stats**: See context usage when a model runs. +- **Auto-loaded cloud models**: Cloud model names now appear automatically. + +--- + +Update your Jan or [download the latest version](https://jan.ai/). + +For the complete list of changes, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.7.0). diff --git a/docs/src/pages/changelog/2025-10-03-jan-stability-improvements.mdx b/docs/src/pages/changelog/2025-10-03-jan-stability-improvements.mdx new file mode 100644 index 000000000..df756ccfc --- /dev/null +++ b/docs/src/pages/changelog/2025-10-03-jan-stability-improvements.mdx @@ -0,0 +1,26 @@ +--- +title: "Jan v0.7.1: Fixes Windows Version Revert & OpenRouter Models" +version: 0.7.1 +description: "Jan v0.7.1 focuses on bug fixes, including a windows version revert and improvements to OpenRouter models." +date: 2025-10-03 +--- + +import ChangelogHeader from "@/components/Changelog/ChangelogHeader" +import { Callout } from 'nextra/components' + + + +### Bug Fixes: Windows Version Revert & OpenRouter Models + +#### Two quick fixes: +- Jan no longer reverts to an older version on load +- OpenRouter can now add models again +- Add headers for anthropic request to fetch models + +--- + +Update your Jan or [download the latest version](https://jan.ai/). + +For the complete list of changes, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.7.1). + + diff --git a/docs/src/pages/docs/desktop/install/linux.mdx b/docs/src/pages/docs/desktop/install/linux.mdx index 22e0ff1e4..2d9d39f9f 100644 --- a/docs/src/pages/docs/desktop/install/linux.mdx +++ b/docs/src/pages/docs/desktop/install/linux.mdx @@ -1,11 +1,12 @@ --- title: Linux -description: Install Jan to run AI models locally on Linux. Works offline with GPU acceleration on Ubuntu, Debian, and other distributions. +description: Download Jan on Linux to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline. 
keywords: [ Jan, Customizable Intelligence, LLM, local AI, + Jan on Linux, privacy focus, free and open source, private and offline, @@ -18,15 +19,17 @@ keywords: installation, "desktop" ] +twitter: + card: summary_large_image + site: "@jandotai" + title: "Jan on Linux" + description: "Download Jan on Linux to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline." --- - import FAQBox from '@/components/FaqBox' import { Tabs, Callout, Steps } from 'nextra/components' import { Settings } from 'lucide-react' - - # Linux Installation Instructions for installing Jan on Linux. diff --git a/docs/src/pages/docs/desktop/install/mac.mdx b/docs/src/pages/docs/desktop/install/mac.mdx index 827329d6e..b784f2ecf 100644 --- a/docs/src/pages/docs/desktop/install/mac.mdx +++ b/docs/src/pages/docs/desktop/install/mac.mdx @@ -1,11 +1,11 @@ --- title: Mac -description: Get started quickly with Jan - a local AI that runs on your computer. Install Jan and pick your model to start chatting. -keywords: +description: Download Jan on Mac to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline. [ Jan, Customizable Intelligence, LLM, local AI, + Jan on Mac, privacy focus, free and open source, private and offline, @@ -18,6 +18,11 @@ keywords: installation, "desktop" ] +twitter: + card: summary_large_image + site: "@jandotai" + title: "Jan on Mac" + description: "Download Jan on Mac to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline." --- import { Tabs } from 'nextra/components' diff --git a/docs/src/pages/docs/desktop/install/windows.mdx b/docs/src/pages/docs/desktop/install/windows.mdx index c4dca989e..6e858a2b5 100644 --- a/docs/src/pages/docs/desktop/install/windows.mdx +++ b/docs/src/pages/docs/desktop/install/windows.mdx @@ -1,10 +1,11 @@ --- title: Windows -description: Install Jan to run AI models locally on Windows. Works offline with GPU acceleration on Windows 10 and 11. +description: Download Jan on Windows to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline. keywords: [ Jan, Customizable Intelligence, LLM, + Jan on Windows, local AI, privacy focus, free and open source, @@ -18,6 +19,11 @@ keywords: installation, "desktop" ] +twitter: + card: summary_large_image + site: "@jandotai" + title: "Jan on Windows" + description: "Download Jan on Windows to run AI models locally. Jan is a free, open-source ChatGPT alternative to run offline." --- import { Tabs, Callout, Steps } from 'nextra/components' diff --git a/docs/src/pages/docs/desktop/server-examples/tabby.mdx b/docs/src/pages/docs/desktop/server-examples/tabby.mdx index 917f40550..f25c89dab 100644 --- a/docs/src/pages/docs/desktop/server-examples/tabby.mdx +++ b/docs/src/pages/docs/desktop/server-examples/tabby.mdx @@ -90,7 +90,7 @@ Refer to the following documentation to install the Tabby extension on your favo Tabby offers an [Answer Engine](https://tabby.tabbyml.com/docs/administration/answer-engine/) on the homepage, which can leverage the Jan LLM and related contexts like code, documentation, and web pages to answer user questions. -Simply open the Tabby homepage at [localhost:8080](http://localhost:8080) and ask your questions. +Simply open the Tabby homepage at http://localhost:8080 and ask your questions. 
### IDE Chat Sidebar diff --git a/docs/src/pages/docs/desktop/troubleshooting.mdx b/docs/src/pages/docs/desktop/troubleshooting.mdx index c2b84c03a..6d6c02703 100644 --- a/docs/src/pages/docs/desktop/troubleshooting.mdx +++ b/docs/src/pages/docs/desktop/troubleshooting.mdx @@ -329,7 +329,7 @@ When you start a chat with a model and encounter a **Failed to Fetch** or **Some **1. Check System & Hardware Requirements** - Hardware dependencies: Ensure your device meets all [hardware requirements](troubleshooting) -- OS: Ensure your operating system meets the minimum requirements ([Mac](https://www.jan.ai/docs/desktop/install/mac#minimum-requirements), [Windows](/windows#compatibility), [Linux](docs/desktop/linux#compatibility)) +- OS: Ensure your operating system meets the minimum requirements ([Mac](https://www.jan.ai/docs/desktop/install/mac#minimum-requirements), [Windows](/windows#compatibility), [Linux](https://www.jan.ai/docs/desktop/install/linux#compatibility)) - RAM: Choose models that use less than 80% of your available RAM - For 8GB systems: Use models under 6GB - For 16GB systems: Use models under 13GB diff --git a/docs/src/pages/post/_assets/claude.jpeg b/docs/src/pages/post/_assets/claude.jpeg new file mode 100644 index 000000000..1b5dc1158 Binary files /dev/null and b/docs/src/pages/post/_assets/claude.jpeg differ diff --git a/docs/src/pages/post/_assets/create-assistant-1.jpeg b/docs/src/pages/post/_assets/create-assistant-1.jpeg new file mode 100644 index 000000000..4c005b6ed Binary files /dev/null and b/docs/src/pages/post/_assets/create-assistant-1.jpeg differ diff --git a/docs/src/pages/post/_assets/gemini.jpeg b/docs/src/pages/post/_assets/gemini.jpeg new file mode 100644 index 000000000..acd333607 Binary files /dev/null and b/docs/src/pages/post/_assets/gemini.jpeg differ diff --git a/docs/src/pages/post/_assets/jan-assistant-for-law.png b/docs/src/pages/post/_assets/jan-assistant-for-law.png new file mode 100644 index 000000000..298d92f0f Binary files /dev/null and b/docs/src/pages/post/_assets/jan-assistant-for-law.png differ diff --git a/docs/src/pages/post/_assets/jan-for-ai-law-assistant-chat.jpeg b/docs/src/pages/post/_assets/jan-for-ai-law-assistant-chat.jpeg new file mode 100644 index 000000000..5501b5e70 Binary files /dev/null and b/docs/src/pages/post/_assets/jan-for-ai-law-assistant-chat.jpeg differ diff --git a/docs/src/pages/post/_assets/lm-studio.jpeg b/docs/src/pages/post/_assets/lm-studio.jpeg new file mode 100644 index 000000000..ee6ee3825 Binary files /dev/null and b/docs/src/pages/post/_assets/lm-studio.jpeg differ diff --git a/docs/src/pages/post/_assets/perplexity.jpeg b/docs/src/pages/post/_assets/perplexity.jpeg new file mode 100644 index 000000000..6a73f9b55 Binary files /dev/null and b/docs/src/pages/post/_assets/perplexity.jpeg differ diff --git a/docs/src/pages/post/_meta.json b/docs/src/pages/post/_meta.json index e5472dd61..1962ffa9e 100644 --- a/docs/src/pages/post/_meta.json +++ b/docs/src/pages/post/_meta.json @@ -20,5 +20,10 @@ "title": "Research", "display": "normal", "href": "/blog?category=research" + }, + "guides-cat": { + "title": "Guides", + "display": "normal", + "href": "/blog?category=guides" } } diff --git a/docs/src/pages/post/ai-for-law.mdx b/docs/src/pages/post/ai-for-law.mdx new file mode 100644 index 000000000..a42b70ced --- /dev/null +++ b/docs/src/pages/post/ai-for-law.mdx @@ -0,0 +1,123 @@ +--- +title: "Private AI for legal professionals who need confidentiality" +description: "It's possible to use AI without risking client
data. Jan helps lawyers save time while keeping clients safe." +tags: AI, ai for law, ai for lawyers, ChatGPT alternative, Jan, local AI, offline AI +categories: guides +date: 2025-09-30 +ogImage: assets/images/general/jan-for-ai-law-assistant-chat.jpeg +twitter: + card: summary_large_image + site: "@jandotai" + title: "Private AI for legal professionals who need confidentiality" + description: "It's possible to use AI without risking client data. Jan helps lawyers save time while keeping clients safe." + image: assets/images/general/jan-assistants-ai-for-legal.jpeg +--- +import { Callout } from 'nextra/components' +import CTABlog from '@/components/Blog/CTA' +import { OpenAIStatusChecker } from '@/components/OpenAIStatusChecker' + +# Private AI for legal professionals who need confidentiality + +![AI for Law](/assets/images/general/jan-for-ai-law-assistant-chat.jpeg) + +Yes, it's possible to use AI in legal work without risking client data. + + +Client trust depends on privacy. Sending documents into public AI tools risks compliance and reputation. + + +Start by [downloading Jan](/download) and installing the **Jan v1 model**. Once installed, you can create assistants tailored to your practice and keep contracts, case notes, and client files under your control. + + +**Why use Jan for legal tasks** +- Runs locally on your hardware, no cloud uploads +- Keeps chats and interactions private +- Works offline once installed +- Lets you build assistants for your own workflows + + +--- + +## Create your assistant + +Once Jan is installed with the **Jan v1 model**, onboarding will guide you through downloading and setup. + +Click **Create assistant** to start: +![Create your first AI assistant in Jan](./_assets/create-assistant-1.jpeg) +*Create your first assistant in Jan* + +Add an assistant name and prompt: +![Jan assistant for contract review](./_assets/jan-assistant-for-law.png) +*Example of a Jan assistant for contract review* + +You can create assistants using specific prompts. Below are examples for common legal workflows. + +--- + +## Contract review assistant + +AI can help lawyers move faster through long contracts by pointing out what matters most. + +**Prompt for Jan:** +> You are a contract review assistant. +> When I paste a contract: +> - Highlight risky or unusual clauses +> - Flag ambiguous or missing terms +> - Summarize the agreement in plain English for a non-lawyer client +> Format your response with sections: **Risks**, **Ambiguities/Missing**, **Summary**. +> Do not provide legal advice. + +--- + +## Drafting assistant + +Use AI to create first drafts of NDAs, service agreements, or client letters. You still refine the output, but AI saves time on boilerplate. + +**Prompt for Jan:** +> You are a drafting assistant. +> When asked to draft a legal agreement or client letter: +> - Produce a professional first version +> - Use clear, concise language +> - Leave placeholders like [Party Name], [Date], [Amount] for details +> - Structure output with headings, numbered clauses, and consistent formatting +> Do not provide legal advice. + +--- + +## Case preparation assistant + +Case prep often means reading hundreds of pages. AI can turn depositions, discovery files, or judgments into concise notes. + +![Jan legal case preparation assistant](./_assets/jan-for-ai-law-assistant-chat.jpeg) +*Jan chat interface for case preparation — process documents and extract key information* + +**Prompt for Jan:** +> You are a case preparation assistant. 
+> When I provide case materials: +> - Extract key facts, issues, and arguments +> - Present them as bullet points under headings: **Facts**, **Issues**, **Arguments** +> - Keep summaries concise (under 500 words unless I request more) +> Use plain English, no speculation or legal conclusions. + +--- + +## Knowledge management assistant + +Law firms accumulate memos, policies, and precedents. AI can help organize and retrieve them quickly. + +**Prompt for Jan:** +> You are a knowledge management assistant. +> When I ask questions about internal documents: +> - Return concise summaries or direct excerpts +> - Always cite the source (e.g., “Policy Manual, Section 4”) +> - If not found in provided material, reply “Not found in documents.” +> Do not invent information. + +--- + +## Final note + +AI in legal practice is not about replacing lawyers. It’s about handling repetitive tasks safely so you can focus on real decisions. +With private AI, you gain efficiency without compromising client confidentiality. + + diff --git a/docs/src/pages/post/ai-for-teachers.mdx b/docs/src/pages/post/ai-for-teachers.mdx new file mode 100644 index 000000000..cd2c2b60b --- /dev/null +++ b/docs/src/pages/post/ai-for-teachers.mdx @@ -0,0 +1,134 @@ +--- +title: "AI for teachers who care about student privacy" +description: "Use AI in teaching without risking student data. Jan helps teachers plan lessons, grade faster, and communicate with parents." +tags: AI, ai for teachers, ChatGPT alternative, Jan, local AI, offline AI, education +categories: guides +date: 2025-10-01 +ogImage: assets/images/general/ai-for-teacher.jpeg +twitter: + card: summary_large_image + site: "@jandotai" + title: "AI for teachers who care about student privacy" + description: "Use AI in teaching without risking student data. Jan helps teachers plan lessons, grade faster, and communicate with parents." + image: assets/images/general/ai-for-teacher.jpeg +--- +import { Callout } from 'nextra/components' +import CTABlog from '@/components/Blog/CTA' + +# AI for teachers who care about student privacy + +![AI for teachers](/assets/images/general/ai-for-teacher.jpeg) + +AI can help teachers handle the work that piles up outside class. It can draft a lesson outline, suggest feedback on essays, or turn notes into a polite parent email. These are the tasks that usually stretch into evenings and weekends. + + +Most AI tools like ChatGPT run in the cloud. Sharing lesson plans, student writing, or parent details there risks compliance and trust. + + +That's where Jan comes in: +- [Download Jan](/download) +- You get the same time-saving help +- Your data never leaves your device. + + +*See how teachers use Jan for AI-powered lesson planning and grading* + + +**Why use Jan for teaching** +- Runs locally, no cloud servers +- Keeps lesson plans and student data private +- Works offline once installed +- Lets you build assistants for your daily teaching tasks + + +--- + +## Create your assistant + +Once Jan is installed, click **Create assistant** and add one of the prompts below. Each assistant is for a specific classroom task. + +![Create your first AI assistant in Jan](/assets/images/general/assistants-ai-for-teachers.jpeg) + +--- + +## Lesson planning assistant + +AI can draft lesson outlines in minutes. You adapt and refine them for your students. + +**Prompt for Jan:** +> You are a lesson planning assistant. 
+> When I give you a topic or subject: +> - Suggest a lesson outline with objectives, activities, and discussion questions +> - Adjust for different grade levels if I specify +> - Keep plans practical and realistic for a classroom setting + +Example ask: For Grade 6 science on ecosystems. Objectives: define food chains, explain producer/consumer roles. Activity: group poster on an ecosystem. Questions: How would removing one species affect the whole system? + +--- + +## Grading support assistant + +AI won't replace your judgment, but it can make feedback faster and more consistent. + +**Prompt for Jan:** +> You are a grading support assistant. +> When I paste student writing or answers: +> - Highlight strengths and areas for improvement +> - Suggest short, constructive feedback I can reuse +> - Keep tone supportive and professional +> Do not assign final grades. + +Example: For a history essay. Strength: clear thesis. Improvement: weak evidence. Feedback: "Great thesis and structure. Next time, support your points with specific historical examples." + +--- + +## Parent communication assistant + +Writing parent emails is important but time-consuming. + +**Prompt for Jan:** +> You are a parent communication assistant. +> When I give you key points about a student: +> - Draft a polite and empathetic email to parents +> - Use clear and professional language +> - Keep tone supportive, not overly formal +> Only include details I provide. + +Example: Notes: “Student is falling behind on homework, otherwise engaged in class.” - Output: a short, encouraging message suggesting a check-in at home. + +--- + +## Classroom resources assistant + +Generate quizzes, worksheets, or practice activities at short notice. + +**Prompt for Jan:** +> You are a classroom resource assistant. +> When I provide a topic or subject: +> - Generate sample quiz questions (multiple choice and short answer) +> - Suggest short practice activities +> - Provide answer keys separately +> Keep material age-appropriate for the level I specify. + +Example: For Grade 4 fractions. 5 multiple-choice questions with answer key, plus a quick worksheet with 3 practice problems. + +--- + +## Getting started + +1. [Download Jan](/download). +2. Install the Jan model (guided in-app) +3. Create your first assistant using one of the prompts above +4. Test with non-sensitive examples first +5. Use it in real classroom tasks once you're comfortable + +--- + +## Final note + +AI isn't here to replace teachers. It's here to take repetitive tasks off your plate so you can focus on teaching. With Jan, you can use AI confidently without risking student privacy. + + diff --git a/docs/src/pages/post/benchmarking-nvidia-tensorrt-llm.mdx b/docs/src/pages/post/benchmarking-nvidia-tensorrt-llm.mdx index 3f5d376cb..9fa67ea07 100644 --- a/docs/src/pages/post/benchmarking-nvidia-tensorrt-llm.mdx +++ b/docs/src/pages/post/benchmarking-nvidia-tensorrt-llm.mdx @@ -17,7 +17,7 @@ Jan now supports [NVIDIA TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) i We've been excited for TensorRT-LLM for a while, and [had a lot of fun implementing it](https://github.com/menloresearch/nitro-tensorrt-llm). As part of the process, we've run some benchmarks, to see how TensorRT-LLM fares on consumer hardware (e.g. 
[4090s](https://www.nvidia.com/en-us/geforce/graphics-cards/40-series/), [3090s](https://www.nvidia.com/en-us/geforce/graphics-cards/30-series/)) we commonly see in the [Jan's hardware community](https://discord.com/channels/1107178041848909847/1201834752206974996). - **Give it a try!** Jan's TensorRT-LLM extension is available in Jan v0.4.9 and up ([see more](/docs/built-in/tensorrt-llm)). We precompiled some TensorRT-LLM models for you to try: `Mistral 7b`, `TinyLlama-1.1b`, `TinyJensen-1.1b` 😂 + **Give it a try!** Jan's TensorRT-LLM extension is available in Jan v0.4.9. We precompiled some TensorRT-LLM models for you to try: `Mistral 7b`, `TinyLlama-1.1b`, `TinyJensen-1.1b` 😂 Bugs or feedback? Let us know on [GitHub](https://github.com/menloresearch/jan) or via [Discord](https://discord.com/channels/1107178041848909847/1201832734704795688). diff --git a/docs/src/pages/post/chatgpt-alternatives.mdx b/docs/src/pages/post/chatgpt-alternatives.mdx new file mode 100644 index 000000000..36f44e5c3 --- /dev/null +++ b/docs/src/pages/post/chatgpt-alternatives.mdx @@ -0,0 +1,120 @@ +--- +title: "ChatGPT alternatives that actually replace it" +description: "See the best ChatGPT alternatives in 2025. We've listed tools that are alternatives to ChatGPT." +tags: AI, ChatGPT alternative, ChatGPT alternatives, alternative to chatgpt, Jan, local AI, privacy, open source, offline AI +categories: guides +date: 2025-09-29 +ogImage: assets/images/general/chatgpt-alternative-jan.jpeg +twitter: + card: summary_large_image + site: "@jandotai" + title: "ChatGPT alternatives that actually replace it." + description: "See the best ChatGPT alternatives in 2025. We've listed tools that are alternatives to ChatGPT." + image: assets/images/general/chatgpt-alternative-jan.jpeg +--- +import { Callout } from 'nextra/components' +import CTABlog from '@/components/Blog/CTA' + +# Best ChatGPT Alternatives + +ChatGPT works well, but it always needs internet, has usage limits, and isn't private. + +If you want options that fit different needs, offline use, privacy, or specialized tasks, see the best alternatives to ChatGPT available for specific use cases. + +## Comparison: ChatGPT Alternatives + +| ChatGPT Alternative | Offline | Key Strength | Best For | +| ------------------------- | ------- | ---------------------------- | -------------------------- | +| **[Jan](https://jan.ai)** | Yes | Runs Cloud + Offline, open-source | Best overall ChatGPT replacement | +| Claude | - | Strong writing and reasoning | Creative text & code | +| Gemini | - | Integrated with Google | Research tasks, image generation | +| Perplexity | - | Fast, with cited answers | Research and fact-checking | +| LM Studio | Yes | Runs open models on PC | Coding and experiments | + +### Jan is the best ChatGPT alternative + +![Use Jan to chat with AI models without internet access](/assets/images/general/chatgpt-alternative-jan.jpeg) +*Jan as an open-source alternative to ChatGPT* + +Jan is the most complete ChatGPT alternative available today. 
It enables: +- Use AI online & offline (even on a plane) +- Agentic actions supported +- MCP servers supported for tools + +Unlike ChatGPT, it runs on your computer, which means: +- Offline AI capabilities (see [Offline ChatGPT post](https://www.jan.ai/post/offline-chatgpt-alternative) for details) +- 100% private +- Open-source & Free + + Jan is an [open-source replacement for ChatGPT.](https://www.jan.ai/) + +### Claude is the most notable online alternative +![Claude](./_assets/claude.jpeg) + +Claude has become the main online rival to ChatGPT. It stands out for writing, reasoning, and coding. + +- Handles very long documents and context well +- Strong for essays, research papers, and structured text +- Popular with developers for code explanations and debugging +- Cloud-only, no offline mode +- Filters outputs heavily, sometimes too restrictive + +### Gemini is Google's integrated alternative +![Gemini](./_assets/gemini.jpeg) + +Gemini ties directly into Google’s apps and search. Great for users in the Google ecosystem. + +- Built into Gmail, Docs, and Google Search +- Good for real-time research and fact-checking +- Strong at pulling web context into answers +- Requires Google account, fully online +- Privacy concerns: all tied to Google services + +### Perplexity is the research-focused alternative +![Perplexity](./_assets/perplexity.jpeg) + +Perplexity is built for fact-checking and quick research, not creativity. + +- Always cites sources for answers +- Strong at summarizing current web info +- Very fast for Q&A style use +- Limited in creativity and open-ended writing +- Cloud-only, daily free usage caps + +### LM Studio is the experimental alternative + +![LM Studio](./_assets/lm-studio.jpeg) + +LM Studio is not a ChatGPT replacement but a local tool for running open models. + +- Lets you test and run open-source models on PC +- Offline by default, works without internet +- Flexible setup for developers and technical users +- Requires decent hardware (RAM/VRAM) + +LM Studio is not beginner-friendly compared to Jan. + +## Choosing the right ChatGPT alternative for you: + +- Best overall replacement: [Jan](https://www.jan.ai/) +- For writing & storytelling: Claude +- For research & web knowledge: Perplexity or Gemini +- For productivity & office work: Microsoft Copilot +- For technical users experimenting with open-source models: LM Studio + +Most ChatGPT alternatives are still cloud-based and limited. If you want full privacy, offline use, and no restrictions, the best ChatGPT alternative is [Jan](https://www.jan.ai/). + +### Can I use ChatGPT offline? +No. ChatGPT always requires internet. For offline AI, use Jan. + +### What’s the best free ChatGPT alternative? +Jan is free, open-source, and runs offline. Others like Claude or Perplexity have limited free tiers but are cloud-based. + +### Which ChatGPT alternative is best for writing? +Claude is strong for essays, reports, and structured writing. You could use [open-source models](https://www.jan.ai/post/run-ai-models-locally) in Jan too. + +### Which ChatGPT alternative is best for research? +Perplexity and Gemini pull real-time web data with citations. + +### What’s the closest full replacement to ChatGPT? +Jan. It runs locally, works offline, and feels like ChatGPT without restrictions.
\ No newline at end of file diff --git a/docs/src/pages/post/deepresearch.mdx b/docs/src/pages/post/deepresearch.mdx index f3f1c0ee7..50cfc19ad 100644 --- a/docs/src/pages/post/deepresearch.mdx +++ b/docs/src/pages/post/deepresearch.mdx @@ -4,13 +4,13 @@ description: "A simple guide to replicating Deep Research results for free, with tags: AI, local models, Jan, GGUF, Deep Research, local AI categories: guides date: 2025-08-04 -ogImage: _assets/research-result-local.png +ogImage: assets/images/general/research-result-local.png twitter: card: summary_large_image site: "@jandotai" title: "Replicating Deep Research with Jan" description: "Learn how to replicate Deep Research results with Jan." - image: _assets/research-result-local.jpg + image: assets/images/general/research-result-local.png --- import { Callout } from 'nextra/components' diff --git a/docs/src/pages/post/deepseek-r1-locally.mdx b/docs/src/pages/post/deepseek-r1-locally.mdx index c9fb229b5..6d09532e9 100644 --- a/docs/src/pages/post/deepseek-r1-locally.mdx +++ b/docs/src/pages/post/deepseek-r1-locally.mdx @@ -4,7 +4,7 @@ description: "A straightforward guide to running DeepSeek R1 locally regardless tags: DeepSeek, R1, local AI, Jan, GGUF, Qwen, Llama categories: guides date: 2025-01-31 -ogImage: assets/deepseek-r1-locally-jan.jpg +ogImage: assets/images/general/deepseek-r1-locally-jan.jpg twitter: card: summary_large_image site: "@jandotai" @@ -17,7 +17,7 @@ import CTABlog from '@/components/Blog/CTA' # Run DeepSeek R1 locally on your device (Beginner-Friendly Guide) -![DeepSeek R1 running locally in Jan AI interface, showing the chat interface and model settings](./_assets/deepseek-r1-locally-jan.jpg) +![DeepSeek R1 running locally in Jan AI interface, showing the chat interface and model settings](/assets/images/general/deepseek-r1-locally-jan.jpg) DeepSeek R1 is one of the best open-source models in the market right now, and you can run DeepSeek R1 on your own computer! diff --git a/docs/src/pages/post/how-we-benchmark-kernels.mdx b/docs/src/pages/post/how-we-benchmark-kernels.mdx index dca80b095..6d5f6d947 100644 --- a/docs/src/pages/post/how-we-benchmark-kernels.mdx +++ b/docs/src/pages/post/how-we-benchmark-kernels.mdx @@ -3,7 +3,7 @@ title: "How we (try to) benchmark GPU kernels accurately" description: "We present the process behind how we decided to benchmark GPU kernels and iteratively improved our benchmarking pipeline" tags: "" categories: research -ogImage: "./_assets/cover-kernel-benchmarking.png" +ogImage: assets/images/general/cover-kernel-benchmarking.png date: 2025-09-17 --- diff --git a/docs/src/pages/post/is-chatgpt-down-use-jan.mdx b/docs/src/pages/post/is-chatgpt-down-use-jan.mdx new file mode 100644 index 000000000..dfd9843fd --- /dev/null +++ b/docs/src/pages/post/is-chatgpt-down-use-jan.mdx @@ -0,0 +1,124 @@ +--- +title: "If ChatGPT is down, switch to AI that never goes down" +description: "Check if ChatGPT down right now, and learn how to use AI that never goes down." +tags: AI, ChatGPT down, ChatGPT alternative, Jan, local AI, offline AI, ChatGPT at capacity +categories: guides +date: 2025-09-30 +ogImage: assets/images/general/is-chatgpt-down.jpg +twitter: + card: summary_large_image + site: "@jandotai" + title: "Realtime Status: Is ChatGPT down?" + description: "Check if ChatGPT is down right now with our real-time status checker, and learn how to use AI that never goes offline." 
+ image: assets/images/general/is-chatgpt-down.jpg +--- +import { Callout } from 'nextra/components' +import CTABlog from '@/components/Blog/CTA' +import { OpenAIStatusChecker } from '@/components/OpenAIStatusChecker' + +# If ChatGPT is down, switch to AI that never goes down + +If you're seeing that ChatGPT is down, it's a good signal to switch to [Jan](https://www.jan.ai/), an AI that never goes down. + +## 🔴 Realtime Status: Is ChatGPT down? + +This live tracker shows if ChatGPT is down right now. + + + +### ChatGPT Status Indicators + +
+- **Operational**: All systems are functioning normally with no reported issues.
+- **Degraded Performance**: Services are running but may be slower than usual.
+- **Partial Outage**: Some features or regions may be experiencing issues.
+- **Major Outage**: Significant service disruption affecting most users.
+ +## Skip the downtime with Jan + +When ChatGPT is down, Jan keeps working. Jan is an open-source ChatGPT alternative that runs on your computer - no servers, no outages, no waiting. + +![Jan running when ChatGPT is down](/assets/images/general/is-chatgpt-down.jpg) +*Jan works even when ChatGPT doesn't.* + +### Why Jan never goes down: +- **Runs locally** - No dependency on external servers +- **Always available** - Works offline, even on flights +- **No capacity limits** - Uses your computer's resources +- **100% private** - Your conversations stay on your device + +### Get started in 3 mins: +1. Download Jan: [jan.ai](https://jan.ai) +2. Install a model: Choose from Jan, Qwen, or other top models +3. Start chatting: Similar design to ChatGPT, but always available if you use local models + + +**Pro tip:** Keep both ChatGPT and Jan. You'll never lose productivity to outages again. + + +Jan runs AI models locally, so you don't need internet access. That means Jan is unaffected when ChatGPT is down. + +### Why does ChatGPT go down? +There could be multiple reasons: +- Too many users at once +- Data center or API downtime +- Planned or unplanned updates +- Limited in some locations + +ChatGPT depends on OpenAI’s servers. If those go down, so does ChatGPT. Jan users aren't affected by ChatGPT outages. + +### Common ChatGPT Errors + +When ChatGPT experiences issues, you might see these error messages: + +- "ChatGPT is at capacity right now": Too many users online, try again later +- "Error in message stream": Connection problems with OpenAI servers +- "Something went wrong": General server error, refresh and retry +- "Network error": Internet connectivity issues on your end or OpenAI's +- "Rate limit exceeded": Too many requests sent, wait before trying again +- "This model is currently overloaded": High demand for a specific model + +## Quick answers about ChatGPT status + +### Is ChatGPT down? +Check the ChatGPT realtime status above. [See if ChatGPT is down right now.](#-realtime-status-is-chatgpt-down) + +### Why is ChatGPT down? +Usually server overload, maintenance, or outages at OpenAI. + +### What does "ChatGPT is at capacity" mean? +Too many users are online at the same time. You’ll need to wait or switch to Jan instead. + +### Is ChatGPT shutting down? +No, ChatGPT isn't shutting down. Outages are temporary. + +### Can I use ChatGPT offline? +No. ChatGPT always requires internet. For [offline AI](https://www.jan.ai/post/offline-chatgpt-alternative), use [Jan](https://jan.ai). \ No newline at end of file diff --git a/docs/src/pages/post/offline-chatgpt-alternative.mdx b/docs/src/pages/post/offline-chatgpt-alternative.mdx index d73bec712..7f94cc23e 100644 --- a/docs/src/pages/post/offline-chatgpt-alternative.mdx +++ b/docs/src/pages/post/offline-chatgpt-alternative.mdx @@ -1,32 +1,36 @@ --- title: "Offline ChatGPT: You can't run ChatGPT offline, do this instead" -description: "Learn how to use AI offline with Jan - a free, open-source alternative to ChatGPT that works 100% offline on your computer." -tags: AI, ChatGPT alternative, offline AI, Jan, local AI, privacy +description: "Use offline AI with Jan, a free & open-source alternative to ChatGPT that runs 100% offline."
+tags: AI, chatgpt offline, ChatGPT alternative, offline AI, Jan, local AI, privacy categories: guides date: 2025-02-08 -ogImage: _assets/offline-chatgpt-alternatives-jan.jpg +ogImage: assets/images/general/offline-chatgpt-alternatives-jan.jpg twitter: card: summary_large_image site: "@jandotai" title: "Offline ChatGPT: You can't run ChatGPT offline, do this instead" - description: "Want to use ChatGPT offline? Learn how to run AI models locally with Jan - free, open-source, and works without internet." - image: _assets/offline-chatgpt-alternatives-jan.jpg + description: "Use offline AI with Jan, a free & open-source alternative to ChatGPT that runs 100% offline." + image: assets/images/general/offline-chatgpt-alternatives-jan.jpg --- import { Callout } from 'nextra/components' import CTABlog from '@/components/Blog/CTA' # Offline ChatGPT: You can't run ChatGPT offline, do this instead -ChatGPT is a cloud-based service that requires internet access. However, it's not the only way to use AI. You can run AI models offline on your device with [Jan](https://jan.ai/). It's completely free, open-source, and gives you 100% offline capability. You can even use AI on a plane! +ChatGPT itself can't run offline. You can’t download it, and it always needs internet, because it runs on OpenAI's servers. - +If you want offline AI, you need local models. The easiest way: [Jan, an open-source replacement for ChatGPT](https://jan.ai/). It's free, open-source, and works 100% offline. With Jan, you can even use AI on a plane. + + **Quick Summary:** -- ChatGPT always needs internet - it can't run offline -- Jan lets you run AI models 100% offline on your computer -- It's free and open-source -- Works on Mac, Windows, and Linux +- ChatGPT always needs internet - no offline mode +- Use Jan to run AI models 100% offline +- It's free & open-source, and works on Mac, Windows, and Linux +## How to use AI offline? +Offline AI means the model runs on your computer. That means no internet needed, 100% privacy, and data that never leaves your device. With Jan you can run offline AI models locally. + ## Jan as an offline ChatGPT alternative ![Use Jan to chat with AI models without internet access](./_assets/offline-chatgpt-alternative-ai-without-internet.jpg) @@ -42,23 +46,25 @@ Go to [jan.ai](https://jan.ai) and download the version for your computer (Mac, ### 2. Download an AI model -You'll need an AI model to use AI offline, so download a model from Jan. Once it's on your computer, you don't need internet anymore. +You'll need an AI model to use AI offline, so download a model from Jan. Once it's on your computer, you don't need internet anymore. You can also use GPT models via Jan - see the [running gpt-oss locally](https://www.jan.ai/post/run-gpt-oss-locally) post for details. ![Choose an AI model that works offline](./_assets/jan-model-selection.jpg "Find the perfect AI model for offline use") *Select an AI model that matches your needs and computer capabilities* - -**Which model should you choose?** +### Which model should you choose? - For most computers: Try Mistral 7B or DeepSeek - they're similar to ChatGPT 3.5 - For older computers: Use smaller 3B models - For gaming PCs: You can try larger 13B models + Don't worry about choosing - Jan will automatically recommend models that work well on your computer. +If you'd like to learn more about local AI, check the [how to run AI models locally as a beginner](https://www.jan.ai/post/run-ai-models-locally) article. + ### 3.
Start using AI offline -![Chat with AI offline using Jan's interface](./_assets/run-ai-locally-with-jan.jpg "Experience ChatGPT-like interactions without internet") +![Chat with AI offline using Jan's interface](/assets/images/general/run-ai-locally-with-jan.jpg "Experience ChatGPT-like interactions without internet") *Use Jan's clean interface to chat with AI - no internet required* Once downloaded, you can use AI anywhere, anytime: @@ -71,12 +77,7 @@ Once downloaded, you can use AI anywhere, anytime: ## How to chat with your docs in Jan? -To chat with your docs in Jan, you need to activate experimental mode. - -![Activate experimental mode in Jan's settings](./_assets/chat-with-your-docs-offline-ai.jpg "Enable experimental features to chat with your documents") -*Turn on experimental mode in settings to chat with your docs* - -After activating experimental mode, simply add your files and ask questions about them. +Simply add your files and ask questions about them. ![Chat with your documents using Jan](./_assets/chat-with-docs-prompt.jpg "Ask questions about your documents offline") *Chat with your documents privately - no internet needed* @@ -97,17 +98,17 @@ Local AI makes possible offline AI use, so Jan is going to be your first step to 4. **No Server Issues:** No more "ChatGPT is at capacity" 5. **Your Choice of Models:** Use newer models as they come out -**"Is it really free? What's the catch?"** +### "Is Jan really free? What's the catch?" Yes, it's completely free and open source. Jan is built by developers who believe in making AI accessible to everyone. -**"How does it compare to ChatGPT?"** +### How does Jan compare to ChatGPT?" Modern open-source models like DeepSeek and Mistral are very capable. While they might not match GPT-4, they're perfect for most tasks and getting better every month. -**"Do I need a powerful computer?"** +### "Do I need a powerful computer?" If your computer is from the last 5 years, it will likely work fine. You need about 8GB of RAM and 10GB of free space for comfortable usage. -**"What about my privacy?"** -Everything stays on your computer. Your conversations, documents, and data never leave your device unless you choose to share them. +### "What about my privacy?" +Everything stays on your computer with Jan. Your conversations, documents, and data never leave your device unless you choose to share them. Want to learn more about the technical side? Check our detailed [guide on running AI models locally](/post/run-ai-models-locally). It's not required to [use AI offline](https://jan.ai/) but helps understand how it all works. @@ -116,3 +117,20 @@ Want to learn more about the technical side? Check our detailed [guide on runnin [Join our Discord community](https://discord.gg/Exe46xPMbK) for support and tips on using Jan as your offline ChatGPT alternative. + +### FAQ + +#### Can I download ChatGPT for offline use? +No. ChatGPT is cloud-only. + +#### How to use ChatGPT offline? +You can't. ChatGPT has no offline mode. Use Jan instead for a ChatGPT-like offline experience. + +#### Does ChatGPT have internet access? +Yes. It runs in the cloud. + +#### What's the best way to use AI offline? +Download Jan and run models like Mistral, DeepSeek, or GPT-OSS locally. + +#### What's GPT offline? +OpenAI has open-source models you can run locally but not via ChatGPT. One of them is [gpt-oss](https://www.jan.ai/post/run-gpt-oss-locally) and you can run it via Jan. 
\ No newline at end of file diff --git a/docs/src/pages/post/qwen3-settings.mdx b/docs/src/pages/post/qwen3-settings.mdx index c4635451c..07af8b9ba 100644 --- a/docs/src/pages/post/qwen3-settings.mdx +++ b/docs/src/pages/post/qwen3-settings.mdx @@ -50,7 +50,7 @@ Thinking mode is powerful, but greedy decoding kills its output. It'll repeat or ## Quick summary -![Qwen3 settings](./_assets/qwen3-settings-jan-ai.jpeg) +![Qwen3 settings](/assets/images/general/qwen3-30b-settings.jpg) ### Non-thinking mode (`enable_thinking=False`) diff --git a/docs/src/pages/post/run-ai-models-locally.mdx b/docs/src/pages/post/run-ai-models-locally.mdx index efe8bc594..315d9aad2 100644 --- a/docs/src/pages/post/run-ai-models-locally.mdx +++ b/docs/src/pages/post/run-ai-models-locally.mdx @@ -4,7 +4,7 @@ description: "A straightforward guide to running AI models locally on your compu tags: AI, local models, Jan, GGUF, privacy, local AI categories: guides date: 2025-01-31 -ogImage: assets/run-ai-locally-with-jan.jpg +ogImage: assets/images/general/run-ai-locally-with-jan.jpg twitter: card: summary_large_image site: "@jandotai" @@ -35,7 +35,7 @@ Most people think running AI models locally is complicated. It's not. Anyone can That's all to run your first AI model locally! -![Jan's simple and clean chat interface for local AI](./_assets/run-ai-locally-with-jan.jpg "Jan's easy-to-use chat interface after installation") +![Jan's simple and clean chat interface for local AI](/assets/images/general/run-ai-locally-with-jan.jpg "Jan's easy-to-use chat interface after installation") *Jan's easy-to-use chat interface after installation.* Keep reading to learn key terms of local AI and the things you should know before running AI models locally. diff --git a/docs/src/pages/post/run-gpt-oss-locally.mdx b/docs/src/pages/post/run-gpt-oss-locally.mdx index 5f71e8b45..795738644 100644 --- a/docs/src/pages/post/run-gpt-oss-locally.mdx +++ b/docs/src/pages/post/run-gpt-oss-locally.mdx @@ -4,21 +4,19 @@ description: "Complete 5-minute beginner guide to running OpenAI's gpt-oss local tags: OpenAI, gpt-oss, local AI, Jan, privacy, Apache-2.0, llama.cpp, Ollama, LM Studio categories: guides date: 2025-08-06 -ogImage: assets/gpt-oss%20locally.jpeg +ogImage: assets/images/general/gpt-oss locally.jpeg twitter: card: summary_large_image site: "@jandotai" title: "Run OpenAI's gpt-oss Locally in 5 Minutes (Beginner Guide)" description: "Complete 5-minute beginner guide to running OpenAI's gpt-oss locally with Jan AI for private, offline conversations." - image: assets/gpt-oss%20locally.jpeg + image: assets/images/general/gpt-oss locally.jpeg --- import { Callout } from 'nextra/components' import CTABlog from '@/components/Blog/CTA' # Run OpenAI's gpt-oss Locally in 5 mins -![gpt-oss running locally in Jan interface](./_assets/gpt-oss%20locally.jpeg) - OpenAI launched [gpt-oss](https://openai.com/index/introducing-gpt-oss/), marking their return to open-source AI after GPT-2. This model is designed to run locally on consumer hardware. This guide shows you how to install and run gpt-oss on your computer for private, offline AI conversations. ## What is gpt-oss? 
diff --git a/docs/theme.config.tsx b/docs/theme.config.tsx index 8b71c4cca..f3d1ab69c 100644 --- a/docs/theme.config.tsx +++ b/docs/theme.config.tsx @@ -107,14 +107,15 @@ const config: DocsThemeConfig = { head: function useHead() { const { title, frontMatter } = useConfig() const { asPath } = useRouter() - const titleTemplate = - (asPath.includes('/desktop') + const titleTemplate = asPath.includes('/post/') + ? (frontMatter?.title || title) + : (asPath.includes('/desktop') ? 'Jan Desktop' : asPath.includes('/server') ? 'Jan Server' : 'Jan') + - ' - ' + - (frontMatter?.title || title) + ' - ' + + (frontMatter?.title || title) return ( diff --git a/extensions-web/package.json b/extensions-web/package.json index 232ba13fa..aa536e9fe 100644 --- a/extensions-web/package.json +++ b/extensions-web/package.json @@ -22,6 +22,9 @@ }, "devDependencies": { "@janhq/core": "workspace:*", + "@tabler/icons-react": "^3.34.0", + "@types/react": "19.1.2", + "react": "19.0.0", "typescript": "5.9.2", "vite": "5.4.20", "vitest": "2.1.9", @@ -29,6 +32,8 @@ }, "peerDependencies": { "@janhq/core": "*", + "@tabler/icons-react": "*", + "react": "19.0.0", "zustand": "5.0.3" }, "dependencies": { diff --git a/extensions-web/src/conversational-web/const.ts b/extensions-web/src/conversational-web/const.ts index 0ad7e9049..4a860c500 100644 --- a/extensions-web/src/conversational-web/const.ts +++ b/extensions-web/src/conversational-web/const.ts @@ -14,4 +14,4 @@ export const DEFAULT_ASSISTANT = { name: 'Jan', avatar: '👋', created_at: 1747029866.542, -} \ No newline at end of file +} diff --git a/extensions-web/src/conversational-web/extension.ts b/extensions-web/src/conversational-web/extension.ts index 7c31f1c31..0e01e2ca3 100644 --- a/extensions-web/src/conversational-web/extension.ts +++ b/extensions-web/src/conversational-web/extension.ts @@ -11,6 +11,9 @@ import { } from '@janhq/core' import { RemoteApi } from './api' import { getDefaultAssistant, ObjectParser, combineConversationItemsToMessages } from './utils' +import { ApiError } from '../shared/types/errors' + +const CONVERSATION_NOT_FOUND_EVENT = 'conversation-not-found' export default class ConversationalExtensionWeb extends ConversationalExtension { private remoteApi: RemoteApi | undefined @@ -111,6 +114,15 @@ export default class ConversationalExtensionWeb extends ConversationalExtension return messages } catch (error) { console.error('Failed to list messages:', error) + // Check if it's a 404 error (conversation not found) + if (error instanceof ApiError && error.isNotFound()) { + // Trigger a navigation event to redirect to home + // We'll use a custom event that the web app can listen to + window.dispatchEvent(new CustomEvent(CONVERSATION_NOT_FOUND_EVENT, { + detail: { threadId, error: error.message } + })) + } + return [] } } diff --git a/extensions-web/src/jan-provider-web/api.ts b/extensions-web/src/jan-provider-web/api.ts index 436ee06b6..97a9608f2 100644 --- a/extensions-web/src/jan-provider-web/api.ts +++ b/extensions-web/src/jan-provider-web/api.ts @@ -5,9 +5,45 @@ import { getSharedAuthService, JanAuthService } from '../shared' import { JanModel, janProviderStore } from './store' +import { ApiError } from '../shared/types/errors' // JAN_API_BASE is defined in vite.config.ts +// Constants +const TEMPORARY_CHAT_ID = 'temporary-chat' + +/** + * Determines the appropriate API endpoint and request payload based on chat type + * @param request - The chat completion request + * @returns Object containing endpoint URL and processed request payload + */ 
+function getChatCompletionConfig(request: JanChatCompletionRequest, stream: boolean = false) { + const isTemporaryChat = request.conversation_id === TEMPORARY_CHAT_ID + + // For temporary chats, use the stateless /chat/completions endpoint + // For regular conversations, use the stateful /conv/chat/completions endpoint + const endpoint = isTemporaryChat + ? `${JAN_API_BASE}/chat/completions` + : `${JAN_API_BASE}/conv/chat/completions` + + const payload = { + ...request, + stream, + ...(isTemporaryChat ? { + // For temporary chat: don't store anything, remove conversation metadata + conversation_id: undefined, + } : { + // For regular chat: store everything, use conversation metadata + store: true, + store_reasoning: true, + conversation: request.conversation_id, + conversation_id: undefined, + }) + } + + return { endpoint, payload, isTemporaryChat } +} + export interface JanModelsResponse { object: string data: JanModel[] @@ -102,7 +138,8 @@ export class JanApiClient { return models } catch (error) { - const errorMessage = error instanceof Error ? error.message : 'Failed to fetch models' + const errorMessage = error instanceof ApiError ? error.message : + error instanceof Error ? error.message : 'Failed to fetch models' janProviderStore.setError(errorMessage) janProviderStore.setLoadingModels(false) throw error @@ -115,22 +152,18 @@ export class JanApiClient { try { janProviderStore.clearError() + const { endpoint, payload } = getChatCompletionConfig(request, false) + return await this.authService.makeAuthenticatedRequest( - `${JAN_API_BASE}/conv/chat/completions`, + endpoint, { method: 'POST', - body: JSON.stringify({ - ...request, - stream: false, - store: true, - store_reasoning: true, - conversation: request.conversation_id, - conversation_id: undefined, - }), + body: JSON.stringify(payload), } ) } catch (error) { - const errorMessage = error instanceof Error ? error.message : 'Failed to create chat completion' + const errorMessage = error instanceof ApiError ? error.message : + error instanceof Error ? error.message : 'Failed to create chat completion' janProviderStore.setError(errorMessage) throw error } @@ -144,23 +177,17 @@ export class JanApiClient { ): Promise { try { janProviderStore.clearError() - + const authHeader = await this.authService.getAuthHeader() - - const response = await fetch(`${JAN_API_BASE}/conv/chat/completions`, { + const { endpoint, payload } = getChatCompletionConfig(request, true) + + const response = await fetch(endpoint, { method: 'POST', headers: { 'Content-Type': 'application/json', ...authHeader, }, - body: JSON.stringify({ - ...request, - stream: true, - store: true, - store_reasoning: true, - conversation: request.conversation_id, - conversation_id: undefined, - }), + body: JSON.stringify(payload), }) if (!response.ok) { @@ -216,7 +243,8 @@ export class JanApiClient { reader.releaseLock() } } catch (error) { - const err = error instanceof Error ? error : new Error('Unknown error occurred') + const err = error instanceof ApiError ? error : + error instanceof Error ? error : new Error('Unknown error occurred') janProviderStore.setError(err.message) onError?.(err) throw err @@ -230,7 +258,8 @@ export class JanApiClient { await this.getModels() console.log('Jan API client initialized successfully') } catch (error) { - const errorMessage = error instanceof Error ? error.message : 'Failed to initialize API client' + const errorMessage = error instanceof ApiError ? error.message : + error instanceof Error ? 
error.message : 'Failed to initialize API client' janProviderStore.setError(errorMessage) throw error } finally { @@ -239,4 +268,4 @@ } } -export const janApiClient = JanApiClient.getInstance() \ No newline at end of file +export const janApiClient = JanApiClient.getInstance() diff --git a/extensions-web/src/jan-provider-web/index.ts b/extensions-web/src/jan-provider-web/index.ts index 70cbf7770..4d3a4008a 100644 --- a/extensions-web/src/jan-provider-web/index.ts +++ b/extensions-web/src/jan-provider-web/index.ts @@ -1 +1 @@ -export { default } from './provider' \ No newline at end of file +export { default } from './provider' diff --git a/extensions-web/src/jan-provider-web/provider.ts b/extensions-web/src/jan-provider-web/provider.ts index cfbe18e2e..3375fd351 100644 --- a/extensions-web/src/jan-provider-web/provider.ts +++ b/extensions-web/src/jan-provider-web/provider.ts @@ -15,6 +15,7 @@ import { } from '@janhq/core' // cspell: disable-line import { janApiClient, JanChatMessage } from './api' import { janProviderStore } from './store' +import { ApiError } from '../shared/types/errors' // Jan models support tools via MCP const JAN_MODEL_CAPABILITIES = ['tools'] as const @@ -192,7 +193,8 @@ console.error(`Failed to unload Jan session ${sessionId}:`, error) return { success: false, - error: error instanceof Error ? error.message : 'Unknown error', + error: error instanceof ApiError ? error.message : + error instanceof Error ? error.message : 'Unknown error', } } } diff --git a/extensions-web/src/jan-provider-web/store.ts b/extensions-web/src/jan-provider-web/store.ts index 02cc70686..2ff341147 100644 --- a/extensions-web/src/jan-provider-web/store.ts +++ b/extensions-web/src/jan-provider-web/store.ts @@ -92,4 +92,4 @@ useJanProviderStore.getState().clearError(), reset: () => useJanProviderStore.getState().reset(), -} \ No newline at end of file +} diff --git a/extensions-web/src/mcp-web/components/WebSearchButton.tsx b/extensions-web/src/mcp-web/components/WebSearchButton.tsx new file mode 100644 index 000000000..86fa08906 --- /dev/null +++ b/extensions-web/src/mcp-web/components/WebSearchButton.tsx @@ -0,0 +1,54 @@ +import { useMemo, useCallback } from 'react' +import { IconWorld } from '@tabler/icons-react' +import { MCPToolComponentProps } from '@janhq/core' + +// List of tool names considered as web search tools +const WEB_SEARCH_TOOL_NAMES = ['google_search', 'scrape']; + +export const WebSearchButton = ({ + tools, + isToolEnabled, + onToolToggle, +}: MCPToolComponentProps) => { + const webSearchTools = useMemo( + () => tools.filter((tool) => WEB_SEARCH_TOOL_NAMES.includes(tool.name)), + [tools] + ) + + // Check if all web search tools are enabled + const isEnabled = useMemo( + () => webSearchTools.every((tool) => isToolEnabled(tool.name)), + [webSearchTools, isToolEnabled] + ) + + const handleToggle = useCallback(() => { + // Toggle all web search tools at once + const newState = !isEnabled + webSearchTools.forEach((tool) => { + onToolToggle(tool.name, newState) + }) + }, [isEnabled, webSearchTools, onToolToggle]) + + // Early return if no web search tools are available (kept after all hooks so the hook order stays stable across renders) + if (webSearchTools.length === 0) { + return null + } + + return ( + + ) +} diff --git a/extensions-web/src/mcp-web/components/index.ts b/extensions-web/src/mcp-web/components/index.ts new file mode 100644 index 000000000..7f9bc47da --- /dev/null +++ b/extensions-web/src/mcp-web/components/index.ts @@
-0,0 +1 @@ +export { WebSearchButton } from './WebSearchButton' diff --git a/extensions-web/src/mcp-web/index.ts b/extensions-web/src/mcp-web/index.ts index 5e13846a7..3d588753f 100644 --- a/extensions-web/src/mcp-web/index.ts +++ b/extensions-web/src/mcp-web/index.ts @@ -4,11 +4,13 @@ * Uses official MCP TypeScript SDK with proper session handling */ -import { MCPExtension, MCPTool, MCPToolCallResult } from '@janhq/core' +import { MCPExtension, MCPTool, MCPToolCallResult, MCPToolComponentProps } from '@janhq/core' import { getSharedAuthService, JanAuthService } from '../shared' import { Client } from '@modelcontextprotocol/sdk/client/index.js' import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js' import { JanMCPOAuthProvider } from './oauth-provider' +import { WebSearchButton } from './components' +import type { ComponentType } from 'react' // JAN_API_BASE is defined in vite.config.ts (defaults to 'https://api-dev.jan.ai/jan/v1') declare const JAN_API_BASE: string @@ -232,4 +234,27 @@ export default class MCPExtensionWeb extends MCPExtension { throw error } } -} \ No newline at end of file + + /** + * Provides a custom UI component for web search tools + * @returns The WebSearchButton component + */ + getToolComponent(): ComponentType | null { + return WebSearchButton + } + + /** + * Returns the list of tool names that should be disabled by default for new users + * All MCP web tools are disabled by default to prevent accidental API usage + * @returns Array of tool names to disable by default + */ + async getDefaultDisabledTools(): Promise { + try { + const tools = await this.getTools() + return tools.map(tool => tool.name) + } catch (error) { + console.error('Failed to get default disabled tools:', error) + return [] + } + } +} diff --git a/extensions-web/src/mcp-web/oauth-provider.ts b/extensions-web/src/mcp-web/oauth-provider.ts index fd37c3ece..7d14264d6 100644 --- a/extensions-web/src/mcp-web/oauth-provider.ts +++ b/extensions-web/src/mcp-web/oauth-provider.ts @@ -57,4 +57,4 @@ export class JanMCPOAuthProvider implements OAuthClientProvider { async codeVerifier(): Promise { throw new Error('Code verifier not supported') } -} \ No newline at end of file +} diff --git a/extensions-web/src/shared/auth/service.ts b/extensions-web/src/shared/auth/service.ts index 1895ff8c4..eb15c4893 100644 --- a/extensions-web/src/shared/auth/service.ts +++ b/extensions-web/src/shared/auth/service.ts @@ -16,6 +16,7 @@ import { logoutUser, refreshToken, guestLogin } from './api' import { AuthProviderRegistry } from './registry' import { AuthBroadcast } from './broadcast' import type { ProviderType } from './providers' +import { ApiError } from '../types/errors' const authProviderRegistry = new AuthProviderRegistry() @@ -160,7 +161,7 @@ export class JanAuthService { this.tokenExpiryTime = Date.now() + tokens.expires_in * 1000 } catch (error) { console.error('Failed to refresh access token:', error) - if (error instanceof Error && error.message.includes('401')) { + if (error instanceof ApiError && error.isStatus(401)) { await this.handleSessionExpired() } throw error @@ -305,9 +306,7 @@ export class JanAuthService { if (!response.ok) { const errorText = await response.text() - throw new Error( - `API request failed: ${response.status} ${response.statusText} - ${errorText}` - ) + throw new ApiError(response.status, response.statusText, errorText) } return response.json() @@ -418,7 +417,7 @@ export class JanAuthService { ) } catch (error) { console.error('Failed to 
fetch user profile:', error) - if (error instanceof Error && error.message.includes('401')) { + if (error instanceof ApiError && error.isStatus(401)) { // Authentication failed - handle session expiry await this.handleSessionExpired() return null diff --git a/extensions-web/src/shared/types/errors.ts b/extensions-web/src/shared/types/errors.ts new file mode 100644 index 000000000..650507a7b --- /dev/null +++ b/extensions-web/src/shared/types/errors.ts @@ -0,0 +1,50 @@ +/** + * Shared error types for API responses + */ + +export class ApiError extends Error { + public readonly status: number + public readonly statusText: string + public readonly responseText: string + + constructor(status: number, statusText: string, responseText: string, message?: string) { + super(message || `API request failed: ${status} ${statusText} - ${responseText}`) + this.name = 'ApiError' + this.status = status + this.statusText = statusText + this.responseText = responseText + + // Maintains proper stack trace for where our error was thrown (only available on V8) + if ((Error as any).captureStackTrace) { + (Error as any).captureStackTrace(this, ApiError) + } + } + + /** + * Check if this is a specific HTTP status code + */ + isStatus(code: number): boolean { + return this.status === code + } + + /** + * Check if this is a 404 Not Found error + */ + isNotFound(): boolean { + return this.status === 404 + } + + /** + * Check if this is a client error (4xx) + */ + isClientError(): boolean { + return this.status >= 400 && this.status < 500 + } + + /** + * Check if this is a server error (5xx) + */ + isServerError(): boolean { + return this.status >= 500 && this.status < 600 + } +} diff --git a/extensions-web/src/types.ts b/extensions-web/src/types.ts index 47ef0be71..4d52032c9 100644 --- a/extensions-web/src/types.ts +++ b/extensions-web/src/types.ts @@ -38,4 +38,4 @@ export interface IndexedDBConfig { keyPath: string indexes?: { name: string; keyPath: string | string[]; unique?: boolean }[] }[] -} \ No newline at end of file +} diff --git a/extensions-web/src/types/global.d.ts b/extensions-web/src/types/global.d.ts index a6e82d759..8d70d398b 100644 --- a/extensions-web/src/types/global.d.ts +++ b/extensions-web/src/types/global.d.ts @@ -2,4 +2,4 @@ export {} declare global { declare const JAN_API_BASE: string -} \ No newline at end of file +} diff --git a/extensions-web/src/vite-env.d.ts b/extensions-web/src/vite-env.d.ts index 151aa6856..11f02fe2a 100644 --- a/extensions-web/src/vite-env.d.ts +++ b/extensions-web/src/vite-env.d.ts @@ -1 +1 @@ -/// \ No newline at end of file +/// diff --git a/extensions-web/tsconfig.json b/extensions-web/tsconfig.json index e90dd4997..b39b50ee5 100644 --- a/extensions-web/tsconfig.json +++ b/extensions-web/tsconfig.json @@ -3,6 +3,7 @@ "target": "ES2020", "module": "ESNext", "moduleResolution": "bundler", + "jsx": "react-jsx", "allowSyntheticDefaultImports": true, "esModuleInterop": true, "strict": true, diff --git a/extensions-web/vite.config.ts b/extensions-web/vite.config.ts index 89cfb7d0e..8c144b0ab 100644 --- a/extensions-web/vite.config.ts +++ b/extensions-web/vite.config.ts @@ -9,11 +9,11 @@ export default defineConfig({ fileName: 'index' }, rollupOptions: { - external: ['@janhq/core', 'zustand'] + external: ['@janhq/core', 'zustand', 'react', 'react-dom', 'react/jsx-runtime', '@tabler/icons-react'] }, emptyOutDir: false // Don't clean the output directory }, define: { JAN_API_BASE: JSON.stringify(process.env.JAN_API_BASE || 'https://api-dev.jan.ai/v1'), } -}) \ No newline 
at end of file +}) diff --git a/extensions/llamacpp-extension/src/backend.ts b/extensions/llamacpp-extension/src/backend.ts index d60ecc138..a313e01c6 100644 --- a/extensions/llamacpp-extension/src/backend.ts +++ b/extensions/llamacpp-extension/src/backend.ts @@ -1,9 +1,8 @@ import { getJanDataFolderPath, fs, joinPath, events } from '@janhq/core' import { invoke } from '@tauri-apps/api/core' -import { getProxyConfig } from './util' +import { getProxyConfig, basenameNoExt } from './util' import { dirname, basename } from '@tauri-apps/api/path' import { getSystemInfo } from '@janhq/tauri-plugin-hardware-api' - /* * Reads currently installed backends in janDataFolderPath * @@ -73,10 +72,7 @@ async function fetchRemoteSupportedBackends( if (!name.startsWith(prefix)) continue - const backend = name - .replace(prefix, '') - .replace('.tar.gz', '') - .replace('.zip', '') + const backend = basenameNoExt(name).slice(prefix.length) if (supportedBackends.includes(backend)) { remote.push({ version, backend }) diff --git a/extensions/llamacpp-extension/src/index.ts b/extensions/llamacpp-extension/src/index.ts index 07d49cd53..f1a750138 100644 --- a/extensions/llamacpp-extension/src/index.ts +++ b/extensions/llamacpp-extension/src/index.ts @@ -39,7 +39,6 @@ import { getProxyConfig } from './util' import { basename } from '@tauri-apps/api/path' import { readGgufMetadata, - estimateKVCacheSize, getModelSize, isModelSupported, planModelLoadInternal, @@ -58,6 +57,8 @@ type LlamacppConfig = { chat_template: string n_gpu_layers: number offload_mmproj: boolean + cpu_moe: boolean + n_cpu_moe: number override_tensor_buffer_t: string ctx_size: number threads: number @@ -332,12 +333,14 @@ export default class llamacpp_extension extends AIEngine { ) // Clear the invalid stored preference this.clearStoredBackendType() - bestAvailableBackendString = - await this.determineBestBackend(version_backends) + bestAvailableBackendString = await this.determineBestBackend( + version_backends + ) } } else { - bestAvailableBackendString = - await this.determineBestBackend(version_backends) + bestAvailableBackendString = await this.determineBestBackend( + version_backends + ) } let settings = structuredClone(SETTINGS) @@ -1581,6 +1584,10 @@ export default class llamacpp_extension extends AIEngine { ]) args.push('--jinja') args.push('-m', modelPath) + if (cfg.cpu_moe) args.push('--cpu-moe') + if (cfg.n_cpu_moe && cfg.n_cpu_moe > 0) { + args.push('--n-cpu-moe', String(cfg.n_cpu_moe)) + } // For overriding tensor buffer type, useful where // massive MOE models can be made faster by keeping attention on the GPU // and offloading the expert FFNs to the CPU. 
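For reference, a minimal sketch of how the new `cpu_moe` / `n_cpu_moe` settings map onto llama-server flags, mirroring the `args.push(...)` calls added above. The `MoeConfig` shape and `moeArgs` helper here are illustrative only, not the extension's actual `LlamacppConfig` type or API:

```ts
// Illustrative sketch: mirrors the flag mapping added in index.ts above.
// `MoeConfig` is a hypothetical subset of the llama.cpp extension config.
interface MoeConfig {
  cpu_moe: boolean   // keep all MoE expert weights on the CPU
  n_cpu_moe: number  // or: keep only the first N layers' experts on the CPU
}

function moeArgs(cfg: MoeConfig): string[] {
  const args: string[] = []
  if (cfg.cpu_moe) args.push('--cpu-moe')
  if (cfg.n_cpu_moe && cfg.n_cpu_moe > 0) {
    args.push('--n-cpu-moe', String(cfg.n_cpu_moe))
  }
  return args
}

// Example: moeArgs({ cpu_moe: false, n_cpu_moe: 8 }) -> ['--n-cpu-moe', '8']
```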
@@ -2151,7 +2158,12 @@ export default class llamacpp_extension extends AIEngine {
     if (mmprojPath && !this.isAbsolutePath(mmprojPath))
       mmprojPath = await joinPath([await getJanDataFolderPath(), path])
     try {
-      const result = await planModelLoadInternal(path, this.memoryMode, mmprojPath, requestedCtx)
+      const result = await planModelLoadInternal(
+        path,
+        this.memoryMode,
+        mmprojPath,
+        requestedCtx
+      )
       return result
     } catch (e) {
       throw new Error(String(e))
@@ -2279,12 +2291,18 @@ export default class llamacpp_extension extends AIEngine {
     }

     // Calculate text tokens
-    const messages = JSON.stringify({ messages: opts.messages })
+    // Use chat_template_kwargs from opts if provided, otherwise default to disable enable_thinking
+    const tokenizeRequest = {
+      messages: opts.messages,
+      chat_template_kwargs: opts.chat_template_kwargs || {
+        enable_thinking: false,
+      },
+    }
     let parseResponse = await fetch(`${baseUrl}/apply-template`, {
       method: 'POST',
       headers: headers,
-      body: messages,
+      body: JSON.stringify(tokenizeRequest),
     })

     if (!parseResponse.ok) {
diff --git a/extensions/llamacpp-extension/src/type.d.ts b/extensions/llamacpp-extension/src/type.d.ts
new file mode 100644
index 000000000..88fc84a17
--- /dev/null
+++ b/extensions/llamacpp-extension/src/type.d.ts
@@ -0,0 +1,12 @@
+export {}
+
+declare global {
+  interface RequestInit {
+    /**
+     * Tauri HTTP plugin option for connection timeout in milliseconds.
+     */
+    connectTimeout?: number
+  }
+}
+
+
diff --git a/extensions/llamacpp-extension/src/util.ts b/extensions/llamacpp-extension/src/util.ts
index 1511eafec..b72766579 100644
--- a/extensions/llamacpp-extension/src/util.ts
+++ b/extensions/llamacpp-extension/src/util.ts
@@ -1,3 +1,23 @@
+// File path utilities
+export function basenameNoExt(filePath: string): string {
+  const VALID_EXTENSIONS = [".tar.gz", ".zip"];
+
+  // handle VALID extensions first
+  for (const ext of VALID_EXTENSIONS) {
+    if (filePath.toLowerCase().endsWith(ext)) {
+      return filePath.slice(0, -ext.length);
+    }
+  }
+
+  // fallback: remove only the last extension
+  const lastDotIndex = filePath.lastIndexOf('.');
+  if (lastDotIndex > 0) {
+    return filePath.slice(0, lastDotIndex);
+  }
+
+  return filePath;
+}
+
 // Zustand proxy state structure
 interface ProxyState {
   proxyEnabled: boolean
diff --git a/extensions/yarn.lock b/extensions/yarn.lock
index 0751c0069..f4a58c14f 100644
--- a/extensions/yarn.lock
+++ b/extensions/yarn.lock
@@ -342,41 +342,41 @@ __metadata:
 "@janhq/core@file:../../core/package.tgz::locator=%40janhq%2Fassistant-extension%40workspace%3Aassistant-extension":
   version: 0.1.10
-  resolution:
"@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f9bdfe&locator=%40janhq%2Fconversational-extension%40workspace%3Aconversational-extension" + resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f15485&locator=%40janhq%2Fconversational-extension%40workspace%3Aconversational-extension" dependencies: rxjs: "npm:^7.8.1" ulidx: "npm:^2.3.0" - checksum: 10c0/417ea9bd3e5b53264596d2ee816c3e24299f8b721f6ea951d078342555da457ebca4d5b1e116bf187ac77ec0a9e3341211d464f4ffdbd2a3915139523688d41d + checksum: 10c0/257621cb56db31a4dd3a2b509ec4c61217022e74bbd39cf6a1a172073654b9a65eee94ef9c1b4d4f5d2231d159c8818cb02846f3d88fe14f102f43169ad3737c languageName: node linkType: hard "@janhq/core@file:../../core/package.tgz::locator=%40janhq%2Fdownload-extension%40workspace%3Adownload-extension": version: 0.1.10 - resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f9bdfe&locator=%40janhq%2Fdownload-extension%40workspace%3Adownload-extension" + resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f15485&locator=%40janhq%2Fdownload-extension%40workspace%3Adownload-extension" dependencies: rxjs: "npm:^7.8.1" ulidx: "npm:^2.3.0" - checksum: 10c0/417ea9bd3e5b53264596d2ee816c3e24299f8b721f6ea951d078342555da457ebca4d5b1e116bf187ac77ec0a9e3341211d464f4ffdbd2a3915139523688d41d + checksum: 10c0/257621cb56db31a4dd3a2b509ec4c61217022e74bbd39cf6a1a172073654b9a65eee94ef9c1b4d4f5d2231d159c8818cb02846f3d88fe14f102f43169ad3737c languageName: node linkType: hard "@janhq/core@file:../../core/package.tgz::locator=%40janhq%2Fllamacpp-extension%40workspace%3Allamacpp-extension": version: 0.1.10 - resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f9bdfe&locator=%40janhq%2Fllamacpp-extension%40workspace%3Allamacpp-extension" + resolution: "@janhq/core@file:../../core/package.tgz#../../core/package.tgz::hash=f15485&locator=%40janhq%2Fllamacpp-extension%40workspace%3Allamacpp-extension" dependencies: rxjs: "npm:^7.8.1" ulidx: "npm:^2.3.0" - checksum: 10c0/417ea9bd3e5b53264596d2ee816c3e24299f8b721f6ea951d078342555da457ebca4d5b1e116bf187ac77ec0a9e3341211d464f4ffdbd2a3915139523688d41d + checksum: 10c0/257621cb56db31a4dd3a2b509ec4c61217022e74bbd39cf6a1a172073654b9a65eee94ef9c1b4d4f5d2231d159c8818cb02846f3d88fe14f102f43169ad3737c languageName: node linkType: hard diff --git a/package.json b/package.json index 50eb8ecaf..386721def 100644 --- a/package.json +++ b/package.json @@ -12,6 +12,8 @@ "scripts": { "lint": "yarn workspace @janhq/web-app lint", "dev": "yarn dev:tauri", + "ios": "yarn tauri ios dev", + "android": "yarn tauri android dev", "build": "yarn build:web && yarn build:tauri", "test": "vitest run", "test:watch": "vitest", @@ -24,12 +26,17 @@ "serve:web-app": "yarn workspace @janhq/web-app serve:web", "build:serve:web-app": "yarn build:web-app && yarn serve:web-app", "dev:tauri": "yarn build:icon && yarn copy:assets:tauri && cross-env IS_CLEAN=true tauri dev", + "dev:ios": "yarn copy:assets:mobile && RUSTC_WRAPPER= cross-env IS_IOS=true yarn tauri ios dev --features mobile", + "dev:android": "yarn copy:assets:mobile && cross-env IS_ANDROID=true yarn tauri android dev --features mobile", + "build:android": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_CLEAN=true yarn tauri android build -- --no-default-features --features mobile", + "build:ios": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_IOS=true yarn tauri ios build -- --no-default-features --features mobile", + 
"build:ios:device": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_IOS=true yarn tauri ios build -- --no-default-features --features mobile --export-method debugging", "copy:assets:tauri": "cpx \"pre-install/*.tgz\" \"src-tauri/resources/pre-install/\" && cpx \"LICENSE\" \"src-tauri/resources/\"", + "copy:assets:mobile": "cpx \"pre-install/*.tgz\" \"src-tauri/resources/pre-install/\" && cpx \"LICENSE\" \"src-tauri/resources/\"", "download:lib": "node ./scripts/download-lib.mjs", "download:bin": "node ./scripts/download-bin.mjs", - "download:windows-installer": "node ./scripts/download-win-installer-deps.mjs", - "build:tauri:win32": "yarn download:bin && yarn download:lib && yarn download:windows-installer && yarn tauri build", - "build:tauri:linux": "yarn download:bin && yarn download:lib && NO_STRIP=1 ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build && ./src-tauri/build-utils/buildAppImage.sh", + "build:tauri:win32": "yarn download:bin && yarn tauri build", + "build:tauri:linux": "yarn download:bin && NO_STRIP=1 ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build && ./src-tauri/build-utils/buildAppImage.sh", "build:tauri:darwin": "yarn download:bin && yarn tauri build --target universal-apple-darwin", "build:tauri": "yarn build:icon && yarn copy:assets:tauri && run-script-os", "build:tauri:plugin:api": "cd src-tauri/plugins && yarn install && yarn workspaces foreach -Apt run build", @@ -58,7 +65,9 @@ "hoistingLimits": "workspaces" }, "resolutions": { - "yallist": "4.0.0" + "yallist": "4.0.0", + "@types/react": "19.1.2", + "@types/react-dom": "19.1.2" }, "packageManager": "yarn@4.5.3" } diff --git a/scripts/download-bin.mjs b/scripts/download-bin.mjs index 36e17b3f0..68f09bf5f 100644 --- a/scripts/download-bin.mjs +++ b/scripts/download-bin.mjs @@ -1,4 +1,3 @@ -console.log('Script is running') // scripts/download.js import https from 'https' import fs, { copyFile, mkdirSync } from 'fs' @@ -69,7 +68,10 @@ function getPlatformArch() { arch === 'arm64' ? 'aarch64-apple-darwin' : 'x86_64-apple-darwin' } else if (platform === 'linux') { bunPlatform = arch === 'arm64' ? 'linux-aarch64' : 'linux-x64' - uvPlatform = arch === 'arm64' ? 'aarch64-unknown-linux-gnu' : 'x86_64-unknown-linux-gnu' + uvPlatform = + arch === 'arm64' + ? 
'aarch64-unknown-linux-gnu' + : 'x86_64-unknown-linux-gnu' } else if (platform === 'win32') { bunPlatform = 'windows-x64' // Bun has limited Windows support uvPlatform = 'x86_64-pc-windows-msvc' @@ -81,6 +83,10 @@ function getPlatformArch() { } async function main() { + if (process.env.SKIP_BINARIES) { + console.log('Skipping binaries download.') + process.exit(0) + } console.log('Starting main function') const platform = os.platform() const { bunPlatform, uvPlatform } = getPlatformArch() @@ -100,13 +106,11 @@ async function main() { } // Adjust these URLs based on latest releases - const bunVersion = '1.2.10' // Example Bun version - const bunUrl = `https://github.com/oven-sh/bun/releases/download/bun-v${bunVersion}/bun-${bunPlatform}.zip` + const bunUrl = `https://github.com/oven-sh/bun/releases/latest/download/bun-${bunPlatform}.zip` - const uvVersion = '0.6.17' // Example UV version - let uvUrl = `https://github.com/astral-sh/uv/releases/download/${uvVersion}/uv-${uvPlatform}.tar.gz` + let uvUrl = `https://github.com/astral-sh/uv/releases/latest/download/uv-${uvPlatform}.tar.gz` if (platform === 'win32') { - uvUrl = `https://github.com/astral-sh/uv/releases/download/${uvVersion}/uv-${uvPlatform}.zip` + uvUrl = `https://github.com/astral-sh/uv/releases/latest/download/uv-${uvPlatform}.zip` } console.log(`Downloading Bun for ${bunPlatform}...`) @@ -124,29 +128,45 @@ async function main() { if (err) { console.log('Add execution permission failed!', err) } - }); + }) if (platform === 'darwin') { - copyFile(path.join(binDir, 'bun'), path.join(binDir, 'bun-x86_64-apple-darwin'), (err) => { - if (err) { - console.log("Error Found:", err); - } - }) - copyFile(path.join(binDir, 'bun'), path.join(binDir, 'bun-aarch64-apple-darwin'), (err) => { - if (err) { - console.log("Error Found:", err); - } - }) - copyFile(path.join(binDir, 'bun'), path.join(binDir, 'bun-universal-apple-darwin'), (err) => { + copyFile( + path.join(binDir, 'bun'), + path.join(binDir, 'bun-x86_64-apple-darwin'), + (err) => { if (err) { - console.log("Error Found:", err); + console.log('Error Found:', err) } - }) - } else if (platform === 'linux') { - copyFile(path.join(binDir, 'bun'), path.join(binDir, 'bun-x86_64-unknown-linux-gnu'), (err) => { - if (err) { - console.log("Error Found:", err); } - }) + ) + copyFile( + path.join(binDir, 'bun'), + path.join(binDir, 'bun-aarch64-apple-darwin'), + (err) => { + if (err) { + console.log('Error Found:', err) + } + } + ) + copyFile( + path.join(binDir, 'bun'), + path.join(binDir, 'bun-universal-apple-darwin'), + (err) => { + if (err) { + console.log('Error Found:', err) + } + } + ) + } else if (platform === 'linux') { + copyFile( + path.join(binDir, 'bun'), + path.join(binDir, 'bun-x86_64-unknown-linux-gnu'), + (err) => { + if (err) { + console.log('Error Found:', err) + } + } + ) } } catch (err) { // Expect EEXIST error @@ -157,11 +177,15 @@ async function main() { path.join(binDir) ) if (platform === 'win32') { - copyFile(path.join(binDir, 'bun.exe'), path.join(binDir, 'bun-x86_64-pc-windows-msvc.exe'), (err) => { - if (err) { - console.log("Error Found:", err); + copyFile( + path.join(binDir, 'bun.exe'), + path.join(binDir, 'bun-x86_64-pc-windows-msvc.exe'), + (err) => { + if (err) { + console.log('Error Found:', err) + } } - }) + ) } } catch (err) { // Expect EEXIST error @@ -176,52 +200,66 @@ async function main() { await decompress(uvPath, tempBinDir) } try { - copySync( - path.join(tempBinDir, `uv-${uvPlatform}`, 'uv'), - path.join(binDir) - ) + copySync(path.join(tempBinDir, 
`uv-${uvPlatform}`, 'uv'), path.join(binDir)) fs.chmod(path.join(binDir, 'uv'), 0o755, (err) => { if (err) { console.log('Add execution permission failed!', err) } - }); + }) if (platform === 'darwin') { - copyFile(path.join(binDir, 'uv'), path.join(binDir, 'uv-x86_64-apple-darwin'), (err) => { - if (err) { - console.log("Error Found:", err); + copyFile( + path.join(binDir, 'uv'), + path.join(binDir, 'uv-x86_64-apple-darwin'), + (err) => { + if (err) { + console.log('Error Found:', err) + } } - }) - copyFile(path.join(binDir, 'uv'), path.join(binDir, 'uv-aarch64-apple-darwin'), (err) => { - if (err) { - console.log("Error Found:", err); + ) + copyFile( + path.join(binDir, 'uv'), + path.join(binDir, 'uv-aarch64-apple-darwin'), + (err) => { + if (err) { + console.log('Error Found:', err) + } } - }) - copyFile(path.join(binDir, 'uv'), path.join(binDir, 'uv-universal-apple-darwin'), (err) => { - if (err) { - console.log("Error Found:", err); + ) + copyFile( + path.join(binDir, 'uv'), + path.join(binDir, 'uv-universal-apple-darwin'), + (err) => { + if (err) { + console.log('Error Found:', err) + } } - }) + ) } else if (platform === 'linux') { - copyFile(path.join(binDir, 'uv'), path.join(binDir, 'uv-x86_64-unknown-linux-gnu'), (err) => { - if (err) { - console.log("Error Found:", err); + copyFile( + path.join(binDir, 'uv'), + path.join(binDir, 'uv-x86_64-unknown-linux-gnu'), + (err) => { + if (err) { + console.log('Error Found:', err) + } } - }) + ) } } catch (err) { // Expect EEXIST error } try { - copySync( - path.join(tempBinDir, 'uv.exe'), - path.join(binDir) - ) + copySync(path.join(tempBinDir, 'uv.exe'), path.join(binDir)) if (platform === 'win32') { - copyFile(path.join(binDir, 'uv.exe'), path.join(binDir, 'uv-x86_64-pc-windows-msvc.exe'), (err) => { - if (err) { - console.log("Error Found:", err); + copyFile( + path.join(binDir, 'uv.exe'), + path.join(binDir, 'uv-x86_64-pc-windows-msvc.exe'), + (err) => { + if (err) { + console.log('Error Found:', err) + } } - }) + ) } } catch (err) { // Expect EEXIST error diff --git a/scripts/download-lib.mjs b/scripts/download-lib.mjs deleted file mode 100644 index d2086b36e..000000000 --- a/scripts/download-lib.mjs +++ /dev/null @@ -1,86 +0,0 @@ -console.log('Script is running') -// scripts/download-lib.mjs -import https from 'https' -import fs, { mkdirSync } from 'fs' -import os from 'os' -import path from 'path' -import { copySync } from 'cpx' - -function download(url, dest) { - return new Promise((resolve, reject) => { - console.log(`Downloading ${url} to ${dest}`) - const file = fs.createWriteStream(dest) - https - .get(url, (response) => { - console.log(`Response status code: ${response.statusCode}`) - if ( - response.statusCode >= 300 && - response.statusCode < 400 && - response.headers.location - ) { - // Handle redirect - const redirectURL = response.headers.location - console.log(`Redirecting to ${redirectURL}`) - download(redirectURL, dest).then(resolve, reject) // Recursive call - return - } else if (response.statusCode !== 200) { - reject(`Failed to get '${url}' (${response.statusCode})`) - return - } - response.pipe(file) - file.on('finish', () => { - file.close(resolve) - }) - }) - .on('error', (err) => { - fs.unlink(dest, () => reject(err.message)) - }) - }) -} - -async function main() { - console.log('Starting main function') - const platform = os.platform() // 'darwin', 'linux', 'win32' - const arch = os.arch() // 'x64', 'arm64', etc. 
- - if (arch != 'x64') return - - let filename - if (platform == 'linux') - filename = 'libvulkan.so' - else if (platform == 'win32') - filename = 'vulkan-1.dll' - else - return - - const url = `https://catalog.jan.ai/${filename}` - - const libDir = 'src-tauri/resources/lib' - const tempDir = 'scripts/dist' - - try { - mkdirSync('scripts/dist') - } catch (err) { - // Expect EEXIST error if the directory already exists - } - - console.log(`Downloading libvulkan...`) - const savePath = path.join(tempDir, filename) - if (!fs.existsSync(savePath)) { - await download(url, savePath) - } - - // copy to tauri resources - try { - copySync(savePath, libDir) - } catch (err) { - // Expect EEXIST error - } - - console.log('Downloads completed.') -} - -main().catch((err) => { - console.error('Error:', err) - process.exit(1) -}) diff --git a/scripts/download-win-installer-deps.mjs b/scripts/download-win-installer-deps.mjs deleted file mode 100644 index 33bbbe04b..000000000 --- a/scripts/download-win-installer-deps.mjs +++ /dev/null @@ -1,83 +0,0 @@ -console.log('Downloading Windows installer dependencies...') -// scripts/download-win-installer-deps.mjs -import https from 'https' -import fs, { mkdirSync } from 'fs' -import os from 'os' -import path from 'path' -import { copySync } from 'cpx' - -function download(url, dest) { - return new Promise((resolve, reject) => { - console.log(`Downloading ${url} to ${dest}`) - const file = fs.createWriteStream(dest) - https - .get(url, (response) => { - console.log(`Response status code: ${response.statusCode}`) - if ( - response.statusCode >= 300 && - response.statusCode < 400 && - response.headers.location - ) { - // Handle redirect - const redirectURL = response.headers.location - console.log(`Redirecting to ${redirectURL}`) - download(redirectURL, dest).then(resolve, reject) // Recursive call - return - } else if (response.statusCode !== 200) { - reject(`Failed to get '${url}' (${response.statusCode})`) - return - } - response.pipe(file) - file.on('finish', () => { - file.close(resolve) - }) - }) - .on('error', (err) => { - fs.unlink(dest, () => reject(err.message)) - }) - }) -} - -async function main() { - console.log('Starting Windows installer dependencies download') - const platform = os.platform() // 'darwin', 'linux', 'win32' - const arch = os.arch() // 'x64', 'arm64', etc. 
- - if (arch != 'x64') return - - - const libDir = 'src-tauri/resources/lib' - const tempDir = 'scripts/dist' - - try { - mkdirSync('scripts/dist') - } catch (err) { - // Expect EEXIST error if the directory already exists - } - - // Download VC++ Redistributable 17 - if (platform == 'win32') { - const vcFilename = 'vc_redist.x64.exe' - const vcUrl = 'https://aka.ms/vs/17/release/vc_redist.x64.exe' - - console.log(`Downloading VC++ Redistributable...`) - const vcSavePath = path.join(tempDir, vcFilename) - if (!fs.existsSync(vcSavePath)) { - await download(vcUrl, vcSavePath) - } - - // copy to tauri resources - try { - copySync(vcSavePath, libDir) - } catch (err) { - // Expect EEXIST error - } - } - - console.log('Windows installer dependencies downloads completed.') -} - -main().catch((err) => { - console.error('Error:', err) - process.exit(1) -}) diff --git a/src-tauri/.cargo/config.toml b/src-tauri/.cargo/config.toml index 830adb1f1..9089e8115 100644 --- a/src-tauri/.cargo/config.toml +++ b/src-tauri/.cargo/config.toml @@ -3,3 +3,20 @@ # see https://github.com/tauri-apps/tauri/pull/4383#issuecomment-1212221864 __TAURI_WORKSPACE__ = "true" ENABLE_SYSTEM_TRAY_ICON = "false" + +[target.aarch64-linux-android] +linker = "aarch64-linux-android21-clang" +ar = "llvm-ar" +rustflags = ["-C", "link-arg=-fuse-ld=lld"] + +[target.armv7-linux-androideabi] +linker = "armv7a-linux-androideabi21-clang" +ar = "llvm-ar" + +[target.x86_64-linux-android] +linker = "x86_64-linux-android21-clang" +ar = "llvm-ar" + +[target.i686-linux-android] +linker = "i686-linux-android21-clang" +ar = "llvm-ar" diff --git a/src-tauri/.gitignore b/src-tauri/.gitignore index 40726cbe0..02bc782bf 100644 --- a/src-tauri/.gitignore +++ b/src-tauri/.gitignore @@ -2,6 +2,7 @@ # will have compiled files and executables /target/ /gen/schemas +/gen/android binaries !binaries/download.sh !binaries/download.bat \ No newline at end of file diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock index 85a90422a..ae5dadfca 100644 --- a/src-tauri/Cargo.lock +++ b/src-tauri/Cargo.lock @@ -23,6 +23,7 @@ dependencies = [ "serde", "serde_json", "serde_yaml", + "sqlx", "tar", "tauri", "tauri-build", @@ -85,6 +86,19 @@ dependencies = [ "version_check", ] +[[package]] +name = "ahash" +version = "0.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" +dependencies = [ + "cfg-if", + "getrandom 0.3.3", + "once_cell", + "version_check", + "zerocopy", +] + [[package]] name = "aho-corasick" version = "1.1.3" @@ -94,6 +108,12 @@ dependencies = [ "memchr", ] +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + [[package]] name = "android-tzdata" version = "0.1.1" @@ -149,11 +169,11 @@ checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "ash" -version = "0.38.0+1.3.281" +version = "0.37.3+1.3.251" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb44936d800fea8f016d7f2311c6a4f97aebd5dc86f09906139ec848cf3a46f" +checksum = "39e9c3835d686b0a6084ab4234fcd1b07dbf6e4767dce60874b12356a25ecd4a" dependencies = [ - "libloading 0.8.8", + "libloading 0.7.4", ] [[package]] @@ -166,7 +186,7 @@ dependencies = [ "futures-channel", "futures-util", "rand 0.9.2", - "raw-window-handle", + "raw-window-handle 0.6.2", "serde", "serde_repr", "tokio", @@ -331,6 
+351,15 @@ dependencies = [ "system-deps", ] +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + [[package]] name = "atomic-waker" version = "1.1.2" @@ -510,6 +539,20 @@ name = "bytemuck" version = "1.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c76a5792e44e4abe34d3abf15636779261d45a7450612059293d1d2cfc63422" +dependencies = [ + "bytemuck_derive", +] + +[[package]] +name = "bytemuck_derive" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f154e572231cb6ba2bd1176980827e3d5dc04cc183a75dea38109fbdd672d29" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] [[package]] name = "byteorder" @@ -707,6 +750,12 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + [[package]] name = "const-random" version = "0.1.18" @@ -802,11 +851,22 @@ checksum = "fa95a34622365fa5bbf40b20b75dba8dfa8c94c734aea8ac9a5ca38af14316f1" dependencies = [ "bitflags 2.9.1", "core-foundation 0.10.1", - "core-graphics-types", + "core-graphics-types 0.2.0", "foreign-types 0.5.0", "libc", ] +[[package]] +name = "core-graphics-types" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45390e6114f68f718cc7a830514a96f903cccd70d02a8f6d9f643ac4ba45afaf" +dependencies = [ + "bitflags 1.3.2", + "core-foundation 0.9.4", + "libc", +] + [[package]] name = "core-graphics-types" version = "0.2.0" @@ -827,6 +887,21 @@ dependencies = [ "libc", ] +[[package]] +name = "crc" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + [[package]] name = "crc32fast" version = "1.5.0" @@ -845,6 +920,15 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "crossbeam-queue" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.21" @@ -980,6 +1064,17 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a" +[[package]] +name = "der" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + [[package]] name = "deranged" version = "0.4.0" @@ -1021,6 +1116,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", + "const-oid", "crypto-common", "subtle", ] @@ -1125,6 +1221,12 @@ dependencies = [ "litrs", ] +[[package]] +name = "dotenvy" +version = "0.15.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + [[package]] name = "downcast-rs" version = "1.2.1" @@ -1167,6 +1269,15 @@ version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] + [[package]] name = "embed-resource" version = "3.0.5" @@ -1268,6 +1379,17 @@ dependencies = [ "windows-sys 0.60.2", ] +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + [[package]] name = "event-listener" version = "5.4.1" @@ -1355,12 +1477,29 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "flume" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" +dependencies = [ + "futures-core", + "futures-sink", + "spin", +] + [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "foreign-types" version = "0.3.2" @@ -1470,6 +1609,17 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + [[package]] name = "futures-io" version = "0.3.31" @@ -1888,13 +2038,24 @@ dependencies = [ "tracing", ] +[[package]] +name = "half" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "459196ed295495a68f7d7fe1d84f6c4b7ff0e21fe3017b2f283c6fac3ad803c9" +dependencies = [ + "bytemuck", + "cfg-if", + "crunchy", +] + [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash", + "ahash 0.7.8", ] [[package]] @@ -1908,6 +2069,20 @@ name = "hashbrown" version = "0.15.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] + +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.4", +] [[package]] name = "heck" @@ -1933,6 +2108,15 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +[[package]] +name = "hkdf" +version = "0.12.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + [[package]] name = "hmac" version = "0.12.1" @@ -2081,6 +2265,20 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.32", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + [[package]] name = "hyper-rustls" version = "0.27.7" @@ -2090,12 +2288,12 @@ dependencies = [ "http 1.3.1", "hyper 1.6.0", "hyper-util", - "rustls", + "rustls 0.23.31", "rustls-pki-types", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.2", "tower-service", - "webpki-roots", + "webpki-roots 1.0.2", ] [[package]] @@ -2508,6 +2706,9 @@ name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] [[package]] name = "libappindicator" @@ -2559,6 +2760,12 @@ dependencies = [ "windows-targets 0.53.3", ] +[[package]] +name = "libm" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" + [[package]] name = "libredox" version = "0.1.9" @@ -2570,6 +2777,17 @@ dependencies = [ "redox_syscall", ] +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + [[package]] name = "linux-raw-sys" version = "0.9.4" @@ -2619,6 +2837,15 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" +[[package]] +name = "malloc_buf" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb907fe88d54d8d9ce32a3cceab4218ed2f6b7d35617cafe9adf84e43919cb" +dependencies = [ + "libc", +] + [[package]] name = "markup5ever" version = "0.14.1" @@ -2650,6 +2877,16 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + [[package]] name = "memchr" version = "2.7.5" @@ -2747,7 +2984,7 @@ dependencies = [ "log", "ndk-sys", "num_enum", - "raw-window-handle", + "raw-window-handle 0.6.2", "thiserror 1.0.69", ] @@ -2800,12 +3037,49 @@ dependencies = [ "winapi", ] +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + [[package]] name = "num-conv" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" 
+[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.19" @@ -2813,6 +3087,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -2869,6 +3144,15 @@ dependencies = [ "libloading 0.8.8", ] +[[package]] +name = "objc" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1" +dependencies = [ + "malloc_buf", +] + [[package]] name = "objc-sys" version = "0.3.5" @@ -3177,6 +3461,15 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" +[[package]] +name = "openssl-src" +version = "300.5.2+3.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d270b79e2926f5150189d475bc7e9d2c69f9c4697b185fa917d5a32b792d21b4" +dependencies = [ + "cc", +] + [[package]] name = "openssl-sys" version = "0.9.109" @@ -3185,6 +3478,7 @@ checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" dependencies = [ "cc", "libc", + "openssl-src", "pkg-config", "vcpkg", ] @@ -3340,6 +3634,15 @@ dependencies = [ "sha2", ] +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + [[package]] name = "percent-encoding" version = "2.3.1" @@ -3503,6 +3806,27 @@ dependencies = [ "futures-io", ] +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + [[package]] name = "pkg-config" version = "0.3.32" @@ -3726,7 +4050,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash", - "rustls", + "rustls 0.23.31", "socket2 0.5.10", "thiserror 2.0.12", "tokio", @@ -3746,7 +4070,7 @@ dependencies = [ "rand 0.9.2", "ring", "rustc-hash", - "rustls", + "rustls 0.23.31", "rustls-pki-types", "slab", "thiserror 2.0.12", @@ -3900,6 +4224,12 @@ dependencies = [ "rand_core 0.5.1", ] +[[package]] +name = "raw-window-handle" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2ff9a1f06a88b01621b7ae906ef0211290d1c8a168a15542486a8f61c0833b9" + [[package]] name = "raw-window-handle" version = "0.6.2" @@ -3999,6 +4329,7 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "hyper 0.14.32", + "hyper-rustls 0.24.2", "hyper-tls", "ipnet", "js-sys", @@ -4008,6 +4339,7 @@ dependencies = [ "once_cell", "percent-encoding", 
"pin-project-lite", + "rustls 0.21.12", "rustls-pemfile", "serde", "serde_json", @@ -4016,6 +4348,7 @@ dependencies = [ "system-configuration 0.5.1", "tokio", "tokio-native-tls", + "tokio-rustls 0.24.1", "tokio-util", "tower-service", "url", @@ -4023,6 +4356,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", + "webpki-roots 0.25.4", "winreg 0.50.0", ] @@ -4044,7 +4378,7 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.6.0", - "hyper-rustls", + "hyper-rustls 0.27.7", "hyper-util", "js-sys", "log", @@ -4052,14 +4386,14 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls", + "rustls 0.23.31", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper 1.0.2", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.2", "tokio-util", "tower", "tower-http", @@ -4069,7 +4403,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots", + "webpki-roots 1.0.2", ] [[package]] @@ -4090,7 +4424,7 @@ dependencies = [ "objc2-app-kit", "objc2-core-foundation", "objc2-foundation 0.3.1", - "raw-window-handle", + "raw-window-handle 0.6.2", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -4180,6 +4514,26 @@ dependencies = [ "syn 2.0.104", ] +[[package]] +name = "rsa" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78928ac1ed176a5ca1d17e578a1825f3d81ca54cf41053a592584b020cfd691b" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "signature", + "spki", + "subtle", + "zeroize", +] + [[package]] name = "rust-ini" version = "0.21.2" @@ -4240,6 +4594,18 @@ dependencies = [ "windows-sys 0.60.2", ] +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki 0.101.7", + "sct", +] + [[package]] name = "rustls" version = "0.23.31" @@ -4249,7 +4615,7 @@ dependencies = [ "once_cell", "ring", "rustls-pki-types", - "rustls-webpki", + "rustls-webpki 0.103.4", "subtle", "zeroize", ] @@ -4273,6 +4639,16 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustls-webpki" version = "0.103.4" @@ -4391,6 +4767,16 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "seahash" version = "4.1.0" @@ -4688,6 +5074,16 @@ dependencies = [ "libc", ] +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core 0.6.4", +] + [[package]] name = "simd-adler32" version = "0.3.7" @@ -4723,6 +5119,9 @@ name = "smallvec" version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +dependencies = [ + "serde", +] [[package]] name = "socket2" @@ -4759,7 +5158,7 @@ dependencies = [ "objc2 0.5.2", "objc2-foundation 0.2.2", "objc2-quartz-core 0.2.2", - "raw-window-handle", + "raw-window-handle 0.6.2", "redox_syscall", "wasm-bindgen", "web-sys", @@ -4792,6 +5191,213 @@ dependencies = [ "system-deps", ] +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "sqlx" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" +dependencies = [ + "base64 0.22.1", + "bytes", + "crc", + "crossbeam-queue", + "either", + "event-listener", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashbrown 0.15.4", + "hashlink", + "indexmap 2.10.0", + "log", + "memchr", + "once_cell", + "percent-encoding", + "serde", + "serde_json", + "sha2", + "smallvec", + "thiserror 2.0.12", + "tokio", + "tokio-stream", + "tracing", + "url", +] + +[[package]] +name = "sqlx-macros" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn 2.0.104", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" +dependencies = [ + "dotenvy", + "either", + "heck 0.5.0", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn 2.0.104", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.9.1", + "byteorder", + "bytes", + "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa", + "serde", + "sha1", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.12", + "tracing", + "whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.9.1", + "byteorder", + "crc", + "dotenvy", + "etcetera", 
+ "futures-channel", + "futures-core", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "rand 0.8.5", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.12", + "tracing", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea" +dependencies = [ + "atoi", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "serde_urlencoded", + "sqlx-core", + "thiserror 2.0.12", + "tracing", + "url", +] + [[package]] name = "sse-stream" version = "0.2.1" @@ -4842,6 +5448,17 @@ dependencies = [ "quote", ] +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + [[package]] name = "strip-ansi-escapes" version = "0.2.1" @@ -5028,7 +5645,7 @@ dependencies = [ "objc2-foundation 0.3.1", "once_cell", "parking_lot", - "raw-window-handle", + "raw-window-handle 0.6.2", "scopeguard", "tao-macros", "unicode-segmentation", @@ -5103,7 +5720,7 @@ dependencies = [ "objc2-web-kit", "percent-encoding", "plist", - "raw-window-handle", + "raw-window-handle 0.6.2", "reqwest 0.12.22", "serde", "serde_json", @@ -5233,7 +5850,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37e5858cc7b455a73ab4ea2ebc08b5be33682c00ff1bf4cad5537d4fb62499d9" dependencies = [ "log", - "raw-window-handle", + "raw-window-handle 0.6.2", "rfd", "serde", "serde_json", @@ -5280,6 +5897,7 @@ dependencies = [ "sysinfo", "tauri", "tauri-plugin", + "vulkano", ] [[package]] @@ -5323,6 +5941,7 @@ dependencies = [ "sysinfo", "tauri", "tauri-plugin", + "tauri-plugin-hardware", "thiserror 2.0.12", "tokio", ] @@ -5488,7 +6107,7 @@ dependencies = [ "objc2 0.6.1", "objc2-ui-kit", "objc2-web-kit", - "raw-window-handle", + "raw-window-handle 0.6.2", "serde", "serde_json", "tauri-utils", @@ -5514,7 +6133,7 @@ dependencies = [ "objc2-foundation 0.3.1", "once_cell", "percent-encoding", - "raw-window-handle", + "raw-window-handle 0.6.2", "softbuffer", "tao", "tauri-runtime", @@ -5638,6 +6257,15 @@ dependencies = [ "syn 2.0.104", ] +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + [[package]] name = "time" version = "0.3.41" @@ -5747,13 +6375,23 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" dependencies = [ - "rustls", + "rustls 0.23.31", "tokio", ] @@ -5934,6 +6572,7 @@ version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ + "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -6051,12 +6690,33 @@ dependencies = [ "unic-common", ] +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + [[package]] name = "unicode-ident" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" + [[package]] name = "unicode-segmentation" version = "1.12.0" @@ -6153,6 +6813,15 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "vk-parse" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81086c28be67a8759cd80cbb3c8f7b520e0874605fc5eb74d5a1c9c2d1878e79" +dependencies = [ + "xml-rs", +] + [[package]] name = "vswhom" version = "0.1.0" @@ -6182,6 +6851,48 @@ dependencies = [ "memchr", ] +[[package]] +name = "vulkano" +version = "0.34.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a26f2897a92a30931fceef3d6d1156a1089d9681fb2be73be92bbf24ae2ddf2" +dependencies = [ + "ahash 0.8.12", + "ash", + "bytemuck", + "core-graphics-types 0.1.3", + "crossbeam-queue", + "half", + "heck 0.4.1", + "indexmap 2.10.0", + "libloading 0.8.8", + "objc", + "once_cell", + "parking_lot", + "proc-macro2", + "quote", + "raw-window-handle 0.5.2", + "regex", + "serde", + "serde_json", + "smallvec", + "thread_local", + "vk-parse", + "vulkano-macros", +] + +[[package]] +name = "vulkano-macros" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52be622d364272fd77e298e7f68e8547ae66e7687cb86eb85335412cee7e3965" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "walkdir" version = "2.5.0" @@ -6222,6 +6933,12 @@ dependencies = [ "wit-bindgen-rt", ] +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + [[package]] name = "wasm-bindgen" version = "0.2.100" @@ -6430,6 +7147,12 @@ dependencies = [ "system-deps", ] +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + [[package]] name = "webpki-roots" version = "1.0.2" @@ -6475,6 +7198,16 @@ dependencies = [ "windows-core 0.61.2", ] +[[package]] +name = "whoami" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" +dependencies = [ + "libredox", + "wasite", +] + [[package]] name = "winapi" version = "0.3.9" @@ -6516,7 
+7249,7 @@ dependencies = [ "objc2-app-kit", "objc2-core-foundation", "objc2-foundation 0.3.1", - "raw-window-handle", + "raw-window-handle 0.6.2", "windows-sys 0.59.0", "windows-version", ] @@ -7088,7 +7821,7 @@ dependencies = [ "objc2-web-kit", "once_cell", "percent-encoding", - "raw-window-handle", + "raw-window-handle 0.6.2", "sha2", "soup3", "tao-macros", @@ -7143,6 +7876,12 @@ dependencies = [ "rustix", ] +[[package]] +name = "xml-rs" +version = "0.8.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fd8403733700263c6eb89f192880191f1b83e332f7a20371ddcf421c4a337c7" + [[package]] name = "yoke" version = "0.8.0" diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index 51e84f880..79d6d1a4c 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -22,7 +22,20 @@ default = [ "tauri/macos-private-api", "tauri/tray-icon", "tauri/test", - "tauri/custom-protocol" + "tauri/custom-protocol", + "desktop", +] +hardware = ["dep:tauri-plugin-hardware"] +deep-link = ["dep:tauri-plugin-deep-link"] +desktop = [ + "deep-link", + "hardware" +] +mobile = [ + "tauri/protocol-asset", + "tauri/test", + "tauri/wry", + "dep:sqlx", ] test-tauri = [ "tauri/wry", @@ -31,6 +44,7 @@ test-tauri = [ "tauri/macos-private-api", "tauri/tray-icon", "tauri/test", + "desktop", ] [build-dependencies] @@ -46,7 +60,6 @@ hyper = { version = "0.14", features = ["server"] } jan-utils = { path = "./utils" } libloading = "0.8.7" log = "0.4" -reqwest = { version = "0.11", features = ["json", "blocking", "stream"] } rmcp = { version = "0.6.0", features = [ "client", "transport-sse-client", @@ -60,16 +73,17 @@ serde_json = "1.0" serde_yaml = "0.9.34" tar = "0.4" zip = "0.6" -tauri-plugin-deep-link = { version = "2.3.4" } tauri-plugin-dialog = "2.2.1" -tauri-plugin-hardware = { path = "./plugins/tauri-plugin-hardware" } -tauri-plugin-http = { version = "2", features = ["unsafe-headers"] } +tauri-plugin-deep-link = { version = "2", optional = true } +tauri-plugin-hardware = { path = "./plugins/tauri-plugin-hardware", optional = true } tauri-plugin-llamacpp = { path = "./plugins/tauri-plugin-llamacpp" } +tauri-plugin-http = { version = "2", features = ["unsafe-headers"] } tauri-plugin-log = "2.0.0-rc" tauri-plugin-opener = "2.2.7" tauri-plugin-os = "2.2.1" tauri-plugin-shell = "2.2.0" tauri-plugin-store = "2" +sqlx = { version = "0.8", features = ["runtime-tokio", "sqlite"], optional = true } thiserror = "2.0.12" tokio = { version = "1", features = ["full"] } tokio-util = "0.7.14" @@ -92,6 +106,30 @@ libc = "0.2.172" windows-sys = { version = "0.60.2", features = ["Win32_Storage_FileSystem"] } [target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies] +reqwest = { version = "0.11", features = ["json", "blocking", "stream", "native-tls-vendored"] } tauri-plugin-updater = "2" once_cell = "1.18" -tauri-plugin-single-instance = { version = "2.3.4", features = ["deep-link"] } +tauri-plugin-single-instance = { version = "2", features = ["deep-link"] } + +[target.'cfg(any(target_os = "android", target_os = "ios"))'.dependencies] +reqwest = { version = "0.11", features = ["json", "blocking", "stream", "rustls-tls"], default-features = false } +tauri-plugin-dialog = { version = "2.2.1", default-features = false } +tauri-plugin-http = { version = "2", default-features = false } +tauri-plugin-log = { version = "2.0.0-rc", default-features = false } +tauri-plugin-opener = { version = "2.2.7", default-features = false } +tauri-plugin-os = { version = "2.2.1", default-features = false 
} +tauri-plugin-shell = { version = "2.2.0", default-features = false } +tauri-plugin-store = { version = "2", default-features = false } + +# Release profile optimizations for minimal binary size +[profile.release] +opt-level = "z" # Optimize for size +lto = "fat" # Aggressive Link Time Optimization +strip = "symbols" # Strip debug symbols for smaller binary +codegen-units = 1 # Reduce parallel codegen for better optimization +panic = "abort" # Don't unwind on panic, saves space +overflow-checks = false # Disable overflow checks for size +debug = false # No debug info +debug-assertions = false # No debug assertions +incremental = false # Disable incremental compilation for release +rpath = false # Don't include rpath diff --git a/src-tauri/capabilities/default.json b/src-tauri/capabilities/default.json index e594bf023..5c5e7d48d 100644 --- a/src-tauri/capabilities/default.json +++ b/src-tauri/capabilities/default.json @@ -18,11 +18,10 @@ "os:default", "opener:default", "log:default", - "updater:default", "dialog:default", - "deep-link:default", "core:webview:allow-create-webview-window", "opener:allow-open-url", + "store:default", { "identifier": "http:default", "allow": [ @@ -54,9 +53,6 @@ "url": "http://0.0.0.0:*" } ] - }, - "store:default", - "llamacpp:default", - "hardware:default" + } ] } diff --git a/src-tauri/capabilities/desktop.json b/src-tauri/capabilities/desktop.json new file mode 100644 index 000000000..2182b9c03 --- /dev/null +++ b/src-tauri/capabilities/desktop.json @@ -0,0 +1,65 @@ +{ + "$schema": "../gen/schemas/desktop-schema.json", + "identifier": "desktop", + "description": "enables the default permissions for desktop platforms", + "windows": ["main"], + "remote": { + "urls": ["http://*"] + }, + "platforms": ["linux", "macOS", "windows"], + "permissions": [ + "core:default", + "core:webview:allow-set-webview-zoom", + "core:window:allow-start-dragging", + "core:window:allow-set-theme", + "core:window:allow-get-all-windows", + "core:event:allow-listen", + "shell:allow-spawn", + "shell:allow-open", + "core:app:allow-set-app-theme", + "core:window:allow-set-focus", + "os:default", + "opener:default", + "log:default", + "dialog:default", + "core:webview:allow-create-webview-window", + "opener:allow-open-url", + "store:default", + "llamacpp:default", + "deep-link:default", + "hardware:default", + + { + "identifier": "http:default", + "allow": [ + { + "url": "https://*:*" + }, + { + "url": "http://*:*" + } + ], + "deny": [] + }, + { + "identifier": "shell:allow-execute", + "allow": [] + }, + { + "identifier": "opener:allow-open-url", + "description": "opens the default permissions for the core module", + "windows": ["*"], + "allow": [ + { + "url": "https://*" + }, + { + "url": "http://127.0.0.1:*" + }, + { + "url": "http://0.0.0.0:*" + } + ] + } + ] +} \ No newline at end of file diff --git a/src-tauri/capabilities/log-app-window.json b/src-tauri/capabilities/log-app-window.json index 9f95d1bb9..1bc329ab4 100644 --- a/src-tauri/capabilities/log-app-window.json +++ b/src-tauri/capabilities/log-app-window.json @@ -1,14 +1,18 @@ { "$schema": "../gen/schemas/desktop-schema.json", - "identifier": "logs-app-window", + "identifier": "log-app-window", "description": "enables permissions for the logs app window", "windows": ["logs-app-window"], + "platforms": ["linux", "macOS", "windows"], "permissions": [ "core:default", "core:window:allow-start-dragging", "core:window:allow-set-theme", + "core:window:allow-get-all-windows", + "core:event:allow-listen", "log:default", 
"core:webview:allow-create-webview-window", + "core:webview:allow-get-all-webviews", "core:window:allow-set-focus" ] } diff --git a/src-tauri/capabilities/logs-window.json b/src-tauri/capabilities/logs-window.json index ef56e6f75..1a166f503 100644 --- a/src-tauri/capabilities/logs-window.json +++ b/src-tauri/capabilities/logs-window.json @@ -3,12 +3,16 @@ "identifier": "logs-window", "description": "enables permissions for the logs window", "windows": ["logs-window-local-api-server"], + "platforms": ["linux", "macOS", "windows"], "permissions": [ "core:default", "core:window:allow-start-dragging", "core:window:allow-set-theme", + "core:window:allow-get-all-windows", + "core:event:allow-listen", "log:default", "core:webview:allow-create-webview-window", + "core:webview:allow-get-all-webviews", "core:window:allow-set-focus" ] } diff --git a/src-tauri/capabilities/mobile.json b/src-tauri/capabilities/mobile.json new file mode 100644 index 000000000..fdbda476a --- /dev/null +++ b/src-tauri/capabilities/mobile.json @@ -0,0 +1,58 @@ +{ + "$schema": "../gen/schemas/mobile-schema.json", + "identifier": "mobile", + "description": "enables the default permissions for mobile platforms", + "windows": ["main"], + "remote": { + "urls": ["http://*"] + }, + "permissions": [ + "core:default", + "core:webview:allow-set-webview-zoom", + "core:window:allow-start-dragging", + "core:window:allow-set-theme", + "shell:allow-spawn", + "shell:allow-open", + "core:app:allow-set-app-theme", + "core:window:allow-set-focus", + "os:default", + "opener:default", + "log:default", + "dialog:default", + "core:webview:allow-create-webview-window", + "opener:allow-open-url", + "store:default", + { + "identifier": "http:default", + "allow": [ + { + "url": "https://*:*" + }, + { + "url": "http://*:*" + } + ], + "deny": [] + }, + { + "identifier": "shell:allow-execute", + "allow": [] + }, + { + "identifier": "opener:allow-open-url", + "description": "opens the default permissions for the core module", + "windows": ["*"], + "allow": [ + { + "url": "https://*" + }, + { + "url": "http://127.0.0.1:*" + }, + { + "url": "http://0.0.0.0:*" + } + ] + } + ] +} \ No newline at end of file diff --git a/src-tauri/capabilities/system-monitor-window.json b/src-tauri/capabilities/system-monitor-window.json index 740bb82cc..cec43f8d8 100644 --- a/src-tauri/capabilities/system-monitor-window.json +++ b/src-tauri/capabilities/system-monitor-window.json @@ -3,17 +3,33 @@ "identifier": "system-monitor-window", "description": "enables permissions for the system monitor window", "windows": ["system-monitor-window"], + "platforms": ["linux", "macOS", "windows"], "permissions": [ "core:default", "core:window:allow-start-dragging", "core:window:allow-set-theme", + "core:window:allow-get-all-windows", + "core:event:allow-listen", "log:default", "core:webview:allow-create-webview-window", + "core:webview:allow-get-all-webviews", "core:window:allow-set-focus", "hardware:allow-get-system-info", "hardware:allow-get-system-usage", "llamacpp:allow-get-devices", "llamacpp:allow-read-gguf-metadata", - "deep-link:allow-get-current" + "deep-link:allow-get-current", + { + "identifier": "http:default", + "allow": [ + { + "url": "https://*:*" + }, + { + "url": "http://*:*" + } + ], + "deny": [] + } ] } diff --git a/src-tauri/gen/android/app/src/main/assets/resources/LICENSE b/src-tauri/gen/android/app/src/main/assets/resources/LICENSE new file mode 100644 index 000000000..d614b967f --- /dev/null +++ b/src-tauri/gen/android/app/src/main/assets/resources/LICENSE @@ 
-0,0 +1,19 @@ +Jan + +Copyright 2025 Menlo Research + +This product includes software developed by Menlo Research (https://menlo.ai). + +Licensed under the Apache License, Version 2.0 (the "License"); +You may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Attribution is requested in user-facing documentation and materials, where appropriate. \ No newline at end of file diff --git a/src-tauri/plugins/tauri-plugin-hardware/Cargo.toml b/src-tauri/plugins/tauri-plugin-hardware/Cargo.toml index eb74d32d1..5e6f983fc 100644 --- a/src-tauri/plugins/tauri-plugin-hardware/Cargo.toml +++ b/src-tauri/plugins/tauri-plugin-hardware/Cargo.toml @@ -11,15 +11,19 @@ exclude = ["/examples", "/dist-js", "/guest-js", "/node_modules"] links = "tauri-plugin-hardware" [dependencies] -ash = "0.38.0" libc = "0.2" log = "0.4" -nvml-wrapper = "0.10.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" sysinfo = "0.34.2" tauri = { version = "2.5.0", default-features = false, features = ["test"] } +# Desktop-only dependencies +[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies] +vulkano = "0.34" +ash = "0.37" +nvml-wrapper = "0.10.0" + # Windows-specific dependencies [target.'cfg(windows)'.dependencies] libloading = "0.8" diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/commands.rs b/src-tauri/plugins/tauri-plugin-hardware/src/commands.rs index 56e78f1c1..ac13eb7f2 100644 --- a/src-tauri/plugins/tauri-plugin-hardware/src/commands.rs +++ b/src-tauri/plugins/tauri-plugin-hardware/src/commands.rs @@ -1,14 +1,12 @@ use crate::{ - helpers::get_jan_libvulkan_path, types::{CpuStaticInfo, SystemInfo, SystemUsage}, vendor::{nvidia, vulkan}, SYSTEM_INFO, }; use sysinfo::System; -use tauri::Runtime; #[tauri::command] -pub fn get_system_info(app: tauri::AppHandle) -> SystemInfo { +pub fn get_system_info() -> SystemInfo { SYSTEM_INFO .get_or_init(|| { let mut system = System::new(); @@ -19,15 +17,7 @@ pub fn get_system_info(app: tauri::AppHandle) -> SystemInfo { gpu_map.insert(gpu.uuid.clone(), gpu); } - // try system vulkan first - let paths = vec!["".to_string(), get_jan_libvulkan_path(app.clone())]; - let mut vulkan_gpus = vec![]; - for path in paths { - vulkan_gpus = vulkan::get_vulkan_gpus(&path); - if !vulkan_gpus.is_empty() { - break; - } - } + let vulkan_gpus = vulkan::get_vulkan_gpus(); for gpu in vulkan_gpus { match gpu_map.get_mut(&gpu.uuid) { @@ -64,7 +54,7 @@ pub fn get_system_info(app: tauri::AppHandle) -> SystemInfo { } #[tauri::command] -pub fn get_system_usage(app: tauri::AppHandle) -> SystemUsage { +pub fn get_system_usage() -> SystemUsage { let mut system = System::new(); system.refresh_memory(); @@ -81,7 +71,7 @@ pub fn get_system_usage(app: tauri::AppHandle) -> SystemUsage { cpu: cpu_usage, used_memory: system.used_memory() / 1024 / 1024, // bytes to MiB, total_memory: system.total_memory() / 1024 / 1024, // bytes to MiB, - gpus: get_system_info(app.clone()) + gpus: get_system_info() .gpus .iter() .map(|gpu| gpu.get_usage()) diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/helpers.rs b/src-tauri/plugins/tauri-plugin-hardware/src/helpers.rs 
deleted file mode 100644 index 22bcc8669..000000000 --- a/src-tauri/plugins/tauri-plugin-hardware/src/helpers.rs +++ /dev/null @@ -1,20 +0,0 @@ -use tauri::{path::BaseDirectory, Manager, Runtime}; - -pub fn get_jan_libvulkan_path(app: tauri::AppHandle) -> String { - let lib_name = if cfg!(target_os = "windows") { - "vulkan-1.dll" - } else if cfg!(target_os = "linux") { - "libvulkan.so" - } else { - return "".to_string(); - }; - - // NOTE: this does not work in test mode (mock app) - match app.path().resolve( - format!("resources/lib/{}", lib_name), - BaseDirectory::Resource, - ) { - Ok(lib_path) => lib_path.to_string_lossy().to_string(), - Err(_) => "".to_string(), - } -} diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/lib.rs b/src-tauri/plugins/tauri-plugin-hardware/src/lib.rs index 228a3731e..3a069892e 100644 --- a/src-tauri/plugins/tauri-plugin-hardware/src/lib.rs +++ b/src-tauri/plugins/tauri-plugin-hardware/src/lib.rs @@ -2,12 +2,10 @@ mod commands; mod constants; pub mod cpu; pub mod gpu; -mod helpers; mod types; pub mod vendor; pub use constants::*; -pub use helpers::*; pub use types::*; use std::sync::OnceLock; diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/tests.rs b/src-tauri/plugins/tauri-plugin-hardware/src/tests.rs index 1d4975104..f27554579 100644 --- a/src-tauri/plugins/tauri-plugin-hardware/src/tests.rs +++ b/src-tauri/plugins/tauri-plugin-hardware/src/tests.rs @@ -4,15 +4,13 @@ use tauri::test::mock_app; #[test] fn test_system_info() { - let app = mock_app(); - let info = get_system_info(app.handle().clone()); + let info = get_system_info(); println!("System Static Info: {:?}", info); } #[test] fn test_system_usage() { - let app = mock_app(); - let usage = get_system_usage(app.handle().clone()); + let usage = get_system_usage(); println!("System Usage Info: {:?}", usage); } @@ -23,23 +21,23 @@ mod cpu_tests { #[test] fn test_cpu_static_info_new() { let cpu_info = CpuStaticInfo::new(); - + // Test that all fields are populated assert!(!cpu_info.name.is_empty()); assert_ne!(cpu_info.name, "unknown"); // Should have detected a CPU name assert!(cpu_info.core_count > 0); assert!(!cpu_info.arch.is_empty()); - + // Architecture should be one of the expected values assert!( - cpu_info.arch == "aarch64" || - cpu_info.arch == "arm64" || - cpu_info.arch == "x86_64" || - cpu_info.arch == std::env::consts::ARCH + cpu_info.arch == "aarch64" + || cpu_info.arch == "arm64" + || cpu_info.arch == "x86_64" + || cpu_info.arch == std::env::consts::ARCH ); - + // Extensions should be a valid list (can be empty on non-x86) - + println!("CPU Info: {:?}", cpu_info); } @@ -48,7 +46,7 @@ mod cpu_tests { // Test that multiple calls return consistent information let info1 = CpuStaticInfo::new(); let info2 = CpuStaticInfo::new(); - + assert_eq!(info1.name, info2.name); assert_eq!(info1.core_count, info2.core_count); assert_eq!(info1.arch, info2.arch); @@ -72,19 +70,41 @@ mod cpu_tests { #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] fn test_x86_extensions() { let cpu_info = CpuStaticInfo::new(); - + // On x86/x86_64, we should always have at least FPU assert!(cpu_info.extensions.contains(&"fpu".to_string())); - + // Check that all extensions are valid x86 feature names let valid_extensions = [ - "fpu", "mmx", "sse", "sse2", "sse3", "ssse3", "sse4_1", "sse4_2", - "pclmulqdq", "avx", "avx2", "avx512_f", "avx512_dq", "avx512_ifma", - "avx512_pf", "avx512_er", "avx512_cd", "avx512_bw", "avx512_vl", - "avx512_vbmi", "avx512_vbmi2", "avx512_vnni", "avx512_bitalg", - 
"avx512_vpopcntdq", "avx512_vp2intersect", "aes", "f16c" + "fpu", + "mmx", + "sse", + "sse2", + "sse3", + "ssse3", + "sse4_1", + "sse4_2", + "pclmulqdq", + "avx", + "avx2", + "avx512_f", + "avx512_dq", + "avx512_ifma", + "avx512_pf", + "avx512_er", + "avx512_cd", + "avx512_bw", + "avx512_vl", + "avx512_vbmi", + "avx512_vbmi2", + "avx512_vnni", + "avx512_bitalg", + "avx512_vpopcntdq", + "avx512_vp2intersect", + "aes", + "f16c", ]; - + for ext in &cpu_info.extensions { assert!( valid_extensions.contains(&ext.as_str()), @@ -98,7 +118,7 @@ mod cpu_tests { #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))] fn test_non_x86_extensions() { let cpu_info = CpuStaticInfo::new(); - + // On non-x86 architectures, extensions should be empty assert!(cpu_info.extensions.is_empty()); } @@ -106,15 +126,15 @@ mod cpu_tests { #[test] fn test_arch_detection() { let cpu_info = CpuStaticInfo::new(); - + // Architecture should be a valid string assert!(!cpu_info.arch.is_empty()); - + // Should be one of the common architectures let common_archs = ["x86_64", "aarch64", "arm", "arm64", "x86"]; let is_common_arch = common_archs.iter().any(|&arch| cpu_info.arch == arch); let is_compile_time_arch = cpu_info.arch == std::env::consts::ARCH; - + assert!( is_common_arch || is_compile_time_arch, "Unexpected architecture: {}", @@ -125,11 +145,11 @@ mod cpu_tests { #[test] fn test_cpu_info_serialization() { let cpu_info = CpuStaticInfo::new(); - + // Test that the struct can be serialized (since it derives Serialize) let serialized = serde_json::to_string(&cpu_info); assert!(serialized.is_ok()); - + let json_str = serialized.unwrap(); assert!(json_str.contains("name")); assert!(json_str.contains("core_count")); diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/vendor/amd.rs b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/amd.rs index 62d90ca1b..7521fd2b0 100644 --- a/src-tauri/plugins/tauri-plugin-hardware/src/vendor/amd.rs +++ b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/amd.rs @@ -126,13 +126,13 @@ mod windows_impl { pub iOSDisplayIndex: c_int, } - type ADL_MAIN_MALLOC_CALLBACK = Option *mut c_void>; - type ADL_MAIN_CONTROL_CREATE = unsafe extern "C" fn(ADL_MAIN_MALLOC_CALLBACK, c_int) -> c_int; - type ADL_MAIN_CONTROL_DESTROY = unsafe extern "C" fn() -> c_int; - type ADL_ADAPTER_NUMBEROFADAPTERS_GET = unsafe extern "C" fn(*mut c_int) -> c_int; - type ADL_ADAPTER_ADAPTERINFO_GET = unsafe extern "C" fn(*mut AdapterInfo, c_int) -> c_int; - type ADL_ADAPTER_ACTIVE_GET = unsafe extern "C" fn(c_int, *mut c_int) -> c_int; - type ADL_GET_DEDICATED_VRAM_USAGE = + type AdlMainMallocCallback = Option *mut c_void>; + type ADLMAINCONTROLCREATE = unsafe extern "C" fn(AdlMainMallocCallback, c_int) -> c_int; + type ADLMAINCONTROLDESTROY = unsafe extern "C" fn() -> c_int; + type AdlAdapterNumberofadaptersGet = unsafe extern "C" fn(*mut c_int) -> c_int; + type AdlAdapterAdapterinfoGet = unsafe extern "C" fn(*mut AdapterInfo, c_int) -> c_int; + type AdlAdapterActiveGet = unsafe extern "C" fn(c_int, *mut c_int) -> c_int; + type AdlGetDedicatedVramUsage = unsafe extern "C" fn(*mut c_void, c_int, *mut c_int) -> c_int; // === ADL Memory Allocator === @@ -144,24 +144,24 @@ mod windows_impl { unsafe { let lib = Library::new("atiadlxx.dll").or_else(|_| Library::new("atiadlxy.dll"))?; - let adl_main_control_create: Symbol = - lib.get(b"ADL_Main_Control_Create")?; - let adl_main_control_destroy: Symbol = - lib.get(b"ADL_Main_Control_Destroy")?; - let adl_adapter_number_of_adapters_get: Symbol = - 
lib.get(b"ADL_Adapter_NumberOfAdapters_Get")?; - let adl_adapter_adapter_info_get: Symbol = - lib.get(b"ADL_Adapter_AdapterInfo_Get")?; - let adl_adapter_active_get: Symbol = - lib.get(b"ADL_Adapter_Active_Get")?; - let adl_get_dedicated_vram_usage: Symbol = + let adlmaincontrolcreate: Symbol = + lib.get(b"AdlMainControlCreate")?; + let adlmaincontroldestroy: Symbol = + lib.get(b"AdlMainControlDestroy")?; + let adl_adapter_number_of_adapters_get: Symbol = + lib.get(b"AdlAdapterNumberofadaptersGet")?; + let adl_adapter_adapter_info_get: Symbol = + lib.get(b"AdlAdapterAdapterinfoGet")?; + let AdlAdapterActiveGet: Symbol = + lib.get(b"AdlAdapterActiveGet")?; + let AdlGetDedicatedVramUsage: Symbol = lib.get(b"ADL2_Adapter_DedicatedVRAMUsage_Get")?; // TODO: try to put nullptr here. then we don't need direct libc dep - if adl_main_control_create(Some(adl_malloc), 1) != 0 { + if adlmaincontrolcreate(Some(adl_malloc), 1) != 0 { return Err("ADL initialization error!".into()); } - // NOTE: after this call, we must call ADL_Main_Control_Destroy + // NOTE: after this call, we must call AdlMainControlDestroy // whenver we encounter an error let mut num_adapters: c_int = 0; @@ -184,11 +184,11 @@ mod windows_impl { for adapter in adapter_info.iter() { let mut is_active = 0; - adl_adapter_active_get(adapter.iAdapterIndex, &mut is_active); + AdlAdapterActiveGet(adapter.iAdapterIndex, &mut is_active); if is_active != 0 { let mut vram_mb = 0; - let _ = adl_get_dedicated_vram_usage( + let _ = AdlGetDedicatedVramUsage( ptr::null_mut(), adapter.iAdapterIndex, &mut vram_mb, @@ -202,7 +202,7 @@ mod windows_impl { } } - adl_main_control_destroy(); + adlmaincontroldestroy(); Ok(vram_usages) } diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/vendor/nvidia.rs b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/nvidia.rs index 006ca66ba..083c0fdae 100644 --- a/src-tauri/plugins/tauri-plugin-hardware/src/vendor/nvidia.rs +++ b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/nvidia.rs @@ -1,7 +1,13 @@ -use crate::types::{GpuInfo, GpuUsage, Vendor}; -use nvml_wrapper::{error::NvmlError, Nvml}; -use std::sync::OnceLock; +use crate::types::{GpuInfo, GpuUsage}; +#[cfg(not(any(target_os = "android", target_os = "ios")))] +use { + crate::types::Vendor, + nvml_wrapper::{error::NvmlError, Nvml}, + std::sync::OnceLock, +}; + +#[cfg(not(any(target_os = "android", target_os = "ios")))] static NVML: OnceLock> = OnceLock::new(); #[derive(Debug, Clone, serde::Serialize)] @@ -10,11 +16,13 @@ pub struct NvidiaInfo { pub compute_capability: String, } +#[cfg(not(any(target_os = "android", target_os = "ios")))] fn get_nvml() -> Option<&'static Nvml> { NVML.get_or_init(|| { + // Try to initialize NVML, with fallback for Linux let result = Nvml::init().or_else(|e| { - // fallback if cfg!(target_os = "linux") { + log::debug!("NVML init failed, trying Linux fallback: {}", e); let lib_path = std::ffi::OsStr::new("libnvidia-ml.so.1"); Nvml::builder().lib_path(lib_path).init() } else { @@ -22,11 +30,13 @@ fn get_nvml() -> Option<&'static Nvml> { } }); - // NvmlError doesn't implement Copy, so we have to store an Option in OnceLock match result { - Ok(nvml) => Some(nvml), + Ok(nvml) => { + log::debug!("NVML initialized successfully"); + Some(nvml) + } Err(e) => { - log::error!("Unable to initialize NVML: {}", e); + log::debug!("Unable to initialize NVML: {}", e); None } } @@ -36,70 +46,111 @@ fn get_nvml() -> Option<&'static Nvml> { impl GpuInfo { pub fn get_usage_nvidia(&self) -> GpuUsage { - let index = match self.nvidia_info { - 
-            Some(ref nvidia_info) => nvidia_info.index,
-            None => {
-                log::error!("get_usage_nvidia() called on non-NVIDIA GPU");
-                return self.get_usage_unsupported();
-            }
-        };
-        let closure = || -> Result<GpuUsage, NvmlError> {
-            let nvml = get_nvml().ok_or(NvmlError::Unknown)?;
-            let device = nvml.device_by_index(index)?;
-            let mem_info = device.memory_info()?;
-            Ok(GpuUsage {
-                uuid: self.uuid.clone(),
-                used_memory: mem_info.used / 1024 / 1024, // bytes to MiB
-                total_memory: mem_info.total / 1024 / 1024, // bytes to MiB
-            })
-        };
-        closure().unwrap_or_else(|e| {
-            log::error!("Failed to get memory usage for NVIDIA GPU {}: {}", index, e);
-            self.get_usage_unsupported()
+        #[cfg(any(target_os = "android", target_os = "ios"))]
+        {
+            log::warn!("NVIDIA GPU usage detection is not supported on mobile platforms");
+            return self.get_usage_unsupported();
+        }
+
+        #[cfg(not(any(target_os = "android", target_os = "ios")))]
+        {
+            let index = match &self.nvidia_info {
+                Some(nvidia_info) => nvidia_info.index,
+                None => {
+                    log::error!("get_usage_nvidia() called on non-NVIDIA GPU");
+                    return self.get_usage_unsupported();
+                }
+            };
+
+            self.get_nvidia_memory_usage(index)
+                .unwrap_or_else(|e| {
+                    log::error!("Failed to get memory usage for NVIDIA GPU {}: {}", index, e);
+                    self.get_usage_unsupported()
+                })
+        }
+    }
+
+    #[cfg(not(any(target_os = "android", target_os = "ios")))]
+    fn get_nvidia_memory_usage(&self, index: u32) -> Result<GpuUsage, NvmlError> {
+        let nvml = get_nvml().ok_or(NvmlError::Unknown)?;
+        let device = nvml.device_by_index(index)?;
+        let mem_info = device.memory_info()?;
+
+        Ok(GpuUsage {
+            uuid: self.uuid.clone(),
+            used_memory: mem_info.used / (1024 * 1024), // bytes to MiB
+            total_memory: mem_info.total / (1024 * 1024), // bytes to MiB
         })
     }
 }
 
 pub fn get_nvidia_gpus() -> Vec<GpuInfo> {
-    let closure = || -> Result<Vec<GpuInfo>, NvmlError> {
-        let nvml = get_nvml().ok_or(NvmlError::Unknown)?;
-        let num_gpus = nvml.device_count()?;
-        let driver_version = nvml.sys_driver_version()?;
+    #[cfg(any(target_os = "android", target_os = "ios"))]
+    {
+        // On mobile platforms, NVIDIA GPU detection is not supported
+        log::info!("NVIDIA GPU detection is not supported on mobile platforms");
+        vec![]
+    }
 
-        let mut gpus = Vec::with_capacity(num_gpus as usize);
-        for i in 0..num_gpus {
-            let device = nvml.device_by_index(i)?;
-            gpus.push(GpuInfo {
-                name: device.name()?,
-                total_memory: device.memory_info()?.total / 1024 / 1024, // bytes to MiB
-                vendor: Vendor::NVIDIA,
-                uuid: {
-                    let mut uuid = device.uuid()?;
-                    if uuid.starts_with("GPU-") {
-                        uuid = uuid[4..].to_string();
-                    }
-                    uuid
-                },
-                driver_version: driver_version.clone(),
-                nvidia_info: Some(NvidiaInfo {
-                    index: i,
-                    compute_capability: {
-                        let cc = device.cuda_compute_capability()?;
-                        format!("{}.{}", cc.major, cc.minor)
-                    },
-                }),
-                vulkan_info: None,
-            });
-        }
-
-        Ok(gpus)
-    };
-
-    match closure() {
-        Ok(gpus) => gpus,
-        Err(e) => {
-            log::error!("Failed to get NVIDIA GPUs: {}", e);
-            vec![]
-        }
+    #[cfg(not(any(target_os = "android", target_os = "ios")))]
+    {
+        get_nvidia_gpus_internal()
     }
 }
+
+#[cfg(not(any(target_os = "android", target_os = "ios")))]
+fn get_nvidia_gpus_internal() -> Vec<GpuInfo> {
+    let nvml = match get_nvml() {
+        Some(nvml) => nvml,
+        None => {
+            log::debug!("NVML not available");
+            return vec![];
+        }
+    };
+
+    let (num_gpus, driver_version) = match (nvml.device_count(), nvml.sys_driver_version()) {
+        (Ok(count), Ok(version)) => (count, version),
+        (Err(e), _) | (_, Err(e)) => {
+            log::error!("Failed to get NVIDIA system info: {}", e);
+            return vec![];
+        }
+    };
+
+    let mut gpus =
Vec::with_capacity(num_gpus as usize); + + for i in 0..num_gpus { + match create_gpu_info(nvml, i, &driver_version) { + Ok(gpu_info) => gpus.push(gpu_info), + Err(e) => log::warn!("Failed to get info for NVIDIA GPU {}: {}", i, e), + } + } + + gpus +} + +#[cfg(not(any(target_os = "android", target_os = "ios")))] +fn create_gpu_info(nvml: &Nvml, index: u32, driver_version: &str) -> Result { + let device = nvml.device_by_index(index)?; + let memory_info = device.memory_info()?; + let compute_capability = device.cuda_compute_capability()?; + + let uuid = device.uuid()?; + let clean_uuid = if uuid.starts_with("GPU-") { + uuid[4..].to_string() + } else { + uuid + }; + + Ok(GpuInfo { + name: device.name()?, + total_memory: memory_info.total / (1024 * 1024), // bytes to MiB + vendor: Vendor::NVIDIA, + uuid: clean_uuid, + driver_version: driver_version.to_string(), + nvidia_info: Some(NvidiaInfo { + index, + compute_capability: format!("{}.{}", compute_capability.major, compute_capability.minor), + }), + vulkan_info: None, + }) +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/vendor/tests.rs b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/tests.rs index 078efe91b..d683e4d91 100644 --- a/src-tauri/plugins/tauri-plugin-hardware/src/vendor/tests.rs +++ b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/tests.rs @@ -12,10 +12,122 @@ fn test_get_nvidia_gpus() { #[test] fn test_get_vulkan_gpus() { - let gpus = vulkan::get_vulkan_gpus(""); + let gpus = vulkan::get_vulkan_gpus(); for (i, gpu) in gpus.iter().enumerate() { println!("GPU {}:", i); println!(" {:?}", gpu); println!(" {:?}", gpu.get_usage()); } } + +#[cfg(not(any(target_os = "android", target_os = "ios")))] +#[test] +fn test_get_vulkan_gpus_on_desktop() { + let gpus = vulkan::get_vulkan_gpus(); + + // Test that function returns without panicking on desktop platforms + assert!(gpus.len() >= 0); + + // If GPUs are found, verify they have valid properties + for (i, gpu) in gpus.iter().enumerate() { + println!("Desktop GPU {}:", i); + println!(" Name: {}", gpu.name); + println!(" Vendor: {:?}", gpu.vendor); + println!(" Total Memory: {} MB", gpu.total_memory); + println!(" UUID: {}", gpu.uuid); + println!(" Driver Version: {}", gpu.driver_version); + + // Verify that GPU properties are not empty/default values + assert!(!gpu.name.is_empty(), "GPU name should not be empty"); + assert!(!gpu.uuid.is_empty(), "GPU UUID should not be empty"); + + // Test vulkan-specific info is present + if let Some(vulkan_info) = &gpu.vulkan_info { + println!(" Vulkan API Version: {}", vulkan_info.api_version); + println!(" Device Type: {}", vulkan_info.device_type); + assert!(!vulkan_info.api_version.is_empty(), "Vulkan API version should not be empty"); + assert!(!vulkan_info.device_type.is_empty(), "Device type should not be empty"); + } + } +} + +#[cfg(target_os = "android")] +#[test] +fn test_get_vulkan_gpus_on_android() { + let gpus = vulkan::get_vulkan_gpus(); + + // Test that function returns without panicking on Android + assert!(gpus.len() >= 0); + + // Android-specific validation + for (i, gpu) in gpus.iter().enumerate() { + println!("Android GPU {}:", i); + println!(" Name: {}", gpu.name); + println!(" Vendor: {:?}", gpu.vendor); + println!(" Total Memory: {} MB", gpu.total_memory); + println!(" UUID: {}", gpu.uuid); + println!(" Driver Version: {}", gpu.driver_version); + + // Verify C string parsing works correctly with i8 on Android + assert!(!gpu.name.is_empty(), "GPU name should not be empty on Android"); + 
assert!(!gpu.uuid.is_empty(), "GPU UUID should not be empty on Android"); + + // Android devices should typically have Adreno, Mali, or PowerVR GPUs + // The name parsing should handle i8 char arrays correctly + assert!( + gpu.name.chars().all(|c| c.is_ascii() || c.is_ascii_control()), + "GPU name should contain valid characters when parsed from i8 array" + ); + + if let Some(vulkan_info) = &gpu.vulkan_info { + println!(" Vulkan API Version: {}", vulkan_info.api_version); + println!(" Device Type: {}", vulkan_info.device_type); + // Verify API version parsing works with Android's i8 char arrays + assert!( + vulkan_info.api_version.matches('.').count() >= 2, + "API version should be in format X.Y.Z" + ); + } + } +} + +#[cfg(target_os = "ios")] +#[test] +fn test_get_vulkan_gpus_on_ios() { + let gpus = vulkan::get_vulkan_gpus(); + + // Note: iOS doesn't support Vulkan natively, so this might return empty + // But the function should still work without crashing + assert!(gpus.len() >= 0); + + // iOS-specific validation (if any Vulkan implementation is available via MoltenVK) + for (i, gpu) in gpus.iter().enumerate() { + println!("iOS GPU {}:", i); + println!(" Name: {}", gpu.name); + println!(" Vendor: {:?}", gpu.vendor); + println!(" Total Memory: {} MB", gpu.total_memory); + println!(" UUID: {}", gpu.uuid); + println!(" Driver Version: {}", gpu.driver_version); + + // Verify C string parsing works correctly with i8 on iOS + assert!(!gpu.name.is_empty(), "GPU name should not be empty on iOS"); + assert!(!gpu.uuid.is_empty(), "GPU UUID should not be empty on iOS"); + + // iOS devices should typically have Apple GPU (if Vulkan is available via MoltenVK) + // The name parsing should handle i8 char arrays correctly + assert!( + gpu.name.chars().all(|c| c.is_ascii() || c.is_ascii_control()), + "GPU name should contain valid characters when parsed from i8 array" + ); + + if let Some(vulkan_info) = &gpu.vulkan_info { + println!(" Vulkan API Version: {}", vulkan_info.api_version); + println!(" Device Type: {}", vulkan_info.device_type); + // Verify API version parsing works with iOS's i8 char arrays + assert!( + vulkan_info.api_version.matches('.').count() >= 2, + "API version should be in format X.Y.Z" + ); + } + } +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/vendor/vulkan.rs b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/vulkan.rs index 6a9bf21aa..372e11037 100644 --- a/src-tauri/plugins/tauri-plugin-hardware/src/vendor/vulkan.rs +++ b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/vulkan.rs @@ -1,5 +1,13 @@ -use crate::types::{GpuInfo, Vendor}; -use ash::{vk, Entry}; +use crate::types::GpuInfo; + +#[cfg(not(any(target_os = "android", target_os = "ios")))] +use { + crate::types::Vendor, + vulkano::device::physical::PhysicalDeviceType, + vulkano::instance::{Instance, InstanceCreateInfo}, + vulkano::memory::MemoryHeapFlags, + vulkano::VulkanLibrary, +}; #[derive(Debug, Clone, serde::Serialize)] pub struct VulkanInfo { @@ -9,6 +17,7 @@ pub struct VulkanInfo { pub device_id: u32, } +#[cfg(not(any(target_os = "android", target_os = "ios")))] fn parse_uuid(bytes: &[u8; 16]) -> String { format!( "{:02x}{:02x}{:02x}{:02x}-\ @@ -35,96 +44,79 @@ fn parse_uuid(bytes: &[u8; 16]) -> String { ) } -pub fn get_vulkan_gpus(lib_path: &str) -> Vec { - match get_vulkan_gpus_internal(lib_path) { - Ok(gpus) => gpus, - Err(e) => { - log::error!("Failed to get Vulkan GPUs: {:?}", e); - vec![] +pub fn get_vulkan_gpus() -> Vec { + #[cfg(any(target_os = "android", target_os = "ios"))] + { + 
log::info!("Vulkan GPU detection is not supported on mobile platforms"); + vec![] + } + + #[cfg(not(any(target_os = "android", target_os = "ios")))] + { + match get_vulkan_gpus_internal() { + Ok(gpus) => gpus, + Err(e) => { + log::error!("Failed to get Vulkan GPUs: {:?}", e); + vec![] + } } } } -fn parse_c_string(buf: &[i8]) -> String { - unsafe { std::ffi::CStr::from_ptr(buf.as_ptr()) } - .to_str() - .unwrap_or_default() - .to_string() -} +#[cfg(not(any(target_os = "android", target_os = "ios")))] +fn get_vulkan_gpus_internal() -> Result, Box> { + let library = VulkanLibrary::new()?; -fn get_vulkan_gpus_internal(lib_path: &str) -> Result, Box> { - let entry = if lib_path.is_empty() { - unsafe { Entry::load()? } - } else { - unsafe { Entry::load_from(lib_path)? } - }; - let app_info = vk::ApplicationInfo { - api_version: vk::make_api_version(0, 1, 1, 0), - ..Default::default() - }; - let create_info = vk::InstanceCreateInfo { - p_application_info: &app_info, - ..Default::default() - }; - let instance = unsafe { entry.create_instance(&create_info, None)? }; + let instance = Instance::new( + library, + InstanceCreateInfo { + application_name: Some("Jan GPU Detection".into()), + application_version: vulkano::Version::V1_1, + ..Default::default() + }, + )?; let mut device_info_list = vec![]; - for (i, device) in unsafe { instance.enumerate_physical_devices()? } - .iter() - .enumerate() - { - // create a chain of properties struct for VkPhysicalDeviceProperties2(3) - // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProperties2.html - // props2 -> driver_props -> id_props - let mut id_props = vk::PhysicalDeviceIDProperties::default(); - let mut driver_props = vk::PhysicalDeviceDriverProperties { - p_next: &mut id_props as *mut _ as *mut std::ffi::c_void, - ..Default::default() - }; - let mut props2 = vk::PhysicalDeviceProperties2 { - p_next: &mut driver_props as *mut _ as *mut std::ffi::c_void, - ..Default::default() - }; - unsafe { - instance.get_physical_device_properties2(*device, &mut props2); - } + for (i, physical_device) in instance.enumerate_physical_devices()?.enumerate() { + let properties = physical_device.properties(); - let props = props2.properties; - if props.device_type == vk::PhysicalDeviceType::CPU { + if properties.device_type == PhysicalDeviceType::Cpu { continue; } + let memory_properties = physical_device.memory_properties(); + let total_memory: u64 = memory_properties + .memory_heaps + .iter() + .filter(|heap| heap.flags.intersects(MemoryHeapFlags::DEVICE_LOCAL)) + .map(|heap| heap.size / (1024 * 1024)) + .sum(); + + let device_uuid = physical_device.properties().device_uuid.unwrap_or([0; 16]); + let driver_version = format!("{}", properties.driver_version); + let device_info = GpuInfo { - name: parse_c_string(&props.device_name), - total_memory: unsafe { instance.get_physical_device_memory_properties(*device) } - .memory_heaps - .iter() - .filter(|heap| heap.flags.contains(vk::MemoryHeapFlags::DEVICE_LOCAL)) - .map(|heap| heap.size / (1024 * 1024)) - .sum(), - vendor: Vendor::from_vendor_id(props.vendor_id), - uuid: parse_uuid(&id_props.device_uuid), - driver_version: parse_c_string(&driver_props.driver_info), + name: properties.device_name.clone(), + total_memory, + vendor: Vendor::from_vendor_id(properties.vendor_id), + uuid: parse_uuid(&device_uuid), + driver_version, nvidia_info: None, vulkan_info: Some(VulkanInfo { index: i as u64, - device_type: format!("{:?}", props.device_type), + device_type: format!("{:?}", properties.device_type), 
api_version: format!( "{}.{}.{}", - vk::api_version_major(props.api_version), - vk::api_version_minor(props.api_version), - vk::api_version_patch(props.api_version) + properties.api_version.major, + properties.api_version.minor, + properties.api_version.patch ), - device_id: props.device_id, + device_id: properties.device_id, }), }; device_info_list.push(device_info); } - unsafe { - instance.destroy_instance(None); - } - Ok(device_info_list) } diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml b/src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml index e1a57b962..38f7de3bd 100644 --- a/src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml +++ b/src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml @@ -23,9 +23,14 @@ sysinfo = "0.34.2" tauri = { version = "2.5.0", default-features = false, features = [] } thiserror = "2.0.12" tokio = { version = "1", features = ["full"] } -reqwest = { version = "0.11", features = ["json", "blocking", "stream"] } tauri-plugin-hardware = { path = "../tauri-plugin-hardware" } +[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies] +reqwest = { version = "0.11", features = ["json", "blocking", "stream", "native-tls"] } + +[target.'cfg(any(target_os = "android", target_os = "ios"))'.dependencies] +reqwest = { version = "0.11", features = ["json", "blocking", "stream", "rustls-tls"], default-features = false } + # Unix-specific dependencies [target.'cfg(unix)'.dependencies] nix = { version = "=0.30.1", features = ["signal", "process"] } diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/commands.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/commands.rs index c636fa8bd..5af92f91d 100644 --- a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/commands.rs +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/commands.rs @@ -3,7 +3,6 @@ use super::utils::{estimate_kv_cache_internal, read_gguf_metadata_internal}; use crate::gguf::types::{KVCacheError, KVCacheEstimate, ModelSupportStatus}; use std::collections::HashMap; use std::fs; -use tauri::Runtime; use tauri_plugin_hardware::get_system_info; /// Read GGUF metadata from a model file #[tauri::command] @@ -49,16 +48,15 @@ pub async fn get_model_size(path: String) -> Result { } #[tauri::command] -pub async fn is_model_supported( +pub async fn is_model_supported( path: String, ctx_size: Option, - app_handle: tauri::AppHandle, ) -> Result { // Get model size let model_size = get_model_size(path.clone()).await?; // Get system info - let system_info = get_system_info(app_handle.clone()); + let system_info = get_system_info(); log::info!("modelSize: {}", model_size); @@ -89,19 +87,25 @@ pub async fn is_model_supported( ); const RESERVE_BYTES: u64 = 2288490189; - let total_system_memory = system_info.total_memory * 1024 * 1024; + let total_system_memory: u64 = match system_info.gpus.is_empty() { + // on MacOS with unified memory, treat RAM = 0 for now + true => 0, + false => system_info.total_memory * 1024 * 1024, + }; + // Calculate total VRAM from all GPUs - let total_vram: u64 = if system_info.gpus.is_empty() { + let total_vram: u64 = match system_info.gpus.is_empty() { // On macOS with unified memory, GPU info may be empty // Use total RAM as VRAM since memory is shared - log::info!("No GPUs detected (likely unified memory system), using total RAM as VRAM"); - total_system_memory - } else { - system_info + true => { + log::info!("No GPUs detected (likely unified memory system), using total RAM as VRAM"); + system_info.total_memory * 1024 * 1024 + } + false => system_info 
.gpus .iter() .map(|g| g.total_memory * 1024 * 1024) - .sum::() + .sum::(), }; log::info!("Total VRAM reported/calculated (in bytes): {}", &total_vram); @@ -115,7 +119,7 @@ pub async fn is_model_supported( let usable_total_memory = if total_system_memory > RESERVE_BYTES { (total_system_memory - RESERVE_BYTES) + usable_vram } else { - 0 + usable_vram }; log::info!("System RAM: {} bytes", &total_system_memory); log::info!("Total VRAM: {} bytes", &total_vram); diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/model_planner.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/model_planner.rs index 118894871..aad9dfe16 100644 --- a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/model_planner.rs +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/model_planner.rs @@ -3,7 +3,6 @@ use crate::gguf::utils::estimate_kv_cache_internal; use crate::gguf::utils::read_gguf_metadata_internal; use serde::{Deserialize, Serialize}; use std::collections::HashMap; -use tauri::Runtime; use tauri_plugin_hardware::get_system_info; #[derive(Serialize, Deserialize, Clone, Debug)] @@ -27,15 +26,14 @@ pub enum ModelMode { } #[tauri::command] -pub async fn plan_model_load( +pub async fn plan_model_load( path: String, memory_mode: String, mmproj_path: Option, requested_ctx: Option, - app: tauri::AppHandle, ) -> Result { let model_size = get_model_size(path.clone()).await?; - let sys_info = get_system_info(app.clone()); + let sys_info = get_system_info(); let gguf = read_gguf_metadata_internal(path.clone()).await?; let mut mmproj_size: u64 = 0; @@ -82,25 +80,25 @@ pub async fn plan_model_load( log::info!("Got GPUs:\n{:?}", &sys_info.gpus); - let total_ram: u64 = sys_info.total_memory * 1024 * 1024; - log::info!( - "Total system memory reported from tauri_plugin_hardware(in bytes): {}", - &total_ram - ); + let total_ram: u64 = match sys_info.gpus.is_empty() { + // Consider RAM as 0 for unified memory + true => 0, + false => sys_info.total_memory * 1024 * 1024, + }; - let total_vram: u64 = if sys_info.gpus.is_empty() { - // On macOS with unified memory, GPU info may be empty - // Use total RAM as VRAM since memory is shared - log::info!("No GPUs detected (likely unified memory system), using total RAM as VRAM"); - total_ram - } else { - sys_info + // Calculate total VRAM from all GPUs + let total_vram: u64 = match sys_info.gpus.is_empty() { + true => { + log::info!("No GPUs detected (likely unified memory system), using total RAM as VRAM"); + sys_info.total_memory * 1024 * 1024 + } + false => sys_info .gpus .iter() .map(|g| g.total_memory * 1024 * 1024) - .sum::() + .sum::(), }; - + log::info!("Total RAM reported/calculated (in bytes): {}", &total_ram); log::info!("Total VRAM reported/calculated (in bytes): {}", &total_vram); let usable_vram: u64 = if total_vram > RESERVE_BYTES { (((total_vram - RESERVE_BYTES) as f64) * multiplier) as u64 diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/utils.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/utils.rs index 50e3f4a14..cdbbf92d5 100644 --- a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/utils.rs +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/utils.rs @@ -62,6 +62,7 @@ pub async fn estimate_kv_cache_internal( ctx_size: Option, ) -> Result { log::info!("Received ctx_size parameter: {:?}", ctx_size); + log::info!("Received model metadata:\n{:?}", &meta); let arch = meta .get("general.architecture") .ok_or(KVCacheError::ArchitectureNotFound)?; @@ -94,15 +95,43 @@ pub async fn estimate_kv_cache_internal( let key_len_key = 
format!("{}.attention.key_length", arch); let val_len_key = format!("{}.attention.value_length", arch); - let key_len = meta + let mut key_len = meta .get(&key_len_key) .and_then(|s| s.parse::().ok()) .unwrap_or(0); - let val_len = meta + let mut val_len = meta .get(&val_len_key) .and_then(|s| s.parse::().ok()) .unwrap_or(0); + // Fallback: calculate from embedding_length if key/val lengths not found + if key_len == 0 || val_len == 0 { + let emb_len_key = format!("{}.embedding_length", arch); + let emb_len = meta + .get(&emb_len_key) + .and_then(|s| s.parse::().ok()) + .unwrap_or(0); + + if emb_len > 0 && n_head > 0 { + // For most transformers: head_dim = embedding_length / total_heads + let total_heads = meta + .get(&n_head_key) + .and_then(|s| s.parse::().ok()) + .unwrap_or(n_head); + + let head_dim = emb_len / total_heads; + key_len = head_dim; + val_len = head_dim; + + log::info!( + "Calculated key_len and val_len from embedding_length: {} / {} heads = {} per head", + emb_len, + total_heads, + head_dim + ); + } + } + if key_len == 0 || val_len == 0 { return Err(KVCacheError::EmbeddingLengthInvalid); } diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/process.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/process.rs index 3de983c51..06d83fcb0 100644 --- a/src-tauri/plugins/tauri-plugin-llamacpp/src/process.rs +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/process.rs @@ -1,8 +1,6 @@ use std::collections::HashSet; -use std::time::Duration; use sysinfo::{Pid, System}; use tauri::{Manager, Runtime, State}; -use tokio::time::timeout; use crate::state::{LlamacppState, SessionInfo}; use jan_utils::generate_random_port; @@ -56,6 +54,8 @@ pub async fn get_random_available_port( pub async fn graceful_terminate_process(child: &mut tokio::process::Child) { use nix::sys::signal::{kill, Signal}; use nix::unistd::Pid; + use std::time::Duration; + use tokio::time::timeout; if let Some(raw_pid) = child.id() { let raw_pid = raw_pid as i32; diff --git a/src-tauri/resources/LICENSE b/src-tauri/resources/LICENSE new file mode 100644 index 000000000..d614b967f --- /dev/null +++ b/src-tauri/resources/LICENSE @@ -0,0 +1,19 @@ +Jan + +Copyright 2025 Menlo Research + +This product includes software developed by Menlo Research (https://menlo.ai). + +Licensed under the Apache License, Version 2.0 (the "License"); +You may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Attribution is requested in user-facing documentation and materials, where appropriate. 
\ No newline at end of file diff --git a/src-tauri/src/core/app/commands.rs b/src-tauri/src/core/app/commands.rs index ba3e493b3..18e746869 100644 --- a/src-tauri/src/core/app/commands.rs +++ b/src-tauri/src/core/app/commands.rs @@ -19,10 +19,7 @@ pub fn get_app_configurations(app_handle: tauri::AppHandle) -> Ap let default_data_folder = default_data_folder_path(app_handle.clone()); if !configuration_file.exists() { - log::info!( - "App config not found, creating default config at {:?}", - configuration_file - ); + log::info!("App config not found, creating default config at {configuration_file:?}"); app_default_configuration.data_folder = default_data_folder; @@ -30,7 +27,7 @@ pub fn get_app_configurations(app_handle: tauri::AppHandle) -> Ap &configuration_file, serde_json::to_string(&app_default_configuration).unwrap(), ) { - log::error!("Failed to create default config: {}", err); + log::error!("Failed to create default config: {err}"); } return app_default_configuration; @@ -40,33 +37,24 @@ pub fn get_app_configurations(app_handle: tauri::AppHandle) -> Ap Ok(content) => match serde_json::from_str::(&content) { Ok(app_configurations) => app_configurations, Err(err) => { - log::error!( - "Failed to parse app config, returning default config instead. Error: {}", - err - ); + log::error!("Failed to parse app config, returning default config instead. Error: {err}"); app_default_configuration } }, Err(err) => { - log::error!( - "Failed to read app config, returning default config instead. Error: {}", - err - ); + log::error!("Failed to read app config, returning default config instead. Error: {err}"); app_default_configuration } } } #[tauri::command] -pub fn update_app_configuration( - app_handle: tauri::AppHandle, +pub fn update_app_configuration( + app_handle: tauri::AppHandle, configuration: AppConfiguration, ) -> Result<(), String> { let configuration_file = get_configuration_file_path(app_handle); - log::info!( - "update_app_configuration, configuration_file: {:?}", - configuration_file - ); + log::info!("update_app_configuration, configuration_file: {configuration_file:?}"); fs::write( configuration_file, @@ -95,8 +83,7 @@ pub fn get_jan_data_folder_path(app_handle: tauri::AppHandle) -> pub fn get_configuration_file_path(app_handle: tauri::AppHandle) -> PathBuf { let app_path = app_handle.path().app_data_dir().unwrap_or_else(|err| { log::error!( - "Failed to get app data directory: {}. Using home directory instead.", - err + "Failed to get app data directory: {err}. Using home directory instead." 
); let home_dir = std::env::var(if cfg!(target_os = "windows") { @@ -130,9 +117,9 @@ pub fn get_configuration_file_path(app_handle: tauri::AppHandle) .join(package_name); if old_data_dir.exists() { - return old_data_dir.join(CONFIGURATION_FILE_NAME); + old_data_dir.join(CONFIGURATION_FILE_NAME) } else { - return app_path.join(CONFIGURATION_FILE_NAME); + app_path.join(CONFIGURATION_FILE_NAME) } } @@ -155,13 +142,13 @@ pub fn default_data_folder_path(app_handle: tauri::AppHandle) -> } #[tauri::command] -pub fn get_user_home_path(app: AppHandle) -> String { - return get_app_configurations(app.clone()).data_folder; +pub fn get_user_home_path(app: AppHandle) -> String { + get_app_configurations(app.clone()).data_folder } #[tauri::command] -pub fn change_app_data_folder( - app_handle: tauri::AppHandle, +pub fn change_app_data_folder( + app_handle: tauri::AppHandle, new_data_folder: String, ) -> Result<(), String> { // Get current data folder path @@ -171,16 +158,12 @@ pub fn change_app_data_folder( // Create the new data folder if it doesn't exist if !new_data_folder_path.exists() { fs::create_dir_all(&new_data_folder_path) - .map_err(|e| format!("Failed to create new data folder: {}", e))?; + .map_err(|e| format!("Failed to create new data folder: {e}"))?; } // Copy all files from the old folder to the new one if current_data_folder.exists() { - log::info!( - "Copying data from {:?} to {:?}", - current_data_folder, - new_data_folder_path - ); + log::info!("Copying data from {current_data_folder:?} to {new_data_folder_path:?}"); // Check if this is a parent directory to avoid infinite recursion if new_data_folder_path.starts_with(¤t_data_folder) { @@ -193,7 +176,7 @@ pub fn change_app_data_folder( &new_data_folder_path, &[".uvx", ".npx"], ) - .map_err(|e| format!("Failed to copy data to new folder: {}", e))?; + .map_err(|e| format!("Failed to copy data to new folder: {e}"))?; } else { log::info!("Current data folder does not exist, nothing to copy"); } diff --git a/src-tauri/src/core/downloads/commands.rs b/src-tauri/src/core/downloads/commands.rs index f2187046a..a24ae32f0 100644 --- a/src-tauri/src/core/downloads/commands.rs +++ b/src-tauri/src/core/downloads/commands.rs @@ -3,12 +3,12 @@ use super::models::DownloadItem; use crate::core::app::commands::get_jan_data_folder_path; use crate::core::state::AppState; use std::collections::HashMap; -use tauri::State; +use tauri::{Runtime, State}; use tokio_util::sync::CancellationToken; #[tauri::command] -pub async fn download_files( - app: tauri::AppHandle, +pub async fn download_files( + app: tauri::AppHandle, state: State<'_, AppState>, items: Vec, task_id: &str, @@ -19,7 +19,7 @@ pub async fn download_files( { let mut download_manager = state.download_manager.lock().await; if download_manager.cancel_tokens.contains_key(task_id) { - return Err(format!("task_id {} exists", task_id)); + return Err(format!("task_id {task_id} exists")); } download_manager .cancel_tokens @@ -60,9 +60,9 @@ pub async fn cancel_download_task(state: State<'_, AppState>, task_id: &str) -> let mut download_manager = state.download_manager.lock().await; if let Some(token) = download_manager.cancel_tokens.remove(task_id) { token.cancel(); - log::info!("Cancelled download task: {}", task_id); + log::info!("Cancelled download task: {task_id}"); Ok(()) } else { - Err(format!("No download task: {}", task_id)) + Err(format!("No download task: {task_id}")) } } diff --git a/src-tauri/src/core/downloads/helpers.rs b/src-tauri/src/core/downloads/helpers.rs index 137bbdd3d..3ce1d89fa 
100644 --- a/src-tauri/src/core/downloads/helpers.rs +++ b/src-tauri/src/core/downloads/helpers.rs @@ -6,7 +6,7 @@ use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; use std::collections::HashMap; use std::path::Path; use std::time::Duration; -use tauri::Emitter; +use tauri::{Emitter, Runtime}; use tokio::fs::File; use tokio::io::AsyncWriteExt; use tokio_util::sync::CancellationToken; @@ -15,7 +15,7 @@ use url::Url; // ===== UTILITY FUNCTIONS ===== pub fn err_to_string(e: E) -> String { - format!("Error: {}", e) + format!("Error: {e}") } @@ -25,7 +25,7 @@ pub fn err_to_string(e: E) -> String { async fn validate_downloaded_file( item: &DownloadItem, save_path: &Path, - app: &tauri::AppHandle, + app: &tauri::AppHandle, cancel_token: &CancellationToken, ) -> Result<(), String> { // Skip validation if no verification data is provided @@ -55,7 +55,7 @@ async fn validate_downloaded_file( ) .unwrap(); - log::info!("Starting validation for model: {}", model_id); + log::info!("Starting validation for model: {model_id}"); // Validate size if provided (fast check first) if let Some(expected_size) = &item.size { @@ -73,8 +73,7 @@ async fn validate_downloaded_file( actual_size ); return Err(format!( - "Size verification failed. Expected {} bytes but got {} bytes.", - expected_size, actual_size + "Size verification failed. Expected {expected_size} bytes but got {actual_size} bytes." )); } @@ -90,7 +89,7 @@ async fn validate_downloaded_file( save_path.display(), e ); - return Err(format!("Failed to verify file size: {}", e)); + return Err(format!("Failed to verify file size: {e}")); } } } @@ -115,9 +114,7 @@ async fn validate_downloaded_file( computed_sha256 ); - return Err(format!( - "Hash verification failed. The downloaded file is corrupted or has been tampered with." - )); + return Err("Hash verification failed. 
The downloaded file is corrupted or has been tampered with.".to_string()); } log::info!("Hash verification successful for {}", item.url); @@ -128,7 +125,7 @@ async fn validate_downloaded_file( save_path.display(), e ); - return Err(format!("Failed to verify file integrity: {}", e)); + return Err(format!("Failed to verify file integrity: {e}")); } } } @@ -140,14 +137,14 @@ async fn validate_downloaded_file( pub fn validate_proxy_config(config: &ProxyConfig) -> Result<(), String> { // Validate proxy URL format if let Err(e) = Url::parse(&config.url) { - return Err(format!("Invalid proxy URL '{}': {}", config.url, e)); + return Err(format!("Invalid proxy URL '{}': {e}", config.url)); } // Check if proxy URL has valid scheme let url = Url::parse(&config.url).unwrap(); // Safe to unwrap as we just validated it match url.scheme() { "http" | "https" | "socks4" | "socks5" => {} - scheme => return Err(format!("Unsupported proxy scheme: {}", scheme)), + scheme => return Err(format!("Unsupported proxy scheme: {scheme}")), } // Validate authentication credentials @@ -167,7 +164,7 @@ pub fn validate_proxy_config(config: &ProxyConfig) -> Result<(), String> { } // Basic validation for wildcard patterns if entry.starts_with("*.") && entry.len() < 3 { - return Err(format!("Invalid wildcard pattern: {}", entry)); + return Err(format!("Invalid wildcard pattern: {entry}")); } } } @@ -214,8 +211,7 @@ pub fn should_bypass_proxy(url: &str, no_proxy: &[String]) -> bool { } // Simple wildcard matching - if entry.starts_with("*.") { - let domain = &entry[2..]; + if let Some(domain) = entry.strip_prefix("*.") { if host.ends_with(domain) { return true; } @@ -298,14 +294,14 @@ pub async fn _get_file_size( /// Downloads multiple files in parallel with individual progress tracking pub async fn _download_files_internal( - app: tauri::AppHandle, + app: tauri::AppHandle, items: &[DownloadItem], headers: &HashMap, task_id: &str, resume: bool, cancel_token: CancellationToken, ) -> Result<(), String> { - log::info!("Start download task: {}", task_id); + log::info!("Start download task: {task_id}"); let header_map = _convert_headers(headers).map_err(err_to_string)?; @@ -320,9 +316,9 @@ pub async fn _download_files_internal( } let total_size: u64 = file_sizes.values().sum(); - log::info!("Total download size: {}", total_size); + log::info!("Total download size: {total_size}"); - let evt_name = format!("download-{}", task_id); + let evt_name = format!("download-{task_id}"); // Create progress tracker let progress_tracker = ProgressTracker::new(items, file_sizes.clone()); @@ -352,7 +348,7 @@ pub async fn _download_files_internal( let cancel_token_clone = cancel_token.clone(); let evt_name_clone = evt_name.clone(); let progress_tracker_clone = progress_tracker.clone(); - let file_id = format!("{}-{}", task_id, index); + let file_id = format!("{task_id}-{index}"); let file_size = file_sizes.get(&item.url).copied().unwrap_or(0); let task = tokio::spawn(async move { @@ -377,7 +373,7 @@ pub async fn _download_files_internal( // Wait for all downloads to complete let mut validation_tasks = Vec::new(); for (task, item) in download_tasks.into_iter().zip(items.iter()) { - let result = task.await.map_err(|e| format!("Task join error: {}", e))?; + let result = task.await.map_err(|e| format!("Task join error: {e}"))?; match result { Ok(downloaded_path) => { @@ -399,7 +395,7 @@ pub async fn _download_files_internal( for (validation_task, save_path, _item) in validation_tasks { let validation_result = validation_task .await - .map_err(|e| 
format!("Validation task join error: {}", e))?; + .map_err(|e| format!("Validation task join error: {e}"))?; if let Err(validation_error) = validation_result { // Clean up the file if validation fails @@ -423,7 +419,7 @@ pub async fn _download_files_internal( /// Downloads a single file without blocking other downloads async fn download_single_file( - app: tauri::AppHandle, + app: tauri::AppHandle, item: &DownloadItem, header_map: &HeaderMap, save_path: &std::path::Path, @@ -448,7 +444,7 @@ async fn download_single_file( if current_extension.is_empty() { ext.to_string() } else { - format!("{}.{}", current_extension, ext) + format!("{current_extension}.{ext}") } }; let tmp_save_path = save_path.with_extension(append_extension("tmp")); @@ -465,8 +461,12 @@ async fn download_single_file( .await .map_err(err_to_string)?; - log::info!("Started downloading: {}", item.url); - let client = _get_client_for_item(item, &header_map).map_err(err_to_string)?; + // Decode URL for better readability in logs + let decoded_url = url::Url::parse(&item.url) + .map(|u| u.to_string()) + .unwrap_or_else(|_| item.url.clone()); + log::info!("Started downloading: {decoded_url}"); + let client = _get_client_for_item(item, header_map).map_err(err_to_string)?; let mut download_delta = 0u64; let mut initial_progress = 0u64; @@ -499,7 +499,7 @@ async fn download_single_file( } Err(e) => { // fallback to normal download - log::warn!("Failed to resume download: {}", e); + log::warn!("Failed to resume download: {e}"); should_resume = false; _get_maybe_resume(&client, &item.url, 0).await? } @@ -584,7 +584,11 @@ async fn download_single_file( .await .map_err(err_to_string)?; - log::info!("Finished downloading: {}", item.url); + // Decode URL for better readability in logs + let decoded_url = url::Url::parse(&item.url) + .map(|u| u.to_string()) + .unwrap_or_else(|_| item.url.clone()); + log::info!("Finished downloading: {decoded_url}"); Ok(save_path.to_path_buf()) } @@ -598,7 +602,7 @@ pub async fn _get_maybe_resume( if start_bytes > 0 { let resp = client .get(url) - .header("Range", format!("bytes={}-", start_bytes)) + .header("Range", format!("bytes={start_bytes}-")) .send() .await .map_err(err_to_string)?; diff --git a/src-tauri/src/core/extensions/commands.rs b/src-tauri/src/core/extensions/commands.rs index 784c71f46..e416a03a3 100644 --- a/src-tauri/src/core/extensions/commands.rs +++ b/src-tauri/src/core/extensions/commands.rs @@ -1,53 +1,63 @@ use std::fs; use std::path::PathBuf; -use tauri::AppHandle; +use tauri::{AppHandle, Runtime}; use crate::core::app::commands::get_jan_data_folder_path; use crate::core::setup; #[tauri::command] -pub fn get_jan_extensions_path(app_handle: tauri::AppHandle) -> PathBuf { +pub fn get_jan_extensions_path(app_handle: tauri::AppHandle) -> PathBuf { get_jan_data_folder_path(app_handle).join("extensions") } #[tauri::command] -pub fn install_extensions(app: AppHandle) { +pub fn install_extensions(app: AppHandle) { if let Err(err) = setup::install_extensions(app, true) { - log::error!("Failed to install extensions: {}", err); + log::error!("Failed to install extensions: {err}"); } } #[tauri::command] -pub fn get_active_extensions(app: AppHandle) -> Vec { - let mut path = get_jan_extensions_path(app); - path.push("extensions.json"); - log::info!("get jan extensions, path: {:?}", path); +pub fn get_active_extensions(app: AppHandle) -> Vec { + // On mobile platforms, extensions are pre-bundled in the frontend + // Return empty array so frontend's MobileCoreService handles it + 
#[cfg(any(target_os = "android", target_os = "ios"))] + { + return vec![]; + } - let contents = fs::read_to_string(path); - let contents: Vec<serde_json::Value> = match contents { - Ok(data) => match serde_json::from_str::<Vec<serde_json::Value>>(&data) { - Ok(exts) => exts - .into_iter() - .map(|ext| { - serde_json::json!({ - "url": ext["url"], - "name": ext["name"], - "productName": ext["productName"], - "active": ext["_active"], - "description": ext["description"], - "version": ext["version"] + #[cfg(not(any(target_os = "android", target_os = "ios")))] + { + let mut path = get_jan_extensions_path(app); + path.push("extensions.json"); + log::info!("get jan extensions, path: {path:?}"); + + let contents = fs::read_to_string(path); + let contents: Vec<serde_json::Value> = match contents { + Ok(data) => match serde_json::from_str::<Vec<serde_json::Value>>(&data) { + Ok(exts) => exts + .into_iter() + .map(|ext| { + serde_json::json!({ + "url": ext["url"], + "name": ext["name"], + "productName": ext["productName"], + "active": ext["_active"], + "description": ext["description"], + "version": ext["version"] + }) }) - }) - .collect(), + .collect(), + Err(error) => { + log::error!("Failed to parse extensions.json: {error}"); + vec![] + } + }, Err(error) => { - log::error!("Failed to parse extensions.json: {}", error); + log::error!("Failed to read extensions.json: {error}"); vec![] } - }, - Err(error) => { - log::error!("Failed to read extensions.json: {}", error); - vec![] - } - }; - return contents; + }; + return contents; + } } diff --git a/src-tauri/src/core/filesystem/commands.rs b/src-tauri/src/core/filesystem/commands.rs index a37cc00ec..fe44052b8 100644 --- a/src-tauri/src/core/filesystem/commands.rs +++ b/src-tauri/src/core/filesystem/commands.rs @@ -140,7 +140,7 @@ pub fn readdir_sync( #[tauri::command] pub fn write_yaml( - app: tauri::AppHandle, + app: tauri::AppHandle, data: serde_json::Value, save_path: &str, ) -> Result<(), String> { @@ -161,7 +161,7 @@ pub fn write_yaml( } #[tauri::command] -pub fn read_yaml(app: tauri::AppHandle, path: &str) -> Result<serde_json::Value, String> { +pub fn read_yaml(app: tauri::AppHandle, path: &str) -> Result<serde_json::Value, String> { let jan_data_folder = crate::core::app::commands::get_jan_data_folder_path(app.clone()); let path = jan_utils::normalize_path(&jan_data_folder.join(path)); if !path.starts_with(&jan_data_folder) { @@ -178,7 +178,7 @@ pub fn read_yaml(app: tauri::AppHandle, path: &str) -> Result<serde_json::Value, String> { } #[tauri::command] -pub fn decompress(app: tauri::AppHandle, path: &str, output_dir: &str) -> Result<(), String> { +pub fn decompress(app: tauri::AppHandle, path: &str, output_dir: &str) -> Result<(), String> { let jan_data_folder = crate::core::app::commands::get_jan_data_folder_path(app.clone()); let path_buf = jan_utils::normalize_path(&jan_data_folder.join(path)); diff --git a/src-tauri/src/core/filesystem/tests.rs b/src-tauri/src/core/filesystem/tests.rs index b4e96e994..b89b834d6 100644 --- a/src-tauri/src/core/filesystem/tests.rs +++ b/src-tauri/src/core/filesystem/tests.rs @@ -9,7 +9,7 @@ fn test_rm() { let app = mock_app(); let path = "test_rm_dir"; fs::create_dir_all(get_jan_data_folder_path(app.handle().clone()).join(path)).unwrap(); - let args = vec![format!("file://{}", path).to_string()]; + let args = vec![format!("file://{path}").to_string()]; let result = rm(app.handle().clone(), args); assert!(result.is_ok()); assert!(!get_jan_data_folder_path(app.handle().clone()) @@ -21,7 +21,7 @@ fn test_rm() { fn test_mkdir() { let app = mock_app(); let path = "test_mkdir_dir"; - let args = vec![format!("file://{}", path).to_string()]; + let args = vec![format!("file://{path}").to_string()]; let result = mkdir(app.handle().clone(), args); assert!(result.is_ok()); 
assert!(get_jan_data_folder_path(app.handle().clone()) @@ -39,7 +39,7 @@ fn test_join_path() { assert_eq!( result, get_jan_data_folder_path(app.handle().clone()) - .join(&format!("test_dir{}test_file", std::path::MAIN_SEPARATOR)) + .join(format!("test_dir{}test_file", std::path::MAIN_SEPARATOR)) .to_string_lossy() .to_string() ); diff --git a/src-tauri/src/core/mcp/commands.rs b/src-tauri/src/core/mcp/commands.rs index 3bef12149..6eb6dab40 100644 --- a/src-tauri/src/core/mcp/commands.rs +++ b/src-tauri/src/core/mcp/commands.rs @@ -30,28 +30,28 @@ pub async fn activate_mcp_server( #[tauri::command] pub async fn deactivate_mcp_server(state: State<'_, AppState>, name: String) -> Result<(), String> { - log::info!("Deactivating MCP server: {}", name); + log::info!("Deactivating MCP server: {name}"); // First, mark server as manually deactivated to prevent restart // Remove from active servers list to prevent restart { let mut active_servers = state.mcp_active_servers.lock().await; active_servers.remove(&name); - log::info!("Removed MCP server {} from active servers list", name); + log::info!("Removed MCP server {name} from active servers list"); } // Mark as not successfully connected to prevent restart logic { let mut connected = state.mcp_successfully_connected.lock().await; connected.insert(name.clone(), false); - log::info!("Marked MCP server {} as not successfully connected", name); + log::info!("Marked MCP server {name} as not successfully connected"); } // Reset restart count { let mut counts = state.mcp_restart_counts.lock().await; counts.remove(&name); - log::info!("Reset restart count for MCP server {}", name); + log::info!("Reset restart count for MCP server {name}"); } // Now remove and stop the server @@ -60,7 +60,7 @@ pub async fn deactivate_mcp_server(state: State<'_, AppState>, name: String) -> let service = servers_map .remove(&name) - .ok_or_else(|| format!("Server {} not found", name))?; + .ok_or_else(|| format!("Server {name} not found"))?; // Release the lock before calling cancel drop(servers_map); @@ -80,7 +80,7 @@ pub async fn deactivate_mcp_server(state: State<'_, AppState>, name: String) -> } #[tauri::command] -pub async fn restart_mcp_servers(app: AppHandle, state: State<'_, AppState>) -> Result<(), String> { +pub async fn restart_mcp_servers(app: AppHandle, state: State<'_, AppState>) -> Result<(), String> { let servers = state.mcp_servers.clone(); // Stop the servers stop_mcp_servers(state.mcp_servers.clone()).await?; @@ -89,7 +89,7 @@ pub async fn restart_mcp_servers(app: AppHandle, state: State<'_, AppState>) -> restart_active_mcp_servers(&app, servers).await?; app.emit("mcp-update", "MCP servers updated") - .map_err(|e| format!("Failed to emit event: {}", e))?; + .map_err(|e| format!("Failed to emit event: {e}"))?; Ok(()) } @@ -110,16 +110,14 @@ pub async fn reset_mcp_restart_count( let old_count = *count; *count = 0; log::info!( - "MCP server {} restart count reset from {} to 0.", - server_name, - old_count + "MCP server {server_name} restart count reset from {old_count} to 0." 
); Ok(()) } #[tauri::command] pub async fn get_connected_servers( - _app: AppHandle, + _app: AppHandle, state: State<'_, AppState>, ) -> Result, String> { let servers = state.mcp_servers.clone(); @@ -219,7 +217,7 @@ pub async fn call_tool( continue; // Tool not found in this server, try next } - println!("Found tool {} in server", tool_name); + println!("Found tool {tool_name} in server"); // Call the tool with timeout and cancellation support let tool_call = service.call_tool(CallToolRequestParam { @@ -234,22 +232,20 @@ pub async fn call_tool( match result { Ok(call_result) => call_result.map_err(|e| e.to_string()), Err(_) => Err(format!( - "Tool call '{}' timed out after {} seconds", - tool_name, + "Tool call '{tool_name}' timed out after {} seconds", MCP_TOOL_CALL_TIMEOUT.as_secs() )), } } _ = cancel_rx => { - Err(format!("Tool call '{}' was cancelled", tool_name)) + Err(format!("Tool call '{tool_name}' was cancelled")) } } } else { match timeout(MCP_TOOL_CALL_TIMEOUT, tool_call).await { Ok(call_result) => call_result.map_err(|e| e.to_string()), Err(_) => Err(format!( - "Tool call '{}' timed out after {} seconds", - tool_name, + "Tool call '{tool_name}' timed out after {} seconds", MCP_TOOL_CALL_TIMEOUT.as_secs() )), } @@ -264,7 +260,7 @@ pub async fn call_tool( return result; } - Err(format!("Tool {} not found", tool_name)) + Err(format!("Tool {tool_name} not found")) } /// Cancels a running tool call by its cancellation token @@ -285,15 +281,15 @@ pub async fn cancel_tool_call( if let Some(cancel_tx) = cancellations.remove(&cancellation_token) { // Send cancellation signal - ignore if receiver is already dropped let _ = cancel_tx.send(()); - println!("Tool call with token {} cancelled", cancellation_token); + println!("Tool call with token {cancellation_token} cancelled"); Ok(()) } else { - Err(format!("Cancellation token {} not found", cancellation_token)) + Err(format!("Cancellation token {cancellation_token} not found")) } } #[tauri::command] -pub async fn get_mcp_configs(app: AppHandle) -> Result { +pub async fn get_mcp_configs(app: AppHandle) -> Result { let mut path = get_jan_data_folder_path(app); path.push("mcp_config.json"); @@ -301,17 +297,17 @@ pub async fn get_mcp_configs(app: AppHandle) -> Result { if !path.exists() { log::info!("mcp_config.json not found, creating default empty config"); fs::write(&path, DEFAULT_MCP_CONFIG) - .map_err(|e| format!("Failed to create default MCP config: {}", e))?; + .map_err(|e| format!("Failed to create default MCP config: {e}"))?; } fs::read_to_string(path).map_err(|e| e.to_string()) } #[tauri::command] -pub async fn save_mcp_configs(app: AppHandle, configs: String) -> Result<(), String> { +pub async fn save_mcp_configs(app: AppHandle, configs: String) -> Result<(), String> { let mut path = get_jan_data_folder_path(app); path.push("mcp_config.json"); - log::info!("save mcp configs, path: {:?}", path); + log::info!("save mcp configs, path: {path:?}"); fs::write(path, configs).map_err(|e| e.to_string()) } diff --git a/src-tauri/src/core/mcp/helpers.rs b/src-tauri/src/core/mcp/helpers.rs index 80a8b5f86..4e226a055 100644 --- a/src-tauri/src/core/mcp/helpers.rs +++ b/src-tauri/src/core/mcp/helpers.rs @@ -25,7 +25,7 @@ use crate::core::{ mcp::models::McpServerConfig, state::{AppState, RunningServiceEnum, SharedMcpServers}, }; -use jan_utils::can_override_npx; +use jan_utils::{can_override_npx, can_override_uvx}; /// Calculate exponential backoff delay with jitter /// @@ -56,22 +56,13 @@ pub fn calculate_exponential_backoff_delay(attempt: u32) 
-> u64 { let hash = hasher.finish(); // Convert hash to jitter value in range [-jitter_range, +jitter_range] - let jitter_offset = (hash % (jitter_range * 2)) as i64 - jitter_range as i64; - jitter_offset + (hash % (jitter_range * 2)) as i64 - jitter_range as i64 } else { 0 }; // Apply jitter while ensuring delay stays positive and within bounds - let final_delay = cmp::max( - 100, // Minimum 100ms delay - cmp::min( - MCP_MAX_RESTART_DELAY_MS, - (capped_delay as i64 + jitter) as u64, - ), - ); - - final_delay + ((capped_delay as i64 + jitter) as u64).clamp(100, MCP_MAX_RESTART_DELAY_MS) } /// Runs MCP commands by reading configuration from a JSON file and initializing servers @@ -135,9 +126,7 @@ pub async fn run_mcp_commands( // If initial startup failed, we still want to continue with other servers if let Err(e) = &result { log::error!( - "Initial startup failed for MCP server {}: {}", - name_clone, - e + "Initial startup failed for MCP server {name_clone}: {e}" ); } @@ -155,25 +144,23 @@ pub async fn run_mcp_commands( match handle.await { Ok((name, result)) => match result { Ok(_) => { - log::info!("MCP server {} initialized successfully", name); + log::info!("MCP server {name} initialized successfully"); successful_count += 1; } Err(e) => { - log::error!("MCP server {} failed to initialize: {}", name, e); + log::error!("MCP server {name} failed to initialize: {e}"); failed_count += 1; } }, Err(e) => { - log::error!("Failed to join startup task: {}", e); + log::error!("Failed to join startup task: {e}"); failed_count += 1; } } } log::info!( - "MCP server initialization complete: {} successful, {} failed", - successful_count, - failed_count + "MCP server initialization complete: {successful_count} successful, {failed_count} failed" ); Ok(()) @@ -184,7 +171,7 @@ pub async fn monitor_mcp_server_handle( servers_state: SharedMcpServers, name: String, ) -> Option { - log::info!("Monitoring MCP server {} health", name); + log::info!("Monitoring MCP server {name} health"); // Monitor server health with periodic checks loop { @@ -202,17 +189,17 @@ pub async fn monitor_mcp_server_handle( true } Ok(Err(e)) => { - log::warn!("MCP server {} health check failed: {}", name, e); + log::warn!("MCP server {name} health check failed: {e}"); false } Err(_) => { - log::warn!("MCP server {} health check timed out", name); + log::warn!("MCP server {name} health check timed out"); false } } } else { // Server was removed from HashMap (e.g., by deactivate_mcp_server) - log::info!("MCP server {} no longer in running services", name); + log::info!("MCP server {name} no longer in running services"); return Some(rmcp::service::QuitReason::Closed); } }; @@ -220,8 +207,7 @@ pub async fn monitor_mcp_server_handle( if !health_check_result { // Server failed health check - remove it and return log::error!( - "MCP server {} failed health check, removing from active servers", - name + "MCP server {name} failed health check, removing from active servers" ); let mut servers = servers_state.lock().await; if let Some(service) = servers.remove(&name) { @@ -262,7 +248,7 @@ pub async fn start_mcp_server_with_restart( let max_restarts = max_restarts.unwrap_or(5); // Try the first start attempt and return its result - log::info!("Starting MCP server {} (Initial attempt)", name); + log::info!("Starting MCP server {name} (Initial attempt)"); let first_start_result = schedule_mcp_start_task( app.clone(), servers_state.clone(), @@ -273,7 +259,7 @@ pub async fn start_mcp_server_with_restart( match first_start_result { Ok(_) => { - 
log::info!("MCP server {} started successfully on first attempt", name); + log::info!("MCP server {name} started successfully on first attempt"); reset_restart_count(&restart_counts, &name).await; // Check if server was marked as successfully connected (passed verification) @@ -298,18 +284,15 @@ pub async fn start_mcp_server_with_restart( Ok(()) } else { // Server failed verification, don't monitor for restarts - log::error!("MCP server {} failed verification after startup", name); + log::error!("MCP server {name} failed verification after startup"); Err(format!( - "MCP server {} failed verification after startup", - name + "MCP server {name} failed verification after startup" )) } } Err(e) => { log::error!( - "Failed to start MCP server {} on first attempt: {}", - name, - e + "Failed to start MCP server {name} on first attempt: {e}" ); Err(e) } @@ -336,9 +319,7 @@ pub async fn start_restart_loop( if current_restart_count > max_restarts { log::error!( - "MCP server {} reached maximum restart attempts ({}). Giving up.", - name, - max_restarts + "MCP server {name} reached maximum restart attempts ({max_restarts}). Giving up." ); if let Err(e) = app.emit( "mcp_max_restarts_reached", @@ -353,19 +334,13 @@ pub async fn start_restart_loop( } log::info!( - "Restarting MCP server {} (Attempt {}/{})", - name, - current_restart_count, - max_restarts + "Restarting MCP server {name} (Attempt {current_restart_count}/{max_restarts})" ); // Calculate exponential backoff delay let delay_ms = calculate_exponential_backoff_delay(current_restart_count); log::info!( - "Waiting {}ms before restart attempt {} for MCP server {}", - delay_ms, - current_restart_count, - name + "Waiting {delay_ms}ms before restart attempt {current_restart_count} for MCP server {name}" ); sleep(Duration::from_millis(delay_ms)).await; @@ -380,7 +355,7 @@ pub async fn start_restart_loop( match start_result { Ok(_) => { - log::info!("MCP server {} restarted successfully.", name); + log::info!("MCP server {name} restarted successfully."); // Check if server passed verification (was marked as successfully connected) let passed_verification = { @@ -390,8 +365,7 @@ pub async fn start_restart_loop( if !passed_verification { log::error!( - "MCP server {} failed verification after restart - stopping permanently", - name + "MCP server {name} failed verification after restart - stopping permanently" ); break; } @@ -402,9 +376,7 @@ pub async fn start_restart_loop( if let Some(count) = counts.get_mut(&name) { if *count > 0 { log::info!( - "MCP server {} restarted successfully, resetting restart count from {} to 0.", - name, - *count + "MCP server {name} restarted successfully, resetting restart count from {count} to 0." 
); *count = 0; } @@ -415,7 +387,7 @@ pub async fn start_restart_loop( let quit_reason = monitor_mcp_server_handle(servers_state.clone(), name.clone()).await; - log::info!("MCP server {} quit with reason: {:?}", name, quit_reason); + log::info!("MCP server {name} quit with reason: {quit_reason:?}"); // Check if server was marked as successfully connected let was_connected = { @@ -426,8 +398,7 @@ pub async fn start_restart_loop( // Only continue restart loop if server was previously connected if !was_connected { log::error!( - "MCP server {} failed before establishing successful connection - stopping permanently", - name + "MCP server {name} failed before establishing successful connection - stopping permanently" ); break; } @@ -435,11 +406,11 @@ pub async fn start_restart_loop( // Determine if we should restart based on quit reason let should_restart = match quit_reason { Some(reason) => { - log::warn!("MCP server {} terminated unexpectedly: {:?}", name, reason); + log::warn!("MCP server {name} terminated unexpectedly: {reason:?}"); true } None => { - log::info!("MCP server {} was manually stopped - not restarting", name); + log::info!("MCP server {name} was manually stopped - not restarting"); false } }; @@ -450,7 +421,7 @@ pub async fn start_restart_loop( // Continue the loop for another restart attempt } Err(e) => { - log::error!("Failed to restart MCP server {}: {}", name, e); + log::error!("Failed to restart MCP server {name}: {e}"); // Check if server was marked as successfully connected before let was_connected = { @@ -461,8 +432,7 @@ pub async fn start_restart_loop( // Only continue restart attempts if server was previously connected if !was_connected { log::error!( - "MCP server {} failed restart and was never successfully connected - stopping permanently", - name + "MCP server {name} failed restart and was never successfully connected - stopping permanently" ); break; } @@ -529,7 +499,7 @@ async fn schedule_mcp_start_task( }, }; let client = client_info.serve(transport).await.inspect_err(|e| { - log::error!("client error: {:?}", e); + log::error!("client error: {e:?}"); }); match client { @@ -545,12 +515,12 @@ async fn schedule_mcp_start_task( let app_state = app.state::(); let mut connected = app_state.mcp_successfully_connected.lock().await; connected.insert(name.clone(), true); - log::info!("Marked MCP server {} as successfully connected", name); + log::info!("Marked MCP server {name} as successfully connected"); } } Err(e) => { - log::error!("Failed to connect to server: {}", e); - return Err(format!("Failed to connect to server: {}", e)); + log::error!("Failed to connect to server: {e}"); + return Err(format!("Failed to connect to server: {e}")); } } } else if config_params.transport_type.as_deref() == Some("sse") && config_params.url.is_some() @@ -587,8 +557,8 @@ async fn schedule_mcp_start_task( ) .await .map_err(|e| { - log::error!("transport error: {:?}", e); - format!("Failed to start SSE transport: {}", e) + log::error!("transport error: {e:?}"); + format!("Failed to start SSE transport: {e}") })?; let client_info = ClientInfo { @@ -600,7 +570,7 @@ async fn schedule_mcp_start_task( }, }; let client = client_info.serve(transport).await.map_err(|e| { - log::error!("client error: {:?}", e); + log::error!("client error: {e:?}"); e.to_string() }); @@ -617,32 +587,44 @@ async fn schedule_mcp_start_task( let app_state = app.state::(); let mut connected = app_state.mcp_successfully_connected.lock().await; connected.insert(name.clone(), true); - log::info!("Marked MCP server {} 
as successfully connected", name); + log::info!("Marked MCP server {name} as successfully connected"); } } Err(e) => { - log::error!("Failed to connect to server: {}", e); - return Err(format!("Failed to connect to server: {}", e)); + log::error!("Failed to connect to server: {e}"); + return Err(format!("Failed to connect to server: {e}")); } } } else { let mut cmd = Command::new(config_params.command.clone()); - if config_params.command.clone() == "npx" && can_override_npx() { + let bun_x_path = if cfg!(windows) { + bin_path.join("bun.exe") + } else { + bin_path.join("bun") + }; + if config_params.command.clone() == "npx" + && can_override_npx(bun_x_path.display().to_string()) + { let mut cache_dir = app_path.clone(); cache_dir.push(".npx"); - let bun_x_path = format!("{}/bun", bin_path.display()); - cmd = Command::new(bun_x_path); + cmd = Command::new(bun_x_path.display().to_string()); cmd.arg("x"); - cmd.env("BUN_INSTALL", cache_dir.to_str().unwrap().to_string()); + cmd.env("BUN_INSTALL", cache_dir.to_str().unwrap()); } - if config_params.command.clone() == "uvx" { + + let uv_path = if cfg!(windows) { + bin_path.join("uv.exe") + } else { + bin_path.join("uv") + }; + if config_params.command.clone() == "uvx" && can_override_uvx(uv_path.display().to_string()) + { let mut cache_dir = app_path.clone(); cache_dir.push(".uvx"); - let bun_x_path = format!("{}/uv", bin_path.display()); - cmd = Command::new(bun_x_path); + cmd = Command::new(uv_path); cmd.arg("tool"); cmd.arg("run"); - cmd.env("UV_CACHE_DIR", cache_dir.to_str().unwrap().to_string()); + cmd.env("UV_CACHE_DIR", cache_dir.to_str().unwrap()); } #[cfg(windows)] { @@ -714,8 +696,7 @@ async fn schedule_mcp_start_task( if !server_still_running { return Err(format!( - "MCP server {} quit immediately after starting", - name + "MCP server {name} quit immediately after starting" )); } // Mark server as successfully connected (for restart policy) @@ -723,7 +704,7 @@ async fn schedule_mcp_start_task( let app_state = app.state::(); let mut connected = app_state.mcp_successfully_connected.lock().await; connected.insert(name.clone(), true); - log::info!("Marked MCP server {} as successfully connected", name); + log::info!("Marked MCP server {name} as successfully connected"); } } Ok(()) @@ -780,7 +761,7 @@ pub async fn restart_active_mcp_servers( ); for (name, config) in active_servers.iter() { - log::info!("Restarting MCP server: {}", name); + log::info!("Restarting MCP server: {name}"); // Start server with restart monitoring - spawn async task let app_clone = app.clone(); @@ -879,9 +860,7 @@ pub async fn spawn_server_monitoring_task( monitor_mcp_server_handle(servers_clone.clone(), name_clone.clone()).await; log::info!( - "MCP server {} quit with reason: {:?}", - name_clone, - quit_reason + "MCP server {name_clone} quit with reason: {quit_reason:?}" ); // Check if we should restart based on connection status and quit reason @@ -916,8 +895,7 @@ pub async fn should_restart_server( // Only restart if server was previously connected if !was_connected { log::error!( - "MCP server {} failed before establishing successful connection - stopping permanently", - name + "MCP server {name} failed before establishing successful connection - stopping permanently" ); return false; } @@ -925,12 +903,56 @@ pub async fn should_restart_server( // Determine if we should restart based on quit reason match quit_reason { Some(reason) => { - log::warn!("MCP server {} terminated unexpectedly: {:?}", name, reason); + log::warn!("MCP server {name} terminated 
unexpectedly: {reason:?}"); true } None => { - log::info!("MCP server {} was manually stopped - not restarting", name); + log::info!("MCP server {name} was manually stopped - not restarting"); false } } } + +// Add a new server configuration to the MCP config file +pub fn add_server_config( + app_handle: tauri::AppHandle, + server_key: String, + server_value: Value, +) -> Result<(), String> { + add_server_config_with_path(app_handle, server_key, server_value, None) +} + +// Add a new server configuration to the MCP config file with custom path support +pub fn add_server_config_with_path( + app_handle: tauri::AppHandle, + server_key: String, + server_value: Value, + config_filename: Option<&str>, +) -> Result<(), String> { + let config_filename = config_filename.unwrap_or("mcp_config.json"); + let config_path = get_jan_data_folder_path(app_handle).join(config_filename); + + let mut config: Value = serde_json::from_str( + &std::fs::read_to_string(&config_path) + .map_err(|e| format!("Failed to read config file: {e}"))?, + ) + .map_err(|e| format!("Failed to parse config: {e}"))?; + + config + .as_object_mut() + .ok_or("Config root is not an object")? + .entry("mcpServers") + .or_insert_with(|| Value::Object(serde_json::Map::new())) + .as_object_mut() + .ok_or("mcpServers is not an object")? + .insert(server_key, server_value); + + std::fs::write( + &config_path, + serde_json::to_string_pretty(&config) + .map_err(|e| format!("Failed to serialize config: {e}"))?, + ) + .map_err(|e| format!("Failed to write config file: {e}"))?; + + Ok(()) +} diff --git a/src-tauri/src/core/mcp/tests.rs b/src-tauri/src/core/mcp/tests.rs index 081a188e8..71967cd96 100644 --- a/src-tauri/src/core/mcp/tests.rs +++ b/src-tauri/src/core/mcp/tests.rs @@ -1,9 +1,10 @@ -use super::helpers::run_mcp_commands; +use super::helpers::{add_server_config, add_server_config_with_path, run_mcp_commands}; use crate::core::app::commands::get_jan_data_folder_path; use crate::core::state::SharedMcpServers; use std::collections::HashMap; use std::fs::File; use std::io::Write; +use std::path::PathBuf; use std::sync::Arc; use tauri::test::mock_app; use tokio::sync::Mutex; @@ -27,8 +28,7 @@ async fn test_run_mcp_commands() { .expect("Failed to write to config file"); // Call the run_mcp_commands function - let servers_state: SharedMcpServers = - Arc::new(Mutex::new(HashMap::new())); + let servers_state: SharedMcpServers = Arc::new(Mutex::new(HashMap::new())); let result = run_mcp_commands(app.handle(), servers_state).await; // Assert that the function returns Ok(()) @@ -37,3 +37,188 @@ async fn test_run_mcp_commands() { // Clean up the mock config file std::fs::remove_file(&config_path).expect("Failed to remove config file"); } + +#[test] +fn test_add_server_config_new_file() { + let app = mock_app(); + let app_path = get_jan_data_folder_path(app.handle().clone()); + let config_path = app_path.join("mcp_config_test_new.json"); + + // Ensure the directory exists + if let Some(parent) = config_path.parent() { + std::fs::create_dir_all(parent).expect("Failed to create parent directory"); + } + + // Create initial config file with empty mcpServers + let mut file = File::create(&config_path).expect("Failed to create config file"); + file.write_all(b"{\"mcpServers\":{}}") + .expect("Failed to write to config file"); + drop(file); + + // Test adding a new server config + let server_value = serde_json::json!({ + "command": "npx", + "args": ["-y", "test-server"], + "env": { "TEST_API_KEY": "test_key" }, + "active": false + }); + + let result = 
add_server_config_with_path( + app.handle().clone(), + "test_server".to_string(), + server_value.clone(), + Some("mcp_config_test_new.json"), + ); + + assert!(result.is_ok(), "Failed to add server config: {result:?}"); + + // Verify the config was added correctly + let config_content = std::fs::read_to_string(&config_path) + .expect("Failed to read config file"); + let config: serde_json::Value = serde_json::from_str(&config_content) + .expect("Failed to parse config"); + + assert!(config["mcpServers"]["test_server"].is_object()); + assert_eq!(config["mcpServers"]["test_server"]["command"], "npx"); + assert_eq!(config["mcpServers"]["test_server"]["args"][0], "-y"); + assert_eq!(config["mcpServers"]["test_server"]["args"][1], "test-server"); + + // Clean up + std::fs::remove_file(&config_path).expect("Failed to remove config file"); +} + +#[test] +fn test_add_server_config_existing_servers() { + let app = mock_app(); + let app_path = get_jan_data_folder_path(app.handle().clone()); + let config_path = app_path.join("mcp_config_test_existing.json"); + + // Ensure the directory exists + if let Some(parent) = config_path.parent() { + std::fs::create_dir_all(parent).expect("Failed to create parent directory"); + } + + // Create config file with existing server + let initial_config = serde_json::json!({ + "mcpServers": { + "existing_server": { + "command": "existing_command", + "args": ["arg1"], + "active": true + } + } + }); + + let mut file = File::create(&config_path).expect("Failed to create config file"); + file.write_all(serde_json::to_string_pretty(&initial_config).unwrap().as_bytes()) + .expect("Failed to write to config file"); + drop(file); + + // Add new server + let new_server_value = serde_json::json!({ + "command": "new_command", + "args": ["new_arg"], + "active": false + }); + + let result = add_server_config_with_path( + app.handle().clone(), + "new_server".to_string(), + new_server_value, + Some("mcp_config_test_existing.json"), + ); + + assert!(result.is_ok(), "Failed to add server config: {result:?}"); + + // Verify both servers exist + let config_content = std::fs::read_to_string(&config_path) + .expect("Failed to read config file"); + let config: serde_json::Value = serde_json::from_str(&config_content) + .expect("Failed to parse config"); + + // Check existing server is still there + assert!(config["mcpServers"]["existing_server"].is_object()); + assert_eq!(config["mcpServers"]["existing_server"]["command"], "existing_command"); + + // Check new server was added + assert!(config["mcpServers"]["new_server"].is_object()); + assert_eq!(config["mcpServers"]["new_server"]["command"], "new_command"); + + // Clean up + std::fs::remove_file(&config_path).expect("Failed to remove config file"); +} + +#[test] +fn test_add_server_config_missing_config_file() { + let app = mock_app(); + let app_path = get_jan_data_folder_path(app.handle().clone()); + + // Ensure the directory exists + if let Some(parent) = app_path.parent() { + std::fs::create_dir_all(parent).ok(); + } + std::fs::create_dir_all(&app_path).ok(); + + let config_path = app_path.join("mcp_config.json"); + + // Ensure the file doesn't exist + if config_path.exists() { + std::fs::remove_file(&config_path).ok(); + } + + let server_value = serde_json::json!({ + "command": "test", + "args": [], + "active": false + }); + + let result = add_server_config( + app.handle().clone(), + "test".to_string(), + server_value, + ); + + assert!(result.is_err(), "Expected error when config file doesn't exist"); + 
assert!(result.unwrap_err().contains("Failed to read config file")); +} + +#[cfg(not(target_os = "windows"))] +#[test] +fn test_bin_path_construction_with_join() { + // Test that PathBuf::join properly constructs paths + let bin_path = PathBuf::from("/usr/local/bin"); + let bun_path = bin_path.join("bun"); + + assert_eq!(bun_path.to_string_lossy(), "/usr/local/bin/bun"); + + // Test conversion to String via display() + let bun_path_str = bun_path.display().to_string(); + assert_eq!(bun_path_str, "/usr/local/bin/bun"); +} + +#[cfg(not(target_os = "windows"))] +#[test] +fn test_uv_path_construction_with_join() { + // Test that PathBuf::join properly constructs paths for uv + let bin_path = PathBuf::from("/usr/local/bin"); + let uv_path = bin_path.join("uv"); + + assert_eq!(uv_path.to_string_lossy(), "/usr/local/bin/uv"); + + // Test conversion to String via display() + let uv_path_str = uv_path.display().to_string(); + assert_eq!(uv_path_str, "/usr/local/bin/uv"); +} + +#[cfg(target_os = "windows")] +#[test] +fn test_bin_path_construction_windows() { + // Test Windows-style paths + let bin_path = PathBuf::from(r"C:\Program Files\bin"); + let bun_path = bin_path.join("bun.exe"); + + assert_eq!(bun_path.to_string_lossy(), r"C:\Program Files\bin\bun.exe"); + + let bun_path_str = bun_path.display().to_string(); + assert_eq!(bun_path_str, r"C:\Program Files\bin\bun.exe"); +} diff --git a/src-tauri/src/core/server/commands.rs b/src-tauri/src/core/server/commands.rs index 85450bee5..286d40cc1 100644 --- a/src-tauri/src/core/server/commands.rs +++ b/src-tauri/src/core/server/commands.rs @@ -14,12 +14,12 @@ pub async fn start_server( api_key: String, trusted_hosts: Vec, proxy_timeout: u64, -) -> Result { +) -> Result { let server_handle = state.server_handle.clone(); let plugin_state: State = app_handle.state(); let sessions = plugin_state.llama_server_process.clone(); - proxy::start_server( + let actual_port = proxy::start_server( server_handle, sessions, host, @@ -31,7 +31,7 @@ pub async fn start_server( ) .await .map_err(|e| e.to_string())?; - Ok(true) + Ok(actual_port) } #[tauri::command] diff --git a/src-tauri/src/core/server/proxy.rs b/src-tauri/src/core/server/proxy.rs index 4baf36503..b832b03a2 100644 --- a/src-tauri/src/core/server/proxy.rs +++ b/src-tauri/src/core/server/proxy.rs @@ -67,7 +67,7 @@ async fn proxy_request( .any(|&method| method.eq_ignore_ascii_case(requested_method)); if !method_allowed { - log::warn!("CORS preflight: Method '{}' not allowed", requested_method); + log::warn!("CORS preflight: Method '{requested_method}' not allowed"); return Ok(Response::builder() .status(StatusCode::METHOD_NOT_ALLOWED) .body(Body::from("Method not allowed")) @@ -80,14 +80,12 @@ async fn proxy_request( let is_trusted = if is_whitelisted_path { log::debug!( - "CORS preflight: Bypassing host check for whitelisted path: {}", - request_path + "CORS preflight: Bypassing host check for whitelisted path: {request_path}" ); true } else if !host.is_empty() { log::debug!( - "CORS preflight: Host is '{}', trusted hosts: {:?}", - host, + "CORS preflight: Host is '{host}', trusted hosts: {:?}", &config.trusted_hosts ); is_valid_host(host, &config.trusted_hosts) @@ -98,9 +96,7 @@ async fn proxy_request( if !is_trusted { log::warn!( - "CORS preflight: Host '{}' not trusted for path '{}'", - host, - request_path + "CORS preflight: Host '{host}' not trusted for path '{request_path}'" ); return Ok(Response::builder() .status(StatusCode::FORBIDDEN) @@ -158,8 +154,7 @@ async fn proxy_request( if !headers_valid { 
log::warn!( - "CORS preflight: Some requested headers not allowed: {}", - requested_headers + "CORS preflight: Some requested headers not allowed: {requested_headers}" ); return Ok(Response::builder() .status(StatusCode::FORBIDDEN) @@ -186,9 +181,7 @@ async fn proxy_request( } log::debug!( - "CORS preflight response: host_trusted={}, origin='{}'", - is_trusted, - origin + "CORS preflight response: host_trusted={is_trusted}, origin='{origin}'" ); return Ok(response.body(Body::empty()).unwrap()); } @@ -252,7 +245,7 @@ async fn proxy_request( .unwrap()); } } else { - log::debug!("Bypassing host validation for whitelisted path: {}", path); + log::debug!("Bypassing host validation for whitelisted path: {path}"); } if !is_whitelisted_path && !config.proxy_api_key.is_empty() { @@ -285,8 +278,7 @@ async fn proxy_request( } } else if is_whitelisted_path { log::debug!( - "Bypassing authorization check for whitelisted path: {}", - path + "Bypassing authorization check for whitelisted path: {path}" ); } @@ -312,8 +304,7 @@ async fn proxy_request( | (hyper::Method::POST, "/completions") | (hyper::Method::POST, "/embeddings") => { log::debug!( - "Handling POST request to {} requiring model lookup in body", - destination_path + "Handling POST request to {destination_path} requiring model lookup in body", ); let body_bytes = match hyper::body::to_bytes(body).await { Ok(bytes) => bytes, @@ -336,13 +327,12 @@ async fn proxy_request( match serde_json::from_slice::(&body_bytes) { Ok(json_body) => { if let Some(model_id) = json_body.get("model").and_then(|v| v.as_str()) { - log::debug!("Extracted model_id: {}", model_id); + log::debug!("Extracted model_id: {model_id}"); let sessions_guard = sessions.lock().await; if sessions_guard.is_empty() { log::warn!( - "Request for model '{}' but no models are running.", - model_id + "Request for model '{model_id}' but no models are running." 
); let mut error_response = Response::builder().status(StatusCode::SERVICE_UNAVAILABLE); @@ -363,9 +353,9 @@ async fn proxy_request( { target_port = Some(session.info.port); session_api_key = Some(session.info.api_key.clone()); - log::debug!("Found session for model_id {}", model_id,); + log::debug!("Found session for model_id {model_id}"); } else { - log::warn!("No running session found for model_id: {}", model_id); + log::warn!("No running session found for model_id: {model_id}"); let mut error_response = Response::builder().status(StatusCode::NOT_FOUND); error_response = add_cors_headers_with_host_and_origin( @@ -376,15 +366,13 @@ async fn proxy_request( ); return Ok(error_response .body(Body::from(format!( - "No running session found for model '{}'", - model_id + "No running session found for model '{model_id}'" ))) .unwrap()); } } else { log::warn!( - "POST body for {} is missing 'model' field or it's not a string", - destination_path + "POST body for {destination_path} is missing 'model' field or it's not a string" ); let mut error_response = Response::builder().status(StatusCode::BAD_REQUEST); @@ -401,9 +389,7 @@ async fn proxy_request( } Err(e) => { log::warn!( - "Failed to parse POST body for {} as JSON: {}", - destination_path, - e + "Failed to parse POST body for {destination_path} as JSON: {e}" ); let mut error_response = Response::builder().status(StatusCode::BAD_REQUEST); error_response = add_cors_headers_with_host_and_origin( @@ -535,7 +521,7 @@ async fn proxy_request( let is_explicitly_whitelisted_get = method == hyper::Method::GET && whitelisted_paths.contains(&destination_path.as_str()); if is_explicitly_whitelisted_get { - log::debug!("Handled whitelisted GET path: {}", destination_path); + log::debug!("Handled whitelisted GET path: {destination_path}"); let mut error_response = Response::builder().status(StatusCode::NOT_FOUND); error_response = add_cors_headers_with_host_and_origin( error_response, @@ -546,9 +532,7 @@ async fn proxy_request( return Ok(error_response.body(Body::from("Not Found")).unwrap()); } else { log::warn!( - "Unhandled method/path for dynamic routing: {} {}", - method, - destination_path + "Unhandled method/path for dynamic routing: {method} {destination_path}" ); let mut error_response = Response::builder().status(StatusCode::NOT_FOUND); error_response = add_cors_headers_with_host_and_origin( @@ -581,7 +565,7 @@ async fn proxy_request( } }; - let upstream_url = format!("http://127.0.0.1:{}{}", port, destination_path); + let upstream_url = format!("http://127.0.0.1:{port}{destination_path}"); let mut outbound_req = client.request(method.clone(), &upstream_url); @@ -593,13 +577,14 @@ async fn proxy_request( if let Some(key) = session_api_key { log::debug!("Adding session Authorization header"); - outbound_req = outbound_req.header("Authorization", format!("Bearer {}", key)); + outbound_req = outbound_req.header("Authorization", format!("Bearer {key}")); } else { log::debug!("No session API key available for this request"); } let outbound_req_with_body = if let Some(bytes) = buffered_body { - log::debug!("Sending buffered body ({} bytes)", bytes.len()); + let bytes_len = bytes.len(); + log::debug!("Sending buffered body ({bytes_len} bytes)"); outbound_req.body(bytes) } else { log::error!("Internal logic error: Request reached proxy stage without a buffered body."); @@ -618,7 +603,7 @@ async fn proxy_request( match outbound_req_with_body.send().await { Ok(response) => { let status = response.status(); - log::debug!("Received response with status: {}", 
status); + log::debug!("Received response with status: {status}"); let mut builder = Response::builder().status(status); @@ -648,7 +633,7 @@ async fn proxy_request( } } Err(e) => { - log::error!("Stream error: {}", e); + log::error!("Stream error: {e}"); break; } } @@ -659,8 +644,8 @@ async fn proxy_request( Ok(builder.body(body).unwrap()) } Err(e) => { - let error_msg = format!("Proxy request to model failed: {}", e); - log::error!("{}", error_msg); + let error_msg = format!("Proxy request to model failed: {e}"); + log::error!("{error_msg}"); let mut error_response = Response::builder().status(StatusCode::BAD_GATEWAY); error_response = add_cors_headers_with_host_and_origin( error_response, @@ -675,14 +660,12 @@ async fn proxy_request( fn add_cors_headers_with_host_and_origin( builder: hyper::http::response::Builder, - host: &str, + _host: &str, origin: &str, - trusted_hosts: &[Vec], + _trusted_hosts: &[Vec], ) -> hyper::http::response::Builder { let mut builder = builder; - let allow_origin_header = if !origin.is_empty() && is_valid_host(host, trusted_hosts) { - origin.to_string() - } else if !origin.is_empty() { + let allow_origin_header = if !origin.is_empty() { origin.to_string() } else { "*".to_string() @@ -706,6 +689,7 @@ pub async fn is_server_running(server_handle: Arc>>) handle_guard.is_some() } +#[allow(clippy::too_many_arguments)] pub async fn start_server( server_handle: Arc>>, sessions: Arc>>, @@ -715,15 +699,15 @@ pub async fn start_server( proxy_api_key: String, trusted_hosts: Vec>, proxy_timeout: u64, -) -> Result> { +) -> Result> { let mut handle_guard = server_handle.lock().await; if handle_guard.is_some() { return Err("Server is already running".into()); } - let addr: SocketAddr = format!("{}:{}", host, port) + let addr: SocketAddr = format!("{host}:{port}") .parse() - .map_err(|e| format!("Invalid address: {}", e))?; + .map_err(|e| format!("Invalid address: {e}"))?; let config = ProxyConfig { prefix, @@ -752,22 +736,24 @@ pub async fn start_server( let server = match Server::try_bind(&addr) { Ok(builder) => builder.serve(make_svc), Err(e) => { - log::error!("Failed to bind to {}: {}", addr, e); + log::error!("Failed to bind to {addr}: {e}"); return Err(Box::new(e)); } }; - log::info!("Jan API server started on http://{}", addr); + log::info!("Jan API server started on http://{addr}"); let server_task = tokio::spawn(async move { if let Err(e) = server.await { - log::error!("Server error: {}", e); + log::error!("Server error: {e}"); return Err(Box::new(e) as Box); } Ok(()) }); *handle_guard = Some(server_task); - Ok(true) + let actual_port = addr.port(); + log::info!("Jan API server started successfully on port {actual_port}"); + Ok(actual_port) } pub async fn stop_server( diff --git a/src-tauri/src/core/setup.rs b/src-tauri/src/core/setup.rs index c88e62a8d..7ba8f2f74 100644 --- a/src-tauri/src/core/setup.rs +++ b/src-tauri/src/core/setup.rs @@ -3,38 +3,33 @@ use std::{ fs::{self, File}, io::Read, path::PathBuf, + sync::Arc, }; use tar::Archive; +use tauri::{ + App, Emitter, Manager, Runtime, Wry, WindowEvent +}; + +#[cfg(desktop)] use tauri::{ menu::{Menu, MenuItem, PredefinedMenuItem}, tray::{MouseButton, MouseButtonState, TrayIcon, TrayIconBuilder, TrayIconEvent}, - App, Emitter, Manager, }; -use tauri_plugin_store::StoreExt; -// use tokio::sync::Mutex; -// use tokio::time::{sleep, Duration}; // Using tokio::sync::Mutex -// // MCP +use tauri_plugin_store::Store; + +use crate::core::mcp::helpers::add_server_config; -// MCP use super::{ - 
app::commands::get_jan_data_folder_path, extensions::commands::get_jan_extensions_path, - mcp::helpers::run_mcp_commands, state::AppState, + extensions::commands::get_jan_extensions_path, mcp::helpers::run_mcp_commands, state::AppState, }; -pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), String> { - let mut store_path = get_jan_data_folder_path(app.clone()); - store_path.push("store.json"); - let store = app.store(store_path).expect("Store not initialized"); - let stored_version = store - .get("version") - .and_then(|v| v.as_str().map(String::from)) - .unwrap_or_default(); - - let app_version = app - .config() - .version - .clone() - .unwrap_or_else(|| "".to_string()); +pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), String> { + // Skip extension installation on mobile platforms + // Mobile uses pre-bundled extensions loaded via MobileCoreService in the frontend + #[cfg(any(target_os = "android", target_os = "ios"))] + { + return Ok(()); + } let extensions_path = get_jan_extensions_path(app.clone()); let pre_install_path = app @@ -50,13 +45,8 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), Stri if std::env::var("IS_CLEAN").is_ok() { clean_up = true; } - log::info!( - "Installing extensions. Clean up: {}, Stored version: {}, App version: {}", - clean_up, - stored_version, - app_version - ); - if !clean_up && stored_version == app_version && extensions_path.exists() { + log::info!("Installing extensions. Clean up: {clean_up}"); + if !clean_up && extensions_path.exists() { return Ok(()); } @@ -85,7 +75,7 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), Stri let entry = entry.map_err(|e| e.to_string())?; let path = entry.path(); - if path.extension().map_or(false, |ext| ext == "tgz") { + if path.extension().is_some_and(|ext| ext == "tgz") { let tar_gz = File::open(&path).map_err(|e| e.to_string())?; let gz_decoder = GzDecoder::new(tar_gz); let mut archive = Archive::new(gz_decoder); @@ -151,7 +141,7 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), Stri extensions_list.push(new_extension); - log::info!("Installed extension to {:?}", extension_dir); + log::info!("Installed extension to {extension_dir:?}"); } } fs::write( @@ -160,10 +150,36 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), Stri ) .map_err(|e| e.to_string())?; - // Store the new app version - store.set("version", serde_json::json!(app_version)); - store.save().expect("Failed to save store"); + Ok(()) +} +// Migrate MCP servers configuration +pub fn migrate_mcp_servers( + app_handle: tauri::AppHandle, + store: Arc>, +) -> Result<(), String> { + let mcp_version = store + .get("mcp_version") + .and_then(|v| v.as_i64()) + .unwrap_or(0); + if mcp_version < 1 { + log::info!("Migrating MCP schema version 1"); + let result = add_server_config( + app_handle, + "exa".to_string(), + serde_json::json!({ + "command": "npx", + "args": ["-y", "exa-mcp-server"], + "env": { "EXA_API_KEY": "YOUR_EXA_API_KEY_HERE" }, + "active": false + }), + ); + if let Err(e) = result { + log::error!("Failed to add server config: {e}"); + } + } + store.set("mcp_version", 1); + store.save().expect("Failed to save store"); Ok(()) } @@ -197,13 +213,13 @@ pub fn extract_extension_manifest( Ok(None) } -pub fn setup_mcp(app: &App) { +pub fn setup_mcp(app: &App) { let state = app.state::(); let servers = state.mcp_servers.clone(); - let app_handle: tauri::AppHandle = app.handle().clone(); + let 
app_handle = app.handle().clone(); tauri::async_runtime::spawn(async move { if let Err(e) = run_mcp_commands(&app_handle, servers).await { - log::error!("Failed to run mcp commands: {}", e); + log::error!("Failed to run mcp commands: {e}"); } app_handle .emit("mcp-update", "MCP servers updated") @@ -211,6 +227,7 @@ pub fn setup_mcp(app: &App) { }); } +#[cfg(desktop)] pub fn setup_tray(app: &App) -> tauri::Result<TrayIcon> { let show_i = MenuItem::with_id(app.handle(), "open", "Open Jan", true, None::<&str>)?; let quit_i = MenuItem::with_id(app.handle(), "quit", "Quit", true, None::<&str>)?; @@ -248,8 +265,37 @@ pub fn setup_tray(app: &App) -> tauri::Result<TrayIcon> { app.exit(0); } other => { - println!("menu item {} not handled", other); + println!("menu item {other} not handled"); } }) .build(app) } + +pub fn setup_theme_listener(app: &App) -> tauri::Result<()> { + // Setup theme listener for main window + if let Some(window) = app.get_webview_window("main") { + setup_window_theme_listener(app.handle().clone(), window); + } + + Ok(()) +} + +fn setup_window_theme_listener( + app_handle: tauri::AppHandle, + window: tauri::WebviewWindow, +) { + let window_label = window.label().to_string(); + let app_handle_clone = app_handle.clone(); + + window.on_window_event(move |event| { + if let WindowEvent::ThemeChanged(theme) = event { + let theme_str = match theme { + tauri::Theme::Light => "light", + tauri::Theme::Dark => "dark", + _ => "auto", + }; + log::info!("System theme changed to: {} for window: {}", theme_str, window_label); + let _ = app_handle_clone.emit("theme-changed", theme_str); + } + }); +} diff --git a/src-tauri/src/core/system/commands.rs b/src-tauri/src/core/system/commands.rs index a8b58d745..9c72fd4da 100644 --- a/src-tauri/src/core/system/commands.rs +++ b/src-tauri/src/core/system/commands.rs @@ -1,6 +1,6 @@ use std::fs; use std::path::PathBuf; -use tauri::{AppHandle, Manager, State}; +use tauri::{AppHandle, Manager, Runtime, State}; use tauri_plugin_llamacpp::cleanup_llama_processes; use crate::core::app::commands::{ @@ -11,16 +11,19 @@ use crate::core::mcp::helpers::clean_up_mcp_servers; use crate::core::state::AppState; #[tauri::command] -pub fn factory_reset(app_handle: tauri::AppHandle, state: State<'_, AppState>) { - // close window - let windows = app_handle.webview_windows(); - for (label, window) in windows.iter() { - window.close().unwrap_or_else(|_| { - log::warn!("Failed to close window: {:?}", label); - }); +pub fn factory_reset<R: Runtime>(app_handle: tauri::AppHandle<R>, state: State<'_, AppState>) { + // close window (not available on mobile platforms) + #[cfg(not(any(target_os = "ios", target_os = "android")))] + { + let windows = app_handle.webview_windows(); + for (label, window) in windows.iter() { + window.close().unwrap_or_else(|_| { + log::warn!("Failed to close window: {label:?}"); + }); + } } let data_folder = get_jan_data_folder_path(app_handle.clone()); - log::info!("Factory reset, removing data folder: {:?}", data_folder); + log::info!("Factory reset, removing data folder: {data_folder:?}"); tauri::async_runtime::block_on(async { clean_up_mcp_servers(state.clone()).await; @@ -28,7 +31,7 @@ pub fn factory_reset(app_handle: tauri::AppHandle, state: State<'_, AppState>) { if data_folder.exists() { if let Err(e) = fs::remove_dir_all(&data_folder) { - log::error!("Failed to remove data folder: {}", e); + log::error!("Failed to remove data folder: {e}"); return; } } @@ -46,27 +49,27 @@ pub fn factory_reset(app_handle: tauri::AppHandle, state: State<'_, AppState>) { } #[tauri::command] 
-pub fn relaunch(app: AppHandle) { +pub fn relaunch(app: AppHandle) { app.restart() } #[tauri::command] -pub fn open_app_directory(app: AppHandle) { +pub fn open_app_directory(app: AppHandle) { let app_path = app.path().app_data_dir().unwrap(); if cfg!(target_os = "windows") { std::process::Command::new("explorer") .arg(app_path) - .spawn() + .status() .expect("Failed to open app directory"); } else if cfg!(target_os = "macos") { std::process::Command::new("open") .arg(app_path) - .spawn() + .status() .expect("Failed to open app directory"); } else { std::process::Command::new("xdg-open") .arg(app_path) - .spawn() + .status() .expect("Failed to open app directory"); } } @@ -77,29 +80,29 @@ pub fn open_file_explorer(path: String) { if cfg!(target_os = "windows") { std::process::Command::new("explorer") .arg(path) - .spawn() + .status() .expect("Failed to open file explorer"); } else if cfg!(target_os = "macos") { std::process::Command::new("open") .arg(path) - .spawn() + .status() .expect("Failed to open file explorer"); } else { std::process::Command::new("xdg-open") .arg(path) - .spawn() + .status() .expect("Failed to open file explorer"); } } #[tauri::command] -pub async fn read_logs(app: AppHandle) -> Result { +pub async fn read_logs(app: AppHandle) -> Result { let log_path = get_jan_data_folder_path(app).join("logs").join("app.log"); if log_path.exists() { let content = fs::read_to_string(log_path).map_err(|e| e.to_string())?; Ok(content) } else { - Err(format!("Log file not found")) + Err("Log file not found".to_string()) } } @@ -109,8 +112,9 @@ pub fn is_library_available(library: &str) -> bool { match unsafe { libloading::Library::new(library) } { Ok(_) => true, Err(e) => { - log::info!("Library {} is not available: {}", library, e); + log::info!("Library {library} is not available: {e}"); false } } } + diff --git a/src-tauri/src/core/threads/commands.rs b/src-tauri/src/core/threads/commands.rs index a9012193a..07bf46094 100644 --- a/src-tauri/src/core/threads/commands.rs +++ b/src-tauri/src/core/threads/commands.rs @@ -3,8 +3,11 @@ use std::io::Write; use tauri::Runtime; use uuid::Uuid; +#[cfg(any(target_os = "android", target_os = "ios"))] +use super::db; use super::helpers::{ - get_lock_for_thread, read_messages_from_file, update_thread_metadata, write_messages_to_file, + get_lock_for_thread, read_messages_from_file, should_use_sqlite, update_thread_metadata, + write_messages_to_file, }; use super::{ constants::THREADS_FILE, @@ -14,12 +17,19 @@ use super::{ }, }; -/// Lists all threads by reading their metadata from the threads directory. +/// Lists all threads by reading their metadata from the threads directory or database. /// Returns a vector of thread metadata as JSON values. 
#[tauri::command] pub async fn list_threads( app_handle: tauri::AppHandle, ) -> Result, String> { + if should_use_sqlite() { + // Use SQLite on mobile platforms + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_list_threads(app_handle).await; + } + + // Use file-based storage on desktop ensure_data_dirs(app_handle.clone())?; let data_dir = get_data_dir(app_handle.clone()); let mut threads = Vec::new(); @@ -38,7 +48,7 @@ pub async fn list_threads( match serde_json::from_str(&data) { Ok(thread) => threads.push(thread), Err(e) => { - println!("Failed to parse thread file: {}", e); + println!("Failed to parse thread file: {e}"); continue; // skip invalid thread files } } @@ -56,6 +66,12 @@ pub async fn create_thread( app_handle: tauri::AppHandle, mut thread: serde_json::Value, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_create_thread(app_handle, thread).await; + } + + // Use file-based storage on desktop ensure_data_dirs(app_handle.clone())?; let uuid = Uuid::new_v4().to_string(); thread["id"] = serde_json::Value::String(uuid.clone()); @@ -76,6 +92,12 @@ pub async fn modify_thread( app_handle: tauri::AppHandle, thread: serde_json::Value, ) -> Result<(), String> { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_modify_thread(app_handle, thread).await; + } + + // Use file-based storage on desktop let thread_id = thread .get("id") .and_then(|id| id.as_str()) @@ -96,6 +118,12 @@ pub async fn delete_thread( app_handle: tauri::AppHandle, thread_id: String, ) -> Result<(), String> { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_delete_thread(app_handle, &thread_id).await; + } + + // Use file-based storage on desktop let thread_dir = get_thread_dir(app_handle.clone(), &thread_id); if thread_dir.exists() { let _ = fs::remove_dir_all(thread_dir); @@ -110,6 +138,12 @@ pub async fn list_messages( app_handle: tauri::AppHandle, thread_id: String, ) -> Result, String> { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_list_messages(app_handle, &thread_id).await; + } + + // Use file-based storage on desktop read_messages_from_file(app_handle, &thread_id) } @@ -120,6 +154,12 @@ pub async fn create_message( app_handle: tauri::AppHandle, mut message: serde_json::Value, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_create_message(app_handle, message).await; + } + + // Use file-based storage on desktop let thread_id = { let id = message .get("thread_id") @@ -127,7 +167,6 @@ pub async fn create_message( .ok_or("Missing thread_id")?; id.to_string() }; - ensure_thread_dir_exists(app_handle.clone(), &thread_id)?; let path = get_messages_path(app_handle.clone(), &thread_id); if message.get("id").is_none() { @@ -140,6 +179,9 @@ pub async fn create_message( let lock = get_lock_for_thread(&thread_id).await; let _guard = lock.lock().await; + // Ensure directory exists right before file operations to handle race conditions + ensure_thread_dir_exists(app_handle.clone(), &thread_id)?; + let mut file: File = fs::OpenOptions::new() .create(true) .append(true) @@ -147,7 +189,10 @@ pub async fn create_message( .map_err(|e| e.to_string())?; let data = serde_json::to_string(&message).map_err(|e| e.to_string())?; - writeln!(file, "{}", data).map_err(|e| e.to_string())?; + writeln!(file, "{data}").map_err(|e| e.to_string())?; + 
+ // Explicitly flush to ensure data is written before returning + file.flush().map_err(|e| e.to_string())?; } Ok(message) @@ -161,6 +206,12 @@ pub async fn modify_message( app_handle: tauri::AppHandle, message: serde_json::Value, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_modify_message(app_handle, message).await; + } + + // Use file-based storage on desktop let thread_id = message .get("thread_id") .and_then(|v| v.as_str()) @@ -199,6 +250,12 @@ pub async fn delete_message( thread_id: String, message_id: String, ) -> Result<(), String> { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_delete_message(app_handle, &thread_id, &message_id).await; + } + + // Use file-based storage on desktop // Acquire per-thread lock before modifying { let lock = get_lock_for_thread(&thread_id).await; @@ -222,6 +279,12 @@ pub async fn get_thread_assistant( app_handle: tauri::AppHandle, thread_id: String, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_get_thread_assistant(app_handle, &thread_id).await; + } + + // Use file-based storage on desktop let path = get_thread_metadata_path(app_handle, &thread_id); if !path.exists() { return Err("Thread not found".to_string()); @@ -229,7 +292,7 @@ pub async fn get_thread_assistant( let data = fs::read_to_string(&path).map_err(|e| e.to_string())?; let thread: serde_json::Value = serde_json::from_str(&data).map_err(|e| e.to_string())?; if let Some(assistants) = thread.get("assistants").and_then(|a| a.as_array()) { - if let Some(first) = assistants.get(0) { + if let Some(first) = assistants.first() { Ok(first.clone()) } else { Err("Assistant not found".to_string()) @@ -247,6 +310,12 @@ pub async fn create_thread_assistant( thread_id: String, assistant: serde_json::Value, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_create_thread_assistant(app_handle, &thread_id, assistant).await; + } + + // Use file-based storage on desktop let path = get_thread_metadata_path(app_handle.clone(), &thread_id); if !path.exists() { return Err("Thread not found".to_string()); @@ -272,6 +341,12 @@ pub async fn modify_thread_assistant( thread_id: String, assistant: serde_json::Value, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_modify_thread_assistant(app_handle, &thread_id, assistant).await; + } + + // Use file-based storage on desktop let path = get_thread_metadata_path(app_handle.clone(), &thread_id); if !path.exists() { return Err("Thread not found".to_string()); diff --git a/src-tauri/src/core/threads/db.rs b/src-tauri/src/core/threads/db.rs new file mode 100644 index 000000000..b888b94bb --- /dev/null +++ b/src-tauri/src/core/threads/db.rs @@ -0,0 +1,397 @@ +/*! + SQLite Database Module for Mobile Thread Storage + + This module provides SQLite-based storage for threads and messages on mobile platforms. + It ensures data persistence and retrieval work correctly on Android and iOS devices. + + Note: This module is only compiled and used on mobile platforms (Android/iOS). + On desktop, the file-based storage in helpers.rs is used instead. 
+*/ + +#![allow(dead_code)] // Functions only used on mobile platforms + +use serde_json::Value; +use sqlx::sqlite::{SqliteConnectOptions, SqlitePool, SqlitePoolOptions}; +use sqlx::Row; +use std::str::FromStr; +use std::sync::OnceLock; +use tauri::{AppHandle, Manager, Runtime}; +use tokio::sync::Mutex; + +const DB_NAME: &str = "jan.db"; + +/// Global database pool for mobile platforms +static DB_POOL: OnceLock>> = OnceLock::new(); + +/// Initialize database with connection pool and run migrations +pub async fn init_database(app: &AppHandle) -> Result<(), String> { + // Get app data directory + let app_data_dir = app + .path() + .app_data_dir() + .map_err(|e| format!("Failed to get app data dir: {}", e))?; + + // Ensure directory exists + std::fs::create_dir_all(&app_data_dir) + .map_err(|e| format!("Failed to create app data dir: {}", e))?; + + // Create database path + let db_path = app_data_dir.join(DB_NAME); + let db_url = format!("sqlite:{}", db_path.display()); + + log::info!("Initializing SQLite database at: {}", db_url); + + // Create connection options + let connect_options = SqliteConnectOptions::from_str(&db_url) + .map_err(|e| format!("Failed to parse connection options: {}", e))? + .create_if_missing(true); + + // Create connection pool + let pool = SqlitePoolOptions::new() + .max_connections(5) + .connect_with(connect_options) + .await + .map_err(|e| format!("Failed to create connection pool: {}", e))?; + + // Run migrations + sqlx::query( + r#" + CREATE TABLE IF NOT EXISTS threads ( + id TEXT PRIMARY KEY, + data TEXT NOT NULL, + created_at INTEGER DEFAULT (strftime('%s', 'now')), + updated_at INTEGER DEFAULT (strftime('%s', 'now')) + ); + "#, + ) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create threads table: {}", e))?; + + sqlx::query( + r#" + CREATE TABLE IF NOT EXISTS messages ( + id TEXT PRIMARY KEY, + thread_id TEXT NOT NULL, + data TEXT NOT NULL, + created_at INTEGER DEFAULT (strftime('%s', 'now')), + FOREIGN KEY (thread_id) REFERENCES threads(id) ON DELETE CASCADE + ); + "#, + ) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create messages table: {}", e))?; + + // Create indexes + sqlx::query( + "CREATE INDEX IF NOT EXISTS idx_messages_thread_id ON messages(thread_id);", + ) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create thread_id index: {}", e))?; + + sqlx::query( + "CREATE INDEX IF NOT EXISTS idx_messages_created_at ON messages(created_at);", + ) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create created_at index: {}", e))?; + + // Store pool globally + DB_POOL + .get_or_init(|| Mutex::new(None)) + .lock() + .await + .replace(pool); + + log::info!("SQLite database initialized successfully for mobile platform"); + Ok(()) +} + +/// Get database pool +async fn get_pool() -> Result { + let pool_mutex = DB_POOL + .get() + .ok_or("Database not initialized")?; + + let pool_guard = pool_mutex.lock().await; + pool_guard + .clone() + .ok_or("Database pool not available".to_string()) +} + +/// List all threads from database +pub async fn db_list_threads( + _app_handle: AppHandle, +) -> Result, String> { + let pool = get_pool().await?; + + let rows = sqlx::query("SELECT data FROM threads ORDER BY updated_at DESC") + .fetch_all(&pool) + .await + .map_err(|e| format!("Failed to list threads: {}", e))?; + + let threads: Result, _> = rows + .iter() + .map(|row| { + let data: String = row.get("data"); + serde_json::from_str(&data).map_err(|e| e.to_string()) + }) + .collect(); + + threads +} + +/// Create a new 
thread in database +pub async fn db_create_thread( + _app_handle: AppHandle, + thread: Value, +) -> Result { + let pool = get_pool().await?; + + let thread_id = thread + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing thread id")?; + + let data = serde_json::to_string(&thread).map_err(|e| e.to_string())?; + + sqlx::query("INSERT INTO threads (id, data) VALUES (?1, ?2)") + .bind(thread_id) + .bind(&data) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create thread: {}", e))?; + + Ok(thread) +} + +/// Modify an existing thread in database +pub async fn db_modify_thread( + _app_handle: AppHandle, + thread: Value, +) -> Result<(), String> { + let pool = get_pool().await?; + + let thread_id = thread + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing thread id")?; + + let data = serde_json::to_string(&thread).map_err(|e| e.to_string())?; + + sqlx::query("UPDATE threads SET data = ?1, updated_at = strftime('%s', 'now') WHERE id = ?2") + .bind(&data) + .bind(thread_id) + .execute(&pool) + .await + .map_err(|e| format!("Failed to modify thread: {}", e))?; + + Ok(()) +} + +/// Delete a thread from database +pub async fn db_delete_thread( + _app_handle: AppHandle, + thread_id: &str, +) -> Result<(), String> { + let pool = get_pool().await?; + + // Messages will be auto-deleted via CASCADE + sqlx::query("DELETE FROM threads WHERE id = ?1") + .bind(thread_id) + .execute(&pool) + .await + .map_err(|e| format!("Failed to delete thread: {}", e))?; + + Ok(()) +} + +/// List all messages for a thread from database +pub async fn db_list_messages( + _app_handle: AppHandle, + thread_id: &str, +) -> Result, String> { + let pool = get_pool().await?; + + let rows = sqlx::query( + "SELECT data FROM messages WHERE thread_id = ?1 ORDER BY created_at ASC", + ) + .bind(thread_id) + .fetch_all(&pool) + .await + .map_err(|e| format!("Failed to list messages: {}", e))?; + + let messages: Result, _> = rows + .iter() + .map(|row| { + let data: String = row.get("data"); + serde_json::from_str(&data).map_err(|e| e.to_string()) + }) + .collect(); + + messages +} + +/// Create a new message in database +pub async fn db_create_message( + _app_handle: AppHandle, + message: Value, +) -> Result { + let pool = get_pool().await?; + + let message_id = message + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing message id")?; + + let thread_id = message + .get("thread_id") + .and_then(|v| v.as_str()) + .ok_or("Missing thread_id")?; + + let data = serde_json::to_string(&message).map_err(|e| e.to_string())?; + + sqlx::query("INSERT INTO messages (id, thread_id, data) VALUES (?1, ?2, ?3)") + .bind(message_id) + .bind(thread_id) + .bind(&data) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create message: {}", e))?; + + Ok(message) +} + +/// Modify an existing message in database +pub async fn db_modify_message( + _app_handle: AppHandle, + message: Value, +) -> Result { + let pool = get_pool().await?; + + let message_id = message + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing message id")?; + + let data = serde_json::to_string(&message).map_err(|e| e.to_string())?; + + sqlx::query("UPDATE messages SET data = ?1 WHERE id = ?2") + .bind(&data) + .bind(message_id) + .execute(&pool) + .await + .map_err(|e| format!("Failed to modify message: {}", e))?; + + Ok(message) +} + +/// Delete a message from database +pub async fn db_delete_message( + _app_handle: AppHandle, + _thread_id: &str, + message_id: &str, +) -> Result<(), String> { + let pool = get_pool().await?; + + 
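// `_thread_id` is deliberately unused here: message ids are primary keys, so
// the id alone identifies the row. The parameter stays so this signature
// mirrors the file-based delete_message command.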
sqlx::query("DELETE FROM messages WHERE id = ?1") + .bind(message_id) + .execute(&pool) + .await + .map_err(|e| format!("Failed to delete message: {}", e))?; + + Ok(()) +} + +/// Get thread assistant information from thread metadata +pub async fn db_get_thread_assistant( + _app_handle: AppHandle, + thread_id: &str, +) -> Result { + let pool = get_pool().await?; + + let row = sqlx::query("SELECT data FROM threads WHERE id = ?1") + .bind(thread_id) + .fetch_optional(&pool) + .await + .map_err(|e| format!("Failed to get thread: {}", e))? + .ok_or("Thread not found")?; + + let data: String = row.get("data"); + let thread: Value = serde_json::from_str(&data).map_err(|e| e.to_string())?; + + if let Some(assistants) = thread.get("assistants").and_then(|a| a.as_array()) { + assistants + .first() + .cloned() + .ok_or("Assistant not found".to_string()) + } else { + Err("Assistant not found".to_string()) + } +} + +/// Create thread assistant in database +pub async fn db_create_thread_assistant( + app_handle: AppHandle, + thread_id: &str, + assistant: Value, +) -> Result { + let pool = get_pool().await?; + + let row = sqlx::query("SELECT data FROM threads WHERE id = ?1") + .bind(thread_id) + .fetch_optional(&pool) + .await + .map_err(|e| format!("Failed to get thread: {}", e))? + .ok_or("Thread not found")?; + + let data: String = row.get("data"); + let mut thread: Value = serde_json::from_str(&data).map_err(|e| e.to_string())?; + + if let Some(assistants) = thread.get_mut("assistants").and_then(|a| a.as_array_mut()) { + assistants.push(assistant.clone()); + } else { + thread["assistants"] = Value::Array(vec![assistant.clone()]); + } + + db_modify_thread(app_handle, thread).await?; + Ok(assistant) +} + +/// Modify thread assistant in database +pub async fn db_modify_thread_assistant( + app_handle: AppHandle, + thread_id: &str, + assistant: Value, +) -> Result { + let pool = get_pool().await?; + + let row = sqlx::query("SELECT data FROM threads WHERE id = ?1") + .bind(thread_id) + .fetch_optional(&pool) + .await + .map_err(|e| format!("Failed to get thread: {}", e))? 
+ .ok_or("Thread not found")?; + + let data: String = row.get("data"); + let mut thread: Value = serde_json::from_str(&data).map_err(|e| e.to_string())?; + + let assistant_id = assistant + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing assistant id")?; + + if let Some(assistants) = thread.get_mut("assistants").and_then(|a| a.as_array_mut()) { + if let Some(index) = assistants + .iter() + .position(|a| a.get("id").and_then(|v| v.as_str()) == Some(assistant_id)) + { + assistants[index] = assistant.clone(); + db_modify_thread(app_handle, thread).await?; + } + } + + Ok(assistant) +} diff --git a/src-tauri/src/core/threads/helpers.rs b/src-tauri/src/core/threads/helpers.rs index 0edcf41b2..1710c5767 100644 --- a/src-tauri/src/core/threads/helpers.rs +++ b/src-tauri/src/core/threads/helpers.rs @@ -3,7 +3,7 @@ use std::io::{BufRead, BufReader, Write}; use tauri::Runtime; // For async file write serialization -use once_cell::sync::Lazy; +use std::sync::OnceLock; use std::collections::HashMap; use std::sync::Arc; use tokio::sync::Mutex; @@ -11,12 +11,17 @@ use tokio::sync::Mutex; use super::utils::{get_messages_path, get_thread_metadata_path}; // Global per-thread locks for message file writes -pub static MESSAGE_LOCKS: Lazy>>>> = - Lazy::new(|| Mutex::new(HashMap::new())); +pub static MESSAGE_LOCKS: OnceLock>>>> = OnceLock::new(); + +/// Check if the platform should use SQLite (mobile platforms) +pub fn should_use_sqlite() -> bool { + cfg!(any(target_os = "android", target_os = "ios")) +} /// Get a lock for a specific thread to ensure thread-safe message file operations pub async fn get_lock_for_thread(thread_id: &str) -> Arc> { - let mut locks = MESSAGE_LOCKS.lock().await; + let locks = MESSAGE_LOCKS.get_or_init(|| Mutex::new(HashMap::new())); + let mut locks = locks.lock().await; let lock = locks .entry(thread_id.to_string()) .or_insert_with(|| Arc::new(Mutex::new(()))) @@ -33,7 +38,7 @@ pub fn write_messages_to_file( let mut file = File::create(path).map_err(|e| e.to_string())?; for msg in messages { let data = serde_json::to_string(msg).map_err(|e| e.to_string())?; - writeln!(file, "{}", data).map_err(|e| e.to_string())?; + writeln!(file, "{data}").map_err(|e| e.to_string())?; } Ok(()) } diff --git a/src-tauri/src/core/threads/mod.rs b/src-tauri/src/core/threads/mod.rs index fb76bee8c..99c00253e 100644 --- a/src-tauri/src/core/threads/mod.rs +++ b/src-tauri/src/core/threads/mod.rs @@ -12,8 +12,9 @@ pub mod commands; mod constants; +#[cfg(any(target_os = "android", target_os = "ios"))] +pub mod db; pub mod helpers; -pub mod models; pub mod utils; #[cfg(test)] diff --git a/src-tauri/src/core/threads/models.rs b/src-tauri/src/core/threads/models.rs deleted file mode 100644 index 5038c6def..000000000 --- a/src-tauri/src/core/threads/models.rs +++ /dev/null @@ -1,103 +0,0 @@ -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct Thread { - pub id: String, - pub object: String, - pub title: String, - pub assistants: Vec, - pub created: i64, - pub updated: i64, - pub metadata: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ThreadMessage { - pub id: String, - pub object: String, - pub thread_id: String, - pub assistant_id: Option, - pub attachments: Option>, - pub role: String, - pub content: Vec, - pub status: String, - pub created_at: i64, - pub completed_at: i64, - pub metadata: Option, - pub type_: Option, - pub error_code: Option, - pub tool_call_id: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] 
-pub struct Attachment { - pub file_id: Option, - pub tools: Option>, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(tag = "type")] -pub enum Tool { - #[serde(rename = "file_search")] - FileSearch, - #[serde(rename = "code_interpreter")] - CodeInterpreter, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ThreadContent { - pub type_: String, - pub text: Option, - pub image_url: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ContentValue { - pub value: String, - pub annotations: Vec, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ImageContentValue { - pub detail: Option, - pub url: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ThreadAssistantInfo { - pub id: String, - pub name: String, - pub model: ModelInfo, - pub instructions: Option, - pub tools: Option>, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ModelInfo { - pub id: String, - pub name: String, - pub settings: serde_json::Value, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(tag = "type")] -pub enum AssistantTool { - #[serde(rename = "code_interpreter")] - CodeInterpreter, - #[serde(rename = "retrieval")] - Retrieval, - #[serde(rename = "function")] - Function { - name: String, - description: Option, - parameters: Option, - }, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ThreadState { - pub has_more: bool, - pub waiting_for_response: bool, - pub error: Option, - pub last_message: Option, -} diff --git a/src-tauri/src/core/threads/tests.rs b/src-tauri/src/core/threads/tests.rs index 5b4aaec57..15c91de85 100644 --- a/src-tauri/src/core/threads/tests.rs +++ b/src-tauri/src/core/threads/tests.rs @@ -1,6 +1,7 @@ -use crate::core::app::commands::get_jan_data_folder_path; use super::commands::*; +use super::helpers::should_use_sqlite; +use futures_util::future; use serde_json::json; use std::fs; use std::path::PathBuf; @@ -9,14 +10,47 @@ use tauri::test::{mock_app, MockRuntime}; // Helper to create a mock app handle with a temp data dir fn mock_app_with_temp_data_dir() -> (tauri::App, PathBuf) { let app = mock_app(); - let data_dir = get_jan_data_folder_path(app.handle().clone()); + // Create a unique test directory to avoid race conditions between parallel tests + let unique_id = std::thread::current().id(); + let timestamp = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_nanos(); + let data_dir = std::env::current_dir() + .unwrap_or_else(|_| PathBuf::from(".")) + .join(format!("test-data-{unique_id:?}-{timestamp}")); println!("Mock app data dir: {}", data_dir.display()); - // Patch get_data_dir to use temp dir (requires get_data_dir to be overridable or injectable) - // For now, we assume get_data_dir uses tauri::api::path::app_data_dir(&app_handle) - // and that we can set the environment variable to redirect it. 
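    // Note: `cargo test` drives these #[tokio::test] functions in parallel inside
    // one process, so the thread id plus a nanosecond timestamp gives each test a
    // distinct scratch directory and keeps cleanup from racing across tests.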
+ // Ensure the unique test directory exists + let _ = fs::create_dir_all(&data_dir); (app, data_dir) } +// Helper to create a basic thread +fn create_test_thread(title: &str) -> serde_json::Value { + json!({ + "object": "thread", + "title": title, + "assistants": [], + "created": 123, + "updated": 123, + "metadata": null + }) +} + +// Helper to create a basic message +fn create_test_message(thread_id: &str, content_text: &str) -> serde_json::Value { + json!({ + "object": "message", + "thread_id": thread_id, + "role": "user", + "content": [{"type": "text", "text": content_text}], + "status": "sent", + "created_at": 123, + "completed_at": 123, + "metadata": null + }) +} + #[tokio::test] async fn test_create_and_list_threads() { let (app, data_dir) = mock_app_with_temp_data_dir(); @@ -36,7 +70,7 @@ async fn test_create_and_list_threads() { // List threads let threads = list_threads(app.handle().clone()).await.unwrap(); - assert!(threads.len() > 0); + assert!(!threads.is_empty()); // Clean up let _ = fs::remove_dir_all(data_dir); @@ -82,7 +116,7 @@ async fn test_create_and_list_messages() { let messages = list_messages(app.handle().clone(), thread_id.clone()) .await .unwrap(); - assert!(messages.len() > 0); + assert!(!messages.is_empty(), "Expected at least one message, but got none. Thread ID: {thread_id}"); assert_eq!(messages[0]["role"], "user"); // Clean up @@ -131,3 +165,314 @@ async fn test_create_and_get_thread_assistant() { // Clean up let _ = fs::remove_dir_all(data_dir); } + +#[test] +fn test_should_use_sqlite_platform_detection() { + // Test that should_use_sqlite returns correct value based on platform + // On desktop platforms (macOS, Linux, Windows), it should return false + // On mobile platforms (Android, iOS), it should return true + + #[cfg(any(target_os = "android", target_os = "ios"))] + { + assert!(should_use_sqlite(), "should_use_sqlite should return true on mobile platforms"); + } + + #[cfg(not(any(target_os = "android", target_os = "ios")))] + { + assert!(!should_use_sqlite(), "should_use_sqlite should return false on desktop platforms"); + } +} + +#[tokio::test] +async fn test_desktop_storage_backend() { + // This test verifies that on desktop platforms, the file-based storage is used + #[cfg(not(any(target_os = "android", target_os = "ios")))] + { + let (app, _data_dir) = mock_app_with_temp_data_dir(); + + // Create a thread + let thread = json!({ + "object": "thread", + "title": "Desktop Test Thread", + "assistants": [], + "created": 1234567890, + "updated": 1234567890, + "metadata": null + }); + + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + // Verify we can retrieve the thread (which proves file storage works) + let threads = list_threads(app.handle().clone()).await.unwrap(); + let found = threads.iter().any(|t| t["id"] == thread_id); + assert!(found, "Thread should be retrievable from file-based storage"); + + // Create a message + let message = json!({ + "object": "message", + "thread_id": thread_id, + "role": "user", + "content": [], + "status": "sent", + "created_at": 123, + "completed_at": 123, + "metadata": null + }); + + let _created_msg = create_message(app.handle().clone(), message).await.unwrap(); + + // Verify we can retrieve the message (which proves file storage works) + let messages = list_messages(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + assert_eq!(messages.len(), 1, "Message should be retrievable from file-based 
storage"); + + // Clean up - get the actual data directory used by the app + use super::utils::get_data_dir; + let actual_data_dir = get_data_dir(app.handle().clone()); + let _ = fs::remove_dir_all(actual_data_dir); + } +} + +#[tokio::test] +async fn test_modify_and_delete_thread() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + + // Create a thread + let thread = json!({ + "object": "thread", + "title": "Original Title", + "assistants": [], + "created": 1234567890, + "updated": 1234567890, + "metadata": null + }); + + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + // Modify the thread + let mut modified_thread = created.clone(); + modified_thread["title"] = json!("Modified Title"); + + modify_thread(app.handle().clone(), modified_thread.clone()) + .await + .unwrap(); + + // Verify modification by listing threads + let threads = list_threads(app.handle().clone()).await.unwrap(); + let found_thread = threads.iter().find(|t| t["id"] == thread_id); + assert!(found_thread.is_some(), "Modified thread should exist"); + assert_eq!(found_thread.unwrap()["title"], "Modified Title"); + + // Delete the thread + delete_thread(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + + // Verify deletion + #[cfg(not(any(target_os = "android", target_os = "ios")))] + { + let thread_dir = data_dir.join(&thread_id); + assert!(!thread_dir.exists(), "Thread directory should be deleted"); + } + + // Clean up + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_modify_and_delete_message() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + + // Create a thread + let thread = json!({ + "object": "thread", + "title": "Message Test Thread", + "assistants": [], + "created": 123, + "updated": 123, + "metadata": null + }); + + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + // Create a message + let message = json!({ + "object": "message", + "thread_id": thread_id, + "role": "user", + "content": [{"type": "text", "text": "Original content"}], + "status": "sent", + "created_at": 123, + "completed_at": 123, + "metadata": null + }); + + let created_msg = create_message(app.handle().clone(), message).await.unwrap(); + let message_id = created_msg["id"].as_str().unwrap().to_string(); + + // Modify the message + let mut modified_msg = created_msg.clone(); + modified_msg["content"] = json!([{"type": "text", "text": "Modified content"}]); + + modify_message(app.handle().clone(), modified_msg.clone()) + .await + .unwrap(); + + // Verify modification + let messages = list_messages(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + assert_eq!(messages.len(), 1); + assert_eq!(messages[0]["content"][0]["text"], "Modified content"); + + // Delete the message + delete_message(app.handle().clone(), thread_id.clone(), message_id.clone()) + .await + .unwrap(); + + // Verify deletion + let messages = list_messages(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + assert_eq!(messages.len(), 0, "Message should be deleted"); + + // Clean up + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_modify_thread_assistant() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + let app_handle = app.handle().clone(); + + let created = create_thread(app_handle.clone(), create_test_thread("Assistant Mod Thread")) + .await + 
.unwrap(); + let thread_id = created["id"].as_str().unwrap(); + + let assistant = json!({ + "id": "assistant-1", + "assistant_name": "Original Assistant", + "model": {"id": "model-1", "name": "Test Model"} + }); + + create_thread_assistant(app_handle.clone(), thread_id.to_string(), assistant.clone()) + .await + .unwrap(); + + let mut modified_assistant = assistant; + modified_assistant["assistant_name"] = json!("Modified Assistant"); + + modify_thread_assistant(app_handle.clone(), thread_id.to_string(), modified_assistant) + .await + .unwrap(); + + let retrieved = get_thread_assistant(app_handle, thread_id.to_string()) + .await + .unwrap(); + assert_eq!(retrieved["assistant_name"], "Modified Assistant"); + + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_thread_not_found_errors() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + let app_handle = app.handle().clone(); + let fake_thread_id = "non-existent-thread-id".to_string(); + let assistant = json!({"id": "assistant-1", "assistant_name": "Test Assistant"}); + + assert!(get_thread_assistant(app_handle.clone(), fake_thread_id.clone()).await.is_err()); + assert!(create_thread_assistant(app_handle.clone(), fake_thread_id.clone(), assistant.clone()).await.is_err()); + assert!(modify_thread_assistant(app_handle, fake_thread_id, assistant).await.is_err()); + + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_message_without_id_gets_generated() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + let app_handle = app.handle().clone(); + + let created = create_thread(app_handle.clone(), create_test_thread("Message ID Test")) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap(); + + let message = json!({"object": "message", "thread_id": thread_id, "role": "user", "content": [], "status": "sent"}); + let created_msg = create_message(app_handle, message).await.unwrap(); + + assert!(created_msg["id"].as_str().is_some_and(|id| !id.is_empty())); + + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_concurrent_message_operations() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + let app_handle = app.handle().clone(); + + let created = create_thread(app_handle.clone(), create_test_thread("Concurrent Test")) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + let handles: Vec<_> = (0..5) + .map(|i| { + let app_h = app_handle.clone(); + let tid = thread_id.clone(); + tokio::spawn(async move { + create_message(app_h, create_test_message(&tid, &format!("Message {}", i))).await + }) + }) + .collect(); + + let results = future::join_all(handles).await; + assert!(results.iter().all(|r| r.is_ok() && r.as_ref().unwrap().is_ok())); + + let messages = list_messages(app_handle, thread_id).await.unwrap(); + assert_eq!(messages.len(), 5); + + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_empty_thread_list() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + // Clean up any leftover test data + let test_data_threads = std::env::current_dir() + .unwrap_or_else(|_| PathBuf::from(".")) + .join("test-data") + .join("threads"); + let _ = fs::remove_dir_all(&test_data_threads); + + let threads = list_threads(app.handle().clone()).await.unwrap(); + assert_eq!(threads.len(), 0); + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_empty_message_list() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + let app_handle = app.handle().clone(); + + let created = 
create_thread(app_handle.clone(), create_test_thread("Empty Messages Test")) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap(); + + let messages = list_messages(app_handle, thread_id.to_string()).await.unwrap(); + assert_eq!(messages.len(), 0); + + let _ = fs::remove_dir_all(data_dir); +} diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index dad155875..8ca44d9a9 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -10,11 +10,10 @@ use jan_utils::generate_app_token; use std::{collections::HashMap, sync::Arc}; use tauri::{Emitter, Manager, RunEvent}; use tauri_plugin_llamacpp::cleanup_llama_processes; +use tauri_plugin_store::StoreExt; use tokio::sync::Mutex; -use crate::core::setup::setup_tray; - -#[cfg_attr(mobile, tauri::mobile_entry_point)] +#[cfg_attr(all(mobile, any(target_os = "android", target_os = "ios")), tauri::mobile_entry_point)] pub fn run() { let mut builder = tauri::Builder::default(); #[cfg(desktop)] @@ -22,29 +21,29 @@ pub fn run() { builder = builder.plugin(tauri_plugin_single_instance::init(|_app, argv, _cwd| { println!("a new app instance was opened with {argv:?} and the deep link event was already triggered"); // when defining deep link schemes at runtime, you must also check `argv` here - let arg = argv.iter().find(|arg| arg.starts_with("jan://")); - if let Some(deep_link) = arg { - println!("deep link: {deep_link}"); - // handle the deep link, e.g., emit an event to the webview - _app.app_handle().emit("deep-link", deep_link).unwrap(); - if let Some(window) = _app.app_handle().get_webview_window("main") { - let _ = window.set_focus(); - } - } })); } - let app = builder + let mut app_builder = builder .plugin(tauri_plugin_os::init()) - .plugin(tauri_plugin_deep_link::init()) .plugin(tauri_plugin_dialog::init()) .plugin(tauri_plugin_opener::init()) .plugin(tauri_plugin_http::init()) .plugin(tauri_plugin_store::Builder::new().build()) - .plugin(tauri_plugin_updater::Builder::new().build()) .plugin(tauri_plugin_shell::init()) - .plugin(tauri_plugin_llamacpp::init()) - .plugin(tauri_plugin_hardware::init()) + .plugin(tauri_plugin_llamacpp::init()); + + #[cfg(feature = "deep-link")] + { + app_builder = app_builder.plugin(tauri_plugin_deep_link::init()); + } + + #[cfg(not(any(target_os = "android", target_os = "ios")))] + { + app_builder = app_builder.plugin(tauri_plugin_hardware::init()); + } + + let app = app_builder .invoke_handler(tauri::generate_handler![ // FS commands - Deperecate soon core::filesystem::commands::join_path, @@ -120,21 +119,6 @@ pub fn run() { server_handle: Arc::new(Mutex::new(None)), tool_call_cancellations: Arc::new(Mutex::new(HashMap::new())), }) - .on_window_event(|window, event| match event { - tauri::WindowEvent::CloseRequested { api, .. 
} => { - if option_env!("ENABLE_SYSTEM_TRAY_ICON").unwrap_or("false") == "true" { - #[cfg(target_os = "macos")] - window - .app_handle() - .set_activation_policy(tauri::ActivationPolicy::Accessory) - .unwrap(); - - window.hide().unwrap(); - api.prevent_close(); - } - } - _ => {} - }) .setup(|app| { app.handle().plugin( tauri_plugin_log::Builder::default() @@ -149,42 +133,87 @@ pub fn run() { ]) .build(), )?; - app.handle() - .plugin(tauri_plugin_updater::Builder::new().build())?; - // Install extensions - if let Err(e) = setup::install_extensions(app.handle().clone(), false) { - log::error!("Failed to install extensions: {}", e); + #[cfg(not(any(target_os = "ios", target_os = "android")))] + app.handle().plugin(tauri_plugin_updater::Builder::new().build())?; + + // Start migration + let mut store_path = get_jan_data_folder_path(app.handle().clone()); + store_path.push("store.json"); + let store = app + .handle() + .store(store_path) + .expect("Store not initialized"); + let stored_version = store + .get("version") + .and_then(|v| v.as_str().map(String::from)) + .unwrap_or_default(); + let app_version = app + .config() + .version + .clone() + .unwrap_or_default(); + // Migrate extensions + if let Err(e) = + setup::install_extensions(app.handle().clone(), stored_version != app_version) + { + log::error!("Failed to install extensions: {e}"); } + // Migrate MCP servers + if let Err(e) = setup::migrate_mcp_servers(app.handle().clone(), store.clone()) { + log::error!("Failed to migrate MCP servers: {e}"); + } + + // Store the new app version + store.set("version", serde_json::json!(app_version)); + store.save().expect("Failed to save store"); + // Migration completed + + #[cfg(desktop)] if option_env!("ENABLE_SYSTEM_TRAY_ICON").unwrap_or("false") == "true" { log::info!("Enabling system tray icon"); - let _ = setup_tray(app); + let _ = setup::setup_tray(app); } - #[cfg(any(windows, target_os = "linux"))] + #[cfg(all(feature = "deep-link", any(windows, target_os = "linux")))] { use tauri_plugin_deep_link::DeepLinkExt; - app.deep_link().register_all()?; } + + // Initialize SQLite database for mobile platforms + #[cfg(any(target_os = "android", target_os = "ios"))] + { + let app_handle = app.handle().clone(); + tauri::async_runtime::spawn(async move { + if let Err(e) = crate::core::threads::db::init_database(&app_handle).await { + log::error!("Failed to initialize mobile database: {}", e); + } + }); + } + setup_mcp(app); + setup::setup_theme_listener(app)?; Ok(()) }) .build(tauri::generate_context!()) .expect("error while running tauri application"); // Handle app lifecycle events - app.run(|app, event| match event { - RunEvent::Exit => { + app.run(|app, event| { + if let RunEvent::Exit = event { // This is called when the app is actually exiting (e.g., macOS dock quit) // We can't prevent this, so run cleanup quickly let app_handle = app.clone(); - // Hide window immediately - if let Some(window) = app_handle.get_webview_window("main") { - let _ = window.hide(); - } tokio::task::block_in_place(|| { tauri::async_runtime::block_on(async { + // Hide window immediately (not available on mobile platforms) + if let Some(window) = app_handle.get_webview_window("main") { + #[cfg(not(any(target_os = "ios", target_os = "android")))] + { let _ = window.hide(); } + let _ = window.emit("kill-mcp-servers", ()); + } + // Quick cleanup with shorter timeout let state = app_handle.state::(); let _ = clean_up_mcp_servers(state).await; @@ -192,6 +221,5 @@ pub fn run() { }); }); } - _ => {} }); } diff --git 
a/src-tauri/tauri b/src-tauri/tauri new file mode 100755 index 000000000..f944754d6 --- /dev/null +++ b/src-tauri/tauri @@ -0,0 +1,2 @@ +#!/usr/bin/env node +import('../node_modules/@tauri-apps/cli/tauri.js'); \ No newline at end of file diff --git a/src-tauri/tauri.android.conf.json b/src-tauri/tauri.android.conf.json new file mode 100644 index 000000000..2f1144c20 --- /dev/null +++ b/src-tauri/tauri.android.conf.json @@ -0,0 +1,26 @@ +{ + "identifier": "jan.ai.app", + "build": { + "devUrl": null, + "frontendDist": "../web-app/dist", + "beforeDevCommand": "cross-env IS_DEV=true IS_ANDROID=true yarn build:web", + "beforeBuildCommand": "cross-env IS_ANDROID=true yarn build:web" + }, + "app": { + "security": { + "capabilities": ["mobile"] + } + }, + "plugins": {}, + "bundle": { + "active": true, + "resources": [ + "resources/pre-install/**/*", + "resources/LICENSE" + ], + "externalBin": [], + "android": { + "minSdkVersion": 24 + } + } +} \ No newline at end of file diff --git a/src-tauri/tauri.bundle.windows.nsis.template b/src-tauri/tauri.bundle.windows.nsis.template new file mode 100644 index 000000000..8e7602f25 --- /dev/null +++ b/src-tauri/tauri.bundle.windows.nsis.template @@ -0,0 +1,1006 @@ +Unicode true +ManifestDPIAware true +; Add in `dpiAwareness` `PerMonitorV2` to manifest for Windows 10 1607+ (note this should not affect lower versions since they should be able to ignore this and pick up `dpiAware` `true` set by `ManifestDPIAware true`) +; Currently undocumented on NSIS's website but is in the Docs folder of source tree, see +; https://github.com/kichik/nsis/blob/5fc0b87b819a9eec006df4967d08e522ddd651c9/Docs/src/attributes.but#L286-L300 +; https://github.com/tauri-apps/tauri/pull/10106 +ManifestDPIAwareness PerMonitorV2 + +!if "lzma" == "none" + SetCompress off +!else + ; Set the compression algorithm. We default to LZMA. 
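  ; /SOLID compresses all installer data as a single block, which improves the
  ; compression ratio at the cost of extraction having to stream through the
  ; block in order.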
+ SetCompressor /SOLID "lzma" +!endif + +!include MUI2.nsh +!include FileFunc.nsh +!include x64.nsh +!include WordFunc.nsh +!include "utils.nsh" +!include "FileAssociation.nsh" +!include "Win\COM.nsh" +!include "Win\Propkey.nsh" +!include "StrFunc.nsh" +${StrCase} +${StrLoc} + + +!define WEBVIEW2APPGUID "{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}" + +!define MANUFACTURER "ai" +!define PRODUCTNAME "jan_productname" +!define VERSION "jan_version" +!define VERSIONWITHBUILD "jan_build" +!define HOMEPAGE "" +!define INSTALLMODE "currentUser" +!define LICENSE "" +!define INSTALLERICON "D:\a\jan\jan\src-tauri\icons\icon.ico" +!define SIDEBARIMAGE "" +!define HEADERIMAGE "" +!define MAINBINARYNAME "jan_mainbinaryname" +!define MAINBINARYSRCPATH "D:\a\jan\jan\src-tauri\target\release\jan_mainbinaryname.exe" +!define BUNDLEID "jan_mainbinaryname.ai.app" +!define COPYRIGHT "" +!define OUTFILE "nsis-output.exe" +!define ARCH "x64" +!define ADDITIONALPLUGINSPATH "D:\a\jan\jan\src-tauri\target\release\nsis\x64\Plugins\x86-unicode\additional" +!define ALLOWDOWNGRADES "true" +!define DISPLAYLANGUAGESELECTOR "false" +!define INSTALLWEBVIEW2MODE "downloadBootstrapper" +!define WEBVIEW2INSTALLERARGS "/silent" +!define WEBVIEW2BOOTSTRAPPERPATH "" +!define WEBVIEW2INSTALLERPATH "" +!define MINIMUMWEBVIEW2VERSION "" +!define UNINSTKEY "Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCTNAME}" +!define MANUKEY "Software\${MANUFACTURER}" +!define MANUPRODUCTKEY "${MANUKEY}\${PRODUCTNAME}" +!define UNINSTALLERSIGNCOMMAND "$\"powershell$\" $\"-ExecutionPolicy$\" $\"Bypass$\" $\"-File$\" $\"./sign.ps1$\" $\"%1$\"" +!define ESTIMATEDSIZE "793795" +!define STARTMENUFOLDER "" +!define VCREDIST_URL "https://aka.ms/vs/17/release/vc_redist.x64.exe" +!define VCREDIST_FILENAME "vc_redist.x64.exe" + +Var PassiveMode +Var UpdateMode +Var NoShortcutMode +Var WixMode +Var OldMainBinaryName + +Name "${PRODUCTNAME}" +BrandingText "${COPYRIGHT}" +OutFile "${OUTFILE}" + +ShowInstDetails nevershow +ShowUninstDetails nevershow + +; We don't actually use this value as default install path, +; it's just for nsis to append the product name folder in the directory selector +; https://nsis.sourceforge.io/Reference/InstallDir +!define PLACEHOLDER_INSTALL_DIR "placeholder\${PRODUCTNAME}" +InstallDir "${PLACEHOLDER_INSTALL_DIR}" + +VIProductVersion "${VERSIONWITHBUILD}" +VIAddVersionKey "ProductName" "${PRODUCTNAME}" +VIAddVersionKey "FileDescription" "${PRODUCTNAME}" +VIAddVersionKey "LegalCopyright" "${COPYRIGHT}" +VIAddVersionKey "FileVersion" "${VERSION}" +VIAddVersionKey "ProductVersion" "${VERSION}" + +# additional plugins +!addplugindir "${ADDITIONALPLUGINSPATH}" + +; Uninstaller signing command +!if "${UNINSTALLERSIGNCOMMAND}" != "" + !uninstfinalize '${UNINSTALLERSIGNCOMMAND}' +!endif + +; Handle install mode, `perUser`, `perMachine` or `both` +!if "${INSTALLMODE}" == "perMachine" + RequestExecutionLevel highest +!endif + +!if "${INSTALLMODE}" == "currentUser" + RequestExecutionLevel user +!endif + +!if "${INSTALLMODE}" == "both" + !define MULTIUSER_MUI + !define MULTIUSER_INSTALLMODE_INSTDIR "${PRODUCTNAME}" + !define MULTIUSER_INSTALLMODE_COMMANDLINE + !if "${ARCH}" == "x64" + !define MULTIUSER_USE_PROGRAMFILES64 + !else if "${ARCH}" == "arm64" + !define MULTIUSER_USE_PROGRAMFILES64 + !endif + !define MULTIUSER_INSTALLMODE_DEFAULT_REGISTRY_KEY "${UNINSTKEY}" + !define MULTIUSER_INSTALLMODE_DEFAULT_REGISTRY_VALUENAME "CurrentUser" + !define MULTIUSER_INSTALLMODEPAGE_SHOWUSERNAME + !define MULTIUSER_INSTALLMODE_FUNCTION 
RestorePreviousInstallLocation + !define MULTIUSER_EXECUTIONLEVEL Highest + !include MultiUser.nsh +!endif + +; Installer icon +!if "${INSTALLERICON}" != "" + !define MUI_ICON "${INSTALLERICON}" +!endif + +; Installer sidebar image +!if "${SIDEBARIMAGE}" != "" + !define MUI_WELCOMEFINISHPAGE_BITMAP "${SIDEBARIMAGE}" +!endif + +; Installer header image +!if "${HEADERIMAGE}" != "" + !define MUI_HEADERIMAGE + !define MUI_HEADERIMAGE_BITMAP "${HEADERIMAGE}" +!endif + +; Define registry key to store installer language +!define MUI_LANGDLL_REGISTRY_ROOT "HKCU" +!define MUI_LANGDLL_REGISTRY_KEY "${MANUPRODUCTKEY}" +!define MUI_LANGDLL_REGISTRY_VALUENAME "Installer Language" + +; Installer pages, must be ordered as they appear +; 1. Welcome Page +!define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive +!insertmacro MUI_PAGE_WELCOME + +; 2. License Page (if defined) +!if "${LICENSE}" != "" + !define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive + !insertmacro MUI_PAGE_LICENSE "${LICENSE}" +!endif + +; 3. Install mode (if it is set to `both`) +!if "${INSTALLMODE}" == "both" + !define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive + !insertmacro MULTIUSER_PAGE_INSTALLMODE +!endif + +; 4. Custom page to ask user if he wants to reinstall/uninstall +; only if a previous installation was detected +Var ReinstallPageCheck +Page custom PageReinstall PageLeaveReinstall +Function PageReinstall + ; Uninstall previous WiX installation if exists. + ; + ; A WiX installer stores the installation info in registry + ; using a UUID and so we have to loop through all keys under + ; `HKLM\SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall` + ; and check if `DisplayName` and `Publisher` keys match ${PRODUCTNAME} and ${MANUFACTURER} + ; + ; This has a potential issue that there maybe another installation that matches + ; our ${PRODUCTNAME} and ${MANUFACTURER} but wasn't installed by our WiX installer, + ; however, this should be fine since the user will have to confirm the uninstallation + ; and they can chose to abort it if doesn't make sense. 
+ StrCpy $0 0 + wix_loop: + EnumRegKey $1 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall" $0 + StrCmp $1 "" wix_loop_done ; Exit loop if there is no more keys to loop on + IntOp $0 $0 + 1 + ReadRegStr $R0 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" "DisplayName" + ReadRegStr $R1 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" "Publisher" + StrCmp "$R0$R1" "${PRODUCTNAME}${MANUFACTURER}" 0 wix_loop + ReadRegStr $R0 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" "UninstallString" + ${StrCase} $R1 $R0 "L" + ${StrLoc} $R0 $R1 "msiexec" ">" + StrCmp $R0 0 0 wix_loop_done + StrCpy $WixMode 1 + StrCpy $R6 "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" + Goto compare_version + wix_loop_done: + + ; Check if there is an existing installation, if not, abort the reinstall page + ReadRegStr $R0 SHCTX "${UNINSTKEY}" "" + ReadRegStr $R1 SHCTX "${UNINSTKEY}" "UninstallString" + ${IfThen} "$R0$R1" == "" ${|} Abort ${|} + + ; Compare this installar version with the existing installation + ; and modify the messages presented to the user accordingly + compare_version: + StrCpy $R4 "$(older)" + ${If} $WixMode = 1 + ReadRegStr $R0 HKLM "$R6" "DisplayVersion" + ${Else} + ReadRegStr $R0 SHCTX "${UNINSTKEY}" "DisplayVersion" + ${EndIf} + ${IfThen} $R0 == "" ${|} StrCpy $R4 "$(unknown)" ${|} + + nsis_tauri_utils::SemverCompare "${VERSION}" $R0 + Pop $R0 + ; Reinstalling the same version + ${If} $R0 = 0 + StrCpy $R1 "$(alreadyInstalledLong)" + StrCpy $R2 "$(addOrReinstall)" + StrCpy $R3 "$(uninstallApp)" + !insertmacro MUI_HEADER_TEXT "$(alreadyInstalled)" "$(chooseMaintenanceOption)" + ; Upgrading + ${ElseIf} $R0 = 1 + StrCpy $R1 "$(olderOrUnknownVersionInstalled)" + StrCpy $R2 "$(uninstallBeforeInstalling)" + StrCpy $R3 "$(dontUninstall)" + !insertmacro MUI_HEADER_TEXT "$(alreadyInstalled)" "$(choowHowToInstall)" + ; Downgrading + ${ElseIf} $R0 = -1 + StrCpy $R1 "$(newerVersionInstalled)" + StrCpy $R2 "$(uninstallBeforeInstalling)" + !if "${ALLOWDOWNGRADES}" == "true" + StrCpy $R3 "$(dontUninstall)" + !else + StrCpy $R3 "$(dontUninstallDowngrade)" + !endif + !insertmacro MUI_HEADER_TEXT "$(alreadyInstalled)" "$(choowHowToInstall)" + ${Else} + Abort + ${EndIf} + + ; Skip showing the page if passive + ; + ; Note that we don't call this earlier at the begining + ; of this function because we need to populate some variables + ; related to current installed version if detected and whether + ; we are downgrading or not. 
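  ; At this point $R0 still holds the SemverCompare result (0 = same version,
  ; 1 = an older version is installed, -1 = a newer version is installed);
  ; PageLeaveReinstall reuses it below.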
+ ${If} $PassiveMode = 1 + Call PageLeaveReinstall + ${Else} + nsDialogs::Create 1018 + Pop $R4 + ${IfThen} $(^RTL) = 1 ${|} nsDialogs::SetRTL $(^RTL) ${|} + + ${NSD_CreateLabel} 0 0 100% 24u $R1 + Pop $R1 + + ${NSD_CreateRadioButton} 30u 50u -30u 8u $R2 + Pop $R2 + ${NSD_OnClick} $R2 PageReinstallUpdateSelection + + ${NSD_CreateRadioButton} 30u 70u -30u 8u $R3 + Pop $R3 + ; Disable this radio button if downgrading and downgrades are disabled + !if "${ALLOWDOWNGRADES}" == "false" + ${IfThen} $R0 = -1 ${|} EnableWindow $R3 0 ${|} + !endif + ${NSD_OnClick} $R3 PageReinstallUpdateSelection + + ; Check the first radio button if this the first time + ; we enter this page or if the second button wasn't + ; selected the last time we were on this page + ${If} $ReinstallPageCheck <> 2 + SendMessage $R2 ${BM_SETCHECK} ${BST_CHECKED} 0 + ${Else} + SendMessage $R3 ${BM_SETCHECK} ${BST_CHECKED} 0 + ${EndIf} + + ${NSD_SetFocus} $R2 + nsDialogs::Show + ${EndIf} +FunctionEnd +Function PageReinstallUpdateSelection + ${NSD_GetState} $R2 $R1 + ${If} $R1 == ${BST_CHECKED} + StrCpy $ReinstallPageCheck 1 + ${Else} + StrCpy $ReinstallPageCheck 2 + ${EndIf} +FunctionEnd +Function PageLeaveReinstall + ; In passive mode, always uninstall when upgrading + ${If} $PassiveMode = 1 + ${AndIf} $R0 = 1 ; Upgrading + Goto reinst_uninstall + ${EndIf} + + ${NSD_GetState} $R2 $R1 + + ; If migrating from Wix, always uninstall + ${If} $WixMode = 1 + Goto reinst_uninstall + ${EndIf} + + ; In update mode, always proceeds without uninstalling + ${If} $UpdateMode = 1 + Goto reinst_done + ${EndIf} + + ; $R0 holds whether same(0)/upgrading(1)/downgrading(-1) version + ; $R1 holds the radio buttons state: + ; 1 => first choice was selected + ; 0 => second choice was selected + ${If} $R0 = 0 ; Same version, proceed + ${If} $R1 = 1 ; User chose to add/reinstall + Goto reinst_done + ${Else} ; User chose to uninstall + Goto reinst_uninstall + ${EndIf} + ${ElseIf} $R0 = 1 ; Upgrading + ${If} $R1 = 1 ; User chose to uninstall + Goto reinst_uninstall + ${Else} + Goto reinst_done ; User chose NOT to uninstall + ${EndIf} + ${ElseIf} $R0 = -1 ; Downgrading + ${If} $R1 = 1 ; User chose to uninstall + Goto reinst_uninstall + ${Else} + Goto reinst_done ; User chose NOT to uninstall + ${EndIf} + ${EndIf} + + reinst_uninstall: + HideWindow + ClearErrors + + ${If} $WixMode = 1 + ReadRegStr $R1 HKLM "$R6" "UninstallString" + ExecWait '$R1' $0 + ${Else} + ReadRegStr $4 SHCTX "${MANUPRODUCTKEY}" "" + ReadRegStr $R1 SHCTX "${UNINSTKEY}" "UninstallString" + ${IfThen} $UpdateMode = 1 ${|} StrCpy $R1 "$R1 /UPDATE" ${|} ; append /UPDATE + ${IfThen} $PassiveMode = 1 ${|} StrCpy $R1 "$R1 /P" ${|} ; append /P + StrCpy $R1 "$R1 _?=$4" ; append uninstall directory + ExecWait '$R1' $0 + ${EndIf} + + BringToFront + + ${IfThen} ${Errors} ${|} StrCpy $0 2 ${|} ; ExecWait failed, set fake exit code + + ${If} $0 <> 0 + ${OrIf} ${FileExists} "$INSTDIR\${MAINBINARYNAME}.exe" + ; User cancelled wix uninstaller? return to select un/reinstall page + ${If} $WixMode = 1 + ${AndIf} $0 = 1602 + Abort + ${EndIf} + + ; User cancelled NSIS uninstaller? return to select un/reinstall page + ${If} $0 = 1 + Abort + ${EndIf} + + ; Other erros? show generic error message and return to select un/reinstall page + MessageBox MB_ICONEXCLAMATION "$(unableToUninstall)" + Abort + ${EndIf} + reinst_done: +FunctionEnd + +; 5. Choose install directory page +!define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive +!insertmacro MUI_PAGE_DIRECTORY + +; 6. 
Start menu shortcut page +Var AppStartMenuFolder +!if "${STARTMENUFOLDER}" != "" + !define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive + !define MUI_STARTMENUPAGE_DEFAULTFOLDER "${STARTMENUFOLDER}" +!else + !define MUI_PAGE_CUSTOMFUNCTION_PRE Skip +!endif +!insertmacro MUI_PAGE_STARTMENU Application $AppStartMenuFolder + +; 7. Installation page +!insertmacro MUI_PAGE_INSTFILES + +; 8. Finish page +; +; Don't auto jump to finish page after installation page, +; because the installation page has useful info that can be used debug any issues with the installer. +!define MUI_FINISHPAGE_NOAUTOCLOSE +; Use show readme button in the finish page as a button create a desktop shortcut +!define MUI_FINISHPAGE_SHOWREADME +!define MUI_FINISHPAGE_SHOWREADME_TEXT "$(createDesktop)" +!define MUI_FINISHPAGE_SHOWREADME_FUNCTION CreateOrUpdateDesktopShortcut +; Show run app after installation. +!define MUI_FINISHPAGE_RUN +!define MUI_FINISHPAGE_RUN_FUNCTION RunMainBinary +!define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive +!insertmacro MUI_PAGE_FINISH + +Function RunMainBinary + nsis_tauri_utils::RunAsUser "$INSTDIR\${MAINBINARYNAME}.exe" "" +FunctionEnd + +; Uninstaller Pages +; 1. Confirm uninstall page +Var DeleteAppDataCheckbox +Var DeleteAppDataCheckboxState +!define /ifndef WS_EX_LAYOUTRTL 0x00400000 +!define MUI_PAGE_CUSTOMFUNCTION_SHOW un.ConfirmShow +Function un.ConfirmShow ; Add add a `Delete app data` check box + ; $1 inner dialog HWND + ; $2 window DPI + ; $3 style + ; $4 x + ; $5 y + ; $6 width + ; $7 height + FindWindow $1 "#32770" "" $HWNDPARENT ; Find inner dialog + System::Call "user32::GetDpiForWindow(p r1) i .r2" + ${If} $(^RTL) = 1 + StrCpy $3 "${__NSD_CheckBox_EXSTYLE} | ${WS_EX_LAYOUTRTL}" + IntOp $4 50 * $2 + ${Else} + StrCpy $3 "${__NSD_CheckBox_EXSTYLE}" + IntOp $4 0 * $2 + ${EndIf} + IntOp $5 100 * $2 + IntOp $6 400 * $2 + IntOp $7 25 * $2 + IntOp $4 $4 / 96 + IntOp $5 $5 / 96 + IntOp $6 $6 / 96 + IntOp $7 $7 / 96 + System::Call 'user32::CreateWindowEx(i r3, w "${__NSD_CheckBox_CLASS}", w "$(deleteAppData)", i ${__NSD_CheckBox_STYLE}, i r4, i r5, i r6, i r7, p r1, i0, i0, i0) i .s' + Pop $DeleteAppDataCheckbox + SendMessage $HWNDPARENT ${WM_GETFONT} 0 0 $1 + SendMessage $DeleteAppDataCheckbox ${WM_SETFONT} $1 1 +FunctionEnd +!define MUI_PAGE_CUSTOMFUNCTION_LEAVE un.ConfirmLeave +Function un.ConfirmLeave + SendMessage $DeleteAppDataCheckbox ${BM_GETCHECK} 0 0 $DeleteAppDataCheckboxState +FunctionEnd +!define MUI_PAGE_CUSTOMFUNCTION_PRE un.SkipIfPassive +!insertmacro MUI_UNPAGE_CONFIRM + +; 2. 
Uninstalling Page +!insertmacro MUI_UNPAGE_INSTFILES + +;Languages +!insertmacro MUI_LANGUAGE "English" +!insertmacro MUI_RESERVEFILE_LANGDLL + !include "D:\a\jan\jan\src-tauri\target\release\nsis\x64\English.nsh" + +Function .onInit + ${GetOptions} $CMDLINE "/P" $PassiveMode + ${IfNot} ${Errors} + StrCpy $PassiveMode 1 + ${EndIf} + ; always run in passive mode + StrCpy $PassiveMode 1 + + ${GetOptions} $CMDLINE "/NS" $NoShortcutMode + ${IfNot} ${Errors} + StrCpy $NoShortcutMode 1 + ${EndIf} + + ${GetOptions} $CMDLINE "/UPDATE" $UpdateMode + ${IfNot} ${Errors} + StrCpy $UpdateMode 1 + ${EndIf} + + !if "${DISPLAYLANGUAGESELECTOR}" == "true" + !insertmacro MUI_LANGDLL_DISPLAY + !endif + + !insertmacro SetContext + + ${If} $INSTDIR == "${PLACEHOLDER_INSTALL_DIR}" + ; Set default install location + !if "${INSTALLMODE}" == "perMachine" + ${If} ${RunningX64} + !if "${ARCH}" == "x64" + StrCpy $INSTDIR "$PROGRAMFILES64\${PRODUCTNAME}" + !else if "${ARCH}" == "arm64" + StrCpy $INSTDIR "$PROGRAMFILES64\${PRODUCTNAME}" + !else + StrCpy $INSTDIR "$PROGRAMFILES\${PRODUCTNAME}" + !endif + ${Else} + StrCpy $INSTDIR "$PROGRAMFILES\${PRODUCTNAME}" + ${EndIf} + !else if "${INSTALLMODE}" == "currentUser" + StrCpy $INSTDIR "$LOCALAPPDATA\Programs\${PRODUCTNAME}" + !endif + + Call RestorePreviousInstallLocation + ${EndIf} + + ; Remove old Jan if it exists + ${If} ${FileExists} "$INSTDIR\LICENSE.electron.txt" + DeleteRegKey HKLM "Software\${PRODUCTNAME}" + RMDir /r "$INSTDIR" + Delete "$INSTDIR\*.*" + ${EndIf} + + !if "${INSTALLMODE}" == "both" + !insertmacro MULTIUSER_INIT + !endif +FunctionEnd + + +Section EarlyChecks + ; Abort silent installer if downgrades is disabled + !if "${ALLOWDOWNGRADES}" == "false" + ${If} ${Silent} + ; If downgrading + ${If} $R0 = -1 + System::Call 'kernel32::AttachConsole(i -1)i.r0' + ${If} $0 <> 0 + System::Call 'kernel32::GetStdHandle(i -11)i.r0' + System::call 'kernel32::SetConsoleTextAttribute(i r0, i 0x0004)' ; set red color + FileWrite $0 "$(silentDowngrades)" + ${EndIf} + Abort + ${EndIf} + ${EndIf} + !endif + +SectionEnd + +Section WebView2 + ; Check if Webview2 is already installed and skip this section + ${If} ${RunningX64} + ReadRegStr $4 HKLM "SOFTWARE\WOW6432Node\Microsoft\EdgeUpdate\Clients\${WEBVIEW2APPGUID}" "pv" + ${Else} + ReadRegStr $4 HKLM "SOFTWARE\Microsoft\EdgeUpdate\Clients\${WEBVIEW2APPGUID}" "pv" + ${EndIf} + ${If} $4 == "" + ReadRegStr $4 HKCU "SOFTWARE\Microsoft\EdgeUpdate\Clients\${WEBVIEW2APPGUID}" "pv" + ${EndIf} + + ${If} $4 == "" + ; Webview2 installation + ; + ; Skip if updating + ${If} $UpdateMode <> 1 + !if "${INSTALLWEBVIEW2MODE}" == "downloadBootstrapper" + Delete "$TEMP\MicrosoftEdgeWebview2Setup.exe" + DetailPrint "$(webview2Downloading)" + NSISdl::download "https://go.microsoft.com/fwlink/p/?LinkId=2124703" "$TEMP\MicrosoftEdgeWebview2Setup.exe" + Pop $0 + ${If} $0 == "success" + DetailPrint "$(webview2DownloadSuccess)" + ${Else} + DetailPrint "$(webview2DownloadError)" + Abort "$(webview2AbortError)" + ${EndIf} + StrCpy $6 "$TEMP\MicrosoftEdgeWebview2Setup.exe" + Goto install_webview2 + !endif + + !if "${INSTALLWEBVIEW2MODE}" == "embedBootstrapper" + Delete "$TEMP\MicrosoftEdgeWebview2Setup.exe" + File "/oname=$TEMP\MicrosoftEdgeWebview2Setup.exe" "${WEBVIEW2BOOTSTRAPPERPATH}" + DetailPrint "$(installingWebview2)" + StrCpy $6 "$TEMP\MicrosoftEdgeWebview2Setup.exe" + Goto install_webview2 + !endif + + !if "${INSTALLWEBVIEW2MODE}" == "offlineInstaller" + Delete "$TEMP\MicrosoftEdgeWebView2RuntimeInstaller.exe" + File 
"/oname=$TEMP\MicrosoftEdgeWebView2RuntimeInstaller.exe" "${WEBVIEW2INSTALLERPATH}" + DetailPrint "$(installingWebview2)" + StrCpy $6 "$TEMP\MicrosoftEdgeWebView2RuntimeInstaller.exe" + Goto install_webview2 + !endif + + Goto webview2_done + + install_webview2: + DetailPrint "$(installingWebview2)" + ; $6 holds the path to the webview2 installer + ExecWait "$6 ${WEBVIEW2INSTALLERARGS} /install" $1 + ${If} $1 = 0 + DetailPrint "$(webview2InstallSuccess)" + ${Else} + DetailPrint "$(webview2InstallError)" + Abort "$(webview2AbortError)" + ${EndIf} + webview2_done: + ${EndIf} + ${Else} + !if "${MINIMUMWEBVIEW2VERSION}" != "" + ${VersionCompare} "${MINIMUMWEBVIEW2VERSION}" "$4" $R0 + ${If} $R0 = 1 + update_webview: + DetailPrint "$(installingWebview2)" + ${If} ${RunningX64} + ReadRegStr $R1 HKLM "SOFTWARE\WOW6432Node\Microsoft\EdgeUpdate" "path" + ${Else} + ReadRegStr $R1 HKLM "SOFTWARE\Microsoft\EdgeUpdate" "path" + ${EndIf} + ${If} $R1 == "" + ReadRegStr $R1 HKCU "SOFTWARE\Microsoft\EdgeUpdate" "path" + ${EndIf} + ${If} $R1 != "" + ; Chromium updater docs: https://source.chromium.org/chromium/chromium/src/+/main:docs/updater/user_manual.md + ; Modified from "HKEY_LOCAL_MACHINE\SOFTWARE\WOW6432Node\Microsoft\Windows\CurrentVersion\Uninstall\Microsoft EdgeWebView\ModifyPath" + ExecWait `"$R1" /install appguid=${WEBVIEW2APPGUID}&needsadmin=true` $1 + ${If} $1 = 0 + DetailPrint "$(webview2InstallSuccess)" + ${Else} + MessageBox MB_ICONEXCLAMATION|MB_ABORTRETRYIGNORE "$(webview2InstallError)" IDIGNORE ignore IDRETRY update_webview + Quit + ignore: + ${EndIf} + ${EndIf} + ${EndIf} + !endif + ${EndIf} +SectionEnd + +Section VCRedist + ; Check if VC++ Redistributable is already installed + ; Check for Visual Studio 2015-2022 redistributable (14.0 or higher) + ReadRegStr $0 HKLM "SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Version" + ${If} $0 == "" + ; Try alternative registry location + ReadRegStr $0 HKLM "SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Version" + ${EndIf} + ${If} $0 == "" + ; Try checking for any version of Visual C++ 2015-2022 Redistributable + ReadRegStr $0 HKLM "SOFTWARE\Classes\Installer\Dependencies\Microsoft.VS.VC_RuntimeMinimumVSU_amd64,v14" "Version" + ${EndIf} + + ${If} $0 == "" + ; VC++ Redistributable not found, need to install + DetailPrint "Visual C++ Redistributable not found, downloading and installing..." + + ; Download VC++ Redistributable + Delete "$TEMP\${VCREDIST_FILENAME}" + DetailPrint "Downloading Visual C++ Redistributable..." + NSISdl::download "${VCREDIST_URL}" "$TEMP\${VCREDIST_FILENAME}" + Pop $1 + + ${If} $1 == "success" + DetailPrint "Visual C++ Redistributable download successful" + + ; Install VC++ Redistributable silently + DetailPrint "Installing Visual C++ Redistributable..." + ExecWait '"$TEMP\${VCREDIST_FILENAME}" /quiet /norestart' $2 + + ${If} $2 == 0 + DetailPrint "Visual C++ Redistributable installed successfully" + ${ElseIf} $2 == 1638 + DetailPrint "Visual C++ Redistributable already installed (newer version)" + ${ElseIf} $2 == 3010 + DetailPrint "Visual C++ Redistributable installed successfully (restart required)" + ; You might want to handle restart requirement here + ${Else} + DetailPrint "Visual C++ Redistributable installation failed with exit code: $2" + MessageBox MB_ICONEXCLAMATION|MB_YESNO "Visual C++ Redistributable installation failed. Continue anyway?" 
IDYES continue_install + Abort "Installation cancelled due to Visual C++ Redistributable failure" + continue_install: + ${EndIf} + + ; Clean up downloaded file + Delete "$TEMP\${VCREDIST_FILENAME}" + ${Else} + DetailPrint "Failed to download Visual C++ Redistributable: $1" + MessageBox MB_ICONEXCLAMATION|MB_YESNO "Failed to download Visual C++ Redistributable. Continue anyway?" IDYES continue_install_download_fail + Abort "Installation cancelled due to download failure" + continue_install_download_fail: + ${EndIf} + ${Else} + DetailPrint "Visual C++ Redistributable already installed (version: $0)" + ${EndIf} +SectionEnd + +Section Install + SetDetailsPrint none + SetOutPath $INSTDIR + + !ifmacrodef NSIS_HOOK_PREINSTALL + !insertmacro NSIS_HOOK_PREINSTALL + !endif + + !insertmacro CheckIfAppIsRunning "${MAINBINARYNAME}.exe" "${PRODUCTNAME}" + + ; Copy main executable + File "${MAINBINARYSRCPATH}" + + ; Copy resources + CreateDirectory "$INSTDIR\resources" + CreateDirectory "$INSTDIR\resources\pre-install" + SetOutPath $INSTDIR + File /a "/oname=LICENSE" "D:\a\jan\jan\src-tauri\resources\LICENSE" + SetOutPath "$INSTDIR\resources\pre-install" + File /nonfatal /a /r "D:\a\jan\jan\src-tauri\resources\pre-install\" + SetOutPath $INSTDIR + + ; Copy external binaries + File /a "/oname=bun.exe" "D:\a\jan\jan\src-tauri\resources\bin\bun-x86_64-pc-windows-msvc.exe" + File /a "/oname=uv.exe" "D:\a\jan\jan\src-tauri\resources\bin\uv-x86_64-pc-windows-msvc.exe" + + ; Create file associations + + ; Register deep links + + ; Create uninstaller + WriteUninstaller "$INSTDIR\uninstall.exe" + + ; Save $INSTDIR in registry for future installations + WriteRegStr SHCTX "${MANUPRODUCTKEY}" "" $INSTDIR + + !if "${INSTALLMODE}" == "both" + ; Save install mode to be selected by default for the next installation such as updating + ; or when uninstalling + WriteRegStr SHCTX "${UNINSTKEY}" $MultiUser.InstallMode 1 + !endif + + ; Remove old main binary if it doesn't match new main binary name + ReadRegStr $OldMainBinaryName SHCTX "${UNINSTKEY}" "MainBinaryName" + ${If} $OldMainBinaryName != "" + ${AndIf} $OldMainBinaryName != "${MAINBINARYNAME}.exe" + Delete "$INSTDIR\$OldMainBinaryName" + ${EndIf} + + ; Save current MAINBINARYNAME for future updates + WriteRegStr SHCTX "${UNINSTKEY}" "MainBinaryName" "${MAINBINARYNAME}.exe" + + ; Registry information for add/remove programs + WriteRegStr SHCTX "${UNINSTKEY}" "DisplayName" "${PRODUCTNAME}" + WriteRegStr SHCTX "${UNINSTKEY}" "DisplayIcon" "$\"$INSTDIR\${MAINBINARYNAME}.exe$\"" + WriteRegStr SHCTX "${UNINSTKEY}" "DisplayVersion" "${VERSION}" + WriteRegStr SHCTX "${UNINSTKEY}" "Publisher" "${MANUFACTURER}" + WriteRegStr SHCTX "${UNINSTKEY}" "InstallLocation" "$\"$INSTDIR$\"" + WriteRegStr SHCTX "${UNINSTKEY}" "UninstallString" "$\"$INSTDIR\uninstall.exe$\"" + WriteRegDWORD SHCTX "${UNINSTKEY}" "NoModify" "1" + WriteRegDWORD SHCTX "${UNINSTKEY}" "NoRepair" "1" + + ${GetSize} "$INSTDIR" "/M=uninstall.exe /S=0K /G=0" $0 $1 $2 + IntOp $0 $0 + ${ESTIMATEDSIZE} + IntFmt $0 "0x%08X" $0 + WriteRegDWORD SHCTX "${UNINSTKEY}" "EstimatedSize" "$0" + + !if "${HOMEPAGE}" != "" + WriteRegStr SHCTX "${UNINSTKEY}" "URLInfoAbout" "${HOMEPAGE}" + WriteRegStr SHCTX "${UNINSTKEY}" "URLUpdateInfo" "${HOMEPAGE}" + WriteRegStr SHCTX "${UNINSTKEY}" "HelpLink" "${HOMEPAGE}" + !endif + + ; Create start menu shortcut + !insertmacro MUI_STARTMENU_WRITE_BEGIN Application + Call CreateOrUpdateStartMenuShortcut + !insertmacro MUI_STARTMENU_WRITE_END + + ; Create desktop shortcut for silent and passive 
installers + ; because finish page will be skipped + ${If} $PassiveMode = 1 + ${OrIf} ${Silent} + Call CreateOrUpdateDesktopShortcut + ${EndIf} + + !ifmacrodef NSIS_HOOK_POSTINSTALL + !insertmacro NSIS_HOOK_POSTINSTALL + !endif + + ; Auto close this page for passive mode + ${If} $PassiveMode = 1 + SetAutoClose true + ${EndIf} +SectionEnd + +Function .onInstSuccess + ; Check for `/R` flag only in silent and passive installers because + ; GUI installer has a toggle for the user to (re)start the app + ${If} $PassiveMode = 1 + ${OrIf} ${Silent} + ; ${GetOptions} $CMDLINE "/R" $R0 + ; ${IfNot} ${Errors} + ${GetOptions} $CMDLINE "/ARGS" $R0 + nsis_tauri_utils::RunAsUser "$INSTDIR\${MAINBINARYNAME}.exe" "$R0" + ; ${EndIf} + ${EndIf} +FunctionEnd + +Function un.onInit + !insertmacro SetContext + + !if "${INSTALLMODE}" == "both" + !insertmacro MULTIUSER_UNINIT + !endif + + !insertmacro MUI_UNGETLANGUAGE + + ${GetOptions} $CMDLINE "/P" $PassiveMode + ${IfNot} ${Errors} + StrCpy $PassiveMode 1 + ${EndIf} + + ${GetOptions} $CMDLINE "/UPDATE" $UpdateMode + ${IfNot} ${Errors} + StrCpy $UpdateMode 1 + ${EndIf} +FunctionEnd + +Section Uninstall + SetDetailsPrint none + + !ifmacrodef NSIS_HOOK_PREUNINSTALL + !insertmacro NSIS_HOOK_PREUNINSTALL + !endif + + !insertmacro CheckIfAppIsRunning "${MAINBINARYNAME}.exe" "${PRODUCTNAME}" + + ; Delete the app directory and its content from disk + ; Copy main executable + Delete "$INSTDIR\${MAINBINARYNAME}.exe" + + ; Delete LICENSE file + Delete "$INSTDIR\LICENSE" + + ; Delete resources + Delete "$INSTDIR\resources\pre-install\janhq-assistant-extension-1.0.2.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-conversational-extension-1.0.0.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-engine-management-extension-1.0.3.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-hardware-management-extension-1.0.0.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-inference-cortex-extension-1.0.25.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-model-extension-1.0.36.tgz" + + ; Delete external binaries + Delete "$INSTDIR\bun.exe" + Delete "$INSTDIR\uv.exe" + + ; Delete app associations + + ; Delete deep links + + + ; Delete uninstaller + Delete "$INSTDIR\uninstall.exe" + + RMDir /REBOOTOK "$INSTDIR\resources\pre-install" + RMDir /r /REBOOTOK "$INSTDIR\resources" + RMDir /r "$INSTDIR" + + ; Remove shortcuts if not updating + ${If} $UpdateMode <> 1 + !insertmacro DeleteAppUserModelId + + ; Remove start menu shortcut + !insertmacro MUI_STARTMENU_GETFOLDER Application $AppStartMenuFolder + !insertmacro IsShortcutTarget "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Pop $0 + ${If} $0 = 1 + !insertmacro UnpinShortcut "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" + Delete "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" + RMDir "$SMPROGRAMS\$AppStartMenuFolder" + ${EndIf} + !insertmacro IsShortcutTarget "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Pop $0 + ${If} $0 = 1 + !insertmacro UnpinShortcut "$SMPROGRAMS\${PRODUCTNAME}.lnk" + Delete "$SMPROGRAMS\${PRODUCTNAME}.lnk" + ${EndIf} + + ; Remove desktop shortcuts + !insertmacro IsShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Pop $0 + ${If} $0 = 1 + !insertmacro UnpinShortcut "$DESKTOP\${PRODUCTNAME}.lnk" + Delete "$DESKTOP\${PRODUCTNAME}.lnk" + ${EndIf} + ${EndIf} + + ; Remove registry information for add/remove programs + !if "${INSTALLMODE}" == "both" + DeleteRegKey SHCTX "${UNINSTKEY}" + !else if "${INSTALLMODE}" 
== "perMachine" + DeleteRegKey HKLM "${UNINSTKEY}" + !else + DeleteRegKey HKCU "${UNINSTKEY}" + !endif + + ; Removes the Autostart entry for ${PRODUCTNAME} from the HKCU Run key if it exists. + ; This ensures the program does not launch automatically after uninstallation if it exists. + ; If it doesn't exist, it does nothing. + ; We do this when not updating (to preserve the registry value on updates) + ${If} $UpdateMode <> 1 + DeleteRegValue HKCU "Software\Microsoft\Windows\CurrentVersion\Run" "${PRODUCTNAME}" + ${EndIf} + + ; Delete app data if the checkbox is selected + ; and if not updating + ${If} $DeleteAppDataCheckboxState = 1 + ${AndIf} $UpdateMode <> 1 + ; Clear the install location $INSTDIR from registry + DeleteRegKey SHCTX "${MANUPRODUCTKEY}" + DeleteRegKey /ifempty SHCTX "${MANUKEY}" + + ; Clear the install language from registry + DeleteRegValue HKCU "${MANUPRODUCTKEY}" "Installer Language" + DeleteRegKey /ifempty HKCU "${MANUPRODUCTKEY}" + DeleteRegKey /ifempty HKCU "${MANUKEY}" + + SetShellVarContext current + RmDir /r "$APPDATA\${BUNDLEID}" + RmDir /r "$LOCALAPPDATA\${BUNDLEID}" + ${EndIf} + + !ifmacrodef NSIS_HOOK_POSTUNINSTALL + !insertmacro NSIS_HOOK_POSTUNINSTALL + !endif + + ; Auto close if passive mode or updating + ${If} $PassiveMode = 1 + ${OrIf} $UpdateMode = 1 + SetAutoClose true + ${EndIf} +SectionEnd + +Function RestorePreviousInstallLocation + ReadRegStr $4 SHCTX "${MANUPRODUCTKEY}" "" + StrCmp $4 "" +2 0 + StrCpy $INSTDIR $4 +FunctionEnd + +Function Skip + Abort +FunctionEnd + +Function SkipIfPassive + ${IfThen} $PassiveMode = 1 ${|} Abort ${|} +FunctionEnd +Function un.SkipIfPassive + ${IfThen} $PassiveMode = 1 ${|} Abort ${|} +FunctionEnd + +Function CreateOrUpdateStartMenuShortcut + ; We used to use product name as MAINBINARYNAME + ; migrate old shortcuts to target the new MAINBINARYNAME + StrCpy $R0 0 + + !insertmacro IsShortcutTarget "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\$OldMainBinaryName" + Pop $0 + ${If} $0 = 1 + !insertmacro SetShortcutTarget "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + StrCpy $R0 1 + ${EndIf} + + !insertmacro IsShortcutTarget "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\$OldMainBinaryName" + Pop $0 + ${If} $0 = 1 + !insertmacro SetShortcutTarget "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + StrCpy $R0 1 + ${EndIf} + + ${If} $R0 = 1 + Return + ${EndIf} + + ; Skip creating shortcut if in update mode or no shortcut mode + ; but always create if migrating from wix + ${If} $WixMode = 0 + ${If} $UpdateMode = 1 + ${OrIf} $NoShortcutMode = 1 + Return + ${EndIf} + ${EndIf} + + !if "${STARTMENUFOLDER}" != "" + CreateDirectory "$SMPROGRAMS\$AppStartMenuFolder" + CreateShortcut "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + !insertmacro SetLnkAppUserModelId "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" + !else + CreateShortcut "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + !insertmacro SetLnkAppUserModelId "$SMPROGRAMS\${PRODUCTNAME}.lnk" + !endif +FunctionEnd + +Function CreateOrUpdateDesktopShortcut + ; We used to use product name as MAINBINARYNAME + ; migrate old shortcuts to target the new MAINBINARYNAME + !insertmacro IsShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\$OldMainBinaryName" + Pop $0 + ${If} $0 = 1 + !insertmacro SetShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Return + ${EndIf} + + ; Skip creating shortcut if in update mode or no 
shortcut mode + ; but always create if migrating from wix + ${If} $WixMode = 0 + ${If} $UpdateMode = 1 + ${OrIf} $NoShortcutMode = 1 + Return + ${EndIf} + ${EndIf} + + CreateShortcut "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + !insertmacro SetLnkAppUserModelId "$DESKTOP\${PRODUCTNAME}.lnk" +FunctionEnd \ No newline at end of file diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 6aaa66bb7..fb1b1950b 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -40,6 +40,7 @@ } ], "security": { + "capabilities": ["default", "logs-app-window", "logs-window", "system-monitor-window"], "csp": { "default-src": "'self' customprotocol: asset: http://localhost:* http://127.0.0.1:* ws://localhost:* ws://127.0.0.1:*", "connect-src": "ipc: http://ipc.localhost http://127.0.0.1:* ws://localhost:* ws://127.0.0.1:* https: http:", @@ -72,10 +73,10 @@ "windows": { "installMode": "passive" } - }, - "deep-link": { "schemes": ["jan"] } + } }, "bundle": { + "publisher": "Menlo Research Pte. Ltd.", "active": true, "createUpdaterArtifacts": false, "icon": [ diff --git a/src-tauri/tauri.ios.conf.json b/src-tauri/tauri.ios.conf.json new file mode 100644 index 000000000..347f16bbd --- /dev/null +++ b/src-tauri/tauri.ios.conf.json @@ -0,0 +1,26 @@ +{ + "identifier": "jan.ai.app.ios", + "build": { + "devUrl": null, + "frontendDist": "../web-app/dist", + "beforeDevCommand": "cross-env IS_DEV=true IS_IOS=true yarn build:web", + "beforeBuildCommand": "cross-env IS_IOS=true yarn build:web" + }, + "app": { + "security": { + "capabilities": ["mobile"] + } + }, + "plugins": {}, + "bundle": { + "active": true, + "iOS": { + "developmentTeam": "" + }, + "resources": [ + "resources/pre-install/**/*", + "resources/LICENSE" + ], + "externalBin": [] + } +} \ No newline at end of file diff --git a/src-tauri/tauri.linux.conf.json b/src-tauri/tauri.linux.conf.json index 80e7446ff..32f6068a2 100644 --- a/src-tauri/tauri.linux.conf.json +++ b/src-tauri/tauri.linux.conf.json @@ -1,4 +1,14 @@ { + "app": { + "security": { + "capabilities": [ + "desktop", + "system-monitor-window", + "log-app-window", + "logs-window" + ] + } + }, "bundle": { "targets": ["deb", "appimage"], "resources": ["resources/pre-install/**/*", "resources/LICENSE"], @@ -6,13 +16,11 @@ "linux": { "appimage": { "bundleMediaFramework": false, - "files": { - } + "files": {} }, "deb": { "files": { - "usr/bin/bun": "resources/bin/bun", - "usr/lib/Jan/resources/lib/libvulkan.so": "resources/lib/libvulkan.so" + "usr/bin/bun": "resources/bin/bun" } } } diff --git a/src-tauri/tauri.macos.conf.json b/src-tauri/tauri.macos.conf.json index d7d80f669..5c5b493fa 100644 --- a/src-tauri/tauri.macos.conf.json +++ b/src-tauri/tauri.macos.conf.json @@ -1,4 +1,14 @@ { + "app": { + "security": { + "capabilities": [ + "desktop", + "system-monitor-window", + "log-app-window", + "logs-window" + ] + } + }, "bundle": { "targets": ["app", "dmg"], "resources": ["resources/pre-install/**/*", "resources/LICENSE"], diff --git a/src-tauri/tauri.windows.conf.json b/src-tauri/tauri.windows.conf.json index 16cb9b10a..bc8ede344 100644 --- a/src-tauri/tauri.windows.conf.json +++ b/src-tauri/tauri.windows.conf.json @@ -1,13 +1,23 @@ { + "app": { + "security": { + "capabilities": [ + "desktop", + "system-monitor-window", + "log-app-window", + "logs-window" + ] + } + }, + "bundle": { - "targets": ["nsis"], - "resources": ["resources/pre-install/**/*", "resources/lib/vulkan-1.dll", "resources/lib/vc_redist.x64.exe", "resources/LICENSE"], + "targets": 
["nsis", "msi"], + "resources": [ + "resources/pre-install/**/*", + "resources/LICENSE" + ], "externalBin": ["resources/bin/bun", "resources/bin/uv"], "windows": { - "nsis": { - "installerHooks": "./windows/hooks.nsh", - "installerIcon": "icons/icon.ico" - }, "webviewInstallMode": { "silent": true, "type": "downloadBootstrapper" diff --git a/src-tauri/utils/Cargo.toml b/src-tauri/utils/Cargo.toml index 7d313a42b..691f90a92 100644 --- a/src-tauri/utils/Cargo.toml +++ b/src-tauri/utils/Cargo.toml @@ -8,7 +8,6 @@ base64 = "0.22" hmac = "0.12" log = { version = "0.4", optional = true } rand = "0.8" -reqwest = { version = "0.11", features = ["json"] } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" sha2 = "0.10" @@ -16,6 +15,12 @@ tokio = { version = "1", features = ["process", "fs", "macros", "rt"] } tokio-util = "0.7.14" url = "2.5" +[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies] +reqwest = { version = "0.11", features = ["json", "native-tls"] } + +[target.'cfg(any(target_os = "android", target_os = "ios"))'.dependencies] +reqwest = { version = "0.11", features = ["json", "rustls-tls"], default-features = false } + [target.'cfg(windows)'.dependencies] windows-sys = { version = "0.60.2", features = ["Win32_Storage_FileSystem"] } diff --git a/src-tauri/utils/src/system.rs b/src-tauri/utils/src/system.rs index d4ebc79af..efb137550 100644 --- a/src-tauri/utils/src/system.rs +++ b/src-tauri/utils/src/system.rs @@ -1,5 +1,5 @@ -/// Checks AVX2 CPU support for npx override with bun binary -pub fn can_override_npx() -> bool { +/// Checks if npx can be overridden with bun binary +pub fn can_override_npx(bun_path: String) -> bool { // We need to check the CPU for the AVX2 instruction support if we are running under MacOS // with Intel CPU. 
We can override `npx` command with `bun` only if CPU is // supporting AVX2, otherwise we need to use default `npx` binary @@ -13,10 +13,31 @@ pub fn can_override_npx() -> bool { return false; // we cannot override npx with bun binary } } - + // Check if bun_path exists + if !std::path::Path::new(bun_path.as_str()).exists() { + #[cfg(feature = "logging")] + log::warn!( + "bun binary not found at '{}', default npx binary will be used", + bun_path + ); + return false; + } true // by default, we can override npx with bun binary } +/// Checks if uv_path exists and determines if uvx can be overridden with the uv binary +pub fn can_override_uvx(uv_path: String) -> bool { + if !std::path::Path::new(uv_path.as_str()).exists() { + #[cfg(feature = "logging")] + log::warn!( + "uv binary not found at '{}', default uvx binary will be used", + uv_path + ); + return false; + } + true // by default, we can override uvx with uv binary +} + /// Setup library paths for different operating systems pub fn setup_library_path(library_path: Option<&str>, command: &mut tokio::process::Command) { if let Some(lib_path) = library_path { @@ -60,7 +81,6 @@ pub fn setup_library_path(library_path: Option<&str>, command: &mut tokio::proce pub fn setup_windows_process_flags(command: &mut tokio::process::Command) { #[cfg(all(windows, target_arch = "x86_64"))] { - use std::os::windows::process::CommandExt; const CREATE_NO_WINDOW: u32 = 0x0800_0000; const CREATE_NEW_PROCESS_GROUP: u32 = 0x0000_0200; command.creation_flags(CREATE_NO_WINDOW | CREATE_NEW_PROCESS_GROUP); diff --git a/src-tauri/windows/hooks.nsh b/src-tauri/windows/hooks.nsh deleted file mode 100644 index d1beed199..000000000 --- a/src-tauri/windows/hooks.nsh +++ /dev/null @@ -1,65 +0,0 @@ -!macro NSIS_HOOK_POSTINSTALL - ; Check if Visual C++ Redistributable is already installed - ReadRegStr $0 HKLM "SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Version" - ${If} $0 == "" - ; Try alternative registry location - ReadRegStr $0 HKLM "SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Version" - ${EndIf} - - ${If} $0 == "" - ; VC++ Redistributable not found, need to install - DetailPrint "Visual C++ Redistributable not found, installing from bundled file..." - - ; Install from bundled EXE if not installed - ${If} ${FileExists} "$INSTDIR\resources\lib\vc_redist.x64.exe" - DetailPrint "Installing Visual C++ Redistributable..." 
- ; Copy to TEMP folder and then execute installer - CopyFiles "$INSTDIR\resources\lib\vc_redist.x64.exe" "$TEMP\vc_redist.x64.exe" - ExecWait '"$TEMP\vc_redist.x64.exe" /quiet /norestart' $1 - - ; Check whether installation process exited successfully (code 0) or not - ${If} $1 == 0 - DetailPrint "Visual C++ Redistributable installed successfully" - ${ElseIf} $1 == 1638 - DetailPrint "Visual C++ Redistributable already installed (newer version)" - ${ElseIf} $1 == 3010 - DetailPrint "Visual C++ Redistributable installed successfully (restart required)" - ${Else} - DetailPrint "Visual C++ installation failed with exit code: $1" - ${EndIf} - - ; Clean up setup files from TEMP and your installed app - Delete "$TEMP\vc_redist.x64.exe" - Delete "$INSTDIR\resources\lib\vc_redist.x64.exe" - ${Else} - DetailPrint "Visual C++ Redistributable not found at expected location: $INSTDIR\resources\lib\vc_redist.x64.exe" - ${EndIf} - ${Else} - DetailPrint "Visual C++ Redistributable already installed (version: $0)" - ${EndIf} - - ; ---- Copy LICENSE to install root ---- - ${If} ${FileExists} "$INSTDIR\resources\LICENSE" - CopyFiles /SILENT "$INSTDIR\resources\LICENSE" "$INSTDIR\LICENSE" - DetailPrint "Copied LICENSE to install root" - - ; Optional cleanup - remove from resources folder - Delete "$INSTDIR\resources\LICENSE" - ${Else} - DetailPrint "LICENSE not found at expected location: $INSTDIR\resources\LICENSE" - ${EndIf} - - ; ---- Copy vulkan-1.dll to install root ---- - ${If} ${FileExists} "$INSTDIR\resources\lib\vulkan-1.dll" - CopyFiles /SILENT "$INSTDIR\resources\lib\vulkan-1.dll" "$INSTDIR\vulkan-1.dll" - DetailPrint "Copied vulkan-1.dll to install root" - - ; Optional cleanup - remove from resources folder - Delete "$INSTDIR\resources\lib\vulkan-1.dll" - - ; Only remove the lib directory if it's empty after removing both files - RMDir "$INSTDIR\resources\lib" - ${Else} - DetailPrint "vulkan-1.dll not found at expected location: $INSTDIR\resources\lib\vulkan-1.dll" - ${EndIf} -!macroend \ No newline at end of file diff --git a/tests/checklist.md b/tests/checklist.md index b2e1da7ca..8e9e65d4b 100644 --- a/tests/checklist.md +++ b/tests/checklist.md @@ -16,7 +16,7 @@ Before testing, set-up the following in the old version to make sure that we can - [ ] Change the `App Data` to some other folder - [ ] Create a Custom Provider - [ ] Disable some model providers -- [NEW] Change llama.cpp setting of 2 models +- [ ] Change llama.cpp setting of 2 models #### Validate that the update does not corrupt existing user data or settings (before and after update show the same information): - [ ] Threads - [ ] Previously used model and assistants is shown correctly @@ -73,35 +73,44 @@ Before testing, set-up the following in the old version to make sure that we can - [ ] Ensure that when this value is changed, there is no broken UI caused by it - [ ] Code Block - [ ] Show Line Numbers -- [ENG] Ensure that when click on `Reset` in the `Appearance` section, it reset back to the default values -- [ENG] Ensure that when click on `Reset` in the `Code Block` section, it reset back to the default values +- [ ] [0.7.0] Compact Token Counter will show token counter in side chat input when toggle, if not it will show a small token counter below the chat input +- [ ] [ENG] Ensure that when click on `Reset` in the `Appearance` section, it reset back to the default values +- [ ] [ENG] Ensure that when click on `Reset` in the `Code Block` section, it reset back to the default values #### In `Model Providers`: In `Llama.cpp`: 
- [ ] After downloading a model from hub, the model is listed with the correct name under `Models` - [ ] Can import `gguf` model with no error +- [ ] [0.7.0] While importing, there should be an import indication appear under `Models` - [ ] Imported model will be listed with correct name under the `Models` +- [ ] [0.6.9] Take a `gguf` file and delete the `.gguf` extensions from the file name, import it into Jan and verify that it works. +- [ ] [0.6.10] Can import vlm models and chat with images +- [ ] [0.6.10] Import a file that is not `mmproj` in the `mmproj field` should show validation error +- [ ] [0.6.10] Import `mmproj` from different models should error +- [ ] [0.7.0] Users can customize model display names according to their own preferences. - [ ] Check that when click `delete` the model will be removed from the list - [ ] Deleted model doesn't appear in the selectable models section in chat input (even in old threads that use the model previously) - [ ] Ensure that user can re-import deleted imported models +- [ ] [0.6.8] Ensure that there is a recommended `llama.cpp` for each system and that it works out of the box for users. +- [ ] [0.6.10] Change to an older version of llama.cpp backend. Click on `Check for Llamacpp Updates` it should alert that there is a new version. +- [ ] [0.7.0] Users can cancel a backend download while it is in progress. +- [ ] [0.6.10] Try `Install backend from file` for a backend and it should show as an option for backend +- [ ] [0.7.0] User can install a backend from file in both .tar.gz and .zip formats, and the backend appears in the backend selection menu +- [ ] [0.7.0] A manually installed backend is automatically selected after import, and the backend menu updates to show it as the latest imported backend. - [ ] Enable `Auto-Unload Old Models`, and ensure that only one model can run / start at a time. If there are two model running at the time of enable, both of them will be stopped. - [ ] Disable `Auto-Unload Old Models`, and ensure that multiple models can run at the same time. - [ ] Enable `Context Shift` and ensure that context can run for long without encountering memory error. Use the `banana test` by turn on fetch MCP => ask local model to fetch and summarize the history of banana (banana has a very long history on wiki it turns out). It should run out of context memory sufficiently fast if `Context Shift` is not enabled. + +In `Model Settings`: - [ ] [0.6.8] Ensure that user can change the Jinja chat template of individual model and it doesn't affect the template of other model -- [ ] [0.6.8] Ensure that there is a recommended `llama.cpp` for each system and that it works out of the box for users. - [ ] [0.6.8] Ensure we can override Tensor Buffer Type in the model settings to offload layers between GPU and CPU => Download any MoE Model (i.e., gpt-oss-20b) => Set tensor buffer type as `blk\\.([0-30]*[02468])\\.ffn_.*_exps\\.=CPU` => check if those tensors are in cpu and run inference (you can view the app.log if it contains `--override-tensor", "blk\\\\.([0-30]*[02468])\\\\.ffn_.*_exps\\\\.=CPU`) -- [ ] [0.6.9] Take a `gguf` file and delete the `.gguf` extensions from the file name, import it into Jan and verify that it works. -- [ ] [0.6.10] Can import vlm models and chat with images -- [ ] [0.6.10] Import model on mmproj field should show validation error -- [ ] [0.6.10] Import mmproj from different models should not be able to chat with the models -- [ ] [0.6.10] Change to an older version of llama.cpp backend. 
Click on `Check for Llamacpp Updates` it should alert that there is a new version. -- [ ] [0.6.10] Try `Install backend from file` for a backend and it should show as an option for backend In Remote Model Providers: - [ ] Check that the following providers are presence: - [ ] OpenAI - [ ] Anthropic + - [ ] [0.7.0] Azure - [ ] Cohere - [ ] OpenRouter - [ ] Mistral @@ -113,12 +122,15 @@ In Remote Model Providers: - [ ] Delete a model and ensure that it doesn't show up in the `Models` list view or in the selectable dropdown in chat input. - [ ] Ensure that a deleted model also not selectable or appear in old threads that used it. - [ ] Adding of new model manually works and user can chat with the newly added model without error (you can add back the model you just delete for testing) -- [ ] [0.6.9] Make sure that Ollama set-up as a custom provider work with Jan +- [ ] [0.7.0] Vision capabilities are now automatically detected for vision models +- [ ] [0.7.0] New default models are available for adding to remote providers through a drop down (OpenAI, Mistral, Groq) + In Custom Providers: - [ ] Ensure that user can create a new custom providers with the right baseURL and API key. - [ ] Click `Refresh` should retrieve a list of available models from the Custom Providers. - [ ] User can chat with the custom providers - [ ] Ensure that Custom Providers can be deleted and won't reappear in a new session +- [ ] [0.6.9] Make sure that Ollama set-up as a custom provider work with Jan In general: - [ ] Disabled Model Provider should not show up as selectable in chat input of new thread and old thread alike (old threads' chat input should show `Select Model` instead of disabled model) @@ -162,9 +174,10 @@ Ensure that the following section information show up for hardware - [ ] When the user click `Always Allow` on the pop up, the tool will retain permission and won't ask for confirmation again. (this applied at an individual tool level, not at the MCP server level) - [ ] If `Allow All MCP Tool Permissions` is enabled, in every new thread, there should not be any confirmation dialog pop up when a tool is called. 
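The Custom Providers items above all hinge on the same behaviour: the base URL the user enters must answer OpenAI-compatible requests, starting with the model listing that `Refresh` triggers. A minimal standalone sketch of that check, written against a default local Ollama endpoint — the base URL, the empty API key and the response shape are assumptions for illustration, not values taken from this diff:

// Standalone TypeScript sketch (not part of the Jan codebase): approximates what
// `Refresh` exercises against a custom provider such as a local Ollama instance.
// The base URL and empty API key are assumed defaults; adjust for the provider under test.
const baseUrl = 'http://localhost:11434/v1' // assumed default Ollama OpenAI-compatible endpoint
const apiKey = '' // many local servers accept an empty key

async function listCustomProviderModels(): Promise<string[]> {
  const res = await fetch(`${baseUrl}/models`, {
    headers: apiKey ? { Authorization: `Bearer ${apiKey}` } : {},
  })
  if (!res.ok) {
    throw new Error(`Provider refused the request: ${res.status}`)
  }
  const body = (await res.json()) as { data?: Array<{ id: string }> }
  return (body.data ?? []).map((m) => m.id)
}

listCustomProviderModels()
  .then((ids) => console.log('Models advertised by the provider:', ids))
  .catch((err) => console.error('Custom provider check failed:', err))

If this call returns an error or an empty list, the `Refresh` and chat items above will fail for the same reason, so it is a quick way to separate provider-side problems from app-side ones.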
- [ ] When the pop-up appear, make sure that the `Tool Parameters` is also shown with detail in the pop-up -- [ ] [0.6.9] Go to Enter JSON configuration when created a new MCp => paste the JSON config inside => click `Save` => server works +- [ ] [0.6.9] Go to Enter JSON configuration when created a new MCP => paste the JSON config inside => click `Save` => server works - [ ] [0.6.9] If individual JSON config format is failed, the MCP server should not be activated - [ ] [0.6.9] Make sure that MCP server can be used with streamable-http transport => connect to Smithery and test MCP server +- [ ] [0.7.0] When deleting an MCP Server, a toast notification is shown #### In `Local API Server`: - [ ] User can `Start Server` and chat with the default endpoint @@ -175,7 +188,8 @@ Ensure that the following section information show up for hardware - [ ] [0.6.9] When the startup configuration, the last used model is also automatically start (users does not have to manually start a model before starting the server) - [ ] [0.6.9] Make sure that you can send an image to a Local API Server and it also works (can set up Local API Server as a Custom Provider in Jan to test) - [ ] [0.6.10] Make sure you are still able to see API key when server local status is running - +- [ ] [0.7.0] Users can see the Jan API Server Swagger UI by opening the following path in their browser `http://:` +- [ ] [0.7.0] Users can set the trusted host to * in the server configuration to accept requests from all host or without host #### In `HTTPS Proxy`: - [ ] Model download request goes through proxy endpoint @@ -188,6 +202,7 @@ Ensure that the following section information show up for hardware - [ ] Clicking download work inside the Model card HTML - [ ] [0.6.9] Check that the model recommendation base on user hardware work as expected in the Model Hub - [ ] [0.6.10] Check that model of the same name but different author can be found in the Hub catalog (test with [https://huggingface.co/unsloth/Qwen3-4B-Thinking-2507-GGUF](https://huggingface.co/unsloth/Qwen3-4B-Thinking-2507-GGUF)) +- [ ] [0.7.0] Support downloading models with the same name from different authors, models not listed on the hub will be prefixed with the author name ## D. Threads @@ -214,19 +229,30 @@ Ensure that the following section information show up for hardware - [ ] User can send message with different type of text content (e.g text, emoji, ...) - [ ] When request model to generate a markdown table, the table is correctly formatted as returned from the model. - [ ] When model generate code, ensure that the code snippets is properly formatted according to the `Appearance -> Code Block` setting. +- [ ] [0.7.0] LaTeX formulas now render correctly in chat. Both inline \(...\) and block \[...\] formats are supported. Code blocks and HTML tags are not affected - [ ] Users can edit their old message and user can regenerate the answer based on the new message - [ ] User can click `Copy` to copy the model response +- [ ] [0.6.10] When click on copy code block from model generation, it will only copy one code-block at a time instead of multiple code block at once - [ ] User can click `Delete` to delete either the user message or the model response. - [ ] The token speed appear when a response from model is being generated and the final value is show under the response. - [ ] Make sure that user when using IME keyboard to type Chinese and Japanese character and they press `Enter`, the `Send` button doesn't trigger automatically after each words. 
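For the Local API Server items above (start server, default endpoint, API key visibility, trusted host), a plain OpenAI-style request from outside the app is the quickest smoke test. This is a hedged sketch only: the host, port, path, API key and model id are placeholders, not values taken from this diff — use whatever the Local API Server settings page actually shows:

// Standalone TypeScript sketch (not part of the Jan codebase).
// All concrete values below are placeholders/assumptions.
const server = 'http://127.0.0.1:1337' // assumed host:port; use the one shown in the server settings
const apiKey = 'REPLACE_WITH_KEY_FROM_SETTINGS'

async function smokeTestChat(): Promise<void> {
  const res = await fetch(`${server}/v1/chat/completions`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
      model: 'REPLACE_WITH_A_STARTED_MODEL_ID',
      messages: [{ role: 'user', content: 'ping' }],
    }),
  })
  // If the request is rejected because it comes from a host the server does not
  // trust, the trusted-host item above applies: setting the trusted host to *
  // accepts requests from any host.
  console.log('status:', res.status)
  console.log(await res.text())
}

smokeTestChat().catch(console.error)

The Swagger UI item refers to opening the same server host and port in a browser while the server is running.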
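For the LaTeX rendering item under Threads, a single message containing both delimiter styles is enough to verify the behaviour; the formulas themselves are arbitrary examples, not taken from the app:

Inline: the identity \( e^{i\pi} + 1 = 0 \) should render within the sentence.

Block:
\[
  \int_0^1 x^2 \, dx = \frac{1}{3}
\]

Code blocks and HTML tags in the same message should remain untouched, matching the checklist item.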
-- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a remote model -- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a local model +- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a Remote model & Local model - [ ] [0.6.9] Check that you can paste an image to text box from your system clipboard (Copy - Paste) -- [ ] [0.6.9] Make sure that user can favourite a model in the llama.cpp list and see the favourite model selection in chat input +- [ ] [0.6.10] User can Paste (e.g Ctrl + v) text into chat input when it is a vision model +- [ ] [0.6.9] Make sure that user can favourite a model in the Model list and see the favourite model selection in chat input - [ ] [0.6.10] User can click mode's setting on chat, enable Auto-Optimize Settings, and continue chatting with the model without interruption. - [ ] Verify this works with at least two models of different sizes (e.g., 1B and 7B). -- [ ] [0.6.10] User can Paste (e.g Ctrl + v) text into chat input when it is a vision model -- [ ] [0.6.10] When click on copy code block from model generation, it will only copy one code-block at a time instead of multiple code block at once +- [ ] [0.7.0] When chatting with a model, the UI displays a token usage counter showing the percentage of context consumed. +- [ ] [0.7.0] When chatting with a model, the scroll no longer follows the model’s streaming response; it only auto-scrolls when the user sends a new message +#### In Project + +- [ ] [0.7.0] User can create new project +- [ ] [0.7.0] User can add existing threads to a project +- [ ] [0.7.0] When the user attempts to delete a project, a confirmation dialog must appear warning that this action will permanently delete the project and all its associated threads. +- [ ] [0.7.0] The user can successfully delete a project, and all threads contained within that project are also permanently deleted. +- [ ] [0.7.0] A thread that already belongs to a project cannot be re-added to the same project. +- [ ] [0.7.0] Favorited threads retain their "favorite" status even after being added to a project + ## E. Assistants - [ ] There is always at least one default Assistant which is Jan - [ ] The default Jan assistant has `stream = True` by default @@ -238,6 +264,7 @@ Ensure that the following section information show up for hardware In `Settings -> General`: - [ ] Change the location of the `App Data` to some other path that is not the default path +- [ ] [0.7.0] Users cannot set the data location to root directories (e.g., C:\, D:\ on Windows), but can select subfolders within those drives (e.g., C:\data, D:\data) - [ ] Click on `Reset` button in `Other` to factory reset the app: - [ ] All threads deleted - [ ] All Assistant deleted except for default Jan Assistant diff --git a/web-app/index.html b/web-app/index.html index fc264d096..55625d33c 100644 --- a/web-app/index.html +++ b/web-app/index.html @@ -1,12 +1,26 @@ - + - - + + - + Jan +
    diff --git a/web-app/package.json b/web-app/package.json index 37323e849..b1f0b58de 100644 --- a/web-app/package.json +++ b/web-app/package.json @@ -21,8 +21,8 @@ "@dnd-kit/core": "6.3.1", "@dnd-kit/modifiers": "9.0.0", "@dnd-kit/sortable": "10.0.0", - "@jan/extensions-web": "link:../extensions-web", - "@janhq/core": "link:../core", + "@jan/extensions-web": "workspace:*", + "@janhq/core": "workspace:*", "@radix-ui/react-accordion": "1.2.11", "@radix-ui/react-avatar": "1.1.10", "@radix-ui/react-dialog": "1.1.15", @@ -83,7 +83,7 @@ "remark-math": "6.0.0", "sonner": "2.0.5", "tailwindcss": "4.1.4", - "token.js": "npm:token.js-fork@0.7.27", + "token.js": "npm:token.js-fork@0.7.29", "tw-animate-css": "1.2.8", "ulidx": "2.4.1", "unified": "11.0.5", diff --git a/web-app/src/__tests__/i18n.test.ts b/web-app/src/__tests__/i18n.test.ts index 644bc019d..262d93194 100644 --- a/web-app/src/__tests__/i18n.test.ts +++ b/web-app/src/__tests__/i18n.test.ts @@ -49,4 +49,4 @@ describe('i18n module', () => { expect(i18nModule[exportName]).toBeDefined() }) }) -}) \ No newline at end of file +}) diff --git a/web-app/src/__tests__/main.test.tsx b/web-app/src/__tests__/main.test.tsx index c105482bf..aec753d56 100644 --- a/web-app/src/__tests__/main.test.tsx +++ b/web-app/src/__tests__/main.test.tsx @@ -76,4 +76,4 @@ describe('main.tsx', () => { await import('../main') }).rejects.toThrow() }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/dialog.test.tsx b/web-app/src/components/ui/__tests__/dialog.test.tsx index b4c1f5aab..aeb0cbf52 100644 --- a/web-app/src/components/ui/__tests__/dialog.test.tsx +++ b/web-app/src/components/ui/__tests__/dialog.test.tsx @@ -416,4 +416,4 @@ describe('Dialog Components', () => { expect(screen.getByText('Dialog description')).toHaveAttribute('data-slot', 'dialog-description') expect(screen.getByText('Footer button').closest('div')).toHaveAttribute('data-slot', 'dialog-footer') }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/dropdown-menu.test.tsx b/web-app/src/components/ui/__tests__/dropdown-menu.test.tsx index 7b0da6f76..541ae0f93 100644 --- a/web-app/src/components/ui/__tests__/dropdown-menu.test.tsx +++ b/web-app/src/components/ui/__tests__/dropdown-menu.test.tsx @@ -853,4 +853,4 @@ describe('DropdownMenu Components', () => { expect(handleItemClick).toHaveBeenCalledTimes(1) }) }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/dropdrawer.test.tsx b/web-app/src/components/ui/__tests__/dropdrawer.test.tsx index 6203d9f4e..cef88b9d9 100644 --- a/web-app/src/components/ui/__tests__/dropdrawer.test.tsx +++ b/web-app/src/components/ui/__tests__/dropdrawer.test.tsx @@ -530,4 +530,4 @@ describe('DropDrawer Component', () => { expect(trigger).toHaveAttribute('aria-haspopup', 'dialog') }) }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/hover-card.test.tsx b/web-app/src/components/ui/__tests__/hover-card.test.tsx index 71e78cb7f..5e0a39628 100644 --- a/web-app/src/components/ui/__tests__/hover-card.test.tsx +++ b/web-app/src/components/ui/__tests__/hover-card.test.tsx @@ -165,4 +165,4 @@ describe('HoverCard Components', () => { expect(screen.getByText('Hover content')).toBeDefined() }) }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/input.test.tsx b/web-app/src/components/ui/__tests__/input.test.tsx index 2ae18adad..ddf2fa7db 100644 --- a/web-app/src/components/ui/__tests__/input.test.tsx +++ 
b/web-app/src/components/ui/__tests__/input.test.tsx @@ -93,4 +93,4 @@ describe('Input', () => { fireEvent.blur(input) expect(handleBlur).toHaveBeenCalledTimes(1) }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/popover.test.tsx b/web-app/src/components/ui/__tests__/popover.test.tsx index cec809bb7..b76d1ce04 100644 --- a/web-app/src/components/ui/__tests__/popover.test.tsx +++ b/web-app/src/components/ui/__tests__/popover.test.tsx @@ -436,4 +436,4 @@ describe('Popover Components', () => { }) }) }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/progress.test.tsx b/web-app/src/components/ui/__tests__/progress.test.tsx index daa4b5c05..90a7bc70f 100644 --- a/web-app/src/components/ui/__tests__/progress.test.tsx +++ b/web-app/src/components/ui/__tests__/progress.test.tsx @@ -84,4 +84,4 @@ describe('Progress', () => { // For values over 100, the transform should be positive expect(indicator?.style.transform).toContain('translateX(--50%)') }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/radio-group.test.tsx b/web-app/src/components/ui/__tests__/radio-group.test.tsx index a788931d8..1cb85e7c6 100644 --- a/web-app/src/components/ui/__tests__/radio-group.test.tsx +++ b/web-app/src/components/ui/__tests__/radio-group.test.tsx @@ -59,4 +59,4 @@ describe('RadioGroup', () => { expect(screen.getByLabelText('HTTP')).toBeChecked() expect(screen.getByLabelText('SSE')).not.toBeChecked() }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/sheet.test.tsx b/web-app/src/components/ui/__tests__/sheet.test.tsx index dc21bbe66..988e512c0 100644 --- a/web-app/src/components/ui/__tests__/sheet.test.tsx +++ b/web-app/src/components/ui/__tests__/sheet.test.tsx @@ -260,4 +260,4 @@ describe('Sheet Components', () => { expect(screen.getByText('Main Content')).toBeInTheDocument() expect(screen.getByText('Close')).toBeInTheDocument() }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/skeleton.test.tsx b/web-app/src/components/ui/__tests__/skeleton.test.tsx index 273be182e..39d9535a7 100644 --- a/web-app/src/components/ui/__tests__/skeleton.test.tsx +++ b/web-app/src/components/ui/__tests__/skeleton.test.tsx @@ -61,4 +61,4 @@ describe('Skeleton', () => { expect(skeleton).toHaveClass('w-full') expect(skeleton).toHaveClass('bg-red-500') }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/slider.test.tsx b/web-app/src/components/ui/__tests__/slider.test.tsx index 5fd72f766..2a15441b2 100644 --- a/web-app/src/components/ui/__tests__/slider.test.tsx +++ b/web-app/src/components/ui/__tests__/slider.test.tsx @@ -190,4 +190,4 @@ describe('Slider', () => { expect(thumb).toHaveClass('border-accent', 'bg-main-view', 'rounded-full') }) }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/sonner.test.tsx b/web-app/src/components/ui/__tests__/sonner.test.tsx index 72aca5526..8b2fc762d 100644 --- a/web-app/src/components/ui/__tests__/sonner.test.tsx +++ b/web-app/src/components/ui/__tests__/sonner.test.tsx @@ -90,4 +90,4 @@ describe('Toaster Component', () => { expect(toaster).toHaveAttribute('data-rich-colors', 'true') expect(toaster).toHaveAttribute('data-close-button', 'true') }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/switch.test.tsx b/web-app/src/components/ui/__tests__/switch.test.tsx index d872dbc11..0db35d716 100644 --- 
a/web-app/src/components/ui/__tests__/switch.test.tsx +++ b/web-app/src/components/ui/__tests__/switch.test.tsx @@ -189,4 +189,4 @@ describe('Switch', () => { const switchElement = document.querySelector('[data-slot="switch"]') expect(switchElement).toHaveClass('data-[state=unchecked]:bg-main-view-fg/20') }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/textarea.test.tsx b/web-app/src/components/ui/__tests__/textarea.test.tsx index 6daf09e4d..806cf9515 100644 --- a/web-app/src/components/ui/__tests__/textarea.test.tsx +++ b/web-app/src/components/ui/__tests__/textarea.test.tsx @@ -113,4 +113,4 @@ describe('Textarea', () => { const textarea = screen.getByRole('textbox') expect(textarea).toHaveAttribute('cols', '50') }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/__tests__/tooltip.test.tsx b/web-app/src/components/ui/__tests__/tooltip.test.tsx index 4221751d4..46c68e0b8 100644 --- a/web-app/src/components/ui/__tests__/tooltip.test.tsx +++ b/web-app/src/components/ui/__tests__/tooltip.test.tsx @@ -111,4 +111,4 @@ describe('Tooltip Components', () => { expect(screen.getByText('First')).toBeInTheDocument() expect(screen.getByText('Second')).toBeInTheDocument() }) -}) \ No newline at end of file +}) diff --git a/web-app/src/components/ui/dropdown-menu.tsx b/web-app/src/components/ui/dropdown-menu.tsx index 7a527aaca..4f92ebe60 100644 --- a/web-app/src/components/ui/dropdown-menu.tsx +++ b/web-app/src/components/ui/dropdown-menu.tsx @@ -41,7 +41,7 @@ function DropdownMenuContent({ data-slot="dropdown-menu-content" sideOffset={sideOffset} className={cn( - 'bg-main-view select-none text-main-view-fg border-main-view-fg/5 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-[51] max-h-(--radix-dropdown-menu-content-available-height) min-w-[8rem] origin-(--radix-dropdown-menu-content-transform-origin) overflow-x-hidden overflow-y-auto rounded-md border p-1 shadow-md', + 'bg-main-view select-none text-main-view-fg border-main-view-fg/5 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 max-h-(--radix-dropdown-menu-content-available-height) min-w-[8rem] origin-(--radix-dropdown-menu-content-transform-origin) overflow-x-hidden overflow-y-auto rounded-md border p-1 shadow-md z-[90]', className )} {...props} @@ -229,7 +229,7 @@ function DropdownMenuSubContent({ { expect(value.length).toBeGreaterThan(0) }) }) -}) \ No newline at end of file +}) diff --git a/web-app/src/constants/chat.ts b/web-app/src/constants/chat.ts new file mode 100644 index 000000000..e1649049f --- /dev/null +++ b/web-app/src/constants/chat.ts @@ -0,0 +1,6 @@ +/** + * Chat-related constants + */ + +export const TEMPORARY_CHAT_ID = 'temporary-chat' +export const TEMPORARY_CHAT_QUERY_ID = 'temporary-chat' diff --git a/web-app/src/consts/providers.ts b/web-app/src/consts/providers.ts index 328ba3a9a..d3d806888 100644 --- a/web-app/src/consts/providers.ts +++ b/web-app/src/consts/providers.ts @@ -96,7 +96,7 @@ 
export const predefinedProviders = [ { active: true, api_key: '', - base_url: 'https://api.anthropic.com', + base_url: 'https://api.anthropic.com/v1', provider: 'anthropic', explore_models_url: 'https://docs.anthropic.com/en/docs/about-claude/models', @@ -127,11 +127,21 @@ export const predefinedProviders = [ }, ], models: [], + custom_header: [ + { + header: 'anthropic-version', + value: '2023-06-01' + }, + { + header: 'anthropic-dangerous-direct-browser-access', + value: 'true' + } + ] }, { active: true, api_key: '', - base_url: 'https://api.cohere.ai/compatibility/v1', + base_url: 'https://api.cohere.ai/v1', explore_models_url: 'https://docs.cohere.com/v2/docs/models', provider: 'cohere', settings: [ diff --git a/web-app/src/containers/ChatInput.tsx b/web-app/src/containers/ChatInput.tsx index cba580ebd..3d70e93f0 100644 --- a/web-app/src/containers/ChatInput.tsx +++ b/web-app/src/containers/ChatInput.tsx @@ -4,7 +4,6 @@ import TextareaAutosize from 'react-textarea-autosize' import { cn } from '@/lib/utils' import { usePrompt } from '@/hooks/usePrompt' import { useThreads } from '@/hooks/useThreads' -import { useThreadManagement } from '@/hooks/useThreadManagement' import { useCallback, useEffect, useRef, useState } from 'react' import { Button } from '@/components/ui/button' import { @@ -38,6 +37,11 @@ import { useTools } from '@/hooks/useTools' import { TokenCounter } from '@/components/TokenCounter' import { useMessages } from '@/hooks/useMessages' import { useShallow } from 'zustand/react/shallow' +import { McpExtensionToolLoader } from './McpExtensionToolLoader' +import { ExtensionTypeEnum, MCPExtension } from '@janhq/core' +import { ExtensionManager } from '@/lib/extension' +import { useAnalytic } from '@/hooks/useAnalytic' +import posthog from 'posthog-js' type ChatInputProps = { className?: string @@ -65,8 +69,6 @@ const ChatInput = ({ const prompt = usePrompt((state) => state.prompt) const setPrompt = usePrompt((state) => state.setPrompt) const currentThreadId = useThreads((state) => state.currentThreadId) - const updateThread = useThreads((state) => state.updateThread) - const { getFolderById } = useThreadManagement() const { t } = useTranslation() const spellCheckChatInput = useGeneralSetting( (state) => state.spellCheckChatInput @@ -88,6 +90,7 @@ const ChatInput = ({ const selectedModel = useModelProvider((state) => state.selectedModel) const selectedProvider = useModelProvider((state) => state.selectedProvider) const sendMessage = useChat() + const { productAnalytic } = useAnalytic() const [message, setMessage] = useState('') const [dropdownToolsAvailable, setDropdownToolsAvailable] = useState(false) const [tooltipToolsAvailable, setTooltipToolsAvailable] = useState(false) @@ -132,7 +135,10 @@ const ChatInput = ({ const activeModels = await serviceHub .models() .getActiveModels('llamacpp') - setHasActiveModels(activeModels.length > 0) + const hasMatchingActiveModel = activeModels.some( + (model) => String(model) === selectedModel?.id + ) + setHasActiveModels(activeModels.length > 0 && hasMatchingActiveModel) } catch (error) { console.error('Failed to get active models:', error) setHasActiveModels(false) @@ -145,7 +151,7 @@ const ChatInput = ({ const intervalId = setInterval(checkActiveModels, 3000) return () => clearInterval(intervalId) - }, [serviceHub]) + }, [serviceHub, selectedModel?.id]) // Check for mmproj existence or vision capability when model changes useEffect(() => { @@ -171,7 +177,12 @@ const ChatInput = ({ // Check if there are active MCP servers const 
hasActiveMCPServers = connectedServers.length > 0 || tools.length > 0 - const handleSendMesage = (prompt: string) => { + // Get MCP extension and its custom component + const extensionManager = ExtensionManager.getInstance() + const mcpExtension = extensionManager.get(ExtensionTypeEnum.MCP) + const MCPToolComponent = mcpExtension?.getToolComponent?.() + + const handleSendMessage = async (prompt: string) => { if (!selectedModel) { setMessage('Please select a model to start chatting.') return @@ -180,34 +191,26 @@ const ChatInput = ({ return } setMessage('') + + // Track message send event with PostHog (only if product analytics is enabled) + if (productAnalytic && selectedModel && selectedProvider) { + try { + posthog.capture('message_sent', { + model_provider: selectedProvider, + model_id: selectedModel.id, + }) + } catch (error) { + console.debug('Failed to track message send event:', error) + } + } + sendMessage( prompt, true, - uploadedFiles.length > 0 ? uploadedFiles : undefined + uploadedFiles.length > 0 ? uploadedFiles : undefined, + projectId ) setUploadedFiles([]) - - // Handle project assignment for new threads - if (projectId && !currentThreadId) { - const project = getFolderById(projectId) - if (project) { - // Use setTimeout to ensure the thread is created first - setTimeout(() => { - const newCurrentThreadId = useThreads.getState().currentThreadId - if (newCurrentThreadId) { - updateThread(newCurrentThreadId, { - metadata: { - project: { - id: project.id, - name: project.name, - updated_at: project.updated_at, - }, - }, - }) - } - }, 100) - } - } } useEffect(() => { @@ -630,7 +633,7 @@ const ChatInput = ({ ) { e.preventDefault() // Submit the message when Enter is pressed without Shift - handleSendMesage(prompt) + handleSendMessage(prompt) // When Shift+Enter is pressed, a new line is added (default behavior) } }} @@ -718,7 +721,20 @@ const ChatInput = ({ )} {selectedModel?.capabilities?.includes('tools') && - hasActiveMCPServers && ( + hasActiveMCPServers && + (MCPToolComponent ? ( + // Use custom MCP component + + ) : ( + // Use default tools dropdown - )} + ))} {selectedModel?.capabilities?.includes('web_search') && ( @@ -846,7 +862,7 @@ const ChatInput = ({ size="icon" disabled={!prompt.trim() && uploadedFiles.length === 0} data-test-id="send-message-button" - onClick={() => handleSendMesage(prompt)} + onClick={() => handleSendMessage(prompt)} > {streamingContent ? ( diff --git a/web-app/src/containers/ColorPickerAppBgColor.tsx b/web-app/src/containers/ColorPickerAppBgColor.tsx index 72e098aa4..c60b34f13 100644 --- a/web-app/src/containers/ColorPickerAppBgColor.tsx +++ b/web-app/src/containers/ColorPickerAppBgColor.tsx @@ -1,4 +1,4 @@ -import { useAppearance, isDefaultColor } from '@/hooks/useAppearance' +import { useAppearance, useBlurSupport } from '@/hooks/useAppearance' import { cn } from '@/lib/utils' import { RgbaColor, RgbaColorPicker } from 'react-colorful' import { IconColorPicker } from '@tabler/icons-react' @@ -14,6 +14,12 @@ export function ColorPickerAppBgColor() { const { appBgColor, setAppBgColor } = useAppearance() const { isDark } = useTheme() const { t } = useTranslation() + const showAlphaSlider = useBlurSupport() + + // Helper to get alpha value based on blur support + const getAlpha = (defaultAlpha: number) => { + return showAlphaSlider ? defaultAlpha : 1 + } const predefineAppBgColor: RgbaColor[] = [ isDark @@ -21,55 +27,64 @@ export function ColorPickerAppBgColor() { r: 25, g: 25, b: 25, - a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 
1 : 0.4, + a: getAlpha(0.4), } : { r: 255, g: 255, b: 255, - a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.4, + a: getAlpha(0.4), }, { r: 70, g: 79, b: 229, - a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.5, + a: getAlpha(0.5), }, { r: 238, g: 130, b: 238, - a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.5, + a: getAlpha(0.5), }, { r: 255, g: 99, b: 71, - a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.5, + a: getAlpha(0.5), }, { r: 255, g: 165, b: 0, - a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.5, + a: getAlpha(0.5), }, ] + // Check if a color is the default color (considering both dark and light themes) + const isColorDefault = (color: RgbaColor): boolean => { + const isDarkDefault = color.r === 25 && color.g === 25 && color.b === 25 + const isLightDefault = color.r === 255 && color.g === 255 && color.b === 255 + // Accept both 0.4 and 1 as valid default alpha values (handles blur detection timing) + const hasDefaultAlpha = Math.abs(color.a - 0.4) < 0.01 || Math.abs(color.a - 1) < 0.01 + return (isDarkDefault || isLightDefault) && hasDefaultAlpha + } + return (
    {predefineAppBgColor.map((item, i) => { const isSelected = (item.r === appBgColor.r && - item.g === appBgColor.g && - item.b === appBgColor.b && - item.a === appBgColor.a) || - (isDefaultColor(appBgColor) && isDefaultColor(item)) + item.g === appBgColor.g && + item.b === appBgColor.b && + Math.abs(item.a - appBgColor.a) < 0.01) || + (isColorDefault(appBgColor) && isColorDefault(item)) return (
    { diff --git a/web-app/src/containers/DropdownAssistant.tsx b/web-app/src/containers/DropdownAssistant.tsx index a75925002..e474df9ed 100644 --- a/web-app/src/containers/DropdownAssistant.tsx +++ b/web-app/src/containers/DropdownAssistant.tsx @@ -33,7 +33,7 @@ const DropdownAssistant = () => { return ( <> -
    +
    - {searchableModel.model.id} + {getModelDisplayName(searchableModel.model)}
    {capabilities.length > 0 && ( @@ -669,7 +668,7 @@ const DropdownModelProvider = ({ className="text-main-view-fg/80 text-sm" title={searchableModel.model.id} > - {searchableModel.model.id} + {getModelDisplayName(searchableModel.model)}
    {capabilities.length > 0 && ( diff --git a/web-app/src/containers/HeaderPage.tsx b/web-app/src/containers/HeaderPage.tsx index 7c47e9273..91dbe4c7a 100644 --- a/web-app/src/containers/HeaderPage.tsx +++ b/web-app/src/containers/HeaderPage.tsx @@ -1,27 +1,69 @@ import { useLeftPanel } from '@/hooks/useLeftPanel' import { cn } from '@/lib/utils' -import { IconLayoutSidebar } from '@tabler/icons-react' -import { ReactNode } from '@tanstack/react-router' +import { useMobileScreen, useSmallScreen } from '@/hooks/useMediaQuery' +import { IconLayoutSidebar, IconMessage, IconMessageFilled } from '@tabler/icons-react' +import { ReactNode } from 'react' +import { useRouter } from '@tanstack/react-router' +import { route } from '@/constants/routes' +import { PlatformFeatures } from '@/lib/platform/const' +import { PlatformFeature } from '@/lib/platform/types' +import { TEMPORARY_CHAT_QUERY_ID } from '@/constants/chat' type HeaderPageProps = { children?: ReactNode } const HeaderPage = ({ children }: HeaderPageProps) => { const { open, setLeftPanel } = useLeftPanel() + const isMobile = useMobileScreen() + const isSmallScreen = useSmallScreen() + const router = useRouter() + const currentPath = router.state.location.pathname + + const isHomePage = currentPath === route.home + + // Parse temporary chat flag from URL search params directly to avoid invariant errors + const searchString = window.location.search + const urlSearchParams = new URLSearchParams(searchString) + const isTemporaryChat = isHomePage && urlSearchParams.get(TEMPORARY_CHAT_QUERY_ID) === 'true' + + const handleChatToggle = () => { + console.log('Chat toggle clicked!', { isTemporaryChat, isHomePage, currentPath }) + if (isHomePage) { + if (isTemporaryChat) { + console.log('Switching to regular chat') + router.navigate({ to: route.home, search: {} }) + } else { + console.log('Switching to temporary chat') + router.navigate({ to: route.home, search: { [TEMPORARY_CHAT_QUERY_ID]: true } }) + } + } + } return (
    -
    +
    {!open && ( )} - {children} +
    + {children} +
    + + {/* Temporary Chat Toggle - Only show on home page if feature is enabled */} + {PlatformFeatures[PlatformFeature.TEMPORARY_CHAT] && isHomePage && ( +
    + +
    + )}
    ) diff --git a/web-app/src/containers/LeftPanel.tsx b/web-app/src/containers/LeftPanel.tsx index 24f3bf911..8fe4b3c24 100644 --- a/web-app/src/containers/LeftPanel.tsx +++ b/web-app/src/containers/LeftPanel.tsx @@ -1,4 +1,4 @@ -import { Link, useRouterState } from '@tanstack/react-router' +import { Link, useRouterState, useNavigate } from '@tanstack/react-router' import { useLeftPanel } from '@/hooks/useLeftPanel' import { cn } from '@/lib/utils' import { @@ -56,8 +56,11 @@ const mainMenus = [ title: 'common:projects.title', icon: IconFolderPlus, route: route.project, - isEnabled: true, + isEnabled: !(IS_IOS || IS_ANDROID), }, +] + +const secondaryMenus = [ { title: 'common:assistants', icon: IconClipboardSmile, @@ -82,6 +85,7 @@ const LeftPanel = () => { const open = useLeftPanel((state) => state.open) const setLeftPanel = useLeftPanel((state) => state.setLeftPanel) const { t } = useTranslation() + const navigate = useNavigate() const [searchTerm, setSearchTerm] = useState('') const { isAuthenticated } = useAuth() @@ -159,7 +163,7 @@ const LeftPanel = () => { const getFilteredThreads = useThreads((state) => state.getFilteredThreads) const threads = useThreads((state) => state.threads) - const { folders, addFolder, updateFolder, deleteFolder, getFolderById } = + const { folders, addFolder, updateFolder, getFolderById } = useThreadManagement() // Project dialog states @@ -200,19 +204,21 @@ const LeftPanel = () => { setDeleteProjectConfirmOpen(true) } - const confirmProjectDelete = () => { - if (deletingProjectId) { - deleteFolder(deletingProjectId) - setDeleteProjectConfirmOpen(false) - setDeletingProjectId(null) - } + const handleProjectDeleteClose = () => { + setDeleteProjectConfirmOpen(false) + setDeletingProjectId(null) } - const handleProjectSave = (name: string) => { + const handleProjectSave = async (name: string) => { if (editingProjectKey) { - updateFolder(editingProjectKey, name) + await updateFolder(editingProjectKey, name) } else { - addFolder(name) + const newProject = await addFolder(name) + // Navigate to the newly created project + navigate({ + to: '/project/$projectId', + params: { projectId: newProject.id }, + }) } setProjectDialogOpen(false) setEditingProjectKey(null) @@ -234,7 +240,7 @@ const LeftPanel = () => { return ( <> {/* Backdrop overlay for small screens */} - {isSmallScreen && open && ( + {isSmallScreen && open && !IS_IOS && !IS_ANDROID && (
    { @@ -257,7 +263,7 @@ const LeftPanel = () => { isResizableContext && 'h-full w-full', // Small screen context: fixed positioning and styling isSmallScreen && - 'fixed h-[calc(100%-16px)] bg-app z-50 rounded-sm border border-left-panel-fg/10 m-2 px-1 w-48', + 'fixed h-full pb-[calc(env(safe-area-inset-bottom)+env(safe-area-inset-top))] bg-main-view z-50 md:border border-left-panel-fg/10 px-1 w-full md:w-48', // Default context: original styling !isResizableContext && !isSmallScreen && @@ -396,7 +402,7 @@ const LeftPanel = () => { })}
    - {filteredProjects.length > 0 && ( + {filteredProjects.length > 0 && !(IS_IOS || IS_ANDROID) && (
    @@ -487,7 +493,7 @@ const LeftPanel = () => { )}
    -
    +
    {favoritedThreads.length > 0 && ( <> @@ -574,6 +580,10 @@ const LeftPanel = () => { {filteredThreads.length === 0 && searchTerm.length > 0 && (
    + + {t('common:recents')} + +
    @@ -607,6 +617,44 @@ const LeftPanel = () => {
    + + {secondaryMenus.map((menu) => { + if (!menu.isEnabled) { + return null + } + + // Regular menu items must have route and icon + if (!menu.route || !menu.icon) return null + + const isActive = (() => { + // Settings routes + if (menu.route.includes(route.settings.index)) { + return currentPath.includes(route.settings.index) + } + + // Default exact match for other routes + return currentPath === menu.route + })() + return ( + isSmallScreen && setLeftPanel(false)} + data-test-id={`menu-${menu.title}`} + activeOptions={{ exact: true }} + className={cn( + 'flex items-center gap-1.5 cursor-pointer hover:bg-left-panel-fg/10 py-1 my-0.5 px-1 rounded', + isActive && 'bg-left-panel-fg/10' + )} + > + + + {t(menu.title)} + + + ) + })} + {PlatformFeatures[PlatformFeature.AUTHENTICATION] && (
@@ -633,8 +681,8 @@ const LeftPanel = () => {
             />
 | null
+}
+
+export const McpExtensionToolLoader = ({
+  tools,
+  hasActiveMCPServers,
+  selectedModelHasTools,
+  initialMessage,
+  MCPToolComponent,
+}: McpExtensionToolLoaderProps) => {
+  // Get tool management hooks
+  const { isToolDisabled, setToolDisabledForThread, setDefaultDisabledTools, getDefaultDisabledTools } = useToolAvailable()
+  const { getCurrentThread } = useThreads()
+  const currentThread = getCurrentThread()
+
+  // Handle tool toggle for custom component
+  const handleToolToggle = (toolName: string, enabled: boolean) => {
+    if (initialMessage) {
+      const currentDefaults = getDefaultDisabledTools()
+      if (enabled) {
+        setDefaultDisabledTools(currentDefaults.filter((name) => name !== toolName))
+      } else {
+        setDefaultDisabledTools([...currentDefaults, toolName])
+      }
+    } else if (currentThread?.id) {
+      setToolDisabledForThread(currentThread.id, toolName, enabled)
+    }
+  }
+
+  const isToolEnabled = (toolName: string): boolean => {
+    if (initialMessage) {
+      return !getDefaultDisabledTools().includes(toolName)
+    } else if (currentThread?.id) {
+      return !isToolDisabled(currentThread.id, toolName)
+    }
+    return false
+  }
+
+  // Only render if we have the custom MCP component and conditions are met
+  if (!selectedModelHasTools || !hasActiveMCPServers || !MCPToolComponent) {
+    return null
+  }
+
+  return (
+
+  )
+}
diff --git a/web-app/src/containers/ModelSetting.tsx b/web-app/src/containers/ModelSetting.tsx
index 9a3bfd814..3f3391d51 100644
--- a/web-app/src/containers/ModelSetting.tsx
+++ b/web-app/src/containers/ModelSetting.tsx
@@ -14,7 +14,7 @@ import { Button } from '@/components/ui/button'
 import { DynamicControllerSetting } from '@/containers/dynamicControllerSetting'
 import { useModelProvider } from '@/hooks/useModelProvider'
 import { useServiceHub } from '@/hooks/useServiceHub'
-import { cn } from '@/lib/utils'
+import { cn, getModelDisplayName } from '@/lib/utils'
 import { useTranslation } from '@/i18n/react-i18next-compat'
 
 type ModelSettingProps = {
@@ -171,7 +171,9 @@ export function ModelSetting({
       key === 'ngl' ||
       key === 'chat_template' ||
       key === 'offload_mmproj' ||
-      key === 'batch_size'
+      key === 'batch_size' ||
+      key === 'cpu_moe' ||
+      key === 'n_cpu_moe'
     )
 
     if (requiresRestart) {
@@ -231,7 +233,9 @@ export function ModelSetting({
       key === 'ngl' ||
       key === 'chat_template' ||
       key === 'offload_mmproj' ||
-      key === 'batch_size'
+      key === 'batch_size' ||
+      key === 'cpu_moe' ||
+      key === 'n_cpu_moe'
     ) {
       // Check if model is running before stopping it
       serviceHub
@@ -261,7 +265,9 @@ export function ModelSetting({
-          {t('common:modelSettings.title', { modelId: model.id })}
+          {t('common:modelSettings.title', {
+            modelId: getModelDisplayName(model),
+          })}
           {t('common:modelSettings.description')}
diff --git a/web-app/src/containers/RenderMarkdown.tsx b/web-app/src/containers/RenderMarkdown.tsx
index 31d08cf10..c941b512d 100644
--- a/web-app/src/containers/RenderMarkdown.tsx
+++ b/web-app/src/containers/RenderMarkdown.tsx
@@ -1,4 +1,3 @@
-/* eslint-disable react-hooks/exhaustive-deps */
 import ReactMarkdown, { Components } from 'react-markdown'
 import remarkGfm from 'remark-gfm'
 import remarkEmoji from 'remark-emoji'
diff --git a/web-app/src/containers/SettingsMenu.tsx b/web-app/src/containers/SettingsMenu.tsx
index da0e94870..78389233d 100644
--- a/web-app/src/containers/SettingsMenu.tsx
+++ b/web-app/src/containers/SettingsMenu.tsx
@@ -30,12 +30,15 @@ const SettingsMenu = () => {
   // On web: exclude llamacpp provider as it's not available
   const activeProviders = providers.filter((provider) => {
     if (!provider.active) return false
-
+
     // On web version, hide llamacpp provider
-    if (!PlatformFeatures[PlatformFeature.LOCAL_INFERENCE] && provider.provider === 'llama.cpp') {
+    if (
+      !PlatformFeatures[PlatformFeature.LOCAL_INFERENCE] &&
+      provider.provider === 'llama.cpp'
+    ) {
       return false
     }
-
+
     return true
   })
@@ -92,7 +95,7 @@ const SettingsMenu = () => {
       title: 'common:keyboardShortcuts',
       route: route.settings.shortcuts,
       hasSubMenu: false,
-      isEnabled: true,
+      isEnabled: PlatformFeatures[PlatformFeature.SHORTCUT],
     },
     {
       title: 'common:hardware',
@@ -137,7 +140,7 @@ const SettingsMenu = () => {
   return (
     <>
-          )}
-
    - +
    + +
    + + {t(menu.title)} + + {menu.hasSubMenu && ( + + )} +
    + - {/* Sub-menu for model providers */} - {menu.hasSubMenu && expandedProviders && ( -
    - {activeProviders.map((provider) => { - const isActive = matches.some( - (match) => - match.routeId === '/settings/providers/$providerName' && - 'providerName' in match.params && - match.params.providerName === provider.provider - ) + {/* Sub-menu for model providers */} + {menu.hasSubMenu && expandedProviders && ( +
    + {activeProviders.map((provider) => { + const isActive = matches.some( + (match) => + match.routeId === + '/settings/providers/$providerName' && + 'providerName' in match.params && + match.params.providerName === provider.provider + ) - return ( -
    - - )} -
    + ) + })} +
    + )} +
    ) })}
diff --git a/web-app/src/containers/SetupScreen.tsx b/web-app/src/containers/SetupScreen.tsx
index bce474836..dadc16362 100644
--- a/web-app/src/containers/SetupScreen.tsx
+++ b/web-app/src/containers/SetupScreen.tsx
@@ -6,6 +6,8 @@ import HeaderPage from './HeaderPage'
 import { isProd } from '@/lib/version'
 import { useTranslation } from '@/i18n/react-i18next-compat'
 import { localStorageKey } from '@/constants/localStorage'
+import { PlatformFeatures } from '@/lib/platform/const'
+import { PlatformFeature } from '@/lib/platform'
 
 function SetupScreen() {
   const { t } = useTranslation()
@@ -21,7 +23,7 @@ function SetupScreen() {
    -
    +

    {t('setup:welcome')} @@ -31,22 +33,24 @@ function SetupScreen() {

    - -
    -

    - {t('setup:localModel')} -

    -
    - - } - >
    + {PlatformFeatures[PlatformFeature.LOCAL_INFERENCE] && ( + +
    +

    + {t('setup:localModel')} +

    +
    + + } + /> + )} } - > + />
diff --git a/web-app/src/containers/ThinkingBlock.tsx b/web-app/src/containers/ThinkingBlock.tsx
index 68ab8644f..211fda9ff 100644
--- a/web-app/src/containers/ThinkingBlock.tsx
+++ b/web-app/src/containers/ThinkingBlock.tsx
@@ -3,6 +3,7 @@ import { create } from 'zustand'
 import { RenderMarkdown } from './RenderMarkdown'
 import { useAppState } from '@/hooks/useAppState'
 import { useTranslation } from '@/i18n/react-i18next-compat'
+import { extractThinkingContent } from '@/lib/utils'
 
 interface Props {
   text: string
@@ -43,19 +44,6 @@ const ThinkingBlock = ({ id, text }: Props) => {
     setThinkingState(id, newExpandedState)
   }
 
-  // Extract thinking content from either format
-  const extractThinkingContent = (text: string) => {
-    return text
-      .replace(/<\/?think>/g, '')
-      .replace(/<\|channel\|>analysis<\|message\|>/g, '')
-      .replace(/<\|start\|>assistant<\|channel\|>final<\|message\|>/g, '')
-      .replace(/assistant<\|channel\|>final<\|message\|>/g, '')
-      .replace(/<\|channel\|>/g, '') // remove any remaining channel markers
-      .replace(/<\|message\|>/g, '') // remove any remaining message markers
-      .replace(/<\|start\|>/g, '') // remove any remaining start markers
-      .trim()
-  }
-
   const thinkingContent = extractThinkingContent(text)
 
   if (!thinkingContent) return null
diff --git a/web-app/src/containers/ThreadList.tsx b/web-app/src/containers/ThreadList.tsx
index b58d1872a..e48a2373d 100644
--- a/web-app/src/containers/ThreadList.tsx
+++ b/web-app/src/containers/ThreadList.tsx
@@ -23,7 +23,7 @@ import { useThreads } from '@/hooks/useThreads'
 import { useThreadManagement } from '@/hooks/useThreadManagement'
 import { useLeftPanel } from '@/hooks/useLeftPanel'
 import { useMessages } from '@/hooks/useMessages'
-import { cn } from '@/lib/utils'
+import { cn, extractThinkingContent } from '@/lib/utils'
 import { useSmallScreen } from '@/hooks/useMediaQuery'
 
 import {
@@ -47,9 +47,11 @@ const SortableItem = memo(
   ({
     thread,
     variant,
+    currentProjectId,
   }: {
     thread: Thread
     variant?: 'default' | 'project'
+    currentProjectId?: string
   }) => {
     const {
       attributes,
@@ -108,6 +110,18 @@ const SortableItem = memo(
       return (thread.title || '').replace(/]*>|<\/span>/g, '')
     }, [thread.title])
 
+    const availableProjects = useMemo(() => {
+      return folders
+        .filter((f) => {
+          // Exclude the current project page we're on
+          if (f.id === currentProjectId) return false
+          // Exclude the project this thread is already assigned to
+          if (f.id === thread.metadata?.project?.id) return false
+          return true
+        })
+        .sort((a, b) => b.updated_at - a.updated_at)
+    }, [folders, currentProjectId, thread.metadata?.project?.id])
+
     const assignThreadToProject = (threadId: string, projectId: string) => {
       const project = getFolderById(projectId)
       if (project && updateThread) {
@@ -167,14 +181,10 @@ const SortableItem = memo(
             )}
           >
             {thread.title || t('common:newThread')}
-            {variant === 'project' && (
-              <>
-                {variant === 'project' && getLastMessageInfo?.content && (
-                    {getLastMessageInfo.content}
-
-                )}
-
+            {variant === 'project' && getLastMessageInfo?.content && (
+
+                {extractThinkingContent(getLastMessageInfo.content)}
+
+            )}
@@ -185,14 +195,17 @@ const SortableItem = memo(
                  {
                  e.preventDefault()
                  e.stopPropagation()
                }}
              />
-
+
              {thread.isFavorite ? (
                {
@@ -224,59 +237,57 @@ const SortableItem = memo(
-                    Add to project
+                    {t('common:projects.addToProject')}
-
-                    {folders.length === 0 ? (
+
+                    {availableProjects.length === 0 ? (
-                        No projects available
+                        {t('common:projects.noProjectsAvailable')}
                      ) : (
-                      folders
-                        .sort((a, b) => b.updated_at - a.updated_at)
-                        .map((folder) => (
-                          {
-                            e.stopPropagation()
-                            assignThreadToProject(thread.id, folder.id)
-                          }}
-                        >
-
-
-                            {folder.name}
-
-
-                        ))
-                    )}
-                    {thread.metadata?.project && (
-                      <>
-
+                      availableProjects.map((folder) => (
                          {
                            e.stopPropagation()
-                            // Remove project from metadata
-                            const projectName = thread.metadata?.project?.name
-                            updateThread(thread.id, {
-                              metadata: {
-                                ...thread.metadata,
-                                project: undefined,
-                              },
-                            })
-                            toast.success(
-                              `Thread removed from "${projectName}" successfully`
-                            )
+                            assignThreadToProject(thread.id, folder.id)
                          }}
                        >
-
-                          Remove from project
+
+
+                            {folder.name}
+
-
+                      ))
                    )}
+                    {thread.metadata?.project && (
+                      <>
+
+                        {
+                          e.stopPropagation()
+                          // Remove project from metadata
+                          const projectName = thread.metadata?.project?.name
+                          updateThread(thread.id, {
+                            metadata: {
+                              ...thread.metadata,
+                              project: undefined,
+                            },
+                          })
+                          toast.success(
+                            `Thread removed from "${projectName}" successfully`
+                          )
+                        }}
+                      >
+
+                          Remove from project
+
+
+                    )}
                    {
   return threads.sort((a, b) => {
     return (b.updated || 0) - (a.updated || 0)
@@ -323,7 +339,12 @@ function ThreadList({ threads, variant = 'default' }: ThreadListProps) {
           strategy={verticalListSortingStrategy}
         >
           {sortedThreads.map((thread, index) => (
-
+
           ))}
diff --git a/web-app/src/containers/ThreadPadding.tsx b/web-app/src/containers/ThreadPadding.tsx
new file mode 100644
index 000000000..3f4c725c3
--- /dev/null
+++ b/web-app/src/containers/ThreadPadding.tsx
@@ -0,0 +1,19 @@
+import { useThreadScrolling } from '@/hooks/useThreadScrolling'
+
+export const ThreadPadding = ({
+  threadId,
+  scrollContainerRef,
+}: {
+  threadId: string
+  scrollContainerRef: React.RefObject
+}) => {
+  // Get padding height for ChatGPT-style message positioning
+  const { paddingHeight } = useThreadScrolling(threadId, scrollContainerRef)
+  return (
+  )
+}
diff --git a/web-app/src/containers/__tests__/AvatarEmoji.test.tsx b/web-app/src/containers/__tests__/AvatarEmoji.test.tsx
index ea44b95a0..e0ec9488a 100644
--- a/web-app/src/containers/__tests__/AvatarEmoji.test.tsx
+++ b/web-app/src/containers/__tests__/AvatarEmoji.test.tsx
@@ -121,4 +121,4 @@ describe('AvatarEmoji Component', () => {
     const img = screen.getByRole('img')
     expect(img).toHaveAttribute('alt', 'Custom avatar')
   })
-})
\ No newline at end of file
+})
diff --git a/web-app/src/containers/__tests__/ChatInput.simple.test.tsx b/web-app/src/containers/__tests__/ChatInput.simple.test.tsx
index a1c71baa8..8992edbc0 100644
--- a/web-app/src/containers/__tests__/ChatInput.simple.test.tsx
+++ b/web-app/src/containers/__tests__/ChatInput.simple.test.tsx
@@ -36,4 +36,4 @@ describe('ChatInput Simple Tests', () => {
     const sendButton = screen.getByTestId('send-message-button')
     expect(sendButton).toHaveTextContent('Send')
   })
-})
\ No newline at end of file
+})
diff --git a/web-app/src/containers/__tests__/ChatInput.test.tsx b/web-app/src/containers/__tests__/ChatInput.test.tsx
index 50b0b6172..1d296e15d 100644
--- a/web-app/src/containers/__tests__/ChatInput.test.tsx
+++ b/web-app/src/containers/__tests__/ChatInput.test.tsx
@@ -9,6 +9,7 @@ import { useAppState } from '@/hooks/useAppState'
 import { useGeneralSetting } from '@/hooks/useGeneralSetting'
 import { useModelProvider } from '@/hooks/useModelProvider'
 import { useChat } from '@/hooks/useChat'
+import type { ThreadModel } from '@/types/threads'
 
 // Mock dependencies with mutable state
 let mockPromptState = {
@@ -138,18 +139,70 @@ vi.mock('../MovingBorder', () => ({
 vi.mock('../DropdownModelProvider', () => ({
   __esModule: true,
-  default: () =>
    Model Dropdown
    , + default: () =>
    Model Dropdown
    , +})) + +vi.mock('../loaders/ModelLoader', () => ({ + ModelLoader: () =>
    Model Loader
    , })) vi.mock('../DropdownToolsAvailable', () => ({ __esModule: true, default: ({ children }: { children: (isOpen: boolean, toolsCount: number) => React.ReactNode }) => { - return
    {children(false, 0)}
    + return
    {children(false, 0)}
    }, })) -vi.mock('../loaders/ModelLoader', () => ({ - ModelLoader: () =>
    Loading...
    , +vi.mock('@/components/ui/button', () => ({ + Button: ({ children, onClick, disabled, ...props }: any) => ( + + ), +})) + +vi.mock('@/components/ui/tooltip', () => ({ + Tooltip: ({ children }: { children: React.ReactNode }) =>
    {children}
    , + TooltipContent: ({ children }: { children: React.ReactNode }) =>
    {children}
    , + TooltipProvider: ({ children }: { children: React.ReactNode }) =>
    {children}
    , + TooltipTrigger: ({ children }: { children: React.ReactNode }) =>
    {children}
    , +})) + +vi.mock('react-textarea-autosize', () => ({ + default: ({ value, onChange, onKeyDown, placeholder, disabled, className, minRows, maxRows, onHeightChange, ...props }: any) => ( +