diff --git a/.github/ISSUE_TEMPLATE/3-epic.md b/.github/ISSUE_TEMPLATE/3-epic.md index afffc6b5c..3e05d8dd6 100644 --- a/.github/ISSUE_TEMPLATE/3-epic.md +++ b/.github/ISSUE_TEMPLATE/3-epic.md @@ -1,12 +1,27 @@ --- name: 🌟 Epic -about: Major building block that advances Jan's goals +about: User stories and specs title: 'epic: ' type: Epic --- -## Goal +## User Stories -## Tasklist +- As a [user type], I can [do something] so that [outcome] -## Out of scope +## Not in scope + +- + +## User Flows & Designs + +- Key user flows +- Figma link +- Edge cases +- Error states + +## Engineering Decisions + +- **Technical Approach:** Brief outline of the solution. +- **Key Trade-offs:** What’s been considered/rejected and why. +- **Dependencies:** APIs, services, libraries, teams. diff --git a/.github/ISSUE_TEMPLATE/4-goal.md b/.github/ISSUE_TEMPLATE/4-goal.md index 28b32382a..8d649c281 100644 --- a/.github/ISSUE_TEMPLATE/4-goal.md +++ b/.github/ISSUE_TEMPLATE/4-goal.md @@ -1,13 +1,38 @@ --- name: 🎯 Goal -about: External communication of Jan's roadmap and objectives +about: Roadmap goals for our users title: 'goal: ' type: Goal --- ## Goal -## Tasklist +> Why are we doing this? 1 liner value proposition -## Out of scope +_e.g. Make onboarding to Jan 3x easier_ +## Success Criteria + +> When do we consider this done? Limit to 3. + +1. _e.g. Redesign onboarding flow to remove redundant steps._ +2. _e.g. Add a “getting started” guide_ +3. _e.g. Make local model setup more “normie” friendly_ + +## Non Goals + +> What is out of scope? + +- _e.g. Take advanced users through customizing settings_ + +## User research (if any) + +> Links to user messages and interviews + +## Design inspo + +> Links + +## Open questions + +> What are we not sure about? 
diff --git a/.github/workflows/jan-docs.yml b/.github/workflows/jan-docs.yml index f64f93f66..3e92903c5 100644 --- a/.github/workflows/jan-docs.yml +++ b/.github/workflows/jan-docs.yml @@ -76,7 +76,7 @@ jobs: Preview URL: ${{ steps.deployCloudflarePages.outputs.url }} - name: Publish to Cloudflare Pages Production - if: (github.event_name == 'push' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/dev') + if: (github.event_name == 'push' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && startsWith(github.ref, 'refs/heads/release/')) uses: cloudflare/pages-action@v1 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} diff --git a/.github/workflows/jan-linter-and-test.yml b/.github/workflows/jan-linter-and-test.yml index e2f437837..cfcfe78ed 100644 --- a/.github/workflows/jan-linter-and-test.yml +++ b/.github/workflows/jan-linter-and-test.yml @@ -134,7 +134,7 @@ jobs: test-on-windows-pr: if: github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch' - runs-on: ${{ (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) && 'windows-latest' || 'WINDOWS-11' }} + runs-on: 'windows-latest' steps: - name: Getting the repo uses: actions/checkout@v3 diff --git a/.github/workflows/jan-tauri-build-flatpak.yaml b/.github/workflows/jan-tauri-build-flatpak.yaml new file mode 100644 index 000000000..2eded766a --- /dev/null +++ b/.github/workflows/jan-tauri-build-flatpak.yaml @@ -0,0 +1,20 @@ +name: Tauri Builder Flatpak + +on: + workflow_dispatch: + inputs: + version: + description: 'Version to build. 
For example: 0.6.8' + required: false + +jobs: + + build-linux-x64: + uses: ./.github/workflows/template-tauri-build-linux-x64-flatpak.yml + secrets: inherit + with: + ref: ${{ github.ref }} + public_provider: none + channel: stable + new_version: ${{ inputs.version }} + disable_updater: true \ No newline at end of file diff --git a/.github/workflows/jan-tauri-build-nightly.yaml b/.github/workflows/jan-tauri-build-nightly.yaml index 106174eff..65a035c38 100644 --- a/.github/workflows/jan-tauri-build-nightly.yaml +++ b/.github/workflows/jan-tauri-build-nightly.yaml @@ -12,10 +12,31 @@ on: - none - aws-s3 default: none + disable_updater: + type: boolean + description: 'If true, builds both .deb and .appimage but disables auto-updater' + default: false pull_request: branches: - release/** - dev + paths: + - '.github/workflows/jan-tauri-build-nightly.yaml' + - '.github/workflows/template-get-update-version.yml' + - '.github/workflows/template-tauri-build-macos.yml' + - '.github/workflows/template-tauri-build-windows-x64.yml' + - '.github/workflows/template-tauri-build-linux-x64.yml' + - '.github/workflows/template-noti-discord-and-update-url-readme.yml' + - 'src-tauri/**' + - 'core/**' + - 'web-app/**' + - 'extensions/**' + - 'scripts/**' + - 'pre-install/**' + - 'Makefile' + - 'package.json' + - 'mise.toml' + jobs: set-public-provider: @@ -85,6 +106,7 @@ jobs: new_version: ${{ needs.get-update-version.outputs.new_version }} channel: nightly cortex_api_port: '39261' + disable_updater: ${{ github.event.inputs.disable_updater == 'true' }} sync-temp-to-latest: needs: diff --git a/.github/workflows/jan-tauri-build.yaml b/.github/workflows/jan-tauri-build.yaml index 1dc22f2e4..95838e982 100644 --- a/.github/workflows/jan-tauri-build.yaml +++ b/.github/workflows/jan-tauri-build.yaml @@ -32,6 +32,7 @@ jobs: name: "${{ env.VERSION }}" draft: true prerelease: false + generate_release_notes: true build-macos: uses: ./.github/workflows/template-tauri-build-macos.yml @@ -118,28 
+119,4 @@ jobs: upload_url: ${{ needs.create-draft-release.outputs.upload_url }} asset_path: ./latest.json asset_name: latest.json - asset_content_type: text/json - - update_release_draft: - needs: [build-macos, build-windows-x64, build-linux-x64] - permissions: - # write permission is required to create a github release - contents: write - # write permission is required for autolabeler - # otherwise, read permission is required at least - pull-requests: write - runs-on: ubuntu-latest - steps: - # (Optional) GitHub Enterprise requires GHE_HOST variable set - #- name: Set GHE_HOST - # run: | - # echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV - - # Drafts your next Release notes as Pull Requests are merged into "master" - - uses: release-drafter/release-drafter@v5 - # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml - # with: - # config-name: my-config.yml - # disable-autolabeler: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file + asset_content_type: text/json \ No newline at end of file diff --git a/.github/workflows/template-tauri-build-linux-x64-flatpak.yml b/.github/workflows/template-tauri-build-linux-x64-flatpak.yml new file mode 100644 index 000000000..2807a74ae --- /dev/null +++ b/.github/workflows/template-tauri-build-linux-x64-flatpak.yml @@ -0,0 +1,164 @@ +name: tauri-build-linux-x64-flatpak +on: + workflow_call: + inputs: + ref: + required: true + type: string + default: 'refs/heads/main' + public_provider: + required: true + type: string + default: none + description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3' + new_version: + required: true + type: string + default: '' + cortex_api_port: + required: false + type: string + default: '' + upload_url: + required: false + type: string + default: '' + channel: + required: true + type: string + default: 'nightly' + description: 'The channel to use for this job' + 
disable_updater: + required: false + type: boolean + default: false + description: 'If true, builds both .deb and .appimage but disables auto-updater' + secrets: + DELTA_AWS_S3_BUCKET_NAME: + required: false + DELTA_AWS_ACCESS_KEY_ID: + required: false + DELTA_AWS_SECRET_ACCESS_KEY: + required: false + TAURI_SIGNING_PRIVATE_KEY: + required: false + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: + required: false +jobs: + build-linux-x64: + runs-on: ubuntu-22.04 + permissions: + contents: write + steps: + - name: Getting the repo + uses: actions/checkout@v3 + with: + ref: ${{ inputs.ref }} + + - name: Free Disk Space Before Build + run: | + echo "Disk space before cleanup:" + df -h + sudo rm -rf /usr/local/.ghcup + sudo rm -rf /opt/hostedtoolcache/CodeQL + sudo rm -rf /usr/local/lib/android/sdk/ndk + sudo rm -rf /usr/share/dotnet + sudo rm -rf /opt/ghc + sudo rm -rf /usr/local/share/boost + sudo apt-get clean + echo "Disk space after cleanup:" + df -h + + - name: Installing node + uses: actions/setup-node@v1 + with: + node-version: 20 + + - name: Install jq + uses: dcarbone/install-jq-action@v2.0.1 + + - name: Install ctoml + run: | + cargo install ctoml + + - name: Install Tauri dependencies + run: | + sudo apt update + sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2 + + - name: Update app version base public_provider + run: | + echo "Version: ${{ inputs.new_version }}" + # Update tauri.conf.json + jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + if [ "${{ inputs.channel }}" != "stable" ]; then + jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun", + "usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json + mv 
/tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json + fi + jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json + mv /tmp/package.json web-app/package.json + + # Temporarily enable devtool on prod build + ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools" + cat ./src-tauri/Cargo.toml + + ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}" + cat ./src-tauri/Cargo.toml + + # Change app name for beta and nightly builds + if [ "${{ inputs.channel }}" != "stable" ]; then + jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + + chmod +x .github/scripts/rename-tauri-app.sh + .github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }} + + cat ./src-tauri/tauri.conf.json + + # Update Cargo.toml + ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}" + ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools" + echo "------------------" + cat ./src-tauri/Cargo.toml + + chmod +x .github/scripts/rename-workspace.sh + .github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }} + cat ./package.json + fi + - name: Build app + run: | + make build + + APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage | head -1) + yarn tauri signer sign \ + --private-key "$TAURI_SIGNING_PRIVATE_KEY" \ + --password "$TAURI_SIGNING_PRIVATE_KEY_PASSWORD" \ + "$APP_IMAGE" + + env: + RELEASE_CHANNEL: '${{ inputs.channel }}' + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }} + POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }} + TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }} + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }} + 
AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }} + # Publish app + + ## Artifacts, for dev and test + - name: Upload Artifact + if: inputs.public_provider != 'github' + uses: actions/upload-artifact@v4 + with: + name: jan-linux-amd64-flatpak-${{ inputs.new_version }}-deb + path: ./src-tauri/target/release/bundle/deb/*.deb + + - name: Upload Artifact + if: inputs.public_provider != 'github' + uses: actions/upload-artifact@v4 + with: + name: jan-linux-amd64-flatpak-${{ inputs.new_version }}-AppImage + path: ./src-tauri/target/release/bundle/appimage/*.AppImage + diff --git a/.github/workflows/template-tauri-build-linux-x64.yml b/.github/workflows/template-tauri-build-linux-x64.yml index 20663ea69..ef9054be1 100644 --- a/.github/workflows/template-tauri-build-linux-x64.yml +++ b/.github/workflows/template-tauri-build-linux-x64.yml @@ -28,6 +28,11 @@ on: type: string default: 'nightly' description: 'The channel to use for this job' + disable_updater: + required: false + type: boolean + default: false + description: 'If true, builds both .deb and .appimage but disables auto-updater' secrets: DELTA_AWS_S3_BUCKET_NAME: required: false @@ -156,7 +161,7 @@ jobs: POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }} TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }} TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }} - + AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }} # Publish app ## Artifacts, for dev and test diff --git a/.gitignore b/.gitignore index a20f96c30..93e43d4d8 100644 --- a/.gitignore +++ b/.gitignore @@ -1,46 +1,22 @@ -.idea .env -.idea - -# Jan inference error.log node_modules *.tgz -!charts/server/charts/*.tgz dist build .DS_Store -electron/renderer -electron/models -electron/docs -electron/engines -electron/themes -electron/playwright-report -server/pre-install package-lock.json coverage *.log core/lib/** - -# Turborepo -.turbo -electron/test-data 
-electron/test-results -core/test_results.html -coverage .yarn .yarnrc -test_results.html *.tsbuildinfo -electron/shared/** +test_results.html # docs docs/yarn.lock -electron/.version.bak -src-tauri/binaries/engines/cortex.llamacpp -src-tauri/resources/themes src-tauri/resources/lib -src-tauri/Cargo.lock src-tauri/icons !src-tauri/icons/icon.png src-tauri/gen/apple @@ -75,5 +51,9 @@ docs/.next/ **/yarn-error.log* **/pnpm-debug.log* -# Combined output for local testing -combined-output/ +## cargo +target +Cargo.lock + +## test +test-data diff --git a/Makefile b/Makefile index 023f2c877..4bd823437 100644 --- a/Makefile +++ b/Makefile @@ -26,6 +26,7 @@ else ifeq ($(shell uname -s),Linux) chmod +x src-tauri/build-utils/* endif yarn install + yarn build:tauri:plugin:api yarn build:core yarn build:extensions @@ -43,6 +44,9 @@ test: lint yarn download:bin yarn download:lib yarn test + yarn copy:assets:tauri + yarn build:icon + cargo test --manifest-path src-tauri/Cargo.toml --no-default-features --features test-tauri -- --test-threads=1 # Builds and publishes the app build-and-publish: install-and-build diff --git a/README.md b/README.md index 41a41af46..b90cc8907 100644 --- a/README.md +++ b/README.md @@ -120,6 +120,7 @@ mise dev # runs the full development setup ```bash yarn install +yarn build:tauri:plugin:api yarn build:core yarn build:extensions yarn dev diff --git a/ai.menlo.jan.desktop b/ai.menlo.jan.desktop deleted file mode 100644 index 779bffb27..000000000 --- a/ai.menlo.jan.desktop +++ /dev/null @@ -1,9 +0,0 @@ -[Desktop Entry] -Name=Jan -Comment=Local AI Assistant that runs 100% offline -Exec=run.sh -Icon=ai.menlo.jan -Type=Application -Categories=Development; -Keywords=AI;Assistant;LLM;ChatGPT;Local;Offline; -StartupNotify=true \ No newline at end of file diff --git a/ai.menlo.jan.metainfo.xml b/ai.menlo.jan.metainfo.xml deleted file mode 100644 index ba17914e9..000000000 --- a/ai.menlo.jan.metainfo.xml +++ /dev/null @@ -1,42 +0,0 @@ - - - ai.menlo.jan - 
FSFAP - AGPL-3.0-only - Jan - Local AI Assistant that runs 100% offline on your device - - -

- Jan is a ChatGPT-alternative that runs 100% offline on your device. Our goal is to make it easy for anyone to download and run LLMs and use AI with full control and privacy. -

-

Features:

- -
- - ai.menlo.jan.desktop - - - - https://catalog.jan.ai/flatpak/demo.gif - - - - https://jan.ai/ - https://github.com/menloresearch/jan/issues - - - - - - -

Latest stable release of Jan AI

-
-
-
-
\ No newline at end of file diff --git a/docs/public/assets/images/changelog/gpt-oss-serper.png b/docs/public/assets/images/changelog/gpt-oss-serper.png new file mode 100644 index 000000000..088d2c14f Binary files /dev/null and b/docs/public/assets/images/changelog/gpt-oss-serper.png differ diff --git a/docs/public/assets/images/changelog/jupyter5.png b/docs/public/assets/images/changelog/jupyter5.png new file mode 100644 index 000000000..cb6b1b119 Binary files /dev/null and b/docs/public/assets/images/changelog/jupyter5.png differ diff --git a/docs/public/assets/videos/mcpjupyter.mp4 b/docs/public/assets/videos/mcpjupyter.mp4 new file mode 100644 index 000000000..b9bdf79ed Binary files /dev/null and b/docs/public/assets/videos/mcpjupyter.mp4 differ diff --git a/docs/src/components/FooterMenu/index.tsx b/docs/src/components/FooterMenu/index.tsx index 0d9a50190..287c8a517 100644 --- a/docs/src/components/FooterMenu/index.tsx +++ b/docs/src/components/FooterMenu/index.tsx @@ -35,28 +35,28 @@ const socials = [ ] const menus = [ - { - name: 'Product', - child: [ - { - menu: 'Download', - path: '/download', - }, - { - menu: 'Changelog', - path: '/changelog', - }, - ], - }, - { - name: 'For Developers', - child: [ - { - menu: 'Documentation', - path: '/docs', - }, - ], - }, + // { + // name: 'Product', + // child: [ + // { + // menu: 'Download', + // path: '/download', + // }, + // { + // menu: 'Changelog', + // path: '/changelog', + // }, + // ], + // }, + // { + // name: 'For Developers', + // child: [ + // { + // menu: 'Documentation', + // path: '/docs', + // }, + // ], + // }, { name: 'Community', child: [ @@ -71,7 +71,7 @@ const menus = [ external: true, }, { - menu: 'Twitter', + menu: 'X/Twitter', path: 'https://twitter.com/jandotai', external: true, }, @@ -86,8 +86,8 @@ const menus = [ name: 'Company', child: [ { - menu: 'About', - path: '/about', + menu: 'Menlo', + path: 'https://menlo.ai', }, { menu: 'Blog', @@ -158,8 +158,8 @@ export default function Footer() 
{ return (
-
-
+
+

@@ -209,9 +209,10 @@ export default function Footer() {

+
{menus.map((menu, i) => { return ( -
+

{menu.name}

diff --git a/docs/src/pages/_meta.json b/docs/src/pages/_meta.json index cccdee43f..9508e506c 100644 --- a/docs/src/pages/_meta.json +++ b/docs/src/pages/_meta.json @@ -33,10 +33,6 @@ "layout": "raw" } }, - "about": { - "type": "page", - "title": "About" - }, "blog": { "type": "page", "title": "Blog", diff --git a/docs/src/pages/changelog/2025-08-07-gpt-oss.mdx b/docs/src/pages/changelog/2025-08-07-gpt-oss.mdx new file mode 100644 index 000000000..e3ff1d8cd --- /dev/null +++ b/docs/src/pages/changelog/2025-08-07-gpt-oss.mdx @@ -0,0 +1,92 @@ +--- +title: "Jan v0.6.7: OpenAI gpt-oss support and enhanced MCP tutorials" +version: 0.6.7 +description: "Full support for OpenAI's open-weight gpt-oss models and new Jupyter MCP integration guide" +date: 2025-08-07 +ogImage: "/assets/images/changelog/gpt-oss-serper.png" +--- + +import ChangelogHeader from "@/components/Changelog/ChangelogHeader" +import { Callout } from 'nextra/components' + + + + +## Highlights 🎉 + +Jan v0.6.7 brings full support for OpenAI's groundbreaking open-weight models - gpt-oss-120b and gpt-oss-20b - along with enhanced MCP documentation and critical bug fixes for reasoning models. + +### 🚀 OpenAI gpt-oss Models Now Supported + +Jan now fully supports OpenAI's first open-weight language models since GPT-2: + +**gpt-oss-120b:** +- 117B total parameters, 5.1B active per token +- Runs efficiently on a single 80GB GPU +- Near-parity with OpenAI o4-mini on reasoning benchmarks +- Exceptional tool use and function calling capabilities + +**gpt-oss-20b:** +- 21B total parameters, 3.6B active per token +- Runs on edge devices with just 16GB memory +- Similar performance to OpenAI o3-mini +- Perfect for local inference and rapid iteration + + +Both models use Mixture-of-Experts (MoE) architecture and support context lengths up to 128k tokens. They come natively quantized in MXFP4 format for efficient memory usage. 
+ + +### 🎮 GPU Layer Configuration + +Due to the models' size, you may need to adjust GPU layers based on your hardware: + +![GPU layers setting adjusted for optimal performance](/assets/images/changelog/jupyter5.png) + +Start with default settings and reduce layers if you encounter out-of-memory errors. Each system requires different configurations based on available VRAM. + +### 📚 New Jupyter MCP Tutorial + +We've added comprehensive documentation for the Jupyter MCP integration: +- Real-time notebook interaction and code execution +- Step-by-step setup with Python environment management +- Example workflows for data analysis and visualization +- Security best practices for code execution +- Performance optimization tips + +The tutorial demonstrates how to turn Jan into a capable data science partner that can execute analysis, create visualizations, and iterate based on actual results. + +### 🔧 Bug Fixes + +Critical fixes for reasoning model support: +- **Fixed reasoning text inclusion**: Reasoning text is no longer incorrectly included in chat completion requests +- **Fixed thinking block display**: gpt-oss thinking blocks now render properly in the UI +- **Fixed React state loop**: Resolved infinite re-render issue with useMediaQuery hook + +## Using gpt-oss Models + +### Download from Hub + +All gpt-oss GGUF variants are available in the Jan Hub. 
Simply search for "gpt-oss" and choose the quantization that fits your hardware: + +### Model Capabilities + +Both models excel at: +- **Reasoning tasks**: Competition coding, mathematics, and problem solving +- **Tool use**: Web search, code execution, and function calling +- **CoT reasoning**: Full chain-of-thought visibility for monitoring +- **Structured outputs**: JSON schema enforcement and grammar constraints + +### Performance Tips + +- **Memory requirements**: gpt-oss-120b needs ~80GB, gpt-oss-20b needs ~16GB +- **GPU layers**: Adjust based on your VRAM (start high, reduce if needed) +- **Context size**: Both models support up to 128k tokens +- **Quantization**: Choose lower quantization for smaller memory footprint + +## Coming Next + +We're continuing to optimize performance for large models, expand MCP integrations, and improve the overall experience for running cutting-edge open models locally. + +Update your Jan or [download the latest](https://jan.ai/). + +For the complete list of changes, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.6.7). 
diff --git a/docs/src/pages/docs/_assets/gpt-oss-tools.png b/docs/src/pages/docs/_assets/gpt-oss-tools.png new file mode 100644 index 000000000..96faa86ec Binary files /dev/null and b/docs/src/pages/docs/_assets/gpt-oss-tools.png differ diff --git a/docs/src/pages/docs/_assets/gpt-oss.png b/docs/src/pages/docs/_assets/gpt-oss.png new file mode 100644 index 000000000..9621dbdef Binary files /dev/null and b/docs/src/pages/docs/_assets/gpt-oss.png differ diff --git a/docs/src/pages/docs/_assets/gpt5-add.png b/docs/src/pages/docs/_assets/gpt5-add.png new file mode 100644 index 000000000..9e34e8e69 Binary files /dev/null and b/docs/src/pages/docs/_assets/gpt5-add.png differ diff --git a/docs/src/pages/docs/_assets/gpt5-chat.png b/docs/src/pages/docs/_assets/gpt5-chat.png new file mode 100644 index 000000000..69933eba8 Binary files /dev/null and b/docs/src/pages/docs/_assets/gpt5-chat.png differ diff --git a/docs/src/pages/docs/_assets/gpt5-msg.png b/docs/src/pages/docs/_assets/gpt5-msg.png new file mode 100644 index 000000000..4ea346ae3 Binary files /dev/null and b/docs/src/pages/docs/_assets/gpt5-msg.png differ diff --git a/docs/src/pages/docs/_assets/gpt5-msg2.png b/docs/src/pages/docs/_assets/gpt5-msg2.png new file mode 100644 index 000000000..736e9cd6f Binary files /dev/null and b/docs/src/pages/docs/_assets/gpt5-msg2.png differ diff --git a/docs/src/pages/docs/_assets/gpt5-msg3.png b/docs/src/pages/docs/_assets/gpt5-msg3.png new file mode 100644 index 000000000..509f23c79 Binary files /dev/null and b/docs/src/pages/docs/_assets/gpt5-msg3.png differ diff --git a/docs/src/pages/docs/_assets/gpt5-tools.png b/docs/src/pages/docs/_assets/gpt5-tools.png new file mode 100644 index 000000000..f4b8eaaa9 Binary files /dev/null and b/docs/src/pages/docs/_assets/gpt5-tools.png differ diff --git a/docs/src/pages/docs/_assets/jupyter.png b/docs/src/pages/docs/_assets/jupyter.png new file mode 100644 index 000000000..d261566d3 Binary files /dev/null and 
b/docs/src/pages/docs/_assets/jupyter.png differ diff --git a/docs/src/pages/docs/_assets/jupyter1.png b/docs/src/pages/docs/_assets/jupyter1.png new file mode 100644 index 000000000..f12e66eea Binary files /dev/null and b/docs/src/pages/docs/_assets/jupyter1.png differ diff --git a/docs/src/pages/docs/_assets/jupyter2.png b/docs/src/pages/docs/_assets/jupyter2.png new file mode 100644 index 000000000..b4650d651 Binary files /dev/null and b/docs/src/pages/docs/_assets/jupyter2.png differ diff --git a/docs/src/pages/docs/_assets/jupyter3.png b/docs/src/pages/docs/_assets/jupyter3.png new file mode 100644 index 000000000..de64bafa6 Binary files /dev/null and b/docs/src/pages/docs/_assets/jupyter3.png differ diff --git a/docs/src/pages/docs/_assets/jupyter4.png b/docs/src/pages/docs/_assets/jupyter4.png new file mode 100644 index 000000000..b920d49cb Binary files /dev/null and b/docs/src/pages/docs/_assets/jupyter4.png differ diff --git a/docs/src/pages/docs/_assets/jupyter5.png b/docs/src/pages/docs/_assets/jupyter5.png new file mode 100644 index 000000000..cb6b1b119 Binary files /dev/null and b/docs/src/pages/docs/_assets/jupyter5.png differ diff --git a/docs/src/pages/docs/_assets/openai-settings.png b/docs/src/pages/docs/_assets/openai-settings.png new file mode 100644 index 000000000..e8beeba28 Binary files /dev/null and b/docs/src/pages/docs/_assets/openai-settings.png differ diff --git a/docs/src/pages/docs/mcp-examples/data-analysis/jupyter.mdx b/docs/src/pages/docs/mcp-examples/data-analysis/jupyter.mdx new file mode 100644 index 000000000..4086831f0 --- /dev/null +++ b/docs/src/pages/docs/mcp-examples/data-analysis/jupyter.mdx @@ -0,0 +1,337 @@ +--- +title: Jupyter MCP +description: Real-time Jupyter notebook interaction and code execution through MCP integration. 
+keywords: + [ + Jan, + MCP, + Model Context Protocol, + Jupyter, + data analysis, + code execution, + notebooks, + Python, + visualization, + tool calling, + GPT-5, + OpenAI, + ] +--- + +import { Callout } from 'nextra/components' + +# Jupyter MCP + +[Jupyter MCP Server](https://jupyter-mcp-server.datalayer.tech/) enables real-time interaction with Jupyter notebooks, allowing AI models to edit, execute, and document code for data analysis and visualization. Instead of just generating code suggestions, AI can actually run Python code and see the results. + +This integration gives Jan the ability to execute analysis, create visualizations, and iterate based on actual results - turning your AI assistant into a capable data science partner. + + +**Breaking Change**: Version 0.11.0+ renamed `room` to `document`. Check the [release notes](https://jupyter-mcp-server.datalayer.tech/releases) for details. + + +## Available Tools + +The Jupyter MCP Server provides [12 comprehensive tools](https://jupyter-mcp-server.datalayer.tech/tools/): + +### Core Operations +- `append_execute_code_cell`: Add and run code cells at notebook end +- `insert_execute_code_cell`: Insert and run code at specific positions +- `execute_cell_simple_timeout`: Execute cells with timeout control +- `execute_cell_streaming`: Long-running cells with progress updates +- `execute_cell_with_progress`: Execute with timeout and monitoring + +### Cell Management +- `append_markdown_cell`: Add documentation cells +- `insert_markdown_cell`: Insert markdown at specific positions +- `delete_cell`: Remove cells from notebook +- `overwrite_cell_source`: Update existing cell content + +### Information & Reading +- `get_notebook_info`: Retrieve notebook metadata +- `read_cell`: Examine specific cell content +- `read_all_cells`: Get complete notebook state + + +The MCP connects to **one notebook at a time**, not multiple notebooks. Specify your target notebook in the configuration. 
+ + +## Prerequisites + +- Jan with MCP enabled +- Python 3.8+ with uv package manager +- Docker installed +- OpenAI API key for GPT-5 access +- Basic understanding of Jupyter notebooks + +## Setup + +### Enable MCP + +1. Go to **Settings** > **MCP Servers** +2. Toggle **Allow All MCP Tool Permission** ON + +![MCP settings page with toggle enabled](../../_assets/mcp-on.png) + +### Install uv Package Manager + +If you don't have uv installed: + +```bash +# macOS and Linux +curl -LsSf https://astral.sh/uv/install.sh | sh + +# Windows +powershell -c "irm https://astral.sh/uv/install.ps1 | iex" +``` + +### Create Python Environment + +Set up an isolated environment for Jupyter: + +```bash +# Create environment with Python 3.13 +uv venv .venv --python 3.13 + +# Activate environment +source .venv/bin/activate # Linux/macOS +# or +.venv\Scripts\activate # Windows + +# Install Jupyter dependencies +uv pip install jupyterlab==4.4.1 jupyter-collaboration==4.0.2 ipykernel +uv pip uninstall pycrdt datalayer_pycrdt +uv pip install datalayer_pycrdt==0.12.17 + +# Add data science libraries +uv pip install pandas numpy matplotlib altair +``` + +### Start JupyterLab Server + +Launch JupyterLab with authentication: + +```bash +jupyter lab --port 8888 --IdentityProvider.token heyheyyou --ip 0.0.0.0 +``` + +![Terminal showing JupyterLab startup](../../_assets/jupyter1.png) + +The server opens in your browser: + +![JupyterLab interface in browser](../../_assets/jupyter.png) + +### Create Target Notebook + +Create a new notebook named `for_jan.ipynb`: + +![Notebook created in JupyterLab](../../_assets/jupyter2.png) + +### Configure MCP Server in Jan + +Click `+` in MCP Servers section: + +**Configuration for macOS/Windows:** +- **Server Name**: `jupyter` +- **Command**: `docker` +- **Arguments**: + ``` + run -i --rm -e DOCUMENT_URL -e DOCUMENT_TOKEN -e DOCUMENT_ID -e RUNTIME_URL -e RUNTIME_TOKEN datalayer/jupyter-mcp-server:latest + ``` +- **Environment Variables**: + - Key: 
`DOCUMENT_URL`, Value: `http://host.docker.internal:8888` + - Key: `DOCUMENT_TOKEN`, Value: `heyheyyou` + - Key: `DOCUMENT_ID`, Value: `for_jan.ipynb` + - Key: `RUNTIME_URL`, Value: `http://host.docker.internal:8888` + - Key: `RUNTIME_TOKEN`, Value: `heyheyyou` + +![Jan MCP server configuration](../../_assets/jupyter3.png) + +## Using OpenAI's GPT-5 + +### Configure OpenAI Provider + +Navigate to **Settings** > **Model Providers** > **OpenAI**: + +![OpenAI settings page](../../_assets/openai-settings.png) + +### Add GPT-5 Model + +Since GPT-5 is new, you'll need to manually add it to Jan: + +![Manually adding GPT-5 model name](../../_assets/gpt5-add.png) + + +**About GPT-5**: OpenAI's smartest, fastest, most useful model yet. It features built-in thinking capabilities, state-of-the-art performance across coding, math, and writing, and exceptional tool use abilities. GPT-5 automatically decides when to respond quickly versus when to think longer for expert-level responses. + + +### Enable Tool Calling + +Ensure tools are enabled for GPT-5: + +![Enabling tools for GPT-5](../../_assets/gpt5-tools.png) + +## Usage + +### Verify Tool Availability + +Start a new chat with GPT-5. The tools bubble shows all available Jupyter operations: + +![GPT-5 ready in chat with Jupyter tools visible](../../_assets/gpt5-chat.png) + +### Initial Test + +Start with establishing the notebook as your workspace: + +``` +You have access to a jupyter notebook, please use it as our data analysis scratchpad. Let's start by printing "Hello Jan" in a new cell. +``` + +GPT-5 creates and executes the code successfully: + +![First message showing successful tool use](../../_assets/gpt5-msg.png) + +### Advanced Data Analysis + +Try a more complex task combining multiple operations: + +``` +Generate synthetic data with numpy, move it to a pandas dataframe and create a pivot table, and then make a cool animated plot using matplotlib. Your use case will be sales analysis in the luxury fashion industry. 
+``` + +![Complex analysis with luxury fashion sales data](../../_assets/gpt5-msg2.png) + +Watch the complete output unfold: + + + +## Example Prompts to Try + +### Financial Analysis +``` +Create a Monte Carlo simulation for portfolio risk analysis. Generate 10,000 scenarios, calculate VaR at 95% confidence, and visualize the distribution. +``` + +### Time Series Forecasting +``` +Generate synthetic time series data representing daily website traffic over 2 years with weekly seasonality and trend. Build an ARIMA model and forecast the next 30 days. +``` + +### Machine Learning Pipeline +``` +Build a complete classification pipeline: generate a dataset with 3 classes and 5 features, split the data, try multiple algorithms (RF, SVM, XGBoost), and create a comparison chart of their performance. +``` + +### Interactive Dashboards +``` +Create an interactive visualization using matplotlib widgets showing how changing interest rates affects loan payments over different time periods. +``` + +### Statistical Testing +``` +Generate two datasets representing A/B test results for an e-commerce site. Perform appropriate statistical tests and create visualizations to determine if the difference is significant. +``` + +## Performance Considerations + + +Multiple tools can quickly consume context windows, especially for local models. GPT-5's unified system with smart routing helps manage this, but local models may struggle with speed and context limitations. 
+ + +### Context Management +- Each tool call adds to conversation history +- 12 available tools means substantial system prompt overhead +- Local models may need reduced tool sets for reasonable performance +- Consider disabling unused tools to conserve context + +### Cloud vs Local Trade-offs +- **Cloud models (GPT-5)**: Handle multiple tools efficiently with large context windows +- **Local models**: May require optimization, reduced tool sets, or smaller context sizes +- **Hybrid approach**: Use cloud for complex multi-tool workflows, local for simple tasks + +## Security Considerations + + +MCP provides powerful capabilities but requires careful security practices. + + +### Authentication Tokens +- **Always use strong tokens** - avoid simple passwords +- **Never commit tokens** to version control +- **Rotate tokens regularly** for production use +- **Use different tokens** for different environments + +### Network Security +- JupyterLab is network-accessible with `--ip 0.0.0.0` +- Consider using `--ip 127.0.0.1` for local-only access +- Implement firewall rules to restrict access +- Use HTTPS in production environments + +### Code Execution Risks +- AI has full Python execution capabilities +- Review generated code before execution +- Use isolated environments for sensitive work +- Monitor resource usage and set limits + +### Data Privacy +- Notebook content is processed by AI models +- When using cloud models like GPT-5, data leaves your system +- Keep sensitive data in secure environments +- Consider model provider's data policies + +## Best Practices + +### Environment Management +- Use virtual environments for isolation +- Document required dependencies +- Version control your notebooks +- Regular environment cleanup + +### Performance Optimization +- Start with simple operations +- Monitor memory usage during execution +- Close unused notebooks +- Restart kernels when needed + +### Effective Prompting +- Be specific about desired outputs +- Break complex 
tasks into steps +- Ask for explanations with code +- Request error handling in critical operations + +## Troubleshooting + +**Connection Problems:** +- Verify JupyterLab is running +- Check token matches configuration +- Confirm Docker can reach host +- Test with curl to verify connectivity + +**Execution Failures:** +- Check Python package availability +- Verify kernel is running +- Look for syntax errors in generated code +- Restart kernel if stuck + +**Tool Calling Errors:** +- Ensure model supports tool calling +- Verify all 12 tools appear in chat +- Check MCP server is active +- Review Docker logs for errors + +**API Rate Limits:** +- Monitor OpenAI usage dashboard +- Implement retry logic for transient errors +- Consider fallback to local models +- Cache results when possible + +## Conclusion + +The Jupyter MCP integration combined with GPT-5's advanced capabilities creates an exceptionally powerful data science environment. With GPT-5's built-in reasoning and expert-level intelligence, complex analyses that once required extensive manual coding can now be accomplished through natural conversation. + +Whether you're exploring data, building models, or creating visualizations, this integration provides the computational power of Jupyter with the intelligence of GPT-5 - all within Jan's privacy-conscious interface. + +Remember: with great computational power comes the responsibility to use it securely. Always validate generated code, use strong authentication, and be mindful of data privacy when using cloud-based models. 
diff --git a/extensions/llamacpp-extension/package.json b/extensions/llamacpp-extension/package.json index 0df2cddf3..b5db33c5e 100644 --- a/extensions/llamacpp-extension/package.json +++ b/extensions/llamacpp-extension/package.json @@ -28,6 +28,8 @@ }, "dependencies": { "@janhq/core": "../../core/package.tgz", + "@janhq/tauri-plugin-hardware-api": "link:../../src-tauri/plugins/tauri-plugin-hardware", + "@janhq/tauri-plugin-llamacpp-api": "link:../../src-tauri/plugins/tauri-plugin-llamacpp", "@tauri-apps/api": "^2.5.0", "@tauri-apps/plugin-log": "^2.6.0", "fetch-retry": "^5.0.6", diff --git a/extensions/llamacpp-extension/src/backend.ts b/extensions/llamacpp-extension/src/backend.ts index 3bf6a2675..b4d86d154 100644 --- a/extensions/llamacpp-extension/src/backend.ts +++ b/extensions/llamacpp-extension/src/backend.ts @@ -2,6 +2,7 @@ import { getJanDataFolderPath, fs, joinPath, events } from '@janhq/core' import { invoke } from '@tauri-apps/api/core' import { getProxyConfig } from './util' import { dirname } from '@tauri-apps/api/path' +import { getSystemInfo } from '@janhq/tauri-plugin-hardware-api' // folder structure // /llamacpp/backends// @@ -10,7 +11,7 @@ import { dirname } from '@tauri-apps/api/path' export async function listSupportedBackends(): Promise< { version: string; backend: string }[] > { - const sysInfo = await window.core.api.getSystemInfo() + const sysInfo = await getSystemInfo() const os_type = sysInfo.os_type const arch = sysInfo.cpu.arch @@ -229,7 +230,7 @@ export async function downloadBackend( } async function _getSupportedFeatures() { - const sysInfo = await window.core.api.getSystemInfo() + const sysInfo = await getSystemInfo() const features = { avx: sysInfo.cpu.extensions.includes('avx'), avx2: sysInfo.cpu.extensions.includes('avx2'), @@ -289,7 +290,7 @@ async function _fetchGithubReleases( } async function _isCudaInstalled(version: string): Promise { - const sysInfo = await window.core.api.getSystemInfo() + const sysInfo = await 
getSystemInfo() const os_type = sysInfo.os_type // not sure the reason behind this naming convention diff --git a/extensions/llamacpp-extension/src/index.ts b/extensions/llamacpp-extension/src/index.ts index 75afb81ae..3612c678b 100644 --- a/extensions/llamacpp-extension/src/index.ts +++ b/extensions/llamacpp-extension/src/index.ts @@ -100,6 +100,13 @@ interface DeviceList { mem: number free: number } + +interface GgufMetadata { + version: number + tensor_count: number + metadata: Record +} + /** * Override the default app.log function to use Jan's logging system. * @param args @@ -801,7 +808,7 @@ export default class llamacpp_extension extends AIEngine { } private async generateApiKey(modelId: string, port: string): Promise { - const hash = await invoke('generate_api_key', { + const hash = await invoke('plugin:llamacpp|generate_api_key', { modelId: modelId + port, apiSecret: this.apiSecret, }) @@ -1094,7 +1101,7 @@ export default class llamacpp_extension extends AIEngine { */ private async getRandomPort(): Promise { try { - const port = await invoke('get_random_port') + const port = await invoke('plugin:llamacpp|get_random_port') return port } catch { logger.error('Unable to find a suitable port') @@ -1165,7 +1172,7 @@ export default class llamacpp_extension extends AIEngine { const [version, backend] = cfg.version_backend.split('/') if (!version || !backend) { throw new Error( - `Invalid version/backend format: ${cfg.version_backend}. Expected format: /` + "Initial setup for the backend failed due to a network issue. Please restart the app!" 
) } @@ -1272,7 +1279,7 @@ export default class llamacpp_extension extends AIEngine { try { // TODO: add LIBRARY_PATH - const sInfo = await invoke('load_llama_model', { + const sInfo = await invoke('plugin:llamacpp|load_llama_model', { backendPath, libraryPath, args, @@ -1292,7 +1299,7 @@ export default class llamacpp_extension extends AIEngine { const pid = sInfo.pid try { // Pass the PID as the session_id - const result = await invoke('unload_llama_model', { + const result = await invoke('plugin:llamacpp|unload_llama_model', { pid: pid, }) @@ -1430,7 +1437,7 @@ export default class llamacpp_extension extends AIEngine { private async findSessionByModel(modelId: string): Promise { try { - let sInfo = await invoke('find_session_by_model', { + let sInfo = await invoke('plugin:llamacpp|find_session_by_model', { modelId, }) return sInfo @@ -1449,7 +1456,7 @@ export default class llamacpp_extension extends AIEngine { throw new Error(`No active session found for model: ${opts.model}`) } // check if the process is alive - const result = await invoke('is_process_running', { + const result = await invoke('plugin:llamacpp|is_process_running', { pid: sessionInfo.pid, }) if (result) { @@ -1509,7 +1516,7 @@ export default class llamacpp_extension extends AIEngine { override async getLoadedModels(): Promise { try { - let models: string[] = await invoke('get_loaded_models') + let models: string[] = await invoke('plugin:llamacpp|get_loaded_models') return models } catch (e) { logger.error(e) @@ -1532,7 +1539,7 @@ export default class llamacpp_extension extends AIEngine { const backendPath = await getBackendExePath(backend, version) const libraryPath = await joinPath([await this.getProviderPath(), 'lib']) try { - const dList = await invoke('get_devices', { + const dList = await invoke('plugin:llamacpp|get_devices', { backendPath, libraryPath, }) @@ -1591,4 +1598,15 @@ export default class llamacpp_extension extends AIEngine { override getChatClient(sessionId: string): any { throw 
new Error('method not implemented yet') } + + private async loadMetadata(path: string): Promise { + try { + const data = await invoke('plugin:llamacpp|read_gguf_metadata', { + path: path, + }) + return data + } catch (err) { + throw err + } + } } diff --git a/extensions/llamacpp-extension/src/test/backend.test.ts b/extensions/llamacpp-extension/src/test/backend.test.ts index 28e8f603a..a3395d907 100644 --- a/extensions/llamacpp-extension/src/test/backend.test.ts +++ b/extensions/llamacpp-extension/src/test/backend.test.ts @@ -18,7 +18,7 @@ describe('Backend functions', () => { describe('listSupportedBackends', () => { it('should return supported backends for Windows x64', async () => { // Mock system info - window.core.api.getSystemInfo = vi.fn().mockResolvedValue({ + const getSystemInfo = vi.fn().mockResolvedValue({ os_type: 'windows', cpu: { arch: 'x86_64', @@ -53,7 +53,7 @@ describe('Backend functions', () => { it('should return CUDA backends with proper CPU instruction detection for Windows', async () => { // Mock system info with CUDA support and AVX512 - window.core.api.getSystemInfo = vi.fn().mockResolvedValue({ + const getSystemInfo = vi.fn().mockResolvedValue({ os_type: 'windows', cpu: { arch: 'x86_64', @@ -92,7 +92,7 @@ describe('Backend functions', () => { it('should select appropriate CUDA backend based on CPU features - AVX2 only', async () => { // Mock system info with CUDA support but only AVX2 - window.core.api.getSystemInfo = vi.fn().mockResolvedValue({ + const getSystemInfo = vi.fn().mockResolvedValue({ os_type: 'windows', cpu: { arch: 'x86_64', @@ -131,7 +131,7 @@ describe('Backend functions', () => { it('should select appropriate CUDA backend based on CPU features - no AVX', async () => { // Mock system info with CUDA support but no AVX - window.core.api.getSystemInfo = vi.fn().mockResolvedValue({ + const getSystemInfo = vi.fn().mockResolvedValue({ os_type: 'windows', cpu: { arch: 'x86_64', @@ -171,7 +171,7 @@ describe('Backend functions', () => 
{ it('should return CUDA backends with proper CPU instruction detection for Linux', async () => { // Mock system info with CUDA support and AVX support - window.core.api.getSystemInfo = vi.fn().mockResolvedValue({ + const getSystemInfo = vi.fn().mockResolvedValue({ os_type: 'linux', cpu: { arch: 'x86_64', @@ -210,7 +210,7 @@ describe('Backend functions', () => { }) it('should return supported backends for macOS arm64', async () => { - window.core.api.getSystemInfo = vi.fn().mockResolvedValue({ + const getSystemInfo = vi.fn().mockResolvedValue({ os_type: 'macos', cpu: { arch: 'aarch64', @@ -261,7 +261,7 @@ describe('Backend functions', () => { describe('getBackendExePath', () => { it('should return correct exe path for Windows', async () => { - window.core.api.getSystemInfo = vi.fn().mockResolvedValue({ + const getSystemInfo = vi.fn().mockResolvedValue({ os_type: 'windows', }) @@ -289,7 +289,7 @@ describe('Backend functions', () => { }) it('should return correct exe path for Linux/macOS', async () => { - window.core.api.getSystemInfo = vi.fn().mockResolvedValue({ + const getSystemInfo = vi.fn().mockResolvedValue({ os_type: 'linux', }) diff --git a/extensions/llamacpp-extension/src/test/index.test.ts b/extensions/llamacpp-extension/src/test/index.test.ts index 27f37293c..59090c7bf 100644 --- a/extensions/llamacpp-extension/src/test/index.test.ts +++ b/extensions/llamacpp-extension/src/test/index.test.ts @@ -175,7 +175,7 @@ describe('llamacpp_extension', () => { const { invoke } = await import('@tauri-apps/api/core') // Mock system info for getBackendExePath - window.core.api.getSystemInfo = vi.fn().mockResolvedValue({ + const getSystemInfo = vi.fn().mockResolvedValue({ os_type: 'linux' }) diff --git a/extensions/llamacpp-extension/src/test/setup.ts b/extensions/llamacpp-extension/src/test/setup.ts index 0bcc9d40f..464491dc2 100644 --- a/extensions/llamacpp-extension/src/test/setup.ts +++ b/extensions/llamacpp-extension/src/test/setup.ts @@ -19,7 +19,7 @@ 
Object.defineProperty(globalThis, 'window', { localStorage: localStorageMock, core: { api: { - getSystemInfo: vi.fn(), + // getSystemInfo: vi.fn(), }, extensionManager: { getByName: vi.fn().mockReturnValue({ @@ -31,6 +31,10 @@ Object.defineProperty(globalThis, 'window', { }, }) +vi.mock('@janhq/tauri-plugin-hardware-api', () => ({ + getSystemInfo: vi.fn(), +})); + // Mock Tauri invoke function vi.mock('@tauri-apps/api/core', () => ({ invoke: vi.fn(), diff --git a/flatpak/ai.jan.Jan.metainfo.xml b/flatpak/ai.jan.Jan.metainfo.xml new file mode 100644 index 000000000..6c144f6d7 --- /dev/null +++ b/flatpak/ai.jan.Jan.metainfo.xml @@ -0,0 +1,49 @@ + + + ai.jan.Jan + FSFAP + Apache-2.0 + Menlo Research + Jan + ai.jan.Jan + Local AI Assistant that runs 100% offline on your device + + Utility + Education + Chat + Dictionary + + +

+ Jan is a ChatGPT-alternative that runs 100% offline on your device. Our goal is to make it easy for anyone to download and run LLMs and use AI with full control and privacy. +

+

Features:

+
    +
  • Local AI Models: Download and run large language models like Llama, Gemma, and Qwen directly from Hugging Face. All models run locally on your device.
  • +
  • Cloud Integration: Connect to cloud providers such as OpenAI, Anthropic, Gemini, and Groq when needed. Easily switch between local and cloud models.
  • +
  • Custom Assistants: Set up specialized AI assistants for different tasks, such as writing, summarizing, or coding. Customize their tone and behavior.
  • +
  • OpenAI-Compatible API: Jan includes a local API server (localhost:1337) that works with tools and plugins expecting OpenAI’s API format.
  • +
  • Model Context Protocol (MCP): Supports MCP to improve context handling and interactions across larger or more complex tasks.
  • +
  • Privacy First: Jan runs fully offline by default. Your data never leaves your device unless you enable cloud connections.
  • +
+
+ai.jan.Jan.desktop + + + https://catalog.jan.ai/flatpak/demo.gif + + + +https://jan.ai/ +https://github.com/menloresearch/jan/issues + + + + + + +

Latest stable release of Jan AI

+
+
+
+
diff --git a/flatpak/ai.jan.Jan.yml b/flatpak/ai.jan.Jan.yml new file mode 100644 index 000000000..bf27cb6ce --- /dev/null +++ b/flatpak/ai.jan.Jan.yml @@ -0,0 +1,118 @@ +id: ai.jan.Jan +runtime: org.gnome.Platform +runtime-version: '48' +sdk: org.gnome.Sdk +command: Jan +finish-args: + - --socket=wayland # Permission needed to show the window + - --socket=fallback-x11 # Permission needed to show the window on X11 + - --device=dri + - --share=ipc + - --share=network + - --socket=pulseaudio # for future multimodality + - --filesystem=xdg-run/dconf + - --filesystem=~/.config/dconf:ro + - --filesystem=~/.config/kioslaverc + - --env=GTK_PATH=/app/lib/gtkmodules + +modules: + - name: volk + buildsystem: cmake-ninja + builddir: true + config-opts: + - -DVOLK_INSTALL=ON + sources: + - type: archive + url: https://github.com/zeux/volk/archive/refs/tags/vulkan-sdk-1.3.280.0.zip + sha256: 178875134d36e8b90f7e3ec31171355df3b71f47eba49cca2f98158e6552b011 + + - name: vulkan-headers + buildsystem: cmake-ninja + builddir: true + sources: + - type: archive + url: https://github.com/KhronosGroup/Vulkan-Headers/archive/refs/tags/v1.3.283.zip + sha256: 2094159c87fb4b6d8f734bd4cad59564cef7ef32feb00cf6d8ca7e75a84df921 + + - name: vulkan-tools + buildsystem: cmake-ninja + builddir: true + sources: + - type: archive + url: https://github.com/KhronosGroup/Vulkan-Tools/archive/refs/tags/v1.3.283.zip + sha256: 11ec6b474e91dc8cb6e7f22891294ede549bb6ed67c19d230e293b3fc9610883 + + - name: shaderc + buildsystem: cmake-ninja + builddir: true + config-opts: + - -DSHADERC_SKIP_COPYRIGHT_CHECK=ON + - -DSHADERC_SKIP_EXAMPLES=ON + - -DSHADERC_SKIP_TESTS=ON + - -DSPIRV_SKIP_EXECUTABLES=ON + - -DENABLE_GLSLANG_BINARIES=OFF + cleanup: + - /bin + - /include + - /lib/cmake + - /lib/pkgconfig + sources: + - type: git + url: https://github.com/google/shaderc.git + tag: v2024.1 + commit: 47a9387ef5b3600d30d84c71ec77a59dc7db46fa + # https://github.com/google/shaderc/blob/known-good/known_good.json + - type: 
git + url: https://github.com/KhronosGroup/SPIRV-Tools.git + commit: dd4b663e13c07fea4fbb3f70c1c91c86731099f7 + dest: third_party/spirv-tools + - type: git + url: https://github.com/KhronosGroup/SPIRV-Headers.git + commit: 5e3ad389ee56fca27c9705d093ae5387ce404df4 + dest: third_party/spirv-headers + - type: git + url: https://github.com/KhronosGroup/glslang.git + commit: 142052fa30f9eca191aa9dcf65359fcaed09eeec + dest: third_party/glslang + + - name: cuda-toolkit + only-arches: + - x86_64 + cleanup: + - /cuda + buildsystem: simple + build-commands: + - mkdir /app/cuda + - sh cuda_toolkit.run --silent --toolkit --toolkitpath=/app/cuda + - mv /app/cuda/lib64/libcudart.* /app/lib/ + - mv /app/cuda/lib64/libcublas* /app/lib/ + sources: + - type: file + only-arches: + - x86_64 + url: https://developer.download.nvidia.com/compute/cuda/12.8.0/local_installers/cuda_12.8.0_570.86.10_linux.run + dest-filename: cuda_toolkit.run + md5: c71027cf1a4ce84f80b9cbf81116e767 + + - name: binary + buildsystem: simple + sources: + - type: file + url: https://catalog.jan.ai/flatpak/Jan_0.6.8_amd64.deb + sha256: 15d1368c318a3853bad4fc3646f88afdbf9f52b6416a4734f7a297741d171f28 + only-arches: [x86_64] + - type: file + path: ai.jan.Jan.metainfo.xml + build-commands: + - ar -x *.deb + - tar -xf data.tar.gz + - 'install -Dm755 usr/bin/Jan /app/bin/Jan' + - 'install -Dm755 usr/bin/bun /app/bin/bun' + - 'install -Dm755 usr/bin/uv /app/bin/uv' + - cp -rv usr/lib/* /app/lib/. 
+ - install -Dm644 usr/share/applications/Jan.desktop /app/share/applications/ai.jan.Jan.desktop + - sed -e 's/Icon=Jan/Icon=ai.jan.Jan/g' -e 's#Exec=Jan#Exec=/app/bin/Jan#g' -i /app/share/applications/ai.jan.Jan.desktop + - install -Dm644 usr/share/icons/hicolor/128x128/apps/Jan.png /app/share/icons/hicolor/128x128/apps/ai.jan.Jan.png + - install -Dm644 usr/share/icons/hicolor/32x32/apps/Jan.png /app/share/icons/hicolor/32x32/apps/ai.jan.Jan.png + - install -Dm644 usr/share/icons/hicolor/256x256@2/apps/Jan.png /app/share/icons/hicolor/256x256@2/apps/ai.jan.Jan.png + - install -Dm644 ai.jan.Jan.metainfo.xml /app/share/metainfo/ai.jan.Jan.metainfo.xml diff --git a/flatpak/flathub.json b/flatpak/flathub.json new file mode 100644 index 000000000..c82de06e0 --- /dev/null +++ b/flatpak/flathub.json @@ -0,0 +1,3 @@ +{ + "only-arches": ["x86_64"] +} \ No newline at end of file diff --git a/mise.toml b/mise.toml index e51fdcce7..e44555be4 100644 --- a/mise.toml +++ b/mise.toml @@ -28,9 +28,19 @@ run = "yarn install" sources = ['package.json', 'yarn.lock'] outputs = ['node_modules'] +[tasks.build-tauri-plugin-api] +description = "Build Tauri plugin API" +depends = ["install"] +run = "yarn build:tauri:plugin:api" +sources = ['src-tauri/plugins/**/*'] +outputs = [ + 'src-tauri/plugins/tauri-plugin-hardware/dist-js', + 'src-tauri/plugins/tauri-plugin-llamacpp/dist-js', +] + [tasks.build-core] description = "Build core package" -depends = ["install"] +depends = ["build-tauri-plugin-api"] run = "yarn build:core" sources = ['core/**/*'] outputs = ['core/dist'] diff --git a/package.json b/package.json index 99bf81631..04f1bc1dc 100644 --- a/package.json +++ b/package.json @@ -25,6 +25,7 @@ "build:tauri:linux": "yarn download:bin && ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build && ./src-tauri/build-utils/buildAppImage.sh", "build:tauri:darwin": "yarn tauri build --target universal-apple-darwin", "build:tauri": "yarn build:icon && yarn copy:assets:tauri && 
run-script-os", + "build:tauri:plugin:api": "cd src-tauri/plugins && yarn install && yarn workspaces foreach -Apt run build", "build:icon": "tauri icon ./src-tauri/icons/icon.png", "build:core": "cd core && yarn build && yarn pack", "build:web": "yarn workspace @janhq/web-app build", diff --git a/src-tauri/.cargo/config.toml b/src-tauri/.cargo/config.toml new file mode 100644 index 000000000..3c45f4de8 --- /dev/null +++ b/src-tauri/.cargo/config.toml @@ -0,0 +1,4 @@ +[env] +# workaround needed to prevent `STATUS_ENTRYPOINT_NOT_FOUND` error in tests +# see https://github.com/tauri-apps/tauri/pull/4383#issuecomment-1212221864 +__TAURI_WORKSPACE__ = "true" \ No newline at end of file diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock new file mode 100644 index 000000000..fc6bfd301 --- /dev/null +++ b/src-tauri/Cargo.lock @@ -0,0 +1,7184 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "Jan" +version = "0.6.599" +dependencies = [ + "dirs", + "env", + "fix-path-env", + "flate2", + "futures-util", + "hyper 0.14.32", + "jan-utils", + "libc", + "libloading 0.8.8", + "log", + "nix", + "once_cell", + "reqwest 0.11.27", + "rmcp", + "serde", + "serde_json", + "serde_yaml", + "tar", + "tauri", + "tauri-build", + "tauri-plugin-deep-link", + "tauri-plugin-dialog", + "tauri-plugin-hardware", + "tauri-plugin-http", + "tauri-plugin-llamacpp", + "tauri-plugin-log", + "tauri-plugin-opener", + "tauri-plugin-os", + "tauri-plugin-shell", + "tauri-plugin-single-instance", + "tauri-plugin-store", + "tauri-plugin-updater", + "tempfile", + "thiserror 2.0.12", + "tokio", + "tokio-util", + "url", + "uuid", + "windows-sys 0.60.2", +] + +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = 
"2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "ahash" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" +dependencies = [ + "getrandom 0.2.16", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_log-sys" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84521a3cf562bc62942e294181d9eef17eb38ceb8c68677bc49f144e4c3d4f8d" + +[[package]] +name = "android_logger" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbb4e440d04be07da1f1bf44fb4495ebd58669372fe0cffa6e48595ac5bd88a3" +dependencies = [ + "android_log-sys", + "env_filter", + "log", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.98" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" + +[[package]] +name = "arbitrary" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" +dependencies = 
[ + "derive_arbitrary", +] + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "ash" +version = "0.38.0+1.3.281" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bb44936d800fea8f016d7f2311c6a4f97aebd5dc86f09906139ec848cf3a46f" +dependencies = [ + "libloading 0.8.8", +] + +[[package]] +name = "ashpd" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6cbdf310d77fd3aaee6ea2093db7011dc2d35d2eb3481e5607f1f8d942ed99df" +dependencies = [ + "enumflags2", + "futures-channel", + "futures-util", + "rand 0.9.2", + "raw-window-handle", + "serde", + "serde_repr", + "tokio", + "url", + "wayland-backend", + "wayland-client", + "wayland-protocols", + "zbus", +] + +[[package]] +name = "async-broadcast" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435a87a52755b8f27fcf321ac4f04b2802e337c8c4872923137471ec39c37532" +dependencies = [ + "event-listener", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-channel" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-executor" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb812ffb58524bdd10860d7d974e2f01cc0950c2438a74ee5ec2e2280c6c4ffa" +dependencies = [ + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "pin-project-lite", + "slab", +] + +[[package]] +name = "async-io" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"19634d6336019ef220f09fd31168ce5c184b295cbf80345437cc36094ef223ca" +dependencies = [ + "async-lock", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "windows-sys 0.60.2", +] + +[[package]] +name = "async-lock" +version = "3.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" +dependencies = [ + "event-listener", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-process" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65daa13722ad51e6ab1a1b9c01299142bc75135b337923cfa10e79bbbd669f00" +dependencies = [ + "async-channel", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if", + "event-listener", + "futures-lite", + "rustix", +] + +[[package]] +name = "async-recursion" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "async-signal" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f567af260ef69e1d52c2b560ce0ea230763e6fbb9214a85d768760a920e3e3c1" +dependencies = [ + "async-io", + "async-lock", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + "windows-sys 0.60.2", +] + +[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + +[[package]] +name = "async-trait" +version = "0.1.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" +dependencies = [ 
+ "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "atk" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241b621213072e993be4f6f3a9e4b45f65b7e6faad43001be957184b7bb1824b" +dependencies = [ + "atk-sys", + "glib", + "libc", +] + +[[package]] +name = "atk-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5e48b684b0ca77d2bbadeef17424c2ea3c897d44d566a1617e7e8f30614d086" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "backtrace" +version = "0.3.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets 0.52.6", +] + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.9.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" +dependencies = [ + "serde", +] + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block2" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c132eebf10f5cad5289222520a4a058514204aed6d791f1cf4fe8088b82d15f" +dependencies = [ + "objc2 0.5.2", +] + +[[package]] +name = "block2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "340d2f0bdb2a43c1d3cd40513185b2bd7def0aa1052f956455114bc98f82dcf2" +dependencies = [ + "objc2 0.6.1", +] + +[[package]] +name = "blocking" +version = "1.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21" +dependencies = [ + "async-channel", + "async-task", + "futures-io", + "futures-lite", + "piper", +] + +[[package]] +name = "borsh" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad8646f98db542e39fc66e68a20b2144f6a732636df7c2354e74645faaa433ce" +dependencies = [ + "borsh-derive", + "cfg_aliases", +] + +[[package]] +name = "borsh-derive" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdd1d3c0c2f5833f22386f252fe8ed005c7f59fdcddeef025c01b4c3b9fd9ac3" +dependencies = [ + "once_cell", + "proc-macro-crate 3.3.0", + "proc-macro2", + "quote", + "syn 2.0.104", +] + 
+[[package]] +name = "bumpalo" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + +[[package]] +name = "byte-unit" +version = "5.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cd29c3c585209b0cbc7309bfe3ed7efd8c84c21b7af29c8bfae908f8777174" +dependencies = [ + "rust_decimal", + "serde", + "utf8-width", +] + +[[package]] +name = "bytecheck" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23cdc57ce23ac53c931e88a43d06d070a6fd142f2617be5855eb75efc9beb1c2" +dependencies = [ + "bytecheck_derive", + "ptr_meta", + "simdutf8", +] + +[[package]] +name = "bytecheck_derive" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "bytemuck" +version = "1.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c76a5792e44e4abe34d3abf15636779261d45a7450612059293d1d2cfc63422" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +dependencies = [ + "serde", +] + +[[package]] +name = "cairo-rs" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ca26ef0159422fb77631dc9d17b102f253b876fe1586b03b803e63a309b4ee2" +dependencies = [ + "bitflags 2.9.1", + "cairo-sys-rs", + "glib", + "libc", + "once_cell", + "thiserror 1.0.69", +] + +[[package]] +name = "cairo-sys-rs" +version = 
"0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "685c9fa8e590b8b3d678873528d83411db17242a73fccaed827770ea0fedda51" +dependencies = [ + "glib-sys", + "libc", + "system-deps", +] + +[[package]] +name = "camino" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0da45bc31171d8d6960122e222a67740df867c1dd53b4d51caa297084c185cab" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo-platform" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.19.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" +dependencies = [ + "camino", + "cargo-platform", + "semver", + "serde", + "serde_json", + "thiserror 2.0.12", +] + +[[package]] +name = "cargo_toml" +version = "0.22.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "374b7c592d9c00c1f4972ea58390ac6b18cbb6ab79011f3bdc90a0b82ca06b77" +dependencies = [ + "serde", + "toml 0.9.5", +] + +[[package]] +name = "cc" +version = "1.2.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3a42d84bb6b69d3a8b3eaacf0d88f179e1929695e1ad012b6cf64d9caaa5fd2" +dependencies = [ + "shlex", +] + +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + +[[package]] +name = "cfb" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d38f2da7a0a2c4ccf0065be06397cc26a81f4e528be095826eee9d4adbb8c60f" +dependencies = [ + "byteorder", + "fnv", + "uuid", +] + +[[package]] +name = "cfg-expr" +version = "0.15.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d067ad48b8650848b989a59a86c6c36a995d02d2bf778d45c3c5d57bc2718f02" +dependencies = [ + "smallvec", + "target-lexicon", +] + +[[package]] +name = "cfg-if" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "const-random" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom 0.2.16", + "once_cell", + "tiny-keccak", +] + +[[package]] +name = "convert_case" +version = "0.4.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "cookie" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" +dependencies = [ + "percent-encoding", + "time", + "version_check", +] + +[[package]] +name = "cookie_store" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2eac901828f88a5241ee0600950ab981148a18f2f756900ffba1b125ca6a3ef9" +dependencies = [ + "cookie", + "document-features", + "idna", + "log", + "publicsuffix", + "serde", + "serde_derive", + "serde_json", + "time", + "url", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "core-graphics" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa95a34622365fa5bbf40b20b75dba8dfa8c94c734aea8ac9a5ca38af14316f1" +dependencies = [ + "bitflags 2.9.1", + "core-foundation 0.10.1", + "core-graphics-types", + "foreign-types 0.5.0", + "libc", +] + +[[package]] +name = "core-graphics-types" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3d44a101f213f6c4cdc1853d4b78aef6db6bdfa3468798cc1d9912f4735013eb" +dependencies = [ + "bitflags 2.9.1", + "core-foundation 0.10.1", + "libc", +] + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "cssparser" +version = "0.29.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93d03419cb5950ccfd3daf3ff1c7a36ace64609a1a8746d493df1ca0afde0fa" +dependencies = [ + "cssparser-macros", + "dtoa-short", + "itoa", + "matches", + "phf 0.10.1", + "proc-macro2", + "quote", + "smallvec", + "syn 1.0.109", +] + +[[package]] +name = "cssparser-macros" +version = "0.6.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" +dependencies = [ + "quote", + "syn 2.0.104", +] + +[[package]] +name = "ctor" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" +dependencies = [ + "quote", + "syn 2.0.104", +] + +[[package]] +name = "darling" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.104", +] + +[[package]] +name = "darling_macro" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "data-url" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a" + +[[package]] +name = "deranged" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "derive_arbitrary" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = 
"derive_more" +version = "0.99.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn 2.0.104", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "dirs" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" +dependencies = [ + "libc", + "option-ext", + "redox_users", + "windows-sys 0.60.2", +] + +[[package]] +name = "dispatch" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b" + +[[package]] +name = "dispatch2" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" +dependencies = [ + "bitflags 2.9.1", + "block2 0.6.1", + "libc", + "objc2 0.6.1", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "dlib" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"330c60081dcc4c72131f8eb70510f1ac07223e5d4163db481a04a0befcffa412" +dependencies = [ + "libloading 0.8.8", +] + +[[package]] +name = "dlopen2" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1297103d2bbaea85724fcee6294c2d50b1081f9ad47d0f6f6f61eda65315a6" +dependencies = [ + "dlopen2_derive", + "libc", + "once_cell", + "winapi", +] + +[[package]] +name = "dlopen2_derive" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "788160fb30de9cdd857af31c6a2675904b16ece8fc2737b2c7127ba368c9d0f4" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "dlv-list" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" +dependencies = [ + "const-random", +] + +[[package]] +name = "document-features" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95249b50c6c185bee49034bcb378a49dc2b5dff0be90ff6616d31d64febab05d" +dependencies = [ + "litrs", +] + +[[package]] +name = "downcast-rs" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2" + +[[package]] +name = "dpi" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8b14ccef22fc6f5a8f4d7d768562a182c04ce9a3b3157b91390b52ddfdf1a76" +dependencies = [ + "serde", +] + +[[package]] +name = "dtoa" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6add3b8cff394282be81f3fc1a0605db594ed69890078ca6e2cab1c408bcf04" + +[[package]] +name = "dtoa-short" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd1511a7b6a56299bd043a9c167a6d2bfb37bf84a6dfceaba651168adfb43c87" +dependencies = [ + "dtoa", +] + +[[package]] 
+name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "dyn-clone" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" + +[[package]] +name = "embed-resource" +version = "3.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6d81016d6c977deefb2ef8d8290da019e27cc26167e102185da528e6c0ab38" +dependencies = [ + "cc", + "memchr", + "rustc_version", + "toml 0.9.5", + "vswhom", + "winreg 0.55.0", +] + +[[package]] +name = "embed_plist" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ef6b89e5b37196644d8796de5268852ff179b44e96276cf4290264843743bb7" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "endi" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3d8a32ae18130a3c84dd492d4215c3d913c3b07c6b63c2eb3eb7ff1101ab7bf" + +[[package]] +name = "enumflags2" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1027f7680c853e056ebcec683615fb6fbbc07dbaa13b4d5d9442b146ded4ecef" +dependencies = [ + "enumflags2_derive", + "serde", +] + +[[package]] +name = "enumflags2_derive" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "env" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bc95de49ad098572c02d3fbf368c9a020bfff5ae78483685b77f51d8a7e9486d" +dependencies = [ + "num_threads", +] + +[[package]] +name = "env_filter" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "erased-serde" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7" +dependencies = [ + "serde", + "typeid", +] + +[[package]] +name = "errno" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +dependencies = [ + "event-listener", + "pin-project-lite", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "fdeflate" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6853b52649d4ac5c0bd02320cddc5ba956bdb407c4b75a2c6b75bf51500f8c" 
+dependencies = [ + "simd-adler32", +] + +[[package]] +name = "fern" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4316185f709b23713e41e3195f90edef7fb00c3ed4adc79769cf09cc762a3b29" +dependencies = [ + "log", +] + +[[package]] +name = "field-offset" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38e2275cc4e4fc009b0669731a1e5ab7ebf11f469eaede2bab9309a5b4d6057f" +dependencies = [ + "memoffset", + "rustc_version", +] + +[[package]] +name = "filetime" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" +dependencies = [ + "cfg-if", + "libc", + "libredox", + "windows-sys 0.59.0", +] + +[[package]] +name = "fix-path-env" +version = "0.0.0" +source = "git+https://github.com/tauri-apps/fix-path-env-rs#0e479e2804edc1a7e5f15ece2b48ee30858c2838" +dependencies = [ + "home", + "strip-ansi-escapes", + "thiserror 1.0.69", +] + +[[package]] +name = "flate2" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared 0.1.1", +] + +[[package]] +name = "foreign-types" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d737d9aa519fb7b749cbc3b962edcf310a8dd1f4b67c91c4f83975dbdd17d965" +dependencies = [ + "foreign-types-macros", + 
"foreign-types-shared 0.3.1", +] + +[[package]] +name = "foreign-types-macros" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "foreign-types-shared" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futf" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" +dependencies = [ + "mac", + "new_debug_unreachable", +] + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + 
"futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-lite" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", 
+ "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "gdk" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9f245958c627ac99d8e529166f9823fb3b838d1d41fd2b297af3075093c2691" +dependencies = [ + "cairo-rs", + "gdk-pixbuf", + "gdk-sys", + "gio", + "glib", + "libc", + "pango", +] + +[[package]] +name = "gdk-pixbuf" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50e1f5f1b0bfb830d6ccc8066d18db35c487b1b2b1e8589b5dfe9f07e8defaec" +dependencies = [ + "gdk-pixbuf-sys", + "gio", + "glib", + "libc", + "once_cell", +] + +[[package]] +name = "gdk-pixbuf-sys" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9839ea644ed9c97a34d129ad56d38a25e6756f99f3a88e15cd39c20629caf7" +dependencies = [ + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "gdk-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c2d13f38594ac1e66619e188c6d5a1adb98d11b2fcf7894fc416ad76aa2f3f7" +dependencies = [ + "cairo-sys-rs", + "gdk-pixbuf-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "pango-sys", + "pkg-config", + "system-deps", +] + +[[package]] +name = "gdkwayland-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "140071d506d223f7572b9f09b5e155afbd77428cd5cc7af8f2694c41d98dfe69" +dependencies = [ + "gdk-sys", + "glib-sys", + "gobject-sys", + "libc", + "pkg-config", + "system-deps", +] + +[[package]] +name = "gdkx11" +version = "0.18.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3caa00e14351bebbc8183b3c36690327eb77c49abc2268dd4bd36b856db3fbfe" +dependencies = [ + "gdk", + "gdkx11-sys", + "gio", + "glib", + "libc", + "x11", +] + +[[package]] +name = "gdkx11-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e2e7445fe01ac26f11601db260dd8608fe172514eb63b3b5e261ea6b0f4428d" +dependencies = [ + "gdk-sys", + "glib-sys", + "libc", + "system-deps", + "x11", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "gethostname" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc257fdb4038301ce4b9cd1b3b51704509692bb3ff716a410cbd07925d9dae55" +dependencies = [ + "rustix", + "windows-targets 0.52.6", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", + "wasm-bindgen", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "gio" +version = "0.18.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4fc8f532f87b79cbc51a79748f16a6828fb784be93145a322fa14d06d354c73" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "gio-sys", + "glib", + "libc", + "once_cell", + "pin-project-lite", + "smallvec", + "thiserror 1.0.69", +] + +[[package]] +name = "gio-sys" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37566df850baf5e4cb0dfb78af2e4b9898d817ed9263d1090a2df958c64737d2" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", + "winapi", +] + +[[package]] +name = "glib" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233daaf6e83ae6a12a52055f568f9d7cf4671dabb78ff9560ab6da230ce00ee5" +dependencies = [ + "bitflags 2.9.1", + "futures-channel", + "futures-core", + "futures-executor", + "futures-task", + "futures-util", + "gio-sys", + "glib-macros", + "glib-sys", + "gobject-sys", + "libc", + "memchr", + "once_cell", + "smallvec", + "thiserror 1.0.69", +] + +[[package]] +name = "glib-macros" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bb0228f477c0900c880fd78c8759b95c7636dbd7842707f49e132378aa2acdc" +dependencies = [ + "heck 0.4.1", + "proc-macro-crate 2.0.0", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "glib-sys" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063ce2eb6a8d0ea93d2bf8ba1957e78dbab6be1c2220dd3daca57d5a9d869898" +dependencies = [ + "libc", + "system-deps", +] + +[[package]] +name = "glob" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" + +[[package]] +name = "gobject-sys" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0850127b514d1c4a4654ead6dedadb18198999985908e6ffe4436f53c785ce44" +dependencies = [ + "glib-sys", + "libc", + "system-deps", +] + +[[package]] +name = "gtk" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd56fb197bfc42bd5d2751f4f017d44ff59fbb58140c6b49f9b3b2bdab08506a" +dependencies = [ + "atk", + "cairo-rs", + "field-offset", + "futures-channel", + "gdk", + "gdk-pixbuf", + "gio", + "glib", + "gtk-sys", + "gtk3-macros", + "libc", + "pango", + "pkg-config", +] + +[[package]] +name = "gtk-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f29a1c21c59553eb7dd40e918be54dccd60c52b049b75119d5d96ce6b624414" +dependencies = [ + "atk-sys", + "cairo-sys-rs", + "gdk-pixbuf-sys", + "gdk-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "pango-sys", + "system-deps", +] + +[[package]] +name = "gtk3-macros" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52ff3c5b21f14f0736fed6dcfc0bfb4225ebf5725f3c0209edeec181e4d73e9d" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "h2" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.10.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" +dependencies = [ + 
"atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.3.1", + "indexmap 2.10.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hashbrown" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" 
+dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "html5ever" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b7410cae13cbc75623c98ac4cbfd1f0bedddf3227afc24f370cf0f50a44a11c" +dependencies = [ + "log", + "mac", + "markup5ever", + "match_token", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.3.1", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http 1.3.1", + "http-body 1.0.1", + "pin-project-lite", +] + +[[package]] +name = "http-range" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21dec9db110f5f872ed9699c3ecf50cf16f423502706ba5c72462e28d3157573" + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.3.27", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.5.10", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.12", + "http 1.3.1", + "http-body 1.0.1", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http 1.3.1", + "hyper 1.6.0", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", + "webpki-roots", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper 0.14.32", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "hyper-util" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "hyper 1.6.0", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2 0.6.0", + "system-configuration 0.6.1", + "tokio", + "tower-service", + "tracing", + "windows-registry", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core 0.61.2", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ico" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc50b891e4acf8fe0e71ef88ec43ad82ee07b3810ad09de10f1d01f072ed4b98" +dependencies = [ + "byteorder", + "png", +] + +[[package]] +name = "icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +dependencies = [ + "displaydoc", + "icu_locale_core", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +dependencies = [ + "equivalent", + "hashbrown 0.15.4", + "serde", +] + +[[package]] +name = "infer" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a588916bfdfd92e71cacef98a63d9b1f0d74d6599980d11894290e7ddefffcf7" +dependencies = [ + "cfb", +] + +[[package]] +name = "io-uring" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" +dependencies = [ + "bitflags 2.9.1", + "cfg-if", + "libc", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is-docker" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "928bae27f42bc99b60d9ac7334e3a21d10ad8f1835a4e12ec3ec0464765ed1b3" +dependencies = [ + "once_cell", +] + +[[package]] +name = "is-wsl" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"173609498df190136aa7dea1a91db051746d339e18476eed5ca40521f02d7aa5" +dependencies = [ + "is-docker", + "once_cell", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "jan-utils" +version = "0.1.0" +dependencies = [ + "base64 0.22.1", + "hmac", + "rand 0.8.5", + "reqwest 0.11.27", + "serde", + "serde_json", + "sha2", + "tokio", + "url", +] + +[[package]] +name = "javascriptcore-rs" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca5671e9ffce8ffba57afc24070e906da7fc4b1ba66f2cabebf61bf2ea257fcc" +dependencies = [ + "bitflags 1.3.2", + "glib", + "javascriptcore-rs-sys", +] + +[[package]] +name = "javascriptcore-rs-sys" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af1be78d14ffa4b75b66df31840478fef72b51f8c2465d4ca7c194da9f7a5124" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "json-patch" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"863726d7afb6bc2590eeff7135d923545e5e964f004c2ccf8716c25e70a86f08" +dependencies = [ + "jsonptr", + "serde", + "serde_json", + "thiserror 1.0.69", +] + +[[package]] +name = "jsonptr" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dea2b27dd239b2556ed7a25ba842fe47fd602e7fc7433c2a8d6106d4d9edd70" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "keyboard-types" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b750dcadc39a09dbadd74e118f6dd6598df77fa01df0cfcdc52c28dece74528a" +dependencies = [ + "bitflags 2.9.1", + "serde", + "unicode-segmentation", +] + +[[package]] +name = "kuchikiki" +version = "0.8.8-speedreader" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02cb977175687f33fa4afa0c95c112b987ea1443e5a51c8f8ff27dc618270cc2" +dependencies = [ + "cssparser", + "html5ever", + "indexmap 2.10.0", + "selectors", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libappindicator" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03589b9607c868cc7ae54c0b2a22c8dc03dd41692d48f2d7df73615c6a95dc0a" +dependencies = [ + "glib", + "gtk", + "gtk-sys", + "libappindicator-sys", + "log", +] + +[[package]] +name = "libappindicator-sys" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e9ec52138abedcc58dc17a7c6c0c00a2bdb4f3427c7f63fa97fd0d859155caf" +dependencies = [ + "gtk-sys", + "libloading 0.7.4", + "once_cell", +] + +[[package]] +name = "libc" +version = "0.2.174" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" + +[[package]] +name = "libloading" +version = "0.7.4" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if", + "winapi", +] + +[[package]] +name = "libloading" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" +dependencies = [ + "cfg-if", + "windows-targets 0.53.3", +] + +[[package]] +name = "libredox" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3" +dependencies = [ + "bitflags 2.9.1", + "libc", + "redox_syscall", +] + +[[package]] +name = "linux-raw-sys" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" + +[[package]] +name = "litemap" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" + +[[package]] +name = "litrs" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed" + +[[package]] +name = "lock_api" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" +dependencies = [ + "value-bag", +] + +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + 
+[[package]] +name = "mac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" + +[[package]] +name = "markup5ever" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7a7213d12e1864c0f002f52c2923d4556935a43dec5e71355c2760e0f6e7a18" +dependencies = [ + "log", + "phf 0.11.3", + "phf_codegen 0.11.3", + "string_cache", + "string_cache_codegen", + "tendril", +] + +[[package]] +name = "match_token" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a9689d8d44bf9964484516275f5cd4c9b59457a6940c1d5d0ecbb94510a36b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minisign-verify" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e856fdd13623a2f5f2f54676a4ee49502a96a80ef4a62bcedd23d52427c44d43" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "mio" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +dependencies = [ + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", +] + +[[package]] +name = "muda" +version = "0.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01c1738382f66ed56b3b9c8119e794a2e23148ac8ea214eda86622d4cb9d415a" +dependencies = [ + "crossbeam-channel", + "dpi", + "gtk", + "keyboard-types", + "objc2 0.6.1", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation 0.3.1", + "once_cell", + "png", + "serde", + "thiserror 2.0.12", + "windows-sys 0.60.2", +] + +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "ndk" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3f42e7bbe13d351b6bead8286a43aac9534b82bd3cc43e47037f012ebfd62d4" +dependencies = [ + "bitflags 2.9.1", + "jni-sys", + "log", + "ndk-sys", + "num_enum", + "raw-window-handle", + "thiserror 1.0.69", +] + +[[package]] +name = "ndk-context" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" + +[[package]] +name = "ndk-sys" +version = "0.6.0+11769913" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee6cda3051665f1fb8d9e08fc35c96d5a244fb1be711a03b71118828afc9a873" +dependencies = [ + "jni-sys", +] + 
+[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "nix" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" +dependencies = [ + "bitflags 2.9.1", + "cfg-if", + "cfg_aliases", + "libc", + "memoffset", +] + +[[package]] +name = "nodrop" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + +[[package]] +name = "ntapi" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4" +dependencies = [ + "winapi", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_enum" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a973b4e44ce6cad84ce69d797acf9a044532e4184c4f267913d1b546a0727b7a" +dependencies = [ + "num_enum_derive", + "rustversion", +] + +[[package]] +name = "num_enum_derive" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77e878c846a8abae00dd069496dbe8751b16ac1c3d6bd2a7283a938e8228f90d" +dependencies = [ + "proc-macro-crate 3.3.0", + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "num_threads" +version = "0.1.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" +dependencies = [ + "libc", +] + +[[package]] +name = "nvml-wrapper" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c9bff0aa1d48904a1385ea2a8b97576fbdcbc9a3cfccd0d31fe978e1c4038c5" +dependencies = [ + "bitflags 2.9.1", + "libloading 0.8.8", + "nvml-wrapper-sys", + "static_assertions", + "thiserror 1.0.69", + "wrapcenum-derive", +] + +[[package]] +name = "nvml-wrapper-sys" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "698d45156f28781a4e79652b6ebe2eaa0589057d588d3aec1333f6466f13fcb5" +dependencies = [ + "libloading 0.8.8", +] + +[[package]] +name = "objc-sys" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb91bdd390c7ce1a8607f35f3ca7151b65afc0ff5ff3b34fa350f7d7c7e4310" + +[[package]] +name = "objc2" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46a785d4eeff09c14c487497c162e92766fbb3e4059a71840cecc03d9a50b804" +dependencies = [ + "objc-sys", + "objc2-encode", +] + +[[package]] +name = "objc2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88c6597e14493ab2e44ce58f2fdecf095a51f12ca57bec060a11c57332520551" +dependencies = [ + "objc2-encode", + "objc2-exception-helper", +] + +[[package]] +name = "objc2-app-kit" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6f29f568bec459b0ddff777cec4fe3fd8666d82d5a40ebd0ff7e66134f89bcc" +dependencies = [ + "bitflags 2.9.1", + "block2 0.6.1", + "libc", + "objc2 0.6.1", + "objc2-cloud-kit", + "objc2-core-data", + "objc2-core-foundation", + "objc2-core-graphics", + "objc2-core-image", + "objc2-foundation 0.3.1", + "objc2-quartz-core 0.3.1", +] + +[[package]] +name = "objc2-cloud-kit" +version = "0.3.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17614fdcd9b411e6ff1117dfb1d0150f908ba83a7df81b1f118005fe0a8ea15d" +dependencies = [ + "bitflags 2.9.1", + "objc2 0.6.1", + "objc2-foundation 0.3.1", +] + +[[package]] +name = "objc2-core-data" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291fbbf7d29287518e8686417cf7239c74700fd4b607623140a7d4a3c834329d" +dependencies = [ + "bitflags 2.9.1", + "objc2 0.6.1", + "objc2-foundation 0.3.1", +] + +[[package]] +name = "objc2-core-foundation" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c10c2894a6fed806ade6027bcd50662746363a9589d3ec9d9bef30a4e4bc166" +dependencies = [ + "bitflags 2.9.1", + "dispatch2", + "objc2 0.6.1", +] + +[[package]] +name = "objc2-core-graphics" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "989c6c68c13021b5c2d6b71456ebb0f9dc78d752e86a98da7c716f4f9470f5a4" +dependencies = [ + "bitflags 2.9.1", + "dispatch2", + "objc2 0.6.1", + "objc2-core-foundation", + "objc2-io-surface", +] + +[[package]] +name = "objc2-core-image" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79b3dc0cc4386b6ccf21c157591b34a7f44c8e75b064f85502901ab2188c007e" +dependencies = [ + "objc2 0.6.1", + "objc2-foundation 0.3.1", +] + +[[package]] +name = "objc2-encode" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" + +[[package]] +name = "objc2-exception-helper" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7a1c5fbb72d7735b076bb47b578523aedc40f3c439bea6dfd595c089d79d98a" +dependencies = [ + "cc", +] + +[[package]] +name = "objc2-foundation" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0ee638a5da3799329310ad4cfa62fbf045d5f56e3ef5ba4149e7452dcf89d5a8" +dependencies = [ + "bitflags 2.9.1", + "block2 0.5.1", + "libc", + "objc2 0.5.2", +] + +[[package]] +name = "objc2-foundation" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900831247d2fe1a09a683278e5384cfb8c80c79fe6b166f9d14bfdde0ea1b03c" +dependencies = [ + "bitflags 2.9.1", + "block2 0.6.1", + "libc", + "objc2 0.6.1", + "objc2-core-foundation", +] + +[[package]] +name = "objc2-io-surface" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7282e9ac92529fa3457ce90ebb15f4ecbc383e8338060960760fa2cf75420c3c" +dependencies = [ + "bitflags 2.9.1", + "objc2 0.6.1", + "objc2-core-foundation", +] + +[[package]] +name = "objc2-metal" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd0cba1276f6023976a406a14ffa85e1fdd19df6b0f737b063b95f6c8c7aadd6" +dependencies = [ + "bitflags 2.9.1", + "block2 0.5.1", + "objc2 0.5.2", + "objc2-foundation 0.2.2", +] + +[[package]] +name = "objc2-osa-kit" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26bb88504b5a050dbba515d2414607bf5e57dd56b107bc5f0351197a3e7bdc5d" +dependencies = [ + "bitflags 2.9.1", + "objc2 0.6.1", + "objc2-app-kit", + "objc2-foundation 0.3.1", +] + +[[package]] +name = "objc2-quartz-core" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e42bee7bff906b14b167da2bac5efe6b6a07e6f7c0a21a7308d40c960242dc7a" +dependencies = [ + "bitflags 2.9.1", + "block2 0.5.1", + "objc2 0.5.2", + "objc2-foundation 0.2.2", + "objc2-metal", +] + +[[package]] +name = "objc2-quartz-core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90ffb6a0cd5f182dc964334388560b12a57f7b74b3e2dec5e2722aa2dfb2ccd5" +dependencies = [ + "bitflags 2.9.1", + "objc2 0.6.1", + "objc2-foundation 0.3.1", +] + 
+[[package]] +name = "objc2-ui-kit" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25b1312ad7bc8a0e92adae17aa10f90aae1fb618832f9b993b022b591027daed" +dependencies = [ + "bitflags 2.9.1", + "objc2 0.6.1", + "objc2-core-foundation", + "objc2-foundation 0.3.1", +] + +[[package]] +name = "objc2-web-kit" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91672909de8b1ce1c2252e95bbee8c1649c9ad9d14b9248b3d7b4c47903c47ad" +dependencies = [ + "bitflags 2.9.1", + "block2 0.6.1", + "objc2 0.6.1", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation 0.3.1", +] + +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "open" +version = "5.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2483562e62ea94312f3576a7aca397306df7990b8d89033e18766744377ef95" +dependencies = [ + "dunce", + "is-wsl", + "libc", + "pathdiff", +] + +[[package]] +name = "openssl" +version = "0.10.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" +dependencies = [ + "bitflags 2.9.1", + "cfg-if", + "foreign-types 0.3.2", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = 
"openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-sys" +version = "0.9.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + +[[package]] +name = "ordered-multimap" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" +dependencies = [ + "dlv-list", + "hashbrown 0.14.5", +] + +[[package]] +name = "ordered-stream" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aa2b01e1d916879f73a53d01d1d6cee68adbb31d6d9177a8cfce093cced1d50" +dependencies = [ + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "os_info" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0e1ac5fde8d43c34139135df8ea9ee9465394b2d8d20f032d38998f64afffc3" +dependencies = [ + "log", + "plist", + "serde", + "windows-sys 0.52.0", +] + +[[package]] +name = "os_pipe" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db335f4760b14ead6290116f2427bf33a14d4f0617d49f78a246de10c1831224" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "osakit" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "732c71caeaa72c065bb69d7ea08717bd3f4863a4f451402fc9513e29dbd5261b" +dependencies = [ + "objc2 0.6.1", + "objc2-foundation 0.3.1", + "objc2-osa-kit", + "serde", + 
"serde_json", + "thiserror 2.0.12", +] + +[[package]] +name = "pango" +version = "0.18.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ca27ec1eb0457ab26f3036ea52229edbdb74dee1edd29063f5b9b010e7ebee4" +dependencies = [ + "gio", + "glib", + "libc", + "once_cell", + "pango-sys", +] + +[[package]] +name = "pango-sys" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436737e391a843e5933d6d9aa102cb126d501e815b83601365a948a518555dc5" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "pathdiff" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "phf" +version 
= "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" +dependencies = [ + "phf_shared 0.8.0", +] + +[[package]] +name = "phf" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" +dependencies = [ + "phf_macros 0.10.0", + "phf_shared 0.10.0", + "proc-macro-hack", +] + +[[package]] +name = "phf" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" +dependencies = [ + "phf_macros 0.11.3", + "phf_shared 0.11.3", +] + +[[package]] +name = "phf_codegen" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815" +dependencies = [ + "phf_generator 0.8.0", + "phf_shared 0.8.0", +] + +[[package]] +name = "phf_codegen" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" +dependencies = [ + "phf_generator 0.11.3", + "phf_shared 0.11.3", +] + +[[package]] +name = "phf_generator" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526" +dependencies = [ + "phf_shared 0.8.0", + "rand 0.7.3", +] + +[[package]] +name = "phf_generator" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" +dependencies = [ + "phf_shared 0.10.0", + "rand 0.8.5", +] + +[[package]] +name = "phf_generator" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" +dependencies = [ + "phf_shared 0.11.3", + "rand 0.8.5", +] + +[[package]] +name = "phf_macros" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58fdf3184dd560f160dd73922bea2d5cd6e8f064bf4b13110abd81b03697b4e0" +dependencies = [ + "phf_generator 0.10.0", + "phf_shared 0.10.0", + "proc-macro-hack", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "phf_macros" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" +dependencies = [ + "phf_generator 0.11.3", + "phf_shared 0.11.3", + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "phf_shared" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" +dependencies = [ + "siphasher 0.3.11", +] + +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher 0.3.11", +] + +[[package]] +name = "phf_shared" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher 1.0.1", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "piper" +version = "0.2.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "plist" +version = "1.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3af6b589e163c5a788fab00ce0c0366f6efbb9959c2f9874b224936af7fce7e1" +dependencies = [ + "base64 0.22.1", + "indexmap 2.10.0", + "quick-xml 0.38.1", + "serde", + "time", +] + +[[package]] +name = "png" +version = "0.17.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82151a2fc869e011c153adc57cf2789ccb8d9906ce52c0b39a6b5697749d7526" +dependencies = [ + "bitflags 1.3.2", + "crc32fast", + "fdeflate", + "flate2", + "miniz_oxide", +] + +[[package]] +name = "polling" +version = "3.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5bd19146350fe804f7cb2669c851c03d69da628803dab0d98018142aaa5d829" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi", + "pin-project-lite", + "rustix", + "windows-sys 0.60.2", +] + +[[package]] +name = "potential_utf" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + 
+[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit 0.19.15", +] + +[[package]] +name = "proc-macro-crate" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" +dependencies = [ + "toml_edit 0.20.7", +] + +[[package]] +name = "proc-macro-crate" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" +dependencies = [ + "toml_edit 0.22.27", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +dependencies = 
[ + "unicode-ident", +] + +[[package]] +name = "process-wrap" +version = "8.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3ef4f2f0422f23a82ec9f628ea2acd12871c81a9362b02c43c1aa86acfc3ba1" +dependencies = [ + "futures", + "indexmap 2.10.0", + "nix", + "tokio", + "tracing", + "windows 0.61.3", +] + +[[package]] +name = "psl-types" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33cb294fe86a74cbcf50d4445b37da762029549ebeea341421c7c70370f86cac" + +[[package]] +name = "ptr_meta" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" +dependencies = [ + "ptr_meta_derive", +] + +[[package]] +name = "ptr_meta_derive" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "publicsuffix" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42ea446cab60335f76979ec15e12619a2165b5ae2c12166bef27d283a9fadf" +dependencies = [ + "idna", + "psl-types", +] + +[[package]] +name = "quick-xml" +version = "0.37.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb" +dependencies = [ + "memchr", +] + +[[package]] +name = "quick-xml" +version = "0.38.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9845d9dccf565065824e69f9f235fafba1587031eda353c1f1561cd6a6be78f4" +dependencies = [ + "memchr", +] + +[[package]] +name = "quinn" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8" +dependencies = [ + "bytes", + 
"cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2 0.5.10", + "thiserror 2.0.12", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e" +dependencies = [ + "bytes", + "getrandom 0.3.3", + "lru-slab", + "rand 0.9.2", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.12", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcebb1209ee276352ef14ff8732e24cc2b02bbac986cd74a4c81bcb2f9881970" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2 0.5.10", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", + "rand_pcg", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.16", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.3", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_pcg" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "raw-window-handle" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539" + +[[package]] +name = "redox_syscall" +version = "0.5.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" +dependencies = [ + "bitflags 2.9.1", +] + +[[package]] +name = "redox_users" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" +dependencies = [ + "getrandom 0.2.16", + "libredox", + "thiserror 2.0.12", +] + +[[package]] +name = "ref-cast" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = 
"0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "rend" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71fe3824f5629716b1589be05dacd749f6aa084c87e00e016714a8cdfccc997c" +dependencies = [ + "bytecheck", +] + +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.3.27", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.32", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 0.1.2", + "system-configuration 0.5.1", + "tokio", + "tokio-native-tls", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "winreg 0.50.0", +] + +[[package]] +name = "reqwest" +version = "0.12.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531" +dependencies = [ + "base64 0.22.1", + "bytes", + "cookie", + "cookie_store", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.4.12", + "http 1.3.1", + "http-body 1.0.1", + "http-body-util", + "hyper 1.6.0", + "hyper-rustls", + "hyper-util", + "js-sys", + "log", + "mime", + "percent-encoding", + "pin-project-lite", + "quinn", + 
"rustls", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.2", + "tokio", + "tokio-rustls", + "tokio-util", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots", +] + +[[package]] +name = "rfd" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2bee61e6cffa4635c72d7d81a84294e28f0930db0ddcb0f66d10244674ebed" +dependencies = [ + "ashpd", + "block2 0.6.1", + "dispatch2", + "glib-sys", + "gobject-sys", + "gtk-sys", + "js-sys", + "log", + "objc2 0.6.1", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation 0.3.1", + "raw-window-handle", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "windows-sys 0.59.0", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.16", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rkyv" +version = "0.7.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b" +dependencies = [ + "bitvec", + "bytecheck", + "bytes", + "hashbrown 0.12.3", + "ptr_meta", + "rend", + "rkyv_derive", + "seahash", + "tinyvec", + "uuid", +] + +[[package]] +name = "rkyv_derive" +version = "0.7.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "rmcp" +version = "0.2.1" +source = "git+https://github.com/modelcontextprotocol/rust-sdk?rev=3196c95f1dfafbffbdcdd6d365c94969ac975e6a#3196c95f1dfafbffbdcdd6d365c94969ac975e6a" +dependencies = [ + "base64 0.22.1", + "chrono", + 
"futures", + "http 1.3.1", + "paste", + "pin-project-lite", + "process-wrap", + "reqwest 0.12.22", + "rmcp-macros", + "schemars 1.0.4", + "serde", + "serde_json", + "sse-stream", + "thiserror 2.0.12", + "tokio", + "tokio-stream", + "tokio-util", + "tower-service", + "tracing", +] + +[[package]] +name = "rmcp-macros" +version = "0.2.1" +source = "git+https://github.com/modelcontextprotocol/rust-sdk?rev=3196c95f1dfafbffbdcdd6d365c94969ac975e6a#3196c95f1dfafbffbdcdd6d365c94969ac975e6a" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "serde_json", + "syn 2.0.104", +] + +[[package]] +name = "rust-ini" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7295b7ce3bf4806b419dc3420745998b447178b7005e2011947b38fc5aa6791" +dependencies = [ + "cfg-if", + "ordered-multimap", +] + +[[package]] +name = "rust_decimal" +version = "1.37.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b203a6425500a03e0919c42d3c47caca51e79f1132046626d2c8871c5092035d" +dependencies = [ + "arrayvec", + "borsh", + "bytes", + "num-traits", + "rand 0.8.5", + "rkyv", + "serde", + "serde_json", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" + +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" +dependencies = [ + "bitflags 2.9.1", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.60.2", +] + +[[package]] +name = "rustls" +version = "0.23.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ebcbd2f03de0fc1122ad9bb24b127a5a6cd51d72604a3f3c50ac459762b6cc" +dependencies = [ + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pki-types" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +dependencies = [ + "web-time", + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.27" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "schemars" +version = "0.8.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" +dependencies = [ + "dyn-clone", + "indexmap 1.9.3", + "schemars_derive 0.8.22", + "serde", + "serde_json", + "url", + "uuid", +] + +[[package]] +name = "schemars" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schemars" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +dependencies = [ + "chrono", + "dyn-clone", + "ref-cast", + "schemars_derive 1.0.4", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "0.8.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.104", +] + +[[package]] +name = "schemars_derive" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.104", +] + +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "seahash" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.9.1", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "selectors" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c37578180969d00692904465fb7f6b3d50b9a2b952b87c23d0e2e5cb5013416" +dependencies = [ + "bitflags 1.3.2", + "cssparser", + "derive_more", + "fxhash", + "log", + "phf 0.8.0", + "phf_codegen 0.8.0", + "precomputed-hash", + "servo_arc", + "smallvec", +] + +[[package]] +name = "semver" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" +dependencies = [ + "serde", +] + +[[package]] +name = "serde" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-untagged" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "299d9c19d7d466db4ab10addd5703e4c615dec2a5a16dbbafe191045e87ee66e" 
+dependencies = [ + "erased-serde", + "serde", + "typeid", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "serde_json" +version = "1.0.142" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_repr" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_spanned" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.14.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5" +dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.10.0", + "schemars 0.9.0", + "schemars 1.0.4", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap 2.10.0", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "serialize-to-javascript" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9823f2d3b6a81d98228151fdeaf848206a7855a7a042bbf9bf870449a66cafb" +dependencies = [ + "serde", + "serde_json", + "serialize-to-javascript-impl", +] + +[[package]] +name = "serialize-to-javascript-impl" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74064874e9f6a15f04c1f3cb627902d0e6b410abbf36668afa873c61889f1763" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "servo_arc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52aa42f8fdf0fed91e5ce7f23d8138441002fa31dca008acf47e6fd4721f741" +dependencies = [ + "nodrop", + "stable_deref_trait", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + 
"cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "shared_child" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e362d9935bc50f019969e2f9ecd66786612daae13e8f277be7bfb66e8bed3f7" +dependencies = [ + "libc", + "sigchld", + "windows-sys 0.60.2", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "sigchld" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47106eded3c154e70176fc83df9737335c94ce22f821c32d17ed1db1f83badb1" +dependencies = [ + "libc", + "os_pipe", + "signal-hook", +] + +[[package]] +name = "signal-hook" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" +dependencies = [ + "libc", +] + +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + +[[package]] +name = "simdutf8" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" + +[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "slab" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "socket2" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "softbuffer" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18051cdd562e792cad055119e0cdb2cfc137e44e3987532e0f9659a77931bb08" +dependencies = [ + "bytemuck", + "cfg_aliases", + "core-graphics", + "foreign-types 0.5.0", + "js-sys", + "log", + "objc2 0.5.2", + "objc2-foundation 0.2.2", + "objc2-quartz-core 0.2.2", + "raw-window-handle", + "redox_syscall", + "wasm-bindgen", + "web-sys", + "windows-sys 0.59.0", +] + +[[package]] +name = "soup3" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "471f924a40f31251afc77450e781cb26d55c0b650842efafc9c6cbd2f7cc4f9f" +dependencies = [ + "futures-channel", + "gio", + "glib", + "libc", + "soup3-sys", +] + +[[package]] +name = "soup3-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ebe8950a680a12f24f15ebe1bf70db7af98ad242d9db43596ad3108aab86c27" +dependencies = [ + "gio-sys", + "glib-sys", + "gobject-sys", + 
"libc", + "system-deps", +] + +[[package]] +name = "sse-stream" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb4dc4d33c68ec1f27d386b5610a351922656e1fdf5c05bbaad930cd1519479a" +dependencies = [ + "bytes", + "futures-util", + "http-body 1.0.1", + "http-body-util", + "pin-project-lite", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "string_cache" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" +dependencies = [ + "new_debug_unreachable", + "parking_lot", + "phf_shared 0.11.3", + "precomputed-hash", + "serde", +] + +[[package]] +name = "string_cache_codegen" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0" +dependencies = [ + "phf_generator 0.11.3", + "phf_shared 0.11.3", + "proc-macro2", + "quote", +] + +[[package]] +name = "strip-ansi-escapes" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a8f8038e7e7969abb3f1b7c2a811225e9296da208539e0f79c5251d6cac0025" +dependencies = [ + "vte", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "swift-rs" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4057c98e2e852d51fdcfca832aac7b571f6b351ad159f9eda5db1655f8d0c4d7" +dependencies = [ + "base64 0.21.7", + "serde", + "serde_json", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "sys-locale" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eab9a99a024a169fe8a903cf9d4a3b3601109bcc13bd9e3c6fff259138626c4" +dependencies = [ + "libc", +] + +[[package]] +name = "sysinfo" +version = "0.34.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4b93974b3d3aeaa036504b8eefd4c039dced109171c1ae973f1dc63b2c7e4b2" 
+dependencies = [ + "libc", + "memchr", + "ntapi", + "objc2-core-foundation", + "windows 0.57.0", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation 0.9.4", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.9.1", + "core-foundation 0.9.4", + "system-configuration-sys 0.6.0", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "system-deps" +version = "6.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e535eb8dded36d55ec13eddacd30dec501792ff23a0b1682c38601b8cf2349" +dependencies = [ + "cfg-expr", + "heck 0.5.0", + "pkg-config", + "toml 0.8.23", + "version-compare", +] + +[[package]] +name = "tao" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49c380ca75a231b87b6c9dd86948f035012e7171d1a7c40a9c2890489a7ffd8a" +dependencies = [ + "bitflags 2.9.1", + "core-foundation 0.10.1", + "core-graphics", + "crossbeam-channel", + "dispatch", + "dlopen2", + "dpi", + "gdkwayland-sys", + "gdkx11-sys", + "gtk", + "jni", + "lazy_static", + "libc", + 
"log", + "ndk", + "ndk-context", + "ndk-sys", + "objc2 0.6.1", + "objc2-app-kit", + "objc2-foundation 0.3.1", + "once_cell", + "parking_lot", + "raw-window-handle", + "scopeguard", + "tao-macros", + "unicode-segmentation", + "url", + "windows 0.61.3", + "windows-core 0.61.2", + "windows-version", + "x11-dl", +] + +[[package]] +name = "tao-macros" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4e16beb8b2ac17db28eab8bca40e62dbfbb34c0fcdc6d9826b11b7b5d047dfd" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tar" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" +dependencies = [ + "filetime", + "libc", + "xattr", +] + +[[package]] +name = "target-lexicon" +version = "0.12.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" + +[[package]] +name = "tauri" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "352a4bc7bf6c25f5624227e3641adf475a6535707451b09bb83271df8b7a6ac7" +dependencies = [ + "anyhow", + "bytes", + "dirs", + "dunce", + "embed_plist", + "getrandom 0.3.3", + "glob", + "gtk", + "heck 0.5.0", + "http 1.3.1", + "http-range", + "jni", + "libc", + "log", + "mime", + "muda", + "objc2 0.6.1", + "objc2-app-kit", + "objc2-foundation 0.3.1", + "objc2-ui-kit", + "percent-encoding", + "plist", + "raw-window-handle", + "reqwest 0.12.22", + "serde", + "serde_json", + "serde_repr", + "serialize-to-javascript", + "swift-rs", + "tauri-build", + "tauri-macros", + "tauri-runtime", + "tauri-runtime-wry", + "tauri-utils", + "thiserror 2.0.12", 
+ "tokio", + "tray-icon", + "url", + "urlpattern", + "webkit2gtk", + "webview2-com", + "window-vibrancy", + "windows 0.61.3", +] + +[[package]] +name = "tauri-build" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "182d688496c06bf08ea896459bf483eb29cdff35c1c4c115fb14053514303064" +dependencies = [ + "anyhow", + "cargo_toml", + "dirs", + "glob", + "heck 0.5.0", + "json-patch", + "schemars 0.8.22", + "semver", + "serde", + "serde_json", + "tauri-utils", + "tauri-winres", + "toml 0.8.23", + "walkdir", +] + +[[package]] +name = "tauri-codegen" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b54a99a6cd8e01abcfa61508177e6096a4fe2681efecee9214e962f2f073ae4a" +dependencies = [ + "base64 0.22.1", + "ico", + "json-patch", + "plist", + "png", + "proc-macro2", + "quote", + "semver", + "serde", + "serde_json", + "sha2", + "syn 2.0.104", + "tauri-utils", + "thiserror 2.0.12", + "time", + "url", + "uuid", + "walkdir", +] + +[[package]] +name = "tauri-macros" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7945b14dc45e23532f2ded6e120170bbdd4af5ceaa45784a6b33d250fbce3f9e" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.104", + "tauri-codegen", + "tauri-utils", +] + +[[package]] +name = "tauri-plugin" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bd5c1e56990c70a906ef67a9851bbdba9136d26075ee9a2b19c8b46986b3e02" +dependencies = [ + "anyhow", + "glob", + "plist", + "schemars 0.8.22", + "serde", + "serde_json", + "tauri-utils", + "toml 0.8.23", + "walkdir", +] + +[[package]] +name = "tauri-plugin-deep-link" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fec67f32d7a06d80bd3dc009fdb678c35a66116d9cb8cd2bb32e406c2b5bbd2" +dependencies = [ + "dunce", + "rust-ini", + "serde", + "serde_json", + "tauri", + 
"tauri-plugin", + "tauri-utils", + "thiserror 2.0.12", + "tracing", + "url", + "windows-registry", + "windows-result 0.3.4", +] + +[[package]] +name = "tauri-plugin-dialog" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37e5858cc7b455a73ab4ea2ebc08b5be33682c00ff1bf4cad5537d4fb62499d9" +dependencies = [ + "log", + "raw-window-handle", + "rfd", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "tauri-plugin-fs", + "thiserror 2.0.12", + "url", +] + +[[package]] +name = "tauri-plugin-fs" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c6ef84ee2f2094ce093e55106d90d763ba343fad57566992962e8f76d113f99" +dependencies = [ + "anyhow", + "dunce", + "glob", + "percent-encoding", + "schemars 0.8.22", + "serde", + "serde_json", + "serde_repr", + "tauri", + "tauri-plugin", + "tauri-utils", + "thiserror 2.0.12", + "toml 0.8.23", + "url", +] + +[[package]] +name = "tauri-plugin-hardware" +version = "0.6.599" +dependencies = [ + "ash", + "libc", + "libloading 0.8.8", + "log", + "nvml-wrapper", + "serde", + "serde_json", + "sysinfo", + "tauri", + "tauri-plugin", +] + +[[package]] +name = "tauri-plugin-http" +version = "2.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcde333d97e565a7765aad82f32d8672458f7bd77b6ee653830d5dded9d7b5c2" +dependencies = [ + "bytes", + "cookie_store", + "data-url", + "http 1.3.1", + "regex", + "reqwest 0.12.22", + "schemars 0.8.22", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "tauri-plugin-fs", + "thiserror 2.0.12", + "tokio", + "url", + "urlpattern", +] + +[[package]] +name = "tauri-plugin-llamacpp" +version = "0.6.599" +dependencies = [ + "base64 0.22.1", + "byteorder", + "hmac", + "jan-utils", + "log", + "nix", + "rand 0.8.5", + "serde", + "sha2", + "sysinfo", + "tauri", + "tauri-plugin", + "thiserror 2.0.12", + "tokio", + "windows-sys 0.60.2", +] + +[[package]] +name = "tauri-plugin-log" +version 
= "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a59139183e0907cec1499dddee4e085f5a801dc659efa0848ee224f461371426" +dependencies = [ + "android_logger", + "byte-unit", + "fern", + "log", + "objc2 0.6.1", + "objc2-foundation 0.3.1", + "serde", + "serde_json", + "serde_repr", + "swift-rs", + "tauri", + "tauri-plugin", + "thiserror 2.0.12", + "time", +] + +[[package]] +name = "tauri-plugin-opener" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecee219f11cdac713ab32959db5d0cceec4810ba4f4458da992292ecf9660321" +dependencies = [ + "dunce", + "glob", + "objc2-app-kit", + "objc2-foundation 0.3.1", + "open", + "schemars 0.8.22", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "thiserror 2.0.12", + "url", + "windows 0.61.3", + "zbus", +] + +[[package]] +name = "tauri-plugin-os" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05bccb4c6de4299beec5a9b070878a01bce9e2c945aa7a75bcea38bcba4c675d" +dependencies = [ + "gethostname", + "log", + "os_info", + "serde", + "serde_json", + "serialize-to-javascript", + "sys-locale", + "tauri", + "tauri-plugin", + "thiserror 2.0.12", +] + +[[package]] +name = "tauri-plugin-shell" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b9ffadec5c3523f11e8273465cacb3d86ea7652a28e6e2a2e9b5c182f791d25" +dependencies = [ + "encoding_rs", + "log", + "open", + "os_pipe", + "regex", + "schemars 0.8.22", + "serde", + "serde_json", + "shared_child", + "tauri", + "tauri-plugin", + "thiserror 2.0.12", + "tokio", +] + +[[package]] +name = "tauri-plugin-single-instance" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50a0e5a4ce43cb3a733c3aef85e8478bc769dac743c615e26639cbf5d953faf7" +dependencies = [ + "serde", + "serde_json", + "tauri", + "tauri-plugin-deep-link", + "thiserror 2.0.12", + "tracing", + "windows-sys 
0.60.2", + "zbus", +] + +[[package]] +name = "tauri-plugin-store" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5916c609664a56c82aeaefffca9851fd072d4d41f73d63f22ee3ee451508194f" +dependencies = [ + "dunce", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "thiserror 2.0.12", + "tokio", + "tracing", +] + +[[package]] +name = "tauri-plugin-updater" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27cbc31740f4d507712550694749572ec0e43bdd66992db7599b89fbfd6b167b" +dependencies = [ + "base64 0.22.1", + "dirs", + "flate2", + "futures-util", + "http 1.3.1", + "infer", + "log", + "minisign-verify", + "osakit", + "percent-encoding", + "reqwest 0.12.22", + "semver", + "serde", + "serde_json", + "tar", + "tauri", + "tauri-plugin", + "tempfile", + "thiserror 2.0.12", + "time", + "tokio", + "url", + "windows-sys 0.60.2", + "zip", +] + +[[package]] +name = "tauri-runtime" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b1cc885be806ea15ff7b0eb47098a7b16323d9228876afda329e34e2d6c4676" +dependencies = [ + "cookie", + "dpi", + "gtk", + "http 1.3.1", + "jni", + "objc2 0.6.1", + "objc2-ui-kit", + "raw-window-handle", + "serde", + "serde_json", + "tauri-utils", + "thiserror 2.0.12", + "url", + "windows 0.61.3", +] + +[[package]] +name = "tauri-runtime-wry" +version = "2.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe653a2fbbef19fe898efc774bc52c8742576342a33d3d028c189b57eb1d2439" +dependencies = [ + "gtk", + "http 1.3.1", + "jni", + "log", + "objc2 0.6.1", + "objc2-app-kit", + "objc2-foundation 0.3.1", + "once_cell", + "percent-encoding", + "raw-window-handle", + "softbuffer", + "tao", + "tauri-runtime", + "tauri-utils", + "url", + "webkit2gtk", + "webview2-com", + "windows 0.61.3", + "wry", +] + +[[package]] +name = "tauri-utils" +version = "2.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330c15cabfe1d9f213478c9e8ec2b0c76dab26bb6f314b8ad1c8a568c1d186e" +dependencies = [ + "anyhow", + "cargo_metadata", + "ctor", + "dunce", + "glob", + "html5ever", + "http 1.3.1", + "infer", + "json-patch", + "kuchikiki", + "log", + "memchr", + "phf 0.11.3", + "proc-macro2", + "quote", + "regex", + "schemars 0.8.22", + "semver", + "serde", + "serde-untagged", + "serde_json", + "serde_with", + "swift-rs", + "thiserror 2.0.12", + "toml 0.8.23", + "url", + "urlpattern", + "uuid", + "walkdir", +] + +[[package]] +name = "tauri-winres" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c6d9028d41d4de835e3c482c677a8cb88137ac435d6ff9a71f392d4421576c9" +dependencies = [ + "embed-resource", + "indexmap 2.10.0", + "toml 0.9.5", +] + +[[package]] +name = "tempfile" +version = "3.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" +dependencies = [ + "fastrand", + "getrandom 0.3.3", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "tendril" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" +dependencies = [ + "futf", + "mac", + "utf-8", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +dependencies = [ + "thiserror-impl 2.0.12", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "time" +version = "0.3.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +dependencies = [ + "deranged", + "itoa", + "libc", + "num-conv", + "num_threads", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" + +[[package]] +name = "time-macros" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinystr" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +dependencies = [ + "tinyvec_macros", +] + +[[package]] 
+name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.47.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" +dependencies = [ + "backtrace", + "bytes", + "io-uring", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "slab", + "socket2 0.6.0", + "tokio-macros", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "tokio-macros" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version 
= "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_edit 0.22.27", +] + +[[package]] +name = "toml" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75129e1dc5000bfbaa9fee9d1b21f974f9fbad9daec557a521ee6e080825f6e8" +dependencies = [ + "indexmap 2.10.0", + "serde", + "serde_spanned 1.0.0", + "toml_datetime 0.7.0", + "toml_parser", + "toml_writer", + "winnow 0.7.12", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.19.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +dependencies = [ + "indexmap 2.10.0", + "toml_datetime 0.6.11", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.20.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" +dependencies = [ + "indexmap 2.10.0", + "toml_datetime 0.6.11", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap 2.10.0", + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_write", + "winnow 0.7.12", +] + 
+[[package]] +name = "toml_parser" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b551886f449aa90d4fe2bdaa9f4a2577ad2dde302c61ecf262d80b116db95c10" +dependencies = [ + "winnow 0.7.12", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "toml_writer" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64" + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper 1.0.2", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags 2.9.1", + "bytes", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + 
"pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "tracing-core" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +dependencies = [ + "once_cell", +] + +[[package]] +name = "tray-icon" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0d92153331e7d02ec09137538996a7786fe679c629c279e82a6be762b7e6fe2" +dependencies = [ + "crossbeam-channel", + "dirs", + "libappindicator", + "muda", + "objc2 0.6.1", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-core-graphics", + "objc2-foundation 0.3.1", + "once_cell", + "png", + "serde", + "thiserror 2.0.12", + "windows-sys 0.59.0", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typeid" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" + +[[package]] +name = "typenum" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" + +[[package]] +name = "uds_windows" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9" +dependencies = [ + "memoffset", + "tempfile", + "winapi", +] + +[[package]] +name = "unic-char-property" +version = "0.9.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8c57a407d9b6fa02b4795eb81c5b6652060a15a7903ea981f3d723e6c0be221" +dependencies = [ + "unic-char-range", +] + +[[package]] +name = "unic-char-range" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0398022d5f700414f6b899e10b8348231abf9173fa93144cbc1a43b9793c1fbc" + +[[package]] +name = "unic-common" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d7ff825a6a654ee85a63e80f92f054f904f21e7d12da4e22f9834a4aaa35bc" + +[[package]] +name = "unic-ucd-ident" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e230a37c0381caa9219d67cf063aa3a375ffed5bf541a452db16e744bdab6987" +dependencies = [ + "unic-char-property", + "unic-char-range", + "unic-ucd-version", +] + +[[package]] +name = "unic-ucd-version" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96bd2f2237fe450fcd0a1d2f5f4e91711124f7857ba2e964247776ebeeb7b0c4" +dependencies = [ + "unic-common", +] + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "urlpattern" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70acd30e3aa1450bc2eece896ce2ad0d178e9c079493819301573dae3c37ba6d" +dependencies = [ + "regex", + "serde", + "unic-ucd-ident", + "url", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8-width" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "uuid" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" +dependencies = [ + "getrandom 0.3.3", + "js-sys", + "serde", + "wasm-bindgen", +] + +[[package]] +name = "value-bag" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version-compare" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852e951cb7832cb45cb1169900d19760cfa39b82bc0ea9c0e5a14ae88411c98b" + +[[package]] +name = 
"version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "vswhom" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be979b7f07507105799e854203b470ff7c78a1639e330a58f183b5fea574608b" +dependencies = [ + "libc", + "vswhom-sys", +] + +[[package]] +name = "vswhom-sys" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb067e4cbd1ff067d1df46c9194b5de0e98efd2810bbc95c5d5e5f25a3231150" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "vte" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "231fdcd7ef3037e8330d8e17e61011a2c244126acc0a982f4040ac3f9f0bc077" +dependencies = [ + "memchr", +] + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasi" +version = "0.14.2+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.104", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wayland-backend" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673a33c33048a5ade91a6b139580fa174e19fb0d23f396dca9fa15f2e1e49b35" +dependencies = [ + "cc", + "downcast-rs", + "rustix", + "scoped-tls", + "smallvec", + "wayland-sys", +] + +[[package]] +name = "wayland-client" +version = "0.31.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c66a47e840dc20793f2264eb4b3e4ecb4b75d91c0dd4af04b456128e0bdd449d" +dependencies = [ + "bitflags 2.9.1", + "rustix", + "wayland-backend", + "wayland-scanner", +] + +[[package]] +name = "wayland-protocols" +version = "0.32.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efa790ed75fbfd71283bd2521a1cfdc022aabcc28bdcff00851f9e4ae88d9901" +dependencies = [ + "bitflags 2.9.1", + "wayland-backend", + "wayland-client", + "wayland-scanner", +] + +[[package]] +name = "wayland-scanner" +version = "0.31.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54cb1e9dc49da91950bdfd8b848c49330536d9d1fb03d4bfec8cae50caa50ae3" +dependencies = [ + "proc-macro2", + "quick-xml 0.37.5", + "quote", +] + +[[package]] +name = "wayland-sys" +version = "0.31.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34949b42822155826b41db8e5d0c1be3a2bd296c747577a43a3e6daefc296142" +dependencies = [ + "dlib", + "log", + "pkg-config", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = 
"1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webkit2gtk" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76b1bc1e54c581da1e9f179d0b38512ba358fb1af2d634a1affe42e37172361a" +dependencies = [ + "bitflags 1.3.2", + "cairo-rs", + "gdk", + "gdk-sys", + "gio", + "gio-sys", + "glib", + "glib-sys", + "gobject-sys", + "gtk", + "gtk-sys", + "javascriptcore-rs", + "libc", + "once_cell", + "soup3", + "webkit2gtk-sys", +] + +[[package]] +name = "webkit2gtk-sys" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62daa38afc514d1f8f12b8693d30d5993ff77ced33ce30cd04deebc267a6d57c" +dependencies = [ + "bitflags 1.3.2", + "cairo-sys-rs", + "gdk-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "gtk-sys", + "javascriptcore-rs-sys", + "libc", + "pkg-config", + "soup3-sys", + "system-deps", +] + +[[package]] +name = "webpki-roots" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "webview2-com" +version = "0.38.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4ba622a989277ef3886dd5afb3e280e3dd6d974b766118950a08f8f678ad6a4" +dependencies = [ + "webview2-com-macros", + "webview2-com-sys", + "windows 0.61.3", + "windows-core 0.61.2", + "windows-implement 0.60.0", + "windows-interface 0.59.1", +] + +[[package]] +name = "webview2-com-macros" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d228f15bba3b9d56dde8bddbee66fa24545bd17b48d5128ccf4a8742b18e431" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = 
"webview2-com-sys" +version = "0.38.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36695906a1b53a3bf5c4289621efedac12b73eeb0b89e7e1a89b517302d5d75c" +dependencies = [ + "thiserror 2.0.12", + "windows 0.61.3", + "windows-core 0.61.2", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "window-vibrancy" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9bec5a31f3f9362f2258fd0e9c9dd61a9ca432e7306cc78c444258f0dce9a9c" +dependencies = [ + "objc2 0.6.1", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation 0.3.1", + "raw-window-handle", + "windows-sys 0.59.0", + "windows-version", +] + +[[package]] +name = "windows" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" +dependencies = [ + "windows-core 0.57.0", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows" +version = "0.61.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" +dependencies = [ + "windows-collections", + "windows-core 0.61.2", + "windows-future", + "windows-link", + "windows-numerics", +] + +[[package]] +name = "windows-collections" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" +dependencies = [ + "windows-core 0.61.2", +] + +[[package]] +name = "windows-core" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" +dependencies = [ + "windows-implement 0.57.0", + "windows-interface 0.57.0", + "windows-result 0.1.2", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-core" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement 0.60.0", + "windows-interface 0.59.1", + "windows-link", + "windows-result 0.3.4", + "windows-strings", +] + +[[package]] +name = "windows-future" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" +dependencies = [ + "windows-core 0.61.2", + "windows-link", + "windows-threading", +] + +[[package]] +name = "windows-implement" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ 
+ "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "windows-interface" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + +[[package]] +name = "windows-numerics" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" +dependencies = [ + "windows-core 0.61.2", + "windows-link", +] + +[[package]] +name = "windows-registry" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" +dependencies = [ + "windows-link", + "windows-result 0.3.4", + "windows-strings", +] + +[[package]] +name = "windows-result" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.3", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] + +[[package]] +name = "windows-threading" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-version" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e04a5c6627e310a23ad2358483286c7df260c964eb2d003d8efd6d0f4e79265c" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = 
"windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = 
"windows_x86_64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + +[[package]] +name = "winnow" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "winreg" +version = "0.55.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb5a765337c50e9ec252c2069be9bf91c7df47afb103b642ba3a53bf8101be97" +dependencies = [ + "cfg-if", + "windows-sys 0.59.0", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags 2.9.1", +] + +[[package]] +name = "wrapcenum-derive" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a76ff259533532054cfbaefb115c613203c73707017459206380f03b3b3f266e" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "writeable" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" + +[[package]] +name = "wry" +version = "0.52.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "12a714d9ba7075aae04a6e50229d6109e3d584774b99a6a8c60de1698ca111b9" +dependencies = [ + "base64 0.22.1", + "block2 0.6.1", + "cookie", + "crossbeam-channel", + "dpi", + "dunce", + "gdkx11", + "gtk", + "html5ever", + "http 1.3.1", + "javascriptcore-rs", + "jni", + "kuchikiki", + "libc", + "ndk", + "objc2 0.6.1", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation 0.3.1", + "objc2-ui-kit", + "objc2-web-kit", + "once_cell", + "percent-encoding", + "raw-window-handle", + "sha2", + "soup3", + "tao-macros", + "thiserror 2.0.12", + "url", + "webkit2gtk", + "webkit2gtk-sys", + "webview2-com", + "windows 0.61.3", + "windows-core 0.61.2", + "windows-version", + "x11-dl", +] + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "x11" +version = "2.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "502da5464ccd04011667b11c435cb992822c2c0dbde1770c988480d312a0db2e" +dependencies = [ + "libc", + "pkg-config", +] + +[[package]] +name = "x11-dl" +version = "2.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38735924fedd5314a6e548792904ed8c6de6636285cb9fec04d5b1db85c1516f" +dependencies = [ + "libc", + "once_cell", + "pkg-config", +] + +[[package]] +name = "xattr" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af3a19837351dc82ba89f8a125e22a3c475f05aba604acc023d62b2739ae2909" +dependencies = [ + "libc", + "rustix", +] + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", + "synstructure", +] + +[[package]] +name = "zbus" +version = "5.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bb4f9a464286d42851d18a605f7193b8febaf5b0919d71c6399b7b26e5b0aad" +dependencies = [ + "async-broadcast", + "async-executor", + "async-io", + "async-lock", + "async-process", + "async-recursion", + "async-task", + "async-trait", + "blocking", + "enumflags2", + "event-listener", + "futures-core", + "futures-lite", + "hex", + "nix", + "ordered-stream", + "serde", + "serde_repr", + "tokio", + "tracing", + "uds_windows", + "windows-sys 0.59.0", + "winnow 0.7.12", + "zbus_macros", + "zbus_names", + "zvariant", +] + +[[package]] +name = "zbus_macros" +version = "5.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef9859f68ee0c4ee2e8cde84737c78e3f4c54f946f2a38645d0d4c7a95327659" +dependencies = [ + "proc-macro-crate 3.3.0", + "proc-macro2", + "quote", + "syn 2.0.104", + "zbus_names", + "zvariant", + "zvariant_utils", +] + +[[package]] +name = "zbus_names" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7be68e64bf6ce8db94f63e72f0c7eb9a60d733f7e0499e628dfab0f84d6bcb97" +dependencies = [ + "serde", + "static_assertions", + "winnow 0.7.12", + "zvariant", +] + +[[package]] +name = "zerocopy" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" +dependencies = [ + "proc-macro2", + "quote", + 
"syn 2.0.104", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "zip" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aed4ac33e8eb078c89e6cbb1d5c4c7703ec6d299fc3e7c3695af8f8b423468b" +dependencies = [ + "arbitrary", + "crc32fast", + "indexmap 2.10.0", + "memchr", +] + +[[package]] +name = "zvariant" +version = "5.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d91b3680bb339216abd84714172b5138a4edac677e641ef17e1d8cb1b3ca6e6f" +dependencies = [ + "endi", + "enumflags2", + "serde", + "url", + "winnow 0.7.12", + "zvariant_derive", + "zvariant_utils", +] + +[[package]] +name = "zvariant_derive" +version = "5.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8c68501be459a8dbfffbe5d792acdd23b4959940fc87785fb013b32edbc208" +dependencies = [ + "proc-macro-crate 3.3.0", + "proc-macro2", + "quote", + "syn 2.0.104", + "zvariant_utils", +] + +[[package]] +name = "zvariant_utils" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e16edfee43e5d7b553b77872d99bc36afdda75c223ca7ad5e3fbecd82ca5fc34" +dependencies = [ + "proc-macro2", + "quote", + "serde", + "static_assertions", + "syn 2.0.104", + "winnow 0.7.12", +] diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index ca1a54bba..744b84830 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -7,33 +7,43 @@ license = "MIT" repository = "https://github.com/menloresearch/jan" edition = "2021" rust-version = "1.77.2" +resolver = "2" [lib] name = "app_lib" crate-type = ["staticlib", "cdylib", "rlib"] +[features] +default = [ + "tauri/wry", + "tauri/common-controls-v6", + "tauri/x11", + "tauri/protocol-asset", + "tauri/macos-private-api", + "tauri/test", +] +test-tauri = [ + "tauri/wry", + "tauri/x11", + "tauri/protocol-asset", + "tauri/macos-private-api", + "tauri/test", +] + [build-dependencies] tauri-build = { version = "2.0.2", features = [] } [dependencies] -serde_json = "1.0" -serde = { version = "1.0", features = ["derive"] } -log = "0.4" -tauri = { version = "2.5.0", features = [ "protocol-asset", "macos-private-api", - "test" -] } -tauri-plugin-log = "2.0.0-rc" -tauri-plugin-shell = "2.2.0" -tauri-plugin-os = "2.2.1" -tauri-plugin-opener = "2.2.7" +dirs = "6.0.0" +env = "1.0.1" +fix-path-env = { git = "https://github.com/tauri-apps/fix-path-env-rs" } flate2 = "1.0" -tar = "0.4" -rand 
= "0.8" -tauri-plugin-http = { version = "2", features = ["unsafe-headers"] } -tauri-plugin-store = "2" +futures-util = "0.3.31" hyper = { version = "0.14", features = ["server"] } +jan-utils = { path = "./utils" } +libloading = "0.8.7" +log = "0.4" reqwest = { version = "0.11", features = ["json", "blocking", "stream"] } -tokio = { version = "1", features = ["full"] } rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", rev = "3196c95f1dfafbffbdcdd6d365c94969ac975e6a", features = [ "client", "transport-sse-client", @@ -41,24 +51,35 @@ rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", rev = "3196c9 "tower", "reqwest", ] } -uuid = { version = "1.7", features = ["v4"] } -env = "1.0.1" -futures-util = "0.3.31" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +serde_yaml = "0.9.34" +tar = "0.4" +tauri-plugin-deep-link = "2" +tauri-plugin-dialog = "2.2.1" +tauri-plugin-hardware = { path = "./plugins/tauri-plugin-hardware" } +tauri-plugin-http = { version = "2", features = ["unsafe-headers"] } +tauri-plugin-llamacpp = { path = "./plugins/tauri-plugin-llamacpp" } +tauri-plugin-log = "2.0.0-rc" +tauri-plugin-opener = "2.2.7" +tauri-plugin-os = "2.2.1" +tauri-plugin-shell = "2.2.0" +tauri-plugin-store = "2" +thiserror = "2.0.12" +tokio = { version = "1", features = ["full"] } tokio-util = "0.7.14" url = "2.5" -tauri-plugin-dialog = "2.2.1" -dirs = "6.0.0" -sysinfo = "0.34.2" -ash = "0.38.0" -nvml-wrapper = "0.10.0" -tauri-plugin-deep-link = "2" -fix-path-env = { git = "https://github.com/tauri-apps/fix-path-env-rs" } -serde_yaml = "0.9.34" -hmac = "0.12.1" -sha2 = "0.10.9" -base64 = "0.22.1" -libloading = "0.8.7" -thiserror = "2.0.12" +uuid = { version = "1.7", features = ["v4"] } + +[dependencies.tauri] +version = "2.5.0" +default-features = false +features = ["protocol-asset", "macos-private-api", "test"] + +[target.'cfg(windows)'.dev-dependencies] +tempfile = "3.20.0" + +[target.'cfg(unix)'.dependencies] nix = 
"=0.30.1" [target.'cfg(windows)'.dependencies] @@ -69,6 +90,3 @@ windows-sys = { version = "0.60.2", features = ["Win32_Storage_FileSystem"] } tauri-plugin-updater = "2" once_cell = "1.18" tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] } - -[target.'cfg(windows)'.dev-dependencies] -tempfile = "3.20.0" diff --git a/src-tauri/capabilities/default.json b/src-tauri/capabilities/default.json index 3d9f98178..e594bf023 100644 --- a/src-tauri/capabilities/default.json +++ b/src-tauri/capabilities/default.json @@ -55,6 +55,8 @@ } ] }, - "store:default" + "store:default", + "llamacpp:default", + "hardware:default" ] } diff --git a/src-tauri/icons/icon.png b/src-tauri/icons/icon.png index 1b354a241..c16023e94 100644 Binary files a/src-tauri/icons/icon.png and b/src-tauri/icons/icon.png differ diff --git a/src-tauri/plugins/.yarnrc.yml b/src-tauri/plugins/.yarnrc.yml new file mode 100644 index 000000000..74febbee3 --- /dev/null +++ b/src-tauri/plugins/.yarnrc.yml @@ -0,0 +1,3 @@ +nmHoistingLimits: workspaces +nodeLinker: node-modules +checksumBehavior: update diff --git a/src-tauri/plugins/package.json b/src-tauri/plugins/package.json new file mode 100644 index 000000000..df9dec545 --- /dev/null +++ b/src-tauri/plugins/package.json @@ -0,0 +1,12 @@ +{ + "private": true, + "workspaces": { + "packages": [ + "**" + ] + }, + "installConfig": { + "hoistingLimits": "workspaces" + }, + "packageManager": "yarn@4.5.3" +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/.gitignore b/src-tauri/plugins/tauri-plugin-hardware/.gitignore new file mode 100644 index 000000000..50d8e32e8 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/.gitignore @@ -0,0 +1,17 @@ +/.vs +.DS_Store +.Thumbs.db +*.sublime* +.idea/ +debug.log +package-lock.json +.vscode/settings.json +yarn.lock + +/.tauri +/target +Cargo.lock +node_modules/ + +dist-js +dist diff --git a/src-tauri/plugins/tauri-plugin-hardware/Cargo.toml 
b/src-tauri/plugins/tauri-plugin-hardware/Cargo.toml new file mode 100644 index 000000000..eb74d32d1 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "tauri-plugin-hardware" +version = "0.6.599" +authors = ["Jan "] +description = "Tauri plugin for hardware information and GPU monitoring" +license = "MIT" +repository = "https://github.com/menloresearch/jan" +edition = "2021" +rust-version = "1.77.2" +exclude = ["/examples", "/dist-js", "/guest-js", "/node_modules"] +links = "tauri-plugin-hardware" + +[dependencies] +ash = "0.38.0" +libc = "0.2" +log = "0.4" +nvml-wrapper = "0.10.0" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +sysinfo = "0.34.2" +tauri = { version = "2.5.0", default-features = false, features = ["test"] } + +# Windows-specific dependencies +[target.'cfg(windows)'.dependencies] +libloading = "0.8" + +[build-dependencies] +tauri-plugin = { version = "2.3.1", features = ["build"] } diff --git a/src-tauri/plugins/tauri-plugin-hardware/build.rs b/src-tauri/plugins/tauri-plugin-hardware/build.rs new file mode 100644 index 000000000..c90177818 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/build.rs @@ -0,0 +1,5 @@ +const COMMANDS: &[&str] = &["get_system_info", "get_system_usage"]; + +fn main() { + tauri_plugin::Builder::new(COMMANDS).build(); +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/guest-js/index.ts b/src-tauri/plugins/tauri-plugin-hardware/guest-js/index.ts new file mode 100644 index 000000000..6bb22f5e1 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/guest-js/index.ts @@ -0,0 +1,49 @@ +import { invoke } from '@tauri-apps/api/core' + +// Types +export interface CpuStaticInfo { + name: string; + core_count: number; + arch: string; + extensions: string[]; +} + +export interface GpuInfo { + name: string; + total_memory: number; + vendor: string; + uuid: string; + driver_version: string; + nvidia_info?: any; + vulkan_info?: any; +} + 
+export interface SystemInfo { + cpu: CpuStaticInfo; + os_type: string; + os_name: string; + total_memory: number; + gpus: GpuInfo[]; +} + +export interface GpuUsage { + uuid: string; + used_memory: number; + total_memory: number; +} + +export interface SystemUsage { + cpu: number; + used_memory: number; + total_memory: number; + gpus: GpuUsage[]; +} + +// Hardware commands +export async function getSystemInfo(): Promise { + return await invoke('plugin:hardware|get_system_info'); +} + +export async function getSystemUsage(): Promise { + return await invoke('plugin:hardware|get_system_usage'); +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/package.json b/src-tauri/plugins/tauri-plugin-hardware/package.json new file mode 100644 index 000000000..d8376af7b --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/package.json @@ -0,0 +1,33 @@ +{ + "name": "@janhq/tauri-plugin-hardware-api", + "version": "0.6.6", + "private": true, + "description": "Hardware monitoring plugin API for Tauri", + "type": "module", + "types": "./dist-js/index.d.ts", + "main": "./dist-js/index.cjs", + "module": "./dist-js/index.js", + "exports": { + "types": "./dist-js/index.d.ts", + "import": "./dist-js/index.js", + "require": "./dist-js/index.cjs" + }, + "files": [ + "dist-js", + "README.md" + ], + "scripts": { + "build": "rollup -c", + "prepublishOnly": "yarn build", + "pretest": "yarn build" + }, + "dependencies": { + "@tauri-apps/api": ">=2.0.0-beta.6" + }, + "devDependencies": { + "@rollup/plugin-typescript": "^12.0.0", + "rollup": "^4.9.6", + "tslib": "^2.6.2", + "typescript": "^5.3.3" + } +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/permissions/autogenerated/commands/get_system_info.toml b/src-tauri/plugins/tauri-plugin-hardware/permissions/autogenerated/commands/get_system_info.toml new file mode 100644 index 000000000..07db155bc --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/permissions/autogenerated/commands/get_system_info.toml @@ -0,0 +1,13 @@ 
+# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-get-system-info" +description = "Enables the get_system_info command without any pre-configured scope." +commands.allow = ["get_system_info"] + +[[permission]] +identifier = "deny-get-system-info" +description = "Denies the get_system_info command without any pre-configured scope." +commands.deny = ["get_system_info"] diff --git a/src-tauri/plugins/tauri-plugin-hardware/permissions/autogenerated/commands/get_system_usage.toml b/src-tauri/plugins/tauri-plugin-hardware/permissions/autogenerated/commands/get_system_usage.toml new file mode 100644 index 000000000..e43142605 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/permissions/autogenerated/commands/get_system_usage.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-get-system-usage" +description = "Enables the get_system_usage command without any pre-configured scope." +commands.allow = ["get_system_usage"] + +[[permission]] +identifier = "deny-get-system-usage" +description = "Denies the get_system_usage command without any pre-configured scope." +commands.deny = ["get_system_usage"] diff --git a/src-tauri/plugins/tauri-plugin-hardware/permissions/autogenerated/reference.md b/src-tauri/plugins/tauri-plugin-hardware/permissions/autogenerated/reference.md new file mode 100644 index 000000000..6a98cc156 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/permissions/autogenerated/reference.md @@ -0,0 +1,70 @@ +## Default Permission + +Default permissions for the hardware plugin + +#### This default permission set includes the following: + +- `allow-get-system-info` +- `allow-get-system-usage` + +## Permission Table + + + + + + + + + + + + + + + + + + + + + + + + + + + +
IdentifierDescription
+ +`hardware:allow-get-system-info` + + + +Enables the get_system_info command without any pre-configured scope. + +
+ +`hardware:deny-get-system-info` + + + +Denies the get_system_info command without any pre-configured scope. + +
+ +`hardware:allow-get-system-usage` + + + +Enables the get_system_usage command without any pre-configured scope. + +
+ +`hardware:deny-get-system-usage` + + + +Denies the get_system_usage command without any pre-configured scope. + +
diff --git a/src-tauri/plugins/tauri-plugin-hardware/permissions/default.toml b/src-tauri/plugins/tauri-plugin-hardware/permissions/default.toml new file mode 100644 index 000000000..98a8de1b3 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/permissions/default.toml @@ -0,0 +1,6 @@ +[default] +description = "Default permissions for the hardware plugin" +permissions = [ + "allow-get-system-info", + "allow-get-system-usage" +] diff --git a/src-tauri/plugins/tauri-plugin-hardware/permissions/schemas/schema.json b/src-tauri/plugins/tauri-plugin-hardware/permissions/schemas/schema.json new file mode 100644 index 000000000..6848c3288 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/permissions/schemas/schema.json @@ -0,0 +1,330 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "PermissionFile", + "description": "Permission file that can define a default permission, a set of permissions or a list of inlined permissions.", + "type": "object", + "properties": { + "default": { + "description": "The default permission set for the plugin", + "anyOf": [ + { + "$ref": "#/definitions/DefaultPermission" + }, + { + "type": "null" + } + ] + }, + "set": { + "description": "A list of permissions sets defined", + "type": "array", + "items": { + "$ref": "#/definitions/PermissionSet" + } + }, + "permission": { + "description": "A list of inlined permissions", + "default": [], + "type": "array", + "items": { + "$ref": "#/definitions/Permission" + } + } + }, + "definitions": { + "DefaultPermission": { + "description": "The default permission set of the plugin.\n\nWorks similarly to a permission with the \"default\" identifier.", + "type": "object", + "required": [ + "permissions" + ], + "properties": { + "version": { + "description": "The version of the permission.", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 1.0 + }, + "description": { + "description": "Human-readable description of what the permission does. 
Tauri convention is to use `

` headings in markdown content for Tauri documentation generation purposes.", + "type": [ + "string", + "null" + ] + }, + "permissions": { + "description": "All permissions this set contains.", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "PermissionSet": { + "description": "A set of direct permissions grouped together under a new name.", + "type": "object", + "required": [ + "description", + "identifier", + "permissions" + ], + "properties": { + "identifier": { + "description": "A unique identifier for the permission.", + "type": "string" + }, + "description": { + "description": "Human-readable description of what the permission does.", + "type": "string" + }, + "permissions": { + "description": "All permissions this set contains.", + "type": "array", + "items": { + "$ref": "#/definitions/PermissionKind" + } + } + } + }, + "Permission": { + "description": "Descriptions of explicit privileges of commands.\n\nIt can enable commands to be accessible in the frontend of the application.\n\nIf the scope is defined it can be used to fine grain control the access of individual or multiple commands.", + "type": "object", + "required": [ + "identifier" + ], + "properties": { + "version": { + "description": "The version of the permission.", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 1.0 + }, + "identifier": { + "description": "A unique identifier for the permission.", + "type": "string" + }, + "description": { + "description": "Human-readable description of what the permission does. Tauri internal convention is to use `

` headings in markdown content for Tauri documentation generation purposes.", + "type": [ + "string", + "null" + ] + }, + "commands": { + "description": "Allowed or denied commands when using this permission.", + "default": { + "allow": [], + "deny": [] + }, + "allOf": [ + { + "$ref": "#/definitions/Commands" + } + ] + }, + "scope": { + "description": "Allowed or denied scoped when using this permission.", + "allOf": [ + { + "$ref": "#/definitions/Scopes" + } + ] + }, + "platforms": { + "description": "Target platforms this permission applies. By default all platforms are affected by this permission.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Target" + } + } + } + }, + "Commands": { + "description": "Allowed and denied commands inside a permission.\n\nIf two commands clash inside of `allow` and `deny`, it should be denied by default.", + "type": "object", + "properties": { + "allow": { + "description": "Allowed command.", + "default": [], + "type": "array", + "items": { + "type": "string" + } + }, + "deny": { + "description": "Denied command, which takes priority.", + "default": [], + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "Scopes": { + "description": "An argument for fine grained behavior control of Tauri commands.\n\nIt can be of any serde serializable type and is used to allow or prevent certain actions inside a Tauri command. The configured scope is passed to the command and will be enforced by the command implementation.\n\n## Example\n\n```json { \"allow\": [{ \"path\": \"$HOME/**\" }], \"deny\": [{ \"path\": \"$HOME/secret.txt\" }] } ```", + "type": "object", + "properties": { + "allow": { + "description": "Data that defines what is allowed by the scope.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Value" + } + }, + "deny": { + "description": "Data that defines what is denied by the scope. 
This should be prioritized by validation logic.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Value" + } + } + } + }, + "Value": { + "description": "All supported ACL values.", + "anyOf": [ + { + "description": "Represents a null JSON value.", + "type": "null" + }, + { + "description": "Represents a [`bool`].", + "type": "boolean" + }, + { + "description": "Represents a valid ACL [`Number`].", + "allOf": [ + { + "$ref": "#/definitions/Number" + } + ] + }, + { + "description": "Represents a [`String`].", + "type": "string" + }, + { + "description": "Represents a list of other [`Value`]s.", + "type": "array", + "items": { + "$ref": "#/definitions/Value" + } + }, + { + "description": "Represents a map of [`String`] keys to [`Value`]s.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/Value" + } + } + ] + }, + "Number": { + "description": "A valid ACL number.", + "anyOf": [ + { + "description": "Represents an [`i64`].", + "type": "integer", + "format": "int64" + }, + { + "description": "Represents a [`f64`].", + "type": "number", + "format": "double" + } + ] + }, + "Target": { + "description": "Platform target.", + "oneOf": [ + { + "description": "MacOS.", + "type": "string", + "enum": [ + "macOS" + ] + }, + { + "description": "Windows.", + "type": "string", + "enum": [ + "windows" + ] + }, + { + "description": "Linux.", + "type": "string", + "enum": [ + "linux" + ] + }, + { + "description": "Android.", + "type": "string", + "enum": [ + "android" + ] + }, + { + "description": "iOS.", + "type": "string", + "enum": [ + "iOS" + ] + } + ] + }, + "PermissionKind": { + "type": "string", + "oneOf": [ + { + "description": "Enables the get_system_info command without any pre-configured scope.", + "type": "string", + "const": "allow-get-system-info", + "markdownDescription": "Enables the get_system_info command without any pre-configured scope." 
+ }, + { + "description": "Denies the get_system_info command without any pre-configured scope.", + "type": "string", + "const": "deny-get-system-info", + "markdownDescription": "Denies the get_system_info command without any pre-configured scope." + }, + { + "description": "Enables the get_system_usage command without any pre-configured scope.", + "type": "string", + "const": "allow-get-system-usage", + "markdownDescription": "Enables the get_system_usage command without any pre-configured scope." + }, + { + "description": "Denies the get_system_usage command without any pre-configured scope.", + "type": "string", + "const": "deny-get-system-usage", + "markdownDescription": "Denies the get_system_usage command without any pre-configured scope." + }, + { + "description": "Default permissions for the hardware plugin\n#### This default permission set includes:\n\n- `allow-get-system-info`\n- `allow-get-system-usage`", + "type": "string", + "const": "default", + "markdownDescription": "Default permissions for the hardware plugin\n#### This default permission set includes:\n\n- `allow-get-system-info`\n- `allow-get-system-usage`" + } + ] + } + } +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/rollup.config.js b/src-tauri/plugins/tauri-plugin-hardware/rollup.config.js new file mode 100644 index 000000000..8b4768ff6 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/rollup.config.js @@ -0,0 +1,31 @@ +import { readFileSync } from 'node:fs' +import { dirname, join } from 'node:path' +import { cwd } from 'node:process' +import typescript from '@rollup/plugin-typescript' + +const pkg = JSON.parse(readFileSync(join(cwd(), 'package.json'), 'utf8')) + +export default { + input: 'guest-js/index.ts', + output: [ + { + file: pkg.exports.import, + format: 'esm' + }, + { + file: pkg.exports.require, + format: 'cjs' + } + ], + plugins: [ + typescript({ + declaration: true, + declarationDir: dirname(pkg.exports.import) + }) + ], + external: [ + /^@tauri-apps\/api/, + 
...Object.keys(pkg.dependencies || {}), + ...Object.keys(pkg.peerDependencies || {}) + ] +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/commands.rs b/src-tauri/plugins/tauri-plugin-hardware/src/commands.rs new file mode 100644 index 000000000..56e78f1c1 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/src/commands.rs @@ -0,0 +1,90 @@ +use crate::{ + helpers::get_jan_libvulkan_path, + types::{CpuStaticInfo, SystemInfo, SystemUsage}, + vendor::{nvidia, vulkan}, + SYSTEM_INFO, +}; +use sysinfo::System; +use tauri::Runtime; + +#[tauri::command] +pub fn get_system_info(app: tauri::AppHandle) -> SystemInfo { + SYSTEM_INFO + .get_or_init(|| { + let mut system = System::new(); + system.refresh_memory(); + + let mut gpu_map = std::collections::HashMap::new(); + for gpu in nvidia::get_nvidia_gpus() { + gpu_map.insert(gpu.uuid.clone(), gpu); + } + + // try system vulkan first + let paths = vec!["".to_string(), get_jan_libvulkan_path(app.clone())]; + let mut vulkan_gpus = vec![]; + for path in paths { + vulkan_gpus = vulkan::get_vulkan_gpus(&path); + if !vulkan_gpus.is_empty() { + break; + } + } + + for gpu in vulkan_gpus { + match gpu_map.get_mut(&gpu.uuid) { + // for existing NVIDIA GPUs, add Vulkan info + Some(nvidia_gpu) => { + nvidia_gpu.vulkan_info = gpu.vulkan_info; + } + None => { + gpu_map.insert(gpu.uuid.clone(), gpu); + } + } + } + + let os_type = if cfg!(target_os = "windows") { + "windows" + } else if cfg!(target_os = "macos") { + "macos" + } else if cfg!(target_os = "linux") { + "linux" + } else { + "unknown" + }; + let os_name = System::long_os_version().unwrap_or("Unknown".to_string()); + + SystemInfo { + cpu: CpuStaticInfo::new(), + os_type: os_type.to_string(), + os_name, + total_memory: system.total_memory() / 1024 / 1024, // bytes to MiB + gpus: gpu_map.into_values().collect(), + } + }) + .clone() +} + +#[tauri::command] +pub fn get_system_usage(app: tauri::AppHandle) -> SystemUsage { + let mut system = System::new(); + 
system.refresh_memory(); + + // need to refresh 2 times to get CPU usage + system.refresh_cpu_all(); + std::thread::sleep(sysinfo::MINIMUM_CPU_UPDATE_INTERVAL); + system.refresh_cpu_all(); + + let cpus = system.cpus(); + let cpu_usage = + cpus.iter().map(|cpu| cpu.cpu_usage()).sum::() / (cpus.len().max(1) as f32); + + SystemUsage { + cpu: cpu_usage, + used_memory: system.used_memory() / 1024 / 1024, // bytes to MiB, + total_memory: system.total_memory() / 1024 / 1024, // bytes to MiB, + gpus: get_system_info(app.clone()) + .gpus + .iter() + .map(|gpu| gpu.get_usage()) + .collect(), + } +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/constants.rs b/src-tauri/plugins/tauri-plugin-hardware/src/constants.rs new file mode 100644 index 000000000..e79a67171 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/src/constants.rs @@ -0,0 +1,4 @@ +// https://devicehunt.com/all-pci-vendors +pub const VENDOR_ID_AMD: u32 = 0x1002; +pub const VENDOR_ID_NVIDIA: u32 = 0x10DE; +pub const VENDOR_ID_INTEL: u32 = 0x8086; diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/cpu.rs b/src-tauri/plugins/tauri-plugin-hardware/src/cpu.rs new file mode 100644 index 000000000..5b35088cd --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/src/cpu.rs @@ -0,0 +1,130 @@ +use sysinfo::System; + +use crate::types::CpuStaticInfo; + +impl CpuStaticInfo { + pub fn new() -> Self { + let mut system = System::new(); + system.refresh_cpu_all(); + + let name = system + .cpus() + .first() + .map(|cpu| { + let brand = cpu.brand(); + if brand.is_empty() { + cpu.name() + } else { + brand + } + }) + .unwrap_or("unknown") + .to_string(); + + CpuStaticInfo { + name, + core_count: System::physical_core_count().unwrap_or(0), + arch: std::env::consts::ARCH.to_string(), + extensions: CpuStaticInfo::get_extensions(), + } + } + + // TODO: see if we need to check for all CPU extensions + #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] + fn get_extensions() -> Vec { + let mut exts = 
vec![]; + + // fpu is always present on modern x86 processors, + // but is_x86_feature_detected doesn't support it + exts.push("fpu".to_string()); + if is_x86_feature_detected!("mmx") { + exts.push("mmx".to_string()); + } + if is_x86_feature_detected!("sse") { + exts.push("sse".to_string()); + } + if is_x86_feature_detected!("sse2") { + exts.push("sse2".to_string()); + } + if is_x86_feature_detected!("sse3") { + exts.push("sse3".to_string()); + } + if is_x86_feature_detected!("ssse3") { + exts.push("ssse3".to_string()); + } + if is_x86_feature_detected!("sse4.1") { + exts.push("sse4_1".to_string()); + } + if is_x86_feature_detected!("sse4.2") { + exts.push("sse4_2".to_string()); + } + if is_x86_feature_detected!("pclmulqdq") { + exts.push("pclmulqdq".to_string()); + } + if is_x86_feature_detected!("avx") { + exts.push("avx".to_string()); + } + if is_x86_feature_detected!("avx2") { + exts.push("avx2".to_string()); + } + if is_x86_feature_detected!("avx512f") { + exts.push("avx512_f".to_string()); + } + if is_x86_feature_detected!("avx512dq") { + exts.push("avx512_dq".to_string()); + } + if is_x86_feature_detected!("avx512ifma") { + exts.push("avx512_ifma".to_string()); + } + if is_x86_feature_detected!("avx512pf") { + exts.push("avx512_pf".to_string()); + } + if is_x86_feature_detected!("avx512er") { + exts.push("avx512_er".to_string()); + } + if is_x86_feature_detected!("avx512cd") { + exts.push("avx512_cd".to_string()); + } + if is_x86_feature_detected!("avx512bw") { + exts.push("avx512_bw".to_string()); + } + if is_x86_feature_detected!("avx512vl") { + exts.push("avx512_vl".to_string()); + } + if is_x86_feature_detected!("avx512vbmi") { + exts.push("avx512_vbmi".to_string()); + } + if is_x86_feature_detected!("avx512vbmi2") { + exts.push("avx512_vbmi2".to_string()); + } + if is_x86_feature_detected!("avx512vnni") { + exts.push("avx512_vnni".to_string()); + } + if is_x86_feature_detected!("avx512bitalg") { + exts.push("avx512_bitalg".to_string()); + } + if 
is_x86_feature_detected!("avx512vpopcntdq") { + exts.push("avx512_vpopcntdq".to_string()); + } + // avx512_4vnniw and avx512_4fmaps are only available on Intel Knights Mill, which are + // very rare. https://en.wikipedia.org/wiki/AVX-512 + // is_x86_feature_detected doesn't support them + if is_x86_feature_detected!("avx512vp2intersect") { + exts.push("avx512_vp2intersect".to_string()); + } + if is_x86_feature_detected!("aes") { + exts.push("aes".to_string()); + } + if is_x86_feature_detected!("f16c") { + exts.push("f16c".to_string()); + } + + exts + } + + // Cortex always returns empty list for non-x86 + #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))] + fn get_extensions() -> Vec { + vec![] + } +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/gpu.rs b/src-tauri/plugins/tauri-plugin-hardware/src/gpu.rs new file mode 100644 index 000000000..4ceec8e6c --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/src/gpu.rs @@ -0,0 +1,33 @@ +use crate::{ + constants::{VENDOR_ID_AMD, VENDOR_ID_INTEL, VENDOR_ID_NVIDIA}, + types::{GpuInfo, GpuUsage, Vendor}, +}; + +impl Vendor { + pub fn from_vendor_id(vendor_id: u32) -> Self { + match vendor_id { + VENDOR_ID_AMD => Vendor::AMD, + VENDOR_ID_NVIDIA => Vendor::NVIDIA, + VENDOR_ID_INTEL => Vendor::Intel, + _ => Vendor::Unknown(vendor_id), + } + } +} + +impl GpuInfo { + pub fn get_usage(&self) -> GpuUsage { + match self.vendor { + Vendor::NVIDIA => self.get_usage_nvidia(), + Vendor::AMD => self.get_usage_amd(), + _ => self.get_usage_unsupported(), + } + } + + pub fn get_usage_unsupported(&self) -> GpuUsage { + GpuUsage { + uuid: self.uuid.clone(), + used_memory: 0, + total_memory: 0, + } + } +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/helpers.rs b/src-tauri/plugins/tauri-plugin-hardware/src/helpers.rs new file mode 100644 index 000000000..22bcc8669 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/src/helpers.rs @@ -0,0 +1,20 @@ +use tauri::{path::BaseDirectory, Manager, 
Runtime}; + +pub fn get_jan_libvulkan_path(app: tauri::AppHandle) -> String { + let lib_name = if cfg!(target_os = "windows") { + "vulkan-1.dll" + } else if cfg!(target_os = "linux") { + "libvulkan.so" + } else { + return "".to_string(); + }; + + // NOTE: this does not work in test mode (mock app) + match app.path().resolve( + format!("resources/lib/{}", lib_name), + BaseDirectory::Resource, + ) { + Ok(lib_path) => lib_path.to_string_lossy().to_string(), + Err(_) => "".to_string(), + } +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/lib.rs b/src-tauri/plugins/tauri-plugin-hardware/src/lib.rs new file mode 100644 index 000000000..8f0427a6b --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/src/lib.rs @@ -0,0 +1,29 @@ +mod commands; +mod constants; +pub mod cpu; +pub mod gpu; +mod helpers; +mod types; +pub mod vendor; + +pub use constants::*; +pub use helpers::*; +pub use types::*; + +use std::sync::OnceLock; +use tauri::Runtime; + +static SYSTEM_INFO: OnceLock = OnceLock::new(); + +/// Initialize the hardware plugin +pub fn init() -> tauri::plugin::TauriPlugin { + tauri::plugin::Builder::new("hardware") + .invoke_handler(tauri::generate_handler![ + commands::get_system_info, + commands::get_system_usage + ]) + .build() +} + +#[cfg(test)] +mod tests; diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/tests.rs b/src-tauri/plugins/tauri-plugin-hardware/src/tests.rs new file mode 100644 index 000000000..394092543 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/src/tests.rs @@ -0,0 +1,16 @@ +use crate::commands::*; +use tauri::test::mock_app; + +#[test] +fn test_system_info() { + let app = mock_app(); + let info = get_system_info(app.handle().clone()); + println!("System Static Info: {:?}", info); +} + +#[test] +fn test_system_usage() { + let app = mock_app(); + let usage = get_system_usage(app.handle().clone()); + println!("System Usage Info: {:?}", usage); +} diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/types.rs 
b/src-tauri/plugins/tauri-plugin-hardware/src/types.rs new file mode 100644 index 000000000..e26135995 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/src/types.rs @@ -0,0 +1,71 @@ +use serde::Serialize; + +use crate::vendor::{nvidia::NvidiaInfo, vulkan::VulkanInfo}; + +#[derive(Clone, Serialize, Debug)] +pub struct CpuStaticInfo { + pub name: String, + pub core_count: usize, + pub arch: String, + pub extensions: Vec, +} + +#[derive(Debug, Clone)] +pub enum Vendor { + AMD, + NVIDIA, + Intel, + Unknown(u32), +} + +impl Serialize for Vendor { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + match self { + Vendor::AMD => "AMD".serialize(serializer), + Vendor::NVIDIA => "NVIDIA".serialize(serializer), + Vendor::Intel => "Intel".serialize(serializer), + Vendor::Unknown(vendor_id) => { + let formatted = format!("Unknown (vendor_id: {})", vendor_id); + serializer.serialize_str(&formatted) + } + } + } +} + +#[derive(Clone, Debug, Serialize)] +pub struct GpuInfo { + pub name: String, + pub total_memory: u64, + pub vendor: Vendor, + pub uuid: String, + pub driver_version: String, + pub nvidia_info: Option, + pub vulkan_info: Option, +} + +#[derive(Serialize, Clone, Debug)] +pub struct SystemInfo { + pub cpu: CpuStaticInfo, + pub os_type: String, + pub os_name: String, + pub total_memory: u64, + pub gpus: Vec, +} + +#[derive(Serialize, Clone, Debug)] +pub struct GpuUsage { + pub uuid: String, + pub used_memory: u64, + pub total_memory: u64, +} + +#[derive(Serialize, Clone, Debug)] +pub struct SystemUsage { + pub cpu: f32, + pub used_memory: u64, + pub total_memory: u64, + pub gpus: Vec, +} diff --git a/src-tauri/src/core/hardware/amd.rs b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/amd.rs similarity index 99% rename from src-tauri/src/core/hardware/amd.rs rename to src-tauri/plugins/tauri-plugin-hardware/src/vendor/amd.rs index cbaea172d..62d90ca1b 100644 --- a/src-tauri/src/core/hardware/amd.rs +++ 
b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/amd.rs @@ -1,4 +1,4 @@ -use super::{GpuInfo, GpuUsage}; +use crate::types::{GpuInfo, GpuUsage}; impl GpuInfo { #[cfg(not(target_os = "linux"))] diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/vendor/mod.rs b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/mod.rs new file mode 100644 index 000000000..137bf41ad --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/mod.rs @@ -0,0 +1,6 @@ +pub mod amd; +pub mod nvidia; +pub mod vulkan; + +#[cfg(test)] +mod tests; diff --git a/src-tauri/src/core/hardware/nvidia.rs b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/nvidia.rs similarity index 90% rename from src-tauri/src/core/hardware/nvidia.rs rename to src-tauri/plugins/tauri-plugin-hardware/src/vendor/nvidia.rs index 6dced3448..006ca66ba 100644 --- a/src-tauri/src/core/hardware/nvidia.rs +++ b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/nvidia.rs @@ -1,4 +1,4 @@ -use super::{GpuInfo, GpuUsage, Vendor}; +use crate::types::{GpuInfo, GpuUsage, Vendor}; use nvml_wrapper::{error::NvmlError, Nvml}; use std::sync::OnceLock; @@ -103,18 +103,3 @@ pub fn get_nvidia_gpus() -> Vec { } } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_get_nvidia_gpus() { - let gpus = get_nvidia_gpus(); - for (i, gpu) in gpus.iter().enumerate() { - println!("GPU {}:", i); - println!(" {:?}", gpu); - println!(" {:?}", gpu.get_usage()); - } - } -} diff --git a/src-tauri/plugins/tauri-plugin-hardware/src/vendor/tests.rs b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/tests.rs new file mode 100644 index 000000000..078efe91b --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/tests.rs @@ -0,0 +1,21 @@ +use crate::vendor::{nvidia, vulkan}; + +#[test] +fn test_get_nvidia_gpus() { + let gpus = nvidia::get_nvidia_gpus(); + for (i, gpu) in gpus.iter().enumerate() { + println!("GPU {}:", i); + println!(" {:?}", gpu); + println!(" {:?}", gpu.get_usage()); + } +} + +#[test] +fn 
test_get_vulkan_gpus() { + let gpus = vulkan::get_vulkan_gpus(""); + for (i, gpu) in gpus.iter().enumerate() { + println!("GPU {}:", i); + println!(" {:?}", gpu); + println!(" {:?}", gpu.get_usage()); + } +} diff --git a/src-tauri/src/core/hardware/vulkan.rs b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/vulkan.rs similarity index 91% rename from src-tauri/src/core/hardware/vulkan.rs rename to src-tauri/plugins/tauri-plugin-hardware/src/vendor/vulkan.rs index cba3ed391..6a9bf21aa 100644 --- a/src-tauri/src/core/hardware/vulkan.rs +++ b/src-tauri/plugins/tauri-plugin-hardware/src/vendor/vulkan.rs @@ -1,4 +1,4 @@ -use super::{GpuInfo, Vendor}; +use crate::types::{GpuInfo, Vendor}; use ash::{vk, Entry}; #[derive(Debug, Clone, serde::Serialize)] @@ -128,18 +128,3 @@ fn get_vulkan_gpus_internal(lib_path: &str) -> Result, Box"] +description = "Tauri plugin for managing Jan LlamaCpp server processes and model loading" +license = "MIT" +repository = "https://github.com/menloresearch/jan" +edition = "2021" +rust-version = "1.77.2" +exclude = ["/examples", "/dist-js", "/guest-js", "/node_modules"] +links = "tauri-plugin-llamacpp" + +[dependencies] +base64 = "0.22.1" +byteorder = "1.5.0" +hmac = "0.12.1" +jan-utils = { path = "../../utils" } +log = "0.4" +rand = "0.8" +serde = { version = "1.0", features = ["derive"] } +sha2 = "0.10.9" +sysinfo = "0.34.2" +tauri = { version = "2.5.0", default-features = false, features = [] } +thiserror = "2.0.12" +tokio = { version = "1", features = ["full"] } + +# Windows-specific dependencies +[target.'cfg(windows)'.dependencies] +windows-sys = { version = "0.60.2", features = ["Win32_Storage_FileSystem"] } + +# Unix-specific dependencies +[target.'cfg(unix)'.dependencies] +nix = { version = "=0.30.1", features = ["signal", "process"] } + +[build-dependencies] +tauri-plugin = { version = "2.3.1", features = ["build"] } diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/build.rs b/src-tauri/plugins/tauri-plugin-llamacpp/build.rs new 
file mode 100644 index 000000000..ca32eb4d5 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/build.rs @@ -0,0 +1,21 @@ +const COMMANDS: &[&str] = &[ + // Cleanup command + "cleanup_llama_processes", + // LlamaCpp server commands + "load_llama_model", + "unload_llama_model", + "get_devices", + "generate_api_key", + "is_process_running", + "get_random_port", + "find_session_by_model", + "get_loaded_models", + "get_all_sessions", + "get_session_by_model", + // GGUF commands + "read_gguf_metadata", +]; + +fn main() { + tauri_plugin::Builder::new(COMMANDS).build(); +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/guest-js/index.ts b/src-tauri/plugins/tauri-plugin-llamacpp/guest-js/index.ts new file mode 100644 index 000000000..0380e4fe7 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/guest-js/index.ts @@ -0,0 +1,93 @@ +import { invoke } from '@tauri-apps/api/core' + +// Types +export interface SessionInfo { + pid: number; + port: number; + model_id: string; + model_path: string; + api_key: string; +} + +export interface DeviceInfo { + id: string; + name: string; + memory: number; +} + +export interface GgufMetadata { + version: number; + tensor_count: number; + metadata: Record; +} + +// Cleanup commands +export async function cleanupLlamaProcesses(): Promise { + return await invoke('plugin:llamacpp|cleanup_llama_processes'); +} + +// LlamaCpp server commands +export async function loadLlamaModel( + backendPath: string, + libraryPath?: string, + args: string[] = [] +): Promise { + return await invoke('plugin:llamacpp|load_llama_model', { + backendPath, + libraryPath, + args + }); +} + +export async function unloadLlamaModel(pid: number): Promise { + return await invoke('plugin:llamacpp|unload_llama_model', { pid }); +} + +export async function getDevices( + backendPath: string, + libraryPath?: string +): Promise { + return await invoke('plugin:llamacpp|get_devices', { + backendPath, + libraryPath + }); +} + +export async function 
generateApiKey( + modelId: string, + apiSecret: string +): Promise { + return await invoke('plugin:llamacpp|generate_api_key', { + modelId, + apiSecret + }); +} + +export async function isProcessRunning(pid: number): Promise { + return await invoke('plugin:llamacpp|is_process_running', { pid }); +} + +export async function getRandomPort(): Promise { + return await invoke('plugin:llamacpp|get_random_port'); +} + +export async function findSessionByModel(modelId: string): Promise { + return await invoke('plugin:llamacpp|find_session_by_model', { modelId }); +} + +export async function getLoadedModels(): Promise { + return await invoke('plugin:llamacpp|get_loaded_models'); +} + +export async function getAllSessions(): Promise { + return await invoke('plugin:llamacpp|get_all_sessions'); +} + +export async function getSessionByModel(modelId: string): Promise { + return await invoke('plugin:llamacpp|get_session_by_model', { modelId }); +} + +// GGUF commands +export async function readGgufMetadata(path: string): Promise { + return await invoke('plugin:llamacpp|read_gguf_metadata', { path }); +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/package.json b/src-tauri/plugins/tauri-plugin-llamacpp/package.json new file mode 100644 index 000000000..a7f78220e --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/package.json @@ -0,0 +1,33 @@ +{ + "name": "@janhq/tauri-plugin-llamacpp-api", + "version": "0.6.6", + "private": true, + "description": "", + "type": "module", + "types": "./dist-js/index.d.ts", + "main": "./dist-js/index.cjs", + "module": "./dist-js/index.js", + "exports": { + "types": "./dist-js/index.d.ts", + "import": "./dist-js/index.js", + "require": "./dist-js/index.cjs" + }, + "files": [ + "dist-js", + "README.md" + ], + "scripts": { + "build": "rollup -c", + "prepublishOnly": "yarn build", + "pretest": "yarn build" + }, + "dependencies": { + "@tauri-apps/api": ">=2.0.0-beta.6" + }, + "devDependencies": { + "@rollup/plugin-typescript": "^12.0.0", 
+ "rollup": "^4.9.6", + "tslib": "^2.6.2", + "typescript": "^5.3.3" + } +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/cleanup_llama_processes.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/cleanup_llama_processes.toml new file mode 100644 index 000000000..2a6b4db9d --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/cleanup_llama_processes.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-cleanup-llama-processes" +description = "Enables the cleanup_llama_processes command without any pre-configured scope." +commands.allow = ["cleanup_llama_processes"] + +[[permission]] +identifier = "deny-cleanup-llama-processes" +description = "Denies the cleanup_llama_processes command without any pre-configured scope." +commands.deny = ["cleanup_llama_processes"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/find_session_by_model.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/find_session_by_model.toml new file mode 100644 index 000000000..431b018e5 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/find_session_by_model.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-find-session-by-model" +description = "Enables the find_session_by_model command without any pre-configured scope." +commands.allow = ["find_session_by_model"] + +[[permission]] +identifier = "deny-find-session-by-model" +description = "Denies the find_session_by_model command without any pre-configured scope." 
+commands.deny = ["find_session_by_model"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/generate_api_key.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/generate_api_key.toml new file mode 100644 index 000000000..005092664 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/generate_api_key.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-generate-api-key" +description = "Enables the generate_api_key command without any pre-configured scope." +commands.allow = ["generate_api_key"] + +[[permission]] +identifier = "deny-generate-api-key" +description = "Denies the generate_api_key command without any pre-configured scope." +commands.deny = ["generate_api_key"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_all_sessions.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_all_sessions.toml new file mode 100644 index 000000000..1d9de5517 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_all_sessions.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-get-all-sessions" +description = "Enables the get_all_sessions command without any pre-configured scope." +commands.allow = ["get_all_sessions"] + +[[permission]] +identifier = "deny-get-all-sessions" +description = "Denies the get_all_sessions command without any pre-configured scope." 
+commands.deny = ["get_all_sessions"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_devices.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_devices.toml new file mode 100644 index 000000000..c5aa86e9f --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_devices.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-get-devices" +description = "Enables the get_devices command without any pre-configured scope." +commands.allow = ["get_devices"] + +[[permission]] +identifier = "deny-get-devices" +description = "Denies the get_devices command without any pre-configured scope." +commands.deny = ["get_devices"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_loaded_models.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_loaded_models.toml new file mode 100644 index 000000000..ade091d39 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_loaded_models.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-get-loaded-models" +description = "Enables the get_loaded_models command without any pre-configured scope." +commands.allow = ["get_loaded_models"] + +[[permission]] +identifier = "deny-get-loaded-models" +description = "Denies the get_loaded_models command without any pre-configured scope." 
+commands.deny = ["get_loaded_models"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_random_port.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_random_port.toml new file mode 100644 index 000000000..ba0ba01b8 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_random_port.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-get-random-port" +description = "Enables the get_random_port command without any pre-configured scope." +commands.allow = ["get_random_port"] + +[[permission]] +identifier = "deny-get-random-port" +description = "Denies the get_random_port command without any pre-configured scope." +commands.deny = ["get_random_port"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_session_by_model.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_session_by_model.toml new file mode 100644 index 000000000..bed88faa9 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/get_session_by_model.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-get-session-by-model" +description = "Enables the get_session_by_model command without any pre-configured scope." +commands.allow = ["get_session_by_model"] + +[[permission]] +identifier = "deny-get-session-by-model" +description = "Denies the get_session_by_model command without any pre-configured scope." 
+commands.deny = ["get_session_by_model"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/is_process_running.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/is_process_running.toml new file mode 100644 index 000000000..71fcc5812 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/is_process_running.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-is-process-running" +description = "Enables the is_process_running command without any pre-configured scope." +commands.allow = ["is_process_running"] + +[[permission]] +identifier = "deny-is-process-running" +description = "Denies the is_process_running command without any pre-configured scope." +commands.deny = ["is_process_running"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/load_llama_model.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/load_llama_model.toml new file mode 100644 index 000000000..8c21b9e1a --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/load_llama_model.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-load-llama-model" +description = "Enables the load_llama_model command without any pre-configured scope." +commands.allow = ["load_llama_model"] + +[[permission]] +identifier = "deny-load-llama-model" +description = "Denies the load_llama_model command without any pre-configured scope." 
+commands.deny = ["load_llama_model"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/read_gguf_metadata.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/read_gguf_metadata.toml new file mode 100644 index 000000000..8a156c726 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/read_gguf_metadata.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-read-gguf-metadata" +description = "Enables the read_gguf_metadata command without any pre-configured scope." +commands.allow = ["read_gguf_metadata"] + +[[permission]] +identifier = "deny-read-gguf-metadata" +description = "Denies the read_gguf_metadata command without any pre-configured scope." +commands.deny = ["read_gguf_metadata"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/unload_llama_model.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/unload_llama_model.toml new file mode 100644 index 000000000..fcad4f066 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/commands/unload_llama_model.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-unload-llama-model" +description = "Enables the unload_llama_model command without any pre-configured scope." +commands.allow = ["unload_llama_model"] + +[[permission]] +identifier = "deny-unload-llama-model" +description = "Denies the unload_llama_model command without any pre-configured scope." 
+commands.deny = ["unload_llama_model"] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/reference.md b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/reference.md new file mode 100644 index 000000000..898cfe530 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/autogenerated/reference.md @@ -0,0 +1,340 @@ +## Default Permission + +Default permissions for the llamacpp plugin + +#### This default permission set includes the following: + +- `allow-cleanup-llama-processes` +- `allow-load-llama-model` +- `allow-unload-llama-model` +- `allow-get-devices` +- `allow-generate-api-key` +- `allow-is-process-running` +- `allow-get-random-port` +- `allow-find-session-by-model` +- `allow-get-loaded-models` +- `allow-get-all-sessions` +- `allow-get-session-by-model` +- `allow-read-gguf-metadata` + +## Permission Table + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
IdentifierDescription
+ +`llamacpp:allow-cleanup-llama-processes` + + + +Enables the cleanup_llama_processes command without any pre-configured scope. + +
+ +`llamacpp:deny-cleanup-llama-processes` + + + +Denies the cleanup_llama_processes command without any pre-configured scope. + +
+ +`llamacpp:allow-find-session-by-model` + + + +Enables the find_session_by_model command without any pre-configured scope. + +
+ +`llamacpp:deny-find-session-by-model` + + + +Denies the find_session_by_model command without any pre-configured scope. + +
+ +`llamacpp:allow-generate-api-key` + + + +Enables the generate_api_key command without any pre-configured scope. + +
+ +`llamacpp:deny-generate-api-key` + + + +Denies the generate_api_key command without any pre-configured scope. + +
+ +`llamacpp:allow-get-all-sessions` + + + +Enables the get_all_sessions command without any pre-configured scope. + +
+ +`llamacpp:deny-get-all-sessions` + + + +Denies the get_all_sessions command without any pre-configured scope. + +
+ +`llamacpp:allow-get-devices` + + + +Enables the get_devices command without any pre-configured scope. + +
+ +`llamacpp:deny-get-devices` + + + +Denies the get_devices command without any pre-configured scope. + +
+ +`llamacpp:allow-get-loaded-models` + + + +Enables the get_loaded_models command without any pre-configured scope. + +
+ +`llamacpp:deny-get-loaded-models` + + + +Denies the get_loaded_models command without any pre-configured scope. + +
+ +`llamacpp:allow-get-random-port` + + + +Enables the get_random_port command without any pre-configured scope. + +
+ +`llamacpp:deny-get-random-port` + + + +Denies the get_random_port command without any pre-configured scope. + +
+ +`llamacpp:allow-get-session-by-model` + + + +Enables the get_session_by_model command without any pre-configured scope. + +
+ +`llamacpp:deny-get-session-by-model` + + + +Denies the get_session_by_model command without any pre-configured scope. + +
+ +`llamacpp:allow-is-process-running` + + + +Enables the is_process_running command without any pre-configured scope. + +
+ +`llamacpp:deny-is-process-running` + + + +Denies the is_process_running command without any pre-configured scope. + +
+ +`llamacpp:allow-load-llama-model` + + + +Enables the load_llama_model command without any pre-configured scope. + +
+ +`llamacpp:deny-load-llama-model` + + + +Denies the load_llama_model command without any pre-configured scope. + +
+ +`llamacpp:allow-read-gguf-metadata` + + + +Enables the read_gguf_metadata command without any pre-configured scope. + +
+ +`llamacpp:deny-read-gguf-metadata` + + + +Denies the read_gguf_metadata command without any pre-configured scope. + +
+ +`llamacpp:allow-unload-llama-model` + + + +Enables the unload_llama_model command without any pre-configured scope. + +
+ +`llamacpp:deny-unload-llama-model` + + + +Denies the unload_llama_model command without any pre-configured scope. + +
diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/default.toml b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/default.toml new file mode 100644 index 000000000..08339b766 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/default.toml @@ -0,0 +1,21 @@ +[default] +description = "Default permissions for the llamacpp plugin" +permissions = [ + # Cleanup commands + "allow-cleanup-llama-processes", + + # LlamaCpp server commands + "allow-load-llama-model", + "allow-unload-llama-model", + "allow-get-devices", + "allow-generate-api-key", + "allow-is-process-running", + "allow-get-random-port", + "allow-find-session-by-model", + "allow-get-loaded-models", + "allow-get-all-sessions", + "allow-get-session-by-model", + + # GGUF commands + "allow-read-gguf-metadata" +] diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/permissions/schemas/schema.json b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/schemas/schema.json new file mode 100644 index 000000000..f832b4560 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/permissions/schemas/schema.json @@ -0,0 +1,450 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "PermissionFile", + "description": "Permission file that can define a default permission, a set of permissions or a list of inlined permissions.", + "type": "object", + "properties": { + "default": { + "description": "The default permission set for the plugin", + "anyOf": [ + { + "$ref": "#/definitions/DefaultPermission" + }, + { + "type": "null" + } + ] + }, + "set": { + "description": "A list of permissions sets defined", + "type": "array", + "items": { + "$ref": "#/definitions/PermissionSet" + } + }, + "permission": { + "description": "A list of inlined permissions", + "default": [], + "type": "array", + "items": { + "$ref": "#/definitions/Permission" + } + } + }, + "definitions": { + "DefaultPermission": { + "description": "The default permission set of the plugin.\n\nWorks similarly to 
a permission with the \"default\" identifier.", + "type": "object", + "required": [ + "permissions" + ], + "properties": { + "version": { + "description": "The version of the permission.", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 1.0 + }, + "description": { + "description": "Human-readable description of what the permission does. Tauri convention is to use `

` headings in markdown content for Tauri documentation generation purposes.", + "type": [ + "string", + "null" + ] + }, + "permissions": { + "description": "All permissions this set contains.", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "PermissionSet": { + "description": "A set of direct permissions grouped together under a new name.", + "type": "object", + "required": [ + "description", + "identifier", + "permissions" + ], + "properties": { + "identifier": { + "description": "A unique identifier for the permission.", + "type": "string" + }, + "description": { + "description": "Human-readable description of what the permission does.", + "type": "string" + }, + "permissions": { + "description": "All permissions this set contains.", + "type": "array", + "items": { + "$ref": "#/definitions/PermissionKind" + } + } + } + }, + "Permission": { + "description": "Descriptions of explicit privileges of commands.\n\nIt can enable commands to be accessible in the frontend of the application.\n\nIf the scope is defined it can be used to fine grain control the access of individual or multiple commands.", + "type": "object", + "required": [ + "identifier" + ], + "properties": { + "version": { + "description": "The version of the permission.", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 1.0 + }, + "identifier": { + "description": "A unique identifier for the permission.", + "type": "string" + }, + "description": { + "description": "Human-readable description of what the permission does. Tauri internal convention is to use `

` headings in markdown content for Tauri documentation generation purposes.", + "type": [ + "string", + "null" + ] + }, + "commands": { + "description": "Allowed or denied commands when using this permission.", + "default": { + "allow": [], + "deny": [] + }, + "allOf": [ + { + "$ref": "#/definitions/Commands" + } + ] + }, + "scope": { + "description": "Allowed or denied scoped when using this permission.", + "allOf": [ + { + "$ref": "#/definitions/Scopes" + } + ] + }, + "platforms": { + "description": "Target platforms this permission applies. By default all platforms are affected by this permission.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Target" + } + } + } + }, + "Commands": { + "description": "Allowed and denied commands inside a permission.\n\nIf two commands clash inside of `allow` and `deny`, it should be denied by default.", + "type": "object", + "properties": { + "allow": { + "description": "Allowed command.", + "default": [], + "type": "array", + "items": { + "type": "string" + } + }, + "deny": { + "description": "Denied command, which takes priority.", + "default": [], + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "Scopes": { + "description": "An argument for fine grained behavior control of Tauri commands.\n\nIt can be of any serde serializable type and is used to allow or prevent certain actions inside a Tauri command. The configured scope is passed to the command and will be enforced by the command implementation.\n\n## Example\n\n```json { \"allow\": [{ \"path\": \"$HOME/**\" }], \"deny\": [{ \"path\": \"$HOME/secret.txt\" }] } ```", + "type": "object", + "properties": { + "allow": { + "description": "Data that defines what is allowed by the scope.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Value" + } + }, + "deny": { + "description": "Data that defines what is denied by the scope. 
This should be prioritized by validation logic.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Value" + } + } + } + }, + "Value": { + "description": "All supported ACL values.", + "anyOf": [ + { + "description": "Represents a null JSON value.", + "type": "null" + }, + { + "description": "Represents a [`bool`].", + "type": "boolean" + }, + { + "description": "Represents a valid ACL [`Number`].", + "allOf": [ + { + "$ref": "#/definitions/Number" + } + ] + }, + { + "description": "Represents a [`String`].", + "type": "string" + }, + { + "description": "Represents a list of other [`Value`]s.", + "type": "array", + "items": { + "$ref": "#/definitions/Value" + } + }, + { + "description": "Represents a map of [`String`] keys to [`Value`]s.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/Value" + } + } + ] + }, + "Number": { + "description": "A valid ACL number.", + "anyOf": [ + { + "description": "Represents an [`i64`].", + "type": "integer", + "format": "int64" + }, + { + "description": "Represents a [`f64`].", + "type": "number", + "format": "double" + } + ] + }, + "Target": { + "description": "Platform target.", + "oneOf": [ + { + "description": "MacOS.", + "type": "string", + "enum": [ + "macOS" + ] + }, + { + "description": "Windows.", + "type": "string", + "enum": [ + "windows" + ] + }, + { + "description": "Linux.", + "type": "string", + "enum": [ + "linux" + ] + }, + { + "description": "Android.", + "type": "string", + "enum": [ + "android" + ] + }, + { + "description": "iOS.", + "type": "string", + "enum": [ + "iOS" + ] + } + ] + }, + "PermissionKind": { + "type": "string", + "oneOf": [ + { + "description": "Enables the cleanup_llama_processes command without any pre-configured scope.", + "type": "string", + "const": "allow-cleanup-llama-processes", + "markdownDescription": "Enables the cleanup_llama_processes command without any pre-configured scope." 
+ }, + { + "description": "Denies the cleanup_llama_processes command without any pre-configured scope.", + "type": "string", + "const": "deny-cleanup-llama-processes", + "markdownDescription": "Denies the cleanup_llama_processes command without any pre-configured scope." + }, + { + "description": "Enables the find_session_by_model command without any pre-configured scope.", + "type": "string", + "const": "allow-find-session-by-model", + "markdownDescription": "Enables the find_session_by_model command without any pre-configured scope." + }, + { + "description": "Denies the find_session_by_model command without any pre-configured scope.", + "type": "string", + "const": "deny-find-session-by-model", + "markdownDescription": "Denies the find_session_by_model command without any pre-configured scope." + }, + { + "description": "Enables the generate_api_key command without any pre-configured scope.", + "type": "string", + "const": "allow-generate-api-key", + "markdownDescription": "Enables the generate_api_key command without any pre-configured scope." + }, + { + "description": "Denies the generate_api_key command without any pre-configured scope.", + "type": "string", + "const": "deny-generate-api-key", + "markdownDescription": "Denies the generate_api_key command without any pre-configured scope." + }, + { + "description": "Enables the get_all_sessions command without any pre-configured scope.", + "type": "string", + "const": "allow-get-all-sessions", + "markdownDescription": "Enables the get_all_sessions command without any pre-configured scope." + }, + { + "description": "Denies the get_all_sessions command without any pre-configured scope.", + "type": "string", + "const": "deny-get-all-sessions", + "markdownDescription": "Denies the get_all_sessions command without any pre-configured scope." 
+ }, + { + "description": "Enables the get_devices command without any pre-configured scope.", + "type": "string", + "const": "allow-get-devices", + "markdownDescription": "Enables the get_devices command without any pre-configured scope." + }, + { + "description": "Denies the get_devices command without any pre-configured scope.", + "type": "string", + "const": "deny-get-devices", + "markdownDescription": "Denies the get_devices command without any pre-configured scope." + }, + { + "description": "Enables the get_loaded_models command without any pre-configured scope.", + "type": "string", + "const": "allow-get-loaded-models", + "markdownDescription": "Enables the get_loaded_models command without any pre-configured scope." + }, + { + "description": "Denies the get_loaded_models command without any pre-configured scope.", + "type": "string", + "const": "deny-get-loaded-models", + "markdownDescription": "Denies the get_loaded_models command without any pre-configured scope." + }, + { + "description": "Enables the get_random_port command without any pre-configured scope.", + "type": "string", + "const": "allow-get-random-port", + "markdownDescription": "Enables the get_random_port command without any pre-configured scope." + }, + { + "description": "Denies the get_random_port command without any pre-configured scope.", + "type": "string", + "const": "deny-get-random-port", + "markdownDescription": "Denies the get_random_port command without any pre-configured scope." + }, + { + "description": "Enables the get_session_by_model command without any pre-configured scope.", + "type": "string", + "const": "allow-get-session-by-model", + "markdownDescription": "Enables the get_session_by_model command without any pre-configured scope." 
+ }, + { + "description": "Denies the get_session_by_model command without any pre-configured scope.", + "type": "string", + "const": "deny-get-session-by-model", + "markdownDescription": "Denies the get_session_by_model command without any pre-configured scope." + }, + { + "description": "Enables the is_process_running command without any pre-configured scope.", + "type": "string", + "const": "allow-is-process-running", + "markdownDescription": "Enables the is_process_running command without any pre-configured scope." + }, + { + "description": "Denies the is_process_running command without any pre-configured scope.", + "type": "string", + "const": "deny-is-process-running", + "markdownDescription": "Denies the is_process_running command without any pre-configured scope." + }, + { + "description": "Enables the load_llama_model command without any pre-configured scope.", + "type": "string", + "const": "allow-load-llama-model", + "markdownDescription": "Enables the load_llama_model command without any pre-configured scope." + }, + { + "description": "Denies the load_llama_model command without any pre-configured scope.", + "type": "string", + "const": "deny-load-llama-model", + "markdownDescription": "Denies the load_llama_model command without any pre-configured scope." + }, + { + "description": "Enables the read_gguf_metadata command without any pre-configured scope.", + "type": "string", + "const": "allow-read-gguf-metadata", + "markdownDescription": "Enables the read_gguf_metadata command without any pre-configured scope." + }, + { + "description": "Denies the read_gguf_metadata command without any pre-configured scope.", + "type": "string", + "const": "deny-read-gguf-metadata", + "markdownDescription": "Denies the read_gguf_metadata command without any pre-configured scope." 
+ }, + { + "description": "Enables the unload_llama_model command without any pre-configured scope.", + "type": "string", + "const": "allow-unload-llama-model", + "markdownDescription": "Enables the unload_llama_model command without any pre-configured scope." + }, + { + "description": "Denies the unload_llama_model command without any pre-configured scope.", + "type": "string", + "const": "deny-unload-llama-model", + "markdownDescription": "Denies the unload_llama_model command without any pre-configured scope." + }, + { + "description": "Default permissions for the llamacpp plugin\n#### This default permission set includes:\n\n- `allow-cleanup-llama-processes`\n- `allow-load-llama-model`\n- `allow-unload-llama-model`\n- `allow-get-devices`\n- `allow-generate-api-key`\n- `allow-is-process-running`\n- `allow-get-random-port`\n- `allow-find-session-by-model`\n- `allow-get-loaded-models`\n- `allow-get-all-sessions`\n- `allow-get-session-by-model`\n- `allow-read-gguf-metadata`", + "type": "string", + "const": "default", + "markdownDescription": "Default permissions for the llamacpp plugin\n#### This default permission set includes:\n\n- `allow-cleanup-llama-processes`\n- `allow-load-llama-model`\n- `allow-unload-llama-model`\n- `allow-get-devices`\n- `allow-generate-api-key`\n- `allow-is-process-running`\n- `allow-get-random-port`\n- `allow-find-session-by-model`\n- `allow-get-loaded-models`\n- `allow-get-all-sessions`\n- `allow-get-session-by-model`\n- `allow-read-gguf-metadata`" + } + ] + } + } +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/rollup.config.js b/src-tauri/plugins/tauri-plugin-llamacpp/rollup.config.js new file mode 100644 index 000000000..8b4768ff6 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/rollup.config.js @@ -0,0 +1,31 @@ +import { readFileSync } from 'node:fs' +import { dirname, join } from 'node:path' +import { cwd } from 'node:process' +import typescript from '@rollup/plugin-typescript' + +const pkg = 
JSON.parse(readFileSync(join(cwd(), 'package.json'), 'utf8')) + +export default { + input: 'guest-js/index.ts', + output: [ + { + file: pkg.exports.import, + format: 'esm' + }, + { + file: pkg.exports.require, + format: 'cjs' + } + ], + plugins: [ + typescript({ + declaration: true, + declarationDir: dirname(pkg.exports.import) + }) + ], + external: [ + /^@tauri-apps\/api/, + ...Object.keys(pkg.dependencies || {}), + ...Object.keys(pkg.peerDependencies || {}) + ] +} diff --git a/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/cleanup.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/cleanup.rs similarity index 80% rename from src-tauri/src/core/utils/extensions/inference_llamacpp_extension/cleanup.rs rename to src-tauri/plugins/tauri-plugin-llamacpp/src/cleanup.rs index be7dde514..0721b1c8f 100644 --- a/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/cleanup.rs +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/cleanup.rs @@ -1,8 +1,15 @@ -use crate::core::state::AppState; -use tauri::State; +use tauri::{Manager, Runtime}; -pub async fn cleanup_processes(state: State<'_, AppState>) { - let mut map = state.llama_server_process.lock().await; +pub async fn cleanup_processes(app_handle: &tauri::AppHandle) { + // Access the global AppState from the main app + let app_state = match app_handle.try_state::() { + Some(state) => state, + None => { + log::warn!("LlamacppState not found in app_handle"); + return; + } + }; + let mut map = app_state.llama_server_process.lock().await; let pids: Vec = map.keys().cloned().collect(); for pid in pids { if let Some(session) = map.remove(&pid) { @@ -64,3 +71,11 @@ pub async fn cleanup_processes(state: State<'_, AppState>) { } } } + +#[tauri::command] +pub async fn cleanup_llama_processes( + app_handle: tauri::AppHandle, +) -> Result<(), String> { + cleanup_processes(&app_handle).await; + Ok(()) +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/commands.rs 
b/src-tauri/plugins/tauri-plugin-llamacpp/src/commands.rs new file mode 100644 index 000000000..a2592f345 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/commands.rs @@ -0,0 +1,319 @@ +use base64::{engine::general_purpose, Engine as _}; +use hmac::{Hmac, Mac}; +use sha2::Sha256; +use std::process::Stdio; +use std::time::Duration; +use tauri::{Manager, Runtime, State}; +use tokio::io::{AsyncBufReadExt, BufReader}; +use tokio::process::Command; +use tokio::sync::mpsc; +use tokio::time::Instant; + +use crate::device::{get_devices_from_backend, DeviceInfo}; +use crate::error::{ErrorCode, LlamacppError, ServerError, ServerResult}; +use crate::path::{validate_binary_path, validate_model_path}; +use crate::process::{ + find_session_by_model_id, get_all_active_sessions, get_all_loaded_model_ids, + get_random_available_port, is_process_running_by_pid, +}; +use crate::state::{LLamaBackendSession, LlamacppState, SessionInfo}; +use jan_utils::{ + extract_arg_value, parse_port_from_args, setup_library_path, setup_windows_process_flags, +}; + +#[cfg(unix)] +use crate::process::graceful_terminate_process; + +#[cfg(all(windows, target_arch = "x86_64"))] +use crate::process::force_terminate_process; + +type HmacSha256 = Hmac; + +#[derive(serde::Serialize, serde::Deserialize)] +pub struct UnloadResult { + success: bool, + error: Option, +} + +/// Load a llama model and start the server +#[tauri::command] +pub async fn load_llama_model( + app_handle: tauri::AppHandle, + backend_path: &str, + library_path: Option<&str>, + mut args: Vec, +) -> ServerResult { + let state: State = app_handle.state(); + let mut process_map = state.llama_server_process.lock().await; + + log::info!("Attempting to launch server at path: {:?}", backend_path); + log::info!("Using arguments: {:?}", args); + + validate_binary_path(backend_path)?; + + let port = parse_port_from_args(&args); + let model_path_pb = validate_model_path(&mut args)?; + + let api_key = extract_arg_value(&args, 
"--api-key"); + let model_id = extract_arg_value(&args, "-a"); + + // Configure the command to run the server + let mut command = Command::new(backend_path); + command.args(args); + + setup_library_path(library_path, &mut command); + command.stdout(Stdio::piped()); + command.stderr(Stdio::piped()); + setup_windows_process_flags(&mut command); + + // Spawn the child process + let mut child = command.spawn().map_err(ServerError::Io)?; + + let stderr = child.stderr.take().expect("stderr was piped"); + let stdout = child.stdout.take().expect("stdout was piped"); + + // Create channels for communication between tasks + let (ready_tx, mut ready_rx) = mpsc::channel::(1); + + // Spawn task to monitor stdout for readiness + let _stdout_task = tokio::spawn(async move { + let mut reader = BufReader::new(stdout); + let mut byte_buffer = Vec::new(); + + loop { + byte_buffer.clear(); + match reader.read_until(b'\n', &mut byte_buffer).await { + Ok(0) => break, // EOF + Ok(_) => { + let line = String::from_utf8_lossy(&byte_buffer); + let line = line.trim_end(); + if !line.is_empty() { + log::info!("[llamacpp stdout] {}", line); + } + } + Err(e) => { + log::error!("Error reading stdout: {}", e); + break; + } + } + } + }); + + // Spawn task to capture stderr and monitor for errors + let stderr_task = tokio::spawn(async move { + let mut reader = BufReader::new(stderr); + let mut byte_buffer = Vec::new(); + let mut stderr_buffer = String::new(); + + loop { + byte_buffer.clear(); + match reader.read_until(b'\n', &mut byte_buffer).await { + Ok(0) => break, // EOF + Ok(_) => { + let line = String::from_utf8_lossy(&byte_buffer); + let line = line.trim_end(); + + if !line.is_empty() { + stderr_buffer.push_str(line); + stderr_buffer.push('\n'); + log::info!("[llamacpp] {}", line); + + // Check for readiness indicator - llama-server outputs this when ready + let line_lower = line.to_string().to_lowercase(); + if line_lower.contains("server is listening on") + || line_lower.contains("starting 
the main loop") + || line_lower.contains("server listening on") + { + log::info!("Model appears to be ready based on logs: '{}'", line); + let _ = ready_tx.send(true).await; + } + } + } + Err(e) => { + log::error!("Error reading logs: {}", e); + break; + } + } + } + + stderr_buffer + }); + + // Check if process exited early + if let Some(status) = child.try_wait()? { + if !status.success() { + let stderr_output = stderr_task.await.unwrap_or_default(); + log::error!("llama.cpp failed early with code {:?}", status); + log::error!("{}", stderr_output); + return Err(LlamacppError::from_stderr(&stderr_output).into()); + } + } + + // Wait for server to be ready or timeout + let timeout_duration = Duration::from_secs(180); // 3 minutes timeout + let start_time = Instant::now(); + log::info!("Waiting for model session to be ready..."); + loop { + tokio::select! { + // Server is ready + Some(true) = ready_rx.recv() => { + log::info!("Model is ready to accept requests!"); + break; + } + // Check for process exit more frequently + _ = tokio::time::sleep(Duration::from_millis(50)) => { + // Check if process exited + if let Some(status) = child.try_wait()? 
{ + let stderr_output = stderr_task.await.unwrap_or_default(); + if !status.success() { + log::error!("llama.cpp exited with error code {:?}", status); + return Err(LlamacppError::from_stderr(&stderr_output).into()); + } else { + log::error!("llama.cpp exited successfully but without ready signal"); + return Err(LlamacppError::from_stderr(&stderr_output).into()); + } + } + + // Timeout check + if start_time.elapsed() > timeout_duration { + log::error!("Timeout waiting for server to be ready"); + let _ = child.kill().await; + let stderr_output = stderr_task.await.unwrap_or_default(); + return Err(LlamacppError::new( + ErrorCode::ModelLoadTimedOut, + "The model took too long to load and timed out.".into(), + Some(format!("Timeout: {}s\n\nStderr:\n{}", timeout_duration.as_secs(), stderr_output)), + ).into()); + } + } + } + } + + // Get the PID to use as session ID + let pid = child.id().map(|id| id as i32).unwrap_or(-1); + + log::info!("Server process started with PID: {} and is ready", pid); + let session_info = SessionInfo { + pid: pid.clone(), + port: port, + model_id: model_id, + model_path: model_path_pb.display().to_string(), + api_key: api_key, + }; + + // Insert session info to process_map + process_map.insert( + pid.clone(), + LLamaBackendSession { + child, + info: session_info.clone(), + }, + ); + + Ok(session_info) +} + +/// Unload a llama model by terminating its process +#[tauri::command] +pub async fn unload_llama_model( + app_handle: tauri::AppHandle, + pid: i32, +) -> ServerResult { + let state: State = app_handle.state(); + let mut map = state.llama_server_process.lock().await; + + if let Some(session) = map.remove(&pid) { + let mut child = session.child; + + #[cfg(unix)] + { + graceful_terminate_process(&mut child).await; + } + + #[cfg(all(windows, target_arch = "x86_64"))] + { + force_terminate_process(&mut child).await; + } + + Ok(UnloadResult { + success: true, + error: None, + }) + } else { + log::warn!("No server with PID '{}' found", pid); + 
Ok(UnloadResult { + success: true, + error: None, + }) + } +} + +/// Get available devices from the llama.cpp backend +#[tauri::command] +pub async fn get_devices( + backend_path: &str, + library_path: Option<&str>, +) -> ServerResult> { + get_devices_from_backend(backend_path, library_path).await +} + +/// Generate API key using HMAC-SHA256 +#[tauri::command] +pub fn generate_api_key(model_id: String, api_secret: String) -> Result { + let mut mac = HmacSha256::new_from_slice(api_secret.as_bytes()) + .map_err(|e| format!("Invalid key length: {}", e))?; + mac.update(model_id.as_bytes()); + let result = mac.finalize(); + let code_bytes = result.into_bytes(); + let hash = general_purpose::STANDARD.encode(code_bytes); + Ok(hash) +} + +/// Check if a process is still running +#[tauri::command] +pub async fn is_process_running( + app_handle: tauri::AppHandle, + pid: i32, +) -> Result { + is_process_running_by_pid(app_handle, pid).await +} + +/// Get a random available port +#[tauri::command] +pub async fn get_random_port(app_handle: tauri::AppHandle) -> Result { + get_random_available_port(app_handle).await +} + +/// Find session information by model ID +#[tauri::command] +pub async fn find_session_by_model( + app_handle: tauri::AppHandle, + model_id: String, +) -> Result, String> { + find_session_by_model_id(app_handle, &model_id).await +} + +/// Get all loaded model IDs +#[tauri::command] +pub async fn get_loaded_models( + app_handle: tauri::AppHandle, +) -> Result, String> { + get_all_loaded_model_ids(app_handle).await +} + +/// Get all active sessions +#[tauri::command] +pub async fn get_all_sessions( + app_handle: tauri::AppHandle, +) -> Result, String> { + get_all_active_sessions(app_handle).await +} + +/// Get session information by model ID +#[tauri::command] +pub async fn get_session_by_model( + app_handle: tauri::AppHandle, + model_id: String, +) -> Result, String> { + find_session_by_model_id(app_handle, &model_id).await +} diff --git 
a/src-tauri/plugins/tauri-plugin-llamacpp/src/device.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/device.rs new file mode 100644 index 000000000..285547209 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/device.rs @@ -0,0 +1,227 @@ +use serde::{Deserialize, Serialize}; +use std::process::Stdio; +use std::time::Duration; +use tokio::process::Command; +use tokio::time::timeout; + +use crate::error::{ErrorCode, LlamacppError, ServerError, ServerResult}; +use crate::path::validate_binary_path; +use jan_utils::{setup_library_path, setup_windows_process_flags}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DeviceInfo { + pub id: String, + pub name: String, + pub mem: i32, + pub free: i32, +} + +pub async fn get_devices_from_backend( + backend_path: &str, + library_path: Option<&str>, +) -> ServerResult> { + log::info!("Getting devices from server at path: {:?}", backend_path); + + validate_binary_path(backend_path)?; + + // Configure the command to run the server with --list-devices + let mut command = Command::new(backend_path); + command.arg("--list-devices"); + + // Set up library path + setup_library_path(library_path, &mut command); + + command.stdout(Stdio::piped()); + command.stderr(Stdio::piped()); + + setup_windows_process_flags(&mut command); + + // Execute the command and wait for completion + let output = timeout(Duration::from_secs(30), command.output()) + .await + .map_err(|_| { + LlamacppError::new( + ErrorCode::InternalError, + "Timeout waiting for device list".into(), + None, + ) + })? 
+ .map_err(ServerError::Io)?; + + // Check if command executed successfully + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + log::error!("llama-server --list-devices failed: {}", stderr); + return Err(LlamacppError::from_stderr(&stderr).into()); + } + // Parse the output + let stdout = String::from_utf8_lossy(&output.stdout); + log::info!("Device list output:\n{}", stdout); + + parse_device_output(&stdout) +} + +fn parse_device_output(output: &str) -> ServerResult> { + let mut devices = Vec::new(); + let mut found_devices_section = false; + + for raw in output.lines() { + // detect header (ignoring whitespace) + if raw.trim() == "Available devices:" { + found_devices_section = true; + continue; + } + + if !found_devices_section { + continue; + } + + // skip blank lines + if raw.trim().is_empty() { + continue; + } + + // now parse any non-blank line after the header + let line = raw.trim(); + if let Some(device) = parse_device_line(line)? { + devices.push(device); + } + } + + if devices.is_empty() && found_devices_section { + log::warn!("No devices found in output"); + } else if !found_devices_section { + return Err(LlamacppError::new( + ErrorCode::DeviceListParseFailed, + "Could not find 'Available devices:' section in the backend output.".into(), + Some(output.to_string()), + ) + .into()); + } + + Ok(devices) +} + +fn parse_device_line(line: &str) -> ServerResult> { + let line = line.trim(); + + log::info!("Parsing device line: '{}'", line); + + // Expected formats: + // "Vulkan0: Intel(R) Arc(tm) A750 Graphics (DG2) (8128 MiB, 8128 MiB free)" + // "CUDA0: NVIDIA GeForce RTX 4090 (24576 MiB, 24000 MiB free)" + // "SYCL0: Intel(R) Arc(TM) A750 Graphics (8000 MiB, 7721 MiB free)" + + // Split by colon to get ID and rest + let parts: Vec<&str> = line.splitn(2, ':').collect(); + if parts.len() != 2 { + log::warn!("Skipping malformed device line: {}", line); + return Ok(None); + } + + let id = parts[0].trim().to_string(); + let 
rest = parts[1].trim(); + + // Use regex-like approach to find the memory pattern at the end + // Look for pattern: (number MiB, number MiB free) at the end + if let Some(memory_match) = find_memory_pattern(rest) { + let (memory_start, memory_content) = memory_match; + let name = rest[..memory_start].trim().to_string(); + + // Parse memory info: "8128 MiB, 8128 MiB free" + let memory_parts: Vec<&str> = memory_content.split(',').collect(); + if memory_parts.len() >= 2 { + if let (Ok(total_mem), Ok(free_mem)) = ( + parse_memory_value(memory_parts[0].trim()), + parse_memory_value(memory_parts[1].trim()), + ) { + log::info!( + "Parsed device - ID: '{}', Name: '{}', Mem: {}, Free: {}", + id, + name, + total_mem, + free_mem + ); + + return Ok(Some(DeviceInfo { + id, + name, + mem: total_mem, + free: free_mem, + })); + } + } + } + + log::warn!("Could not parse device line: {}", line); + Ok(None) +} + +fn find_memory_pattern(text: &str) -> Option<(usize, &str)> { + // Find the last parenthesis that contains the memory pattern + let mut last_match = None; + let mut chars = text.char_indices().peekable(); + + while let Some((start_idx, ch)) = chars.next() { + if ch == '(' { + // Find the closing parenthesis + let remaining = &text[start_idx + 1..]; + if let Some(close_pos) = remaining.find(')') { + let content = &remaining[..close_pos]; + + // Check if this looks like memory info + if is_memory_pattern(content) { + last_match = Some((start_idx, content)); + } + } + } + } + + last_match +} + +fn is_memory_pattern(content: &str) -> bool { + // Check if content matches pattern like "8128 MiB, 8128 MiB free" + // Must contain: numbers, "MiB", comma, "free" + if !(content.contains("MiB") && content.contains("free") && content.contains(',')) { + return false; + } + + let parts: Vec<&str> = content.split(',').collect(); + if parts.len() != 2 { + return false; + } + + parts.iter().all(|part| { + let part = part.trim(); + // Each part should start with a number and contain "MiB" + 
part.split_whitespace() + .next() + .map_or(false, |first_word| first_word.parse::().is_ok()) + && part.contains("MiB") + }) +} + +fn parse_memory_value(mem_str: &str) -> ServerResult { + // Handle formats like "8000 MiB" or "7721 MiB free" + let parts: Vec<&str> = mem_str.split_whitespace().collect(); + if parts.is_empty() { + return Err(LlamacppError::new( + ErrorCode::DeviceListParseFailed, + format!("empty memory value: {}", mem_str), + None, + ) + .into()); + } + + // Take the first part which should be the number + let number_str = parts[0]; + number_str.parse::().map_err(|_| { + LlamacppError::new( + ErrorCode::DeviceListParseFailed, + format!("Could not parse memory value: '{}'", number_str), + None, + ) + .into() + }) +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/error.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/error.rs new file mode 100644 index 000000000..647b2fead --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/error.rs @@ -0,0 +1,115 @@ +use serde::{Deserialize, Serialize}; +use thiserror; + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum ErrorCode { + BinaryNotFound, + ModelFileNotFound, + LibraryPathInvalid, + + // --- Model Loading Errors --- + ModelLoadFailed, + DraftModelLoadFailed, + MultimodalProjectorLoadFailed, + ModelArchNotSupported, + ModelLoadTimedOut, + LlamaCppProcessError, + + // --- Memory Errors --- + OutOfMemory, + + // --- Internal Application Errors --- + DeviceListParseFailed, + IoError, + InternalError, +} + +#[derive(Debug, Clone, Serialize, thiserror::Error)] +#[error("LlamacppError {{ code: {code:?}, message: \"{message}\" }}")] +pub struct LlamacppError { + pub code: ErrorCode, + pub message: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub details: Option, +} + +impl LlamacppError { + pub fn new(code: ErrorCode, message: String, details: Option) -> Self { + Self { + code, + message, + details, + } + } + + /// Parses 
stderr from llama.cpp and creates a specific LlamacppError. + pub fn from_stderr(stderr: &str) -> Self { + let lower_stderr = stderr.to_lowercase(); + // TODO: add others + let is_out_of_memory = lower_stderr.contains("out of memory") + || lower_stderr.contains("insufficient memory") + || lower_stderr.contains("erroroutofdevicememory") // vulkan specific + || lower_stderr.contains("kiogpucommandbuffercallbackerroroutofmemory") // Metal-specific error code + || lower_stderr.contains("cuda_error_out_of_memory"); // CUDA-specific + + if is_out_of_memory { + return Self::new( + ErrorCode::OutOfMemory, + "Out of memory. The model requires more RAM or VRAM than available.".into(), + Some(stderr.into()), + ); + } + + if lower_stderr.contains("error loading model architecture") { + return Self::new( + ErrorCode::ModelArchNotSupported, + "The model's architecture is not supported by this version of the backend.".into(), + Some(stderr.into()), + ); + } + Self::new( + ErrorCode::LlamaCppProcessError, + "The model process encountered an unexpected error.".into(), + Some(stderr.into()), + ) + } +} + +// Error type for server commands +#[derive(Debug, thiserror::Error)] +pub enum ServerError { + #[error(transparent)] + Llamacpp(#[from] LlamacppError), + + #[error("IO error: {0}")] + Io(#[from] std::io::Error), + + #[error("Tauri error: {0}")] + Tauri(#[from] tauri::Error), +} + +// impl serialization for tauri +impl serde::Serialize for ServerError { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let error_to_serialize: LlamacppError = match self { + ServerError::Llamacpp(err) => err.clone(), + ServerError::Io(e) => LlamacppError::new( + ErrorCode::IoError, + "An input/output error occurred.".into(), + Some(e.to_string()), + ), + ServerError::Tauri(e) => LlamacppError::new( + ErrorCode::InternalError, + "An internal application error occurred.".into(), + Some(e.to_string()), + ), + }; + error_to_serialize.serialize(serializer) + } +} + +pub 
type ServerResult = Result; diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/commands.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/commands.rs new file mode 100644 index 000000000..5d005a241 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/commands.rs @@ -0,0 +1,8 @@ +use super::helpers; +use super::types::GgufMetadata; + +/// Read GGUF metadata from a model file +#[tauri::command] +pub async fn read_gguf_metadata(path: String) -> Result { + helpers::read_gguf_metadata(&path).map_err(|e| format!("Failed to read GGUF metadata: {}", e)) +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/helpers.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/helpers.rs new file mode 100644 index 000000000..245b986a1 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/helpers.rs @@ -0,0 +1,161 @@ +use byteorder::{LittleEndian, ReadBytesExt}; +use std::convert::TryFrom; +use std::fs::File; +use std::io::{self, BufReader, Read, Seek}; +use std::path::Path; + +use super::types::{GgufMetadata, GgufValueType}; + +pub fn read_gguf_metadata>(path: P) -> io::Result { + let mut file = BufReader::new(File::open(path)?); + + let mut magic = [0u8; 4]; + file.read_exact(&mut magic)?; + if &magic != b"GGUF" { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + "Not a GGUF file", + )); + } + + let version = file.read_u32::()?; + let tensor_count = file.read_u64::()?; + let metadata_count = file.read_u64::()?; + + let mut metadata_map = std::collections::HashMap::new(); + for i in 0..metadata_count { + match read_metadata_entry(&mut file, i) { + Ok((key, value)) => { + metadata_map.insert(key, value); + } + Err(e) => { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + format!("Error reading metadata entry {}: {}", i, e), + )); + } + } + } + + Ok(GgufMetadata { + version, + tensor_count, + metadata: metadata_map, + }) +} + +fn read_metadata_entry(reader: &mut R, index: u64) -> io::Result<(String, 
String)> +where + R: ReadBytesExt, +{ + let key = read_gguf_string(reader).map_err(|e| { + io::Error::new( + io::ErrorKind::InvalidData, + format!("Failed to read key for metadata entry {}: {}", index, e), + ) + })?; + + let value_type_u32 = reader.read_u32::()?; + let value_type = GgufValueType::try_from(value_type_u32)?; + let value = read_gguf_value(reader, value_type)?; + + Ok((key, value)) +} + +fn read_gguf_string(reader: &mut R) -> io::Result +where + R: ReadBytesExt, +{ + let len = reader.read_u64::()?; + if len > (1024 * 1024) { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + format!("String length {} is unreasonably large", len), + )); + } + let mut buf = vec![0u8; len as usize]; + reader.read_exact(&mut buf)?; + Ok(String::from_utf8(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?) +} + +fn read_gguf_value(reader: &mut R, value_type: GgufValueType) -> io::Result +where + R: ReadBytesExt, +{ + match value_type { + GgufValueType::Uint8 => Ok(reader.read_u8()?.to_string()), + GgufValueType::Int8 => Ok(reader.read_i8()?.to_string()), + GgufValueType::Uint16 => Ok(reader.read_u16::()?.to_string()), + GgufValueType::Int16 => Ok(reader.read_i16::()?.to_string()), + GgufValueType::Uint32 => Ok(reader.read_u32::()?.to_string()), + GgufValueType::Int32 => Ok(reader.read_i32::()?.to_string()), + GgufValueType::Float32 => Ok(reader.read_f32::()?.to_string()), + GgufValueType::Bool => Ok((reader.read_u8()? 
!= 0).to_string()), + GgufValueType::String => read_gguf_string(reader), + GgufValueType::Uint64 => Ok(reader.read_u64::()?.to_string()), + GgufValueType::Int64 => Ok(reader.read_i64::()?.to_string()), + GgufValueType::Float64 => Ok(reader.read_f64::()?.to_string()), + GgufValueType::Array => { + let elem_type_u32 = reader.read_u32::()?; + let elem_type = GgufValueType::try_from(elem_type_u32)?; + let len = reader.read_u64::()?; + + if len > 1_000_000 { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + format!("Array length {} is unreasonably large", len), + )); + } + + if len > 24 { + skip_array_data(reader, elem_type, len)?; + return Ok(format!( + "", + elem_type, len + )); + } + + let mut elems = Vec::with_capacity(len as usize); + for _ in 0..len { + elems.push(read_gguf_value(reader, elem_type)?); + } + Ok(format!("[{}]", elems.join(", "))) + } + } +} + +fn skip_array_data( + reader: &mut R, + elem_type: GgufValueType, + len: u64, +) -> io::Result<()> +where + R: ReadBytesExt, +{ + match elem_type { + GgufValueType::Uint8 | GgufValueType::Int8 | GgufValueType::Bool => { + reader.seek(io::SeekFrom::Current(len as i64))?; + } + GgufValueType::Uint16 | GgufValueType::Int16 => { + reader.seek(io::SeekFrom::Current((len * 2) as i64))?; + } + GgufValueType::Uint32 | GgufValueType::Int32 | GgufValueType::Float32 => { + reader.seek(io::SeekFrom::Current((len * 4) as i64))?; + } + GgufValueType::Uint64 | GgufValueType::Int64 | GgufValueType::Float64 => { + reader.seek(io::SeekFrom::Current((len * 8) as i64))?; + } + GgufValueType::String => { + for _ in 0..len { + let str_len = reader.read_u64::()?; + reader.seek(io::SeekFrom::Current(str_len as i64))?; + } + } + GgufValueType::Array => { + for _ in 0..len { + read_gguf_value(reader, elem_type)?; + } + } + } + Ok(()) +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/mod.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/mod.rs new file mode 100644 index 000000000..44fa1911f --- /dev/null 
+++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/mod.rs @@ -0,0 +1,3 @@ +pub mod commands; +pub mod helpers; +pub mod types; diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/types.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/types.rs new file mode 100644 index 000000000..a2bc73c59 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/types.rs @@ -0,0 +1,54 @@ +use serde::Serialize; +use std::collections::HashMap; +use std::convert::TryFrom; +use std::io; + +#[derive(Debug, Clone, Copy)] +#[repr(u32)] +pub enum GgufValueType { + Uint8 = 0, + Int8 = 1, + Uint16 = 2, + Int16 = 3, + Uint32 = 4, + Int32 = 5, + Float32 = 6, + Bool = 7, + String = 8, + Array = 9, + Uint64 = 10, + Int64 = 11, + Float64 = 12, +} + +impl TryFrom for GgufValueType { + type Error = io::Error; + fn try_from(value: u32) -> Result { + match value { + 0 => Ok(Self::Uint8), + 1 => Ok(Self::Int8), + 2 => Ok(Self::Uint16), + 3 => Ok(Self::Int16), + 4 => Ok(Self::Uint32), + 5 => Ok(Self::Int32), + 6 => Ok(Self::Float32), + 7 => Ok(Self::Bool), + 8 => Ok(Self::String), + 9 => Ok(Self::Array), + 10 => Ok(Self::Uint64), + 11 => Ok(Self::Int64), + 12 => Ok(Self::Float64), + _ => Err(io::Error::new( + io::ErrorKind::InvalidData, + format!("Unknown GGUF value type: {}", value), + )), + } + } +} + +#[derive(Serialize)] +pub struct GgufMetadata { + pub version: u32, + pub tensor_count: u64, + pub metadata: HashMap, +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/lib.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/lib.rs new file mode 100644 index 000000000..d35cb24cf --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/lib.rs @@ -0,0 +1,43 @@ +use tauri::{ + plugin::{Builder, TauriPlugin}, + Manager, Runtime, +}; + +pub mod cleanup; +mod commands; +mod device; +mod error; +mod gguf; +mod path; +mod process; +pub mod state; +pub use cleanup::cleanup_llama_processes; +pub use state::LLamaBackendSession; + +/// Initializes the plugin. 
+pub fn init() -> TauriPlugin { + Builder::new("llamacpp") + .invoke_handler(tauri::generate_handler![ + // Cleanup command + cleanup::cleanup_llama_processes, + // LlamaCpp server commands + commands::load_llama_model, + commands::unload_llama_model, + commands::get_devices, + commands::generate_api_key, + commands::is_process_running, + commands::get_random_port, + commands::find_session_by_model, + commands::get_loaded_models, + commands::get_all_sessions, + commands::get_session_by_model, + // GGUF commands + gguf::commands::read_gguf_metadata, + ]) + .setup(|app, _api| { + // Initialize and manage the plugin state + app.manage(state::LlamacppState::new()); + Ok(()) + }) + .build() +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/path.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/path.rs new file mode 100644 index 000000000..44ed00109 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/path.rs @@ -0,0 +1,100 @@ +use std::path::PathBuf; + +use crate::error::{ErrorCode, LlamacppError, ServerResult}; + +#[cfg(windows)] +use std::os::windows::ffi::OsStrExt; + +#[cfg(windows)] +use std::ffi::OsStr; + +#[cfg(windows)] +use windows_sys::Win32::Storage::FileSystem::GetShortPathNameW; + +/// Get Windows short path to avoid issues with spaces and special characters +#[cfg(windows)] +pub fn get_short_path>(path: P) -> Option { + let wide: Vec = OsStr::new(path.as_ref()) + .encode_wide() + .chain(Some(0)) + .collect(); + + let mut buffer = vec![0u16; 260]; + let len = unsafe { GetShortPathNameW(wide.as_ptr(), buffer.as_mut_ptr(), buffer.len() as u32) }; + + if len > 0 { + Some(String::from_utf16_lossy(&buffer[..len as usize])) + } else { + None + } +} + +/// Validate that a binary path exists and is accessible +pub fn validate_binary_path(backend_path: &str) -> ServerResult { + let server_path_buf = PathBuf::from(backend_path); + if !server_path_buf.exists() { + let err_msg = format!("Binary not found at {:?}", backend_path); + log::error!( + 
"Server binary not found at expected path: {:?}", + backend_path + ); + return Err(LlamacppError::new( + ErrorCode::BinaryNotFound, + "The llama.cpp server binary could not be found.".into(), + Some(err_msg), + ) + .into()); + } + Ok(server_path_buf) +} + +/// Validate model path exists and update args with platform-appropriate path format +pub fn validate_model_path(args: &mut Vec) -> ServerResult { + let model_path_index = args.iter().position(|arg| arg == "-m").ok_or_else(|| { + LlamacppError::new( + ErrorCode::ModelLoadFailed, + "Model path argument '-m' is missing.".into(), + None, + ) + })?; + + let model_path = args.get(model_path_index + 1).cloned().ok_or_else(|| { + LlamacppError::new( + ErrorCode::ModelLoadFailed, + "Model path was not provided after '-m' flag.".into(), + None, + ) + })?; + + let model_path_pb = PathBuf::from(&model_path); + if !model_path_pb.exists() { + let err_msg = format!( + "Invalid or inaccessible model path: {}", + model_path_pb.display() + ); + log::error!("{}", &err_msg); + return Err(LlamacppError::new( + ErrorCode::ModelFileNotFound, + "The specified model file does not exist or is not accessible.".into(), + Some(err_msg), + ) + .into()); + } + + // Update the path in args with appropriate format for the platform + #[cfg(windows)] + { + // use short path on Windows + if let Some(short) = get_short_path(&model_path_pb) { + args[model_path_index + 1] = short; + } else { + args[model_path_index + 1] = model_path_pb.display().to_string(); + } + } + #[cfg(not(windows))] + { + args[model_path_index + 1] = model_path_pb.display().to_string(); + } + + Ok(model_path_pb) +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/process.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/process.rs new file mode 100644 index 000000000..3de983c51 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/process.rs @@ -0,0 +1,154 @@ +use std::collections::HashSet; +use std::time::Duration; +use sysinfo::{Pid, System}; +use 
tauri::{Manager, Runtime, State}; +use tokio::time::timeout; + +use crate::state::{LlamacppState, SessionInfo}; +use jan_utils::generate_random_port; + +/// Check if a process is running by PID +pub async fn is_process_running_by_pid( + app_handle: tauri::AppHandle, + pid: i32, +) -> Result { + let mut system = System::new(); + system.refresh_processes(sysinfo::ProcessesToUpdate::All, true); + let process_pid = Pid::from(pid as usize); + let alive = system.process(process_pid).is_some(); + + if !alive { + let state: State = app_handle.state(); + let mut map = state.llama_server_process.lock().await; + map.remove(&pid); + } + + Ok(alive) +} + +/// Get a random available port, avoiding ports used by existing sessions +pub async fn get_random_available_port( + app_handle: tauri::AppHandle, +) -> Result { + // Get all active ports from sessions + let state: State = app_handle.state(); + let map = state.llama_server_process.lock().await; + + let used_ports: HashSet = map + .values() + .filter_map(|session| { + // Convert valid ports to u16 (filter out placeholder ports like -1) + if session.info.port > 0 && session.info.port <= u16::MAX as i32 { + Some(session.info.port as u16) + } else { + None + } + }) + .collect(); + + drop(map); // unlock early + + generate_random_port(&used_ports) +} + +/// Gracefully terminate a process on Unix systems +#[cfg(unix)] +pub async fn graceful_terminate_process(child: &mut tokio::process::Child) { + use nix::sys::signal::{kill, Signal}; + use nix::unistd::Pid; + + if let Some(raw_pid) = child.id() { + let raw_pid = raw_pid as i32; + log::info!("Sending SIGTERM to PID {}", raw_pid); + let _ = kill(Pid::from_raw(raw_pid), Signal::SIGTERM); + + match timeout(Duration::from_secs(5), child.wait()).await { + Ok(Ok(status)) => log::info!("Process exited gracefully: {}", status), + Ok(Err(e)) => log::error!("Error waiting after SIGTERM: {}", e), + Err(_) => { + log::warn!("SIGTERM timed out; sending SIGKILL to PID {}", raw_pid); + let _ = 
kill(Pid::from_raw(raw_pid), Signal::SIGKILL); + match child.wait().await { + Ok(s) => log::info!("Force-killed process exited: {}", s), + Err(e) => log::error!("Error waiting after SIGKILL: {}", e), + } + } + } + } +} + +/// Force terminate a process on Windows +#[cfg(all(windows, target_arch = "x86_64"))] +pub async fn force_terminate_process(child: &mut tokio::process::Child) { + if let Some(raw_pid) = child.id() { + log::warn!( + "gracefully killing is unsupported on Windows, force-killing PID {}", + raw_pid + ); + + // Since we know a graceful shutdown doesn't work and there are no child processes + // to worry about, we can use `child.kill()` directly. On Windows, this is + // a forceful termination via the `TerminateProcess` API. + if let Err(e) = child.kill().await { + log::error!( + "Failed to send kill signal to PID {}: {}. It may have already terminated.", + raw_pid, + e + ); + } + + match child.wait().await { + Ok(status) => log::info!( + "process {} has been terminated. Final exit status: {}", + raw_pid, + status + ), + Err(e) => log::error!( + "Error waiting on child process {} after kill: {}", + raw_pid, + e + ), + } + } +} + +/// Find a session by model ID +pub async fn find_session_by_model_id( + app_handle: tauri::AppHandle, + model_id: &str, +) -> Result, String> { + let state: State = app_handle.state(); + let map = state.llama_server_process.lock().await; + + let session_info = map + .values() + .find(|backend_session| backend_session.info.model_id == model_id) + .map(|backend_session| backend_session.info.clone()); + + Ok(session_info) +} + +/// Get all loaded model IDs +pub async fn get_all_loaded_model_ids( + app_handle: tauri::AppHandle, +) -> Result, String> { + let state: State = app_handle.state(); + let map = state.llama_server_process.lock().await; + + let model_ids = map + .values() + .map(|backend_session| backend_session.info.model_id.clone()) + .collect(); + + Ok(model_ids) +} + +/// Get all active sessions +pub async fn 
get_all_active_sessions( + app_handle: tauri::AppHandle, +) -> Result, String> { + let state: State = app_handle.state(); + let map = state.llama_server_process.lock().await; + let sessions: Vec = map.values().map(|s| s.info.clone()).collect(); + Ok(sessions) +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/state.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/state.rs new file mode 100644 index 000000000..359a27951 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/state.rs @@ -0,0 +1,38 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::process::Child; +use tokio::sync::Mutex; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SessionInfo { + pub pid: i32, // opaque handle for unload/chat + pub port: i32, // llama-server output port + pub model_id: String, + pub model_path: String, // path of the loaded model + pub api_key: String, +} + +pub struct LLamaBackendSession { + pub child: Child, + pub info: SessionInfo, +} + +/// LlamaCpp plugin state +pub struct LlamacppState { + pub llama_server_process: Arc>>, +} + +impl Default for LlamacppState { + fn default() -> Self { + Self { + llama_server_process: Arc::new(Mutex::new(HashMap::new())), + } + } +} + +impl LlamacppState { + pub fn new() -> Self { + Self::default() + } +} diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/tsconfig.json b/src-tauri/plugins/tauri-plugin-llamacpp/tsconfig.json new file mode 100644 index 000000000..059112270 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-llamacpp/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "target": "es2021", + "module": "esnext", + "moduleResolution": "bundler", + "skipLibCheck": true, + "strict": true, + "noUnusedLocals": true, + "noImplicitAny": true, + "noEmit": true + }, + "include": ["guest-js/*.ts"], + "exclude": ["dist-js", "node_modules"] +} diff --git a/src-tauri/plugins/yarn.lock b/src-tauri/plugins/yarn.lock new file mode 100644 index 
000000000..e69de29bb diff --git a/src-tauri/src/core/app/commands.rs b/src-tauri/src/core/app/commands.rs new file mode 100644 index 000000000..ba3e493b3 --- /dev/null +++ b/src-tauri/src/core/app/commands.rs @@ -0,0 +1,212 @@ +use std::{fs, path::PathBuf}; +use tauri::{AppHandle, Manager, Runtime, State}; + +use super::{ + constants::CONFIGURATION_FILE_NAME, helpers::copy_dir_recursive, models::AppConfiguration, +}; +use crate::core::state::AppState; + +#[tauri::command] +pub fn get_app_configurations(app_handle: tauri::AppHandle) -> AppConfiguration { + let mut app_default_configuration = AppConfiguration::default(); + + if std::env::var("CI").unwrap_or_default() == "e2e" { + return app_default_configuration; + } + + let configuration_file = get_configuration_file_path(app_handle.clone()); + + let default_data_folder = default_data_folder_path(app_handle.clone()); + + if !configuration_file.exists() { + log::info!( + "App config not found, creating default config at {:?}", + configuration_file + ); + + app_default_configuration.data_folder = default_data_folder; + + if let Err(err) = fs::write( + &configuration_file, + serde_json::to_string(&app_default_configuration).unwrap(), + ) { + log::error!("Failed to create default config: {}", err); + } + + return app_default_configuration; + } + + match fs::read_to_string(&configuration_file) { + Ok(content) => match serde_json::from_str::(&content) { + Ok(app_configurations) => app_configurations, + Err(err) => { + log::error!( + "Failed to parse app config, returning default config instead. Error: {}", + err + ); + app_default_configuration + } + }, + Err(err) => { + log::error!( + "Failed to read app config, returning default config instead. 
Error: {}", + err + ); + app_default_configuration + } + } +} + +#[tauri::command] +pub fn update_app_configuration( + app_handle: tauri::AppHandle, + configuration: AppConfiguration, +) -> Result<(), String> { + let configuration_file = get_configuration_file_path(app_handle); + log::info!( + "update_app_configuration, configuration_file: {:?}", + configuration_file + ); + + fs::write( + configuration_file, + serde_json::to_string(&configuration).map_err(|e| e.to_string())?, + ) + .map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn get_jan_data_folder_path(app_handle: tauri::AppHandle) -> PathBuf { + if cfg!(test) { + let path = std::env::current_dir() + .unwrap_or_else(|_| PathBuf::from(".")) + .join("test-data"); + if !path.exists() { + let _ = fs::create_dir_all(&path); + } + return path; + } + + let app_configurations = get_app_configurations(app_handle); + PathBuf::from(app_configurations.data_folder) +} + +#[tauri::command] +pub fn get_configuration_file_path(app_handle: tauri::AppHandle) -> PathBuf { + let app_path = app_handle.path().app_data_dir().unwrap_or_else(|err| { + log::error!( + "Failed to get app data directory: {}. 
Using home directory instead.", + err + ); + + let home_dir = std::env::var(if cfg!(target_os = "windows") { + "USERPROFILE" + } else { + "HOME" + }) + .expect("Failed to determine the home directory"); + + PathBuf::from(home_dir) + }); + + let package_name = env!("CARGO_PKG_NAME"); + #[cfg(target_os = "linux")] + let old_data_dir = { + if let Some(config_path) = dirs::config_dir() { + config_path.join(package_name) + } else { + log::debug!("Could not determine config directory"); + app_path + .parent() + .unwrap_or(&app_path.join("../")) + .join(package_name) + } + }; + + #[cfg(not(target_os = "linux"))] + let old_data_dir = app_path + .parent() + .unwrap_or(&app_path.join("../")) + .join(package_name); + + if old_data_dir.exists() { + return old_data_dir.join(CONFIGURATION_FILE_NAME); + } else { + return app_path.join(CONFIGURATION_FILE_NAME); + } +} + +#[tauri::command] +pub fn default_data_folder_path(app_handle: tauri::AppHandle) -> String { + let mut path = app_handle.path().data_dir().unwrap(); + + let app_name = std::env::var("APP_NAME") + .unwrap_or_else(|_| app_handle.config().product_name.clone().unwrap()); + path.push(app_name); + path.push("data"); + + let mut path_str = path.to_str().unwrap().to_string(); + + if let Some(stripped) = path.to_str().unwrap().to_string().strip_suffix(".ai.app") { + path_str = stripped.to_string(); + } + + path_str +} + +#[tauri::command] +pub fn get_user_home_path(app: AppHandle) -> String { + return get_app_configurations(app.clone()).data_folder; +} + +#[tauri::command] +pub fn change_app_data_folder( + app_handle: tauri::AppHandle, + new_data_folder: String, +) -> Result<(), String> { + // Get current data folder path + let current_data_folder = get_jan_data_folder_path(app_handle.clone()); + let new_data_folder_path = PathBuf::from(&new_data_folder); + + // Create the new data folder if it doesn't exist + if !new_data_folder_path.exists() { + fs::create_dir_all(&new_data_folder_path) + .map_err(|e| format!("Failed to 
create new data folder: {}", e))?; + } + + // Copy all files from the old folder to the new one + if current_data_folder.exists() { + log::info!( + "Copying data from {:?} to {:?}", + current_data_folder, + new_data_folder_path + ); + + // Check if this is a parent directory to avoid infinite recursion + if new_data_folder_path.starts_with(¤t_data_folder) { + return Err( + "New data folder cannot be a subdirectory of the current data folder".to_string(), + ); + } + copy_dir_recursive( + ¤t_data_folder, + &new_data_folder_path, + &[".uvx", ".npx"], + ) + .map_err(|e| format!("Failed to copy data to new folder: {}", e))?; + } else { + log::info!("Current data folder does not exist, nothing to copy"); + } + + // Update the configuration to point to the new folder + let mut configuration = get_app_configurations(app_handle.clone()); + configuration.data_folder = new_data_folder; + + // Save the updated configuration + update_app_configuration(app_handle, configuration) +} + +#[tauri::command] +pub fn app_token(state: State<'_, AppState>) -> Option { + state.app_token.clone() +} diff --git a/src-tauri/src/core/app/constants.rs b/src-tauri/src/core/app/constants.rs new file mode 100644 index 000000000..a1b86fa33 --- /dev/null +++ b/src-tauri/src/core/app/constants.rs @@ -0,0 +1,2 @@ +// App Configuration Constants +pub const CONFIGURATION_FILE_NAME: &str = "settings.json"; diff --git a/src-tauri/src/core/app/helpers.rs b/src-tauri/src/core/app/helpers.rs new file mode 100644 index 000000000..6715e73cb --- /dev/null +++ b/src-tauri/src/core/app/helpers.rs @@ -0,0 +1,33 @@ +use std::{fs, io, path::PathBuf}; + +/// Recursively copy a directory from src to dst, excluding specified directories +pub fn copy_dir_recursive( + src: &PathBuf, + dst: &PathBuf, + exclude_dirs: &[&str], +) -> Result<(), io::Error> { + if !dst.exists() { + fs::create_dir_all(dst)?; + } + + for entry in fs::read_dir(src)? 
{ + let entry = entry?; + let file_type = entry.file_type()?; + let src_path = entry.path(); + let dst_path = dst.join(entry.file_name()); + + if file_type.is_dir() { + // Skip excluded directories + if let Some(dir_name) = entry.file_name().to_str() { + if exclude_dirs.contains(&dir_name) { + continue; + } + } + copy_dir_recursive(&src_path, &dst_path, exclude_dirs)?; + } else { + fs::copy(&src_path, &dst_path)?; + } + } + + Ok(()) +} diff --git a/src-tauri/src/core/app/mod.rs b/src-tauri/src/core/app/mod.rs new file mode 100644 index 000000000..e0b10b55a --- /dev/null +++ b/src-tauri/src/core/app/mod.rs @@ -0,0 +1,4 @@ +pub mod commands; +mod constants; +pub mod helpers; +pub mod models; diff --git a/src-tauri/src/core/app/models.rs b/src-tauri/src/core/app/models.rs new file mode 100644 index 000000000..be10bcfe0 --- /dev/null +++ b/src-tauri/src/core/app/models.rs @@ -0,0 +1,16 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct AppConfiguration { + pub data_folder: String, + // Add other fields as needed +} + +impl AppConfiguration { + pub fn default() -> Self { + Self { + data_folder: String::from("./data"), // Set a default value for the data_folder + // Add other fields with default values as needed + } + } +} diff --git a/src-tauri/src/core/cmd.rs b/src-tauri/src/core/cmd.rs deleted file mode 100644 index ffa1b8a53..000000000 --- a/src-tauri/src/core/cmd.rs +++ /dev/null @@ -1,422 +0,0 @@ -use serde::{Deserialize, Serialize}; -use std::{fs, io, path::PathBuf}; -use tauri::{AppHandle, Manager, Runtime, State}; - -use crate::core::utils::extensions::inference_llamacpp_extension::cleanup::cleanup_processes; - -use super::{server, setup, state::AppState}; - -const CONFIGURATION_FILE_NAME: &str = "settings.json"; - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct AppConfiguration { - pub data_folder: String, - // Add other fields as needed -} -impl AppConfiguration { - pub fn default() -> Self { - 
Self { - data_folder: String::from("./data"), // Set a default value for the data_folder - // Add other fields with default values as needed - } - } -} - -#[tauri::command] -pub fn get_app_configurations(app_handle: tauri::AppHandle) -> AppConfiguration { - let mut app_default_configuration = AppConfiguration::default(); - - if std::env::var("CI").unwrap_or_default() == "e2e" { - return app_default_configuration; - } - - let configuration_file = get_configuration_file_path(app_handle.clone()); - - let default_data_folder = default_data_folder_path(app_handle.clone()); - - if !configuration_file.exists() { - log::info!( - "App config not found, creating default config at {:?}", - configuration_file - ); - - app_default_configuration.data_folder = default_data_folder; - - if let Err(err) = fs::write( - &configuration_file, - serde_json::to_string(&app_default_configuration).unwrap(), - ) { - log::error!("Failed to create default config: {}", err); - } - - return app_default_configuration; - } - - match fs::read_to_string(&configuration_file) { - Ok(content) => match serde_json::from_str::(&content) { - Ok(app_configurations) => app_configurations, - Err(err) => { - log::error!( - "Failed to parse app config, returning default config instead. Error: {}", - err - ); - app_default_configuration - } - }, - Err(err) => { - log::error!( - "Failed to read app config, returning default config instead. 
Error: {}", - err - ); - app_default_configuration - } - } -} - -#[tauri::command] -pub fn update_app_configuration( - app_handle: tauri::AppHandle, - configuration: AppConfiguration, -) -> Result<(), String> { - let configuration_file = get_configuration_file_path(app_handle); - log::info!( - "update_app_configuration, configuration_file: {:?}", - configuration_file - ); - - fs::write( - configuration_file, - serde_json::to_string(&configuration).map_err(|e| e.to_string())?, - ) - .map_err(|e| e.to_string()) -} - -#[tauri::command] -pub fn get_jan_data_folder_path(app_handle: tauri::AppHandle) -> PathBuf { - if cfg!(test) { - return PathBuf::from("./data"); - } - - let app_configurations = get_app_configurations(app_handle); - PathBuf::from(app_configurations.data_folder) -} - -#[tauri::command] -pub fn get_jan_extensions_path(app_handle: tauri::AppHandle) -> PathBuf { - get_jan_data_folder_path(app_handle).join("extensions") -} - -#[tauri::command] -pub fn factory_reset(app_handle: tauri::AppHandle, state: State<'_, AppState>) { - // close window - let windows = app_handle.webview_windows(); - for (label, window) in windows.iter() { - window.close().unwrap_or_else(|_| { - log::warn!("Failed to close window: {:?}", label); - }); - } - let data_folder = get_jan_data_folder_path(app_handle.clone()); - log::info!("Factory reset, removing data folder: {:?}", data_folder); - - tauri::async_runtime::block_on(async { - cleanup_processes(state).await; - - if data_folder.exists() { - if let Err(e) = fs::remove_dir_all(&data_folder) { - log::error!("Failed to remove data folder: {}", e); - return; - } - } - - // Recreate the data folder - let _ = fs::create_dir_all(&data_folder).map_err(|e| e.to_string()); - - // Reset the configuration - let mut default_config = AppConfiguration::default(); - default_config.data_folder = default_data_folder_path(app_handle.clone()); - let _ = update_app_configuration(app_handle.clone(), default_config); - - app_handle.restart(); - }); -} - 
-#[tauri::command] -pub fn get_configuration_file_path(app_handle: tauri::AppHandle) -> PathBuf { - let app_path = app_handle.path().app_data_dir().unwrap_or_else(|err| { - log::error!( - "Failed to get app data directory: {}. Using home directory instead.", - err - ); - - let home_dir = std::env::var(if cfg!(target_os = "windows") { - "USERPROFILE" - } else { - "HOME" - }) - .expect("Failed to determine the home directory"); - - PathBuf::from(home_dir) - }); - - let package_name = env!("CARGO_PKG_NAME"); - #[cfg(target_os = "linux")] - let old_data_dir = { - if let Some(config_path) = dirs::config_dir() { - config_path.join(package_name) - } else { - log::debug!("Could not determine config directory"); - app_path - .parent() - .unwrap_or(&app_path.join("../")) - .join(package_name) - } - }; - - #[cfg(not(target_os = "linux"))] - let old_data_dir = app_path - .parent() - .unwrap_or(&app_path.join("../")) - .join(package_name); - - if old_data_dir.exists() { - return old_data_dir.join(CONFIGURATION_FILE_NAME); - } else { - return app_path.join(CONFIGURATION_FILE_NAME); - } -} - -#[tauri::command] -pub fn default_data_folder_path(app_handle: tauri::AppHandle) -> String { - let mut path = app_handle.path().data_dir().unwrap(); - - let app_name = std::env::var("APP_NAME") - .unwrap_or_else(|_| app_handle.config().product_name.clone().unwrap()); - path.push(app_name); - path.push("data"); - - let mut path_str = path.to_str().unwrap().to_string(); - - if let Some(stripped) = path.to_str().unwrap().to_string().strip_suffix(".ai.app") { - path_str = stripped.to_string(); - } - - path_str -} - -#[tauri::command] -pub fn relaunch(app: AppHandle) { - app.restart() -} - -#[tauri::command] -pub fn open_app_directory(app: AppHandle) { - let app_path = app.path().app_data_dir().unwrap(); - if cfg!(target_os = "windows") { - std::process::Command::new("explorer") - .arg(app_path) - .spawn() - .expect("Failed to open app directory"); - } else if cfg!(target_os = "macos") { - 
std::process::Command::new("open") - .arg(app_path) - .spawn() - .expect("Failed to open app directory"); - } else { - std::process::Command::new("xdg-open") - .arg(app_path) - .spawn() - .expect("Failed to open app directory"); - } -} - -#[tauri::command] -pub fn open_file_explorer(path: String) { - let path = PathBuf::from(path); - if cfg!(target_os = "windows") { - std::process::Command::new("explorer") - .arg(path) - .spawn() - .expect("Failed to open file explorer"); - } else if cfg!(target_os = "macos") { - std::process::Command::new("open") - .arg(path) - .spawn() - .expect("Failed to open file explorer"); - } else { - std::process::Command::new("xdg-open") - .arg(path) - .spawn() - .expect("Failed to open file explorer"); - } -} - -#[tauri::command] -pub fn install_extensions(app: AppHandle) { - if let Err(err) = setup::install_extensions(app, true) { - log::error!("Failed to install extensions: {}", err); - } -} - -#[tauri::command] -pub fn get_active_extensions(app: AppHandle) -> Vec { - let mut path = get_jan_extensions_path(app); - path.push("extensions.json"); - log::info!("get jan extensions, path: {:?}", path); - - let contents = fs::read_to_string(path); - let contents: Vec = match contents { - Ok(data) => match serde_json::from_str::>(&data) { - Ok(exts) => exts - .into_iter() - .map(|ext| { - serde_json::json!({ - "url": ext["url"], - "name": ext["name"], - "productName": ext["productName"], - "active": ext["_active"], - "description": ext["description"], - "version": ext["version"] - }) - }) - .collect(), - Err(error) => { - log::error!("Failed to parse extensions.json: {}", error); - vec![] - } - }, - Err(error) => { - log::error!("Failed to read extensions.json: {}", error); - vec![] - } - }; - return contents; -} - -#[tauri::command] -pub fn get_user_home_path(app: AppHandle) -> String { - return get_app_configurations(app.clone()).data_folder; -} - -/// Recursively copy a directory from src to dst -fn copy_dir_recursive(src: &PathBuf, dst: 
&PathBuf) -> Result<(), io::Error> { - if !dst.exists() { - fs::create_dir_all(dst)?; - } - - for entry in fs::read_dir(src)? { - let entry = entry?; - let file_type = entry.file_type()?; - let src_path = entry.path(); - let dst_path = dst.join(entry.file_name()); - - if file_type.is_dir() { - copy_dir_recursive(&src_path, &dst_path)?; - } else { - fs::copy(&src_path, &dst_path)?; - } - } - - Ok(()) -} - -#[tauri::command] -pub fn change_app_data_folder( - app_handle: tauri::AppHandle, - new_data_folder: String, -) -> Result<(), String> { - // Get current data folder path - let current_data_folder = get_jan_data_folder_path(app_handle.clone()); - let new_data_folder_path = PathBuf::from(&new_data_folder); - - // Create the new data folder if it doesn't exist - if !new_data_folder_path.exists() { - fs::create_dir_all(&new_data_folder_path) - .map_err(|e| format!("Failed to create new data folder: {}", e))?; - } - - // Copy all files from the old folder to the new one - if current_data_folder.exists() { - log::info!( - "Copying data from {:?} to {:?}", - current_data_folder, - new_data_folder_path - ); - - // Check if this is a parent directory to avoid infinite recursion - if new_data_folder_path.starts_with(¤t_data_folder) { - return Err( - "New data folder cannot be a subdirectory of the current data folder".to_string(), - ); - } - copy_dir_recursive(¤t_data_folder, &new_data_folder_path) - .map_err(|e| format!("Failed to copy data to new folder: {}", e))?; - } else { - log::info!("Current data folder does not exist, nothing to copy"); - } - - // Update the configuration to point to the new folder - let mut configuration = get_app_configurations(app_handle.clone()); - configuration.data_folder = new_data_folder; - - // Save the updated configuration - update_app_configuration(app_handle, configuration) -} - -#[tauri::command] -pub fn app_token(state: State<'_, AppState>) -> Option { - state.app_token.clone() -} - -#[tauri::command] -pub async fn start_server( - 
state: State<'_, AppState>, - host: String, - port: u16, - prefix: String, - api_key: String, - trusted_hosts: Vec, -) -> Result { - let server_handle = state.server_handle.clone(); - let sessions = state.llama_server_process.clone(); - - server::start_server( - server_handle, - sessions, - host, - port, - prefix, - api_key, - vec![trusted_hosts], - ) - .await - .map_err(|e| e.to_string())?; - Ok(true) -} - -#[tauri::command] -pub async fn stop_server(state: State<'_, AppState>) -> Result<(), String> { - let server_handle = state.server_handle.clone(); - - server::stop_server(server_handle) - .await - .map_err(|e| e.to_string())?; - Ok(()) -} - -#[tauri::command] -pub async fn get_server_status(state: State<'_, AppState>) -> Result { - let server_handle = state.server_handle.clone(); - - Ok(server::is_server_running(server_handle).await) -} - -#[tauri::command] -pub async fn read_logs(app: AppHandle) -> Result { - let log_path = get_jan_data_folder_path(app).join("logs").join("app.log"); - if log_path.exists() { - let content = fs::read_to_string(log_path).map_err(|e| e.to_string())?; - Ok(content) - } else { - Err(format!("Log file not found")) - } -} diff --git a/src-tauri/src/core/downloads/commands.rs b/src-tauri/src/core/downloads/commands.rs new file mode 100644 index 000000000..f2187046a --- /dev/null +++ b/src-tauri/src/core/downloads/commands.rs @@ -0,0 +1,68 @@ +use super::helpers::{_download_files_internal, err_to_string}; +use super::models::DownloadItem; +use crate::core::app::commands::get_jan_data_folder_path; +use crate::core::state::AppState; +use std::collections::HashMap; +use tauri::State; +use tokio_util::sync::CancellationToken; + +#[tauri::command] +pub async fn download_files( + app: tauri::AppHandle, + state: State<'_, AppState>, + items: Vec, + task_id: &str, + headers: HashMap, +) -> Result<(), String> { + // insert cancel tokens + let cancel_token = CancellationToken::new(); + { + let mut download_manager = 
state.download_manager.lock().await; + if download_manager.cancel_tokens.contains_key(task_id) { + return Err(format!("task_id {} exists", task_id)); + } + download_manager + .cancel_tokens + .insert(task_id.to_string(), cancel_token.clone()); + } + // TODO: Support resuming downloads when FE is ready + let result = _download_files_internal( + app.clone(), + &items, + &headers, + task_id, + false, + cancel_token.clone(), + ) + .await; + + // cleanup + { + let mut download_manager = state.download_manager.lock().await; + download_manager.cancel_tokens.remove(task_id); + } + + // delete files if cancelled + if cancel_token.is_cancelled() { + let jan_data_folder = get_jan_data_folder_path(app.clone()); + for item in items { + let save_path = jan_data_folder.join(&item.save_path); + let _ = std::fs::remove_file(&save_path); // don't check error + } + } + + result.map_err(err_to_string) +} + +#[tauri::command] +pub async fn cancel_download_task(state: State<'_, AppState>, task_id: &str) -> Result<(), String> { + // NOTE: might want to add User-Agent header + let mut download_manager = state.download_manager.lock().await; + if let Some(token) = download_manager.cancel_tokens.remove(task_id) { + token.cancel(); + log::info!("Cancelled download task: {}", task_id); + Ok(()) + } else { + Err(format!("No download task: {}", task_id)) + } +} diff --git a/src-tauri/src/core/downloads/helpers.rs b/src-tauri/src/core/downloads/helpers.rs new file mode 100644 index 000000000..1fad0ea4b --- /dev/null +++ b/src-tauri/src/core/downloads/helpers.rs @@ -0,0 +1,365 @@ +use super::models::{DownloadEvent, DownloadItem, ProxyConfig}; +use crate::core::app::commands::get_jan_data_folder_path; +use futures_util::StreamExt; +use jan_utils::normalize_path; +use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; +use std::collections::HashMap; +use std::time::Duration; +use tauri::Emitter; +use tokio::fs::File; +use tokio::io::AsyncWriteExt; +use tokio_util::sync::CancellationToken; +use 
url::Url; + +pub fn err_to_string(e: E) -> String { + format!("Error: {}", e) +} + +pub fn validate_proxy_config(config: &ProxyConfig) -> Result<(), String> { + // Validate proxy URL format + if let Err(e) = Url::parse(&config.url) { + return Err(format!("Invalid proxy URL '{}': {}", config.url, e)); + } + + // Check if proxy URL has valid scheme + let url = Url::parse(&config.url).unwrap(); // Safe to unwrap as we just validated it + match url.scheme() { + "http" | "https" | "socks4" | "socks5" => {} + scheme => return Err(format!("Unsupported proxy scheme: {}", scheme)), + } + + // Validate authentication credentials + if config.username.is_some() && config.password.is_none() { + return Err("Username provided without password".to_string()); + } + + if config.password.is_some() && config.username.is_none() { + return Err("Password provided without username".to_string()); + } + + // Validate no_proxy entries + if let Some(no_proxy) = &config.no_proxy { + for entry in no_proxy { + if entry.is_empty() { + return Err("Empty no_proxy entry".to_string()); + } + // Basic validation for wildcard patterns + if entry.starts_with("*.") && entry.len() < 3 { + return Err(format!("Invalid wildcard pattern: {}", entry)); + } + } + } + + // SSL verification settings are all optional booleans, no validation needed + + Ok(()) +} + +pub fn create_proxy_from_config(config: &ProxyConfig) -> Result { + // Validate the configuration first + validate_proxy_config(config)?; + + let mut proxy = reqwest::Proxy::all(&config.url).map_err(err_to_string)?; + + // Add authentication if provided + if let (Some(username), Some(password)) = (&config.username, &config.password) { + proxy = proxy.basic_auth(username, password); + } + + Ok(proxy) +} + +pub fn should_bypass_proxy(url: &str, no_proxy: &[String]) -> bool { + if no_proxy.is_empty() { + return false; + } + + // Parse the URL to get the host + let parsed_url = match Url::parse(url) { + Ok(u) => u, + Err(_) => return false, + }; + + let host 
= match parsed_url.host_str() { + Some(h) => h, + None => return false, + }; + + // Check if host matches any no_proxy entry + for entry in no_proxy { + if entry == "*" { + return true; + } + + // Simple wildcard matching + if entry.starts_with("*.") { + let domain = &entry[2..]; + if host.ends_with(domain) { + return true; + } + } else if host == entry { + return true; + } + } + + false +} + +pub fn _get_client_for_item( + item: &DownloadItem, + header_map: &HeaderMap, +) -> Result { + let mut client_builder = reqwest::Client::builder() + .http2_keep_alive_timeout(Duration::from_secs(15)) + .default_headers(header_map.clone()); + + // Add proxy configuration if provided + if let Some(proxy_config) = &item.proxy { + // Handle SSL verification settings + if proxy_config.ignore_ssl.unwrap_or(false) { + client_builder = client_builder.danger_accept_invalid_certs(true); + log::info!("SSL certificate verification disabled for URL {}", item.url); + } + + // Note: reqwest doesn't have fine-grained SSL verification controls + // for verify_proxy_ssl, verify_proxy_host_ssl, verify_peer_ssl, verify_host_ssl + // These settings are handled by the underlying TLS implementation + + // Check if this URL should bypass proxy + let no_proxy = proxy_config.no_proxy.as_deref().unwrap_or(&[]); + if !should_bypass_proxy(&item.url, no_proxy) { + let proxy = create_proxy_from_config(proxy_config)?; + client_builder = client_builder.proxy(proxy); + log::info!("Using proxy {} for URL {}", proxy_config.url, item.url); + } else { + log::info!("Bypassing proxy for URL {}", item.url); + } + } + + client_builder.build().map_err(err_to_string) +} + +pub fn _convert_headers( + headers: &HashMap, +) -> Result> { + let mut header_map = HeaderMap::new(); + for (k, v) in headers { + let key = HeaderName::from_bytes(k.as_bytes())?; + let value = HeaderValue::from_str(v)?; + header_map.insert(key, value); + } + Ok(header_map) +} + +pub async fn _get_file_size( + client: &reqwest::Client, + url: &str, 
+) -> Result> { + let resp = client.head(url).send().await?; + if !resp.status().is_success() { + return Err(format!("Failed to get file size: HTTP status {}", resp.status()).into()); + } + // this is buggy, always return 0 for HEAD request + // Ok(resp.content_length().unwrap_or(0)) + + match resp.headers().get("content-length") { + Some(value) => { + let value_str = value.to_str()?; + let value_u64: u64 = value_str.parse()?; + Ok(value_u64) + } + None => Ok(0), + } +} + +pub async fn _download_files_internal( + app: tauri::AppHandle, + items: &[DownloadItem], + headers: &HashMap, + task_id: &str, + resume: bool, + cancel_token: CancellationToken, +) -> Result<(), String> { + log::info!("Start download task: {}", task_id); + + let header_map = _convert_headers(headers).map_err(err_to_string)?; + + let total_size = { + let mut total_size = 0u64; + for item in items.iter() { + let client = _get_client_for_item(item, &header_map).map_err(err_to_string)?; + total_size += _get_file_size(&client, &item.url) + .await + .map_err(err_to_string)?; + } + total_size + }; + log::info!("Total download size: {}", total_size); + + let mut evt = DownloadEvent { + transferred: 0, + total: total_size, + }; + let evt_name = format!("download-{}", task_id); + + // save file under Jan data folder + let jan_data_folder = get_jan_data_folder_path(app.clone()); + + for item in items.iter() { + let save_path = jan_data_folder.join(&item.save_path); + let save_path = normalize_path(&save_path); + + if !save_path.starts_with(&jan_data_folder) { + return Err(format!( + "Path {} is outside of Jan data folder {}", + save_path.display(), + jan_data_folder.display() + )); + } + + // Create parent directories if they don't exist + if let Some(parent) = save_path.parent() { + if !parent.exists() { + tokio::fs::create_dir_all(parent) + .await + .map_err(err_to_string)?; + } + } + + let current_extension = save_path.extension().unwrap_or_default().to_string_lossy(); + let append_extension = |ext: 
&str| { + if current_extension.is_empty() { + ext.to_string() + } else { + format!("{}.{}", current_extension, ext) + } + }; + let tmp_save_path = save_path.with_extension(append_extension("tmp")); + let url_save_path = save_path.with_extension(append_extension("url")); + + let mut should_resume = resume + && tmp_save_path.exists() + && tokio::fs::read_to_string(&url_save_path) + .await + .map(|url| url == item.url) // check if we resume the same URL + .unwrap_or(false); + + tokio::fs::write(&url_save_path, item.url.clone()) + .await + .map_err(err_to_string)?; + + log::info!("Started downloading: {}", item.url); + let client = _get_client_for_item(item, &header_map).map_err(err_to_string)?; + let mut download_delta = 0u64; + let resp = if should_resume { + let downloaded_size = tmp_save_path.metadata().map_err(err_to_string)?.len(); + match _get_maybe_resume(&client, &item.url, downloaded_size).await { + Ok(resp) => { + log::info!( + "Resume download: {}, already downloaded {} bytes", + item.url, + downloaded_size + ); + download_delta += downloaded_size; + resp + } + Err(e) => { + // fallback to normal download + log::warn!("Failed to resume download: {}", e); + should_resume = false; + _get_maybe_resume(&client, &item.url, 0).await? + } + } + } else { + _get_maybe_resume(&client, &item.url, 0).await? + }; + let mut stream = resp.bytes_stream(); + + let file = if should_resume { + // resume download, append to existing file + tokio::fs::OpenOptions::new() + .write(true) + .append(true) + .open(&tmp_save_path) + .await + .map_err(err_to_string)? + } else { + // start new download, create a new file + File::create(&tmp_save_path).await.map_err(err_to_string)? 
+ }; + let mut writer = tokio::io::BufWriter::new(file); + + // write chunk to file + while let Some(chunk) = stream.next().await { + if cancel_token.is_cancelled() { + if !should_resume { + tokio::fs::remove_dir_all(&save_path.parent().unwrap()) + .await + .ok(); + } + log::info!("Download cancelled for task: {}", task_id); + app.emit(&evt_name, evt.clone()).unwrap(); + return Ok(()); + } + + let chunk = chunk.map_err(err_to_string)?; + writer.write_all(&chunk).await.map_err(err_to_string)?; + download_delta += chunk.len() as u64; + + // only update every 10 MB + if download_delta >= 10 * 1024 * 1024 { + evt.transferred += download_delta; + app.emit(&evt_name, evt.clone()).unwrap(); + download_delta = 0u64; + } + } + + writer.flush().await.map_err(err_to_string)?; + evt.transferred += download_delta; + + // rename tmp file to final file + tokio::fs::rename(&tmp_save_path, &save_path) + .await + .map_err(err_to_string)?; + tokio::fs::remove_file(&url_save_path) + .await + .map_err(err_to_string)?; + log::info!("Finished downloading: {}", item.url); + } + + app.emit(&evt_name, evt.clone()).unwrap(); + Ok(()) +} + +pub async fn _get_maybe_resume( + client: &reqwest::Client, + url: &str, + start_bytes: u64, +) -> Result { + if start_bytes > 0 { + let resp = client + .get(url) + .header("Range", format!("bytes={}-", start_bytes)) + .send() + .await + .map_err(err_to_string)?; + if resp.status() != reqwest::StatusCode::PARTIAL_CONTENT { + return Err(format!( + "Failed to resume download: HTTP status {}, {}", + resp.status(), + resp.text().await.unwrap_or_default() + )); + } + Ok(resp) + } else { + let resp = client.get(url).send().await.map_err(err_to_string)?; + if !resp.status().is_success() { + return Err(format!( + "Failed to download: HTTP status {}, {}", + resp.status(), + resp.text().await.unwrap_or_default() + )); + } + Ok(resp) + } +} diff --git a/src-tauri/src/core/downloads/mod.rs b/src-tauri/src/core/downloads/mod.rs new file mode 100644 index 
000000000..5ddcb526a --- /dev/null +++ b/src-tauri/src/core/downloads/mod.rs @@ -0,0 +1,6 @@ +pub mod commands; +pub mod helpers; +pub mod models; + +#[cfg(test)] +mod tests; diff --git a/src-tauri/src/core/downloads/models.rs b/src-tauri/src/core/downloads/models.rs new file mode 100644 index 000000000..61f438ec8 --- /dev/null +++ b/src-tauri/src/core/downloads/models.rs @@ -0,0 +1,29 @@ +use std::collections::HashMap; +use tokio_util::sync::CancellationToken; + +#[derive(Default)] +pub struct DownloadManagerState { + pub cancel_tokens: HashMap, +} + +#[derive(serde::Deserialize, Clone, Debug)] +pub struct ProxyConfig { + pub url: String, + pub username: Option, + pub password: Option, + pub no_proxy: Option>, // List of domains to bypass proxy + pub ignore_ssl: Option, // Ignore SSL certificate verification +} + +#[derive(serde::Deserialize, Clone, Debug)] +pub struct DownloadItem { + pub url: String, + pub save_path: String, + pub proxy: Option, +} + +#[derive(serde::Serialize, Clone, Debug)] +pub struct DownloadEvent { + pub transferred: u64, + pub total: u64, +} diff --git a/src-tauri/src/core/downloads/tests.rs b/src-tauri/src/core/downloads/tests.rs new file mode 100644 index 000000000..42e690dba --- /dev/null +++ b/src-tauri/src/core/downloads/tests.rs @@ -0,0 +1,341 @@ +use super::helpers::*; +use super::models::*; +use reqwest::header::HeaderMap; +use std::collections::HashMap; + +// Helper function to create a minimal proxy config for testing +fn create_test_proxy_config(url: &str) -> ProxyConfig { + ProxyConfig { + url: url.to_string(), + username: None, + password: None, + no_proxy: None, + ignore_ssl: None, + } +} + +#[test] +fn test_validate_proxy_config() { + // Valid HTTP proxy + let config = ProxyConfig { + url: "http://proxy.example.com:8080".to_string(), + username: Some("user".to_string()), + password: Some("pass".to_string()), + no_proxy: Some(vec!["localhost".to_string(), "*.example.com".to_string()]), + ignore_ssl: Some(true), + }; + 
assert!(validate_proxy_config(&config).is_ok()); + + // Valid HTTPS proxy + let config = ProxyConfig { + url: "https://proxy.example.com:8080".to_string(), + username: None, + password: None, + no_proxy: None, + ignore_ssl: None, + }; + assert!(validate_proxy_config(&config).is_ok()); + + // Valid SOCKS5 proxy + let config = ProxyConfig { + url: "socks5://proxy.example.com:1080".to_string(), + username: None, + password: None, + no_proxy: None, + ignore_ssl: None, + }; + assert!(validate_proxy_config(&config).is_ok()); + + // Invalid URL + let config = create_test_proxy_config("invalid-url"); + assert!(validate_proxy_config(&config).is_err()); + + // Unsupported scheme + let config = create_test_proxy_config("ftp://proxy.example.com:21"); + assert!(validate_proxy_config(&config).is_err()); + + // Username without password + let mut config = create_test_proxy_config("http://proxy.example.com:8080"); + config.username = Some("user".to_string()); + assert!(validate_proxy_config(&config).is_err()); + + // Password without username + let mut config = create_test_proxy_config("http://proxy.example.com:8080"); + config.password = Some("pass".to_string()); + assert!(validate_proxy_config(&config).is_err()); + + // Empty no_proxy entry + let mut config = create_test_proxy_config("http://proxy.example.com:8080"); + config.no_proxy = Some(vec!["".to_string()]); + assert!(validate_proxy_config(&config).is_err()); + + // Invalid wildcard pattern + let mut config = create_test_proxy_config("http://proxy.example.com:8080"); + config.no_proxy = Some(vec!["*.".to_string()]); + assert!(validate_proxy_config(&config).is_err()); +} + +#[test] +fn test_should_bypass_proxy() { + let no_proxy = vec![ + "localhost".to_string(), + "127.0.0.1".to_string(), + "*.example.com".to_string(), + "specific.domain.com".to_string(), + ]; + + // Should bypass for localhost + assert!(should_bypass_proxy("http://localhost:8080/path", &no_proxy)); + + // Should bypass for 127.0.0.1 + 
assert!(should_bypass_proxy("https://127.0.0.1:3000/api", &no_proxy)); + + // Should bypass for wildcard match + assert!(should_bypass_proxy( + "http://sub.example.com/path", + &no_proxy + )); + assert!(should_bypass_proxy("https://api.example.com/v1", &no_proxy)); + + // Should bypass for specific domain + assert!(should_bypass_proxy( + "http://specific.domain.com/test", + &no_proxy + )); + + // Should NOT bypass for other domains + assert!(!should_bypass_proxy("http://other.com/path", &no_proxy)); + assert!(!should_bypass_proxy("https://example.org/api", &no_proxy)); + + // Should bypass everything with "*" + let wildcard_no_proxy = vec!["*".to_string()]; + assert!(should_bypass_proxy( + "http://any.domain.com/path", + &wildcard_no_proxy + )); + + // Empty no_proxy should not bypass anything + let empty_no_proxy = vec![]; + assert!(!should_bypass_proxy( + "http://any.domain.com/path", + &empty_no_proxy + )); +} + +#[test] +fn test_create_proxy_from_config() { + // Valid configuration should work + let mut config = create_test_proxy_config("http://proxy.example.com:8080"); + config.username = Some("user".to_string()); + config.password = Some("pass".to_string()); + assert!(create_proxy_from_config(&config).is_ok()); + + // Invalid configuration should fail + let config = create_test_proxy_config("invalid-url"); + assert!(create_proxy_from_config(&config).is_err()); +} + +#[test] +fn test_convert_headers() { + let mut headers = HashMap::new(); + headers.insert("User-Agent".to_string(), "test-agent".to_string()); + headers.insert("Authorization".to_string(), "Bearer token".to_string()); + + let header_map = _convert_headers(&headers).unwrap(); + assert_eq!(header_map.len(), 2); + assert_eq!(header_map.get("User-Agent").unwrap(), "test-agent"); + assert_eq!(header_map.get("Authorization").unwrap(), "Bearer token"); +} + +#[test] +fn test_proxy_ssl_verification_settings() { + // Test proxy config with SSL verification settings + let mut config = 
create_test_proxy_config("https://proxy.example.com:8080"); + config.ignore_ssl = Some(true); + + // Should validate successfully + assert!(validate_proxy_config(&config).is_ok()); + + // Test with all SSL settings as false + config.ignore_ssl = Some(false); + + // Should still validate successfully + assert!(validate_proxy_config(&config).is_ok()); +} + +#[test] +fn test_proxy_config_with_mixed_ssl_settings() { + // Test with mixed SSL settings - ignore_ssl true, others false + let mut config = create_test_proxy_config("https://proxy.example.com:8080"); + config.ignore_ssl = Some(true); + + assert!(validate_proxy_config(&config).is_ok()); + assert!(create_proxy_from_config(&config).is_ok()); +} + +#[test] +fn test_proxy_config_ssl_defaults() { + // Test with no SSL settings (should use None defaults) + let config = create_test_proxy_config("https://proxy.example.com:8080"); + + assert_eq!(config.ignore_ssl, None); + + assert!(validate_proxy_config(&config).is_ok()); + assert!(create_proxy_from_config(&config).is_ok()); +} + +#[test] +fn test_download_item_with_ssl_proxy() { + // Test that DownloadItem can be created with SSL proxy configuration + let mut proxy_config = create_test_proxy_config("https://proxy.example.com:8080"); + proxy_config.ignore_ssl = Some(true); + + let download_item = DownloadItem { + url: "https://example.com/file.zip".to_string(), + save_path: "downloads/file.zip".to_string(), + proxy: Some(proxy_config), + }; + + assert!(download_item.proxy.is_some()); + let proxy = download_item.proxy.unwrap(); + assert_eq!(proxy.ignore_ssl, Some(true)); +} + +#[test] +fn test_client_creation_with_ssl_settings() { + // Test client creation with SSL settings + let mut proxy_config = create_test_proxy_config("https://proxy.example.com:8080"); + proxy_config.ignore_ssl = Some(true); + + let download_item = DownloadItem { + url: "https://example.com/file.zip".to_string(), + save_path: "downloads/file.zip".to_string(), + proxy: Some(proxy_config), + }; + + 
let header_map = HeaderMap::new(); + let result = _get_client_for_item(&download_item, &header_map); + + // Should create client successfully even with SSL settings + assert!(result.is_ok()); +} + +#[test] +fn test_proxy_config_with_http_and_ssl_settings() { + // Test that SSL settings work with HTTP proxy (though not typically used) + let mut config = create_test_proxy_config("http://proxy.example.com:8080"); + config.ignore_ssl = Some(true); + + assert!(validate_proxy_config(&config).is_ok()); + assert!(create_proxy_from_config(&config).is_ok()); +} + +#[test] +fn test_proxy_config_with_socks_and_ssl_settings() { + // Test that SSL settings work with SOCKS proxy + let mut config = create_test_proxy_config("socks5://proxy.example.com:1080"); + config.ignore_ssl = Some(false); + + assert!(validate_proxy_config(&config).is_ok()); + + // SOCKS proxies are not supported by reqwest::Proxy::all() + // This test should expect an error for SOCKS proxies + let result = create_proxy_from_config(&config); + assert!(result.is_err()); + + // Test with HTTP proxy instead which is supported + let mut http_config = create_test_proxy_config("http://proxy.example.com:8080"); + http_config.ignore_ssl = Some(false); + assert!(validate_proxy_config(&http_config).is_ok()); + assert!(create_proxy_from_config(&http_config).is_ok()); +} + +#[test] +fn test_download_item_creation() { + let item = DownloadItem { + url: "https://example.com/file.tar.gz".to_string(), + save_path: "models/test.tar.gz".to_string(), + proxy: None, + }; + + assert_eq!(item.url, "https://example.com/file.tar.gz"); + assert_eq!(item.save_path, "models/test.tar.gz"); +} + +#[test] +fn test_download_event_creation() { + let event = DownloadEvent { + transferred: 1024, + total: 2048, + }; + + assert_eq!(event.transferred, 1024); + assert_eq!(event.total, 2048); +} + +#[test] +fn test_err_to_string() { + let error = "Test error"; + let result = err_to_string(error); + assert_eq!(result, "Error: Test error"); +} + 
+#[test] +fn test_convert_headers_valid() { + let mut headers = HashMap::new(); + headers.insert("Content-Type".to_string(), "application/json".to_string()); + headers.insert("Authorization".to_string(), "Bearer token123".to_string()); + + let result = _convert_headers(&headers); + assert!(result.is_ok()); + + let header_map = result.unwrap(); + assert_eq!(header_map.len(), 2); + assert_eq!(header_map.get("Content-Type").unwrap(), "application/json"); + assert_eq!(header_map.get("Authorization").unwrap(), "Bearer token123"); +} + +#[test] +fn test_convert_headers_invalid_header_name() { + let mut headers = HashMap::new(); + headers.insert("Invalid\nHeader".to_string(), "value".to_string()); + + let result = _convert_headers(&headers); + assert!(result.is_err()); +} + +#[test] +fn test_convert_headers_invalid_header_value() { + let mut headers = HashMap::new(); + headers.insert("Content-Type".to_string(), "invalid\nvalue".to_string()); + + let result = _convert_headers(&headers); + assert!(result.is_err()); +} + +#[test] +fn test_download_manager_state_default() { + let state = DownloadManagerState::default(); + assert!(state.cancel_tokens.is_empty()); +} + +#[test] +fn test_download_event_serialization() { + let event = DownloadEvent { + transferred: 512, + total: 1024, + }; + + let json = serde_json::to_string(&event).unwrap(); + assert!(json.contains("\"transferred\":512")); + assert!(json.contains("\"total\":1024")); +} + +#[test] +fn test_download_item_deserialization() { + let json = r#"{"url":"https://example.com/file.zip","save_path":"downloads/file.zip"}"#; + let item: DownloadItem = serde_json::from_str(json).unwrap(); + + assert_eq!(item.url, "https://example.com/file.zip"); + assert_eq!(item.save_path, "downloads/file.zip"); +} diff --git a/src-tauri/src/core/extensions/commands.rs b/src-tauri/src/core/extensions/commands.rs new file mode 100644 index 000000000..784c71f46 --- /dev/null +++ b/src-tauri/src/core/extensions/commands.rs @@ -0,0 +1,53 @@ +use 
std::fs; +use std::path::PathBuf; +use tauri::AppHandle; + +use crate::core::app::commands::get_jan_data_folder_path; +use crate::core::setup; + +#[tauri::command] +pub fn get_jan_extensions_path(app_handle: tauri::AppHandle) -> PathBuf { + get_jan_data_folder_path(app_handle).join("extensions") +} + +#[tauri::command] +pub fn install_extensions(app: AppHandle) { + if let Err(err) = setup::install_extensions(app, true) { + log::error!("Failed to install extensions: {}", err); + } +} + +#[tauri::command] +pub fn get_active_extensions(app: AppHandle) -> Vec { + let mut path = get_jan_extensions_path(app); + path.push("extensions.json"); + log::info!("get jan extensions, path: {:?}", path); + + let contents = fs::read_to_string(path); + let contents: Vec = match contents { + Ok(data) => match serde_json::from_str::>(&data) { + Ok(exts) => exts + .into_iter() + .map(|ext| { + serde_json::json!({ + "url": ext["url"], + "name": ext["name"], + "productName": ext["productName"], + "active": ext["_active"], + "description": ext["description"], + "version": ext["version"] + }) + }) + .collect(), + Err(error) => { + log::error!("Failed to parse extensions.json: {}", error); + vec![] + } + }, + Err(error) => { + log::error!("Failed to read extensions.json: {}", error); + vec![] + } + }; + return contents; +} diff --git a/src-tauri/src/core/extensions/mod.rs b/src-tauri/src/core/extensions/mod.rs new file mode 100644 index 000000000..82b6da3c0 --- /dev/null +++ b/src-tauri/src/core/extensions/mod.rs @@ -0,0 +1 @@ +pub mod commands; diff --git a/src-tauri/src/core/filesystem/commands.rs b/src-tauri/src/core/filesystem/commands.rs new file mode 100644 index 000000000..c70943db4 --- /dev/null +++ b/src-tauri/src/core/filesystem/commands.rs @@ -0,0 +1,204 @@ +// WARNING: These APIs will be deprecated soon due to removing FS API access from frontend. +// It's added to ensure the legacy implementation from frontend still functions before removal. 
+use super::helpers::resolve_path; +use super::models::FileStat; +use std::fs; +use tauri::Runtime; + +#[tauri::command] +pub fn rm(app_handle: tauri::AppHandle, args: Vec) -> Result<(), String> { + if args.is_empty() || args[0].is_empty() { + return Err("rm error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + if path.is_file() { + fs::remove_file(&path).map_err(|e| e.to_string())?; + } else if path.is_dir() { + fs::remove_dir_all(&path).map_err(|e| e.to_string())?; + } else { + return Err("rm error: Path does not exist".to_string()); + } + + Ok(()) +} + +#[tauri::command] +pub fn mkdir(app_handle: tauri::AppHandle, args: Vec) -> Result<(), String> { + if args.is_empty() || args[0].is_empty() { + return Err("mkdir error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + fs::create_dir_all(&path).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn join_path( + app_handle: tauri::AppHandle, + args: Vec, +) -> Result { + if args.is_empty() { + return Err("join_path error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + let joined_path = args[1..].iter().fold(path, |acc, part| acc.join(part)); + Ok(joined_path.to_string_lossy().to_string()) +} + +#[tauri::command] +pub fn exists_sync( + app_handle: tauri::AppHandle, + args: Vec, +) -> Result { + if args.is_empty() || args[0].is_empty() { + return Err("exist_sync error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + Ok(path.exists()) +} + +#[tauri::command] +pub fn file_stat( + app_handle: tauri::AppHandle, + args: String, +) -> Result { + if args.is_empty() { + return Err("file_stat error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args); + let metadata = fs::metadata(&path).map_err(|e| e.to_string())?; + let is_directory = metadata.is_dir(); + let size = if is_directory { 0 } else { metadata.len() }; + let file_stat 
= FileStat { is_directory, size }; + Ok(file_stat) +} + +#[tauri::command] +pub fn read_file_sync( + app_handle: tauri::AppHandle, + args: Vec, +) -> Result { + if args.is_empty() || args[0].is_empty() { + return Err("read_file_sync error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + fs::read_to_string(&path).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn write_file_sync( + app_handle: tauri::AppHandle, + args: Vec, +) -> Result<(), String> { + if args.len() < 2 || args[0].is_empty() || args[1].is_empty() { + return Err("write_file_sync error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + let content = &args[1]; + fs::write(&path, content).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn readdir_sync( + app_handle: tauri::AppHandle, + args: Vec, +) -> Result, String> { + if args.is_empty() || args[0].is_empty() { + return Err("read_dir_sync error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + let entries = fs::read_dir(&path).map_err(|e| e.to_string())?; + let paths: Vec = entries + .filter_map(|entry| entry.ok()) + .map(|entry| entry.path().to_string_lossy().to_string()) + .collect(); + Ok(paths) +} + +#[tauri::command] +pub fn write_yaml( + app: tauri::AppHandle, + data: serde_json::Value, + save_path: &str, +) -> Result<(), String> { + // TODO: have an internal function to check scope + let jan_data_folder = crate::core::app::commands::get_jan_data_folder_path(app.clone()); + let save_path = jan_utils::normalize_path(&jan_data_folder.join(save_path)); + if !save_path.starts_with(&jan_data_folder) { + return Err(format!( + "Error: save path {} is not under jan_data_folder {}", + save_path.to_string_lossy(), + jan_data_folder.to_string_lossy(), + )); + } + let file = fs::File::create(&save_path).map_err(|e| e.to_string())?; + let mut writer = std::io::BufWriter::new(file); + serde_yaml::to_writer(&mut writer, 
&data).map_err(|e| e.to_string())?; + Ok(()) +} + +#[tauri::command] +pub fn read_yaml(app: tauri::AppHandle, path: &str) -> Result { + let jan_data_folder = crate::core::app::commands::get_jan_data_folder_path(app.clone()); + let path = jan_utils::normalize_path(&jan_data_folder.join(path)); + if !path.starts_with(&jan_data_folder) { + return Err(format!( + "Error: path {} is not under jan_data_folder {}", + path.to_string_lossy(), + jan_data_folder.to_string_lossy(), + )); + } + let file = fs::File::open(&path).map_err(|e| e.to_string())?; + let reader = std::io::BufReader::new(file); + let data: serde_json::Value = serde_yaml::from_reader(reader).map_err(|e| e.to_string())?; + Ok(data) +} + +#[tauri::command] +pub fn decompress(app: tauri::AppHandle, path: &str, output_dir: &str) -> Result<(), String> { + let jan_data_folder = crate::core::app::commands::get_jan_data_folder_path(app.clone()); + let path_buf = jan_utils::normalize_path(&jan_data_folder.join(path)); + if !path_buf.starts_with(&jan_data_folder) { + return Err(format!( + "Error: path {} is not under jan_data_folder {}", + path_buf.to_string_lossy(), + jan_data_folder.to_string_lossy(), + )); + } + + let output_dir_buf = jan_utils::normalize_path(&jan_data_folder.join(output_dir)); + if !output_dir_buf.starts_with(&jan_data_folder) { + return Err(format!( + "Error: output directory {} is not under jan_data_folder {}", + output_dir_buf.to_string_lossy(), + jan_data_folder.to_string_lossy(), + )); + } + + // Ensure output directory exists + fs::create_dir_all(&output_dir_buf).map_err(|e| { + format!( + "Failed to create output directory {}: {}", + output_dir_buf.to_string_lossy(), + e + ) + })?; + + let file = fs::File::open(&path_buf).map_err(|e| e.to_string())?; + if path.ends_with(".tar.gz") { + let tar = flate2::read::GzDecoder::new(file); + let mut archive = tar::Archive::new(tar); + archive.unpack(&output_dir_buf).map_err(|e| e.to_string())?; + } else { + return Err("Unsupported file format. 
Only .tar.gz is supported.".to_string()); + } + + Ok(()) +} diff --git a/src-tauri/src/core/filesystem/helpers.rs b/src-tauri/src/core/filesystem/helpers.rs new file mode 100644 index 000000000..05c742969 --- /dev/null +++ b/src-tauri/src/core/filesystem/helpers.rs @@ -0,0 +1,23 @@ +use crate::core::app::commands::get_jan_data_folder_path; +use jan_utils::normalize_file_path; +use std::path::PathBuf; +use tauri::Runtime; + +pub fn resolve_path(app_handle: tauri::AppHandle, path: &str) -> PathBuf { + let path = if path.starts_with("file:/") || path.starts_with("file:\\") { + let normalized = normalize_file_path(path); + let relative_normalized = normalized + .trim_start_matches(std::path::MAIN_SEPARATOR) + .trim_start_matches('/') + .trim_start_matches('\\'); + get_jan_data_folder_path(app_handle).join(relative_normalized) + } else { + PathBuf::from(path) + }; + + if path.starts_with("http://") || path.starts_with("https://") { + path + } else { + path.canonicalize().unwrap_or(path) + } +} diff --git a/src-tauri/src/core/filesystem/mod.rs b/src-tauri/src/core/filesystem/mod.rs new file mode 100644 index 000000000..5ddcb526a --- /dev/null +++ b/src-tauri/src/core/filesystem/mod.rs @@ -0,0 +1,6 @@ +pub mod commands; +pub mod helpers; +pub mod models; + +#[cfg(test)] +mod tests; diff --git a/src-tauri/src/core/filesystem/models.rs b/src-tauri/src/core/filesystem/models.rs new file mode 100644 index 000000000..cb345a15a --- /dev/null +++ b/src-tauri/src/core/filesystem/models.rs @@ -0,0 +1,6 @@ +#[derive(serde::Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct FileStat { + pub is_directory: bool, + pub size: u64, +} diff --git a/src-tauri/src/core/filesystem/tests.rs b/src-tauri/src/core/filesystem/tests.rs new file mode 100644 index 000000000..b4e96e994 --- /dev/null +++ b/src-tauri/src/core/filesystem/tests.rs @@ -0,0 +1,90 @@ +use super::commands::*; +use crate::core::app::commands::get_jan_data_folder_path; +use std::fs::{self, File}; +use 
std::io::Write; +use tauri::test::mock_app; + +#[test] +fn test_rm() { + let app = mock_app(); + let path = "test_rm_dir"; + fs::create_dir_all(get_jan_data_folder_path(app.handle().clone()).join(path)).unwrap(); + let args = vec![format!("file://{}", path).to_string()]; + let result = rm(app.handle().clone(), args); + assert!(result.is_ok()); + assert!(!get_jan_data_folder_path(app.handle().clone()) + .join(path) + .exists()); +} + +#[test] +fn test_mkdir() { + let app = mock_app(); + let path = "test_mkdir_dir"; + let args = vec![format!("file://{}", path).to_string()]; + let result = mkdir(app.handle().clone(), args); + assert!(result.is_ok()); + assert!(get_jan_data_folder_path(app.handle().clone()) + .join(path) + .exists()); + let _ = fs::remove_dir_all(get_jan_data_folder_path(app.handle().clone()).join(path)); +} + +#[test] +fn test_join_path() { + let app = mock_app(); + let path = "file://test_dir"; + let args = vec![path.to_string(), "test_file".to_string()]; + let result = join_path(app.handle().clone(), args).unwrap(); + assert_eq!( + result, + get_jan_data_folder_path(app.handle().clone()) + .join(&format!("test_dir{}test_file", std::path::MAIN_SEPARATOR)) + .to_string_lossy() + .to_string() + ); +} + +#[test] +fn test_exists_sync() { + let app = mock_app(); + let path = "file://test_exists_sync_file"; + let dir_path = get_jan_data_folder_path(app.handle().clone()); + fs::create_dir_all(&dir_path).unwrap(); + let file_path = dir_path.join("test_exists_sync_file"); + File::create(&file_path).unwrap(); + let args: Vec = vec![path.to_string()]; + let result = exists_sync(app.handle().clone(), args).unwrap(); + assert!(result); + fs::remove_file(file_path).unwrap(); +} + +#[test] +fn test_read_file_sync() { + let app = mock_app(); + let path = "file://test_read_file_sync_file"; + let dir_path = get_jan_data_folder_path(app.handle().clone()); + fs::create_dir_all(&dir_path).unwrap(); + let file_path = dir_path.join("test_read_file_sync_file"); + let mut 
file = File::create(&file_path).unwrap(); + file.write_all(b"test content").unwrap(); + let args = vec![path.to_string()]; + let result = read_file_sync(app.handle().clone(), args).unwrap(); + assert_eq!(result, "test content".to_string()); + fs::remove_file(file_path).unwrap(); +} + +#[test] +fn test_readdir_sync() { + let app = mock_app(); + let dir_path = get_jan_data_folder_path(app.handle().clone()).join("test_readdir_sync_dir"); + fs::create_dir_all(&dir_path).unwrap(); + File::create(dir_path.join("file1.txt")).unwrap(); + File::create(dir_path.join("file2.txt")).unwrap(); + + let args = vec![dir_path.to_string_lossy().to_string()]; + let result = readdir_sync(app.handle().clone(), args).unwrap(); + assert_eq!(result.len(), 2); + + let _ = fs::remove_dir_all(dir_path); +} diff --git a/src-tauri/src/core/fs.rs b/src-tauri/src/core/fs.rs deleted file mode 100644 index 985fc0fe6..000000000 --- a/src-tauri/src/core/fs.rs +++ /dev/null @@ -1,247 +0,0 @@ -// WARNING: These APIs will be deprecated soon due to removing FS API access from frontend. -// It's added to ensure the legacy implementation from frontend still functions before removal. 
-use crate::core::cmd::get_jan_data_folder_path; -use std::fs; -use std::path::PathBuf; -use tauri::Runtime; - -#[tauri::command] -pub fn rm(app_handle: tauri::AppHandle, args: Vec) -> Result<(), String> { - if args.is_empty() || args[0].is_empty() { - return Err("rm error: Invalid argument".to_string()); - } - - let path = resolve_path(app_handle, &args[0]); - if path.is_file() { - fs::remove_file(&path).map_err(|e| e.to_string())?; - } else if path.is_dir() { - fs::remove_dir_all(&path).map_err(|e| e.to_string())?; - } else { - return Err("rm error: Path does not exist".to_string()); - } - - Ok(()) -} - -#[tauri::command] -pub fn mkdir(app_handle: tauri::AppHandle, args: Vec) -> Result<(), String> { - if args.is_empty() || args[0].is_empty() { - return Err("mkdir error: Invalid argument".to_string()); - } - - let path = resolve_path(app_handle, &args[0]); - fs::create_dir_all(&path).map_err(|e| e.to_string()) -} - -#[tauri::command] -pub fn join_path( - app_handle: tauri::AppHandle, - args: Vec, -) -> Result { - if args.is_empty() { - return Err("join_path error: Invalid argument".to_string()); - } - - let path = resolve_path(app_handle, &args[0]); - let joined_path = args[1..].iter().fold(path, |acc, part| acc.join(part)); - Ok(joined_path.to_string_lossy().to_string()) -} - -#[tauri::command] -pub fn exists_sync( - app_handle: tauri::AppHandle, - args: Vec, -) -> Result { - if args.is_empty() || args[0].is_empty() { - return Err("exist_sync error: Invalid argument".to_string()); - } - - let path = resolve_path(app_handle, &args[0]); - Ok(path.exists()) -} - -#[derive(serde::Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct FileStat { - pub is_directory: bool, - pub size: u64, -} - -#[tauri::command] -pub fn file_stat( - app_handle: tauri::AppHandle, - args: String, -) -> Result { - if args.is_empty() { - return Err("file_stat error: Invalid argument".to_string()); - } - - let path = resolve_path(app_handle, &args); - let metadata = 
fs::metadata(&path).map_err(|e| e.to_string())?; - let is_directory = metadata.is_dir(); - let size = if is_directory { 0 } else { metadata.len() }; - let file_stat = FileStat { is_directory, size }; - Ok(file_stat) -} - -#[tauri::command] -pub fn read_file_sync( - app_handle: tauri::AppHandle, - args: Vec, -) -> Result { - if args.is_empty() || args[0].is_empty() { - return Err("read_file_sync error: Invalid argument".to_string()); - } - - let path = resolve_path(app_handle, &args[0]); - fs::read_to_string(&path).map_err(|e| e.to_string()) -} - -#[tauri::command] -pub fn write_file_sync( - app_handle: tauri::AppHandle, - args: Vec, -) -> Result<(), String> { - if args.len() < 2 || args[0].is_empty() || args[1].is_empty() { - return Err("write_file_sync error: Invalid argument".to_string()); - } - - let path = resolve_path(app_handle, &args[0]); - let content = &args[1]; - fs::write(&path, content).map_err(|e| e.to_string()) -} - -#[tauri::command] -pub fn readdir_sync( - app_handle: tauri::AppHandle, - args: Vec, -) -> Result, String> { - if args.is_empty() || args[0].is_empty() { - return Err("read_dir_sync error: Invalid argument".to_string()); - } - - let path = resolve_path(app_handle, &args[0]); - let entries = fs::read_dir(&path).map_err(|e| e.to_string())?; - let paths: Vec = entries - .filter_map(|entry| entry.ok()) - .map(|entry| entry.path().to_string_lossy().to_string()) - .collect(); - Ok(paths) -} - -fn normalize_file_path(path: &str) -> String { - path.replace("file:/", "").replace("file:\\", "") -} - -fn resolve_path(app_handle: tauri::AppHandle, path: &str) -> PathBuf { - let path = if path.starts_with("file:/") || path.starts_with("file:\\") { - let normalized = normalize_file_path(path); - let relative_normalized = normalized - .trim_start_matches(std::path::MAIN_SEPARATOR) - .trim_start_matches('/') - .trim_start_matches('\\'); - get_jan_data_folder_path(app_handle).join(relative_normalized) - } else { - PathBuf::from(path) - }; - - if 
path.starts_with("http://") || path.starts_with("https://") { - path - } else { - path.canonicalize().unwrap_or(path) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::fs::{self, File}; - use std::io::Write; - use tauri::test::mock_app; - - #[test] - fn test_rm() { - let app = mock_app(); - let path = "test_rm_dir"; - fs::create_dir_all(get_jan_data_folder_path(app.handle().clone()).join(path)).unwrap(); - let args = vec![format!("file://{}", path).to_string()]; - let result = rm(app.handle().clone(), args); - assert!(result.is_ok()); - assert!(!get_jan_data_folder_path(app.handle().clone()) - .join(path) - .exists()); - } - - #[test] - fn test_mkdir() { - let app = mock_app(); - let path = "test_mkdir_dir"; - let args = vec![format!("file://{}", path).to_string()]; - let result = mkdir(app.handle().clone(), args); - assert!(result.is_ok()); - assert!(get_jan_data_folder_path(app.handle().clone()) - .join(path) - .exists()); - fs::remove_dir_all(get_jan_data_folder_path(app.handle().clone()).join(path)).unwrap(); - } - - #[test] - fn test_join_path() { - let app = mock_app(); - let path = "file://test_dir"; - let args = vec![path.to_string(), "test_file".to_string()]; - let result = join_path(app.handle().clone(), args).unwrap(); - assert_eq!( - result, - get_jan_data_folder_path(app.handle().clone()) - .join("test_dir/test_file") - .to_string_lossy() - .to_string() - ); - } - - #[test] - fn test_exists_sync() { - let app = mock_app(); - let path = "file://test_exists_sync_file"; - let dir_path = get_jan_data_folder_path(app.handle().clone()); - fs::create_dir_all(&dir_path).unwrap(); - let file_path = dir_path.join("test_exists_sync_file"); - File::create(&file_path).unwrap(); - let args: Vec = vec![path.to_string()]; - let result = exists_sync(app.handle().clone(), args).unwrap(); - assert!(result); - fs::remove_file(file_path).unwrap(); - } - - #[test] - fn test_read_file_sync() { - let app = mock_app(); - let path = 
"file://test_read_file_sync_file"; - let dir_path = get_jan_data_folder_path(app.handle().clone()); - fs::create_dir_all(&dir_path).unwrap(); - let file_path = dir_path.join("test_read_file_sync_file"); - let mut file = File::create(&file_path).unwrap(); - file.write_all(b"test content").unwrap(); - let args = vec![path.to_string()]; - let result = read_file_sync(app.handle().clone(), args).unwrap(); - assert_eq!(result, "test content".to_string()); - fs::remove_file(file_path).unwrap(); - } - - #[test] - fn test_readdir_sync() { - let app = mock_app(); - let path = "file://test_readdir_sync_dir"; - let dir_path = get_jan_data_folder_path(app.handle().clone()).join(path); - fs::create_dir_all(&dir_path).unwrap(); - File::create(dir_path.join("file1.txt")).unwrap(); - File::create(dir_path.join("file2.txt")).unwrap(); - - let args = vec![dir_path.to_string_lossy().to_string()]; - let result = readdir_sync(app.handle().clone(), args).unwrap(); - assert_eq!(result.len(), 2); - - fs::remove_dir_all(dir_path).unwrap(); - } -} diff --git a/src-tauri/src/core/hardware/mod.rs b/src-tauri/src/core/hardware/mod.rs deleted file mode 100644 index ea2435cb0..000000000 --- a/src-tauri/src/core/hardware/mod.rs +++ /dev/null @@ -1,359 +0,0 @@ -pub mod amd; -pub mod nvidia; -pub mod vulkan; - -use std::sync::OnceLock; -use sysinfo::System; -use tauri::{path::BaseDirectory, Manager}; - -static SYSTEM_INFO: OnceLock = OnceLock::new(); - -#[derive(Clone, serde::Serialize, Debug)] -struct CpuStaticInfo { - name: String, - core_count: usize, - arch: String, - extensions: Vec, -} - -impl CpuStaticInfo { - fn new() -> Self { - let mut system = System::new(); - system.refresh_cpu_all(); - - let name = system - .cpus() - .first() - .map(|cpu| { - let brand = cpu.brand(); - if brand.is_empty() { - cpu.name() - } else { - brand - } - }) - .unwrap_or("unknown") - .to_string(); - - CpuStaticInfo { - name, - core_count: System::physical_core_count().unwrap_or(0), - arch: 
std::env::consts::ARCH.to_string(), - extensions: CpuStaticInfo::get_extensions(), - } - } - - // TODO: see if we need to check for all CPU extensions - #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] - fn get_extensions() -> Vec { - let mut exts = vec![]; - - // fpu is always present on modern x86 processors, - // but is_x86_feature_detected doesn't support it - exts.push("fpu".to_string()); - if is_x86_feature_detected!("mmx") { - exts.push("mmx".to_string()); - } - if is_x86_feature_detected!("sse") { - exts.push("sse".to_string()); - } - if is_x86_feature_detected!("sse2") { - exts.push("sse2".to_string()); - } - if is_x86_feature_detected!("sse3") { - exts.push("sse3".to_string()); - } - if is_x86_feature_detected!("ssse3") { - exts.push("ssse3".to_string()); - } - if is_x86_feature_detected!("sse4.1") { - exts.push("sse4_1".to_string()); - } - if is_x86_feature_detected!("sse4.2") { - exts.push("sse4_2".to_string()); - } - if is_x86_feature_detected!("pclmulqdq") { - exts.push("pclmulqdq".to_string()); - } - if is_x86_feature_detected!("avx") { - exts.push("avx".to_string()); - } - if is_x86_feature_detected!("avx2") { - exts.push("avx2".to_string()); - } - if is_x86_feature_detected!("avx512f") { - exts.push("avx512_f".to_string()); - } - if is_x86_feature_detected!("avx512dq") { - exts.push("avx512_dq".to_string()); - } - if is_x86_feature_detected!("avx512ifma") { - exts.push("avx512_ifma".to_string()); - } - if is_x86_feature_detected!("avx512pf") { - exts.push("avx512_pf".to_string()); - } - if is_x86_feature_detected!("avx512er") { - exts.push("avx512_er".to_string()); - } - if is_x86_feature_detected!("avx512cd") { - exts.push("avx512_cd".to_string()); - } - if is_x86_feature_detected!("avx512bw") { - exts.push("avx512_bw".to_string()); - } - if is_x86_feature_detected!("avx512vl") { - exts.push("avx512_vl".to_string()); - } - if is_x86_feature_detected!("avx512vbmi") { - exts.push("avx512_vbmi".to_string()); - } - if 
is_x86_feature_detected!("avx512vbmi2") { - exts.push("avx512_vbmi2".to_string()); - } - if is_x86_feature_detected!("avx512vnni") { - exts.push("avx512_vnni".to_string()); - } - if is_x86_feature_detected!("avx512bitalg") { - exts.push("avx512_bitalg".to_string()); - } - if is_x86_feature_detected!("avx512vpopcntdq") { - exts.push("avx512_vpopcntdq".to_string()); - } - // avx512_4vnniw and avx512_4fmaps are only available on Intel Knights Mill, which are - // very rare. https://en.wikipedia.org/wiki/AVX-512 - // is_x86_feature_detected doesn't support them - if is_x86_feature_detected!("avx512vp2intersect") { - exts.push("avx512_vp2intersect".to_string()); - } - if is_x86_feature_detected!("aes") { - exts.push("aes".to_string()); - } - if is_x86_feature_detected!("f16c") { - exts.push("f16c".to_string()); - } - - exts - } - - // Cortex always returns empty list for non-x86 - #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))] - fn get_extensions() -> Vec { - vec![] - } -} - -// https://devicehunt.com/all-pci-vendors -pub const VENDOR_ID_AMD: u32 = 0x1002; -pub const VENDOR_ID_NVIDIA: u32 = 0x10DE; -pub const VENDOR_ID_INTEL: u32 = 0x8086; - -#[derive(Debug, Clone)] -pub enum Vendor { - AMD, - NVIDIA, - Intel, - Unknown(u32), -} - -impl serde::Serialize for Vendor { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - match self { - Vendor::AMD => "AMD".serialize(serializer), - Vendor::NVIDIA => "NVIDIA".serialize(serializer), - Vendor::Intel => "Intel".serialize(serializer), - Vendor::Unknown(vendor_id) => { - let formatted = format!("Unknown (vendor_id: {})", vendor_id); - serializer.serialize_str(&formatted) - } - } - } -} - -impl Vendor { - pub fn from_vendor_id(vendor_id: u32) -> Self { - match vendor_id { - VENDOR_ID_AMD => Vendor::AMD, - VENDOR_ID_NVIDIA => Vendor::NVIDIA, - VENDOR_ID_INTEL => Vendor::Intel, - _ => Vendor::Unknown(vendor_id), - } - } -} - -#[derive(Clone, Debug, serde::Serialize)] -pub struct GpuInfo 
{ - pub name: String, - pub total_memory: u64, - pub vendor: Vendor, - pub uuid: String, - pub driver_version: String, - pub nvidia_info: Option, - pub vulkan_info: Option, -} - -impl GpuInfo { - pub fn get_usage(&self) -> GpuUsage { - match self.vendor { - Vendor::NVIDIA => self.get_usage_nvidia(), - Vendor::AMD => self.get_usage_amd(), - _ => self.get_usage_unsupported(), - } - } - - pub fn get_usage_unsupported(&self) -> GpuUsage { - GpuUsage { - uuid: self.uuid.clone(), - used_memory: 0, - total_memory: 0, - } - } -} - -#[derive(serde::Serialize, Clone, Debug)] -pub struct SystemInfo { - cpu: CpuStaticInfo, - os_type: String, - os_name: String, - total_memory: u64, - gpus: Vec, -} - -#[derive(serde::Serialize, Clone, Debug)] -pub struct GpuUsage { - uuid: String, - used_memory: u64, - total_memory: u64, -} - -#[derive(serde::Serialize, Clone, Debug)] -pub struct SystemUsage { - cpu: f32, - used_memory: u64, - total_memory: u64, - gpus: Vec, -} - -fn get_jan_libvulkan_path(app: tauri::AppHandle) -> String { - let lib_name = if cfg!(target_os = "windows") { - "vulkan-1.dll" - } else if cfg!(target_os = "linux") { - "libvulkan.so" - } else { - return "".to_string(); - }; - - // NOTE: this does not work in test mode (mock app) - match app.path().resolve( - format!("resources/lib/{}", lib_name), - BaseDirectory::Resource, - ) { - Ok(lib_path) => lib_path.to_string_lossy().to_string(), - Err(_) => "".to_string(), - } -} - -#[tauri::command] -pub fn get_system_info(app: tauri::AppHandle) -> SystemInfo { - SYSTEM_INFO - .get_or_init(|| { - let mut system = System::new(); - system.refresh_memory(); - - let mut gpu_map = std::collections::HashMap::new(); - for gpu in nvidia::get_nvidia_gpus() { - gpu_map.insert(gpu.uuid.clone(), gpu); - } - - // try system vulkan first - let paths = vec!["".to_string(), get_jan_libvulkan_path(app.clone())]; - let mut vulkan_gpus = vec![]; - for path in paths { - vulkan_gpus = vulkan::get_vulkan_gpus(&path); - if !vulkan_gpus.is_empty() { 
- break; - } - } - - for gpu in vulkan_gpus { - match gpu_map.get_mut(&gpu.uuid) { - // for existing NVIDIA GPUs, add Vulkan info - Some(nvidia_gpu) => { - nvidia_gpu.vulkan_info = gpu.vulkan_info; - } - None => { - gpu_map.insert(gpu.uuid.clone(), gpu); - } - } - } - - let os_type = if cfg!(target_os = "windows") { - "windows" - } else if cfg!(target_os = "macos") { - "macos" - } else if cfg!(target_os = "linux") { - "linux" - } else { - "unknown" - }; - let os_name = System::long_os_version().unwrap_or("Unknown".to_string()); - - SystemInfo { - cpu: CpuStaticInfo::new(), - os_type: os_type.to_string(), - os_name, - total_memory: system.total_memory() / 1024 / 1024, // bytes to MiB - gpus: gpu_map.into_values().collect(), - } - }) - .clone() -} - -#[tauri::command] -pub fn get_system_usage(app: tauri::AppHandle) -> SystemUsage { - let mut system = System::new(); - system.refresh_memory(); - - // need to refresh 2 times to get CPU usage - system.refresh_cpu_all(); - std::thread::sleep(sysinfo::MINIMUM_CPU_UPDATE_INTERVAL); - system.refresh_cpu_all(); - - let cpus = system.cpus(); - let cpu_usage = - cpus.iter().map(|cpu| cpu.cpu_usage()).sum::() / (cpus.len().max(1) as f32); - - SystemUsage { - cpu: cpu_usage, - used_memory: system.used_memory() / 1024 / 1024, // bytes to MiB, - total_memory: system.total_memory() / 1024 / 1024, // bytes to MiB, - gpus: get_system_info(app.clone()) - .gpus - .iter() - .map(|gpu| gpu.get_usage()) - .collect(), - } -} - -#[cfg(test)] -mod tests { - use super::*; - use tauri::test::mock_app; - - #[test] - fn test_system_info() { - let app = mock_app(); - let info = get_system_info(app.handle().clone()); - println!("System Static Info: {:?}", info); - } - - #[test] - fn test_system_usage() { - let app = mock_app(); - let usage = get_system_usage(app.handle().clone()); - println!("System Usage Info: {:?}", usage); - } -} diff --git a/src-tauri/src/core/mcp/commands.rs b/src-tauri/src/core/mcp/commands.rs new file mode 100644 index 
000000000..56b1a6124 --- /dev/null +++ b/src-tauri/src/core/mcp/commands.rs @@ -0,0 +1,238 @@ +use rmcp::model::{CallToolRequestParam, CallToolResult, Tool}; +use rmcp::{service::RunningService, RoleClient}; +use serde_json::{Map, Value}; +use std::{collections::HashMap, sync::Arc}; +use tauri::{AppHandle, Emitter, Runtime, State}; +use tokio::{sync::Mutex, time::timeout}; + +use super::{ + constants::{DEFAULT_MCP_CONFIG, MCP_TOOL_CALL_TIMEOUT}, + helpers::{restart_active_mcp_servers, start_mcp_server_with_restart, stop_mcp_servers}, +}; +use crate::core::{app::commands::get_jan_data_folder_path, state::AppState}; +use std::fs; + +#[tauri::command] +pub async fn activate_mcp_server( + app: tauri::AppHandle, + state: State<'_, AppState>, + name: String, + config: Value, +) -> Result<(), String> { + let servers: Arc>>> = + state.mcp_servers.clone(); + + // Use the modified start_mcp_server_with_restart that returns first attempt result + start_mcp_server_with_restart(app, servers, name, config, Some(3)).await +} + +#[tauri::command] +pub async fn deactivate_mcp_server(state: State<'_, AppState>, name: String) -> Result<(), String> { + log::info!("Deactivating MCP server: {}", name); + + // First, mark server as manually deactivated to prevent restart + // Remove from active servers list to prevent restart + { + let mut active_servers = state.mcp_active_servers.lock().await; + active_servers.remove(&name); + log::info!("Removed MCP server {} from active servers list", name); + } + + // Mark as not successfully connected to prevent restart logic + { + let mut connected = state.mcp_successfully_connected.lock().await; + connected.insert(name.clone(), false); + log::info!("Marked MCP server {} as not successfully connected", name); + } + + // Reset restart count + { + let mut counts = state.mcp_restart_counts.lock().await; + counts.remove(&name); + log::info!("Reset restart count for MCP server {}", name); + } + + // Now remove and stop the server + let servers = 
state.mcp_servers.clone(); + let mut servers_map = servers.lock().await; + + let service = servers_map + .remove(&name) + .ok_or_else(|| format!("Server {} not found", name))?; + + // Release the lock before calling cancel + drop(servers_map); + + service.cancel().await.map_err(|e| e.to_string())?; + log::info!("Server {name} stopped successfully and marked as deactivated."); + Ok(()) +} + +#[tauri::command] +pub async fn restart_mcp_servers(app: AppHandle, state: State<'_, AppState>) -> Result<(), String> { + let servers = state.mcp_servers.clone(); + // Stop the servers + stop_mcp_servers(state.mcp_servers.clone()).await?; + + // Restart only previously active servers (like cortex) + restart_active_mcp_servers(&app, servers).await?; + + app.emit("mcp-update", "MCP servers updated") + .map_err(|e| format!("Failed to emit event: {}", e))?; + + Ok(()) +} + +/// Reset MCP restart count for a specific server (like cortex reset) +#[tauri::command] +pub async fn reset_mcp_restart_count( + state: State<'_, AppState>, + server_name: String, +) -> Result<(), String> { + let mut counts = state.mcp_restart_counts.lock().await; + + let count = match counts.get_mut(&server_name) { + Some(count) => count, + None => return Ok(()), // Server not found, nothing to reset + }; + + let old_count = *count; + *count = 0; + log::info!( + "MCP server {} restart count reset from {} to 0.", + server_name, + old_count + ); + Ok(()) +} + +#[tauri::command] +pub async fn get_connected_servers( + _app: AppHandle, + state: State<'_, AppState>, +) -> Result, String> { + let servers = state.mcp_servers.clone(); + let servers_map = servers.lock().await; + Ok(servers_map.keys().cloned().collect()) +} + +/// Retrieves all available tools from all MCP servers +/// +/// # Arguments +/// * `state` - Application state containing MCP server connections +/// +/// # Returns +/// * `Result, String>` - A vector of all tools if successful, or an error message if failed +/// +/// This function: +/// 1. 
Locks the MCP servers mutex to access server connections +/// 2. Iterates through all connected servers +/// 3. Gets the list of tools from each server +/// 4. Combines all tools into a single vector +/// 5. Returns the combined list of all available tools +#[tauri::command] +pub async fn get_tools(state: State<'_, AppState>) -> Result, String> { + let servers = state.mcp_servers.lock().await; + let mut all_tools: Vec = Vec::new(); + + for (_, service) in servers.iter() { + // List tools with timeout + let tools_future = service.list_all_tools(); + let tools = match timeout(MCP_TOOL_CALL_TIMEOUT, tools_future).await { + Ok(result) => result.map_err(|e| e.to_string())?, + Err(_) => { + log::warn!( + "Listing tools timed out after {} seconds", + MCP_TOOL_CALL_TIMEOUT.as_secs() + ); + continue; // Skip this server and continue with others + } + }; + + for tool in tools { + all_tools.push(tool); + } + } + + Ok(all_tools) +} + +/// Calls a tool on an MCP server by name with optional arguments +/// +/// # Arguments +/// * `state` - Application state containing MCP server connections +/// * `tool_name` - Name of the tool to call +/// * `arguments` - Optional map of argument names to values +/// +/// # Returns +/// * `Result` - Result of the tool call if successful, or error message if failed +/// +/// This function: +/// 1. Locks the MCP servers mutex to access server connections +/// 2. Searches through all servers for one containing the named tool +/// 3. When found, calls the tool on that server with the provided arguments +/// 4. 
Returns error if no server has the requested tool +#[tauri::command] +pub async fn call_tool( + state: State<'_, AppState>, + tool_name: String, + arguments: Option>, +) -> Result { + let servers = state.mcp_servers.lock().await; + + // Iterate through servers and find the first one that contains the tool + for (_, service) in servers.iter() { + let tools = match service.list_all_tools().await { + Ok(tools) => tools, + Err(_) => continue, // Skip this server if we can't list tools + }; + + if !tools.iter().any(|t| t.name == tool_name) { + continue; // Tool not found in this server, try next + } + + println!("Found tool {} in server", tool_name); + + // Call the tool with timeout + let tool_call = service.call_tool(CallToolRequestParam { + name: tool_name.clone().into(), + arguments, + }); + + return match timeout(MCP_TOOL_CALL_TIMEOUT, tool_call).await { + Ok(result) => result.map_err(|e| e.to_string()), + Err(_) => Err(format!( + "Tool call '{}' timed out after {} seconds", + tool_name, + MCP_TOOL_CALL_TIMEOUT.as_secs() + )), + }; + } + + Err(format!("Tool {} not found", tool_name)) +} + +#[tauri::command] +pub async fn get_mcp_configs(app: AppHandle) -> Result { + let mut path = get_jan_data_folder_path(app); + path.push("mcp_config.json"); + log::info!("read mcp configs, path: {:?}", path); + + // Create default empty config if file doesn't exist + if !path.exists() { + log::info!("mcp_config.json not found, creating default empty config"); + fs::write(&path, DEFAULT_MCP_CONFIG) + .map_err(|e| format!("Failed to create default MCP config: {}", e))?; + } + + fs::read_to_string(path).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub async fn save_mcp_configs(app: AppHandle, configs: String) -> Result<(), String> { + let mut path = get_jan_data_folder_path(app); + path.push("mcp_config.json"); + log::info!("save mcp configs, path: {:?}", path); + + fs::write(path, configs).map_err(|e| e.to_string()) +} diff --git a/src-tauri/src/core/mcp/constants.rs 
b/src-tauri/src/core/mcp/constants.rs new file mode 100644 index 000000000..a93e62494 --- /dev/null +++ b/src-tauri/src/core/mcp/constants.rs @@ -0,0 +1,46 @@ +use std::time::Duration; + +// MCP Constants +pub const MCP_TOOL_CALL_TIMEOUT: Duration = Duration::from_secs(30); +pub const MCP_BASE_RESTART_DELAY_MS: u64 = 1000; // Start with 1 second +pub const MCP_MAX_RESTART_DELAY_MS: u64 = 30000; // Cap at 30 seconds +pub const MCP_BACKOFF_MULTIPLIER: f64 = 2.0; // Double the delay each time + +pub const DEFAULT_MCP_CONFIG: &str = r#"{ + "mcpServers": { + "browsermcp": { + "command": "npx", + "args": ["@browsermcp/mcp"], + "env": {}, + "active": false + }, + "fetch": { + "command": "uvx", + "args": ["mcp-server-fetch"], + "env": {}, + "active": false + }, + "serper": { + "command": "npx", + "args": ["-y", "serper-search-scrape-mcp-server"], + "env": { "SERPER_API_KEY": "YOUR_SERPER_API_KEY_HERE" }, + "active": false + }, + "filesystem": { + "command": "npx", + "args": [ + "-y", + "@modelcontextprotocol/server-filesystem", + "/path/to/other/allowed/dir" + ], + "env": {}, + "active": false + }, + "sequential-thinking": { + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"], + "env": {}, + "active": false + } + } +}"#; diff --git a/src-tauri/src/core/mcp.rs b/src-tauri/src/core/mcp/helpers.rs similarity index 64% rename from src-tauri/src/core/mcp.rs rename to src-tauri/src/core/mcp/helpers.rs index e8775d187..e6b72488d 100644 --- a/src-tauri/src/core/mcp.rs +++ b/src-tauri/src/core/mcp/helpers.rs @@ -1,7 +1,5 @@ -use rmcp::model::{CallToolRequestParam, CallToolResult, Tool}; use rmcp::{service::RunningService, transport::TokioChildProcess, RoleClient, ServiceExt}; -use serde_json::{Map, Value}; -use std::fs; +use serde_json::Value; use std::{collections::HashMap, env, sync::Arc, time::Duration}; use tauri::{AppHandle, Emitter, Manager, Runtime, State}; use tokio::{ @@ -10,59 +8,11 @@ use tokio::{ time::{sleep, timeout}, }; -use 
super::{ - cmd::get_jan_data_folder_path, - state::AppState, - utils::can_override_npx, +use super::constants::{ + MCP_BACKOFF_MULTIPLIER, MCP_BASE_RESTART_DELAY_MS, MCP_MAX_RESTART_DELAY_MS, }; - -const DEFAULT_MCP_CONFIG: &str = r#"{ - "mcpServers": { - "browsermcp": { - "command": "npx", - "args": ["@browsermcp/mcp"], - "env": {}, - "active": false - }, - "fetch": { - "command": "uvx", - "args": ["mcp-server-fetch"], - "env": {}, - "active": false - }, - "serper": { - "command": "npx", - "args": ["-y", "serper-search-scrape-mcp-server"], - "env": { "SERPER_API_KEY": "YOUR_SERPER_API_KEY_HERE" }, - "active": false - }, - "filesystem": { - "command": "npx", - "args": [ - "-y", - "@modelcontextprotocol/server-filesystem", - "/path/to/other/allowed/dir" - ], - "env": {}, - "active": false - }, - "sequential-thinking": { - "command": "npx", - "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"], - "env": {}, - "active": false - } - } -} -"#; - -// Timeout for MCP tool calls (30 seconds) -const MCP_TOOL_CALL_TIMEOUT: Duration = Duration::from_secs(30); - -// MCP server restart configuration with exponential backoff -const MCP_BASE_RESTART_DELAY_MS: u64 = 1000; // Start with 1 second -const MCP_MAX_RESTART_DELAY_MS: u64 = 30000; // Cap at 30 seconds -const MCP_BACKOFF_MULTIPLIER: f64 = 2.0; // Double the delay each time +use crate::core::{app::commands::get_jan_data_folder_path, state::AppState}; +use jan_utils::can_override_npx; /// Calculate exponential backoff delay with jitter /// @@ -71,43 +21,43 @@ const MCP_BACKOFF_MULTIPLIER: f64 = 2.0; // Double the delay each time /// /// # Returns /// * `u64` - Delay in milliseconds, capped at MCP_MAX_RESTART_DELAY_MS -fn calculate_exponential_backoff_delay(attempt: u32) -> u64 { +pub fn calculate_exponential_backoff_delay(attempt: u32) -> u64 { use std::cmp; - + // Calculate base exponential delay: base_delay * multiplier^(attempt-1) - let exponential_delay = (MCP_BASE_RESTART_DELAY_MS as f64) - * 
MCP_BACKOFF_MULTIPLIER.powi((attempt - 1) as i32); - + let exponential_delay = + (MCP_BASE_RESTART_DELAY_MS as f64) * MCP_BACKOFF_MULTIPLIER.powi((attempt - 1) as i32); + // Cap the delay at maximum let capped_delay = cmp::min(exponential_delay as u64, MCP_MAX_RESTART_DELAY_MS); - + // Add jitter (±25% randomness) to prevent thundering herd let jitter_range = (capped_delay as f64 * 0.25) as u64; let jitter = if jitter_range > 0 { use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; - + // Use attempt number as seed for deterministic but varied jitter let mut hasher = DefaultHasher::new(); attempt.hash(&mut hasher); let hash = hasher.finish(); - + // Convert hash to jitter value in range [-jitter_range, +jitter_range] let jitter_offset = (hash % (jitter_range * 2)) as i64 - jitter_range as i64; jitter_offset } else { 0 }; - + // Apply jitter while ensuring delay stays positive and within bounds let final_delay = cmp::max( 100, // Minimum 100ms delay cmp::min( MCP_MAX_RESTART_DELAY_MS, - (capped_delay as i64 + jitter) as u64 - ) + (capped_delay as i64 + jitter) as u64, + ), ); - + final_delay } @@ -156,7 +106,7 @@ pub async fn run_mcp_commands( let servers_clone = servers_state.clone(); let name_clone = name.clone(); let config_clone = config.clone(); - + // Spawn task for initial startup attempt let handle = tokio::spawn(async move { // Only wait for the initial startup attempt, not the monitoring @@ -166,44 +116,47 @@ pub async fn run_mcp_commands( name_clone.clone(), config_clone.clone(), Some(3), // Default max restarts for startup - ).await; - + ) + .await; + // If initial startup failed, we still want to continue with other servers if let Err(e) = &result { - log::error!("Initial startup failed for MCP server {}: {}", name_clone, e); + log::error!( + "Initial startup failed for MCP server {}: {}", + name_clone, + e + ); } - + (name_clone, result) }); - + startup_handles.push(handle); } // Wait for all initial startup attempts to 
complete let mut successful_count = 0; let mut failed_count = 0; - + for handle in startup_handles { match handle.await { - Ok((name, result)) => { - match result { - Ok(_) => { - log::info!("MCP server {} initialized successfully", name); - successful_count += 1; - } - Err(e) => { - log::error!("MCP server {} failed to initialize: {}", name, e); - failed_count += 1; - } + Ok((name, result)) => match result { + Ok(_) => { + log::info!("MCP server {} initialized successfully", name); + successful_count += 1; } - } + Err(e) => { + log::error!("MCP server {} failed to initialize: {}", name, e); + failed_count += 1; + } + }, Err(e) => { log::error!("Failed to join startup task: {}", e); failed_count += 1; } } } - + log::info!( "MCP server initialization complete: {} successful, {} failed", successful_count, @@ -214,17 +167,17 @@ pub async fn run_mcp_commands( } /// Monitor MCP server health without removing it from the HashMap -async fn monitor_mcp_server_handle( +pub async fn monitor_mcp_server_handle( servers_state: Arc>>>, name: String, ) -> Option { log::info!("Monitoring MCP server {} health", name); - + // Monitor server health with periodic checks loop { // Small delay between health checks sleep(Duration::from_secs(5)).await; - + // Check if server is still healthy by trying to list tools let health_check_result = { let servers = servers_state.lock().await; @@ -250,10 +203,13 @@ async fn monitor_mcp_server_handle( return Some(rmcp::service::QuitReason::Closed); } }; - + if !health_check_result { // Server failed health check - remove it and return - log::error!("MCP server {} failed health check, removing from active servers", name); + log::error!( + "MCP server {} failed health check, removing from active servers", + name + ); let mut servers = servers_state.lock().await; if let Some(service) = servers.remove(&name) { // Try to cancel the service gracefully @@ -266,7 +222,7 @@ async fn monitor_mcp_server_handle( /// Starts an MCP server with restart monitoring 
/// Returns the result of the first start attempt, then continues with restart monitoring -async fn start_mcp_server_with_restart( +pub async fn start_mcp_server_with_restart( app: AppHandle, servers_state: Arc>>>, name: String, @@ -277,12 +233,12 @@ async fn start_mcp_server_with_restart( let restart_counts = app_state.mcp_restart_counts.clone(); let active_servers_state = app_state.mcp_active_servers.clone(); let successfully_connected = app_state.mcp_successfully_connected.clone(); - + // Store active server config for restart purposes store_active_server_config(&active_servers_state, &name, &config).await; - + let max_restarts = max_restarts.unwrap_or(5); - + // Try the first start attempt and return its result log::info!("Starting MCP server {} (Initial attempt)", name); let first_start_result = schedule_mcp_start_task( @@ -290,19 +246,20 @@ async fn start_mcp_server_with_restart( servers_state.clone(), name.clone(), config.clone(), - ).await; + ) + .await; match first_start_result { Ok(_) => { log::info!("MCP server {} started successfully on first attempt", name); reset_restart_count(&restart_counts, &name).await; - + // Check if server was marked as successfully connected (passed verification) let was_verified = { let connected = successfully_connected.lock().await; connected.get(&name).copied().unwrap_or(false) }; - + if was_verified { // Only spawn monitoring task if server passed verification spawn_server_monitoring_task( @@ -313,24 +270,32 @@ async fn start_mcp_server_with_restart( max_restarts, restart_counts, successfully_connected, - ).await; - + ) + .await; + Ok(()) } else { // Server failed verification, don't monitor for restarts log::error!("MCP server {} failed verification after startup", name); - Err(format!("MCP server {} failed verification after startup", name)) + Err(format!( + "MCP server {} failed verification after startup", + name + )) } } Err(e) => { - log::error!("Failed to start MCP server {} on first attempt: {}", name, e); + 
log::error!( + "Failed to start MCP server {} on first attempt: {}", + name, + e + ); Err(e) } } } /// Helper function to handle the restart loop logic -async fn start_restart_loop( +pub async fn start_restart_loop( app: AppHandle, servers_state: Arc>>>, name: String, @@ -353,11 +318,12 @@ async fn start_restart_loop( name, max_restarts ); - if let Err(e) = app.emit("mcp_max_restarts_reached", + if let Err(e) = app.emit( + "mcp_max_restarts_reached", serde_json::json!({ "server": name, "max_restarts": max_restarts - }) + }), ) { log::error!("Failed to emit mcp_max_restarts_reached event: {e}"); } @@ -387,18 +353,19 @@ async fn start_restart_loop( servers_state.clone(), name.clone(), config.clone(), - ).await; + ) + .await; match start_result { Ok(_) => { log::info!("MCP server {} restarted successfully.", name); - + // Check if server passed verification (was marked as successfully connected) let passed_verification = { let connected = successfully_connected.lock().await; connected.get(&name).copied().unwrap_or(false) }; - + if !passed_verification { log::error!( "MCP server {} failed verification after restart - stopping permanently", @@ -406,7 +373,7 @@ async fn start_restart_loop( ); break; } - + // Reset restart count on successful restart with verification { let mut counts = restart_counts.lock().await; @@ -423,10 +390,8 @@ async fn start_restart_loop( } // Monitor the server again - let quit_reason = monitor_mcp_server_handle( - servers_state.clone(), - name.clone(), - ).await; + let quit_reason = + monitor_mcp_server_handle(servers_state.clone(), name.clone()).await; log::info!("MCP server {} quit with reason: {:?}", name, quit_reason); @@ -464,7 +429,7 @@ async fn start_restart_loop( } Err(e) => { log::error!("Failed to restart MCP server {}: {}", name, e); - + // Check if server was marked as successfully connected before let was_connected = { let connected = successfully_connected.lock().await; @@ -485,21 +450,7 @@ async fn start_restart_loop( } } 
-#[tauri::command] -pub async fn activate_mcp_server( - app: tauri::AppHandle, - state: State<'_, AppState>, - name: String, - config: Value, -) -> Result<(), String> { - let servers: Arc>>> = - state.mcp_servers.clone(); - - // Use the modified start_mcp_server_with_restart that returns first attempt result - start_mcp_server_with_restart(app, servers, name, config, Some(3)).await -} - -async fn schedule_mcp_start_task( +pub async fn schedule_mcp_start_task( app: tauri::AppHandle, servers: Arc>>>, name: String, @@ -511,7 +462,7 @@ async fn schedule_mcp_start_task( .parent() .expect("Executable must have a parent directory"); let bin_path = exe_parent_path.to_path_buf(); - + let (command, args, envs) = extract_command_args(&config) .ok_or_else(|| format!("Failed to extract command args from config for {name}"))?; @@ -535,12 +486,12 @@ async fn schedule_mcp_start_task( cmd.arg("run"); cmd.env("UV_CACHE_DIR", cache_dir.to_str().unwrap().to_string()); } - + #[cfg(windows)] { cmd.creation_flags(0x08000000); // CREATE_NO_WINDOW: prevents shell window on Windows } - + let app_path_str = app_path.to_str().unwrap().to_string(); let log_file_path = format!("{}/logs/app.log", app_path_str); match std::fs::OpenOptions::new() @@ -568,13 +519,14 @@ async fn schedule_mcp_start_task( } }); - let process = TokioChildProcess::new(cmd) - .map_err(|e| { - log::error!("Failed to run command {name}: {e}"); - format!("Failed to run command {name}: {e}") - })?; + let process = TokioChildProcess::new(cmd).map_err(|e| { + log::error!("Failed to run command {name}: {e}"); + format!("Failed to run command {name}: {e}") + })?; - let service = ().serve(process).await + let service = () + .serve(process) + .await .map_err(|e| format!("Failed to start MCP server {name}: {e}"))?; // Get peer info and clone the needed values before moving the service @@ -595,15 +547,18 @@ async fn schedule_mcp_start_task( // This prevents race conditions where the server quits immediately let verification_delay = 
Duration::from_millis(500); sleep(verification_delay).await; - + // Check if server is still running after the verification delay let server_still_running = { let servers_map = servers.lock().await; servers_map.contains_key(&name) }; - + if !server_still_running { - return Err(format!("MCP server {} quit immediately after starting", name)); + return Err(format!( + "MCP server {} quit immediately after starting", + name + )); } // Mark server as successfully connected (for restart policy) @@ -626,48 +581,7 @@ async fn schedule_mcp_start_task( Ok(()) } -#[tauri::command] -pub async fn deactivate_mcp_server(state: State<'_, AppState>, name: String) -> Result<(), String> { - log::info!("Deactivating MCP server: {}", name); - - // First, mark server as manually deactivated to prevent restart - // Remove from active servers list to prevent restart - { - let mut active_servers = state.mcp_active_servers.lock().await; - active_servers.remove(&name); - log::info!("Removed MCP server {} from active servers list", name); - } - - // Mark as not successfully connected to prevent restart logic - { - let mut connected = state.mcp_successfully_connected.lock().await; - connected.insert(name.clone(), false); - log::info!("Marked MCP server {} as not successfully connected", name); - } - - // Reset restart count - { - let mut counts = state.mcp_restart_counts.lock().await; - counts.remove(&name); - log::info!("Reset restart count for MCP server {}", name); - } - - // Now remove and stop the server - let servers = state.mcp_servers.clone(); - let mut servers_map = servers.lock().await; - - let service = servers_map.remove(&name) - .ok_or_else(|| format!("Server {} not found", name))?; - - // Release the lock before calling cancel - drop(servers_map); - - service.cancel().await.map_err(|e| e.to_string())?; - log::info!("Server {name} stopped successfully and marked as deactivated."); - Ok(()) -} - -fn extract_command_args( +pub fn extract_command_args( config: &Value, ) -> 
Option<(String, Vec, serde_json::Map)> { let obj = config.as_object()?; @@ -681,27 +595,12 @@ fn extract_command_args( Some((command, args, envs)) } -fn extract_active_status(config: &Value) -> Option { +pub fn extract_active_status(config: &Value) -> Option { let obj = config.as_object()?; let active = obj.get("active")?.as_bool()?; Some(active) } -#[tauri::command] -pub async fn restart_mcp_servers(app: AppHandle, state: State<'_, AppState>) -> Result<(), String> { - let servers = state.mcp_servers.clone(); - // Stop the servers - stop_mcp_servers(state.mcp_servers.clone()).await?; - - // Restart only previously active servers (like cortex) - restart_active_mcp_servers(&app, servers).await?; - - app.emit("mcp-update", "MCP servers updated") - .map_err(|e| format!("Failed to emit event: {}", e))?; - - Ok(()) -} - /// Restart only servers that were previously active (like cortex restart behavior) pub async fn restart_active_mcp_servers( app: &AppHandle, @@ -709,18 +608,21 @@ pub async fn restart_active_mcp_servers( ) -> Result<(), String> { let app_state = app.state::(); let active_servers = app_state.mcp_active_servers.lock().await; - - log::info!("Restarting {} previously active MCP servers", active_servers.len()); - + + log::info!( + "Restarting {} previously active MCP servers", + active_servers.len() + ); + for (name, config) in active_servers.iter() { log::info!("Restarting MCP server: {}", name); - + // Start server with restart monitoring - spawn async task let app_clone = app.clone(); let servers_clone = servers_state.clone(); let name_clone = name.clone(); let config_clone = config.clone(); - + tauri::async_runtime::spawn(async move { let _ = start_mcp_server_with_restart( app_clone, @@ -728,27 +630,30 @@ pub async fn restart_active_mcp_servers( name_clone, config_clone, Some(3), // Default max restarts for startup - ).await; + ) + .await; }); } - + Ok(()) } -/// Reset MCP restart count for a specific server (like cortex reset) -#[tauri::command] -pub 
async fn reset_mcp_restart_count(state: State<'_, AppState>, server_name: String) -> Result<(), String> { - let mut counts = state.mcp_restart_counts.lock().await; - - let count = match counts.get_mut(&server_name) { - Some(count) => count, - None => return Ok(()), // Server not found, nothing to reset - }; +pub async fn clean_up_mcp_servers(state: State<'_, AppState>) { + log::info!("Cleaning up MCP servers"); - let old_count = *count; - *count = 0; - log::info!("MCP server {} restart count reset from {} to 0.", server_name, old_count); - Ok(()) + // Stop all running MCP servers + let _ = stop_mcp_servers(state.mcp_servers.clone()).await; + + // Clear active servers and restart counts + { + let mut active_servers = state.mcp_active_servers.lock().await; + active_servers.clear(); + } + { + let mut restart_counts = state.mcp_restart_counts.lock().await; + restart_counts.clear(); + } + log::info!("MCP servers cleaned up successfully"); } pub async fn stop_mcp_servers( @@ -765,139 +670,8 @@ pub async fn stop_mcp_servers( Ok(()) } -#[tauri::command] -pub async fn get_connected_servers( - _app: AppHandle, - state: State<'_, AppState>, -) -> Result, String> { - let servers = state.mcp_servers.clone(); - let servers_map = servers.lock().await; - Ok(servers_map.keys().cloned().collect()) -} - -/// Retrieves all available tools from all MCP servers -/// -/// # Arguments -/// * `state` - Application state containing MCP server connections -/// -/// # Returns -/// * `Result, String>` - A vector of all tools if successful, or an error message if failed -/// -/// This function: -/// 1. Locks the MCP servers mutex to access server connections -/// 2. Iterates through all connected servers -/// 3. Gets the list of tools from each server -/// 4. Combines all tools into a single vector -/// 5. 
Returns the combined list of all available tools -#[tauri::command] -pub async fn get_tools(state: State<'_, AppState>) -> Result, String> { - let servers = state.mcp_servers.lock().await; - let mut all_tools: Vec = Vec::new(); - - for (_, service) in servers.iter() { - // List tools with timeout - let tools_future = service.list_all_tools(); - let tools = match timeout(MCP_TOOL_CALL_TIMEOUT, tools_future).await { - Ok(result) => result.map_err(|e| e.to_string())?, - Err(_) => { - log::warn!( - "Listing tools timed out after {} seconds", - MCP_TOOL_CALL_TIMEOUT.as_secs() - ); - continue; // Skip this server and continue with others - } - }; - - for tool in tools { - all_tools.push(tool); - } - } - - Ok(all_tools) -} - -/// Calls a tool on an MCP server by name with optional arguments -/// -/// # Arguments -/// * `state` - Application state containing MCP server connections -/// * `tool_name` - Name of the tool to call -/// * `arguments` - Optional map of argument names to values -/// -/// # Returns -/// * `Result` - Result of the tool call if successful, or error message if failed -/// -/// This function: -/// 1. Locks the MCP servers mutex to access server connections -/// 2. Searches through all servers for one containing the named tool -/// 3. When found, calls the tool on that server with the provided arguments -/// 4. 
Returns error if no server has the requested tool -#[tauri::command] -pub async fn call_tool( - state: State<'_, AppState>, - tool_name: String, - arguments: Option>, -) -> Result { - let servers = state.mcp_servers.lock().await; - - // Iterate through servers and find the first one that contains the tool - for (_, service) in servers.iter() { - let tools = match service.list_all_tools().await { - Ok(tools) => tools, - Err(_) => continue, // Skip this server if we can't list tools - }; - - if !tools.iter().any(|t| t.name == tool_name) { - continue; // Tool not found in this server, try next - } - - println!("Found tool {} in server", tool_name); - - // Call the tool with timeout - let tool_call = service.call_tool(CallToolRequestParam { - name: tool_name.clone().into(), - arguments, - }); - - return match timeout(MCP_TOOL_CALL_TIMEOUT, tool_call).await { - Ok(result) => result.map_err(|e| e.to_string()), - Err(_) => Err(format!( - "Tool call '{}' timed out after {} seconds", - tool_name, - MCP_TOOL_CALL_TIMEOUT.as_secs() - )), - }; - } - - Err(format!("Tool {} not found", tool_name)) -} - -#[tauri::command] -pub async fn get_mcp_configs(app: AppHandle) -> Result { - let mut path = get_jan_data_folder_path(app); - path.push("mcp_config.json"); - log::info!("read mcp configs, path: {:?}", path); - - // Create default empty config if file doesn't exist - if !path.exists() { - log::info!("mcp_config.json not found, creating default empty config"); - fs::write(&path, DEFAULT_MCP_CONFIG) - .map_err(|e| format!("Failed to create default MCP config: {}", e))?; - } - - fs::read_to_string(path).map_err(|e| e.to_string()) -} - -#[tauri::command] -pub async fn save_mcp_configs(app: AppHandle, configs: String) -> Result<(), String> { - let mut path = get_jan_data_folder_path(app); - path.push("mcp_config.json"); - log::info!("save mcp configs, path: {:?}", path); - - fs::write(path, configs).map_err(|e| e.to_string()) -} - /// Store active server configuration for restart 
purposes -async fn store_active_server_config( +pub async fn store_active_server_config( active_servers_state: &Arc>>, name: &str, config: &Value, @@ -906,18 +680,14 @@ async fn store_active_server_config( active_servers.insert(name.to_string(), config.clone()); } - /// Reset restart count for a server -async fn reset_restart_count( - restart_counts: &Arc>>, - name: &str, -) { +pub async fn reset_restart_count(restart_counts: &Arc>>, name: &str) { let mut counts = restart_counts.lock().await; counts.insert(name.to_string(), 0); } /// Spawn the server monitoring task for handling restarts -async fn spawn_server_monitoring_task( +pub async fn spawn_server_monitoring_task( app: AppHandle, servers_state: Arc>>>, name: String, @@ -930,15 +700,17 @@ async fn spawn_server_monitoring_task( let servers_clone = servers_state.clone(); let name_clone = name.clone(); let config_clone = config.clone(); - + tauri::async_runtime::spawn(async move { // Monitor the server using RunningService's JoinHandle - let quit_reason = monitor_mcp_server_handle( - servers_clone.clone(), - name_clone.clone(), - ).await; + let quit_reason = + monitor_mcp_server_handle(servers_clone.clone(), name_clone.clone()).await; - log::info!("MCP server {} quit with reason: {:?}", name_clone, quit_reason); + log::info!( + "MCP server {} quit with reason: {:?}", + name_clone, + quit_reason + ); // Check if we should restart based on connection status and quit reason if should_restart_server(&successfully_connected, &name_clone, &quit_reason).await { @@ -951,13 +723,14 @@ async fn spawn_server_monitoring_task( max_restarts, restart_counts, successfully_connected, - ).await; + ) + .await; } }); } /// Determine if a server should be restarted based on its connection status and quit reason -async fn should_restart_server( +pub async fn should_restart_server( successfully_connected: &Arc>>, name: &str, quit_reason: &Option, @@ -989,35 +762,3 @@ async fn should_restart_server( } } } - -#[cfg(test)] -mod tests { - 
use super::*; - use std::collections::HashMap; - use std::fs::File; - use std::io::Write; - use std::sync::Arc; - use tauri::test::mock_app; - use tokio::sync::Mutex; - - #[tokio::test] - async fn test_run_mcp_commands() { - let app = mock_app(); - // Create a mock mcp_config.json file - let config_path = "mcp_config.json"; - let mut file: File = File::create(config_path).expect("Failed to create config file"); - file.write_all(b"{\"mcpServers\":{}}") - .expect("Failed to write to config file"); - - // Call the run_mcp_commands function - let servers_state: Arc>>> = - Arc::new(Mutex::new(HashMap::new())); - let result = run_mcp_commands(app.handle(), servers_state).await; - - // Assert that the function returns Ok(()) - assert!(result.is_ok()); - - // Clean up the mock config file - std::fs::remove_file(config_path).expect("Failed to remove config file"); - } -} diff --git a/src-tauri/src/core/mcp/mod.rs b/src-tauri/src/core/mcp/mod.rs new file mode 100644 index 000000000..5b20160de --- /dev/null +++ b/src-tauri/src/core/mcp/mod.rs @@ -0,0 +1,6 @@ +pub mod commands; +mod constants; +pub mod helpers; + +#[cfg(test)] +mod tests; diff --git a/src-tauri/src/core/mcp/tests.rs b/src-tauri/src/core/mcp/tests.rs new file mode 100644 index 000000000..8346449b2 --- /dev/null +++ b/src-tauri/src/core/mcp/tests.rs @@ -0,0 +1,39 @@ +use super::helpers::run_mcp_commands; +use crate::core::app::commands::get_jan_data_folder_path; +use rmcp::{service::RunningService, RoleClient}; +use std::collections::HashMap; +use std::fs::File; +use std::io::Write; +use std::sync::Arc; +use tauri::test::mock_app; +use tokio::sync::Mutex; + +#[tokio::test] +async fn test_run_mcp_commands() { + let app = mock_app(); + + // Get the app path where the config should be created + let app_path = get_jan_data_folder_path(app.handle().clone()); + let config_path = app_path.join("mcp_config.json"); + + // Ensure the directory exists + if let Some(parent) = config_path.parent() { + 
std::fs::create_dir_all(parent).expect("Failed to create parent directory"); + } + + // Create a mock mcp_config.json file at the correct location + let mut file: File = File::create(&config_path).expect("Failed to create config file"); + file.write_all(b"{\"mcpServers\":{}}") + .expect("Failed to write to config file"); + + // Call the run_mcp_commands function + let servers_state: Arc>>> = + Arc::new(Mutex::new(HashMap::new())); + let result = run_mcp_commands(app.handle(), servers_state).await; + + // Assert that the function returns Ok(()) + assert!(result.is_ok()); + + // Clean up the mock config file + std::fs::remove_file(&config_path).expect("Failed to remove config file"); +} diff --git a/src-tauri/src/core/mod.rs b/src-tauri/src/core/mod.rs index e91adedec..a20abd8dc 100644 --- a/src-tauri/src/core/mod.rs +++ b/src-tauri/src/core/mod.rs @@ -1,9 +1,10 @@ -pub mod cmd; -pub mod fs; -pub mod hardware; +pub mod app; +pub mod downloads; +pub mod extensions; +pub mod filesystem; pub mod mcp; pub mod server; pub mod setup; pub mod state; +pub mod system; pub mod threads; -pub mod utils; diff --git a/src-tauri/src/core/server/commands.rs b/src-tauri/src/core/server/commands.rs new file mode 100644 index 000000000..c1c6a9294 --- /dev/null +++ b/src-tauri/src/core/server/commands.rs @@ -0,0 +1,50 @@ +use tauri::{AppHandle, Manager, Runtime, State}; +use tauri_plugin_llamacpp::state::LlamacppState; + +use crate::core::server::proxy; +use crate::core::state::AppState; + +#[tauri::command] +pub async fn start_server( + app_handle: AppHandle, + state: State<'_, AppState>, + host: String, + port: u16, + prefix: String, + api_key: String, + trusted_hosts: Vec, +) -> Result { + let server_handle = state.server_handle.clone(); + let plugin_state: State = app_handle.state(); + let sessions = plugin_state.llama_server_process.clone(); + + proxy::start_server( + server_handle, + sessions, + host, + port, + prefix, + api_key, + vec![trusted_hosts], + ) + .await + .map_err(|e| 
e.to_string())?; + Ok(true) +} + +#[tauri::command] +pub async fn stop_server(state: State<'_, AppState>) -> Result<(), String> { + let server_handle = state.server_handle.clone(); + + proxy::stop_server(server_handle) + .await + .map_err(|e| e.to_string())?; + Ok(()) +} + +#[tauri::command] +pub async fn get_server_status(state: State<'_, AppState>) -> Result { + let server_handle = state.server_handle.clone(); + + Ok(proxy::is_server_running(server_handle).await) +} diff --git a/src-tauri/src/core/server/mod.rs b/src-tauri/src/core/server/mod.rs new file mode 100644 index 000000000..368235778 --- /dev/null +++ b/src-tauri/src/core/server/mod.rs @@ -0,0 +1,2 @@ +pub mod commands; +pub mod proxy; diff --git a/src-tauri/src/core/server.rs b/src-tauri/src/core/server/proxy.rs similarity index 89% rename from src-tauri/src/core/server.rs rename to src-tauri/src/core/server/proxy.rs index 5f7bf73cc..47f642716 100644 --- a/src-tauri/src/core/server.rs +++ b/src-tauri/src/core/server/proxy.rs @@ -1,17 +1,18 @@ use futures_util::StreamExt; +use hyper::body::Bytes; use hyper::service::{make_service_fn, service_fn}; use hyper::{Body, Request, Response, Server, StatusCode}; -use hyper::body::Bytes; +use jan_utils::{is_cors_header, is_valid_host, remove_prefix}; use reqwest::Client; +use serde_json; use std::collections::HashMap; use std::convert::Infallible; use std::net::SocketAddr; use std::sync::Arc; +use tauri_plugin_llamacpp::LLamaBackendSession; use tokio::sync::Mutex; -use serde_json; - -use crate::core::state::{LLamaBackendSession, ServerHandle}; +use crate::core::state::ServerHandle; /// Configuration for the proxy server #[derive(Clone)] @@ -21,24 +22,6 @@ struct ProxyConfig { trusted_hosts: Vec>, } -/// Removes a prefix from a path, ensuring proper formatting -fn remove_prefix(path: &str, prefix: &str) -> String { - log::debug!("Processing path: {}, removing prefix: {}", path, prefix); - - if !prefix.is_empty() && path.starts_with(prefix) { - let result = 
path[prefix.len()..].to_string(); - if result.is_empty() { - "/".to_string() - } else if result.starts_with('/') { - result - } else { - format!("/{}", result) - } - } else { - path.to_string() - } -} - /// Determines the final destination path based on the original request path fn get_destination_path(original_path: &str, prefix: &str) -> String { remove_prefix(original_path, prefix) @@ -212,13 +195,15 @@ async fn proxy_request( let (parts, body) = req.into_parts(); - let origin_header = parts.headers + let origin_header = parts + .headers .get(hyper::header::ORIGIN) .and_then(|v| v.to_str().ok()) .unwrap_or("") .to_string(); - let host_header = parts.headers + let host_header = parts + .headers .get(hyper::header::HOST) .and_then(|v| v.to_str().ok()) .unwrap_or("") @@ -348,15 +333,21 @@ async fn proxy_request( let sessions_guard = sessions.lock().await; if sessions_guard.is_empty() { - log::warn!("Request for model '{}' but no models are running.", model_id); - let mut error_response = Response::builder().status(StatusCode::SERVICE_UNAVAILABLE); - error_response = add_cors_headers_with_host_and_origin( + log::warn!( + "Request for model '{}' but no models are running.", + model_id + ); + let mut error_response = + Response::builder().status(StatusCode::SERVICE_UNAVAILABLE); + error_response = add_cors_headers_with_host_and_origin( error_response, &host_header, &origin_header, &config.trusted_hosts, ); - return Ok(error_response.body(Body::from("No models are available")).unwrap()); + return Ok(error_response + .body(Body::from("No models are available")) + .unwrap()); } if let Some(session) = sessions_guard @@ -365,10 +356,7 @@ async fn proxy_request( { target_port = Some(session.info.port); session_api_key = Some(session.info.api_key.clone()); - log::debug!( - "Found session for model_id {}", - model_id, - ); + log::debug!("Found session for model_id {}", model_id,); } else { log::warn!("No running session found for model_id: {}", model_id); let mut 
error_response = @@ -433,7 +421,7 @@ async fn proxy_request( serde_json::json!({ "id": session.info.model_id, "object": "model", - "created": 1, + "created": 1, "owned_by": "user" }) }) @@ -444,7 +432,8 @@ async fn proxy_request( "data": models_data }); - let body_str = serde_json::to_string(&response_json).unwrap_or_else(|_| "{}".to_string()); + let body_str = + serde_json::to_string(&response_json).unwrap_or_else(|_| "{}".to_string()); let mut response_builder = Response::builder() .status(StatusCode::OK) @@ -493,7 +482,9 @@ async fn proxy_request( let port = match target_port { Some(p) => p, None => { - log::error!("Internal API server routing error: target is None after successful lookup"); + log::error!( + "Internal API server routing error: target is None after successful lookup" + ); let mut error_response = Response::builder().status(StatusCode::INTERNAL_SERVER_ERROR); error_response = add_cors_headers_with_host_and_origin( error_response, @@ -599,11 +590,6 @@ async fn proxy_request( } } -fn is_cors_header(header_name: &str) -> bool { - let header_lower = header_name.to_lowercase(); - header_lower.starts_with("access-control-") -} - fn add_cors_headers_with_host_and_origin( builder: hyper::http::response::Builder, host: &str, @@ -632,50 +618,6 @@ fn add_cors_headers_with_host_and_origin( builder } -fn is_valid_host(host: &str, trusted_hosts: &[Vec]) -> bool { - if host.is_empty() { - return false; - } - - let host_without_port = if host.starts_with('[') { - host.split(']') - .next() - .unwrap_or(host) - .trim_start_matches('[') - } else { - host.split(':').next().unwrap_or(host) - }; - let default_valid_hosts = ["localhost", "127.0.0.1", "0.0.0.0"]; - - if default_valid_hosts - .iter() - .any(|&valid| host_without_port.to_lowercase() == valid.to_lowercase()) - { - return true; - } - - trusted_hosts.iter().flatten().any(|valid| { - let host_lower = host.to_lowercase(); - let valid_lower = valid.to_lowercase(); - - if host_lower == valid_lower { - return 
true; - } - - let valid_without_port = if valid.starts_with('[') { - valid - .split(']') - .next() - .unwrap_or(valid) - .trim_start_matches('[') - } else { - valid.split(':').next().unwrap_or(valid) - }; - - host_without_port.to_lowercase() == valid_without_port.to_lowercase() - }) -} - pub async fn is_server_running(server_handle: Arc>>) -> bool { let handle_guard = server_handle.lock().await; handle_guard.is_some() diff --git a/src-tauri/src/core/setup.rs b/src-tauri/src/core/setup.rs index 940122235..0168e8e57 100644 --- a/src-tauri/src/core/setup.rs +++ b/src-tauri/src/core/setup.rs @@ -5,7 +5,7 @@ use std::{ path::PathBuf, }; use tar::Archive; -use tauri::{App, Emitter, Listener, Manager}; +use tauri::{App, Emitter, Manager}; use tauri_plugin_store::StoreExt; // use tokio::sync::Mutex; // use tokio::time::{sleep, Duration}; // Using tokio::sync::Mutex @@ -13,9 +13,8 @@ use tauri_plugin_store::StoreExt; // MCP use super::{ - cmd::{get_jan_data_folder_path, get_jan_extensions_path}, - mcp::run_mcp_commands, - state::AppState, + app::commands::get_jan_data_folder_path, extensions::commands::get_jan_extensions_path, + mcp::helpers::run_mcp_commands, state::AppState, }; pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), String> { @@ -165,7 +164,7 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), Stri Ok(()) } -fn extract_extension_manifest( +pub fn extract_extension_manifest( archive: &mut Archive, ) -> Result, String> { let entry = archive @@ -199,30 +198,6 @@ pub fn setup_mcp(app: &App) { let state = app.state::(); let servers = state.mcp_servers.clone(); let app_handle: tauri::AppHandle = app.handle().clone(); - // Setup kill-mcp-servers event listener (similar to kill-sidecar) - let app_handle_for_kill = app_handle.clone(); - app_handle.listen("kill-mcp-servers", move |_event| { - let app_handle = app_handle_for_kill.clone(); - tauri::async_runtime::spawn(async move { - log::info!("Received kill-mcp-servers 
event - cleaning up MCP servers"); - let app_state = app_handle.state::(); - // Stop all running MCP servers - if let Err(e) = super::mcp::stop_mcp_servers(app_state.mcp_servers.clone()).await { - log::error!("Failed to stop MCP servers: {}", e); - return; - } - // Clear active servers and restart counts - { - let mut active_servers = app_state.mcp_active_servers.lock().await; - active_servers.clear(); - } - { - let mut restart_counts = app_state.mcp_restart_counts.lock().await; - restart_counts.clear(); - } - log::info!("MCP servers cleaned up successfully"); - }); - }); tauri::async_runtime::spawn(async move { if let Err(e) = run_mcp_commands(&app_handle, servers).await { log::error!("Failed to run mcp commands: {}", e); diff --git a/src-tauri/src/core/state.rs b/src-tauri/src/core/state.rs index 12cc34d4a..bda2eb40c 100644 --- a/src-tauri/src/core/state.rs +++ b/src-tauri/src/core/state.rs @@ -1,19 +1,12 @@ use std::{collections::HashMap, sync::Arc}; -use crate::core::utils::download::DownloadManagerState; -use rand::{distributions::Alphanumeric, Rng}; +use crate::core::downloads::models::DownloadManagerState; use rmcp::{service::RunningService, RoleClient}; use tokio::task::JoinHandle; /// Server handle type for managing the proxy server lifecycle pub type ServerHandle = JoinHandle>>; -use tokio::{process::Child, sync::Mutex}; -use crate::core::utils::extensions::inference_llamacpp_extension::server::SessionInfo; - -pub struct LLamaBackendSession { - pub child: Child, - pub info: SessionInfo, -} +use tokio::sync::Mutex; #[derive(Default)] pub struct AppState { @@ -24,12 +17,4 @@ pub struct AppState { pub mcp_active_servers: Arc>>, pub mcp_successfully_connected: Arc>>, pub server_handle: Arc>>, - pub llama_server_process: Arc>>, -} -pub fn generate_app_token() -> String { - rand::thread_rng() - .sample_iter(&Alphanumeric) - .take(32) - .map(char::from) - .collect() } diff --git a/src-tauri/src/core/system/commands.rs b/src-tauri/src/core/system/commands.rs new 
file mode 100644 index 000000000..a8b58d745 --- /dev/null +++ b/src-tauri/src/core/system/commands.rs @@ -0,0 +1,116 @@ +use std::fs; +use std::path::PathBuf; +use tauri::{AppHandle, Manager, State}; +use tauri_plugin_llamacpp::cleanup_llama_processes; + +use crate::core::app::commands::{ + default_data_folder_path, get_jan_data_folder_path, update_app_configuration, +}; +use crate::core::app::models::AppConfiguration; +use crate::core::mcp::helpers::clean_up_mcp_servers; +use crate::core::state::AppState; + +#[tauri::command] +pub fn factory_reset(app_handle: tauri::AppHandle, state: State<'_, AppState>) { + // close window + let windows = app_handle.webview_windows(); + for (label, window) in windows.iter() { + window.close().unwrap_or_else(|_| { + log::warn!("Failed to close window: {:?}", label); + }); + } + let data_folder = get_jan_data_folder_path(app_handle.clone()); + log::info!("Factory reset, removing data folder: {:?}", data_folder); + + tauri::async_runtime::block_on(async { + clean_up_mcp_servers(state.clone()).await; + let _ = cleanup_llama_processes(app_handle.clone()).await; + + if data_folder.exists() { + if let Err(e) = fs::remove_dir_all(&data_folder) { + log::error!("Failed to remove data folder: {}", e); + return; + } + } + + // Recreate the data folder + let _ = fs::create_dir_all(&data_folder).map_err(|e| e.to_string()); + + // Reset the configuration + let mut default_config = AppConfiguration::default(); + default_config.data_folder = default_data_folder_path(app_handle.clone()); + let _ = update_app_configuration(app_handle.clone(), default_config); + + app_handle.restart(); + }); +} + +#[tauri::command] +pub fn relaunch(app: AppHandle) { + app.restart() +} + +#[tauri::command] +pub fn open_app_directory(app: AppHandle) { + let app_path = app.path().app_data_dir().unwrap(); + if cfg!(target_os = "windows") { + std::process::Command::new("explorer") + .arg(app_path) + .spawn() + .expect("Failed to open app directory"); + } else if 
cfg!(target_os = "macos") { + std::process::Command::new("open") + .arg(app_path) + .spawn() + .expect("Failed to open app directory"); + } else { + std::process::Command::new("xdg-open") + .arg(app_path) + .spawn() + .expect("Failed to open app directory"); + } +} + +#[tauri::command] +pub fn open_file_explorer(path: String) { + let path = PathBuf::from(path); + if cfg!(target_os = "windows") { + std::process::Command::new("explorer") + .arg(path) + .spawn() + .expect("Failed to open file explorer"); + } else if cfg!(target_os = "macos") { + std::process::Command::new("open") + .arg(path) + .spawn() + .expect("Failed to open file explorer"); + } else { + std::process::Command::new("xdg-open") + .arg(path) + .spawn() + .expect("Failed to open file explorer"); + } +} + +#[tauri::command] +pub async fn read_logs(app: AppHandle) -> Result { + let log_path = get_jan_data_folder_path(app).join("logs").join("app.log"); + if log_path.exists() { + let content = fs::read_to_string(log_path).map_err(|e| e.to_string())?; + Ok(content) + } else { + Err(format!("Log file not found")) + } +} + +// check if a system library is available +#[tauri::command] +pub fn is_library_available(library: &str) -> bool { + match unsafe { libloading::Library::new(library) } { + Ok(_) => true, + Err(e) => { + log::info!("Library {} is not available: {}", library, e); + false + } + } +} diff --git a/src-tauri/src/core/system/mod.rs b/src-tauri/src/core/system/mod.rs new file mode 100644 index 000000000..82b6da3c0 --- /dev/null +++ b/src-tauri/src/core/system/mod.rs @@ -0,0 +1 @@ +pub mod commands; diff --git a/src-tauri/src/core/threads.rs b/src-tauri/src/core/threads.rs deleted file mode 100644 index 3554e287c..000000000 --- a/src-tauri/src/core/threads.rs +++ /dev/null @@ -1,613 +0,0 @@ -/*! - Thread and Message Persistence Module - - This module provides all logic for managing threads and their messages, including creation, modification, deletion, and listing. 
- Messages for each thread are persisted in a JSONL file (messages.jsonl) per thread directory. - - **Concurrency and Consistency Guarantee:** - - All operations that write or modify messages for a thread are protected by a global, per-thread asynchronous lock. - - This design ensures that only one operation can write to a thread's messages.jsonl file at a time, preventing race conditions. - - As a result, the messages.jsonl file for each thread is always consistent and never corrupted, even under concurrent access. -*/ - -use serde::{Deserialize, Serialize}; -use std::fs::{self, File}; -use std::io::{BufRead, BufReader, Write}; -use tauri::command; -use tauri::Runtime; -use uuid::Uuid; - -// For async file write serialization -use once_cell::sync::Lazy; -use std::collections::HashMap; -use std::sync::Arc; -use tokio::sync::Mutex; - -// Global per-thread locks for message file writes -static MESSAGE_LOCKS: Lazy>>>> = - Lazy::new(|| Mutex::new(HashMap::new())); - -use super::utils::{ - ensure_data_dirs, ensure_thread_dir_exists, get_data_dir, get_messages_path, get_thread_dir, - get_thread_metadata_path, THREADS_FILE, -}; - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct Thread { - pub id: String, - pub object: String, - pub title: String, - pub assistants: Vec, - pub created: i64, - pub updated: i64, - pub metadata: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ThreadMessage { - pub id: String, - pub object: String, - pub thread_id: String, - pub assistant_id: Option, - pub attachments: Option>, - pub role: String, - pub content: Vec, - pub status: String, - pub created_at: i64, - pub completed_at: i64, - pub metadata: Option, - pub type_: Option, - pub error_code: Option, - pub tool_call_id: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct Attachment { - pub file_id: Option, - pub tools: Option>, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(tag = "type")] -pub enum Tool { - 
#[serde(rename = "file_search")] - FileSearch, - #[serde(rename = "code_interpreter")] - CodeInterpreter, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ThreadContent { - pub type_: String, - pub text: Option, - pub image_url: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ContentValue { - pub value: String, - pub annotations: Vec, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ImageContentValue { - pub detail: Option, - pub url: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ThreadAssistantInfo { - pub id: String, - pub name: String, - pub model: ModelInfo, - pub instructions: Option, - pub tools: Option>, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ModelInfo { - pub id: String, - pub name: String, - pub settings: serde_json::Value, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(tag = "type")] -pub enum AssistantTool { - #[serde(rename = "code_interpreter")] - CodeInterpreter, - #[serde(rename = "retrieval")] - Retrieval, - #[serde(rename = "function")] - Function { - name: String, - description: Option, - parameters: Option, - }, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ThreadState { - pub has_more: bool, - pub waiting_for_response: bool, - pub error: Option, - pub last_message: Option, -} - -/// Lists all threads by reading their metadata from the threads directory. -/// Returns a vector of thread metadata as JSON values. -#[command] -pub async fn list_threads( - app_handle: tauri::AppHandle, -) -> Result, String> { - ensure_data_dirs(app_handle.clone())?; - let data_dir = get_data_dir(app_handle.clone()); - let mut threads = Vec::new(); - - if !data_dir.exists() { - return Ok(threads); - } - - for entry in fs::read_dir(&data_dir).map_err(|e| e.to_string())? 
{ - let entry = entry.map_err(|e| e.to_string())?; - let path = entry.path(); - if path.is_dir() { - let thread_metadata_path = path.join(THREADS_FILE); - if thread_metadata_path.exists() { - let data = fs::read_to_string(&thread_metadata_path).map_err(|e| e.to_string())?; - match serde_json::from_str(&data) { - Ok(thread) => threads.push(thread), - Err(e) => { - println!("Failed to parse thread file: {}", e); - continue; // skip invalid thread files - } - } - } - } - } - - Ok(threads) -} - -/// Creates a new thread, assigns it a unique ID, and persists its metadata. -/// Ensures the thread directory exists and writes thread.json. -#[command] -pub async fn create_thread( - app_handle: tauri::AppHandle, - mut thread: serde_json::Value, -) -> Result { - ensure_data_dirs(app_handle.clone())?; - let uuid = Uuid::new_v4().to_string(); - thread["id"] = serde_json::Value::String(uuid.clone()); - let thread_dir = get_thread_dir(app_handle.clone(), &uuid); - if !thread_dir.exists() { - fs::create_dir_all(&thread_dir).map_err(|e| e.to_string())?; - } - let path = get_thread_metadata_path(app_handle.clone(), &uuid); - let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?; - fs::write(path, data).map_err(|e| e.to_string())?; - Ok(thread) -} - -/// Modifies an existing thread's metadata by overwriting its thread.json file. -/// Returns an error if the thread directory does not exist. 
-#[command] -pub async fn modify_thread( - app_handle: tauri::AppHandle, - thread: serde_json::Value, -) -> Result<(), String> { - let thread_id = thread - .get("id") - .and_then(|id| id.as_str()) - .ok_or("Missing thread id")?; - let thread_dir = get_thread_dir(app_handle.clone(), thread_id); - if !thread_dir.exists() { - return Err("Thread directory does not exist".to_string()); - } - let path = get_thread_metadata_path(app_handle.clone(), thread_id); - let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?; - fs::write(path, data).map_err(|e| e.to_string())?; - Ok(()) -} - -/// Deletes a thread and all its associated files by removing its directory. -#[command] -pub async fn delete_thread( - app_handle: tauri::AppHandle, - thread_id: String, -) -> Result<(), String> { - let thread_dir = get_thread_dir(app_handle.clone(), &thread_id); - if thread_dir.exists() { - fs::remove_dir_all(thread_dir).map_err(|e| e.to_string())?; - } - Ok(()) -} - -/// Lists all messages for a given thread by reading and parsing its messages.jsonl file. -/// Returns a vector of message JSON values. -#[command] -pub async fn list_messages( - app_handle: tauri::AppHandle, - thread_id: String, -) -> Result, String> { - let path = get_messages_path(app_handle, &thread_id); - if !path.exists() { - return Ok(vec![]); - } - - let file = File::open(&path).map_err(|e| { - eprintln!("Error opening file {}: {}", path.display(), e); - e.to_string() - })?; - let reader = BufReader::new(file); - - let mut messages = Vec::new(); - for line in reader.lines() { - let line = line.map_err(|e| { - eprintln!("Error reading line from file {}: {}", path.display(), e); - e.to_string() - })?; - let message: serde_json::Value = serde_json::from_str(&line).map_err(|e| { - eprintln!( - "Error parsing JSON from line in file {}: {}", - path.display(), - e - ); - e.to_string() - })?; - messages.push(message); - } - - Ok(messages) -} - -/// Appends a new message to a thread's messages.jsonl file. 
-/// Uses a per-thread async lock to prevent race conditions and ensure file consistency. -#[command] -pub async fn create_message( - app_handle: tauri::AppHandle, - mut message: serde_json::Value, -) -> Result { - let thread_id = { - let id = message - .get("thread_id") - .and_then(|v| v.as_str()) - .ok_or("Missing thread_id")?; - id.to_string() - }; - ensure_thread_dir_exists(app_handle.clone(), &thread_id)?; - let path = get_messages_path(app_handle.clone(), &thread_id); - - if message.get("id").is_none() { - let uuid = Uuid::new_v4().to_string(); - message["id"] = serde_json::Value::String(uuid); - } - - // Acquire per-thread lock before writing - { - let mut locks = MESSAGE_LOCKS.lock().await; - let lock = locks - .entry(thread_id.to_string()) - .or_insert_with(|| Arc::new(Mutex::new(()))) - .clone(); - drop(locks); // Release the map lock before awaiting the file lock - - let _guard = lock.lock().await; - - let mut file: File = fs::OpenOptions::new() - .create(true) - .append(true) - .open(path) - .map_err(|e| e.to_string())?; - - let data = serde_json::to_string(&message).map_err(|e| e.to_string())?; - writeln!(file, "{}", data).map_err(|e| e.to_string())?; - } - - Ok(message) -} - -/// Modifies an existing message in a thread's messages.jsonl file. -/// Uses a per-thread async lock to prevent race conditions and ensure file consistency. -/// Rewrites the entire messages.jsonl file for the thread. 
-#[command] -pub async fn modify_message( - app_handle: tauri::AppHandle, - message: serde_json::Value, -) -> Result { - let thread_id = message - .get("thread_id") - .and_then(|v| v.as_str()) - .ok_or("Missing thread_id")?; - let message_id = message - .get("id") - .and_then(|v| v.as_str()) - .ok_or("Missing message id")?; - - // Acquire per-thread lock before modifying - { - let mut locks = MESSAGE_LOCKS.lock().await; - let lock = locks - .entry(thread_id.to_string()) - .or_insert_with(|| Arc::new(Mutex::new(()))) - .clone(); - drop(locks); // Release the map lock before awaiting the file lock - - let _guard = lock.lock().await; - - let mut messages = list_messages(app_handle.clone(), thread_id.to_string()).await?; - if let Some(index) = messages - .iter() - .position(|m| m.get("id").and_then(|v| v.as_str()) == Some(message_id)) - { - messages[index] = message.clone(); - - // Rewrite all messages - let path = get_messages_path(app_handle.clone(), thread_id); - let mut file = File::create(path).map_err(|e| e.to_string())?; - for msg in messages { - let data = serde_json::to_string(&msg).map_err(|e| e.to_string())?; - writeln!(file, "{}", data).map_err(|e| e.to_string())?; - } - } - } - Ok(message) -} - -/// Deletes a message from a thread's messages.jsonl file by message ID. -/// Rewrites the entire messages.jsonl file for the thread. -/// Uses a per-thread async lock to prevent race conditions and ensure file consistency. 
-#[command] -pub async fn delete_message( - app_handle: tauri::AppHandle, - thread_id: String, - message_id: String, -) -> Result<(), String> { - // Acquire per-thread lock before modifying - { - let mut locks = MESSAGE_LOCKS.lock().await; - let lock = locks - .entry(thread_id.to_string()) - .or_insert_with(|| Arc::new(Mutex::new(()))) - .clone(); - drop(locks); // Release the map lock before awaiting the file lock - - let _guard = lock.lock().await; - - let mut messages = list_messages(app_handle.clone(), thread_id.clone()).await?; - messages.retain(|m| m.get("id").and_then(|v| v.as_str()) != Some(message_id.as_str())); - - // Rewrite remaining messages - let path = get_messages_path(app_handle.clone(), &thread_id); - let mut file = File::create(path).map_err(|e| e.to_string())?; - for msg in messages { - let data = serde_json::to_string(&msg).map_err(|e| e.to_string())?; - writeln!(file, "{}", data).map_err(|e| e.to_string())?; - } - } - - Ok(()) -} - -/// Retrieves the first assistant associated with a thread. -/// Returns an error if the thread or assistant is not found. -#[command] -pub async fn get_thread_assistant( - app_handle: tauri::AppHandle, - thread_id: String, -) -> Result { - let path = get_thread_metadata_path(app_handle, &thread_id); - if !path.exists() { - return Err("Thread not found".to_string()); - } - let data = fs::read_to_string(&path).map_err(|e| e.to_string())?; - let thread: serde_json::Value = serde_json::from_str(&data).map_err(|e| e.to_string())?; - if let Some(assistants) = thread.get("assistants").and_then(|a| a.as_array()) { - if let Some(first) = assistants.get(0) { - Ok(first.clone()) - } else { - Err("Assistant not found".to_string()) - } - } else { - Err("Assistant not found".to_string()) - } -} - -/// Adds a new assistant to a thread's metadata. -/// Updates thread.json with the new assistant information. 
-#[command] -pub async fn create_thread_assistant( - app_handle: tauri::AppHandle, - thread_id: String, - assistant: serde_json::Value, -) -> Result { - let path = get_thread_metadata_path(app_handle.clone(), &thread_id); - if !path.exists() { - return Err("Thread not found".to_string()); - } - let mut thread: serde_json::Value = { - let data = fs::read_to_string(&path).map_err(|e| e.to_string())?; - serde_json::from_str(&data).map_err(|e| e.to_string())? - }; - if let Some(assistants) = thread.get_mut("assistants").and_then(|a| a.as_array_mut()) { - assistants.push(assistant.clone()); - } else { - thread["assistants"] = serde_json::Value::Array(vec![assistant.clone()]); - } - let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?; - fs::write(&path, data).map_err(|e| e.to_string())?; - Ok(assistant) -} - -/// Modifies an existing assistant's information in a thread's metadata. -/// Updates thread.json with the modified assistant data. -#[command] -pub async fn modify_thread_assistant( - app_handle: tauri::AppHandle, - thread_id: String, - assistant: serde_json::Value, -) -> Result { - let path = get_thread_metadata_path(app_handle.clone(), &thread_id); - if !path.exists() { - return Err("Thread not found".to_string()); - } - let mut thread: serde_json::Value = { - let data = fs::read_to_string(&path).map_err(|e| e.to_string())?; - serde_json::from_str(&data).map_err(|e| e.to_string())? 
- }; - let assistant_id = assistant - .get("id") - .and_then(|v| v.as_str()) - .ok_or("Missing id")?; - if let Some(assistants) = thread - .get_mut("assistants") - .and_then(|a: &mut serde_json::Value| a.as_array_mut()) - { - if let Some(index) = assistants - .iter() - .position(|a| a.get("id").and_then(|v| v.as_str()) == Some(assistant_id)) - { - assistants[index] = assistant.clone(); - let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?; - fs::write(&path, data).map_err(|e| e.to_string())?; - } - } - Ok(assistant) -} - -#[cfg(test)] -mod tests { - use crate::core::cmd::get_jan_data_folder_path; - - use super::*; - use serde_json::json; - use std::fs; - use std::path::PathBuf; - use tauri::test::{mock_app, MockRuntime}; - - // Helper to create a mock app handle with a temp data dir - fn mock_app_with_temp_data_dir() -> (tauri::App, PathBuf) { - let app = mock_app(); - let data_dir = get_jan_data_folder_path(app.handle().clone()); - println!("Mock app data dir: {}", data_dir.display()); - // Patch get_data_dir to use temp dir (requires get_data_dir to be overridable or injectable) - // For now, we assume get_data_dir uses tauri::api::path::app_data_dir(&app_handle) - // and that we can set the environment variable to redirect it. 
- (app, data_dir) - } - - #[tokio::test] - async fn test_create_and_list_threads() { - let (app, data_dir) = mock_app_with_temp_data_dir(); - // Create a thread - let thread = json!({ - "object": "thread", - "title": "Test Thread", - "assistants": [], - "created": 1234567890, - "updated": 1234567890, - "metadata": null - }); - let created = create_thread(app.handle().clone(), thread.clone()) - .await - .unwrap(); - assert_eq!(created["title"], "Test Thread"); - - // List threads - let threads = list_threads(app.handle().clone()).await.unwrap(); - assert!(threads.len() > 0); - - // Clean up - fs::remove_dir_all(data_dir).unwrap(); - } - - #[tokio::test] - async fn test_create_and_list_messages() { - let (app, data_dir) = mock_app_with_temp_data_dir(); - // Create a thread first - let thread = json!({ - "object": "thread", - "title": "Msg Thread", - "assistants": [], - "created": 123, - "updated": 123, - "metadata": null - }); - let created = create_thread(app.handle().clone(), thread.clone()) - .await - .unwrap(); - let thread_id = created["id"].as_str().unwrap().to_string(); - - // Create a message - let message = json!({ - "object": "message", - "thread_id": thread_id, - "assistant_id": null, - "attachments": null, - "role": "user", - "content": [], - "status": "sent", - "created_at": 123, - "completed_at": 123, - "metadata": null, - "type_": null, - "error_code": null, - "tool_call_id": null - }); - let created_msg = create_message(app.handle().clone(), message).await.unwrap(); - assert_eq!(created_msg["role"], "user"); - - // List messages - let messages = list_messages(app.handle().clone(), thread_id.clone()) - .await - .unwrap(); - assert!(messages.len() > 0); - assert_eq!(messages[0]["role"], "user"); - - // Clean up - fs::remove_dir_all(data_dir).unwrap(); - } - - #[tokio::test] - async fn test_create_and_get_thread_assistant() { - let (app, data_dir) = mock_app_with_temp_data_dir(); - // Create a thread - let thread = json!({ - "object": "thread", - 
"title": "Assistant Thread", - "assistants": [], - "created": 1, - "updated": 1, - "metadata": null - }); - let created = create_thread(app.handle().clone(), thread.clone()) - .await - .unwrap(); - let thread_id = created["id"].as_str().unwrap().to_string(); - - // Add assistant - let assistant = json!({ - "id": "assistant-1", - "assistant_name": "Test Assistant", - "model": { - "id": "model-1", - "name": "Test Model", - "settings": json!({}) - }, - "instructions": null, - "tools": null - }); - let _ = create_thread_assistant(app.handle().clone(), thread_id.clone(), assistant.clone()) - .await - .unwrap(); - - // Get assistant - let got = get_thread_assistant(app.handle().clone(), thread_id.clone()) - .await - .unwrap(); - assert_eq!(got["assistant_name"], "Test Assistant"); - - // Clean up - fs::remove_dir_all(data_dir).unwrap(); - } -} diff --git a/src-tauri/src/core/threads/commands.rs b/src-tauri/src/core/threads/commands.rs new file mode 100644 index 000000000..a9012193a --- /dev/null +++ b/src-tauri/src/core/threads/commands.rs @@ -0,0 +1,300 @@ +use std::fs::{self, File}; +use std::io::Write; +use tauri::Runtime; +use uuid::Uuid; + +use super::helpers::{ + get_lock_for_thread, read_messages_from_file, update_thread_metadata, write_messages_to_file, +}; +use super::{ + constants::THREADS_FILE, + utils::{ + ensure_data_dirs, ensure_thread_dir_exists, get_data_dir, get_messages_path, + get_thread_dir, get_thread_metadata_path, + }, +}; + +/// Lists all threads by reading their metadata from the threads directory. +/// Returns a vector of thread metadata as JSON values. +#[tauri::command] +pub async fn list_threads( + app_handle: tauri::AppHandle, +) -> Result, String> { + ensure_data_dirs(app_handle.clone())?; + let data_dir = get_data_dir(app_handle.clone()); + let mut threads = Vec::new(); + + if !data_dir.exists() { + return Ok(threads); + } + + for entry in fs::read_dir(&data_dir).map_err(|e| e.to_string())? 
{ + let entry = entry.map_err(|e| e.to_string())?; + let path = entry.path(); + if path.is_dir() { + let thread_metadata_path = path.join(THREADS_FILE); + if thread_metadata_path.exists() { + let data = fs::read_to_string(&thread_metadata_path).map_err(|e| e.to_string())?; + match serde_json::from_str(&data) { + Ok(thread) => threads.push(thread), + Err(e) => { + println!("Failed to parse thread file: {}", e); + continue; // skip invalid thread files + } + } + } + } + } + + Ok(threads) +} + +/// Creates a new thread, assigns it a unique ID, and persists its metadata. +/// Ensures the thread directory exists and writes thread.json. +#[tauri::command] +pub async fn create_thread( + app_handle: tauri::AppHandle, + mut thread: serde_json::Value, +) -> Result { + ensure_data_dirs(app_handle.clone())?; + let uuid = Uuid::new_v4().to_string(); + thread["id"] = serde_json::Value::String(uuid.clone()); + let thread_dir = get_thread_dir(app_handle.clone(), &uuid); + if !thread_dir.exists() { + fs::create_dir_all(&thread_dir).map_err(|e| e.to_string())?; + } + let path = get_thread_metadata_path(app_handle.clone(), &uuid); + let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?; + fs::write(path, data).map_err(|e| e.to_string())?; + Ok(thread) +} + +/// Modifies an existing thread's metadata by overwriting its thread.json file. +/// Returns an error if the thread directory does not exist. 
+#[tauri::command] +pub async fn modify_thread( + app_handle: tauri::AppHandle, + thread: serde_json::Value, +) -> Result<(), String> { + let thread_id = thread + .get("id") + .and_then(|id| id.as_str()) + .ok_or("Missing thread id")?; + let thread_dir = get_thread_dir(app_handle.clone(), thread_id); + if !thread_dir.exists() { + return Err("Thread directory does not exist".to_string()); + } + let path = get_thread_metadata_path(app_handle.clone(), thread_id); + let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?; + fs::write(path, data).map_err(|e| e.to_string())?; + Ok(()) +} + +/// Deletes a thread and all its associated files by removing its directory. +#[tauri::command] +pub async fn delete_thread( + app_handle: tauri::AppHandle, + thread_id: String, +) -> Result<(), String> { + let thread_dir = get_thread_dir(app_handle.clone(), &thread_id); + if thread_dir.exists() { + let _ = fs::remove_dir_all(thread_dir); + } + Ok(()) +} + +/// Lists all messages for a given thread by reading and parsing its messages.jsonl file. +/// Returns a vector of message JSON values. +#[tauri::command] +pub async fn list_messages( + app_handle: tauri::AppHandle, + thread_id: String, +) -> Result, String> { + read_messages_from_file(app_handle, &thread_id) +} + +/// Appends a new message to a thread's messages.jsonl file. +/// Uses a per-thread async lock to prevent race conditions and ensure file consistency. 
+#[tauri::command] +pub async fn create_message( + app_handle: tauri::AppHandle, + mut message: serde_json::Value, +) -> Result { + let thread_id = { + let id = message + .get("thread_id") + .and_then(|v| v.as_str()) + .ok_or("Missing thread_id")?; + id.to_string() + }; + ensure_thread_dir_exists(app_handle.clone(), &thread_id)?; + let path = get_messages_path(app_handle.clone(), &thread_id); + + if message.get("id").is_none() { + let uuid = Uuid::new_v4().to_string(); + message["id"] = serde_json::Value::String(uuid); + } + + // Acquire per-thread lock before writing + { + let lock = get_lock_for_thread(&thread_id).await; + let _guard = lock.lock().await; + + let mut file: File = fs::OpenOptions::new() + .create(true) + .append(true) + .open(path) + .map_err(|e| e.to_string())?; + + let data = serde_json::to_string(&message).map_err(|e| e.to_string())?; + writeln!(file, "{}", data).map_err(|e| e.to_string())?; + } + + Ok(message) +} + +/// Modifies an existing message in a thread's messages.jsonl file. +/// Uses a per-thread async lock to prevent race conditions and ensure file consistency. +/// Rewrites the entire messages.jsonl file for the thread. 
+#[tauri::command] +pub async fn modify_message( + app_handle: tauri::AppHandle, + message: serde_json::Value, +) -> Result { + let thread_id = message + .get("thread_id") + .and_then(|v| v.as_str()) + .ok_or("Missing thread_id")?; + let message_id = message + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing message id")?; + + // Acquire per-thread lock before modifying + { + let lock = get_lock_for_thread(thread_id).await; + let _guard = lock.lock().await; + + let mut messages = read_messages_from_file(app_handle.clone(), thread_id)?; + if let Some(index) = messages + .iter() + .position(|m| m.get("id").and_then(|v| v.as_str()) == Some(message_id)) + { + messages[index] = message.clone(); + + // Rewrite all messages + let path = get_messages_path(app_handle.clone(), thread_id); + write_messages_to_file(&messages, &path)?; + } + } + Ok(message) +} + +/// Deletes a message from a thread's messages.jsonl file by message ID. +/// Rewrites the entire messages.jsonl file for the thread. +/// Uses a per-thread async lock to prevent race conditions and ensure file consistency. +#[tauri::command] +pub async fn delete_message( + app_handle: tauri::AppHandle, + thread_id: String, + message_id: String, +) -> Result<(), String> { + // Acquire per-thread lock before modifying + { + let lock = get_lock_for_thread(&thread_id).await; + let _guard = lock.lock().await; + + let mut messages = read_messages_from_file(app_handle.clone(), &thread_id)?; + messages.retain(|m| m.get("id").and_then(|v| v.as_str()) != Some(message_id.as_str())); + + // Rewrite remaining messages + let path = get_messages_path(app_handle.clone(), &thread_id); + write_messages_to_file(&messages, &path)?; + } + + Ok(()) +} + +/// Retrieves the first assistant associated with a thread. +/// Returns an error if the thread or assistant is not found. 
+#[tauri::command] +pub async fn get_thread_assistant( + app_handle: tauri::AppHandle, + thread_id: String, +) -> Result { + let path = get_thread_metadata_path(app_handle, &thread_id); + if !path.exists() { + return Err("Thread not found".to_string()); + } + let data = fs::read_to_string(&path).map_err(|e| e.to_string())?; + let thread: serde_json::Value = serde_json::from_str(&data).map_err(|e| e.to_string())?; + if let Some(assistants) = thread.get("assistants").and_then(|a| a.as_array()) { + if let Some(first) = assistants.get(0) { + Ok(first.clone()) + } else { + Err("Assistant not found".to_string()) + } + } else { + Err("Assistant not found".to_string()) + } +} + +/// Adds a new assistant to a thread's metadata. +/// Updates thread.json with the new assistant information. +#[tauri::command] +pub async fn create_thread_assistant( + app_handle: tauri::AppHandle, + thread_id: String, + assistant: serde_json::Value, +) -> Result { + let path = get_thread_metadata_path(app_handle.clone(), &thread_id); + if !path.exists() { + return Err("Thread not found".to_string()); + } + let mut thread: serde_json::Value = { + let data = fs::read_to_string(&path).map_err(|e| e.to_string())?; + serde_json::from_str(&data).map_err(|e| e.to_string())? + }; + if let Some(assistants) = thread.get_mut("assistants").and_then(|a| a.as_array_mut()) { + assistants.push(assistant.clone()); + } else { + thread["assistants"] = serde_json::Value::Array(vec![assistant.clone()]); + } + update_thread_metadata(app_handle, &thread_id, &thread)?; + Ok(assistant) +} + +/// Modifies an existing assistant's information in a thread's metadata. +/// Updates thread.json with the modified assistant data. 
+#[tauri::command] +pub async fn modify_thread_assistant( + app_handle: tauri::AppHandle, + thread_id: String, + assistant: serde_json::Value, +) -> Result { + let path = get_thread_metadata_path(app_handle.clone(), &thread_id); + if !path.exists() { + return Err("Thread not found".to_string()); + } + let mut thread: serde_json::Value = { + let data = fs::read_to_string(&path).map_err(|e| e.to_string())?; + serde_json::from_str(&data).map_err(|e| e.to_string())? + }; + let assistant_id = assistant + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing id")?; + if let Some(assistants) = thread + .get_mut("assistants") + .and_then(|a: &mut serde_json::Value| a.as_array_mut()) + { + if let Some(index) = assistants + .iter() + .position(|a| a.get("id").and_then(|v| v.as_str()) == Some(assistant_id)) + { + assistants[index] = assistant.clone(); + update_thread_metadata(app_handle, &thread_id, &thread)?; + } + } + Ok(assistant) +} diff --git a/src-tauri/src/core/threads/constants.rs b/src-tauri/src/core/threads/constants.rs new file mode 100644 index 000000000..99902b217 --- /dev/null +++ b/src-tauri/src/core/threads/constants.rs @@ -0,0 +1,4 @@ +// Thread Constants +pub const THREADS_DIR: &str = "threads"; +pub const THREADS_FILE: &str = "thread.json"; +pub const MESSAGES_FILE: &str = "messages.jsonl"; diff --git a/src-tauri/src/core/threads/helpers.rs b/src-tauri/src/core/threads/helpers.rs new file mode 100644 index 000000000..0edcf41b2 --- /dev/null +++ b/src-tauri/src/core/threads/helpers.rs @@ -0,0 +1,87 @@ +use std::fs::{self, File}; +use std::io::{BufRead, BufReader, Write}; +use tauri::Runtime; + +// For async file write serialization +use once_cell::sync::Lazy; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::Mutex; + +use super::utils::{get_messages_path, get_thread_metadata_path}; + +// Global per-thread locks for message file writes +pub static MESSAGE_LOCKS: Lazy>>>> = + Lazy::new(|| Mutex::new(HashMap::new())); + +/// Get a lock 
for a specific thread to ensure thread-safe message file operations +pub async fn get_lock_for_thread(thread_id: &str) -> Arc> { + let mut locks = MESSAGE_LOCKS.lock().await; + let lock = locks + .entry(thread_id.to_string()) + .or_insert_with(|| Arc::new(Mutex::new(()))) + .clone(); + drop(locks); // Release the map lock before returning the file lock + lock +} + +/// Write messages to a thread's messages.jsonl file +pub fn write_messages_to_file( + messages: &[serde_json::Value], + path: &std::path::Path, +) -> Result<(), String> { + let mut file = File::create(path).map_err(|e| e.to_string())?; + for msg in messages { + let data = serde_json::to_string(msg).map_err(|e| e.to_string())?; + writeln!(file, "{}", data).map_err(|e| e.to_string())?; + } + Ok(()) +} + +/// Read messages from a thread's messages.jsonl file +pub fn read_messages_from_file( + app_handle: tauri::AppHandle, + thread_id: &str, +) -> Result, String> { + let path = get_messages_path(app_handle, thread_id); + if !path.exists() { + return Ok(vec![]); + } + + let file = File::open(&path).map_err(|e| { + eprintln!("Error opening file {}: {}", path.display(), e); + e.to_string() + })?; + let reader = BufReader::new(file); + + let mut messages = Vec::new(); + for line in reader.lines() { + let line = line.map_err(|e| { + eprintln!("Error reading line from file {}: {}", path.display(), e); + e.to_string() + })?; + let message: serde_json::Value = serde_json::from_str(&line).map_err(|e| { + eprintln!( + "Error parsing JSON from line in file {}: {}", + path.display(), + e + ); + e.to_string() + })?; + messages.push(message); + } + + Ok(messages) +} + +/// Update thread metadata by writing to thread.json +pub fn update_thread_metadata( + app_handle: tauri::AppHandle, + thread_id: &str, + thread: &serde_json::Value, +) -> Result<(), String> { + let path = get_thread_metadata_path(app_handle, thread_id); + let data = serde_json::to_string_pretty(thread).map_err(|e| e.to_string())?; + fs::write(path, 
data).map_err(|e| e.to_string())?; + Ok(()) +} diff --git a/src-tauri/src/core/threads/mod.rs b/src-tauri/src/core/threads/mod.rs new file mode 100644 index 000000000..fb76bee8c --- /dev/null +++ b/src-tauri/src/core/threads/mod.rs @@ -0,0 +1,20 @@ +/*! + Thread and Message Persistence Module + + This module provides all logic for managing threads and their messages, including creation, modification, deletion, and listing. + Messages for each thread are persisted in a JSONL file (messages.jsonl) per thread directory. + + **Concurrency and Consistency Guarantee:** + - All operations that write or modify messages for a thread are protected by a global, per-thread asynchronous lock. + - This design ensures that only one operation can write to a thread's messages.jsonl file at a time, preventing race conditions. + - As a result, the messages.jsonl file for each thread is always consistent and never corrupted, even under concurrent access. +*/ + +pub mod commands; +mod constants; +pub mod helpers; +pub mod models; +pub mod utils; + +#[cfg(test)] +mod tests; diff --git a/src-tauri/src/core/threads/models.rs b/src-tauri/src/core/threads/models.rs new file mode 100644 index 000000000..5038c6def --- /dev/null +++ b/src-tauri/src/core/threads/models.rs @@ -0,0 +1,103 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct Thread { + pub id: String, + pub object: String, + pub title: String, + pub assistants: Vec, + pub created: i64, + pub updated: i64, + pub metadata: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ThreadMessage { + pub id: String, + pub object: String, + pub thread_id: String, + pub assistant_id: Option, + pub attachments: Option>, + pub role: String, + pub content: Vec, + pub status: String, + pub created_at: i64, + pub completed_at: i64, + pub metadata: Option, + pub type_: Option, + pub error_code: Option, + pub tool_call_id: Option, +} + +#[derive(Debug, Serialize, Deserialize, 
Clone)] +pub struct Attachment { + pub file_id: Option, + pub tools: Option>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(tag = "type")] +pub enum Tool { + #[serde(rename = "file_search")] + FileSearch, + #[serde(rename = "code_interpreter")] + CodeInterpreter, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ThreadContent { + pub type_: String, + pub text: Option, + pub image_url: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ContentValue { + pub value: String, + pub annotations: Vec, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ImageContentValue { + pub detail: Option, + pub url: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ThreadAssistantInfo { + pub id: String, + pub name: String, + pub model: ModelInfo, + pub instructions: Option, + pub tools: Option>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ModelInfo { + pub id: String, + pub name: String, + pub settings: serde_json::Value, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(tag = "type")] +pub enum AssistantTool { + #[serde(rename = "code_interpreter")] + CodeInterpreter, + #[serde(rename = "retrieval")] + Retrieval, + #[serde(rename = "function")] + Function { + name: String, + description: Option, + parameters: Option, + }, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ThreadState { + pub has_more: bool, + pub waiting_for_response: bool, + pub error: Option, + pub last_message: Option, +} diff --git a/src-tauri/src/core/threads/tests.rs b/src-tauri/src/core/threads/tests.rs new file mode 100644 index 000000000..5b4aaec57 --- /dev/null +++ b/src-tauri/src/core/threads/tests.rs @@ -0,0 +1,133 @@ +use crate::core::app::commands::get_jan_data_folder_path; + +use super::commands::*; +use serde_json::json; +use std::fs; +use std::path::PathBuf; +use tauri::test::{mock_app, MockRuntime}; + +// Helper to create a mock app handle with a temp data 
dir +fn mock_app_with_temp_data_dir() -> (tauri::App, PathBuf) { + let app = mock_app(); + let data_dir = get_jan_data_folder_path(app.handle().clone()); + println!("Mock app data dir: {}", data_dir.display()); + // Patch get_data_dir to use temp dir (requires get_data_dir to be overridable or injectable) + // For now, we assume get_data_dir uses tauri::api::path::app_data_dir(&app_handle) + // and that we can set the environment variable to redirect it. + (app, data_dir) +} + +#[tokio::test] +async fn test_create_and_list_threads() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + // Create a thread + let thread = json!({ + "object": "thread", + "title": "Test Thread", + "assistants": [], + "created": 1234567890, + "updated": 1234567890, + "metadata": null + }); + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + assert_eq!(created["title"], "Test Thread"); + + // List threads + let threads = list_threads(app.handle().clone()).await.unwrap(); + assert!(threads.len() > 0); + + // Clean up + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_create_and_list_messages() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + // Create a thread first + let thread = json!({ + "object": "thread", + "title": "Msg Thread", + "assistants": [], + "created": 123, + "updated": 123, + "metadata": null + }); + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + // Create a message + let message = json!({ + "object": "message", + "thread_id": thread_id, + "assistant_id": null, + "attachments": null, + "role": "user", + "content": [], + "status": "sent", + "created_at": 123, + "completed_at": 123, + "metadata": null, + "type_": null, + "error_code": null, + "tool_call_id": null + }); + let created_msg = create_message(app.handle().clone(), message).await.unwrap(); + assert_eq!(created_msg["role"], "user"); 
+ + // List messages + let messages = list_messages(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + assert!(messages.len() > 0); + assert_eq!(messages[0]["role"], "user"); + + // Clean up + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_create_and_get_thread_assistant() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + // Create a thread + let thread = json!({ + "object": "thread", + "title": "Assistant Thread", + "assistants": [], + "created": 1, + "updated": 1, + "metadata": null + }); + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + // Add assistant + let assistant = json!({ + "id": "assistant-1", + "assistant_name": "Test Assistant", + "model": { + "id": "model-1", + "name": "Test Model", + "settings": json!({}) + }, + "instructions": null, + "tools": null + }); + let _ = create_thread_assistant(app.handle().clone(), thread_id.clone(), assistant.clone()) + .await + .unwrap(); + + // Get assistant + let got = get_thread_assistant(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + assert_eq!(got["assistant_name"], "Test Assistant"); + + // Clean up + let _ = fs::remove_dir_all(data_dir); +} diff --git a/src-tauri/src/core/threads/utils.rs b/src-tauri/src/core/threads/utils.rs new file mode 100644 index 000000000..b5b6b5e3c --- /dev/null +++ b/src-tauri/src/core/threads/utils.rs @@ -0,0 +1,45 @@ +use std::fs; +use std::path::PathBuf; +use tauri::Runtime; + +use super::constants::{MESSAGES_FILE, THREADS_DIR, THREADS_FILE}; +use crate::core::app::commands::get_jan_data_folder_path; + +pub fn get_data_dir(app_handle: tauri::AppHandle) -> PathBuf { + get_jan_data_folder_path(app_handle).join(THREADS_DIR) +} + +pub fn get_thread_dir(app_handle: tauri::AppHandle, thread_id: &str) -> PathBuf { + get_data_dir(app_handle).join(thread_id) +} + +pub fn get_thread_metadata_path( + app_handle: 
tauri::AppHandle, + thread_id: &str, +) -> PathBuf { + get_thread_dir(app_handle, thread_id).join(THREADS_FILE) +} + +pub fn get_messages_path(app_handle: tauri::AppHandle, thread_id: &str) -> PathBuf { + get_thread_dir(app_handle, thread_id).join(MESSAGES_FILE) +} + +pub fn ensure_data_dirs(app_handle: tauri::AppHandle) -> Result<(), String> { + let data_dir = get_data_dir(app_handle.clone()); + if !data_dir.exists() { + fs::create_dir_all(&data_dir).map_err(|e| e.to_string())?; + } + Ok(()) +} + +pub fn ensure_thread_dir_exists( + app_handle: tauri::AppHandle, + thread_id: &str, +) -> Result<(), String> { + ensure_data_dirs(app_handle.clone())?; + let thread_dir = get_thread_dir(app_handle, thread_id); + if !thread_dir.exists() { + fs::create_dir_all(&thread_dir).map_err(|e| e.to_string())?; + } + Ok(()) +} diff --git a/src-tauri/src/core/utils/download.rs b/src-tauri/src/core/utils/download.rs deleted file mode 100644 index f3facfda1..000000000 --- a/src-tauri/src/core/utils/download.rs +++ /dev/null @@ -1,786 +0,0 @@ -use crate::core::cmd::get_jan_data_folder_path; -use crate::core::state::AppState; -use crate::core::utils::normalize_path; -use futures_util::StreamExt; -use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; -use std::collections::HashMap; -use std::time::Duration; -use tauri::{Emitter, State}; -use tokio::fs::File; -use tokio::io::AsyncWriteExt; -use tokio_util::sync::CancellationToken; -use url::Url; - -#[derive(Default)] -pub struct DownloadManagerState { - pub cancel_tokens: HashMap, -} - -#[derive(serde::Deserialize, Clone, Debug)] -pub struct ProxyConfig { - pub url: String, - pub username: Option, - pub password: Option, - pub no_proxy: Option>, // List of domains to bypass proxy - pub ignore_ssl: Option, // Ignore SSL certificate verification -} - -#[derive(serde::Deserialize, Clone, Debug)] -pub struct DownloadItem { - pub url: String, - pub save_path: String, - pub proxy: Option, -} - -#[derive(serde::Serialize, Clone, Debug)] -pub 
struct DownloadEvent { - pub transferred: u64, - pub total: u64, -} - -fn err_to_string(e: E) -> String { - format!("Error: {}", e) -} - -fn validate_proxy_config(config: &ProxyConfig) -> Result<(), String> { - // Validate proxy URL format - if let Err(e) = Url::parse(&config.url) { - return Err(format!("Invalid proxy URL '{}': {}", config.url, e)); - } - - // Check if proxy URL has valid scheme - let url = Url::parse(&config.url).unwrap(); // Safe to unwrap as we just validated it - match url.scheme() { - "http" | "https" | "socks4" | "socks5" => {} - scheme => return Err(format!("Unsupported proxy scheme: {}", scheme)), - } - - // Validate authentication credentials - if config.username.is_some() && config.password.is_none() { - return Err("Username provided without password".to_string()); - } - - if config.password.is_some() && config.username.is_none() { - return Err("Password provided without username".to_string()); - } - - // Validate no_proxy entries - if let Some(no_proxy) = &config.no_proxy { - for entry in no_proxy { - if entry.is_empty() { - return Err("Empty no_proxy entry".to_string()); - } - // Basic validation for wildcard patterns - if entry.starts_with("*.") && entry.len() < 3 { - return Err(format!("Invalid wildcard pattern: {}", entry)); - } - } - } - - // SSL verification settings are all optional booleans, no validation needed - - Ok(()) -} - -fn create_proxy_from_config(config: &ProxyConfig) -> Result { - // Validate the configuration first - validate_proxy_config(config)?; - - let mut proxy = reqwest::Proxy::all(&config.url).map_err(err_to_string)?; - - // Add authentication if provided - if let (Some(username), Some(password)) = (&config.username, &config.password) { - proxy = proxy.basic_auth(username, password); - } - - Ok(proxy) -} - -fn should_bypass_proxy(url: &str, no_proxy: &[String]) -> bool { - if no_proxy.is_empty() { - return false; - } - - // Parse the URL to get the host - let parsed_url = match Url::parse(url) { - Ok(u) => u, - 
Err(_) => return false, - }; - - let host = match parsed_url.host_str() { - Some(h) => h, - None => return false, - }; - - // Check if host matches any no_proxy entry - for entry in no_proxy { - if entry == "*" { - return true; - } - - // Simple wildcard matching - if entry.starts_with("*.") { - let domain = &entry[2..]; - if host.ends_with(domain) { - return true; - } - } else if host == entry { - return true; - } - } - - false -} - -fn _get_client_for_item( - item: &DownloadItem, - header_map: &HeaderMap, -) -> Result { - let mut client_builder = reqwest::Client::builder() - .http2_keep_alive_timeout(Duration::from_secs(15)) - .default_headers(header_map.clone()); - - // Add proxy configuration if provided - if let Some(proxy_config) = &item.proxy { - // Handle SSL verification settings - if proxy_config.ignore_ssl.unwrap_or(false) { - client_builder = client_builder.danger_accept_invalid_certs(true); - log::info!("SSL certificate verification disabled for URL {}", item.url); - } - - // Note: reqwest doesn't have fine-grained SSL verification controls - // for verify_proxy_ssl, verify_proxy_host_ssl, verify_peer_ssl, verify_host_ssl - // These settings are handled by the underlying TLS implementation - - // Check if this URL should bypass proxy - let no_proxy = proxy_config.no_proxy.as_deref().unwrap_or(&[]); - if !should_bypass_proxy(&item.url, no_proxy) { - let proxy = create_proxy_from_config(proxy_config)?; - client_builder = client_builder.proxy(proxy); - log::info!("Using proxy {} for URL {}", proxy_config.url, item.url); - } else { - log::info!("Bypassing proxy for URL {}", item.url); - } - } - - client_builder.build().map_err(err_to_string) -} - -#[tauri::command] -pub async fn download_files( - app: tauri::AppHandle, - state: State<'_, AppState>, - items: Vec, - task_id: &str, - headers: HashMap, -) -> Result<(), String> { - // insert cancel tokens - let cancel_token = CancellationToken::new(); - { - let mut download_manager = 
state.download_manager.lock().await; - if download_manager.cancel_tokens.contains_key(task_id) { - return Err(format!("task_id {} exists", task_id)); - } - download_manager - .cancel_tokens - .insert(task_id.to_string(), cancel_token.clone()); - } - // TODO: Support resuming downloads when FE is ready - let result = _download_files_internal( - app.clone(), - &items, - &headers, - task_id, - false, - cancel_token.clone(), - ) - .await; - - // cleanup - { - let mut download_manager = state.download_manager.lock().await; - download_manager.cancel_tokens.remove(task_id); - } - - // delete files if cancelled - if cancel_token.is_cancelled() { - let jan_data_folder = get_jan_data_folder_path(app.clone()); - for item in items { - let save_path = jan_data_folder.join(&item.save_path); - let _ = std::fs::remove_file(&save_path); // don't check error - } - } - - result.map_err(err_to_string) -} - -#[tauri::command] -pub async fn cancel_download_task(state: State<'_, AppState>, task_id: &str) -> Result<(), String> { - // NOTE: might want to add User-Agent header - let mut download_manager = state.download_manager.lock().await; - if let Some(token) = download_manager.cancel_tokens.remove(task_id) { - token.cancel(); - log::info!("Cancelled download task: {}", task_id); - Ok(()) - } else { - Err(format!("No download task: {}", task_id)) - } -} - -fn _convert_headers( - headers: &HashMap, -) -> Result> { - let mut header_map = HeaderMap::new(); - for (k, v) in headers { - let key = HeaderName::from_bytes(k.as_bytes())?; - let value = HeaderValue::from_str(v)?; - header_map.insert(key, value); - } - Ok(header_map) -} - -async fn _get_file_size( - client: &reqwest::Client, - url: &str, -) -> Result> { - let resp = client.head(url).send().await?; - if !resp.status().is_success() { - return Err(format!("Failed to get file size: HTTP status {}", resp.status()).into()); - } - // this is buggy, always return 0 for HEAD request - // Ok(resp.content_length().unwrap_or(0)) - - match 
resp.headers().get("content-length") { - Some(value) => { - let value_str = value.to_str()?; - let value_u64: u64 = value_str.parse()?; - Ok(value_u64) - } - None => Ok(0), - } -} - -async fn _download_files_internal( - app: tauri::AppHandle, - items: &[DownloadItem], - headers: &HashMap, - task_id: &str, - resume: bool, - cancel_token: CancellationToken, -) -> Result<(), String> { - log::info!("Start download task: {}", task_id); - - let header_map = _convert_headers(headers).map_err(err_to_string)?; - - let total_size = { - let mut total_size = 0u64; - for item in items.iter() { - let client = _get_client_for_item(item, &header_map).map_err(err_to_string)?; - total_size += _get_file_size(&client, &item.url) - .await - .map_err(err_to_string)?; - } - total_size - }; - log::info!("Total download size: {}", total_size); - - let mut evt = DownloadEvent { - transferred: 0, - total: total_size, - }; - let evt_name = format!("download-{}", task_id); - - // save file under Jan data folder - let jan_data_folder = get_jan_data_folder_path(app.clone()); - - for item in items.iter() { - let save_path = jan_data_folder.join(&item.save_path); - let save_path = normalize_path(&save_path); - - if !save_path.starts_with(&jan_data_folder) { - return Err(format!( - "Path {} is outside of Jan data folder {}", - save_path.display(), - jan_data_folder.display() - )); - } - - // Create parent directories if they don't exist - if let Some(parent) = save_path.parent() { - if !parent.exists() { - tokio::fs::create_dir_all(parent) - .await - .map_err(err_to_string)?; - } - } - - let current_extension = save_path.extension().unwrap_or_default().to_string_lossy(); - let append_extension = |ext: &str| { - if current_extension.is_empty() { - ext.to_string() - } else { - format!("{}.{}", current_extension, ext) - } - }; - let tmp_save_path = save_path.with_extension(append_extension("tmp")); - let url_save_path = save_path.with_extension(append_extension("url")); - - let mut should_resume = 
resume - && tmp_save_path.exists() - && tokio::fs::read_to_string(&url_save_path) - .await - .map(|url| url == item.url) // check if we resume the same URL - .unwrap_or(false); - - tokio::fs::write(&url_save_path, item.url.clone()) - .await - .map_err(err_to_string)?; - - log::info!("Started downloading: {}", item.url); - let client = _get_client_for_item(item, &header_map).map_err(err_to_string)?; - let mut download_delta = 0u64; - let resp = if should_resume { - let downloaded_size = tmp_save_path.metadata().map_err(err_to_string)?.len(); - match _get_maybe_resume(&client, &item.url, downloaded_size).await { - Ok(resp) => { - log::info!( - "Resume download: {}, already downloaded {} bytes", - item.url, - downloaded_size - ); - download_delta += downloaded_size; - resp - } - Err(e) => { - // fallback to normal download - log::warn!("Failed to resume download: {}", e); - should_resume = false; - _get_maybe_resume(&client, &item.url, 0).await? - } - } - } else { - _get_maybe_resume(&client, &item.url, 0).await? - }; - let mut stream = resp.bytes_stream(); - - let file = if should_resume { - // resume download, append to existing file - tokio::fs::OpenOptions::new() - .write(true) - .append(true) - .open(&tmp_save_path) - .await - .map_err(err_to_string)? - } else { - // start new download, create a new file - File::create(&tmp_save_path).await.map_err(err_to_string)? 
- }; - let mut writer = tokio::io::BufWriter::new(file); - - // write chunk to file - while let Some(chunk) = stream.next().await { - if cancel_token.is_cancelled() { - if !should_resume { - tokio::fs::remove_dir_all(&save_path.parent().unwrap()) - .await - .ok(); - } - log::info!("Download cancelled for task: {}", task_id); - app.emit(&evt_name, evt.clone()).unwrap(); - return Ok(()); - } - - let chunk = chunk.map_err(err_to_string)?; - writer.write_all(&chunk).await.map_err(err_to_string)?; - download_delta += chunk.len() as u64; - - // only update every 10 MB - if download_delta >= 10 * 1024 * 1024 { - evt.transferred += download_delta; - app.emit(&evt_name, evt.clone()).unwrap(); - download_delta = 0u64; - } - } - - writer.flush().await.map_err(err_to_string)?; - evt.transferred += download_delta; - - // rename tmp file to final file - tokio::fs::rename(&tmp_save_path, &save_path) - .await - .map_err(err_to_string)?; - tokio::fs::remove_file(&url_save_path) - .await - .map_err(err_to_string)?; - log::info!("Finished downloading: {}", item.url); - } - - app.emit(&evt_name, evt.clone()).unwrap(); - Ok(()) -} - -async fn _get_maybe_resume( - client: &reqwest::Client, - url: &str, - start_bytes: u64, -) -> Result { - if start_bytes > 0 { - let resp = client - .get(url) - .header("Range", format!("bytes={}-", start_bytes)) - .send() - .await - .map_err(err_to_string)?; - if resp.status() != reqwest::StatusCode::PARTIAL_CONTENT { - return Err(format!( - "Failed to resume download: HTTP status {}, {}", - resp.status(), - resp.text().await.unwrap_or_default() - )); - } - Ok(resp) - } else { - let resp = client.get(url).send().await.map_err(err_to_string)?; - if !resp.status().is_success() { - return Err(format!( - "Failed to download: HTTP status {}, {}", - resp.status(), - resp.text().await.unwrap_or_default() - )); - } - Ok(resp) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::collections::HashMap; - - // Helper function to create a minimal proxy 
config for testing - fn create_test_proxy_config(url: &str) -> ProxyConfig { - ProxyConfig { - url: url.to_string(), - username: None, - password: None, - no_proxy: None, - ignore_ssl: None, - } - } - - #[test] - fn test_validate_proxy_config() { - // Valid HTTP proxy - let config = ProxyConfig { - url: "http://proxy.example.com:8080".to_string(), - username: Some("user".to_string()), - password: Some("pass".to_string()), - no_proxy: Some(vec!["localhost".to_string(), "*.example.com".to_string()]), - ignore_ssl: Some(true), - }; - assert!(validate_proxy_config(&config).is_ok()); - - // Valid HTTPS proxy - let config = ProxyConfig { - url: "https://proxy.example.com:8080".to_string(), - username: None, - password: None, - no_proxy: None, - ignore_ssl: None, - }; - assert!(validate_proxy_config(&config).is_ok()); - - // Valid SOCKS5 proxy - let config = ProxyConfig { - url: "socks5://proxy.example.com:1080".to_string(), - username: None, - password: None, - no_proxy: None, - ignore_ssl: None, - }; - assert!(validate_proxy_config(&config).is_ok()); - - // Invalid URL - let config = create_test_proxy_config("invalid-url"); - assert!(validate_proxy_config(&config).is_err()); - - // Unsupported scheme - let config = create_test_proxy_config("ftp://proxy.example.com:21"); - assert!(validate_proxy_config(&config).is_err()); - - // Username without password - let mut config = create_test_proxy_config("http://proxy.example.com:8080"); - config.username = Some("user".to_string()); - assert!(validate_proxy_config(&config).is_err()); - - // Password without username - let mut config = create_test_proxy_config("http://proxy.example.com:8080"); - config.password = Some("pass".to_string()); - assert!(validate_proxy_config(&config).is_err()); - - // Empty no_proxy entry - let mut config = create_test_proxy_config("http://proxy.example.com:8080"); - config.no_proxy = Some(vec!["".to_string()]); - assert!(validate_proxy_config(&config).is_err()); - - // Invalid wildcard pattern - let 
mut config = create_test_proxy_config("http://proxy.example.com:8080"); - config.no_proxy = Some(vec!["*.".to_string()]); - assert!(validate_proxy_config(&config).is_err()); - } - - #[test] - fn test_should_bypass_proxy() { - let no_proxy = vec![ - "localhost".to_string(), - "127.0.0.1".to_string(), - "*.example.com".to_string(), - "specific.domain.com".to_string(), - ]; - - // Should bypass for localhost - assert!(should_bypass_proxy("http://localhost:8080/path", &no_proxy)); - - // Should bypass for 127.0.0.1 - assert!(should_bypass_proxy("https://127.0.0.1:3000/api", &no_proxy)); - - // Should bypass for wildcard match - assert!(should_bypass_proxy( - "http://sub.example.com/path", - &no_proxy - )); - assert!(should_bypass_proxy("https://api.example.com/v1", &no_proxy)); - - // Should bypass for specific domain - assert!(should_bypass_proxy( - "http://specific.domain.com/test", - &no_proxy - )); - - // Should NOT bypass for other domains - assert!(!should_bypass_proxy("http://other.com/path", &no_proxy)); - assert!(!should_bypass_proxy("https://example.org/api", &no_proxy)); - - // Should bypass everything with "*" - let wildcard_no_proxy = vec!["*".to_string()]; - assert!(should_bypass_proxy( - "http://any.domain.com/path", - &wildcard_no_proxy - )); - - // Empty no_proxy should not bypass anything - let empty_no_proxy = vec![]; - assert!(!should_bypass_proxy( - "http://any.domain.com/path", - &empty_no_proxy - )); - } - - #[test] - fn test_create_proxy_from_config() { - // Valid configuration should work - let mut config = create_test_proxy_config("http://proxy.example.com:8080"); - config.username = Some("user".to_string()); - config.password = Some("pass".to_string()); - assert!(create_proxy_from_config(&config).is_ok()); - - // Invalid configuration should fail - let config = create_test_proxy_config("invalid-url"); - assert!(create_proxy_from_config(&config).is_err()); - } - - #[test] - fn test_convert_headers() { - let mut headers = HashMap::new(); - 
headers.insert("User-Agent".to_string(), "test-agent".to_string()); - headers.insert("Authorization".to_string(), "Bearer token".to_string()); - - let header_map = _convert_headers(&headers).unwrap(); - assert_eq!(header_map.len(), 2); - assert_eq!(header_map.get("User-Agent").unwrap(), "test-agent"); - assert_eq!(header_map.get("Authorization").unwrap(), "Bearer token"); - } - - #[test] - fn test_proxy_ssl_verification_settings() { - // Test proxy config with SSL verification settings - let mut config = create_test_proxy_config("https://proxy.example.com:8080"); - config.ignore_ssl = Some(true); - - // Should validate successfully - assert!(validate_proxy_config(&config).is_ok()); - - // Test with all SSL settings as false - config.ignore_ssl = Some(false); - - // Should still validate successfully - assert!(validate_proxy_config(&config).is_ok()); - } - - #[test] - fn test_proxy_config_with_mixed_ssl_settings() { - // Test with mixed SSL settings - ignore_ssl true, others false - let mut config = create_test_proxy_config("https://proxy.example.com:8080"); - config.ignore_ssl = Some(true); - - assert!(validate_proxy_config(&config).is_ok()); - assert!(create_proxy_from_config(&config).is_ok()); - } - - #[test] - fn test_proxy_config_ssl_defaults() { - // Test with no SSL settings (should use None defaults) - let config = create_test_proxy_config("https://proxy.example.com:8080"); - - assert_eq!(config.ignore_ssl, None); - - assert!(validate_proxy_config(&config).is_ok()); - assert!(create_proxy_from_config(&config).is_ok()); - } - - #[test] - fn test_download_item_with_ssl_proxy() { - // Test that DownloadItem can be created with SSL proxy configuration - let mut proxy_config = create_test_proxy_config("https://proxy.example.com:8080"); - proxy_config.ignore_ssl = Some(true); - - let download_item = DownloadItem { - url: "https://example.com/file.zip".to_string(), - save_path: "downloads/file.zip".to_string(), - proxy: Some(proxy_config), - }; - - 
assert!(download_item.proxy.is_some()); - let proxy = download_item.proxy.unwrap(); - assert_eq!(proxy.ignore_ssl, Some(true)); - } - - #[test] - fn test_client_creation_with_ssl_settings() { - // Test client creation with SSL settings - let mut proxy_config = create_test_proxy_config("https://proxy.example.com:8080"); - proxy_config.ignore_ssl = Some(true); - - let download_item = DownloadItem { - url: "https://example.com/file.zip".to_string(), - save_path: "downloads/file.zip".to_string(), - proxy: Some(proxy_config), - }; - - let header_map = HeaderMap::new(); - let result = _get_client_for_item(&download_item, &header_map); - - // Should create client successfully even with SSL settings - assert!(result.is_ok()); - } - - #[test] - fn test_proxy_config_with_http_and_ssl_settings() { - // Test that SSL settings work with HTTP proxy (though not typically used) - let mut config = create_test_proxy_config("http://proxy.example.com:8080"); - config.ignore_ssl = Some(true); - - assert!(validate_proxy_config(&config).is_ok()); - assert!(create_proxy_from_config(&config).is_ok()); - } - - #[test] - fn test_proxy_config_with_socks_and_ssl_settings() { - // Test that SSL settings work with SOCKS proxy - let mut config = create_test_proxy_config("socks5://proxy.example.com:1080"); - config.ignore_ssl = Some(false); - - assert!(validate_proxy_config(&config).is_ok()); - assert!(create_proxy_from_config(&config).is_ok()); - } - - #[test] - fn test_download_item_creation() { - let item = DownloadItem { - url: "https://example.com/file.tar.gz".to_string(), - save_path: "models/test.tar.gz".to_string(), - proxy: None, - }; - - assert_eq!(item.url, "https://example.com/file.tar.gz"); - assert_eq!(item.save_path, "models/test.tar.gz"); - } - - #[test] - fn test_download_event_creation() { - let event = DownloadEvent { - transferred: 1024, - total: 2048, - }; - - assert_eq!(event.transferred, 1024); - assert_eq!(event.total, 2048); - } - - #[test] - fn test_err_to_string() { - 
let error = "Test error"; - let result = err_to_string(error); - assert_eq!(result, "Error: Test error"); - } - - #[test] - fn test_convert_headers_valid() { - let mut headers = HashMap::new(); - headers.insert("Content-Type".to_string(), "application/json".to_string()); - headers.insert("Authorization".to_string(), "Bearer token123".to_string()); - - let result = _convert_headers(&headers); - assert!(result.is_ok()); - - let header_map = result.unwrap(); - assert_eq!(header_map.len(), 2); - assert_eq!(header_map.get("Content-Type").unwrap(), "application/json"); - assert_eq!(header_map.get("Authorization").unwrap(), "Bearer token123"); - } - - #[test] - fn test_convert_headers_invalid_header_name() { - let mut headers = HashMap::new(); - headers.insert("Invalid\nHeader".to_string(), "value".to_string()); - - let result = _convert_headers(&headers); - assert!(result.is_err()); - } - - #[test] - fn test_convert_headers_invalid_header_value() { - let mut headers = HashMap::new(); - headers.insert("Content-Type".to_string(), "invalid\nvalue".to_string()); - - let result = _convert_headers(&headers); - assert!(result.is_err()); - } - - #[test] - fn test_download_manager_state_default() { - let state = DownloadManagerState::default(); - assert!(state.cancel_tokens.is_empty()); - } - - #[test] - fn test_download_event_serialization() { - let event = DownloadEvent { - transferred: 512, - total: 1024, - }; - - let json = serde_json::to_string(&event).unwrap(); - assert!(json.contains("\"transferred\":512")); - assert!(json.contains("\"total\":1024")); - } - - #[test] - fn test_download_item_deserialization() { - let json = r#"{"url":"https://example.com/file.zip","save_path":"downloads/file.zip"}"#; - let item: DownloadItem = serde_json::from_str(json).unwrap(); - - assert_eq!(item.url, "https://example.com/file.zip"); - assert_eq!(item.save_path, "downloads/file.zip"); - } -} diff --git a/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/mod.rs 
b/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/mod.rs deleted file mode 100644 index 35a24a4f9..000000000 --- a/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod server; -pub mod cleanup; diff --git a/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/server.rs b/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/server.rs deleted file mode 100644 index 2679f3284..000000000 --- a/src-tauri/src/core/utils/extensions/inference_llamacpp_extension/server.rs +++ /dev/null @@ -1,1152 +0,0 @@ -use base64::{engine::general_purpose, Engine as _}; -use hmac::{Hmac, Mac}; -use rand::{rngs::StdRng, Rng, SeedableRng}; -use serde::{Deserialize, Serialize}; -use sha2::Sha256; -use std::collections::HashSet; -use std::path::PathBuf; -use std::process::Stdio; -use std::time::Duration; -use sysinfo::{Pid, ProcessesToUpdate, System}; -use tauri::State; // Import Manager trait -use thiserror; -use tokio::io::{AsyncBufReadExt, BufReader}; -use tokio::process::Command; -use tokio::sync::mpsc; -use tokio::time::{timeout, Instant}; - -use crate::core::state::AppState; -use crate::core::state::LLamaBackendSession; - -type HmacSha256 = Hmac; - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "SCREAMING_SNAKE_CASE")] -pub enum ErrorCode { - BinaryNotFound, - ModelFileNotFound, - LibraryPathInvalid, - - // --- Model Loading Errors --- - ModelLoadFailed, - DraftModelLoadFailed, - MultimodalProjectorLoadFailed, - ModelArchNotSupported, - ModelLoadTimedOut, - LlamaCppProcessError, - - // --- Memory Errors --- - OutOfMemory, - - // --- Internal Application Errors --- - DeviceListParseFailed, - IoError, - InternalError, -} - -#[derive(Debug, Clone, Serialize, thiserror::Error)] -#[error("LlamacppError {{ code: {code:?}, message: \"{message}\" }}")] -pub struct LlamacppError { - pub code: ErrorCode, - pub message: String, - #[serde(skip_serializing_if = 
"Option::is_none")] - pub details: Option, -} -impl LlamacppError { - pub fn new(code: ErrorCode, message: String, details: Option) -> Self { - Self { - code, - message, - details, - } - } - - /// Parses stderr from llama.cpp and creates a specific LlamacppError. - pub fn from_stderr(stderr: &str) -> Self { - let lower_stderr = stderr.to_lowercase(); - // TODO: add others - let is_out_of_memory = lower_stderr.contains("out of memory") - || lower_stderr.contains("insufficient memory") - || lower_stderr.contains("erroroutofdevicememory") // vulkan specific - || lower_stderr.contains("kiogpucommandbuffercallbackerroroutofmemory") // Metal-specific error code - || lower_stderr.contains("cuda_error_out_of_memory"); // CUDA-specific - - if is_out_of_memory { - return Self::new( - ErrorCode::OutOfMemory, - "Out of memory. The model requires more RAM or VRAM than available.".into(), - Some(stderr.into()), - ); - } - - if lower_stderr.contains("error loading model architecture") { - return Self::new( - ErrorCode::ModelArchNotSupported, - "The model's architecture is not supported by this version of the backend.".into(), - Some(stderr.into()), - ); - } - Self::new( - ErrorCode::LlamaCppProcessError, - "The model process encountered an unexpected error.".into(), - Some(stderr.into()), - ) - } -} - -// Error type for server commands -#[derive(Debug, thiserror::Error)] -pub enum ServerError { - #[error(transparent)] - Llamacpp(#[from] LlamacppError), - - #[error("IO error: {0}")] - Io(#[from] std::io::Error), - - #[error("Tauri error: {0}")] - Tauri(#[from] tauri::Error), -} - -// impl serialization for tauri -impl serde::Serialize for ServerError { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - let error_to_serialize: LlamacppError = match self { - ServerError::Llamacpp(err) => err.clone(), - ServerError::Io(e) => LlamacppError::new( - ErrorCode::IoError, - "An input/output error occurred.".into(), - Some(e.to_string()), - ), - 
ServerError::Tauri(e) => LlamacppError::new( - ErrorCode::InternalError, - "An internal application error occurred.".into(), - Some(e.to_string()), - ), - }; - error_to_serialize.serialize(serializer) - } -} - -type ServerResult = Result; - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct SessionInfo { - pub pid: i32, // opaque handle for unload/chat - pub port: i32, // llama-server output port - pub model_id: String, - pub model_path: String, // path of the loaded model - pub api_key: String, -} - -#[derive(serde::Serialize, serde::Deserialize)] -pub struct UnloadResult { - success: bool, - error: Option, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DeviceInfo { - pub id: String, - pub name: String, - pub mem: i32, - pub free: i32, -} - -#[cfg(windows)] -use std::os::windows::ffi::OsStrExt; - -#[cfg(windows)] -use std::ffi::OsStr; - -#[cfg(windows)] -use windows_sys::Win32::Storage::FileSystem::GetShortPathNameW; - -#[cfg(windows)] -pub fn get_short_path>(path: P) -> Option { - let wide: Vec = OsStr::new(path.as_ref()) - .encode_wide() - .chain(Some(0)) - .collect(); - - let mut buffer = vec![0u16; 260]; - let len = unsafe { GetShortPathNameW(wide.as_ptr(), buffer.as_mut_ptr(), buffer.len() as u32) }; - - if len > 0 { - Some(String::from_utf16_lossy(&buffer[..len as usize])) - } else { - None - } -} - -// --- Load Command --- -#[tauri::command] -pub async fn load_llama_model( - state: State<'_, AppState>, - backend_path: &str, - library_path: Option<&str>, - mut args: Vec, -) -> ServerResult { - let mut process_map = state.llama_server_process.lock().await; - - log::info!("Attempting to launch server at path: {:?}", backend_path); - log::info!("Using arguments: {:?}", args); - - let server_path_buf = PathBuf::from(backend_path); - if !server_path_buf.exists() { - let err_msg = format!("Binary not found at {:?}", backend_path); - log::error!( - "Server binary not found at expected path: {:?}", - backend_path - ); - return 
Err(LlamacppError::new( - ErrorCode::BinaryNotFound, - "The llama.cpp server binary could not be found.".into(), - Some(err_msg), - ) - .into()); - } - - let port_str = args - .iter() - .position(|arg| arg == "--port") - .and_then(|i| args.get(i + 1)) - .cloned() - .unwrap_or_default(); - let port: i32 = match port_str.parse() { - Ok(p) => p, - Err(_) => { - eprintln!("Invalid port value: '{}', using default 8080", port_str); - 8080 - } - }; - // FOR MODEL PATH; TODO: DO SIMILARLY FOR MMPROJ PATH - let model_path_index = args.iter().position(|arg| arg == "-m").ok_or_else(|| { - LlamacppError::new( - ErrorCode::ModelLoadFailed, - "Model path argument '-m' is missing.".into(), - None, - ) - })?; - - let model_path = args.get(model_path_index + 1).cloned().ok_or_else(|| { - LlamacppError::new( - ErrorCode::ModelLoadFailed, - "Model path was not provided after '-m' flag.".into(), - None, - ) - })?; - - let model_path_pb = PathBuf::from(&model_path); - if !model_path_pb.exists() { - let err_msg = format!( - "Invalid or inaccessible model path: {}", - model_path_pb.display() - ); - log::error!("{}", &err_msg); - return Err(LlamacppError::new( - ErrorCode::ModelFileNotFound, - "The specified model file does not exist or is not accessible.".into(), - Some(err_msg), - ) - .into()); - } - #[cfg(windows)] - { - // use short path on Windows - if let Some(short) = get_short_path(&model_path_pb) { - args[model_path_index + 1] = short; - } else { - args[model_path_index + 1] = model_path_pb.display().to_string(); - } - } - #[cfg(not(windows))] - { - args[model_path_index + 1] = model_path_pb.display().to_string(); - } - // ----------------------------------------------------------------- - - let api_key = args - .iter() - .position(|arg| arg == "--api-key") - .and_then(|i| args.get(i + 1)) - .cloned() - .unwrap_or_default(); - - let model_id = args - .iter() - .position(|arg| arg == "-a") - .and_then(|i| args.get(i + 1)) - .cloned() - .unwrap_or_default(); - - // Configure the 
command to run the server - let mut command = Command::new(backend_path); - command.args(args); - - if let Some(lib_path) = library_path { - if cfg!(target_os = "linux") { - let new_lib_path = match std::env::var("LD_LIBRARY_PATH") { - Ok(path) => format!("{}:{}", path, lib_path), - Err(_) => lib_path.to_string(), - }; - command.env("LD_LIBRARY_PATH", new_lib_path); - } else if cfg!(target_os = "windows") { - let new_path = match std::env::var("PATH") { - Ok(path) => format!("{};{}", path, lib_path), - Err(_) => lib_path.to_string(), - }; - command.env("PATH", new_path); - - // Normalize the path by removing UNC prefix if present - let normalized_path = lib_path.trim_start_matches(r"\\?\").to_string(); - log::info!("Library path:\n{}", &normalized_path); - - // Only set current_dir if the normalized path exists and is a directory - let path = std::path::Path::new(&normalized_path); - if path.exists() && path.is_dir() { - command.current_dir(&normalized_path); - } else { - log::warn!( - "Library path '{}' does not exist or is not a directory", - normalized_path - ); - } - } else { - log::warn!("Library path setting is not supported on this OS"); - } - } - command.stdout(Stdio::piped()); - command.stderr(Stdio::piped()); - #[cfg(all(windows, target_arch = "x86_64"))] - { - use std::os::windows::process::CommandExt; - const CREATE_NO_WINDOW: u32 = 0x0800_0000; - const CREATE_NEW_PROCESS_GROUP: u32 = 0x0000_0200; - command.creation_flags(CREATE_NO_WINDOW | CREATE_NEW_PROCESS_GROUP); - } - - // Spawn the child process - let mut child = command.spawn().map_err(ServerError::Io)?; - - let stderr = child.stderr.take().expect("stderr was piped"); - let stdout = child.stdout.take().expect("stdout was piped"); - - // Create channels for communication between tasks - let (ready_tx, mut ready_rx) = mpsc::channel::(1); - - // Spawn task to monitor stdout for readiness - let _stdout_task = tokio::spawn(async move { - let mut reader = BufReader::new(stdout); - let mut byte_buffer = 
Vec::new(); - - loop { - byte_buffer.clear(); - match reader.read_until(b'\n', &mut byte_buffer).await { - Ok(0) => break, // EOF - Ok(_) => { - let line = String::from_utf8_lossy(&byte_buffer); - let line = line.trim_end(); - if !line.is_empty() { - log::info!("[llamacpp stdout] {}", line); - } - } - Err(e) => { - log::error!("Error reading stdout: {}", e); - break; - } - } - } - }); - - // Spawn task to capture stderr and monitor for errors - let stderr_task = tokio::spawn(async move { - let mut reader = BufReader::new(stderr); - let mut byte_buffer = Vec::new(); - let mut stderr_buffer = String::new(); - - loop { - byte_buffer.clear(); - match reader.read_until(b'\n', &mut byte_buffer).await { - Ok(0) => break, // EOF - Ok(_) => { - let line = String::from_utf8_lossy(&byte_buffer); - let line = line.trim_end(); - - if !line.is_empty() { - stderr_buffer.push_str(line); - stderr_buffer.push('\n'); - log::info!("[llamacpp] {}", line); - - // Check for critical error indicators that should stop the process - let line_lower = line.to_string().to_lowercase(); - // Check for readiness indicator - llama-server outputs this when ready - if line_lower.contains("server is listening on") - || line_lower.contains("starting the main loop") - || line_lower.contains("server listening on") - { - log::info!("Model appears to be ready based on logs: '{}'", line); - let _ = ready_tx.send(true).await; - } - } - } - Err(e) => { - log::error!("Error reading logs: {}", e); - break; - } - } - } - - stderr_buffer - }); - - // Check if process exited early - if let Some(status) = child.try_wait()? 
{ - if !status.success() { - let stderr_output = stderr_task.await.unwrap_or_default(); - log::error!("llama.cpp failed early with code {:?}", status); - log::error!("{}", stderr_output); - return Err(LlamacppError::from_stderr(&stderr_output).into()); - } - } - - // Wait for server to be ready or timeout - let timeout_duration = Duration::from_secs(180); // 3 minutes timeout - let start_time = Instant::now(); - log::info!("Waiting for model session to be ready..."); - loop { - tokio::select! { - // Server is ready - Some(true) = ready_rx.recv() => { - log::info!("Model is ready to accept requests!"); - break; - } - // Check for process exit more frequently - _ = tokio::time::sleep(Duration::from_millis(50)) => { - // Check if process exited - if let Some(status) = child.try_wait()? { - let stderr_output = stderr_task.await.unwrap_or_default(); - if !status.success() { - log::error!("llama.cpp exited with error code {:?}", status); - return Err(LlamacppError::from_stderr(&stderr_output).into()); - } else { - log::error!("llama.cpp exited successfully but without ready signal"); - return Err(LlamacppError::from_stderr(&stderr_output).into()); - } - } - - // Timeout check - if start_time.elapsed() > timeout_duration { - log::error!("Timeout waiting for server to be ready"); - let _ = child.kill().await; - let stderr_output = stderr_task.await.unwrap_or_default(); - return Err(LlamacppError::new( - ErrorCode::ModelLoadTimedOut, - "The model took too long to load and timed out.".into(), - Some(format!("Timeout: {}s\n\nStderr:\n{}", timeout_duration.as_secs(), stderr_output)), - ).into()); - } - } - } - } - - // Get the PID to use as session ID - let pid = child.id().map(|id| id as i32).unwrap_or(-1); - - log::info!("Server process started with PID: {} and is ready", pid); - let session_info = SessionInfo { - pid: pid.clone(), - port: port, - model_id: model_id, - model_path: model_path_pb.display().to_string(), - api_key: api_key, - }; - - // Insert session info to 
process_map - process_map.insert( - pid.clone(), - LLamaBackendSession { - child, - info: session_info.clone(), - }, - ); - - Ok(session_info) -} - -// --- Unload Command --- -#[tauri::command] -pub async fn unload_llama_model( - pid: i32, - state: State<'_, AppState>, -) -> ServerResult { - let mut map = state.llama_server_process.lock().await; - if let Some(session) = map.remove(&pid) { - let mut child = session.child; - #[cfg(unix)] - { - use nix::sys::signal::{kill, Signal}; - use nix::unistd::Pid; - - if let Some(raw_pid) = child.id() { - let raw_pid = raw_pid as i32; - log::info!("Sending SIGTERM to PID {}", raw_pid); - let _ = kill(Pid::from_raw(raw_pid), Signal::SIGTERM); - - match timeout(Duration::from_secs(5), child.wait()).await { - Ok(Ok(status)) => log::info!("Process exited gracefully: {}", status), - Ok(Err(e)) => log::error!("Error waiting after SIGTERM: {}", e), - Err(_) => { - log::warn!("SIGTERM timed out; sending SIGKILL to PID {}", raw_pid); - let _ = kill(Pid::from_raw(raw_pid), Signal::SIGKILL); - match child.wait().await { - Ok(s) => log::info!("Force-killed process exited: {}", s), - Err(e) => log::error!("Error waiting after SIGKILL: {}", e), - } - } - } - } - } - - #[cfg(all(windows, target_arch = "x86_64"))] - { - if let Some(raw_pid) = child.id() { - log::warn!( - "gracefully killing is unsupported on Windows, force-killing PID {}", - raw_pid - ); - - // Since we know a graceful shutdown doesn't work and there are no child processes - // to worry about, we can use `child.kill()` directly. On Windows, this is - // a forceful termination via the `TerminateProcess` API. - if let Err(e) = child.kill().await { - log::error!( - "Failed to send kill signal to PID {}: {}. It may have already terminated.", - raw_pid, - e - ); - } - - match child.wait().await { - Ok(status) => log::info!( - "process {} has been terminated. 
Final exit status: {}", - raw_pid, - status - ), - Err(e) => log::error!( - "Error waiting on child process {} after kill: {}", - raw_pid, - e - ), - } - } - } - Ok(UnloadResult { - success: true, - error: None, - }) - } else { - log::warn!("No server with PID '{}' found", pid); - Ok(UnloadResult { - success: true, - error: None, - }) - } -} - -#[tauri::command] -pub async fn get_devices( - backend_path: &str, - library_path: Option<&str>, -) -> ServerResult> { - log::info!("Getting devices from server at path: {:?}", backend_path); - - let server_path_buf = PathBuf::from(backend_path); - if !server_path_buf.exists() { - log::error!( - "Server binary not found at expected path: {:?}", - backend_path - ); - return Err(LlamacppError::new( - ErrorCode::BinaryNotFound, - "The llama.cpp server binary could not be found.".into(), - Some(format!("Path: {}", backend_path)), - ) - .into()); - } - - // Configure the command to run the server with --list-devices - let mut command = Command::new(backend_path); - command.arg("--list-devices"); - - // Set up library path similar to load function - if let Some(lib_path) = library_path { - if cfg!(target_os = "linux") { - let new_lib_path = match std::env::var("LD_LIBRARY_PATH") { - Ok(path) => format!("{}:{}", path, lib_path), - Err(_) => lib_path.to_string(), - }; - command.env("LD_LIBRARY_PATH", new_lib_path); - } else if cfg!(target_os = "windows") { - let new_path = match std::env::var("PATH") { - Ok(path) => format!("{};{}", path, lib_path), - Err(_) => lib_path.to_string(), - }; - command.env("PATH", new_path); - - // Normalize the path by removing UNC prefix if present - let normalized_path = lib_path.trim_start_matches(r"\\?\").to_string(); - log::info!("Library path:\n{}", &normalized_path); - - // Only set current_dir if the normalized path exists and is a directory - let path = std::path::Path::new(&normalized_path); - if path.exists() && path.is_dir() { - command.current_dir(&normalized_path); - } else { - log::warn!( 
- "Library path '{}' does not exist or is not a directory", - normalized_path - ); - } - } else { - log::warn!("Library path setting is not supported on this OS"); - } - } - - command.stdout(Stdio::piped()); - command.stderr(Stdio::piped()); - - #[cfg(all(windows, target_arch = "x86_64"))] - { - use std::os::windows::process::CommandExt; - const CREATE_NO_WINDOW: u32 = 0x0800_0000; - const CREATE_NEW_PROCESS_GROUP: u32 = 0x0000_0200; - command.creation_flags(CREATE_NO_WINDOW | CREATE_NEW_PROCESS_GROUP); - } - - // Execute the command and wait for completion - let output = timeout(Duration::from_secs(30), command.output()) - .await - .map_err(|_| { - LlamacppError::new( - ErrorCode::InternalError, - "Timeout waiting for device list".into(), - None, - ) - })? - .map_err(ServerError::Io)?; - - // Check if command executed successfully - if !output.status.success() { - let stderr = String::from_utf8_lossy(&output.stderr); - log::error!("llama-server --list-devices failed: {}", stderr); - return Err(LlamacppError::from_stderr(&stderr).into()); - } - // Parse the output - let stdout = String::from_utf8_lossy(&output.stdout); - log::info!("Device list output:\n{}", stdout); - - parse_device_output(&stdout) -} - -fn parse_device_output(output: &str) -> ServerResult> { - let mut devices = Vec::new(); - let mut found_devices_section = false; - - for raw in output.lines() { - // detect header (ignoring whitespace) - if raw.trim() == "Available devices:" { - found_devices_section = true; - continue; - } - - if !found_devices_section { - continue; - } - - // skip blank lines - if raw.trim().is_empty() { - continue; - } - - // now parse any non-blank line after the header - let line = raw.trim(); - if let Some(device) = parse_device_line(line)? 
{ - devices.push(device); - } - } - - if devices.is_empty() && found_devices_section { - log::warn!("No devices found in output"); - } else if !found_devices_section { - return Err(LlamacppError::new( - ErrorCode::DeviceListParseFailed, - "Could not find 'Available devices:' section in the backend output.".into(), - Some(output.to_string()), - ) - .into()); - } - - Ok(devices) -} - -fn parse_device_line(line: &str) -> ServerResult> { - let line = line.trim(); - - log::info!("Parsing device line: '{}'", line); - - // Expected formats: - // "Vulkan0: Intel(R) Arc(tm) A750 Graphics (DG2) (8128 MiB, 8128 MiB free)" - // "CUDA0: NVIDIA GeForce RTX 4090 (24576 MiB, 24000 MiB free)" - // "SYCL0: Intel(R) Arc(TM) A750 Graphics (8000 MiB, 7721 MiB free)" - - // Split by colon to get ID and rest - let parts: Vec<&str> = line.splitn(2, ':').collect(); - if parts.len() != 2 { - log::warn!("Skipping malformed device line: {}", line); - return Ok(None); - } - - let id = parts[0].trim().to_string(); - let rest = parts[1].trim(); - - // Use regex-like approach to find the memory pattern at the end - // Look for pattern: (number MiB, number MiB free) at the end - if let Some(memory_match) = find_memory_pattern(rest) { - let (memory_start, memory_content) = memory_match; - let name = rest[..memory_start].trim().to_string(); - - // Parse memory info: "8128 MiB, 8128 MiB free" - let memory_parts: Vec<&str> = memory_content.split(',').collect(); - if memory_parts.len() >= 2 { - if let (Ok(total_mem), Ok(free_mem)) = ( - parse_memory_value(memory_parts[0].trim()), - parse_memory_value(memory_parts[1].trim()), - ) { - log::info!( - "Parsed device - ID: '{}', Name: '{}', Mem: {}, Free: {}", - id, - name, - total_mem, - free_mem - ); - - return Ok(Some(DeviceInfo { - id, - name, - mem: total_mem, - free: free_mem, - })); - } - } - } - - log::warn!("Could not parse device line: {}", line); - Ok(None) -} - -fn find_memory_pattern(text: &str) -> Option<(usize, &str)> { - // Find the last 
parenthesis that contains the memory pattern - let mut last_match = None; - let mut chars = text.char_indices().peekable(); - - while let Some((start_idx, ch)) = chars.next() { - if ch == '(' { - // Find the closing parenthesis - let remaining = &text[start_idx + 1..]; - if let Some(close_pos) = remaining.find(')') { - let content = &remaining[..close_pos]; - - // Check if this looks like memory info - if is_memory_pattern(content) { - last_match = Some((start_idx, content)); - } - } - } - } - - last_match -} - -fn is_memory_pattern(content: &str) -> bool { - // Check if content matches pattern like "8128 MiB, 8128 MiB free" - // Must contain: numbers, "MiB", comma, "free" - if !(content.contains("MiB") && content.contains("free") && content.contains(',')) { - return false; - } - - let parts: Vec<&str> = content.split(',').collect(); - if parts.len() != 2 { - return false; - } - - parts.iter().all(|part| { - let part = part.trim(); - // Each part should start with a number and contain "MiB" - part.split_whitespace() - .next() - .map_or(false, |first_word| first_word.parse::().is_ok()) - && part.contains("MiB") - }) -} - -fn parse_memory_value(mem_str: &str) -> ServerResult { - // Handle formats like "8000 MiB" or "7721 MiB free" - let parts: Vec<&str> = mem_str.split_whitespace().collect(); - if parts.is_empty() { - return Err(LlamacppError::new( - ErrorCode::DeviceListParseFailed, - format!("empty memory value: {}", mem_str), - None, - ) - .into()); - } - - // Take the first part which should be the number - let number_str = parts[0]; - number_str.parse::().map_err(|_| { - LlamacppError::new( - ErrorCode::DeviceListParseFailed, - format!("Could not parse memory value: '{}'", number_str), - None, - ) - .into() - }) -} - -// crypto -#[tauri::command] -pub fn generate_api_key(model_id: String, api_secret: String) -> Result { - let mut mac = HmacSha256::new_from_slice(api_secret.as_bytes()) - .map_err(|e| format!("Invalid key length: {}", e))?; - 
mac.update(model_id.as_bytes()); - let result = mac.finalize(); - let code_bytes = result.into_bytes(); - let hash = general_purpose::STANDARD.encode(code_bytes); - Ok(hash) -} - -// process aliveness check -#[tauri::command] -pub async fn is_process_running(pid: i32, state: State<'_, AppState>) -> Result { - let mut system = System::new(); - system.refresh_processes(ProcessesToUpdate::All, true); - let process_pid = Pid::from(pid as usize); - let alive = system.process(process_pid).is_some(); - - if !alive { - let mut map = state.llama_server_process.lock().await; - map.remove(&pid); - } - - Ok(alive) -} - -// check port availability -fn is_port_available(port: u16) -> bool { - std::net::TcpListener::bind(("127.0.0.1", port)).is_ok() -} - -#[tauri::command] -pub async fn get_random_port(state: State<'_, AppState>) -> Result { - const MAX_ATTEMPTS: u32 = 20000; - let mut attempts = 0; - let mut rng = StdRng::from_entropy(); - - // Get all active ports from sessions - let map = state.llama_server_process.lock().await; - - let used_ports: HashSet = map - .values() - .filter_map(|session| { - // Convert valid ports to u16 (filter out placeholder ports like -1) - if session.info.port > 0 && session.info.port <= u16::MAX as i32 { - Some(session.info.port as u16) - } else { - None - } - }) - .collect(); - - drop(map); // unlock early - - while attempts < MAX_ATTEMPTS { - let port = rng.gen_range(3000..4000); - - if used_ports.contains(&port) { - attempts += 1; - continue; - } - - if is_port_available(port) { - return Ok(port); - } - - attempts += 1; - } - - Err("Failed to find an available port for the model to load".into()) -} - -// find session -#[tauri::command] -pub async fn find_session_by_model( - model_id: String, - state: State<'_, AppState>, -) -> Result, String> { - let map = state.llama_server_process.lock().await; - - let session_info = map - .values() - .find(|backend_session| backend_session.info.model_id == model_id) - .map(|backend_session| 
backend_session.info.clone()); - - Ok(session_info) -} - -// get running models -#[tauri::command] -pub async fn get_loaded_models(state: State<'_, AppState>) -> Result, String> { - let map = state.llama_server_process.lock().await; - - let model_ids = map - .values() - .map(|backend_session| backend_session.info.model_id.clone()) - .collect(); - - Ok(model_ids) -} - -// tests -// -#[cfg(test)] -mod tests { - use super::*; - use std::path::PathBuf; - #[cfg(windows)] - use tempfile; - - #[test] - fn test_parse_multiple_devices() { - let output = r#"ggml_vulkan: Found 2 Vulkan devices: -ggml_vulkan: 0 = NVIDIA GeForce RTX 3090 (NVIDIA) | uma: 0 | fp16: 1 | bf16: 0 | warp size: 32 | shared memory: 49152 | int dot: 0 | matrix cores: KHR_coopmat -ggml_vulkan: 1 = AMD Radeon Graphics (RADV GFX1151) (radv) | uma: 1 | fp16: 1 | bf16: 0 | warp size: 64 | shared memory: 65536 | int dot: 0 | matrix cores: KHR_coopmat -Available devices: -Vulkan0: NVIDIA GeForce RTX 3090 (24576 MiB, 24576 MiB free) -Vulkan1: AMD Radeon Graphics (RADV GFX1151) (87722 MiB, 87722 MiB free) -"#; - - let devices = parse_device_output(output).unwrap(); - - assert_eq!(devices.len(), 2); - - // Check first device - assert_eq!(devices[0].id, "Vulkan0"); - assert_eq!(devices[0].name, "NVIDIA GeForce RTX 3090"); - assert_eq!(devices[0].mem, 24576); - assert_eq!(devices[0].free, 24576); - - // Check second device - assert_eq!(devices[1].id, "Vulkan1"); - assert_eq!(devices[1].name, "AMD Radeon Graphics (RADV GFX1151)"); - assert_eq!(devices[1].mem, 87722); - assert_eq!(devices[1].free, 87722); - } - - #[test] - fn test_parse_single_device() { - let output = r#"Available devices: -CUDA0: NVIDIA GeForce RTX 4090 (24576 MiB, 24000 MiB free)"#; - - let devices = parse_device_output(output).unwrap(); - - assert_eq!(devices.len(), 1); - assert_eq!(devices[0].id, "CUDA0"); - assert_eq!(devices[0].name, "NVIDIA GeForce RTX 4090"); - assert_eq!(devices[0].mem, 24576); - assert_eq!(devices[0].free, 24000); - } - - 
#[test] - fn test_parse_with_extra_whitespace_and_empty_lines() { - let output = r#" -Available devices: - -Vulkan0: NVIDIA GeForce RTX 3090 (24576 MiB, 24576 MiB free) - -Vulkan1: AMD Radeon Graphics (RADV GFX1151) (87722 MiB, 87722 MiB free) - -"#; - - let devices = parse_device_output(output).unwrap(); - - assert_eq!(devices.len(), 2); - assert_eq!(devices[0].id, "Vulkan0"); - assert_eq!(devices[1].id, "Vulkan1"); - } - - #[test] - fn test_parse_different_backends() { - let output = r#"Available devices: -CUDA0: NVIDIA GeForce RTX 4090 (24576 MiB, 24000 MiB free) -Vulkan0: NVIDIA GeForce RTX 3090 (24576 MiB, 24576 MiB free) -SYCL0: Intel(R) Arc(TM) A750 Graphics (8000 MiB, 7721 MiB free)"#; - - let devices = parse_device_output(output).unwrap(); - - assert_eq!(devices.len(), 3); - - assert_eq!(devices[0].id, "CUDA0"); - assert_eq!(devices[0].name, "NVIDIA GeForce RTX 4090"); - - assert_eq!(devices[1].id, "Vulkan0"); - assert_eq!(devices[1].name, "NVIDIA GeForce RTX 3090"); - - assert_eq!(devices[2].id, "SYCL0"); - assert_eq!(devices[2].name, "Intel(R) Arc(TM) A750 Graphics"); - assert_eq!(devices[2].mem, 8000); - assert_eq!(devices[2].free, 7721); - } - - #[test] - fn test_parse_complex_gpu_names() { - let output = r#"Available devices: -Vulkan0: Intel(R) Arc(tm) A750 Graphics (DG2) (8128 MiB, 8128 MiB free) -Vulkan1: AMD Radeon RX 7900 XTX (Navi 31) [RDNA 3] (24576 MiB, 24000 MiB free)"#; - - let devices = parse_device_output(output).unwrap(); - - assert_eq!(devices.len(), 2); - - assert_eq!(devices[0].id, "Vulkan0"); - assert_eq!(devices[0].name, "Intel(R) Arc(tm) A750 Graphics (DG2)"); - assert_eq!(devices[0].mem, 8128); - assert_eq!(devices[0].free, 8128); - - assert_eq!(devices[1].id, "Vulkan1"); - assert_eq!(devices[1].name, "AMD Radeon RX 7900 XTX (Navi 31) [RDNA 3]"); - assert_eq!(devices[1].mem, 24576); - assert_eq!(devices[1].free, 24000); - } - - #[test] - fn test_parse_no_devices() { - let output = r#"Available devices:"#; - - let devices = 
parse_device_output(output).unwrap(); - assert_eq!(devices.len(), 0); - } - - #[test] - fn test_parse_missing_header() { - let output = r#"Vulkan0: NVIDIA GeForce RTX 3090 (24576 MiB, 24576 MiB free)"#; - - let result = parse_device_output(output); - assert!(result.is_err()); - assert!(result - .unwrap_err() - .to_string() - .contains("Could not find 'Available devices:' section")); - } - - #[test] - fn test_parse_malformed_device_line() { - let output = r#"Available devices: -Vulkan0: NVIDIA GeForce RTX 3090 (24576 MiB, 24576 MiB free) -Invalid line without colon -Vulkan1: AMD Radeon Graphics (RADV GFX1151) (87722 MiB, 87722 MiB free)"#; - - let devices = parse_device_output(output).unwrap(); - - // Should skip the malformed line and parse the valid ones - assert_eq!(devices.len(), 2); - assert_eq!(devices[0].id, "Vulkan0"); - assert_eq!(devices[1].id, "Vulkan1"); - } - - #[test] - fn test_parse_device_line_individual() { - // Test the individual line parser - let line = "Vulkan0: NVIDIA GeForce RTX 3090 (24576 MiB, 24576 MiB free)"; - let device = parse_device_line(line).unwrap().unwrap(); - - assert_eq!(device.id, "Vulkan0"); - assert_eq!(device.name, "NVIDIA GeForce RTX 3090"); - assert_eq!(device.mem, 24576); - assert_eq!(device.free, 24576); - } - - #[test] - fn test_memory_pattern_detection() { - assert!(is_memory_pattern("24576 MiB, 24576 MiB free")); - assert!(is_memory_pattern("8000 MiB, 7721 MiB free")); - assert!(!is_memory_pattern("just some text")); - assert!(!is_memory_pattern("24576 MiB")); - assert!(!is_memory_pattern("24576, 24576")); - } - - #[test] - fn test_parse_memory_value() { - assert_eq!(parse_memory_value("24576 MiB").unwrap(), 24576); - assert_eq!(parse_memory_value("7721 MiB free").unwrap(), 7721); - assert_eq!(parse_memory_value("8000").unwrap(), 8000); - - assert!(parse_memory_value("").is_err()); - assert!(parse_memory_value("not_a_number MiB").is_err()); - } - - #[test] - fn test_find_memory_pattern() { - let text = "NVIDIA GeForce 
RTX 3090 (24576 MiB, 24576 MiB free)"; - let result = find_memory_pattern(text); - assert!(result.is_some()); - let (_start, content) = result.unwrap(); - assert_eq!(content, "24576 MiB, 24576 MiB free"); - - // Test with multiple parentheses - let text = "Intel(R) Arc(tm) A750 Graphics (DG2) (8128 MiB, 8128 MiB free)"; - let result = find_memory_pattern(text); - assert!(result.is_some()); - let (_start, content) = result.unwrap(); - assert_eq!(content, "8128 MiB, 8128 MiB free"); - } - #[test] - fn test_path_with_uncommon_dir_names() { - const UNCOMMON_DIR_NAME: &str = "тест-你好-éàç-🚀"; - #[cfg(windows)] - { - let dir = tempfile::tempdir().expect("Failed to create temp dir"); - let long_path = dir.path().join(UNCOMMON_DIR_NAME); - std::fs::create_dir(&long_path) - .expect("Failed to create test directory with non-ASCII name"); - let short_path = get_short_path(&long_path); - assert!( - short_path.is_ascii(), - "The resulting short path must be composed of only ASCII characters. Got: {}", - short_path - ); - assert!( - PathBuf::from(&short_path).exists(), - "The returned short path must exist on the filesystem" - ); - assert_ne!( - short_path, - long_path.to_str().unwrap(), - "Short path should not be the same as the long path" - ); - } - #[cfg(not(windows))] - { - // On Unix, paths are typically UTF-8 and there's no "short path" concept. 
- let long_path_str = format!("/tmp/{}", UNCOMMON_DIR_NAME); - let path_buf = PathBuf::from(&long_path_str); - let displayed_path = path_buf.display().to_string(); - assert_eq!( - displayed_path, long_path_str, - "Path with non-ASCII characters should be preserved exactly on non-Windows platforms" - ); - } - } -} diff --git a/src-tauri/src/core/utils/extensions/mod.rs b/src-tauri/src/core/utils/extensions/mod.rs deleted file mode 100644 index 790471f22..000000000 --- a/src-tauri/src/core/utils/extensions/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod inference_llamacpp_extension; diff --git a/src-tauri/src/core/utils/mod.rs b/src-tauri/src/core/utils/mod.rs deleted file mode 100644 index 4e0149e20..000000000 --- a/src-tauri/src/core/utils/mod.rs +++ /dev/null @@ -1,213 +0,0 @@ -pub mod download; -pub mod extensions; - -use std::fs; -use std::path::{Component, Path, PathBuf}; -use tauri::Runtime; - -use super::cmd::get_jan_data_folder_path; -#[cfg(windows)] -use std::path::Prefix; - -pub const THREADS_DIR: &str = "threads"; -pub const THREADS_FILE: &str = "thread.json"; -pub const MESSAGES_FILE: &str = "messages.jsonl"; - -pub fn get_data_dir(app_handle: tauri::AppHandle) -> PathBuf { - get_jan_data_folder_path(app_handle).join(THREADS_DIR) -} - -pub fn get_thread_dir(app_handle: tauri::AppHandle, thread_id: &str) -> PathBuf { - get_data_dir(app_handle).join(thread_id) -} - -pub fn get_thread_metadata_path( - app_handle: tauri::AppHandle, - thread_id: &str, -) -> PathBuf { - get_thread_dir(app_handle, thread_id).join(THREADS_FILE) -} - -pub fn get_messages_path(app_handle: tauri::AppHandle, thread_id: &str) -> PathBuf { - get_thread_dir(app_handle, thread_id).join(MESSAGES_FILE) -} - -pub fn ensure_data_dirs(app_handle: tauri::AppHandle) -> Result<(), String> { - let data_dir = get_data_dir(app_handle.clone()); - if !data_dir.exists() { - fs::create_dir_all(&data_dir).map_err(|e| e.to_string())?; - } - Ok(()) -} - -pub fn ensure_thread_dir_exists( - app_handle: 
tauri::AppHandle, - thread_id: &str, -) -> Result<(), String> { - ensure_data_dirs(app_handle.clone())?; - let thread_dir = get_thread_dir(app_handle, thread_id); - if !thread_dir.exists() { - fs::create_dir(&thread_dir).map_err(|e| e.to_string())?; - } - Ok(()) -} - -// https://github.com/rust-lang/cargo/blob/rust-1.67.0/crates/cargo-util/src/paths.rs#L82-L107 -pub fn normalize_path(path: &Path) -> PathBuf { - let mut components = path.components().peekable(); - let mut ret = if let Some(c @ Component::Prefix(_prefix_component)) = components.peek().cloned() - { - #[cfg(windows)] - // Remove only the Verbatim prefix, but keep the drive letter (e.g., C:\) - match _prefix_component.kind() { - Prefix::VerbatimDisk(disk) => { - components.next(); // skip this prefix - // Re-add the disk prefix (e.g., C:) - let mut pb = PathBuf::new(); - pb.push(format!("{}:", disk as char)); - pb - } - Prefix::Verbatim(_) | Prefix::VerbatimUNC(_, _) => { - components.next(); // skip this prefix - PathBuf::new() - } - _ => { - components.next(); - PathBuf::from(c.as_os_str()) - } - } - #[cfg(not(windows))] - { - components.next(); // skip this prefix - PathBuf::from(c.as_os_str()) - } - } else { - PathBuf::new() - }; - - for component in components { - match component { - Component::Prefix(..) => unreachable!(), - Component::RootDir => { - ret.push(component.as_os_str()); - } - Component::CurDir => {} - Component::ParentDir => { - ret.pop(); - } - Component::Normal(c) => { - ret.push(c); - } - } - } - ret -} - -pub fn can_override_npx() -> bool { - // we need to check the CPU for the AVX2 instruction support if we are running under the MacOS - // with Intel CPU. 
We can override `npx` command with `bun` only if CPU is - // supporting AVX2, otherwise we need to use default `npx` binary - #[cfg(all(target_os = "macos", any(target_arch = "x86", target_arch = "x86_64")))] - { - if !is_x86_feature_detected!("avx2") { - log::warn!("Your CPU doesn't support AVX2 instruction, default npx binary will be used"); - return false; // we cannot override npx with bun binary - } - } - - true // by default, we can override npx with bun binary -} - -#[tauri::command] -pub fn write_yaml( - app: tauri::AppHandle, - data: serde_json::Value, - save_path: &str, -) -> Result<(), String> { - // TODO: have an internal function to check scope - let jan_data_folder = get_jan_data_folder_path(app.clone()); - let save_path = normalize_path(&jan_data_folder.join(save_path)); - if !save_path.starts_with(&jan_data_folder) { - return Err(format!( - "Error: save path {} is not under jan_data_folder {}", - save_path.to_string_lossy(), - jan_data_folder.to_string_lossy(), - )); - } - let file = fs::File::create(&save_path).map_err(|e| e.to_string())?; - let mut writer = std::io::BufWriter::new(file); - serde_yaml::to_writer(&mut writer, &data).map_err(|e| e.to_string())?; - Ok(()) -} - -#[tauri::command] -pub fn read_yaml(app: tauri::AppHandle, path: &str) -> Result { - let jan_data_folder = get_jan_data_folder_path(app.clone()); - let path = normalize_path(&jan_data_folder.join(path)); - if !path.starts_with(&jan_data_folder) { - return Err(format!( - "Error: path {} is not under jan_data_folder {}", - path.to_string_lossy(), - jan_data_folder.to_string_lossy(), - )); - } - let file = fs::File::open(&path).map_err(|e| e.to_string())?; - let reader = std::io::BufReader::new(file); - let data: serde_json::Value = serde_yaml::from_reader(reader).map_err(|e| e.to_string())?; - Ok(data) -} - -#[tauri::command] -pub fn decompress(app: tauri::AppHandle, path: &str, output_dir: &str) -> Result<(), String> { - let jan_data_folder = 
get_jan_data_folder_path(app.clone()); - let path_buf = normalize_path(&jan_data_folder.join(path)); - if !path_buf.starts_with(&jan_data_folder) { - return Err(format!( - "Error: path {} is not under jan_data_folder {}", - path_buf.to_string_lossy(), - jan_data_folder.to_string_lossy(), - )); - } - - let output_dir_buf = normalize_path(&jan_data_folder.join(output_dir)); - if !output_dir_buf.starts_with(&jan_data_folder) { - return Err(format!( - "Error: output directory {} is not under jan_data_folder {}", - output_dir_buf.to_string_lossy(), - jan_data_folder.to_string_lossy(), - )); - } - - // Ensure output directory exists - fs::create_dir_all(&output_dir_buf).map_err(|e| { - format!( - "Failed to create output directory {}: {}", - output_dir_buf.to_string_lossy(), - e - ) - })?; - - let file = fs::File::open(&path_buf).map_err(|e| e.to_string())?; - if path.ends_with(".tar.gz") { - let tar = flate2::read::GzDecoder::new(file); - let mut archive = tar::Archive::new(tar); - archive.unpack(&output_dir_buf).map_err(|e| e.to_string())?; - } else { - return Err("Unsupported file format. 
Only .tar.gz is supported.".to_string()); - } - - Ok(()) -} - -// check if a system library is available -#[tauri::command] -pub fn is_library_available(library: &str) -> bool { - match unsafe { libloading::Library::new(library) } { - Ok(_) => true, - Err(e) => { - log::info!("Library {} is not available: {}", library, e); - false - } - } -} - diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index e449fc739..63d60a571 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -1,13 +1,15 @@ mod core; -use core::utils::extensions::inference_llamacpp_extension::cleanup::cleanup_processes; use core::{ - cmd::get_jan_data_folder_path, + app::commands::get_jan_data_folder_path, + downloads::models::DownloadManagerState, + mcp::helpers::clean_up_mcp_servers, setup::{self, setup_mcp}, - state::{generate_app_token, AppState}, - utils::download::DownloadManagerState, + state::AppState, }; +use jan_utils::generate_app_token; use std::{collections::HashMap, sync::Arc}; use tauri::{Emitter, Manager, RunEvent}; +use tauri_plugin_llamacpp::cleanup_llama_processes; use tokio::sync::Mutex; #[cfg_attr(mobile, tauri::mobile_entry_point)] @@ -30,76 +32,70 @@ pub fn run() { .plugin(tauri_plugin_store::Builder::new().build()) .plugin(tauri_plugin_updater::Builder::new().build()) .plugin(tauri_plugin_shell::init()) + .plugin(tauri_plugin_llamacpp::init()) + .plugin(tauri_plugin_hardware::init()) .invoke_handler(tauri::generate_handler![ // FS commands - Deperecate soon - core::fs::join_path, - core::fs::mkdir, - core::fs::exists_sync, - core::fs::readdir_sync, - core::fs::read_file_sync, - core::fs::rm, - core::fs::file_stat, - core::fs::write_file_sync, - // App commands - core::cmd::get_app_configurations, - core::cmd::get_active_extensions, - core::cmd::get_user_home_path, - core::cmd::update_app_configuration, - core::cmd::get_jan_data_folder_path, - core::cmd::get_jan_extensions_path, - core::cmd::relaunch, - core::cmd::open_app_directory, - 
core::cmd::open_file_explorer, - core::cmd::install_extensions, - core::cmd::app_token, - core::cmd::start_server, - core::cmd::stop_server, - core::cmd::get_server_status, - core::cmd::read_logs, - core::cmd::change_app_data_folder, - core::cmd::factory_reset, + core::filesystem::commands::join_path, + core::filesystem::commands::mkdir, + core::filesystem::commands::exists_sync, + core::filesystem::commands::readdir_sync, + core::filesystem::commands::read_file_sync, + core::filesystem::commands::rm, + core::filesystem::commands::file_stat, + core::filesystem::commands::write_file_sync, + core::filesystem::commands::write_yaml, + core::filesystem::commands::read_yaml, + core::filesystem::commands::decompress, + // App configuration commands + core::app::commands::get_app_configurations, + core::app::commands::get_user_home_path, + core::app::commands::update_app_configuration, + core::app::commands::get_jan_data_folder_path, + core::app::commands::get_configuration_file_path, + core::app::commands::default_data_folder_path, + core::app::commands::change_app_data_folder, + core::app::commands::app_token, + // Extension commands + core::extensions::commands::get_jan_extensions_path, + core::extensions::commands::install_extensions, + core::extensions::commands::get_active_extensions, + // System commands + core::system::commands::relaunch, + core::system::commands::open_app_directory, + core::system::commands::open_file_explorer, + core::system::commands::factory_reset, + core::system::commands::read_logs, + core::system::commands::is_library_available, + // Server commands + core::server::commands::start_server, + core::server::commands::stop_server, + core::server::commands::get_server_status, // MCP commands - core::mcp::get_tools, - core::mcp::call_tool, - core::mcp::restart_mcp_servers, - core::mcp::get_connected_servers, - core::mcp::save_mcp_configs, - core::mcp::get_mcp_configs, - core::mcp::activate_mcp_server, - core::mcp::deactivate_mcp_server, - 
core::mcp::reset_mcp_restart_count, + core::mcp::commands::get_tools, + core::mcp::commands::call_tool, + core::mcp::commands::restart_mcp_servers, + core::mcp::commands::get_connected_servers, + core::mcp::commands::save_mcp_configs, + core::mcp::commands::get_mcp_configs, + core::mcp::commands::activate_mcp_server, + core::mcp::commands::deactivate_mcp_server, + core::mcp::commands::reset_mcp_restart_count, // Threads - core::threads::list_threads, - core::threads::create_thread, - core::threads::modify_thread, - core::threads::delete_thread, - core::threads::list_messages, - core::threads::create_message, - core::threads::modify_message, - core::threads::delete_message, - core::threads::get_thread_assistant, - core::threads::create_thread_assistant, - core::threads::modify_thread_assistant, - // generic utils - core::utils::write_yaml, - core::utils::read_yaml, - core::utils::decompress, - core::utils::is_library_available, + core::threads::commands::list_threads, + core::threads::commands::create_thread, + core::threads::commands::modify_thread, + core::threads::commands::delete_thread, + core::threads::commands::list_messages, + core::threads::commands::create_message, + core::threads::commands::modify_message, + core::threads::commands::delete_message, + core::threads::commands::get_thread_assistant, + core::threads::commands::create_thread_assistant, + core::threads::commands::modify_thread_assistant, // Download - core::utils::download::download_files, - core::utils::download::cancel_download_task, - // hardware - core::hardware::get_system_info, - core::hardware::get_system_usage, - // llama-cpp extension - core::utils::extensions::inference_llamacpp_extension::server::load_llama_model, - core::utils::extensions::inference_llamacpp_extension::server::unload_llama_model, - core::utils::extensions::inference_llamacpp_extension::server::get_devices, - core::utils::extensions::inference_llamacpp_extension::server::get_random_port, - 
core::utils::extensions::inference_llamacpp_extension::server::find_session_by_model, - core::utils::extensions::inference_llamacpp_extension::server::get_loaded_models, - core::utils::extensions::inference_llamacpp_extension::server::generate_api_key, - core::utils::extensions::inference_llamacpp_extension::server::is_process_running, + core::downloads::commands::download_files, + core::downloads::commands::cancel_download_task, ]) .manage(AppState { app_token: Some(generate_app_token()), @@ -109,7 +105,6 @@ pub fn run() { mcp_active_servers: Arc::new(Mutex::new(HashMap::new())), mcp_successfully_connected: Arc::new(Mutex::new(HashMap::new())), server_handle: Arc::new(Mutex::new(None)), - llama_server_process: Arc::new(Mutex::new(HashMap::new())), }) .setup(|app| { app.handle().plugin( @@ -140,19 +135,6 @@ pub fn run() { setup_mcp(app); Ok(()) }) - .on_window_event(|window, event| match event { - tauri::WindowEvent::CloseRequested { .. } => { - if window.label() == "main" { - window.emit("kill-mcp-servers", ()).unwrap(); - let state = window.app_handle().state::(); - - tauri::async_runtime::block_on(async { - cleanup_processes(state).await; - }); - } - } - _ => {} - }) .build(tauri::generate_context!()) .expect("error while running tauri application"); @@ -164,8 +146,6 @@ pub fn run() { let app_handle = app.clone(); tokio::task::block_in_place(|| { tauri::async_runtime::block_on(async { - let state = app_handle.state::(); - // Hide window immediately if let Some(window) = app_handle.get_webview_window("main") { let _ = window.hide(); @@ -173,7 +153,9 @@ pub fn run() { } // Quick cleanup with shorter timeout - cleanup_processes(state).await; + let state = app_handle.state::(); + let _ = clean_up_mcp_servers(state).await; + let _ = cleanup_llama_processes(app.clone()).await; }); }); } diff --git a/src-tauri/utils/Cargo.toml b/src-tauri/utils/Cargo.toml new file mode 100644 index 000000000..65f4dc8e1 --- /dev/null +++ b/src-tauri/utils/Cargo.toml @@ -0,0 +1,20 @@ 
+[package] +name = "jan-utils" +version = "0.1.0" +edition = "2021" + +[dependencies] +base64 = "0.22" +hmac = "0.12" +log = { version = "0.4", optional = true } +rand = "0.8" +reqwest = { version = "0.11", features = ["json"] } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +sha2 = "0.10" +tokio = { version = "1", features = ["process"] } +url = "2.5" + +[features] +default = [] +logging = ["log"] diff --git a/src-tauri/utils/src/cli.rs b/src-tauri/utils/src/cli.rs new file mode 100644 index 000000000..b2d03a448 --- /dev/null +++ b/src-tauri/utils/src/cli.rs @@ -0,0 +1,20 @@ +/// Extracts the value of a command line argument flag from args vector +pub fn extract_arg_value(args: &[String], flag: &str) -> String { + args.iter() + .position(|arg| arg == flag) + .and_then(|i| args.get(i + 1)) + .cloned() + .unwrap_or_default() +} + +/// Parses port from command line arguments with fallback to default (8080) +pub fn parse_port_from_args(args: &[String]) -> i32 { + let port_str = extract_arg_value(args, "--port"); + match port_str.parse() { + Ok(p) => p, + Err(_) => { + eprintln!("Invalid port value: '{}', using default 8080", port_str); + 8080 + } + } +} diff --git a/src-tauri/utils/src/config.rs b/src-tauri/utils/src/config.rs new file mode 100644 index 000000000..6466386be --- /dev/null +++ b/src-tauri/utils/src/config.rs @@ -0,0 +1,23 @@ +use serde_json::Value; + +/// Extract command, args, and environment from JSON config +pub fn extract_command_args( + config: &Value, +) -> Option<(String, Vec, serde_json::Map)> { + let obj = config.as_object()?; + let command = obj.get("command")?.as_str()?.to_string(); + let args = obj.get("args")?.as_array()?.clone(); + let envs = obj + .get("env") + .unwrap_or(&Value::Object(serde_json::Map::new())) + .as_object()? 
+ .clone(); + Some((command, args, envs)) +} + +/// Extract boolean "active" field from JSON config +pub fn extract_active_status(config: &Value) -> Option { + let obj = config.as_object()?; + let active = obj.get("active")?.as_bool()?; + Some(active) +} diff --git a/src-tauri/utils/src/crypto.rs b/src-tauri/utils/src/crypto.rs new file mode 100644 index 000000000..dcc1d541b --- /dev/null +++ b/src-tauri/utils/src/crypto.rs @@ -0,0 +1,26 @@ +use base64::{engine::general_purpose, Engine as _}; +use hmac::{Hmac, Mac}; +use rand::{distributions::Alphanumeric, Rng}; +use sha2::Sha256; + +type HmacSha256 = Hmac; + +/// Generates random app token +pub fn generate_app_token() -> String { + rand::thread_rng() + .sample_iter(&Alphanumeric) + .take(32) + .map(char::from) + .collect() +} + +/// Generate API key using HMAC-SHA256 +pub fn generate_api_key(model_id: String, api_secret: String) -> Result { + let mut mac = HmacSha256::new_from_slice(api_secret.as_bytes()) + .map_err(|e| format!("Invalid key length: {}", e))?; + mac.update(model_id.as_bytes()); + let result = mac.finalize(); + let code_bytes = result.into_bytes(); + let hash = general_purpose::STANDARD.encode(code_bytes); + Ok(hash) +} diff --git a/src-tauri/utils/src/fs.rs b/src-tauri/utils/src/fs.rs new file mode 100644 index 000000000..a54451c1a --- /dev/null +++ b/src-tauri/utils/src/fs.rs @@ -0,0 +1,35 @@ +use std::fs; +use std::io; +use std::path::PathBuf; + +/// Recursively copies directories with exclusion support +pub fn copy_dir_recursive( + src: &PathBuf, + dst: &PathBuf, + exclude_dirs: &[&str], +) -> Result<(), io::Error> { + if !dst.exists() { + fs::create_dir_all(dst)?; + } + + for entry in fs::read_dir(src)? 
{ + let entry = entry?; + let file_type = entry.file_type()?; + let src_path = entry.path(); + let dst_path = dst.join(entry.file_name()); + + if file_type.is_dir() { + // Skip excluded directories + if let Some(dir_name) = entry.file_name().to_str() { + if exclude_dirs.contains(&dir_name) { + continue; + } + } + copy_dir_recursive(&src_path, &dst_path, exclude_dirs)?; + } else { + fs::copy(&src_path, &dst_path)?; + } + } + + Ok(()) +} diff --git a/src-tauri/utils/src/http.rs b/src-tauri/utils/src/http.rs new file mode 100644 index 000000000..3fd4007ec --- /dev/null +++ b/src-tauri/utils/src/http.rs @@ -0,0 +1,50 @@ +/// Checks if header name is a CORS-related header +pub fn is_cors_header(header_name: &str) -> bool { + let header_lower = header_name.to_lowercase(); + header_lower.starts_with("access-control-") +} + +/// Validates if host is in trusted hosts list +pub fn is_valid_host(host: &str, trusted_hosts: &[Vec]) -> bool { + if host.is_empty() { + return false; + } + + let host_without_port = if host.starts_with('[') { + host.split(']') + .next() + .unwrap_or(host) + .trim_start_matches('[') + } else { + host.split(':').next().unwrap_or(host) + }; + let default_valid_hosts = ["localhost", "127.0.0.1", "0.0.0.0"]; + + if default_valid_hosts + .iter() + .any(|&valid| host_without_port.to_lowercase() == valid.to_lowercase()) + { + return true; + } + + trusted_hosts.iter().flatten().any(|valid| { + let host_lower = host.to_lowercase(); + let valid_lower = valid.to_lowercase(); + + if host_lower == valid_lower { + return true; + } + + let valid_without_port = if valid.starts_with('[') { + valid + .split(']') + .next() + .unwrap_or(valid) + .trim_start_matches('[') + } else { + valid.split(':').next().unwrap_or(valid) + }; + + host_without_port.to_lowercase() == valid_without_port.to_lowercase() + }) +} diff --git a/src-tauri/utils/src/lib.rs b/src-tauri/utils/src/lib.rs new file mode 100644 index 000000000..36efcc0c6 --- /dev/null +++ b/src-tauri/utils/src/lib.rs 
@@ -0,0 +1,22 @@ +pub mod cli; +pub mod config; +pub mod crypto; +pub mod fs; +pub mod http; +pub mod math; +pub mod network; +pub mod path; +pub mod string; +pub mod system; + +// Re-export commonly used functions +pub use cli::*; +pub use config::*; +pub use crypto::*; +pub use fs::*; +pub use http::*; +pub use math::*; +pub use network::*; +pub use path::*; +pub use string::*; +pub use system::*; diff --git a/src-tauri/utils/src/math.rs b/src-tauri/utils/src/math.rs new file mode 100644 index 000000000..cdda4d058 --- /dev/null +++ b/src-tauri/utils/src/math.rs @@ -0,0 +1,50 @@ +const MCP_BASE_RESTART_DELAY_MS: u64 = 1000; // Start with 1 second +const MCP_MAX_RESTART_DELAY_MS: u64 = 30000; // Cap at 30 seconds +const MCP_BACKOFF_MULTIPLIER: f64 = 2.0; // Double the delay each time + +/// Calculate exponential backoff delay with jitter +/// +/// # Arguments +/// * `attempt` - The current restart attempt number (1-based) +/// +/// # Returns +/// * `u64` - Delay in milliseconds, capped at MCP_MAX_RESTART_DELAY_MS +pub fn calculate_exponential_backoff_delay(attempt: u32) -> u64 { + use std::cmp; + + // Calculate base exponential delay: base_delay * multiplier^(attempt-1) + let exponential_delay = + (MCP_BASE_RESTART_DELAY_MS as f64) * MCP_BACKOFF_MULTIPLIER.powi((attempt - 1) as i32); + + // Cap the delay at maximum + let capped_delay = cmp::min(exponential_delay as u64, MCP_MAX_RESTART_DELAY_MS); + + // Add jitter (±25% randomness) to prevent thundering herd + let jitter_range = (capped_delay as f64 * 0.25) as u64; + let jitter = if jitter_range > 0 { + use std::collections::hash_map::DefaultHasher; + use std::hash::{Hash, Hasher}; + + // Use attempt number as seed for deterministic but varied jitter + let mut hasher = DefaultHasher::new(); + attempt.hash(&mut hasher); + let hash = hasher.finish(); + + // Convert hash to jitter value in range [-jitter_range, +jitter_range] + let jitter_offset = (hash % (jitter_range * 2)) as i64 - jitter_range as i64; + 
jitter_offset + } else { + 0 + }; + + // Apply jitter while ensuring delay stays positive and within bounds + let final_delay = cmp::max( + 100, // Minimum 100ms delay + cmp::min( + MCP_MAX_RESTART_DELAY_MS, + (capped_delay as i64 + jitter) as u64, + ), + ); + + final_delay +} diff --git a/src-tauri/utils/src/network.rs b/src-tauri/utils/src/network.rs new file mode 100644 index 000000000..05704c12e --- /dev/null +++ b/src-tauri/utils/src/network.rs @@ -0,0 +1,148 @@ +use rand::{rngs::StdRng, Rng, SeedableRng}; +use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; +use std::collections::{HashMap, HashSet}; +use url::Url; + +#[derive(serde::Deserialize, Clone, Debug)] +pub struct ProxyConfig { + pub url: String, + pub username: Option, + pub password: Option, + pub no_proxy: Option>, // List of domains to bypass proxy + pub ignore_ssl: Option, // Ignore SSL certificate verification +} + +/// Check if a port is available for binding +pub fn is_port_available(port: u16) -> bool { + std::net::TcpListener::bind(("127.0.0.1", port)).is_ok() +} + +/// Generate a random port that's not in the used_ports set and is available +pub fn generate_random_port(used_ports: &HashSet) -> Result { + const MAX_ATTEMPTS: u32 = 20000; + let mut attempts = 0; + let mut rng = StdRng::from_entropy(); + + while attempts < MAX_ATTEMPTS { + let port = rng.gen_range(3000..4000); + + if used_ports.contains(&port) { + attempts += 1; + continue; + } + + if is_port_available(port) { + return Ok(port); + } + + attempts += 1; + } + + Err("Failed to find an available port for the model to load".into()) +} + +/// Validates proxy configuration including URL format, scheme, authentication, and no_proxy entries +pub fn validate_proxy_config(config: &ProxyConfig) -> Result<(), String> { + // Validate proxy URL format + if let Err(e) = Url::parse(&config.url) { + return Err(format!("Invalid proxy URL '{}': {}", config.url, e)); + } + + // Check if proxy URL has valid scheme + let url = 
Url::parse(&config.url).unwrap(); // Safe to unwrap as we just validated it + match url.scheme() { + "http" | "https" | "socks4" | "socks5" => {} + scheme => return Err(format!("Unsupported proxy scheme: {}", scheme)), + } + + // Validate authentication credentials + if config.username.is_some() && config.password.is_none() { + return Err("Username provided without password".to_string()); + } + + if config.password.is_some() && config.username.is_none() { + return Err("Password provided without username".to_string()); + } + + // Validate no_proxy entries + if let Some(no_proxy) = &config.no_proxy { + for entry in no_proxy { + if entry.is_empty() { + return Err("Empty no_proxy entry".to_string()); + } + // Basic validation for wildcard patterns + if entry.starts_with("*.") && entry.len() < 3 { + return Err(format!("Invalid wildcard pattern: {}", entry)); + } + } + } + + // SSL verification settings are all optional booleans, no validation needed + + Ok(()) +} + +/// Checks if URL should bypass proxy based on no_proxy patterns (supports wildcards) +pub fn should_bypass_proxy(url: &str, no_proxy: &[String]) -> bool { + if no_proxy.is_empty() { + return false; + } + + // Parse the URL to get the host + let parsed_url = match Url::parse(url) { + Ok(u) => u, + Err(_) => return false, + }; + + let host = match parsed_url.host_str() { + Some(h) => h, + None => return false, + }; + + // Check if host matches any no_proxy entry + for entry in no_proxy { + if entry == "*" { + return true; + } + + // Simple wildcard matching + if entry.starts_with("*.") { + let domain = &entry[2..]; + if host.ends_with(domain) { + return true; + } + } else if host == entry { + return true; + } + } + + false +} + +/// Creates reqwest::Proxy from ProxyConfig with authentication +pub fn create_proxy_from_config(config: &ProxyConfig) -> Result { + // Validate the configuration first + validate_proxy_config(config)?; + + let mut proxy = reqwest::Proxy::all(&config.url).map_err(|e| format!("Error: 
{}", e))?; + + // Add authentication if provided + if let (Some(username), Some(password)) = (&config.username, &config.password) { + proxy = proxy.basic_auth(username, password); + } + + Ok(proxy) +} + +/// Converts HashMap to reqwest HeaderMap +pub fn convert_headers( + headers: &HashMap, +) -> Result> { + let mut header_map = HeaderMap::new(); + for (k, v) in headers { + let key = HeaderName::from_bytes(k.as_bytes())?; + let value = HeaderValue::from_str(v)?; + header_map.insert(key, value); + } + Ok(header_map) +} diff --git a/src-tauri/utils/src/path.rs b/src-tauri/utils/src/path.rs new file mode 100644 index 000000000..41918edf0 --- /dev/null +++ b/src-tauri/utils/src/path.rs @@ -0,0 +1,76 @@ +#[cfg(windows)] +use std::path::Prefix; +use std::path::{Component, Path, PathBuf}; + +/// Normalizes file paths by handling path components, prefixes, and resolving relative paths +/// Based on: https://github.com/rust-lang/cargo/blob/rust-1.67.0/crates/cargo-util/src/paths.rs#L82-L107 +pub fn normalize_path(path: &Path) -> PathBuf { + let mut components = path.components().peekable(); + let mut ret = if let Some(c @ Component::Prefix(_prefix_component)) = components.peek().cloned() + { + #[cfg(windows)] + // Remove only the Verbatim prefix, but keep the drive letter (e.g., C:\) + match _prefix_component.kind() { + Prefix::VerbatimDisk(disk) => { + components.next(); // skip this prefix + // Re-add the disk prefix (e.g., C:) + let mut pb = PathBuf::new(); + pb.push(format!("{}:", disk as char)); + pb + } + Prefix::Verbatim(_) | Prefix::VerbatimUNC(_, _) => { + components.next(); // skip this prefix + PathBuf::new() + } + _ => { + components.next(); + PathBuf::from(c.as_os_str()) + } + } + #[cfg(not(windows))] + { + components.next(); // skip this prefix + PathBuf::from(c.as_os_str()) + } + } else { + PathBuf::new() + }; + + for component in components { + match component { + Component::Prefix(..) 
=> unreachable!(), + Component::RootDir => { + ret.push(component.as_os_str()); + } + Component::CurDir => {} + Component::ParentDir => { + ret.pop(); + } + Component::Normal(c) => { + ret.push(c); + } + } + } + ret +} + +/// Removes file:/ and file:\ prefixes from file paths +pub fn normalize_file_path(path: &str) -> String { + path.replace("file:/", "").replace("file:\\", "") +} + +/// Removes prefix from path string with proper formatting +pub fn remove_prefix(path: &str, prefix: &str) -> String { + if !prefix.is_empty() && path.starts_with(prefix) { + let result = path[prefix.len()..].to_string(); + if result.is_empty() { + "/".to_string() + } else if result.starts_with('/') { + result + } else { + format!("/{}", result) + } + } else { + path.to_string() + } +} diff --git a/src-tauri/utils/src/string.rs b/src-tauri/utils/src/string.rs new file mode 100644 index 000000000..b71dbaaff --- /dev/null +++ b/src-tauri/utils/src/string.rs @@ -0,0 +1,75 @@ +/// Parses 16-byte array to UUID string format +pub fn parse_uuid(bytes: &[u8; 16]) -> String { + // UUID format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + // 4-2-2-2-6 bytes + format!( + "{:02x}{:02x}{:02x}{:02x}-{:02x}{:02x}-{:02x}{:02x}-{:02x}{:02x}-{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}", + bytes[0], bytes[1], bytes[2], bytes[3], + bytes[4], bytes[5], + bytes[6], bytes[7], + bytes[8], bytes[9], + bytes[10], bytes[11], bytes[12], bytes[13], bytes[14], bytes[15] + ) +} + +/// Safely converts C string buffer to Rust String +pub fn parse_c_string(buf: &[i8]) -> String { + let bytes: Vec = buf + .iter() + .take_while(|&&c| c != 0) + .map(|&c| c as u8) + .collect(); + String::from_utf8_lossy(&bytes).into_owned() +} + +/// Formats any Display error to "Error: {}" string +pub fn err_to_string(e: E) -> String { + format!("Error: {}", e) +} + +/// Finds memory patterns in text using parentheses parsing +pub fn find_memory_pattern(text: &str) -> Option<(usize, &str)> { + // Find the last parenthesis that contains the memory 
pattern + let mut last_match = None; + let mut chars = text.char_indices().peekable(); + + while let Some((start_idx, ch)) = chars.next() { + if ch == '(' { + // Find the closing parenthesis + let remaining = &text[start_idx + 1..]; + if let Some(close_pos) = remaining.find(')') { + let content = &remaining[..close_pos]; + + // Check if this looks like memory info + if is_memory_pattern(content) { + last_match = Some((start_idx, content)); + } + } + } + } + + last_match +} + +/// Validates if content matches memory pattern format +pub fn is_memory_pattern(content: &str) -> bool { + // Check if content matches pattern like "8128 MiB, 8128 MiB free" + // Must contain: numbers, "MiB", comma, "free" + if !(content.contains("MiB") && content.contains("free") && content.contains(',')) { + return false; + } + + let parts: Vec<&str> = content.split(',').collect(); + if parts.len() != 2 { + return false; + } + + parts.iter().all(|part| { + let part = part.trim(); + // Each part should start with a number and contain "MiB" + part.split_whitespace() + .next() + .map_or(false, |first_word| first_word.parse::().is_ok()) + && part.contains("MiB") + }) +} diff --git a/src-tauri/utils/src/system.rs b/src-tauri/utils/src/system.rs new file mode 100644 index 000000000..d4ebc79af --- /dev/null +++ b/src-tauri/utils/src/system.rs @@ -0,0 +1,72 @@ +/// Checks AVX2 CPU support for npx override with bun binary +pub fn can_override_npx() -> bool { + // We need to check the CPU for the AVX2 instruction support if we are running under MacOS + // with Intel CPU. 
We can override `npx` command with `bun` only if CPU is + // supporting AVX2, otherwise we need to use default `npx` binary + #[cfg(all(target_os = "macos", any(target_arch = "x86", target_arch = "x86_64")))] + { + if !is_x86_feature_detected!("avx2") { + #[cfg(feature = "logging")] + log::warn!( + "Your CPU doesn't support AVX2 instruction, default npx binary will be used" + ); + return false; // we cannot override npx with bun binary + } + } + + true // by default, we can override npx with bun binary +} + +/// Setup library paths for different operating systems +pub fn setup_library_path(library_path: Option<&str>, command: &mut tokio::process::Command) { + if let Some(lib_path) = library_path { + if cfg!(target_os = "linux") { + let new_lib_path = match std::env::var("LD_LIBRARY_PATH") { + Ok(path) => format!("{}:{}", path, lib_path), + Err(_) => lib_path.to_string(), + }; + command.env("LD_LIBRARY_PATH", new_lib_path); + } else if cfg!(target_os = "windows") { + let new_path = match std::env::var("PATH") { + Ok(path) => format!("{};{}", path, lib_path), + Err(_) => lib_path.to_string(), + }; + command.env("PATH", new_path); + + // Normalize the path by removing UNC prefix if present + let normalized_path = lib_path.trim_start_matches(r"\\?\").to_string(); + #[cfg(feature = "logging")] + log::info!("Library path:\n{}", &normalized_path); + + // Only set current_dir if the normalized path exists and is a directory + let path = std::path::Path::new(&normalized_path); + if path.exists() && path.is_dir() { + command.current_dir(&normalized_path); + } else { + #[cfg(feature = "logging")] + log::warn!( + "Library path '{}' does not exist or is not a directory", + normalized_path + ); + } + } else { + #[cfg(feature = "logging")] + log::warn!("Library path setting is not supported on this OS"); + } + } +} + +/// Setup Windows-specific process creation flags +pub fn setup_windows_process_flags(command: &mut tokio::process::Command) { + #[cfg(all(windows, target_arch = 
"x86_64"))] + { + use std::os::windows::process::CommandExt; + const CREATE_NO_WINDOW: u32 = 0x0800_0000; + const CREATE_NEW_PROCESS_GROUP: u32 = 0x0000_0200; + command.creation_flags(CREATE_NO_WINDOW | CREATE_NEW_PROCESS_GROUP); + } + #[cfg(not(all(windows, target_arch = "x86_64")))] + { + let _ = command; // Silence unused parameter warning on non-Windows platforms + } +} diff --git a/testRunner.js b/testRunner.js deleted file mode 100644 index 1067f05a3..000000000 --- a/testRunner.js +++ /dev/null @@ -1,19 +0,0 @@ -const jestRunner = require('jest-runner') - -class EmptyTestFileRunner extends jestRunner.default { - async runTests(tests, watcher, onStart, onResult, onFailure, options) { - const nonEmptyTests = tests.filter( - (test) => test.context.hasteFS.getSize(test.path) > 0 - ) - return super.runTests( - nonEmptyTests, - watcher, - onStart, - onResult, - onFailure, - options - ) - } -} - -module.exports = EmptyTestFileRunner diff --git a/web-app/src/containers/ChatInput.tsx b/web-app/src/containers/ChatInput.tsx index a2f4bb013..5268834b2 100644 --- a/web-app/src/containers/ChatInput.tsx +++ b/web-app/src/containers/ChatInput.tsx @@ -371,7 +371,9 @@ const ChatInput = ({ model, className, initialMessage }: ChatInputProps) => { setRows(Math.min(newRows, maxRows)) }} onKeyDown={(e) => { - if (e.key === 'Enter' && !e.shiftKey && prompt.trim()) { + // e.keyCode 229 is for IME input with Safari + const isComposing = e.nativeEvent.isComposing || e.keyCode === 229; + if (e.key === 'Enter' && !e.shiftKey && prompt.trim() && !isComposing) { e.preventDefault() // Submit the message when Enter is pressed without Shift handleSendMesage(prompt) diff --git a/web-app/src/containers/dialogs/LoadModelErrorDialog.tsx b/web-app/src/containers/dialogs/LoadModelErrorDialog.tsx index 136f13853..410305c46 100644 --- a/web-app/src/containers/dialogs/LoadModelErrorDialog.tsx +++ b/web-app/src/containers/dialogs/LoadModelErrorDialog.tsx @@ -52,6 +52,16 @@ export default function 
LoadModelErrorDialog() { return copyText } + if (typeof error === 'object') { + const errorObj = error as { + code?: string + message: string + details?: string + } + + return errorObj.message + } + return JSON.stringify(error) } diff --git a/web-app/src/hooks/__tests__/useAppUpdater.test.ts b/web-app/src/hooks/__tests__/useAppUpdater.test.ts index 250c37fed..2c736f0f3 100644 --- a/web-app/src/hooks/__tests__/useAppUpdater.test.ts +++ b/web-app/src/hooks/__tests__/useAppUpdater.test.ts @@ -48,6 +48,12 @@ Object.defineProperty(window, 'core', { writable: true, }) +// Mock global AUTO_UPDATER_DISABLED +Object.defineProperty(global, 'AUTO_UPDATER_DISABLED', { + value: false, + writable: true, +}) + import { isDev } from '@/lib/utils' import { check } from '@tauri-apps/plugin-updater' import { events } from '@janhq/core' @@ -251,11 +257,14 @@ describe('useAppUpdater', () => { downloadAndInstall: mockDownloadAndInstall, } + // Mock check to return the update + mockCheck.mockResolvedValue(mockUpdate) + const { result } = renderHook(() => useAppUpdater()) - // Set update info first - act(() => { - result.current.updateState.updateInfo = mockUpdate + // Set update info first by calling checkForUpdate + await act(async () => { + await result.current.checkForUpdate() }) // Mock the download and install process @@ -296,11 +305,14 @@ describe('useAppUpdater', () => { downloadAndInstall: mockDownloadAndInstall, } + // Mock check to return the update + mockCheck.mockResolvedValue(mockUpdate) + const { result } = renderHook(() => useAppUpdater()) - // Set update info first - act(() => { - result.current.updateState.updateInfo = mockUpdate + // Set update info first by calling checkForUpdate + await act(async () => { + await result.current.checkForUpdate() }) mockDownloadAndInstall.mockRejectedValue(new Error('Download failed')) @@ -338,11 +350,14 @@ describe('useAppUpdater', () => { downloadAndInstall: mockDownloadAndInstall, } + // Mock check to return the update + 
mockCheck.mockResolvedValue(mockUpdate) + const { result } = renderHook(() => useAppUpdater()) - // Set update info first - act(() => { - result.current.updateState.updateInfo = mockUpdate + // Set update info first by calling checkForUpdate + await act(async () => { + await result.current.checkForUpdate() }) mockDownloadAndInstall.mockImplementation(async (progressCallback) => { diff --git a/web-app/src/hooks/__tests__/useModelSources.test.ts b/web-app/src/hooks/__tests__/useModelSources.test.ts index a14a8107f..41e5985a8 100644 --- a/web-app/src/hooks/__tests__/useModelSources.test.ts +++ b/web-app/src/hooks/__tests__/useModelSources.test.ts @@ -25,6 +25,11 @@ vi.mock('@/services/models', () => ({ fetchModelCatalog: vi.fn(), })) +// Mock the sanitizeModelId function +vi.mock('@/lib/utils', () => ({ + sanitizeModelId: vi.fn((id: string) => id), +})) + describe('useModelSources', () => { let mockFetchModelCatalog: any @@ -49,7 +54,6 @@ describe('useModelSources', () => { expect(result.current.error).toBe(null) expect(result.current.loading).toBe(false) expect(typeof result.current.fetchSources).toBe('function') - expect(typeof result.current.addSource).toBe('function') }) describe('fetchSources', () => { @@ -57,15 +61,19 @@ describe('useModelSources', () => { const mockSources: CatalogModel[] = [ { model_name: 'model-1', - provider: 'provider-1', description: 'First model', - version: '1.0.0', + developer: 'provider-1', + downloads: 100, + num_quants: 1, + quants: [{ model_id: 'model-1-q4', path: '/path/1', file_size: '1GB' }], }, { model_name: 'model-2', - provider: 'provider-2', description: 'Second model', - version: '2.0.0', + developer: 'provider-2', + downloads: 200, + num_quants: 1, + quants: [{ model_id: 'model-2-q4', path: '/path/2', file_size: '2GB' }], }, ] @@ -102,18 +110,22 @@ describe('useModelSources', () => { const existingSources: CatalogModel[] = [ { model_name: 'existing-model', - provider: 'existing-provider', description: 'Existing model', - 
version: '1.0.0', + developer: 'existing-provider', + downloads: 50, + num_quants: 1, + quants: [{ model_id: 'existing-model-q4', path: '/path/existing', file_size: '1GB' }], }, ] const newSources: CatalogModel[] = [ { model_name: 'new-model', - provider: 'new-provider', description: 'New model', - version: '2.0.0', + developer: 'new-provider', + downloads: 150, + num_quants: 1, + quants: [{ model_id: 'new-model-q4', path: '/path/new', file_size: '2GB' }], }, ] @@ -139,24 +151,30 @@ describe('useModelSources', () => { const existingSources: CatalogModel[] = [ { model_name: 'duplicate-model', - provider: 'old-provider', description: 'Old version', - version: '1.0.0', + developer: 'old-provider', + downloads: 100, + num_quants: 1, + quants: [{ model_id: 'duplicate-model-q4', path: '/path/old', file_size: '1GB' }], }, { model_name: 'unique-model', - provider: 'provider', description: 'Unique model', - version: '1.0.0', + developer: 'provider', + downloads: 75, + num_quants: 1, + quants: [{ model_id: 'unique-model-q4', path: '/path/unique', file_size: '1GB' }], }, ] const newSources: CatalogModel[] = [ { model_name: 'duplicate-model', - provider: 'new-provider', description: 'New version', - version: '2.0.0', + developer: 'new-provider', + downloads: 200, + num_quants: 1, + quants: [{ model_id: 'duplicate-model-q4-new', path: '/path/new', file_size: '2GB' }], }, ] @@ -208,9 +226,11 @@ describe('useModelSources', () => { const mockSources: CatalogModel[] = [ { model_name: 'model-1', - provider: 'provider-1', description: 'Model 1', - version: '1.0.0', + developer: 'provider-1', + downloads: 100, + num_quants: 1, + quants: [{ model_id: 'model-1-q4', path: '/path/1', file_size: '1GB' }], }, ] @@ -225,153 +245,6 @@ describe('useModelSources', () => { }) }) - describe('addSource', () => { - it('should add a new source to the store', () => { - const { result } = renderHook(() => useModelSources()) - - const testModel: CatalogModel = { - model_name: 'test-model', - 
description: 'Test model description', - developer: 'test-developer', - downloads: 100, - num_quants: 2, - quants: [ - { - model_id: 'test-model-q4', - path: 'https://example.com/test-model-q4.gguf', - file_size: '2.0 GB', - }, - ], - created_at: '2023-01-01T00:00:00Z', - } - - act(() => { - result.current.addSource(testModel) - }) - - expect(result.current.sources).toHaveLength(1) - expect(result.current.sources[0]).toEqual(testModel) - }) - - it('should replace existing source with same model_name', () => { - const { result } = renderHook(() => useModelSources()) - - const originalModel: CatalogModel = { - model_name: 'duplicate-model', - description: 'Original description', - developer: 'original-developer', - downloads: 50, - num_quants: 1, - quants: [], - created_at: '2023-01-01T00:00:00Z', - } - - const updatedModel: CatalogModel = { - model_name: 'duplicate-model', - description: 'Updated description', - developer: 'updated-developer', - downloads: 150, - num_quants: 2, - quants: [ - { - model_id: 'duplicate-model-q4', - path: 'https://example.com/duplicate-model-q4.gguf', - file_size: '3.0 GB', - }, - ], - created_at: '2023-02-01T00:00:00Z', - } - - act(() => { - result.current.addSource(originalModel) - }) - - expect(result.current.sources).toHaveLength(1) - - act(() => { - result.current.addSource(updatedModel) - }) - - expect(result.current.sources).toHaveLength(1) - expect(result.current.sources[0]).toEqual(updatedModel) - }) - - it('should handle multiple different sources', () => { - const { result } = renderHook(() => useModelSources()) - - const model1: CatalogModel = { - model_name: 'model-1', - description: 'First model', - developer: 'developer-1', - downloads: 100, - num_quants: 1, - quants: [], - created_at: '2023-01-01T00:00:00Z', - } - - const model2: CatalogModel = { - model_name: 'model-2', - description: 'Second model', - developer: 'developer-2', - downloads: 200, - num_quants: 1, - quants: [], - created_at: '2023-01-02T00:00:00Z', - } - 
- act(() => { - result.current.addSource(model1) - }) - - act(() => { - result.current.addSource(model2) - }) - - expect(result.current.sources).toHaveLength(2) - expect(result.current.sources).toContainEqual(model1) - expect(result.current.sources).toContainEqual(model2) - }) - - it('should handle CatalogModel with complete quants data', () => { - const { result } = renderHook(() => useModelSources()) - - const modelWithQuants: CatalogModel = { - model_name: 'model-with-quants', - description: 'Model with quantizations', - developer: 'quant-developer', - downloads: 500, - num_quants: 3, - quants: [ - { - model_id: 'model-q4_k_m', - path: 'https://example.com/model-q4_k_m.gguf', - file_size: '2.0 GB', - }, - { - model_id: 'model-q8_0', - path: 'https://example.com/model-q8_0.gguf', - file_size: '4.0 GB', - }, - { - model_id: 'model-f16', - path: 'https://example.com/model-f16.gguf', - file_size: '8.0 GB', - }, - ], - created_at: '2023-01-01T00:00:00Z', - readme: 'https://example.com/readme.md', - } - - act(() => { - result.current.addSource(modelWithQuants) - }) - - expect(result.current.sources).toHaveLength(1) - expect(result.current.sources[0]).toEqual(modelWithQuants) - expect(result.current.sources[0].quants).toHaveLength(3) - }) - }) - describe('state management', () => { it('should maintain state across multiple hook instances', () => { const { result: result1 } = renderHook(() => useModelSources()) @@ -386,9 +259,11 @@ describe('useModelSources', () => { const mockSources: CatalogModel[] = [ { model_name: 'shared-model', - provider: 'shared-provider', description: 'Shared model', - version: '1.0.0', + developer: 'shared-provider', + downloads: 100, + num_quants: 1, + quants: [{ model_id: 'shared-model-q4', path: '/path/shared', file_size: '1GB' }], }, ] @@ -436,18 +311,22 @@ describe('useModelSources', () => { const sources1: CatalogModel[] = [ { model_name: 'model-1', - provider: 'provider-1', description: 'First batch', - version: '1.0.0', + developer: 
'provider-1', + downloads: 100, + num_quants: 1, + quants: [{ model_id: 'model-1-q4', path: '/path/1', file_size: '1GB' }], }, ] const sources2: CatalogModel[] = [ { model_name: 'model-2', - provider: 'provider-2', description: 'Second batch', - version: '2.0.0', + developer: 'provider-2', + downloads: 200, + num_quants: 1, + quants: [{ model_id: 'model-2-q4', path: '/path/2', file_size: '2GB' }], }, ] @@ -486,9 +365,11 @@ describe('useModelSources', () => { const mockSources: CatalogModel[] = [ { model_name: 'recovery-model', - provider: 'recovery-provider', description: 'Recovery model', - version: '1.0.0', + developer: 'recovery-provider', + downloads: 100, + num_quants: 1, + quants: [{ model_id: 'recovery-model-q4', path: '/path/recovery', file_size: '1GB' }], }, ] diff --git a/web-app/src/hooks/useAppUpdater.ts b/web-app/src/hooks/useAppUpdater.ts index fb24c6600..303cb43e3 100644 --- a/web-app/src/hooks/useAppUpdater.ts +++ b/web-app/src/hooks/useAppUpdater.ts @@ -53,6 +53,11 @@ export const useAppUpdater = () => { const checkForUpdate = useCallback( async (resetRemindMeLater = false) => { + if (AUTO_UPDATER_DISABLED) { + console.log('Auto updater is disabled') + return + } + try { // Reset remindMeLater if requested (e.g., when called from settings) if (resetRemindMeLater) { @@ -148,6 +153,11 @@ export const useAppUpdater = () => { ) const downloadAndInstallUpdate = useCallback(async () => { + if (AUTO_UPDATER_DISABLED) { + console.log('Auto updater is disabled') + return + } + if (!updateState.updateInfo) return try { diff --git a/web-app/src/hooks/useModelProvider.ts b/web-app/src/hooks/useModelProvider.ts index b1a988183..9be26ce41 100644 --- a/web-app/src/hooks/useModelProvider.ts +++ b/web-app/src/hooks/useModelProvider.ts @@ -227,34 +227,23 @@ export const useModelProvider = create()( > } - // Migration for cont_batching description update (version 0 -> 1) if (version === 0 && state?.providers) { - state.providers = state.providers.map((provider) => { 
- if (provider.provider === 'llamacpp' && provider.settings) { - provider.settings = provider.settings.map((setting) => { - if (setting.key === 'cont_batching') { - return { - ...setting, - description: - 'Enable continuous batching (a.k.a dynamic batching) for concurrent requests.', - } - } - return setting - }) - } - return provider - }) - } - - // Migration for chatTemplate key to chat_template (version 1 -> 2) - if (version === 1 && state?.providers) { state.providers.forEach((provider) => { + // Update cont_batching description for llamacpp provider + if (provider.provider === 'llamacpp' && provider.settings) { + const contBatchingSetting = provider.settings.find( + (s) => s.key === 'cont_batching' + ) + if (contBatchingSetting) { + contBatchingSetting.description = + 'Enable continuous batching (a.k.a dynamic batching) for concurrent requests.' + } + } + + // Migrate model settings if (provider.models) { provider.models.forEach((model) => { - // Initialize settings if it doesn't exist - if (!model.settings) { - model.settings = {} - } + if (!model.settings) model.settings = {} // Migrate chatTemplate key to chat_template if (model.settings.chatTemplate) { @@ -262,7 +251,7 @@ export const useModelProvider = create()( delete model.settings.chatTemplate } - // Add missing chat_template setting if it doesn't exist + // Add missing settings with defaults if (!model.settings.chat_template) { model.settings.chat_template = { ...modelSettings.chatTemplate, @@ -271,22 +260,7 @@ export const useModelProvider = create()( }, } } - }) - } - }) - } - // Migration for override_tensor_buffer_type key (version 2 -> 3) - if (version === 2 && state?.providers) { - state.providers.forEach((provider) => { - if (provider.models) { - provider.models.forEach((model) => { - // Initialize settings if it doesn't exist - if (!model.settings) { - model.settings = {} - } - - // Add missing override_tensor_buffer_type setting if it doesn't exist if 
(!model.settings.override_tensor_buffer_t) { model.settings.override_tensor_buffer_t = { ...modelSettings.override_tensor_buffer_t, @@ -303,7 +277,7 @@ export const useModelProvider = create()( return state }, - version: 3, + version: 1, } ) ) diff --git a/web-app/src/hooks/useModelSources.ts b/web-app/src/hooks/useModelSources.ts index 916d8eae2..3357947e1 100644 --- a/web-app/src/hooks/useModelSources.ts +++ b/web-app/src/hooks/useModelSources.ts @@ -2,13 +2,13 @@ import { create } from 'zustand' import { localStorageKey } from '@/constants/localStorage' import { createJSONStorage, persist } from 'zustand/middleware' import { fetchModelCatalog, CatalogModel } from '@/services/models' +import { sanitizeModelId } from '@/lib/utils' // Zustand store for model sources type ModelSourcesState = { sources: CatalogModel[] error: Error | null loading: boolean - addSource: (source: CatalogModel) => void fetchSources: () => Promise } @@ -18,19 +18,18 @@ export const useModelSources = create()( sources: [], error: null, loading: false, - - addSource: (source: CatalogModel) => { - set((state) => ({ - sources: [ - ...state.sources.filter((e) => e.model_name !== source.model_name), - source, - ], - })) - }, fetchSources: async () => { set({ loading: true, error: null }) try { - const newSources = await fetchModelCatalog() + const newSources = await fetchModelCatalog().then((catalogs) => + catalogs.map((catalog) => ({ + ...catalog, + quants: catalog.quants.map((quant) => ({ + ...quant, + model_id: sanitizeModelId(quant.model_id), + })), + })) + ) set({ sources: newSources.length ? 
newSources : get().sources, diff --git a/web-app/src/lib/service.ts b/web-app/src/lib/service.ts index 351780445..0898cc4dc 100644 --- a/web-app/src/lib/service.ts +++ b/web-app/src/lib/service.ts @@ -22,8 +22,6 @@ export const AppRoutes = [ 'getConnectedServers', 'readLogs', 'changeAppDataFolder', - 'getSystemInfo', - 'getSystemUsage', ] // Define API routes based on different route types export const Routes = [...CoreRoutes, ...APIRoutes, ...AppRoutes].map((r) => ({ diff --git a/web-app/src/lib/utils.ts b/web-app/src/lib/utils.ts index dd3301e03..3d896b883 100644 --- a/web-app/src/lib/utils.ts +++ b/web-app/src/lib/utils.ts @@ -155,3 +155,7 @@ export function formatDuration(startTime: number, endTime?: number): string { return `${durationMs}ms` } } + +export function sanitizeModelId(modelId: string): string { + return modelId.replace(/[^a-zA-Z0-9/_\-.]/g, '').replace(/\./g, "_") +} diff --git a/web-app/src/routes/hub/$modelId.tsx b/web-app/src/routes/hub/$modelId.tsx index d46c20ca2..f34057ae4 100644 --- a/web-app/src/routes/hub/$modelId.tsx +++ b/web-app/src/routes/hub/$modelId.tsx @@ -27,6 +27,7 @@ import { import { Progress } from '@/components/ui/progress' import { Button } from '@/components/ui/button' import { cn } from '@/lib/utils' +import { useGeneralSetting } from '@/hooks/useGeneralSetting' type SearchParams = { repo: string @@ -42,6 +43,7 @@ export const Route = createFileRoute('/hub/$modelId')({ function HubModelDetail() { const { modelId } = useParams({ from: Route.id }) const navigate = useNavigate() + const { huggingfaceToken } = useGeneralSetting() const { sources, fetchSources } = useModelSources() // eslint-disable-next-line @typescript-eslint/no-explicit-any const search = useSearch({ from: Route.id as any }) @@ -60,12 +62,15 @@ function HubModelDetail() { }, [fetchSources]) const fetchRepo = useCallback(async () => { - const repoInfo = await fetchHuggingFaceRepo(search.repo || modelId) + const repoInfo = await fetchHuggingFaceRepo( + 
search.repo || modelId, + huggingfaceToken + ) if (repoInfo) { const repoDetail = convertHfRepoToCatalogModel(repoInfo) setRepoData(repoDetail) } - }, [modelId, search]) + }, [modelId, search, huggingfaceToken]) useEffect(() => { fetchRepo() @@ -151,7 +156,20 @@ function HubModelDetail() { useEffect(() => { if (modelData?.readme) { setIsLoadingReadme(true) + // Try fetching without headers first + // There is a weird issue where this HF link will return error when access public repo with auth header fetch(modelData.readme) + .then((response) => { + if (!response.ok && huggingfaceToken && modelData?.readme) { + // Retry with Authorization header if first fetch failed + return fetch(modelData.readme, { + headers: { + Authorization: `Bearer ${huggingfaceToken}`, + }, + }) + } + return response + }) .then((response) => response.text()) .then((content) => { setReadmeContent(content) @@ -162,7 +180,7 @@ function HubModelDetail() { setIsLoadingReadme(false) }) } - }, [modelData?.readme]) + }, [modelData?.readme, huggingfaceToken]) if (!modelData) { return ( diff --git a/web-app/src/routes/hub/index.tsx b/web-app/src/routes/hub/index.tsx index 66e079412..3bf146d87 100644 --- a/web-app/src/routes/hub/index.tsx +++ b/web-app/src/routes/hub/index.tsx @@ -39,6 +39,7 @@ import HeaderPage from '@/containers/HeaderPage' import { Loader } from 'lucide-react' import { useTranslation } from '@/i18n/react-i18next-compat' import Fuse from 'fuse.js' +import { useGeneralSetting } from '@/hooks/useGeneralSetting' type ModelProps = { model: CatalogModel @@ -57,6 +58,7 @@ export const Route = createFileRoute(route.hub.index as any)({ function Hub() { const parentRef = useRef(null) + const { huggingfaceToken } = useGeneralSetting() const { t } = useTranslation() const sortOptions = [ @@ -71,7 +73,7 @@ function Hub() { } }, []) - const { sources, addSource, fetchSources, loading } = useModelSources() + const { sources, fetchSources, loading } = useModelSources() const [searchValue, 
setSearchValue] = useState('') const [sortSelected, setSortSelected] = useState('newest') @@ -130,7 +132,9 @@ function Hub() { // Apply search filter if (debouncedSearchValue.length) { const fuse = new Fuse(filtered, searchOptions) - filtered = fuse.search(debouncedSearchValue).map((result) => result.item) + // Remove domain from search value (e.g., "huggingface.co/author/model" -> "author/model") + const cleanedSearchValue = debouncedSearchValue.replace(/^https?:\/\/[^/]+\//, '') + filtered = fuse.search(cleanedSearchValue).map((result) => result.item) } // Apply downloaded filter if (showOnlyDownloaded) { @@ -185,14 +189,20 @@ function Hub() { addModelSourceTimeoutRef.current = setTimeout(async () => { try { // Fetch HuggingFace repository information - const repoInfo = await fetchHuggingFaceRepo(e.target.value) + const repoInfo = await fetchHuggingFaceRepo( + e.target.value, + huggingfaceToken + ) if (repoInfo) { const catalogModel = convertHfRepoToCatalogModel(repoInfo) if ( - !sources.some((s) => s.model_name === catalogModel.model_name) + !sources.some( + (s) => + catalogModel.model_name.trim().split('/').pop() === + s.model_name.trim() + ) ) { setHuggingFaceRepo(catalogModel) - addSource(catalogModel) } } } catch (error) { @@ -284,7 +294,8 @@ function Hub() { const handleDownload = () => { // Immediately set local downloading state addLocalDownloadingModel(modelId) - pullModel(modelId, modelUrl) + const mmprojPath = model.mmproj_models?.[0]?.path + pullModel(modelId, modelUrl, mmprojPath) } return ( @@ -501,7 +512,7 @@ function Hub() {
- {loading ? ( + {loading && !filteredModels.length ? (
{t('hub:loadingModels')} @@ -745,7 +756,10 @@ function Hub() { ) pullModel( variant.model_id, - variant.path + variant.path, + filteredModels[ + virtualItem.index + ].mmproj_models?.[0]?.path ) }} > diff --git a/web-app/src/routes/settings/__tests__/general.test.tsx b/web-app/src/routes/settings/__tests__/general.test.tsx index f5033b30b..96388b0fb 100644 --- a/web-app/src/routes/settings/__tests__/general.test.tsx +++ b/web-app/src/routes/settings/__tests__/general.test.tsx @@ -1,5 +1,5 @@ import { describe, it, expect, beforeEach, vi } from 'vitest' -import { render, screen, fireEvent, waitFor } from '@testing-library/react' +import { render, screen, fireEvent, waitFor, act } from '@testing-library/react' import { Route as GeneralRoute } from '../general' // Mock all the dependencies @@ -68,9 +68,12 @@ vi.mock('@/hooks/useGeneralSetting', () => ({ }), })) +// Create a controllable mock +const mockCheckForUpdate = vi.fn() + vi.mock('@/hooks/useAppUpdater', () => ({ useAppUpdater: () => ({ - checkForUpdate: vi.fn(), + checkForUpdate: mockCheckForUpdate, }), })) @@ -184,12 +187,17 @@ vi.mock('@tauri-apps/plugin-opener', () => ({ revealItemInDir: vi.fn(), })) -vi.mock('@tauri-apps/api/webviewWindow', () => ({ - WebviewWindow: vi.fn().mockImplementation((label: string, options: any) => ({ +vi.mock('@tauri-apps/api/webviewWindow', () => { + const MockWebviewWindow = vi.fn().mockImplementation((label: string, options: any) => ({ once: vi.fn(), setFocus: vi.fn(), - })), -})) + })) + MockWebviewWindow.getByLabel = vi.fn().mockReturnValue(null) + + return { + WebviewWindow: MockWebviewWindow, + } +}) vi.mock('@tauri-apps/api/event', () => ({ emit: vi.fn(), @@ -244,6 +252,7 @@ global.window = { core: { api: { relaunch: vi.fn(), + getConnectedServers: vi.fn().mockResolvedValue([]), }, }, } @@ -258,20 +267,26 @@ Object.assign(navigator, { describe('General Settings Route', () => { beforeEach(() => { vi.clearAllMocks() + // Reset the mock to return a promise that resolves 
immediately by default + mockCheckForUpdate.mockResolvedValue(null) }) - it('should render the general settings page', () => { + it('should render the general settings page', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) expect(screen.getByTestId('header-page')).toBeInTheDocument() expect(screen.getByTestId('settings-menu')).toBeInTheDocument() expect(screen.getByText('common:settings')).toBeInTheDocument() }) - it('should render app version', () => { + it('should render app version', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) expect(screen.getByText('v1.0.0')).toBeInTheDocument() }) @@ -284,64 +299,82 @@ describe('General Settings Route', () => { // expect(screen.getByTestId('language-switcher')).toBeInTheDocument() // }) - it('should render switches for experimental features and spell check', () => { + it('should render switches for experimental features and spell check', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) const switches = screen.getAllByTestId('switch') expect(switches.length).toBeGreaterThanOrEqual(2) }) - it('should render huggingface token input', () => { + it('should render huggingface token input', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) const input = screen.getByTestId('input') expect(input).toBeInTheDocument() expect(input).toHaveValue('test-token') }) - it('should handle spell check toggle', () => { + it('should handle spell check toggle', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) const switches = screen.getAllByTestId('switch') expect(switches.length).toBeGreaterThan(0) // Test that switches are interactive - 
fireEvent.click(switches[0]) + await act(async () => { + fireEvent.click(switches[0]) + }) expect(switches[0]).toBeInTheDocument() }) - it('should handle experimental features toggle', () => { + it('should handle experimental features toggle', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) const switches = screen.getAllByTestId('switch') expect(switches.length).toBeGreaterThan(0) // Test that switches are interactive if (switches.length > 1) { - fireEvent.click(switches[1]) + await act(async () => { + fireEvent.click(switches[1]) + }) expect(switches[1]).toBeInTheDocument() } }) - it('should handle huggingface token change', () => { + it('should handle huggingface token change', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) const input = screen.getByTestId('input') expect(input).toBeInTheDocument() // Test that input is interactive - fireEvent.change(input, { target: { value: 'new-token' } }) + await act(async () => { + fireEvent.change(input, { target: { value: 'new-token' } }) + }) expect(input).toBeInTheDocument() }) it('should handle check for updates', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) const buttons = screen.getAllByTestId('button') const checkUpdateButton = buttons.find((button) => @@ -350,7 +383,9 @@ describe('General Settings Route', () => { if (checkUpdateButton) { expect(checkUpdateButton).toBeInTheDocument() - fireEvent.click(checkUpdateButton) + await act(async () => { + fireEvent.click(checkUpdateButton) + }) // Test that button is interactive expect(checkUpdateButton).toBeInTheDocument() } @@ -358,7 +393,9 @@ describe('General Settings Route', () => { it('should handle data folder display', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await 
act(async () => { + render() + }) // Test that component renders without errors expect(screen.getByTestId('header-page')).toBeInTheDocument() @@ -367,25 +404,31 @@ describe('General Settings Route', () => { it('should handle copy to clipboard', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) // Test that component renders without errors expect(screen.getByTestId('header-page')).toBeInTheDocument() expect(screen.getByTestId('settings-menu')).toBeInTheDocument() }) - it('should handle factory reset dialog', () => { + it('should handle factory reset dialog', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) expect(screen.getByTestId('dialog')).toBeInTheDocument() expect(screen.getByTestId('dialog-trigger')).toBeInTheDocument() expect(screen.getByTestId('dialog-content')).toBeInTheDocument() }) - it('should render external links', () => { + it('should render external links', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) // Check for external links const links = screen.getAllByRole('link') @@ -394,7 +437,9 @@ describe('General Settings Route', () => { it('should handle logs window opening', async () => { const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) const buttons = screen.getAllByTestId('button') const openLogsButton = buttons.find((button) => @@ -404,14 +449,18 @@ describe('General Settings Route', () => { if (openLogsButton) { expect(openLogsButton).toBeInTheDocument() // Test that button is interactive - fireEvent.click(openLogsButton) + await act(async () => { + fireEvent.click(openLogsButton) + }) expect(openLogsButton).toBeInTheDocument() } }) it('should handle reveal logs folder', async () => { const Component = GeneralRoute.component as 
React.ComponentType - render() + await act(async () => { + render() + }) const buttons = screen.getAllByTestId('button') const revealLogsButton = buttons.find((button) => @@ -421,26 +470,39 @@ describe('General Settings Route', () => { if (revealLogsButton) { expect(revealLogsButton).toBeInTheDocument() // Test that button is interactive - fireEvent.click(revealLogsButton) + await act(async () => { + fireEvent.click(revealLogsButton) + }) expect(revealLogsButton).toBeInTheDocument() } }) - it('should show correct file explorer text for Windows', () => { + it('should show correct file explorer text for Windows', async () => { global.IS_WINDOWS = true global.IS_MACOS = false const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) expect( screen.getByText('settings:general.showInFileExplorer') ).toBeInTheDocument() }) - it('should disable check for updates button when checking', () => { + it('should disable check for updates button when checking', async () => { + // Create a promise that we can control + let resolveUpdate: (value: any) => void + const updatePromise = new Promise((resolve) => { + resolveUpdate = resolve + }) + mockCheckForUpdate.mockReturnValue(updatePromise) + const Component = GeneralRoute.component as React.ComponentType - render() + await act(async () => { + render() + }) const buttons = screen.getAllByTestId('button') const checkUpdateButton = buttons.find((button) => @@ -448,8 +510,22 @@ describe('General Settings Route', () => { ) if (checkUpdateButton) { - fireEvent.click(checkUpdateButton) + // Click the button but don't await it yet + act(() => { + fireEvent.click(checkUpdateButton) + }) + + // Now the button should be disabled while checking expect(checkUpdateButton).toBeDisabled() + + // Resolve the promise to finish the update check + await act(async () => { + resolveUpdate!(null) + await updatePromise + }) + + // Button should be enabled again + 
expect(checkUpdateButton).not.toBeDisabled() } }) }) diff --git a/web-app/src/routes/settings/general.tsx b/web-app/src/routes/settings/general.tsx index a5ce5ec26..3ee558ae7 100644 --- a/web-app/src/routes/settings/general.tsx +++ b/web-app/src/routes/settings/general.tsx @@ -173,6 +173,7 @@ function General() { setSelectedNewPath(null) setIsDialogOpen(false) } catch (error) { + console.error(error) toast.error( error instanceof Error ? error.message @@ -259,26 +260,28 @@ function General() { } /> - -
- {isCheckingUpdate - ? t('settings:general.checkingForUpdates') - : t('settings:general.checkForUpdates')} -
- - } - /> + {!AUTO_UPDATER_DISABLED && ( + +
+ {isCheckingUpdate + ? t('settings:general.checkingForUpdates') + : t('settings:general.checkForUpdates')} +
+ + } + /> + )} {/* } diff --git a/web-app/src/routes/system-monitor.tsx b/web-app/src/routes/system-monitor.tsx index e7eac9dad..f09d2061b 100644 --- a/web-app/src/routes/system-monitor.tsx +++ b/web-app/src/routes/system-monitor.tsx @@ -1,6 +1,6 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ import { createFileRoute } from '@tanstack/react-router' -import { useEffect, useState } from 'react' +import { useEffect } from 'react' import { useHardware } from '@/hooks/useHardware' import { Progress } from '@/components/ui/progress' import { route } from '@/constants/routes' @@ -19,12 +19,7 @@ function SystemMonitor() { const { t } = useTranslation() const { hardwareData, systemUsage, updateSystemUsage } = useHardware() - const { - devices: llamacppDevices, - fetchDevices, - } = useLlamacppDevices() - - const [isInitialized, setIsInitialized] = useState(false) + const { devices: llamacppDevices, fetchDevices } = useLlamacppDevices() useEffect(() => { // Fetch llamacpp devices @@ -46,14 +41,6 @@ function SystemMonitor() { return () => clearInterval(intervalId) }, [updateSystemUsage]) - // Initialize when hardware data and llamacpp devices are available - useEffect(() => { - if (hardwareData.gpus.length > 0 && !isInitialized) { - setIsInitialized(true) - } - }, [hardwareData.gpus.length, isInitialized]) - - // Calculate RAM usage percentage const ramUsagePercentage = toNumber(systemUsage.used_memory / hardwareData.total_memory) * 100 diff --git a/web-app/src/services/__tests__/hardware.test.ts b/web-app/src/services/__tests__/hardware.test.ts index 64359907f..f877b3b3c 100644 --- a/web-app/src/services/__tests__/hardware.test.ts +++ b/web-app/src/services/__tests__/hardware.test.ts @@ -52,7 +52,7 @@ describe('hardware service', () => { const result = await getHardwareInfo() - expect(vi.mocked(invoke)).toHaveBeenCalledWith('get_system_info') + expect(vi.mocked(invoke)).toHaveBeenCalledWith('plugin:hardware|get_system_info') 
expect(result).toEqual(mockHardwareData) }) @@ -61,7 +61,7 @@ describe('hardware service', () => { vi.mocked(invoke).mockRejectedValue(mockError) await expect(getHardwareInfo()).rejects.toThrow('Failed to get hardware info') - expect(vi.mocked(invoke)).toHaveBeenCalledWith('get_system_info') + expect(vi.mocked(invoke)).toHaveBeenCalledWith('plugin:hardware|get_system_info') }) it('should return correct type from invoke', async () => { @@ -112,7 +112,7 @@ describe('hardware service', () => { const result = await getSystemUsage() - expect(vi.mocked(invoke)).toHaveBeenCalledWith('get_system_usage') + expect(vi.mocked(invoke)).toHaveBeenCalledWith('plugin:hardware|get_system_usage') expect(result).toEqual(mockSystemUsage) }) @@ -121,7 +121,7 @@ describe('hardware service', () => { vi.mocked(invoke).mockRejectedValue(mockError) await expect(getSystemUsage()).rejects.toThrow('Failed to get system usage') - expect(vi.mocked(invoke)).toHaveBeenCalledWith('get_system_usage') + expect(vi.mocked(invoke)).toHaveBeenCalledWith('plugin:hardware|get_system_usage') }) it('should return correct type from invoke', async () => { @@ -255,8 +255,8 @@ describe('hardware service', () => { expect(hardwareResult).toEqual(mockHardwareData) expect(usageResult).toEqual(mockSystemUsage) expect(vi.mocked(invoke)).toHaveBeenCalledTimes(2) - expect(vi.mocked(invoke)).toHaveBeenNthCalledWith(1, 'get_system_info') - expect(vi.mocked(invoke)).toHaveBeenNthCalledWith(2, 'get_system_usage') + expect(vi.mocked(invoke)).toHaveBeenNthCalledWith(1, 'plugin:hardware|get_system_info') + expect(vi.mocked(invoke)).toHaveBeenNthCalledWith(2, 'plugin:hardware|get_system_usage') }) }) }) \ No newline at end of file diff --git a/web-app/src/services/__tests__/models.test.ts b/web-app/src/services/__tests__/models.test.ts index b648b2677..b783f6ab5 100644 --- a/web-app/src/services/__tests__/models.test.ts +++ b/web-app/src/services/__tests__/models.test.ts @@ -325,7 +325,10 @@ describe('models service', () => { 
expect(result).toEqual(mockRepoData) expect(fetch).toHaveBeenCalledWith( - 'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true' + 'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true', + { + headers: {}, + } ) }) @@ -341,19 +344,28 @@ describe('models service', () => { 'https://huggingface.co/microsoft/DialoGPT-medium' ) expect(fetch).toHaveBeenCalledWith( - 'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true' + 'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true', + { + headers: {}, + } ) // Test with domain prefix await fetchHuggingFaceRepo('huggingface.co/microsoft/DialoGPT-medium') expect(fetch).toHaveBeenCalledWith( - 'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true' + 'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true', + { + headers: {}, + } ) // Test with trailing slash await fetchHuggingFaceRepo('microsoft/DialoGPT-medium/') expect(fetch).toHaveBeenCalledWith( - 'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true' + 'https://huggingface.co/api/models/microsoft/DialoGPT-medium?blobs=true', + { + headers: {}, + } ) }) @@ -379,7 +391,10 @@ describe('models service', () => { expect(result).toBeNull() expect(fetch).toHaveBeenCalledWith( - 'https://huggingface.co/api/models/nonexistent/model?blobs=true' + 'https://huggingface.co/api/models/nonexistent/model?blobs=true', + { + headers: {}, + } ) }) diff --git a/web-app/src/services/hardware.ts b/web-app/src/services/hardware.ts index 79bc7b1c3..ff50cae28 100644 --- a/web-app/src/services/hardware.ts +++ b/web-app/src/services/hardware.ts @@ -15,7 +15,7 @@ export interface DeviceList { * @returns {Promise} A promise that resolves to the hardware information. 
*/ export const getHardwareInfo = async () => { - return invoke('get_system_info') as Promise + return invoke('plugin:hardware|get_system_info') as Promise } /** @@ -23,7 +23,7 @@ export const getHardwareInfo = async () => { * @returns {Promise} A promise that resolves to the hardware information. */ export const getSystemUsage = async () => { - return invoke('get_system_usage') as Promise + return invoke('plugin:hardware|get_system_usage') as Promise } /** diff --git a/web-app/src/services/models.ts b/web-app/src/services/models.ts index 12bf1997d..d2b9e551e 100644 --- a/web-app/src/services/models.ts +++ b/web-app/src/services/models.ts @@ -1,3 +1,4 @@ +import { sanitizeModelId } from '@/lib/utils' import { AIEngine, EngineManager, @@ -12,6 +13,12 @@ export interface ModelQuant { file_size: string } +export interface MMProjModel { + model_id: string + path: string + file_size: string +} + export interface CatalogModel { model_name: string description: string @@ -19,6 +26,7 @@ export interface CatalogModel { downloads: number num_quants: number quants: ModelQuant[] + mmproj_models?: MMProjModel[] created_at?: string readme?: string } @@ -99,7 +107,8 @@ export const fetchModelCatalog = async (): Promise => { * @returns A promise that resolves to the repository information. */ export const fetchHuggingFaceRepo = async ( - repoId: string + repoId: string, + hfToken?: string ): Promise => { try { // Clean the repo ID to handle various input formats @@ -114,7 +123,14 @@ export const fetchHuggingFaceRepo = async ( } const response = await fetch( - `https://huggingface.co/api/models/${cleanRepoId}?blobs=true` + `https://huggingface.co/api/models/${cleanRepoId}?blobs=true`, + { + headers: hfToken + ? 
{ + Authorization: `Bearer ${hfToken}`, + } + : {}, + } ) if (!response.ok) { @@ -157,7 +173,7 @@ export const convertHfRepoToCatalogModel = ( const modelId = file.rfilename.replace(/\.gguf$/i, '') return { - model_id: modelId, + model_id: sanitizeModelId(modelId), path: `https://huggingface.co/${repo.modelId}/resolve/main/${file.rfilename}`, file_size: formatFileSize(file.size), } @@ -193,9 +209,14 @@ export const updateModel = async ( * @param model The model to pull. * @returns A promise that resolves when the model download task is created. */ -export const pullModel = async (id: string, modelPath: string) => { +export const pullModel = async ( + id: string, + modelPath: string, + mmprojPath?: string +) => { return getEngine()?.import(id, { modelPath, + mmprojPath, }) } diff --git a/web-app/src/types/models.ts b/web-app/src/types/models.ts index e2319a4e3..fec96aa1c 100644 --- a/web-app/src/types/models.ts +++ b/web-app/src/types/models.ts @@ -17,7 +17,7 @@ export enum ModelCapabilities { // TODO: Remove this enum when we integrate llama.cpp extension export enum DefaultToolUseSupportedModels { - JanNano = 'jan-nano', + JanNano = 'jan-', Qwen3 = 'qwen3', Lucy = 'lucy', } diff --git a/website/astro.config.mjs b/website/astro.config.mjs index ba894459c..da1d2f48b 100644 --- a/website/astro.config.mjs +++ b/website/astro.config.mjs @@ -1,15 +1,44 @@ // @ts-check import { defineConfig } from 'astro/config' import starlight from '@astrojs/starlight' -import starlightThemeRapide from 'starlight-theme-rapide' +import starlightThemeNext from 'starlight-theme-next' +// import starlightThemeRapide from 'starlight-theme-rapide' import starlightSidebarTopics from 'starlight-sidebar-topics' import mermaid from 'astro-mermaid' +import { fileURLToPath } from 'url' +import path, { dirname } from 'path' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) // https://astro.build/config export default defineConfig({ // Deploy to the new v2 
subdomain site: 'https://v2.jan.ai', - // No 'base' property is needed, as this will be deployed to the root of the subdomain. + vite: { + resolve: { + alias: { + '@': path.resolve(__dirname, './src'), + '@/components': path.resolve(__dirname, './src/components'), + '@/layouts': path.resolve(__dirname, './src/layouts'), + '@/assets': path.resolve(__dirname, './src/assets'), + '@/content': path.resolve(__dirname, './src/content'), + '@/styles': path.resolve(__dirname, './src/styles'), + '@/utils': path.resolve(__dirname, './src/utils'), + }, + }, + assetsInclude: [ + '**/*.jpg', + '**/*.jpeg', + '**/*.png', + '**/*.gif', + '**/*.svg', + '**/*.webp', + ], + optimizeDeps: { + exclude: ['@astrojs/starlight'], + }, + }, integrations: [ mermaid({ theme: 'default', @@ -17,14 +46,16 @@ export default defineConfig({ }), starlight({ title: '👋 Jan', + favicon: 'jan2.png', plugins: [ - starlightThemeRapide(), + // starlightThemeRapide(), + starlightThemeNext(), starlightSidebarTopics( [ { label: 'Jan Desktop', - link: '/', + link: '/jan/', icon: 'rocket', items: [ { @@ -108,25 +139,10 @@ export default defineConfig({ { label: 'Local Server', items: [ - { label: 'Introduction', link: '/local-server/' }, - { label: 'Server Setup', slug: 'local-server/api-server' }, { - label: 'Jan Data Folder', - slug: 'local-server/data-folder', - }, - { label: 'Server Settings', slug: 'local-server/settings' }, - { - label: 'Llama.cpp Server', - slug: 'local-server/llama-cpp', - }, - { - label: 'Server Troubleshooting', - slug: 'local-server/troubleshooting', - }, - { - label: 'Integrations', + label: 'All', collapsed: true, - autogenerate: { directory: 'local-server/integrations' }, + autogenerate: { directory: 'local-server' }, }, ], }, @@ -144,17 +160,100 @@ export default defineConfig({ { label: 'Jan Mobile', link: '/mobile/', - badge: { text: 'Coming Soon', variant: 'caution' }, + badge: { text: 'Soon', variant: 'caution' }, icon: 'phone', items: [{ label: 'Overview', slug: 'mobile' }], 
}, { label: 'Jan Server', link: '/server/', - badge: { text: 'Coming Soon', variant: 'caution' }, + badge: { text: 'Soon', variant: 'caution' }, icon: 'forward-slash', items: [{ label: 'Overview', slug: 'server' }], }, + { + label: 'Handbook', + link: '/handbook/', + icon: 'open-book', + items: [ + { label: 'Welcome', slug: 'handbook' }, + { + label: 'About Jan', + items: [ + { + label: 'Why does Jan Exist?', + collapsed: true, + autogenerate: { directory: 'handbook/why' }, + }, + { + label: 'How we make Money', + collapsed: true, + autogenerate: { directory: 'handbook/money' }, + }, + { + label: 'Who We Hire', + collapsed: true, + autogenerate: { directory: 'handbook/who' }, + }, + { + label: "Jan's Philosophies", + collapsed: true, + autogenerate: { directory: 'handbook/philosophy' }, + }, + { + label: 'Brand & Identity', + collapsed: true, + autogenerate: { directory: 'handbook/brand' }, + }, + ], + }, + { + label: 'How We Work', + items: [ + { + label: 'Team Roster', + collapsed: true, + autogenerate: { directory: 'handbook/team' }, + }, + { + label: "Jan's Culture", + collapsed: true, + autogenerate: { directory: 'handbook/culture' }, + }, + { + label: 'How We Build', + collapsed: true, + autogenerate: { directory: 'handbook/how' }, + }, + { + label: 'How We Sell', + collapsed: true, + autogenerate: { directory: 'handbook/sell' }, + }, + ], + }, + { + label: 'HR', + items: [ + { + label: 'HR Lifecycle', + collapsed: true, + autogenerate: { directory: 'handbook/lifecycle' }, + }, + { + label: 'HR Policies', + collapsed: true, + autogenerate: { directory: 'handbook/hr' }, + }, + { + label: 'Compensation', + collapsed: true, + autogenerate: { directory: 'handbook/comp' }, + }, + ], + }, + ], + }, ], { exclude: [ diff --git a/website/bun.lock b/website/bun.lock index 09e4c8323..24b037fea 100644 --- a/website/bun.lock +++ b/website/bun.lock @@ -14,8 +14,10 @@ "sharp": "^0.34.3", "starlight-openapi": "^0.19.1", "starlight-sidebar-topics": "^0.6.0", + 
"starlight-theme-next": "^0.3.2", "starlight-theme-rapide": "^0.5.1", "starlight-videos": "^0.3.0", + "unist-util-visit": "^5.0.0", }, }, }, @@ -1076,6 +1078,8 @@ "starlight-sidebar-topics": ["starlight-sidebar-topics@0.6.0", "", { "dependencies": { "picomatch": "^4.0.2" }, "peerDependencies": { "@astrojs/starlight": ">=0.32.0" } }, "sha512-ysmOR7zaHYKtk18/mpW4MbEMDioR/ZBsisu9bdQrq0v9BlHWpW7gAdWlqFWO9zdv1P7l0Mo1WKd0wJ0UtqOVEQ=="], + "starlight-theme-next": ["starlight-theme-next@0.3.2", "", { "peerDependencies": { "@astrojs/starlight": ">=0.34" } }, "sha512-GQGhZ67nZ09pWVQoecl1N+H/1EUkUOvLVpjqOCHlkSotCblwrWrj4guEsdF9aKkNqiyTi6zzwZ5sxQospvdHOg=="], + "starlight-theme-rapide": ["starlight-theme-rapide@0.5.1", "", { "peerDependencies": { "@astrojs/starlight": ">=0.34.0" } }, "sha512-QRF6mzcYHLEX5UpUvOPXVVwISS298siIJLcKextoMLhXcnF12nX+IYJ0LNxFk9XaPbX9uDXIieSBJf5Pztkteg=="], "starlight-videos": ["starlight-videos@0.3.0", "", { "dependencies": { "@astro-community/astro-embed-youtube": "^0.5.6", "hastscript": "^9.0.0", "iso8601-duration": "^2.1.2", "srt-parser-2": "^1.2.3", "unist-util-visit": "^5.0.0" }, "peerDependencies": { "@astrojs/starlight": ">=0.34.0" } }, "sha512-1yvFUEn3P+ZjuGr5COswQp14cZdIvsGjg9lqDIyW5clCrZaBiDMSNPLYngyQozaDbrublEp6/V9HbJR6sGnSOA=="], diff --git a/website/package.json b/website/package.json index 895c7e11e..aa6f74799 100644 --- a/website/package.json +++ b/website/package.json @@ -20,8 +20,10 @@ "sharp": "^0.34.3", "starlight-openapi": "^0.19.1", "starlight-sidebar-topics": "^0.6.0", + "starlight-theme-next": "^0.3.2", "starlight-theme-rapide": "^0.5.1", - "starlight-videos": "^0.3.0" + "starlight-videos": "^0.3.0", + "unist-util-visit": "^5.0.0" }, "packageManager": "yarn@1.22.22" } diff --git a/website/public/assets/images/changelog/changelog0.6.6.gif b/website/public/assets/images/changelog/changelog0.6.6.gif new file mode 100644 index 000000000..a4e710270 Binary files /dev/null and b/website/public/assets/images/changelog/changelog0.6.6.gif 
differ diff --git a/website/public/assets/images/homepage/app-frame-dark-fixed.webp b/website/public/assets/images/homepage/app-frame-dark-fixed.webp new file mode 100644 index 000000000..638f9347a Binary files /dev/null and b/website/public/assets/images/homepage/app-frame-dark-fixed.webp differ diff --git a/website/public/assets/images/homepage/app-frame-light-fixed.png b/website/public/assets/images/homepage/app-frame-light-fixed.png new file mode 100644 index 000000000..aff00d8ba Binary files /dev/null and b/website/public/assets/images/homepage/app-frame-light-fixed.png differ diff --git a/website/public/assets/images/homepage/app-frame-light-fixed.webp b/website/public/assets/images/homepage/app-frame-light-fixed.webp new file mode 100644 index 000000000..6db003e43 Binary files /dev/null and b/website/public/assets/images/homepage/app-frame-light-fixed.webp differ diff --git a/website/public/assets/images/homepage/assistant-dark.png b/website/public/assets/images/homepage/assistant-dark.png new file mode 100644 index 000000000..f7d737a51 Binary files /dev/null and b/website/public/assets/images/homepage/assistant-dark.png differ diff --git a/website/public/assets/images/homepage/assistant-light.png b/website/public/assets/images/homepage/assistant-light.png new file mode 100644 index 000000000..6d50e29d6 Binary files /dev/null and b/website/public/assets/images/homepage/assistant-light.png differ diff --git a/website/public/assets/images/homepage/extension-dark.png b/website/public/assets/images/homepage/extension-dark.png new file mode 100644 index 000000000..b85e16cf4 Binary files /dev/null and b/website/public/assets/images/homepage/extension-dark.png differ diff --git a/website/public/assets/images/homepage/extension-light.png b/website/public/assets/images/homepage/extension-light.png new file mode 100644 index 000000000..55677a494 Binary files /dev/null and b/website/public/assets/images/homepage/extension-light.png differ diff --git 
a/website/public/assets/images/homepage/features01.png b/website/public/assets/images/homepage/features01.png new file mode 100644 index 000000000..30174f13f Binary files /dev/null and b/website/public/assets/images/homepage/features01.png differ diff --git a/website/public/assets/images/homepage/features01.webp b/website/public/assets/images/homepage/features01.webp new file mode 100644 index 000000000..1b1979995 Binary files /dev/null and b/website/public/assets/images/homepage/features01.webp differ diff --git a/website/public/assets/images/homepage/features01dark.png b/website/public/assets/images/homepage/features01dark.png new file mode 100644 index 000000000..f556da320 Binary files /dev/null and b/website/public/assets/images/homepage/features01dark.png differ diff --git a/website/public/assets/images/homepage/features01dark.webp b/website/public/assets/images/homepage/features01dark.webp new file mode 100644 index 000000000..7d71cb531 Binary files /dev/null and b/website/public/assets/images/homepage/features01dark.webp differ diff --git a/website/public/assets/images/homepage/features02.png b/website/public/assets/images/homepage/features02.png new file mode 100644 index 000000000..56688c050 Binary files /dev/null and b/website/public/assets/images/homepage/features02.png differ diff --git a/website/public/assets/images/homepage/features02.webp b/website/public/assets/images/homepage/features02.webp new file mode 100644 index 000000000..eed502a14 Binary files /dev/null and b/website/public/assets/images/homepage/features02.webp differ diff --git a/website/public/assets/images/homepage/features02dark.png b/website/public/assets/images/homepage/features02dark.png new file mode 100644 index 000000000..92943878f Binary files /dev/null and b/website/public/assets/images/homepage/features02dark.png differ diff --git a/website/public/assets/images/homepage/features02dark.webp b/website/public/assets/images/homepage/features02dark.webp new file mode 100644 index 
000000000..3f6bf854d Binary files /dev/null and b/website/public/assets/images/homepage/features02dark.webp differ diff --git a/website/public/assets/images/homepage/features03.png b/website/public/assets/images/homepage/features03.png new file mode 100644 index 000000000..290a8812c Binary files /dev/null and b/website/public/assets/images/homepage/features03.png differ diff --git a/website/public/assets/images/homepage/features03.webp b/website/public/assets/images/homepage/features03.webp new file mode 100644 index 000000000..a73500479 Binary files /dev/null and b/website/public/assets/images/homepage/features03.webp differ diff --git a/website/public/assets/images/homepage/features03dark.png b/website/public/assets/images/homepage/features03dark.png new file mode 100644 index 000000000..3afff9290 Binary files /dev/null and b/website/public/assets/images/homepage/features03dark.png differ diff --git a/website/public/assets/images/homepage/features03dark.webp b/website/public/assets/images/homepage/features03dark.webp new file mode 100644 index 000000000..a280eea49 Binary files /dev/null and b/website/public/assets/images/homepage/features03dark.webp differ diff --git a/website/public/assets/images/homepage/features04.png b/website/public/assets/images/homepage/features04.png new file mode 100644 index 000000000..b9e08f3a2 Binary files /dev/null and b/website/public/assets/images/homepage/features04.png differ diff --git a/website/public/assets/images/homepage/features04.webp b/website/public/assets/images/homepage/features04.webp new file mode 100644 index 000000000..e9ef1f132 Binary files /dev/null and b/website/public/assets/images/homepage/features04.webp differ diff --git a/website/public/assets/images/homepage/features04dark.png b/website/public/assets/images/homepage/features04dark.png new file mode 100644 index 000000000..997dd42ba Binary files /dev/null and b/website/public/assets/images/homepage/features04dark.png differ diff --git 
a/website/public/assets/images/homepage/features04dark.webp b/website/public/assets/images/homepage/features04dark.webp new file mode 100644 index 000000000..d4a8641a1 Binary files /dev/null and b/website/public/assets/images/homepage/features04dark.webp differ diff --git a/website/public/assets/images/homepage/features05.png b/website/public/assets/images/homepage/features05.png new file mode 100644 index 000000000..b60c65574 Binary files /dev/null and b/website/public/assets/images/homepage/features05.png differ diff --git a/website/public/assets/images/homepage/features05.webp b/website/public/assets/images/homepage/features05.webp new file mode 100644 index 000000000..6a6e57c6f Binary files /dev/null and b/website/public/assets/images/homepage/features05.webp differ diff --git a/website/public/assets/images/homepage/features05dark.png b/website/public/assets/images/homepage/features05dark.png new file mode 100644 index 000000000..a4bd8e05c Binary files /dev/null and b/website/public/assets/images/homepage/features05dark.png differ diff --git a/website/public/assets/images/homepage/features05dark.webp b/website/public/assets/images/homepage/features05dark.webp new file mode 100644 index 000000000..9ca8731a8 Binary files /dev/null and b/website/public/assets/images/homepage/features05dark.webp differ diff --git a/website/public/assets/images/homepage/glow.png b/website/public/assets/images/homepage/glow.png new file mode 100644 index 000000000..099422e51 Binary files /dev/null and b/website/public/assets/images/homepage/glow.png differ diff --git a/website/public/assets/images/homepage/icon.png b/website/public/assets/images/homepage/icon.png new file mode 100644 index 000000000..786eada6b Binary files /dev/null and b/website/public/assets/images/homepage/icon.png differ diff --git a/website/public/assets/images/homepage/lifehacker-dark.png b/website/public/assets/images/homepage/lifehacker-dark.png new file mode 100644 index 000000000..69f26b9d4 Binary files 
/dev/null and b/website/public/assets/images/homepage/lifehacker-dark.png differ diff --git a/website/public/assets/images/homepage/lifehacker-light.png b/website/public/assets/images/homepage/lifehacker-light.png new file mode 100644 index 000000000..a9b31c1ed Binary files /dev/null and b/website/public/assets/images/homepage/lifehacker-light.png differ diff --git a/website/public/assets/images/homepage/mac-system-black.svg b/website/public/assets/images/homepage/mac-system-black.svg new file mode 100644 index 000000000..0b866815d --- /dev/null +++ b/website/public/assets/images/homepage/mac-system-black.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/website/public/assets/images/homepage/mac-system-white.svg b/website/public/assets/images/homepage/mac-system-white.svg new file mode 100644 index 000000000..4539db6eb --- /dev/null +++ b/website/public/assets/images/homepage/mac-system-white.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/website/public/assets/images/homepage/mapbase-dark.png b/website/public/assets/images/homepage/mapbase-dark.png new file mode 100644 index 000000000..cecde27a8 Binary files /dev/null and b/website/public/assets/images/homepage/mapbase-dark.png differ diff --git a/website/public/assets/images/homepage/mapbase-dark.webp b/website/public/assets/images/homepage/mapbase-dark.webp new file mode 100644 index 000000000..b59026f51 Binary files /dev/null and b/website/public/assets/images/homepage/mapbase-dark.webp differ diff --git a/website/public/assets/images/homepage/mapbase-light.png b/website/public/assets/images/homepage/mapbase-light.png new file mode 100644 index 000000000..bd38d8a2b Binary files /dev/null and b/website/public/assets/images/homepage/mapbase-light.png differ diff --git a/website/public/assets/images/homepage/mapbase-light.webp b/website/public/assets/images/homepage/mapbase-light.webp new file mode 100644 index 000000000..413c98e84 Binary files /dev/null and 
b/website/public/assets/images/homepage/mapbase-light.webp differ diff --git a/website/scripts/fix-blog-images.js b/website/scripts/fix-blog-images.js new file mode 100644 index 000000000..041228975 --- /dev/null +++ b/website/scripts/fix-blog-images.js @@ -0,0 +1,140 @@ +#!/usr/bin/env node + +import fs from 'fs' +import path from 'path' +import { fileURLToPath } from 'url' +import { dirname } from 'path' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) + +const blogDir = path.join(__dirname, '..', 'src', 'content', 'blog') + +// Function to convert filename to a valid JavaScript variable name +function toVariableName(filename) { + // Remove extension and special characters, convert to camelCase + const base = path.basename(filename, path.extname(filename)) + let varName = base + .replace(/[-_\s]+(.)?/g, (_, c) => (c ? c.toUpperCase() : '')) + .replace(/[^a-zA-Z0-9]/g, '') + .replace(/^./, (c) => c.toLowerCase()) + + // If the variable name starts with a number, prefix with 'img' + if (/^[0-9]/.test(varName)) { + varName = 'img' + varName.charAt(0).toUpperCase() + varName.slice(1) + } + + return varName +} + +// Function to process a single MDX file +function processMDXFile(filePath) { + console.log(`Processing: ${filePath}`) + + let content = fs.readFileSync(filePath, 'utf-8') + + // Find all image references + const imageRegex = /!\[([^\]]*)\]\((\.\/_assets\/[^)]+)\)/g + const images = [] + let match + + while ((match = imageRegex.exec(content)) !== null) { + const altText = match[1] + const imagePath = match[2] + const filename = path.basename(imagePath) + const varName = toVariableName(filename) + 'Img' + + // Check if we already have this image + if (!images.find((img) => img.varName === varName)) { + images.push({ + varName, + path: imagePath, + altText, + originalMatch: match[0], + }) + } + } + + if (images.length === 0) { + console.log(` No images found in ${path.basename(filePath)}`) + return + } + + 
console.log(` Found ${images.length} images`) + + // Find where to insert imports (after existing imports or frontmatter) + const frontmatterEnd = content.indexOf('---', content.indexOf('---') + 3) + 3 + let importInsertPosition = frontmatterEnd + + // Check if there are already imports + const existingImportRegex = /^import\s+.*$/gm + const imports = content.match(existingImportRegex) + + if (imports && imports.length > 0) { + // Find the last import + const lastImport = imports[imports.length - 1] + importInsertPosition = content.indexOf(lastImport) + lastImport.length + } + + // Generate import statements + const importStatements = images + .map((img) => `import ${img.varName} from '${img.path}';`) + .join('\n') + + // Insert imports + if (imports && imports.length > 0) { + // Add to existing imports + content = + content.slice(0, importInsertPosition) + + '\n' + + importStatements + + content.slice(importInsertPosition) + } else { + // Add new import section after frontmatter + content = + content.slice(0, frontmatterEnd) + + '\n\n' + + importStatements + + '\n' + + content.slice(frontmatterEnd) + } + + // Replace all image references with JSX img tags + images.forEach((img) => { + // Create regex for this specific image + const specificImageRegex = new RegExp( + `!\\[([^\\]]*)\\]\\(${img.path.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\)`, + 'g' + ) + + content = content.replace(specificImageRegex, (match, altText) => { + return `${altText || img.altText}` + }) + }) + + // Write the updated content back + fs.writeFileSync(filePath, content) + console.log(` ✓ Updated ${path.basename(filePath)}`) +} + +// Process all MDX files in the blog directory +function processAllBlogPosts() { + const files = fs.readdirSync(blogDir) + const mdxFiles = files.filter((file) => file.endsWith('.mdx')) + + console.log(`Found ${mdxFiles.length} MDX files in blog directory\n`) + + mdxFiles.forEach((file) => { + const filePath = path.join(blogDir, file) + try { + 
processMDXFile(filePath) + } catch (error) { + console.error(`Error processing ${file}:`, error.message) + } + }) + + console.log('\n✨ All blog posts processed!') +} + +// Run the script +processAllBlogPosts() diff --git a/website/src/assets/blog/3090s.jpg b/website/src/assets/blog/3090s.jpg new file mode 100644 index 000000000..3a62b3f6f Binary files /dev/null and b/website/src/assets/blog/3090s.jpg differ diff --git a/website/src/assets/blog/4070s.jpg b/website/src/assets/blog/4070s.jpg new file mode 100644 index 000000000..3d8622347 Binary files /dev/null and b/website/src/assets/blog/4070s.jpg differ diff --git a/website/src/assets/blog/4090s.png b/website/src/assets/blog/4090s.png new file mode 100644 index 000000000..2c49a3248 Binary files /dev/null and b/website/src/assets/blog/4090s.png differ diff --git a/website/src/assets/blog/ai-locally-llama.cpp.jpg b/website/src/assets/blog/ai-locally-llama.cpp.jpg new file mode 100644 index 000000000..967b63bf7 Binary files /dev/null and b/website/src/assets/blog/ai-locally-llama.cpp.jpg differ diff --git a/website/src/assets/blog/catastrophic-demo.png b/website/src/assets/blog/catastrophic-demo.png new file mode 100644 index 000000000..7c869fc0e Binary files /dev/null and b/website/src/assets/blog/catastrophic-demo.png differ diff --git a/website/src/assets/blog/chat-with-docs-prompt.jpg b/website/src/assets/blog/chat-with-docs-prompt.jpg new file mode 100644 index 000000000..df47dd4ef Binary files /dev/null and b/website/src/assets/blog/chat-with-docs-prompt.jpg differ diff --git a/website/src/assets/blog/chat-with-your-docs-offline-ai.jpg b/website/src/assets/blog/chat-with-your-docs-offline-ai.jpg new file mode 100644 index 000000000..efcda0f07 Binary files /dev/null and b/website/src/assets/blog/chat-with-your-docs-offline-ai.jpg differ diff --git a/website/src/assets/blog/chat-with-your-docs2.jpg b/website/src/assets/blog/chat-with-your-docs2.jpg new file mode 100644 index 000000000..1577b3f5c Binary files 
/dev/null and b/website/src/assets/blog/chat-with-your-docs2.jpg differ diff --git a/website/src/assets/blog/deepseek-r1-locally-jan.jpg b/website/src/assets/blog/deepseek-r1-locally-jan.jpg new file mode 100644 index 000000000..2168b8986 Binary files /dev/null and b/website/src/assets/blog/deepseek-r1-locally-jan.jpg differ diff --git a/website/src/assets/blog/download-jan.jpg b/website/src/assets/blog/download-jan.jpg new file mode 100644 index 000000000..f799260c7 Binary files /dev/null and b/website/src/assets/blog/download-jan.jpg differ diff --git a/website/src/assets/blog/egpu.jpg b/website/src/assets/blog/egpu.jpg new file mode 100644 index 000000000..9f631d4fd Binary files /dev/null and b/website/src/assets/blog/egpu.jpg differ diff --git a/website/src/assets/blog/gradient-decent.gif b/website/src/assets/blog/gradient-decent.gif new file mode 100644 index 000000000..9828f2fe9 Binary files /dev/null and b/website/src/assets/blog/gradient-decent.gif differ diff --git a/website/src/assets/blog/hugging-face-jan-model-download.jpg b/website/src/assets/blog/hugging-face-jan-model-download.jpg new file mode 100644 index 000000000..c6cfa8ea5 Binary files /dev/null and b/website/src/assets/blog/hugging-face-jan-model-download.jpg differ diff --git a/website/src/assets/blog/jan-hf-model-download.jpg b/website/src/assets/blog/jan-hf-model-download.jpg new file mode 100644 index 000000000..929acf2ff Binary files /dev/null and b/website/src/assets/blog/jan-hf-model-download.jpg differ diff --git a/website/src/assets/blog/jan-hub-deepseek-r1.jpg b/website/src/assets/blog/jan-hub-deepseek-r1.jpg new file mode 100644 index 000000000..12c0c6640 Binary files /dev/null and b/website/src/assets/blog/jan-hub-deepseek-r1.jpg differ diff --git a/website/src/assets/blog/jan-hub-download-deepseek-r1-2.jpg b/website/src/assets/blog/jan-hub-download-deepseek-r1-2.jpg new file mode 100644 index 000000000..24be4bd25 Binary files /dev/null and 
b/website/src/assets/blog/jan-hub-download-deepseek-r1-2.jpg differ diff --git a/website/src/assets/blog/jan-hub-download-deepseek-r1.jpg b/website/src/assets/blog/jan-hub-download-deepseek-r1.jpg new file mode 100644 index 000000000..83d9ab370 Binary files /dev/null and b/website/src/assets/blog/jan-hub-download-deepseek-r1.jpg differ diff --git a/website/src/assets/blog/jan-hub-for-ai-models.jpg b/website/src/assets/blog/jan-hub-for-ai-models.jpg new file mode 100644 index 000000000..a158499b4 Binary files /dev/null and b/website/src/assets/blog/jan-hub-for-ai-models.jpg differ diff --git a/website/src/assets/blog/jan-library-deepseek-r1.jpg b/website/src/assets/blog/jan-library-deepseek-r1.jpg new file mode 100644 index 000000000..6a54082dc Binary files /dev/null and b/website/src/assets/blog/jan-library-deepseek-r1.jpg differ diff --git a/website/src/assets/blog/jan-local-ai.jpg b/website/src/assets/blog/jan-local-ai.jpg new file mode 100644 index 000000000..2c8c145ff Binary files /dev/null and b/website/src/assets/blog/jan-local-ai.jpg differ diff --git a/website/src/assets/blog/jan-model-download.jpg b/website/src/assets/blog/jan-model-download.jpg new file mode 100644 index 000000000..7e949403d Binary files /dev/null and b/website/src/assets/blog/jan-model-download.jpg differ diff --git a/website/src/assets/blog/jan-model-selection.jpg b/website/src/assets/blog/jan-model-selection.jpg new file mode 100644 index 000000000..b630c800e Binary files /dev/null and b/website/src/assets/blog/jan-model-selection.jpg differ diff --git a/website/src/assets/blog/jan-runs-deepseek-r1-distills.jpg b/website/src/assets/blog/jan-runs-deepseek-r1-distills.jpg new file mode 100644 index 000000000..02ce847f4 Binary files /dev/null and b/website/src/assets/blog/jan-runs-deepseek-r1-distills.jpg differ diff --git a/website/src/assets/blog/jan-system-prompt-deepseek-r1.jpg b/website/src/assets/blog/jan-system-prompt-deepseek-r1.jpg new file mode 100644 index 000000000..f79e71af0 
Binary files /dev/null and b/website/src/assets/blog/jan-system-prompt-deepseek-r1.jpg differ diff --git a/website/src/assets/blog/jan.ai.jpg b/website/src/assets/blog/jan.ai.jpg new file mode 100644 index 000000000..d635d1ab9 Binary files /dev/null and b/website/src/assets/blog/jan.ai.jpg differ diff --git a/website/src/assets/blog/local-ai-model-parameters.jpg b/website/src/assets/blog/local-ai-model-parameters.jpg new file mode 100644 index 000000000..1d26fc4a5 Binary files /dev/null and b/website/src/assets/blog/local-ai-model-parameters.jpg differ diff --git a/website/src/assets/blog/offline-chatgpt-alternative-ai-without-internet.jpg b/website/src/assets/blog/offline-chatgpt-alternative-ai-without-internet.jpg new file mode 100644 index 000000000..6dffb1e95 Binary files /dev/null and b/website/src/assets/blog/offline-chatgpt-alternative-ai-without-internet.jpg differ diff --git a/website/src/assets/blog/offline-chatgpt-alternatives-jan.jpg b/website/src/assets/blog/offline-chatgpt-alternatives-jan.jpg new file mode 100644 index 000000000..065b33636 Binary files /dev/null and b/website/src/assets/blog/offline-chatgpt-alternatives-jan.jpg differ diff --git a/website/src/assets/blog/og-4090s.webp b/website/src/assets/blog/og-4090s.webp new file mode 100644 index 000000000..6db1b10b2 Binary files /dev/null and b/website/src/assets/blog/og-4090s.webp differ diff --git a/website/src/assets/blog/open-source-ai-quantization.jpg b/website/src/assets/blog/open-source-ai-quantization.jpg new file mode 100644 index 000000000..fe605c3cd Binary files /dev/null and b/website/src/assets/blog/open-source-ai-quantization.jpg differ diff --git a/website/src/assets/blog/openchat-bench-0106.png b/website/src/assets/blog/openchat-bench-0106.png new file mode 100644 index 000000000..9fa37960f Binary files /dev/null and b/website/src/assets/blog/openchat-bench-0106.png differ diff --git a/website/src/assets/blog/qwen3-in-jan-hub.jpeg b/website/src/assets/blog/qwen3-in-jan-hub.jpeg 
new file mode 100644 index 000000000..e58c5beab Binary files /dev/null and b/website/src/assets/blog/qwen3-in-jan-hub.jpeg differ diff --git a/website/src/assets/blog/qwen3-settings-in-jan.jpeg b/website/src/assets/blog/qwen3-settings-in-jan.jpeg new file mode 100644 index 000000000..82d7540a7 Binary files /dev/null and b/website/src/assets/blog/qwen3-settings-in-jan.jpeg differ diff --git a/website/src/assets/blog/qwen3-settings-jan-ai.jpeg b/website/src/assets/blog/qwen3-settings-jan-ai.jpeg new file mode 100644 index 000000000..7fc432e38 Binary files /dev/null and b/website/src/assets/blog/qwen3-settings-jan-ai.jpeg differ diff --git a/website/src/assets/blog/replay.png b/website/src/assets/blog/replay.png new file mode 100644 index 000000000..8ada6ce84 Binary files /dev/null and b/website/src/assets/blog/replay.png differ diff --git a/website/src/assets/blog/run-ai-locally-with-jan.jpg b/website/src/assets/blog/run-ai-locally-with-jan.jpg new file mode 100644 index 000000000..942ab38ba Binary files /dev/null and b/website/src/assets/blog/run-ai-locally-with-jan.jpg differ diff --git a/website/src/assets/blog/run-deepseek-r1-locally-in-jan.jpg b/website/src/assets/blog/run-deepseek-r1-locally-in-jan.jpg new file mode 100644 index 000000000..aa6980585 Binary files /dev/null and b/website/src/assets/blog/run-deepseek-r1-locally-in-jan.jpg differ diff --git a/website/src/assets/blog/throughput_Comparison.png b/website/src/assets/blog/throughput_Comparison.png new file mode 100644 index 000000000..6bb63d03c Binary files /dev/null and b/website/src/assets/blog/throughput_Comparison.png differ diff --git a/website/src/assets/tom_gauld.png b/website/src/assets/tom_gauld.png new file mode 100644 index 000000000..1f929f645 Binary files /dev/null and b/website/src/assets/tom_gauld.png differ diff --git a/website/src/components/Blog/BlogImage.astro b/website/src/components/Blog/BlogImage.astro new file mode 100644 index 000000000..cb3190d56 --- /dev/null +++ 
b/website/src/components/Blog/BlogImage.astro @@ -0,0 +1,230 @@ +--- +export interface Props { + src: string; + alt: string; + caption?: string; + width?: number; + height?: number; + loading?: 'lazy' | 'eager'; + class?: string; +} + +const { + src, + alt, + caption, + width, + height, + loading = 'lazy', + class: className = '' +} = Astro.props; + +// Handle different image path formats +let imageSrc = src; + +// If the path starts with ./ or ../, it's a relative path from the MDX file +if (src.startsWith('./') || src.startsWith('../')) { + // Remove the leading ./ or ../ + imageSrc = src.replace(/^\.\.?\//, ''); + + // Prepend the blog content path if it doesn't include it + if (!imageSrc.includes('/content/blog/')) { + imageSrc = `/src/content/blog/${imageSrc}`; + } +} else if (!src.startsWith('http') && !src.startsWith('/')) { + // For paths without ./ prefix, assume they're relative to blog content + imageSrc = `/src/content/blog/${src}`; +} +--- + +
+ {alt} + {caption && ( +
{caption}
+ )} +
+ + + + diff --git a/website/src/components/Blog/CTABlog.astro b/website/src/components/Blog/CTABlog.astro new file mode 100644 index 000000000..3d1ab1044 --- /dev/null +++ b/website/src/components/Blog/CTABlog.astro @@ -0,0 +1,87 @@ +--- +export interface Props { + title?: string; + description?: string; + buttonText?: string; + buttonLink?: string; + variant?: 'primary' | 'secondary' | 'gradient'; + align?: 'left' | 'center' | 'right'; +} + +const { + title = "Ready to get started?", + description = "Download Jan and start running AI models locally on your device.", + buttonText = "Download Jan", + buttonLink = "https://jan.ai", + variant = 'primary', + align = 'center' +} = Astro.props; + +const variantClasses = { + primary: 'bg-blue-50 dark:bg-blue-900/20 border-blue-200 dark:border-blue-800', + secondary: 'bg-gray-50 dark:bg-gray-900/20 border-gray-200 dark:border-gray-800', + gradient: 'bg-gradient-to-r from-blue-50 to-purple-50 dark:from-blue-900/20 dark:to-purple-900/20 border-purple-200 dark:border-purple-800' +}; + +const alignClasses = { + left: 'text-left', + center: 'text-center', + right: 'text-right' +}; + +const buttonVariantClasses = { + primary: 'bg-blue-600 hover:bg-blue-700 text-white', + secondary: 'bg-gray-800 hover:bg-gray-900 dark:bg-gray-200 dark:hover:bg-gray-300 text-white dark:text-gray-900', + gradient: 'bg-gradient-to-r from-blue-600 to-purple-600 hover:from-blue-700 hover:to-purple-700 text-white' +}; +--- + +
+
+ {title && ( +

+ {title} +

+ )} + + {description && ( +

+ {description} +

+ )} + + + {buttonText} + + + + +
+
+ + diff --git a/website/src/components/Callout.astro b/website/src/components/Callout.astro new file mode 100644 index 000000000..5329bf57a --- /dev/null +++ b/website/src/components/Callout.astro @@ -0,0 +1,85 @@ +--- +export interface Props { + type?: 'info' | 'warning' | 'error' | 'success' | 'note'; + emoji?: string; + children?: any; +} + +const { type = 'note', emoji } = Astro.props; + +const typeConfig = { + info: { + bgColor: 'bg-blue-50 dark:bg-blue-900/20', + borderColor: 'border-blue-200 dark:border-blue-800', + textColor: 'text-blue-900 dark:text-blue-200', + defaultEmoji: 'ℹ️' + }, + warning: { + bgColor: 'bg-yellow-50 dark:bg-yellow-900/20', + borderColor: 'border-yellow-200 dark:border-yellow-800', + textColor: 'text-yellow-900 dark:text-yellow-200', + defaultEmoji: '⚠️' + }, + error: { + bgColor: 'bg-red-50 dark:bg-red-900/20', + borderColor: 'border-red-200 dark:border-red-800', + textColor: 'text-red-900 dark:text-red-200', + defaultEmoji: '🚨' + }, + success: { + bgColor: 'bg-green-50 dark:bg-green-900/20', + borderColor: 'border-green-200 dark:border-green-800', + textColor: 'text-green-900 dark:text-green-200', + defaultEmoji: '✅' + }, + note: { + bgColor: 'bg-gray-50 dark:bg-gray-900/20', + borderColor: 'border-gray-200 dark:border-gray-800', + textColor: 'text-gray-900 dark:text-gray-200', + defaultEmoji: '📝' + } +}; + +const config = typeConfig[type] || typeConfig.note; +const displayEmoji = emoji || config.defaultEmoji; +--- + +
+
+ +
+ +
+
+
+ + diff --git a/website/src/components/Changelog/ChangelogHeader.astro b/website/src/components/Changelog/ChangelogHeader.astro new file mode 100644 index 000000000..57212fb9b --- /dev/null +++ b/website/src/components/Changelog/ChangelogHeader.astro @@ -0,0 +1,36 @@ +--- +export interface Props { + title: string; + date: string; + ogImage?: string; +} + +const { title, date, ogImage } = Astro.props; + +// Format the date nicely +const formattedDate = new Date(date).toLocaleDateString('en-US', { + year: 'numeric', + month: 'long', + day: 'numeric' +}); +--- + +
+ {ogImage && ( +
+ {title} +
+ )} +
+ + diff --git a/website/src/components/CustomNav.astro b/website/src/components/CustomNav.astro index fee336449..62300644c 100644 --- a/website/src/components/CustomNav.astro +++ b/website/src/components/CustomNav.astro @@ -4,6 +4,17 @@ import Search from '@astrojs/starlight/components/Search.astro'; import ThemeSelect from '@astrojs/starlight/components/ThemeSelect.astro'; import { Icon } from '@astrojs/starlight/components'; + +// Determine if we're on a docs page based on the current path +const currentPath = Astro.url.pathname; +const isDocsPage = currentPath.startsWith('/jan/') || + currentPath.startsWith('/mobile/') || + currentPath.startsWith('/server/') || + currentPath.startsWith('/local-server/') || + currentPath === '/' || + currentPath === '/index' || + currentPath === '/docs' || + currentPath === '/docs/'; ---
@@ -11,18 +22,24 @@ import { Icon } from '@astrojs/starlight/components';
- + 👋 Jan - - @@ -33,82 +50,113 @@ import { Icon } from '@astrojs/starlight/components';
- +
- - + + + Changelog + + API Reference - - + +
+ +
- - + + + {!isDocsPage && ( +
+ + +
+ )}
- diff --git a/website/src/components/DownloadButton.astro b/website/src/components/DownloadButton.astro new file mode 100644 index 000000000..ba4a5d49a --- /dev/null +++ b/website/src/components/DownloadButton.astro @@ -0,0 +1,233 @@ +--- +export interface Props { + class?: string; + showStats?: boolean; + downloadCount?: string; +} + +const { class: className, showStats = false, downloadCount = '3.8M+' } = Astro.props; + +// Download links for different platforms +const downloadLinks = { + 'mac-intel': 'https://github.com/janhq/jan/releases/download/v0.5.14/jan-mac-x64-0.5.14.dmg', + 'mac-arm': 'https://github.com/janhq/jan/releases/download/v0.5.14/jan-mac-arm64-0.5.14.dmg', + 'windows': 'https://github.com/janhq/jan/releases/download/v0.5.14/jan-win-x64-0.5.14.exe', + 'linux-deb': 'https://github.com/janhq/jan/releases/download/v0.5.14/jan-linux-amd64-0.5.14.deb', + 'linux-appimage': 'https://github.com/janhq/jan/releases/download/v0.5.14/jan-linux-x86_64-0.5.14.AppImage' +}; +--- + +
+
+ + + + +
+ + + + + + + + +
+
+ + {showStats && ( +

+ {downloadCount} downloads | Free & Open Source +

+ )} +
+ + + + diff --git a/website/src/components/Steps.astro b/website/src/components/Steps.astro new file mode 100644 index 000000000..3abeff7e5 --- /dev/null +++ b/website/src/components/Steps.astro @@ -0,0 +1,112 @@ +--- +export interface Props { + class?: string; +} + +const { class: className } = Astro.props; +--- + +
+ +
+ + diff --git a/website/src/components/YouTube.astro b/website/src/components/YouTube.astro new file mode 100644 index 000000000..6459addcb --- /dev/null +++ b/website/src/components/YouTube.astro @@ -0,0 +1,60 @@ +--- +export interface Props { + id: string; + title?: string; + class?: string; +} + +const { id, title = 'YouTube video player', class: className } = Astro.props; + +// Extract video ID and handle both formats: +// - Simple ID: "4mvHgLy_YV8" +// - ID with params: "4mvHgLy_YV8?si=74cmdMmcH3gmpv0R" +const videoId = id.split('?')[0]; +const params = id.includes('?') ? '?' + id.split('?')[1] : ''; +--- + +
+ +
+ + diff --git a/website/src/content.config.ts b/website/src/content.config.ts index ee21da167..3c8b69d82 100644 --- a/website/src/content.config.ts +++ b/website/src/content.config.ts @@ -1,8 +1,38 @@ -import { defineCollection } from 'astro:content'; +import { defineCollection, z } from 'astro:content'; import { docsLoader } from '@astrojs/starlight/loaders'; import { docsSchema } from '@astrojs/starlight/schema'; import { videosSchema } from 'starlight-videos/schemas'; +const changelogSchema = z.object({ + title: z.string(), + description: z.string(), + date: z.date(), + version: z.string().optional(), + image: z.string().optional(), + gif: z.string().optional(), + video: z.string().optional(), + featured: z.boolean().default(false), +}); + +const blogSchema = z.object({ + title: z.string(), + description: z.string(), + date: z.date(), + tags: z.string().optional(), + categories: z.string().optional(), + author: z.string().optional(), + ogImage: z.string().optional(), + featured: z.boolean().default(false), +}); + export const collections = { docs: defineCollection({ loader: docsLoader(), schema: docsSchema({ extend: videosSchema }) }), + changelog: defineCollection({ + type: 'content', + schema: changelogSchema, + }), + blog: defineCollection({ + type: 'content', + schema: blogSchema, + }), }; diff --git a/website/src/content/blog/_assets/3090s.jpg b/website/src/content/blog/_assets/3090s.jpg new file mode 100644 index 000000000..3a62b3f6f Binary files /dev/null and b/website/src/content/blog/_assets/3090s.jpg differ diff --git a/website/src/content/blog/_assets/4070s.jpg b/website/src/content/blog/_assets/4070s.jpg new file mode 100644 index 000000000..3d8622347 Binary files /dev/null and b/website/src/content/blog/_assets/4070s.jpg differ diff --git a/website/src/content/blog/_assets/4090s.png b/website/src/content/blog/_assets/4090s.png new file mode 100644 index 000000000..2c49a3248 Binary files /dev/null and b/website/src/content/blog/_assets/4090s.png 
differ diff --git a/website/src/content/blog/_assets/ai-locally-llama.cpp.jpg b/website/src/content/blog/_assets/ai-locally-llama.cpp.jpg new file mode 100644 index 000000000..967b63bf7 Binary files /dev/null and b/website/src/content/blog/_assets/ai-locally-llama.cpp.jpg differ diff --git a/website/src/content/blog/_assets/catastrophic-demo.png b/website/src/content/blog/_assets/catastrophic-demo.png new file mode 100644 index 000000000..7c869fc0e Binary files /dev/null and b/website/src/content/blog/_assets/catastrophic-demo.png differ diff --git a/website/src/content/blog/_assets/chat-with-docs-prompt.jpg b/website/src/content/blog/_assets/chat-with-docs-prompt.jpg new file mode 100644 index 000000000..df47dd4ef Binary files /dev/null and b/website/src/content/blog/_assets/chat-with-docs-prompt.jpg differ diff --git a/website/src/content/blog/_assets/chat-with-your-docs-offline-ai.jpg b/website/src/content/blog/_assets/chat-with-your-docs-offline-ai.jpg new file mode 100644 index 000000000..efcda0f07 Binary files /dev/null and b/website/src/content/blog/_assets/chat-with-your-docs-offline-ai.jpg differ diff --git a/website/src/content/blog/_assets/chat-with-your-docs2.jpg b/website/src/content/blog/_assets/chat-with-your-docs2.jpg new file mode 100644 index 000000000..1577b3f5c Binary files /dev/null and b/website/src/content/blog/_assets/chat-with-your-docs2.jpg differ diff --git a/website/src/content/blog/_assets/deepseek-r1-locally-jan.jpg b/website/src/content/blog/_assets/deepseek-r1-locally-jan.jpg new file mode 100644 index 000000000..2168b8986 Binary files /dev/null and b/website/src/content/blog/_assets/deepseek-r1-locally-jan.jpg differ diff --git a/website/src/content/blog/_assets/download-jan.jpg b/website/src/content/blog/_assets/download-jan.jpg new file mode 100644 index 000000000..f799260c7 Binary files /dev/null and b/website/src/content/blog/_assets/download-jan.jpg differ diff --git a/website/src/content/blog/_assets/egpu.jpg 
b/website/src/content/blog/_assets/egpu.jpg new file mode 100644 index 000000000..9f631d4fd Binary files /dev/null and b/website/src/content/blog/_assets/egpu.jpg differ diff --git a/website/src/content/blog/_assets/gradient-decent.gif b/website/src/content/blog/_assets/gradient-decent.gif new file mode 100644 index 000000000..9828f2fe9 Binary files /dev/null and b/website/src/content/blog/_assets/gradient-decent.gif differ diff --git a/website/src/content/blog/_assets/hugging-face-jan-model-download.jpg b/website/src/content/blog/_assets/hugging-face-jan-model-download.jpg new file mode 100644 index 000000000..c6cfa8ea5 Binary files /dev/null and b/website/src/content/blog/_assets/hugging-face-jan-model-download.jpg differ diff --git a/website/src/content/blog/_assets/jan-hf-model-download.jpg b/website/src/content/blog/_assets/jan-hf-model-download.jpg new file mode 100644 index 000000000..929acf2ff Binary files /dev/null and b/website/src/content/blog/_assets/jan-hf-model-download.jpg differ diff --git a/website/src/content/blog/_assets/jan-hub-deepseek-r1.jpg b/website/src/content/blog/_assets/jan-hub-deepseek-r1.jpg new file mode 100644 index 000000000..12c0c6640 Binary files /dev/null and b/website/src/content/blog/_assets/jan-hub-deepseek-r1.jpg differ diff --git a/website/src/content/blog/_assets/jan-hub-download-deepseek-r1-2.jpg b/website/src/content/blog/_assets/jan-hub-download-deepseek-r1-2.jpg new file mode 100644 index 000000000..24be4bd25 Binary files /dev/null and b/website/src/content/blog/_assets/jan-hub-download-deepseek-r1-2.jpg differ diff --git a/website/src/content/blog/_assets/jan-hub-download-deepseek-r1.jpg b/website/src/content/blog/_assets/jan-hub-download-deepseek-r1.jpg new file mode 100644 index 000000000..83d9ab370 Binary files /dev/null and b/website/src/content/blog/_assets/jan-hub-download-deepseek-r1.jpg differ diff --git a/website/src/content/blog/_assets/jan-hub-for-ai-models.jpg 
b/website/src/content/blog/_assets/jan-hub-for-ai-models.jpg new file mode 100644 index 000000000..a158499b4 Binary files /dev/null and b/website/src/content/blog/_assets/jan-hub-for-ai-models.jpg differ diff --git a/website/src/content/blog/_assets/jan-library-deepseek-r1.jpg b/website/src/content/blog/_assets/jan-library-deepseek-r1.jpg new file mode 100644 index 000000000..6a54082dc Binary files /dev/null and b/website/src/content/blog/_assets/jan-library-deepseek-r1.jpg differ diff --git a/website/src/content/blog/_assets/jan-local-ai.jpg b/website/src/content/blog/_assets/jan-local-ai.jpg new file mode 100644 index 000000000..2c8c145ff Binary files /dev/null and b/website/src/content/blog/_assets/jan-local-ai.jpg differ diff --git a/website/src/content/blog/_assets/jan-model-download.jpg b/website/src/content/blog/_assets/jan-model-download.jpg new file mode 100644 index 000000000..7e949403d Binary files /dev/null and b/website/src/content/blog/_assets/jan-model-download.jpg differ diff --git a/website/src/content/blog/_assets/jan-model-selection.jpg b/website/src/content/blog/_assets/jan-model-selection.jpg new file mode 100644 index 000000000..b630c800e Binary files /dev/null and b/website/src/content/blog/_assets/jan-model-selection.jpg differ diff --git a/website/src/content/blog/_assets/jan-runs-deepseek-r1-distills.jpg b/website/src/content/blog/_assets/jan-runs-deepseek-r1-distills.jpg new file mode 100644 index 000000000..02ce847f4 Binary files /dev/null and b/website/src/content/blog/_assets/jan-runs-deepseek-r1-distills.jpg differ diff --git a/website/src/content/blog/_assets/jan-system-prompt-deepseek-r1.jpg b/website/src/content/blog/_assets/jan-system-prompt-deepseek-r1.jpg new file mode 100644 index 000000000..f79e71af0 Binary files /dev/null and b/website/src/content/blog/_assets/jan-system-prompt-deepseek-r1.jpg differ diff --git a/website/src/content/blog/_assets/jan.ai.jpg b/website/src/content/blog/_assets/jan.ai.jpg new file mode 100644 
index 000000000..d635d1ab9 Binary files /dev/null and b/website/src/content/blog/_assets/jan.ai.jpg differ diff --git a/website/src/content/blog/_assets/local-ai-model-parameters.jpg b/website/src/content/blog/_assets/local-ai-model-parameters.jpg new file mode 100644 index 000000000..1d26fc4a5 Binary files /dev/null and b/website/src/content/blog/_assets/local-ai-model-parameters.jpg differ diff --git a/website/src/content/blog/_assets/offline-chatgpt-alternative-ai-without-internet.jpg b/website/src/content/blog/_assets/offline-chatgpt-alternative-ai-without-internet.jpg new file mode 100644 index 000000000..6dffb1e95 Binary files /dev/null and b/website/src/content/blog/_assets/offline-chatgpt-alternative-ai-without-internet.jpg differ diff --git a/website/src/content/blog/_assets/offline-chatgpt-alternatives-jan.jpg b/website/src/content/blog/_assets/offline-chatgpt-alternatives-jan.jpg new file mode 100644 index 000000000..065b33636 Binary files /dev/null and b/website/src/content/blog/_assets/offline-chatgpt-alternatives-jan.jpg differ diff --git a/website/src/content/blog/_assets/og-4090s.webp b/website/src/content/blog/_assets/og-4090s.webp new file mode 100644 index 000000000..6db1b10b2 Binary files /dev/null and b/website/src/content/blog/_assets/og-4090s.webp differ diff --git a/website/src/content/blog/_assets/open-source-ai-quantization.jpg b/website/src/content/blog/_assets/open-source-ai-quantization.jpg new file mode 100644 index 000000000..fe605c3cd Binary files /dev/null and b/website/src/content/blog/_assets/open-source-ai-quantization.jpg differ diff --git a/website/src/content/blog/_assets/openchat-bench-0106.png b/website/src/content/blog/_assets/openchat-bench-0106.png new file mode 100644 index 000000000..9fa37960f Binary files /dev/null and b/website/src/content/blog/_assets/openchat-bench-0106.png differ diff --git a/website/src/content/blog/_assets/qwen3-in-jan-hub.jpeg b/website/src/content/blog/_assets/qwen3-in-jan-hub.jpeg new file 
mode 100644 index 000000000..e58c5beab Binary files /dev/null and b/website/src/content/blog/_assets/qwen3-in-jan-hub.jpeg differ diff --git a/website/src/content/blog/_assets/qwen3-settings-in-jan.jpeg b/website/src/content/blog/_assets/qwen3-settings-in-jan.jpeg new file mode 100644 index 000000000..82d7540a7 Binary files /dev/null and b/website/src/content/blog/_assets/qwen3-settings-in-jan.jpeg differ diff --git a/website/src/content/blog/_assets/qwen3-settings-jan-ai.jpeg b/website/src/content/blog/_assets/qwen3-settings-jan-ai.jpeg new file mode 100644 index 000000000..7fc432e38 Binary files /dev/null and b/website/src/content/blog/_assets/qwen3-settings-jan-ai.jpeg differ diff --git a/website/src/content/blog/_assets/replay.png b/website/src/content/blog/_assets/replay.png new file mode 100644 index 000000000..8ada6ce84 Binary files /dev/null and b/website/src/content/blog/_assets/replay.png differ diff --git a/website/src/content/blog/_assets/run-ai-locally-with-jan.jpg b/website/src/content/blog/_assets/run-ai-locally-with-jan.jpg new file mode 100644 index 000000000..942ab38ba Binary files /dev/null and b/website/src/content/blog/_assets/run-ai-locally-with-jan.jpg differ diff --git a/website/src/content/blog/_assets/run-deepseek-r1-locally-in-jan.jpg b/website/src/content/blog/_assets/run-deepseek-r1-locally-in-jan.jpg new file mode 100644 index 000000000..aa6980585 Binary files /dev/null and b/website/src/content/blog/_assets/run-deepseek-r1-locally-in-jan.jpg differ diff --git a/website/src/content/blog/_assets/throughput_Comparison.png b/website/src/content/blog/_assets/throughput_Comparison.png new file mode 100644 index 000000000..6bb63d03c Binary files /dev/null and b/website/src/content/blog/_assets/throughput_Comparison.png differ diff --git a/website/src/content/blog/benchmarking-nvidia-tensorrt-llm.mdx b/website/src/content/blog/benchmarking-nvidia-tensorrt-llm.mdx new file mode 100644 index 000000000..75bb2099e --- /dev/null +++ 
b/website/src/content/blog/benchmarking-nvidia-tensorrt-llm.mdx @@ -0,0 +1,321 @@ +--- +title: Benchmarking NVIDIA TensorRT-LLM +description: This post compares the performance of TensorRT-LLM and llama.cpp on consumer NVIDIA GPUs, highlighting the trade-offs among speed, resource usage, and convenience. +tags: Nvidia, TensorRT-LLM, llama.cpp, rtx3090, rtx4090, "inference engine" +categories: research +ogImage: assets/images/general/og-throughput-benchmark.png +date: 2024-04-29 +--- + +import { Aside } from '@astrojs/starlight/components' + + +import throughputComparison from '@/assets/blog/throughput_Comparison.png'; +import img4090s from '@/assets/blog/4090s.png'; +import og4090s from '@/assets/blog/og-4090s.webp'; +import img3090s from '@/assets/blog/3090s.jpg'; +import img4070s from '@/assets/blog/4070s.jpg'; +import egpu from '@/assets/blog/egpu.jpg'; + + +Jan now supports [NVIDIA TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) in addition to [llama.cpp](https://github.com/ggerganov/llama.cpp), making Jan multi-engine and ultra-fast for users with Nvidia GPUs. + +We've been excited for TensorRT-LLM for a while, and [had a lot of fun implementing it](https://github.com/menloresearch/nitro-tensorrt-llm). As part of the process, we've run some benchmarks, to see how TensorRT-LLM fares on consumer hardware (e.g. [4090s](https://www.nvidia.com/en-us/geforce/graphics-cards/40-series/), [3090s](https://www.nvidia.com/en-us/geforce/graphics-cards/30-series/)) we commonly see in the [Jan's hardware community](https://discord.com/channels/1107178041848909847/1201834752206974996). + + + + + +## Key Findings + +image + +TensorRT-LLM was: + +- **30-70% faster** than llama.cpp on the same hardware +- **Consumes less memory on consecutive runs** and **marginally more GPU VRAM utilization** than llama.cpp +- **20%+ smaller compiled model sizes** than llama.cpp +- **Less convenient** as models have to be compiled for a specific OS and GPU architecture, vs. 
llama.cpp's "Compile once, run everywhere" portability +- **Less accessible** as it does not support older-generation NVIDIA GPUs + +## Why TensorRT-LLM? + +[TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) is Nvidia's open-source inference library that incorporates Nvidia's proprietary optimizations beyond the open-source [cuBLAS](https://developer.nvidia.com/cublas) library. + +As compared to [llama.cpp](https://github.com/ggerganov/llama.cpp), which today dominates Desktop AI as a cross-platform inference engine, TensorRT-LLM is highly optimized for Nvidia GPUs. While llama.cpp compiles models into a [single, generalizable CUDA "backend"](https://github.com/ggerganov/llama.cpp/blob/master/ggml-cuda.cu) that can run on a wide range of Nvidia GPUs, TensorRT-LLM compiles models into a [GPU-specific execution graph](https://www.baseten.co/blog/high-performance-ml-inference-with-nvidia-tensorrt/) that is highly optimized for that specific GPU's Tensor Cores, CUDA cores, VRAM and memory bandwidth. + +TensorRT-LLM is typically used in datacenter-grade GPUs, where it produces a [face-melting 10,000 tokens/s](https://nvidia.github.io/TensorRT-LLM/blogs/H100vsA100.html) on [NVIDIA H100 Tensor Core GPUs](https://www.nvidia.com/en-us/data-center/h100/). We were curious for how TensorRT-LLM performs on consumer-grade GPUs, and gave it a spin. 
+ +| Llama.cpp | TensorRT-LLM | +| ------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------- | +| Baseline | Blazing fast (30-70% faster) | +| Compile once, run cross-platform | Compiled and highly optimized for specific GPU architecture | +| Generalizable and Portable | Highly-optimized | +| Model compiles to [single, generalizable CUDA "backend"](https://github.com/ggerganov/llama.cpp/blob/master/ggml-cuda.cu) | Model compiles to [GPU-specific execution graph](https://www.baseten.co/blog/high-performance-ml-inference-with-nvidia-tensorrt/) | + +## Experiment Setup + +We ran the experiment using standardized inference requests in a sandboxed environment: +- **Model**: Mistral 7b model, compiled and quantized at a comparable `int4` quantization. +- **Test runs**: 5 batches of 10 runs each, per inference engine, on a bare metal PC with no other applications. +- **Parameters**: User defaults, i.e. `batch_size 1`, `input_len 2048` and `output_len 512` +- **Measurements**: + - CPU, memory from Jan system monitor + - GPU VRAM utilization metrics from `nvidia-smi`, and taken over an interval of 14 seconds. + - Throughput (token/sec) using [Jan's built-in Tokens/sec perf stat](https://github.com/search?q=repo%3Ajanhq%2Fjan%20timeDiffInSeconds&type=code). 
+ + + + + +### Hardware Selection + +We chose the following GPUs based on our users' preferences: + +| NVIDIA GPU | VRAM (GB) | CUDA Cores | Tensor Cores | Memory Bus Width (bit) | Memory Bandwidth (GB/s) | Connection (GB/s) | +| --------------------------------- | --------- | ---------- | ------------ | ---------------------- | ----------------------- | -------------------------------------------- | +| GeForce RTX 4090 (Ada) | 24 | 16,384 | 512 | 384 | ~1000 | PCIe4.0 x16 (~32) | +| GeForce RTX 3090 (Ampere) | 24 | 10,496 | 328 | 384 | 935.8 | PCIe4.0 x16 (~32) | +| GeForce RTX 4070 Laptop GPU (Ada) | 8 | 7680 | 144 | 192 | 272 | PCIe4.0 x4 (~8) | +| GeForce RTX 4090 eGPU (Ada) | 24 | 16,384 | 512 | 384 | ~1000 | Thunderbolt 3 connected to a USB4 USB-C port ([~1.25-5?](https://www.cablematters.com/Blog/Thunderbolt/usb4-vs-thunderbolt-3)) | + +### llama.cpp Setup + +- llama.cpp commit [15499eb](https://github.com/ggerganov/llama.cpp/commit/15499eb94227401bdc8875da6eb85c15d37068f7) +- We used `Mistral-7b-q4_k_m` in `GGUF` with `ngl` at `100` + + + +### TensorRT-LLM Setup + +- TensorRT-LLM version [0.7.1](https://github.com/NVIDIA/TensorRT-LLM/releases/tag/v0.7.1) and build on Windows +- For TensorRT-LLM, we used `Mistral-7b-int4 AWQ` +- We ran TensorRT-LLM with `free_gpu_memory_fraction` to test it with the lowest VRAM consumption +- Note: We picked AWQ for TensorRT-LLM to be a closer comparison to GGUF's Q4. + +## Results + +### NVIDIA GeForce RTX 4090 GPU + +image +*Jan is built on this Dual-4090 workstation, which recently got upgraded to a nice case* + +image +*The original case (or lack thereof) for our Dual-4090 cluster, as posted on [r/localllama](https://www.reddit.com/r/LocalLLaMA/comments/16lxt6a/case_for_dual_4090s/)* + + + +For this test, we used Jan's [Dual-4090 workstation](https://www.reddit.com/r/LocalLLaMA/comments/16lxt6a/case_for_dual_4090s/), which our engineers timeshare to build Jan. 
+ +The [NVIDIA GeForce RTX 4090](https://www.nvidia.com/en-us/geforce/graphics-cards/40-series/) is the latest top-of-the-line desktop GPU, with an MSRP of $1,599, and uses the Ada architecture. It has a ~1000 GB/s memory bandwidth within VRAM, and a PCIe4 x16 lane (~32 GB/s) between the GPU and the CPU. + +| Metrics | GGUF (using CPU) | GGUF (using GPU) | TensorRT-LLM | How TensorRT-LLM Compares | +| ------------------------ | ---------------- | ---------------- | ------------ | ------------------------- | +| Throughput (token/s) | 14.0 | 100.43 | 170.63 | ✅ 69.89% faster | +| Max GPU Utilization (%) | N/A | 83.50 | 88.50 | 5.99% more | +| Max VRAM Utilization (%) | N/A | 64 | 72.1 | 12.66% more | +| Avg RAM Used (GB) | 0.611 | 7.105 | 4.98 | ✅ 29.88% less | +| Disk Size (GB) | 4.07 | 4.06 | 3.05 | ✅ 24.88% smaller | + +TensorRT-LLM was almost 70% faster than llama.cpp by building the model for the GeForce RTX 4090 GPU’s Ada architecture for optimal graph execution, fully utilizing the 512 Tensor Cores, 16,384 CUDA cores, and 1,000 GB/s of memory bandwidth. + +The intuition for why llama.cpp is slower is because it compiles a model into a [single, generalizable CUDA “backend”](https://github.com/ggerganov/llama.cpp/blob/master/ggml-cuda.cu) that can run on many NVIDIA GPUs. Doing so requires llama.cpp to sacrifice all the optimizations that TensorRT-LLM makes with its compilation to a GPU-specific execution graph. + +### NVIDIA GeForce RTX 3090 GPU + +image +*Our 3090 Machine, now used by one of our engineers to build Jan* + + + +The [NVIDIA's GeForce RTX 3090](https://www.nvidia.com/en-us/geforce/graphics-cards/30-series/rtx-3090-3090ti/) is a popular desktop GPU, and retails for approximately $1,500 (as of April 24). It uses the NVIDIA Ampere architecture. As compared to its successor GeForce RTX 4090, it has 33% fewer CUDA cores (10,496) and Tensor Cores (328) and 7% less memory bandwidth (~930 GB/s). 
+ +| Metrics | GGUF (using CPU) | GGUF (using GPU) | TensorRT-LLM | How TensorRT-LLM Compares | +| ------------------------ | ---------------- | ---------------- | ------------ | ------------------------- | +| Throughput (token/s) | 11.42 | 88.70 | 144.19 | ✅ 62.57% faster | +| Max GPU Utilization (%) | N/A | 80.40 | 89.10 | 10.82% more | +| Max VRAM Utilization (%) | N/A | 66.80 | 76.20 | 14.07% more | +| Avg RAM Used (GB) | 0.611 | 2.60 | 0.98 | 62.41%% less | +| Disk Size (GB) | 4.07 | 4.06 | 3.05 | ✅ 24.88% smaller | + +Interestingly, the GeForce RTX 3090 was only 16.6% slower compared with the GeForce RTX 4090. On TPS, TensorRT-LLM outperformed llama.cpp by 62.57%. Curiously, it also used negligible RAM for subsequent inference requests after the initial model warmup. + +### NVIDIA GeForce RTX 4070 Laptop GPU + +image + + + +We also benchmarked an NVIDIA GeForce RTX 4070 Laptop GPU with 8gb of VRAM, which is a popular configuration among Jan users. Laptop GPUs are less powerful than their desktop counterparts, as they trade portability for reduced energy consumption and thermal constraints. + +| Metrics | GGUF (using CPU) | GGUF (using GPU) | TensorRT-LLM | Difference on GPU | +| ------------------------ | ---------------- | ---------------- | ------------ | ----------------- | +| Throughput (token/s) | 11.57 | 39.70 | 51.57 | ✅ 29.9% faster | +| Max GPU Utilization (%) | N/A | 80.00 | 84.67 | 5.83% more | +| Max VRAM Utilization (%) | N/A | 72.78 | 81.22 | 11.60% more | +| Avg RAM Used (GB) | 4.49 | 4.44 | 1.04 | ✅ 76.55%% less | +| Disk Size (GB) | 4.07 | 4.06 | 3.05 | ✅ 24.88% smaller | + +TensorRT-LLM on the laptop dGPU was 29.9% faster in tokens per second throughput than llama.cpp, but significantly slower than the desktop GPUs. + +The intuition for this is fairly simple: the GeForce RTX 4070 Laptop GPU has 53.1% fewer CUDA cores and Tensor Cores (compared to the 4090), and less VRAM (8gb vs. 24gb). 
This reduces the surface area for GPU-specific optimizations for TensorRT-LLM. + +The GeForce RTX 4070 Laptop GPU is also ~70% slower than the GeForce RTX 4090 desktop GPU, showing the hardware effect of less electricity draw, less VRAM, and thermal constraints on inference speed. + +### Laptop with NVIDIA GeForce RTX 4090 eGPU + +image + + + +Our last benchmark was to experiment with an [Asus RTX 4090 eGPU](https://www.gigabyte.com/Graphics-Card/GV-N4090IXEB-24GD), that was connected via a [Thunderbolt 3 port](https://www.gigabyte.com/Graphics-Card/GV-N4090IXEB-24GD) to the [Razer Blade 14's USB4 port](https://www.razer.com/sg-en/gaming-laptops/razer-blade-14). Theoretically, the results should be fairly similar to the GeForce RTX 4090 desktop GPU as they have identical underlying GPUs, but with very different connection speeds. + +We thought it would be an interesting to see how TensorRT-LLM handles a 68.4% reduction in communication bandwidth between the CPU and GPU: +- Thunderbolt 3 connection (1.25-5 GB/s?) for eGPUs +- PCIe 4.0 x4 (~8 GB/s) for "on device" desktops + + + +Overall, we used mid-to-high-end NVIDIA desktop GPUs for our tests, as TensorRT-LLM’s performance enhancements are most apparent on bigger VRAMs. For users with lower-spec machines, llama.cpp is better. + +| Metrics | GGUF (using CPU) | GGUF (using GPU) | TensorRT-LLM | Difference on GPU | +| ------------------------ | ---------------- | ---------------- | ------------ | ----------------- | +| Throughput (token/s) | 11.56 | 62.22 | 104.95 | ✅ 68.66% faster | +| Max VRAM Utilization (%) | 0 | 65 | 99 | 52.31% more | +| RAM Used (GB) | 0.611 | 5.38 | 4.11 | ✅ 23.61% less | +| Disk Size (GB) | 4.07 | 4.06 | 3.05 | ✅ 24.88% smaller | + +The Thunderbolt 3 eGPU had a 38.5% lower tokens/s as compared to the PCIe4.0 x16 connected GPU. But the % speedup vs. llama.cpp was similar, at around 69%. + +Interestingly, the VRAM used with the eGPU was variably higher. 
Our hypothesis is that the slower communication bandwidth results in more VRAM being allocated, as memory is released mostly slowly as well. + +## Conclusion + +### Token Speed + +image + +| Throughput (Higher is Better) | TensorRT-LLM | Llama.cpp | % Difference | +| ---------------------------------- | --------------- | ----------- | ------------- | +| GeForce RTX 4090 desktop GPU | ✅ 170.63t/s | 100.43t/s | 69.89% faster | +| GeForce RTX 3090 desktop GPU | ✅ 144.19t/s | 88.70t/s | 62.57% faster | +| GeForce RTX 4090 eGPU | ✅ 104.95t/s | 62.22t/s | 68.66% faster | +| GeForce RTX 4070 Laptop GPU | ✅ 51.57t/s | 39.70t/s | 29.90% faster | +| Laptop AMD Ryzen™ 9 8945HS, 8C/16T | (Not supported) | ✅ 11.57t/s | | + +- TensorRT-LLM is up to **70% faster** than llama.cpp on desktop GPUs (e.g. 3090 GPU, 4090 GPUs) while using less RAM & CPU (but more fully utilizing VRAM) +- TensorRT-LLM is up to **30% faster** on laptop GPUs (e.g. 4070 GPUs) with smaller VRAM + +### Max VRAM Utilization + +| Average VRAM utilization % | TensorRT-LLM | Llama.cpp | % Difference | +| ---------------------------- | ------------ | --------- | ------------ | +| GeForce RTX 4090 desktop GPU | 72.10 | 64.00 | 12.66% more | +| GeForce RTX 3090 desktop GPU | 76.20 | 66.80 | 14.07% more | +| GeForce RTX 4070 Laptop GPU | 81.22 | 72.78 | 11.06% more | +| GeForce RTX 4090 eGPU | N/A | N/A | N/A | + +- TensorRT-LLM used marginally more average VRAM utilization at peak utilization vs. llama.cpp (up to 14%). Though this could have interesting implications on consuming more electricity over time. 
+- Note: we used comparable (but not identical) quantizations, and TensorRT-LLM’s `AWQ INT4` is implemented differently from llama.cpp’s `q4_k_m` + +### Max RAM Usage + +| Max RAM utilization | TensorRT-LLM | Llama.cpp | % Difference | +| ---------------------------- | ------------ | --------- | ---------------- | +| GeForce RTX 4090 desktop GPU | ✅ 4.98 | 7.11 | ✅ 29.88% less | +| GeForce RTX 3090 desktop GPU | ✅ 0.98 | 2.60 | ✅ 62.41% less | +| GeForce RTX 4070 Laptop GPU | ✅ 1.04 | 4.44 | ✅ 76.55%% less | +| GeForce RTX 4090 eGPU | ✅ 4.11 | 5.38 | ✅ 23.61% less | + +TensorRT-LLM uses a lot less Max RAM vs. llama.cpp on slower connection (PCIe 3.0 or Thunderbolt 3) due to better memory management and efficient delegation to VRAM. On faster connection, it’s at least equal to llama.cpp. + +### Compiled Model Size and Number of Files +- Contrary to popular belief, TensorRT-LLM prebuilt models turned out to not be that huge +- Mistral 7b int4 was actually 25% smaller in TensorRT-LLM, at 3.05gb vs. 4.06gb +- Note: These are approximate comparisons, as TensorRT-LLM’s AWQ INT4 is implemented differently from llama.cpp’s q4_k_m +- The bigger takeaway is that the Compiled model sizes are roughly in the same ballpark, while the number of files for TensorRT-LLM is 7x the GGUF number of files. 
+ +| Model size (Lower is better) | TensorRT-LLM AWQ int4 | Llama.cpp GGUF Q4 | % Difference | +| ---------------------------- | --------------------- | ----------------- | ----------------- | +| Mistral 7B | ✅ 3.05GB | 4.06GB | ✅ 24.88% smaller | + +### Convenience +- Llama.cpp still wins on cross-platform versatility and convenience of a “compile once, run everywhere” approach +- TensorRT-LLM still requires compilation to specific OS and architecture, though this could be solved by pre-compiling and publishing models on [Nvidia's NGC Model Catalog](https://catalog.ngc.nvidia.com/orgs/nvidia/teams/ai-foundation/collections/codellama) + +### Accessibility +- Llama.cpp unsurprisingly beats TensorRT-LLM in terms of accessibility +- TensorRT-LLM does not support older NVIDIA GPUs and won’t work well on smaller VRAM cards (e.g. 2-4gb VRAM) + +## Final Notes + +Our benchmarking is not perfect. We evaluated over a dozen tools ([llmperf](https://github.com/ray-project/llmperf), [psutil](https://pypi.org/project/psutil/), [gpustat](https://github.com/wookayin/gpustat), native utilities, and more) and found that everyone measures TPS, common metrics differently. We eventually settled on using our own tools in Jan, which are consistent across any inference engine and hardware. As for runtime parameters, we went with default settings, likely representing the typical user experience. + +We also did not overclock for this benchmark , as it is not a default setting for most users. But we've measured in our tests that TensorRT-LLM can go even faster with a few tweaks. We see this as a pretty exciting future direction. + + + +We're also publishing the underlying [raw experimental data](https://drive.google.com/file/d/1rDwd8XD8erKt0EgIKqOBidv8LsCO6lef/view?usp=sharing), and would encourage the community to scrutinize and help us improve. 
+ +Special thanks to Asli Sabanci Demiroz, Annamalai Chockalingam, Jordan Dodge from Nvidia, and Georgi Gerganov from llama.cpp for feedback, review and suggestions. diff --git a/website/src/content/blog/bitdefender.mdx b/website/src/content/blog/bitdefender.mdx new file mode 100644 index 000000000..27a28e4b2 --- /dev/null +++ b/website/src/content/blog/bitdefender.mdx @@ -0,0 +1,149 @@ +--- +title: 'Bitdefender False Positive Flag' +description: "10th January 2024, Jan's 0.4.4 Release on Windows triggered Bitdefender to incorrectly flag it as infected with Gen:Variant.Tedy.258323, leading to automatic quarantine warnings on users' computers." +date: 2024-01-10 +tags: postmortem, bitdefender +categories: building-jan +keywords: + [ + postmortem, + bitdefender, + false positive, + antivirus, + jan, + nitro, + incident, + incident response, + supply chain security, + user communication, + documentation, + antivirus compatibility, + cross-platform testing, + proactive incident response, + user education, + lessons learned, + ] +--- + +import CTABlog from '@/components/Blog/CTABlog.astro'; + + +# Bitdefender False Positive Flag + +Following the recent incident related to Jan version 0.4.4 triggering Bitdefender on Windows with Gen:Variant.Tedy.258323 on January 10, 2024, we wanted to provide a comprehensive postmortem and outline the necessary follow-up actions. + +## Incident Overview + +### Bug Description + +Jan 0.4.4 installation on Windows triggered Bitdefender to flag it as infected with Gen:Variant.Tedy.258323, leading to automatic quarantine. + +### Affected Antivirus + +- McAfee / Microsoft Defender was unaffected +- Bitdefender consistently flagged the issue. + +### Incident Timeline + +- _10 Jan, 2:18 am SGT:_ Hawke flags up Malware antivirus errors for 0.4.4 installation on Windows computers. +- _10 Jan, 2:21 am SGT:_ @0xSage responds in Discord. +- _10 Jan, 2:35 am SGT:_ Hawke confirms multiple people have experienced this error on fresh installs. 
+- _10 Jan, 2:41 am SGT:_ @louis-jan and @dan-jan revert 0.4.4 out of an abundance of caution.
+- _Incident ongoing:_ To triage and investigate the next day.
+- _10 Jan, 11:36 am SGT:_ @Hien has investigated all versions of Nitro and conducted scans using Bitdefender. Only the 2 latest versions raised warnings (0.2.7, 0.2.8).
+- _10 Jan, 12:44 pm SGT:_ @Hien tested again for 0.2.6 and suggested using 0.2.6 for now; the 2 remaining Nitro versions (0.2.7, 0.2.8) will undergo further investigation.
+- The team started testing on the fixed build.
+- _10 Jan, 3:22 pm SGT:_ Diagnosis found that it's most likely a false positive. @Hien has only found a solution by attempting to build Nitro Windows CPU on a GitHub-hosted runner and hasn't identified the root cause yet.
+- _10 Jan, 5:24 pm SGT:_ @Hien tested two scenarios and is still trying to understand the workings of Bitdefender.
+- _11 Jan, 5:46 pm SGT:_ Postmortem meeting
+
+## Investigation Update
+
+- @Hien has investigated all versions of Nitro and conducted scans using Bitdefender, and only the 2 latest versions raised warnings from Bitdefender. Nitro 0.2.6, which is the highest version without the issue, was tested again, and it no longer triggers a warning from Bitdefender.
+- We have observed that Nitro versions up to 0.2.6 remain unaffected. However, Bitdefender flags versions 0.2.7 and 0.2.8 as infected, leading to the deletion. In order to proceed with the current release, Hien suggests downgrading Nitro to version 0.2.6 and conducting tests with this version. Simultaneously, he will investigate why Bitdefender is flagging versions 0.2.7 and 0.2.8.
+- It's essential to note that between versions 0.2.6, 0.2.7, and 0.2.8, only minor changes were made, which should not trigger a malicious code warning. We can refer to the changelog between 0.2.7 and 0.2.8 to pinpoint these changes.
+- Our primary message is to convey that we did not introduce malicious code into Jan (indicating a false positive), and the investigation aims to understand the root cause behind Bitdefender flagging versions 0.2.7 and 0.2.8. +- The current diagnosis looks like a false positive but it's still under investigation. Reference link: [here](https://stackoverflow.com/questions/75886428/fake-positive-bit-defender-problem-genvariant-tedy-304469), [here](https://stackoverflow.com/questions/58010466/bitdefender-detects-my-console-application-as-genvariant-ursu-56053), and [here](https://www.cisa.gov/sites/default/files/2023-06/mar-10365227.r1.v1.clear_.pdf). +- @Hien testing two scenarios and still trying to understand the workings of Bitdefender. Still under investigation: is the issue with the code or the CI? + - In Case 1, using the same CI agent for tags 0.2.6 and 0.2.8, after PRs by Alan and myself, Bitdefender flagged the Nitro CPU binary build. Naturally, one would conclude this is due to the code. + - However, I proceeded with a further experiment: for the 0.2.8 code, instead of using our CI agent, I used a GitHub hosted agent. This time, Bitdefender did not flag our binary build. +- We've identified the Bitdefender warning was not an attack. There is no malicious code +- We've isolated the event to originate from a CI agent, which resulted in a BitDefender false positive alert. + +## Follow-ups and Action Items + +1. **Reproduce Bitdefender Flag in Controlled Environment [Done]:** + + - _Objective:_ To replicate the issue in a controlled environment to understand the triggers and specifics of Bitdefender's detection. + +2. **Investigate Malicious Code or False Positive:** + + - _Objective:_ Determine whether the flagged issue is a result of actual malicious code or a false positive. If it's a false positive, work towards resolution while communicating with Bitdefender. + +3. 
**Supply Chain Attack Assessment:** + + - _Objective:_ Evaluate the possibility of a supply chain attack. Investigate whether the Nitro 0.4.4 distribution was compromised or tampered with during the release process. + +4. **Testing after the Hotfix:** + + - _Objective:_ In addition to verifying the issue after the fix, it is essential to conduct comprehensive testing across related areas, ensuring compatibility across different operating systems and antivirus software (latest version / free version only). + +5. **Process Improvement for Future Releases:** + + - _Objective:_ Identify and implement improvements to our release process to prevent similar incidents in the future. This may include enhanced testing procedures, code analysis, and collaboration with antivirus software providers during the pre-release phase. Additionally, we should add verifying the latest antivirus software in the release checklist. + +6. **Documentation of Tested Antivirus Versions:** + - _Objective:_ Create a document that outlines the testing conducted, including a matrix that correlates Jan versions with the tested antivirus versions. + - _Sample list:_ for consideration purpose + - Bitdefender + - McAfee + - Avira + - Kaspersky + - Norton + - Microsoft defender + - AVG + - TotalAV + +## Next Steps + +- The team should follow up on each action item with clear ownership priority, and deadlines. +- Communicate progress transparently with the community and clients through appropriate channels. If any insights or suggestions, share them within the dedicated channels. +- Update internal documentation and procedures based on the lessons learned from this incident. + +## Lessons Learned + +1. **Antivirus Compatibility Awareness:** + + - _Observation:_ The incident underscored the significance of recognizing and testing for antivirus compatibility, particularly with widely-used solutions like Bitdefender. 
+   - _Lesson Learned:_ In the future, we will integrate comprehensive checks for compatibility with various antivirus software, including both antivirus and "Malicious Code Detection," into our CI or QA checklist. This proactive measure aims to minimize false positive detections during the release and testing processes.
+
+2. **Cross-Platform Testing:**
+
+   - _Observation:_ The problem did not occur on macOS and Linux systems, implying a potential oversight in cross-platform testing during our release procedures.
+   - _Lesson Learned:_ Clarification — This observation is not directly related to antivirus testing. Instead, it underscores the necessity to improve our testing protocols, encompassing multiple operating systems. This ensures a thorough evaluation of potential issues on diverse platforms, considering the various antivirus software and differences in architectures on Mac and Linux systems.
+
+3. **User Communication and Documentation:**
+
+   - _Observation:_ Nicole was still active on Discord and GitHub at 2 am; her timely response enabled us to assess the impact accurately.
+   - _Lesson Learned:_ While our communication with users was effective in this instance, it was mainly due to Nicole's presence during the incident. To improve our overall response capability, we should prioritize "24/7 rapid triage and response." This involves ensuring continuous availability or establishing a reliable rotation of team members for swift user communication and issue documentation, further enhancing our incident response efficiency.
+
+4. **Proactive Incident Response:**
+
+   - _Observation:_ The incident response, while involving a prompt version rollback, experienced a slight delay due to the release occurring at midnight. This delay postponed the initiation of the investigation until the next working hours.
+ - _Lesson Learned:_ Recognizing the importance of swift incident response, particularly in time-sensitive situations, we acknowledge that releasing updates during off-hours can impact the immediacy of our actions. Moving forward, we will strive to optimize our release schedules to minimize delays and ensure that investigations can commence promptly regardless of the time of day. This may involve considering alternative release windows or implementing automated responses to critical incidents, ensuring a more proactive and timely resolution. + +5. **Supply Chain Security Measures:** + + - _Observation:_ While the incident prompted consideration of a potential supply chain attack, it's crucial to emphasize that this was not the case. Nonetheless, the incident underscored the importance of reviewing our supply chain security measures. + - _Lesson Learned:_ Going forward, we should strengthen supply chain security by introducing additional verification steps to uphold the integrity of our release process. Collaborating with distribution channels is essential for enhancing security checks and ensuring a robust supply chain. + - _Longer-term:_ Exploring options for checking Jan for malicious code and incorporating antivirus as part of our CI/CD pipeline should be considered for a more comprehensive and proactive approach. + +6. **User Education on False Positives:** + - _Observation:_ Users reported Bitdefender automatically "disinfecting" the flagged Nitro version without allowing any user actions. + - _Lesson Learned:_ Educate users about the possibility of false positives and guide them on how to whitelist or report such incidents to their antivirus provider (if possible). Provide clear communication on steps users can take in such situations. + +These lessons learned will serve as a foundation for refining our processes and ensuring a more resilient release and incident response framework in the future. 
Continuous improvement is key to maintaining the reliability and security of our software. + +Thank you for your dedication and cooperation in resolving this matter promptly. + + \ No newline at end of file diff --git a/website/src/content/blog/data-is-moat.mdx b/website/src/content/blog/data-is-moat.mdx new file mode 100644 index 000000000..5e238103a --- /dev/null +++ b/website/src/content/blog/data-is-moat.mdx @@ -0,0 +1,116 @@ +--- +title: "The Invisible Moat around Open-Source LLM" +description: "Uncover the pivotal role of data ownership in training the next iteration of LLM." +tags: OpenAI has a moat, Catastrophic forgetting, ChatGPT +date: 2024-03-25 +unlisted: true +categories: research +--- + +import CTABlog from '@/components/Blog/CTABlog.astro'; + +import catastrophicDemo from '@/assets/blog/catastrophic-demo.png'; +import gradientDecent from '@/assets/blog/gradient-decent.gif'; +import replay from '@/assets/blog/replay.png'; +import openchatBench0106 from '@/assets/blog/openchat-bench-0106.png'; + + +# The Invisible Moat around Open-Source LLM + +In the crowded AI landscape, OpenAI's ChatGPT stands out, not just for its capabilities but for its unique access to the pre-trained dataset. This post explores the vital role of data in maintaining a competitive edge, focusing on OpenAI's strategic advantage through data ownership. + +## Data: The Secret Weapon +OpenAI, with ChatGPT, has carved a distinct advantage. By harnessing user interactions, it gains invaluable insights into diverse use cases, enabling precise model refinements. The cornerstone of this advantage lies in the "pre-trained dataset." This treasure trove of data empowers OpenAI to cater to specific needs, ensuring sustained improvement and differentiation. + +## The rise of the opensource + +``` +- How they/Mistral/Llama make money? 
+-> around having pretrained data -> finetuning +First para: +Rise of Open Source LLMs like Mistral, Llama2, Llama3 +People think they don't have a moat = everything is open source +Second para: +We actually think these guys have an "invisible moat" +Pre-training data is not released, and makes a huge difference in fine-tuning efficacy +``` + +### Why pretrained data is important? + +> *Owning the pre-trained dataset is crucial as it represents the original distribution.* +Access to the pre-trained dataset acts as a master key to address the critical issue of ["Catastrophic forgetting"](https://en.wikipedia.org/wiki/Catastrophic_interference) in Language Learning Models (LLMs). This phenomenon describes how LLMs lose hold of prior knowledge upon learning new information. Access to the foundational dataset allows for effective fine-tuning, balancing the introduction of new data with the retention of existing knowledge. + +Catastrophic forgetting + +**Figure 1.** Demonstrates the catastrophic forgetting issue: without mixing datasets, AI overfits on new tasks, impairing normal communication. + +### Illustrating Catastrophic Forgetting + +``` +What is fine-tuning +Process of Finetuning (pretrain, instruct, finetune) +Fine-tuning datasets +Risk of catastrophic forgetting +"Why is Pre-trained data important?" +What is pre-training dataset +How does fine-tuning with pre-training dataset differ from when you don't have it +How does it avoid catastrophic forgetting +``` + +Catastrophic forgetting can be visualized as a ball in a multidimensional landscape, where moving towards new knowledge risks losing grasp on the old. +Pre-trained data acts as a map, guiding fine-tuning in a way that incorporates new information while safeguarding existing knowledge. 
+ +Gradient decent + +**Figure 2.** [Gradient decent demonstration](https://en.wikipedia.org/wiki/Gradient_descent) + +### Smoothing Distribution Shifts + +As described above, with the mixture of the pre-trained dataset ensures smoother distribution shifts when introducing new information, as it embodies a comprehensive spectrum of prior knowledge. + +This continuity in knowledge transition helps in maintaining the robustness of the model against sudden changes, akin to providing a more gradual learning curve where the new information is incrementally integrated with the existing knowledge base. + +This concept is supported by the [EleutherAI's research](https://arxiv.org/abs/2403.08763) highlighting the importance of how tasks are sequenced in the learning process, suggesting that introducing dissimilar tasks early on can expand the network's capacity for new information. + +**Table 1.** Final results for English-only 405M parameter models trained with different replay amounts show models with more replay perform better in balancing learning and forgetting (measured as AVG Loss). Notably, just 1% mix with a pre-trained dataset significantly lowers AVG loss, effectively shifting model knowledge from English (the Pile) to German. + +Replay method + +*Note:* **Replay** is the method involves combining the training dataset from the pre-trained model with new task datasets. + +### Acting as a Noise Mask + +The pre-trained data can also serve as a form of "noise masking", similar to techniques used in training [early computer vision models](https://arxiv.org/abs/1911.04252). + +This approach introduces a level of ["noise"](https://arxiv.org/abs/2310.05914) during training, which can prevent the model from overfitting to the new dataset. By retaining a mix of original and new data, the model is exposed to a broader range of scenarios, enhancing its generalization capabilities and robustness across tasks. 
+ +## Solutions + +### Overwhelming approach + +Overcoming these challenges requires a balanced approach. One partial method involves inundating the model with extensive, curated data, allowing for comprehensive fine-tuning. While effective, this approach demands significant computational resources, a comprehensive filtering process for low-quality inputs, and an extraordinarily high cost associated with gathering millions of high-quality responses. + +In the open-source community, 2 notable examples of fine-tuning with Mistral as a base model on large datasets collected from top-rated GPT-4 and human responses demonstrate a distribution shift that enhances model performance, including [OpenChat](https://huggingface.co/openchat/openchat-3.5-0106) and [Hermes-Pro](https://huggingface.co/teknium/OpenHermes-2.5-Mistral-7B). + +Openchat results + +**Figure 2.** After fine-tuning with a large amount of data samples, the model's performance improved, outperforming ChatGPT and Grok-1 in some benchmarks. + +### Fully open source model + +- Example: Dolma + olma from allenai + +## Conclusion + +The ownership and strategic use of pre-trained data serve as an invisible moat. It not only enables the tackling of complex challenges like catastrophic forgetting but also provides a baseline for continuous, targeted improvements. Although there is a solution to decentralize, the cost remains reasonably high. 
+ +Fully open pretrained + open weight + +## Reference +- [Catastrophic forgetting](https://arxiv.org/abs/2308.08747) +- [Simple and Scalable Strategies to Continually Pre-train Large Language Models](https://arxiv.org/abs/2403.08763) +- [Gradient descent](https://en.wikipedia.org/wiki/Gradient_descent) +- [Neftune](https://arxiv.org/abs/2310.05914) +- [Self-training with Noisy Student improves ImageNet classification](https://arxiv.org/abs/1911.04252) + + \ No newline at end of file diff --git a/website/src/content/blog/deepseek-r1-locally.mdx b/website/src/content/blog/deepseek-r1-locally.mdx new file mode 100644 index 000000000..5c7ae8f8d --- /dev/null +++ b/website/src/content/blog/deepseek-r1-locally.mdx @@ -0,0 +1,141 @@ +--- +title: "Run DeepSeek R1 locally on your device (Beginner-Friendly Guide)" +description: "A straightforward guide to running DeepSeek R1 locally regardless of your background." +tags: DeepSeek, R1, local AI, Jan, GGUF, Qwen, Llama +categories: guides +date: 2025-01-31 +ogImage: assets/deepseek-r1-locally-jan.jpg +twitter: + card: summary_large_image + site: "@jandotai" + title: "Run DeepSeek R1 locally on your device (Beginner-Friendly Guide)" + description: "A straightforward guide to running DeepSeek R1 locally regardless of your background." 
+ image: assets/deepseek-r1-locally-jan.jpg +--- + +import CTABlog from '@/components/Blog/CTABlog.astro'; +import { Aside } from '@astrojs/starlight/components' + + +import deepseekR1LocallyJan from '@/assets/blog/deepseek-r1-locally-jan.jpg'; +import downloadJan from '@/assets/blog/download-jan.jpg'; +import janLibraryDeepseekR1 from '@/assets/blog/jan-library-deepseek-r1.jpg'; +import janHubDeepseekR1 from '@/assets/blog/jan-hub-deepseek-r1.jpg'; +import janRunsDeepseekR1Distills from '@/assets/blog/jan-runs-deepseek-r1-distills.jpg'; + + +# Run DeepSeek R1 locally on your device (Beginner-Friendly Guide) + +DeepSeek R1 running locally in Jan AI interface, showing the chat interface and model settings + +DeepSeek R1 is one of the best open-source models in the market right now, and you can run DeepSeek R1 on your own computer! + + + +DeepSeek R1 requires data-center level computers to run at its full potential, and we'll use a smaller version that works great on regular computers. + +Why use an optimized version? +- Efficient performance on standard hardware +- Faster download and initialization +- Optimized storage requirements +- Maintains most of the original model's capabilities + +## Quick Steps at a Glance +1. Download [Jan](https://jan.ai/) +2. Select a model version +3. Choose settings +4. Set up the prompt template & start using DeepSeek R1 + +Let's walk through each step with detailed instructions. + +## Step 1: Download Jan +[Jan](https://jan.ai/) is an open-source application that enables you to run AI models locally. It's available for Windows, Mac, and Linux. For beginners, Jan is the best choice to get started. + +Jan AI interface, showing the download button + +1. Visit [jan.ai](https://jan.ai) +2. Download the appropriate version for your operating system +3. Install the app + +## Step 2: Choose Your DeepSeek R1 Version + +To run AI models like DeepSeek R1 on your computer, you'll need something called VRAM (Video Memory). 
Think of VRAM as your computer's special memory for handling complex tasks like gaming or, in our case, running AI models. It's different from regular RAM - VRAM is part of your graphics card (GPU). + + + +Let's first check how much VRAM your computer has. Don't worry if it's not much - DeepSeek R1 has versions for all kinds of computers! + +Finding your VRAM is simple: +- On Windows: Press `Windows + R`, type `dxdiag`, hit Enter, and look under the "Display" tab +- On Mac: Click the Apple menu, select "About This Mac", then "More Info", and check under "Graphics/Displays" +- On Linux: Open Terminal and type `nvidia-smi` for NVIDIA GPUs, or `lspci -v | grep -i vga` for other graphics cards + + + +Once you know your VRAM, here's what version of DeepSeek R1 will work best for you. If you have: +- 6GB VRAM: Go for the 1.5B version - it's fast and efficient +- 8GB VRAM: You can run the 7B or 8B versions, which offer great capabilities +- 16GB or more VRAM: You have access to the larger models with enhanced features + +Available versions and basic requirements for DeepSeek R1 distills: + +| Version | Model Link | Required VRAM | +|---------|------------|---------------| +| Qwen 1.5B | [DeepSeek-R1-Distill-Qwen-1.5B-GGUF](https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF) | 6GB+ | +| Qwen 7B | [DeepSeek-R1-Distill-Qwen-7B-GGUF](https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-7B-GGUF) | 8GB+ | +| Llama 8B | [DeepSeek-R1-Distill-Llama-8B-GGUF](https://huggingface.co/unsloth/DeepSeek-R1-Distill-Llama-8B-GGUF) | 8GB+ | +| Qwen 14B | [DeepSeek-R1-Distill-Qwen-14B-GGUF](https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-14B-GGUF) | 16GB+ | +| Qwen 32B | [DeepSeek-R1-Distill-Qwen-32B-GGUF](https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-32B-GGUF) | 16GB+ | +| Llama 70B | [DeepSeek-R1-Distill-Llama-70B-GGUF](https://huggingface.co/unsloth/DeepSeek-R1-Distill-Llama-70B-GGUF) | 48GB+ | + +To download your chosen model: + +Launch Jan and 
navigate to Jan Hub using the sidebar + +Jan AI interface, showing the model library + +3. Input the model link in this field: + +Jan AI interface, showing the model link input field + +## Step 3: Configure Model Settings +When configuring your model, you'll encounter quantization options: + + + +## Step 4: Configure Prompt Template +Final configuration step: + +1. Access Model Settings via the sidebar +2. Locate the Prompt Template configuration +3. Use this specific format: + + + +This template is for proper communication between you and the model. + +You're now ready to interact with DeepSeek R1: + +Jan interface, showing DeepSeek R1 running locally + +## Need Assistance? + + + + diff --git a/website/src/content/blog/offline-chatgpt-alternative.mdx b/website/src/content/blog/offline-chatgpt-alternative.mdx new file mode 100644 index 000000000..d6fa07e93 --- /dev/null +++ b/website/src/content/blog/offline-chatgpt-alternative.mdx @@ -0,0 +1,125 @@ +--- +title: "Offline ChatGPT: You can't run ChatGPT offline, do this instead" +description: "Learn how to use AI offline with Jan - a free, open-source alternative to ChatGPT that works 100% offline on your computer." +tags: AI, ChatGPT alternative, offline AI, Jan, local AI, privacy +categories: guides +date: 2025-02-08 +ogImage: _assets/offline-chatgpt-alternatives-jan.jpg +twitter: + card: summary_large_image + site: "@jandotai" + title: "Offline ChatGPT: You can't run ChatGPT offline, do this instead" + description: "Want to use ChatGPT offline? Learn how to run AI models locally with Jan - free, open-source, and works without internet." 
+ image: _assets/offline-chatgpt-alternatives-jan.jpg +--- + +import CTABlog from '@/components/Blog/CTABlog.astro'; +import { Aside } from '@astrojs/starlight/components' + + +import offlineChatgptAlternativeAiWithoutInternet from '@/assets/blog/offline-chatgpt-alternative-ai-without-internet.jpg'; + + +# Offline ChatGPT: You can't run ChatGPT offline, do this instead + +ChatGPT is a cloud-based service that requires internet access. However, it's not the only way to use AI. You can run AI models offline on your device with [Jan](https://jan.ai/). It's completely free, open-source, and gives you 100% offline capability. You can even use AI on a plane! + + + +## Jan as an offline ChatGPT alternative + +Use Jan to chat with AI models without internet access +*Jan lets you use AI offline - no internet connection needed* + +Here's how to get started with offline AI in 3 simple steps: + +### 1. Download Jan + +Go to [jan.ai](https://jan.ai) and download the version for your computer (Mac, Windows, or Linux). It's completely free. + +![Download Jan for offline AI use](./_assets/jan.ai.jpg "Get Jan for free and start using AI offline") + +### 2. Download an AI model + +You'll need an AI model to use AI offline, so download a model from Jan. Once it's on your computer, you don't need internet anymore. + +![Choose an AI model that works offline](./_assets/jan-model-selection.jpg "Find the perfect AI model for offline use") +*Select an AI model that matches your needs and computer capabilities* + + + +### 3. 
Start using AI offline + +![Chat with AI offline using Jan's interface](./_assets/run-ai-locally-with-jan.jpg "Experience ChatGPT-like interactions without internet") +*Use Jan's clean interface to chat with AI - no internet required* + +Once downloaded, you can use AI anywhere, anytime: + +- Chat like you do with ChatGPT +- Work on documents offline +- Get coding help without internet +- Keep your conversations private +- Use AI even when servers are down + +## How to chat with your docs in Jan? + +To chat with your docs in Jan, you need to activate experimental mode. + +![Activate experimental mode in Jan's settings](./_assets/chat-with-your-docs-offline-ai.jpg "Enable experimental features to chat with your documents") +*Turn on experimental mode in settings to chat with your docs* + +After activating experimental mode, simply add your files and ask questions about them. + +![Chat with your documents using Jan](./_assets/chat-with-docs-prompt.jpg "Ask questions about your documents offline") +*Chat with your documents privately - no internet needed* + +I did this for you and got a reply from a 7B parameter model. If you'd like to learn what "7B" means and understand other local AI terms, check our [guide on running AI models locally](/blog/run-ai-models-locally). + +A response from AI, Qwen2.5 7B Instruct Q4: + +`This document appears to be about the benefits and advantages of running artificial intelligence (AI) models locally on your device rather than using cloud-based or remote AI services. The key points it highlights include data privacy, offline functionality, freedom from paywalls and restrictions, and giving users full control over their AI models. Additionally, the text mentions that local AI is becoming a new trend and provides a link to a guide for beginners who want to learn more about this topic.` + +Local AI makes possible offline AI use, so Jan is going to be your first step to get started. + +## Why choose Jan over ChatGPT? + +1. 
**True Offline Use:** Unlike ChatGPT, Jan works without internet +2. **100% Private:** Your data never leaves your computer +3. **Free Forever:** No subscriptions or API costs +4. **No Server Issues:** No more "ChatGPT is at capacity" +5. **Your Choice of Models:** Use newer models as they come out + +**"Is it really free? What's the catch?"** +Yes, it's completely free and open source. Jan is built by developers who believe in making AI accessible to everyone. + +**"How does it compare to ChatGPT?"** +Modern open-source models like DeepSeek and Mistral are very capable. While they might not match GPT-4, they're perfect for most tasks and getting better every month. + +**"Do I need a powerful computer?"** +If your computer is from the last 5 years, it will likely work fine. You need about 8GB of RAM and 10GB of free space for comfortable usage. + +**"What about my privacy?"** +Everything stays on your computer. Your conversations, documents, and data never leave your device unless you choose to share them. + +Want to learn more about the technical side? Check our detailed [guide on running AI models locally](/blog/run-ai-models-locally). It's not required to [use AI offline](https://jan.ai/) but helps understand how it all works. + +## Need help? + + + + diff --git a/website/src/content/blog/qwen3-settings.mdx b/website/src/content/blog/qwen3-settings.mdx new file mode 100644 index 000000000..0fee2ecc0 --- /dev/null +++ b/website/src/content/blog/qwen3-settings.mdx @@ -0,0 +1,131 @@ +--- +title: "Best Settings to Run Qwen3-30B-A3B Locally" +description: "If you're running Qwen3-30B-A3B locally, don't guess your way through the settings. This guide tells you what actually works based on Qwen's own documentation and what we've seen hold up in practice." 
+tags: Qwen3, local AI, model settings, Jan, offline AI +categories: guides +date: 2025-05-10 +ogImage: assets/images/general/qwen3-30b-settings.jpg +--- + +import CTABlog from '@/components/Blog/CTABlog.astro'; +import { Aside } from '@astrojs/starlight/components' + + +import qwen3SettingsJanAi from '@/assets/blog/qwen3-settings-jan-ai.jpeg'; +import qwen3InJanHub from '@/assets/blog/qwen3-in-jan-hub.jpeg'; +import qwen3SettingsInJan from '@/assets/blog/qwen3-settings-in-jan.jpeg'; + + +# Best Settings to Run Qwen3-30B-A3B Locally + +If you're running Qwen3-30B-A3B locally, don't guess your way through the settings. This guide tells you what actually works based on Qwen's own documentation and what we've seen hold up in practice. + + + +Qwen3 comes with a unique toggle: `enable_thinking`. When it's on, the model "thinks", it breaks down problems, reasons step-by-step, and wraps part of its output in a `...` block. When it's off, the model skips all that and just gives you an answer. + +That changes how you configure it. + +--- + +### Thinking mode (`enable_thinking=True`) + +This is the mode for reasoning, math, coding, logic — anything that benefits from step-by-step generation. + +**Use these generation settings:** + +``` +Temperature: 0.6 +TopP: 0.95 +TopK: 20 +Max tokens: 32,768 +Do not use greedy decoding +``` + + + + +## Quick summary + +Qwen3 settings + +### Non-thinking mode (`enable_thinking=False`) + +This is for fast, general-purpose replies. Instruction following, chat, creative writing — no `` block, no extra steps. + +**Use these settings:** + +```makefile +Temperature: 0.7 +TopP: 0.8 +TopK: 20 +``` + + + + + +## Soft vs. hard switch + +You can toggle thinking dynamically in the prompt using: + +``` +/think # turns thinking ON +/no_think # turns it OFF +``` + +This works only if `enable_thinking=True` is set in the code. If you set it to False, the soft switch won't do anything. 
+ + + +### What most people miss + +- **Don't log the `think` block in chat history.** Qwen recommends keeping only the final answer. Otherwise, the next reply gets bloated and off-topic. +- **Greedy decoding is a trap.** It's tempting to use for consistency, but Qwen3's output gets worse - and sometimes broken - without sampling. +- **YaRN isn't always needed.** The model supports up to 32k context by default. Use YaRN only if you regularly go beyond that. + +--- + +## Running Qwen3 locally with Jan + +The easiest way to run Qwen3-30B-A3B locally is through Jan. + +1. Download and install [Jan](https://jan.ai) +2. Open Jan and navigate to Jan Hub +3. Find `Qwen3` and `Qwen3-30B-A3B` in the model list +4. Click "Download" to get the model + +### Qwen3 in Jan Hub + +You can easily find Qwen3 models in Jan Hub: + +Qwen3 in Jan Hub + +Once downloaded, Jan handles all the technical setup, so you can focus on using the model rather than configuring it. The settings we covered in this guide are automatically applied when you use Qwen3 through Jan. + +### How to customize Qwen3-30B-A3B settings in Jan + +You can also customize these settings anytime by opening the right panel in Jan and adjusting the parameters to match your needs. + +Qwen3 settings in Jan app + +## Bottom Line + +If you're running Qwen3-30B-A3B locally, treat it like two models in one. Flip the thinking mode based on the task, adjust the generation settings accordingly, and let it work how it was meant to. + +## Need help? 
+ + diff --git a/website/src/content/blog/rag-is-not-enough.mdx b/website/src/content/blog/rag-is-not-enough.mdx new file mode 100644 index 000000000..c163b5588 --- /dev/null +++ b/website/src/content/blog/rag-is-not-enough.mdx @@ -0,0 +1,134 @@ +--- +title: "RAG is not enough: Lessons from Beating GPT-3.5 on Specialized Tasks with Mistral 7B" +description: We present a straightforward approach to customizing small, open-source models using fine-tuning and RAG that outperforms GPT-3.5 for specialized use cases. +tags: RAG, opensource chatgpt alternative, outperform ChatGPT, Mistral +date: 2024-03-25 +unlisted: true +categories: research +--- + +import CTABlog from '@/components/Blog/CTABlog.astro'; + + +# RAG is not enough: Lessons from Beating GPT-3.5 on Specialized Tasks with Mistral 7B + +## Abstract + +We present a straightforward approach to customizing small, open-source models using fine-tuning and RAG that outperforms GPT-3.5 for specialized use cases. With it, we achieved superior Q&A results of [technical documentation](https://nitro.jan.ai/docs) for a small [codebase](https://github.com/menloresearch/nitro). + +In short, (1) extending a general foundation model like [Mistral](https://huggingface.co/mistralai/Mistral-7B-v0.1) with strong math and coding, and (2) training it over a high-quality, synthetic dataset generated from the intended corpus, and (3) adding RAG capabilities, can lead to significant accuracy improvements. + +Problems still arise with catastrophic forgetting in general tasks, commonly observed during specialized domain fine-tuning. In our case, this is likely exacerbated by our lack of access to Mistral’s original training dataset and various compression techniques used in our approach to keep the model small.
+ +## Selecting a strong foundation model + +[Mistral 7B](https://huggingface.co/mistralai/Mistral-7B-v0.1) outshines both [Meta's Llama-2 7B](https://huggingface.co/meta-llama/Llama-2-7b) and [Google's Gemma 7B](https://huggingface.co/google/gemma-7b) in key benchmarks, making it our choice for a base model. Starting with a strong foundation like Mistral allowed us to achieve greater accuracy in our specialized adaptations. + +![image](https://hackmd.io/_uploads/S1TN64kTa.png) + +*Figure 1. Mistral 7B excels in benchmarks, ranking among the top foundational models.* + +*Note: We are not sponsored by the Mistral team, though lots of folks like to run Mistral locally using [Jan](https://jan.ai/), our desktop client.* + +## Cost effectively improving the base model + +Our technical use case required excellent math capabilities, an area where Mistral can underperform. Thus, we tested a number of Mistral model variants, from foundation models to finetunes to model merges, to find a stronger base model before proceeding to finetuning. + +![image](https://hackmd.io/_uploads/SkYBaVk6a.png) + +*Figure 2: The merged model, Stealth, doubles the mathematical capabilities of its foundational model while retaining the performance in other tasks.* + +We found merging models is quick and cost-effective, enabling fast adjustments based on the result of each iteration. + +We ended up with [Stealth 7B v1.1](https://huggingface.co/jan-hq/stealth-v1.1), a [SLERP](https://github.com/Digitous/LLM-SLERP-Merge) merge of Mistral with the following: + +- [WizardMath](https://huggingface.co/WizardLM/WizardMath-7B-V1.1) for its math capabilities. +- [WizardCoder](https://huggingface.co/WizardLM/WizardCoder-Python-7B-V1.0) for its coding capabilities. +- Our own [Trinity](https://huggingface.co/jan-hq/trinity-v1.2) model for its versatility across general tasks.
+ +This particular combination yielded the best tradeoff across mathematical & technical reasoning while retaining the most pre-merge performance on general tasks. + +## **DPO finetuning** + +Merging different LLMs can lead to a mixed answering style because each model was originally trained on different types of data. + +Thus, we applied Direct Preference Optimization ([DPO](https://arxiv.org/abs/2305.18290)) using [Intel's Orca DPO pairs](https://huggingface.co/datasets/Intel/orca_dpo_pairs) dataset, chosen for its helpful answering style in general, math and coding concentration. + +This approach produced a final model - [Stealth 7B v1.2](https://huggingface.co/jan-hq/stealth-v1.2), aligned to our technical preferences and demonstrating minimal loss. + +## **Using our own technical documentation** + +With the base model ready, we started on our specific use case. + +Jan is an open-source project enjoying strong growth, but at one point we began receiving a new support ticket every minute, which quickly overwhelmed our bootstrapped resources. + +So, we directed our efforts toward training a model to answer user questions based on existing technical documentation. + +Specifically, we trained it on the [Nitro documentation](https://nitro.jan.ai/docs). For context, Nitro is the default inference engine for Jan. It’s an enterprise-ready server implementation of LlamaCPP, written in C++, with multimodal, queues, and other production-level server capabilities. + +It made an interesting corpus because it was rife with post-2023 technical jargon, edge cases, and poor informational layout. + +## Generating training data + +The first step was to transform Nitro’s unstructured format into a synthetic Q&A dataset designed for [instruction tuning](https://arxiv.org/pdf/2109.01652.pdf). + +The text was split into chunks of 300-token segments with 30-token overlaps.
This helped to avoid a [lost-in-the-middle](https://arxiv.org/abs/2307.03172) problem where LLM can’t use context efficiently to answer given questions. + +The chunks were then given to GPT-4 with 8k context length to generate 3800 Q&A pairs. The [training dataset](https://huggingface.co/datasets/jan-hq/nitro_binarized_v2) is available on HuggingFace. + +## **Training** + +Training was done with supervised finetuning (SFT) from the [Hugging Face's alignment-handbook](https://github.com/huggingface/alignment-handbook), per [Huggingface's Zephyr Beta](https://github.com/huggingface/alignment-handbook/tree/main/recipes/zephyr-7b-beta) guidelines. + +We used consumer-grade, dual Nvidia RTX 4090s for the training. The end-to-end training took 18 minutes. We found optimal hyperparameters in LoRA for this specific task to be `r = 256` and `alpha = 512`. + +This final model can be found [here on Huggingface](https://huggingface.co/jan-hq/nitro-v1.2-e3). + +![image](https://hackmd.io/_uploads/SJyDTVk6p.png) + +*Figure 3. Using the new finetuned model in [Jan](https://jan.ai/)* + +## Improving results with RAG + +As an additional step, we also added [Retrieval Augmented Generation (RAG)](https://blogs.nvidia.com/blog/what-is-retrieval-augmented-generation/) as an experiment parameter. + +A simple RAG setup was done using **[Llamaindex](https://www.llamaindex.ai/)** and the **[bge-en-base-v1.5 embedding](https://huggingface.co/BAAI/bge-base-en-v1.5)** model for efficient documentation retrieval and question-answering. You can find the RAG implementation [here](https://github.com/menloresearch/open-foundry/blob/main/rag-is-not-enough/rag/nitro_rag.ipynb). + +## Benchmarking the Results + +We curated a new set of [50 multiple-choice questions](https://github.com/menloresearch/open-foundry/blob/main/rag-is-not-enough/rag/mcq_nitro.csv) (MCQ) based on the Nitro docs. 
The questions had varying levels of difficulty and had trick components that challenged the model's ability to discern misleading information. + +![image](https://hackmd.io/_uploads/By9vaE1Ta.png) + +*Figure 4. Comparison between finetuned model and OpenAI's GPT* + +**Results** + +- GPT-3.5 with RAG: 56.7% +- GPT-4 with RAG: 64.3% +- Merged 7B Model ([Stealth 7B](https://huggingface.co/jan-hq/stealth-v1.3)) with RAG: 47.7% +- Finetuned 7B Model (Nitro 7B) with RAG: 57.8% + +This indicates that with task-specific training, we can improve an open-source, Small Language Model to the level of GPT-3.5 on domain knowledge. + +Notably, the finetuned + RAG approach also demonstrated more consistency across benchmarking, as indicated by its lower standard deviation. + +## Conclusion + +We conclude that this combination of model merging + finetuning + RAG yields promise. This finding is relevant for teams and individuals that need specialized, technical small language models that need to run in resource-constrained or highly secured environments, where GPT may not be an option. + +Anecdotally, we’ve had some success using this model in practice to onboard new team members to the Nitro codebase. + +A full research report with more statistics can be found [here](https://github.com/menloresearch/open-foundry/blob/main/rag-is-not-enough/README.md). 
+ +# References + +- [Catastrophic forgetting](https://arxiv.org/abs/2308.08747) +- [Math specialization](https://arxiv.org/abs/2308.09583) +- [Code specialization](https://arxiv.org/abs/2306.08568) +- [Search specialization](https://github.com/SciPhi-AI/agent-search) +- [Evol Instruct](https://github.com/nlpxucan/WizardLM) +- [Lost in the middle](https://arxiv.org/abs/2307.03172) +- [Instruction tuning](https://arxiv.org/pdf/2109.01652.pdf) + + \ No newline at end of file diff --git a/website/src/content/blog/run-ai-models-locally.mdx b/website/src/content/blog/run-ai-models-locally.mdx new file mode 100644 index 000000000..99934dfab --- /dev/null +++ b/website/src/content/blog/run-ai-models-locally.mdx @@ -0,0 +1,228 @@ +--- +title: "How to run AI models locally as a beginner?" +description: "A straightforward guide to running AI models locally on your computer, regardless of your background." +tags: AI, local models, Jan, GGUF, privacy, local AI +categories: guides +date: 2025-01-31 +ogImage: assets/run-ai-locally-with-jan.jpg +twitter: + card: summary_large_image + site: "@jandotai" + title: "How to run AI models locally as a beginner?" + description: "Learn how to run AI models locally on your computer for enhanced privacy and control. Perfect for beginners!" + image: assets/run-ai-locally-with-jan.jpg +--- + +import CTABlog from '@/components/Blog/CTABlog.astro'; + +import { Aside } from '@astrojs/starlight/components' + + +# How to run AI models locally as a beginner? + +Most people think running AI models locally is complicated. It's not. Anyone can run powerful AI models like DeepSeek, Llama, and Mistral on their own computer. This guide will show you how, even if you've never written a line of code. + +## Quick steps: +### 1. 
Download [Jan](https://jan.ai) + +![Jan AI's official website showing the download options](./_assets/jan.ai.jpg "Download Jan from the official website - it's free and open source") +*Download Jan from [jan.ai](https://jan.ai) - it's free and open source.* + +### 2. Choose a model that fits your hardware + +![Jan's model selection interface showing various AI models](./_assets/jan-model-selection.jpg "Jan helps you pick the right AI model for your computer") +*Jan helps you pick the right AI model for your computer.* + +### 3. Start using AI locally + +That's all to run your first AI model locally! + +![Jan's simple and clean chat interface for local AI](./_assets/run-ai-locally-with-jan.jpg "Jan's easy-to-use chat interface after installation") +*Jan's easy-to-use chat interface after installation.* + +Keep reading to learn key terms of local AI and the things you should know before running AI models locally. + +## How Local AI Works + +Before diving into the details, let's understand how AI runs on your computer: + + + +![llama.cpp GitHub repository showing its popularity and wide adoption](./_assets/ai-locally-llama.cpp.jpg "llama.cpp is widely used and trusted in the AI community") +*llama.cpp helps millions of people run AI locally on their computers.* + + + +## Understanding AI Models + +Think of AI models like apps on your computer - some are light and quick to use, while others are bigger but can do more things. When you're choosing an AI model to run on your computer, you'll see names like "Llama-3-8B" or "Mistral-7B". Let's break down what this means in simple terms. + + + +![Jan Hub interface showing model sizes and types](./_assets/jan-hub-for-ai-models.jpg "Jan Hub makes it easy to understand different model sizes and versions") +*Jan Hub makes it easy to understand different model sizes and versions* + +**Good news:** Jan helps you pick the right model size for your computer automatically! 
You don't need to worry about the technical details - just choose a model that matches what Jan recommends for your computer. + +## What You Can Do with Local AI + + + +## Hardware Requirements + +Before downloading an AI model, consider checking if your computer can run it. Here's a basic guide: + +**The basics your computer needs:** +- A decent processor (CPU) - most computers from the last 5 years will work fine +- At least 8GB of RAM - 16GB or more is better +- Some free storage space - at least 5GB recommended + +### What Models Can Your Computer Run? + +| | | | +|---|---|---| +| Regular Laptop | 3B-7B models | Good for chatting and writing. Like having a helpful assistant | +| Gaming Laptop | 7B-13B models | More capable. Better at complex tasks like coding and analysis | +| Powerful Desktop | 13B+ models | Better performance. Great for professional work and advanced tasks | + + + +## Getting Started with Models + +### Model Versions + +When browsing models in Jan, you'll see terms like "Q4", "Q6", or "Q8". Here's what that means in simple terms: + + + +**Pro tip**: Start with Q4 versions - they work great for most people and run smoothly on regular computers! + +### Getting Models from Hugging Face + +You'll often see links to "Hugging Face" when downloading AI models. Think of Hugging Face as the "GitHub for AI" - it's where the AI community shares their models. Jan makes it super easy to use: + +1. Jan has a built-in connection to Hugging Face +2. You can download models right from Jan's interface +3. No need to visit the Hugging Face website unless you want to explore more options + +## Setting up your local AI + +### Getting Models from Hugging Face + +You'll often see links to "Hugging Face" when downloading AI models. Think of Hugging Face as the "GitHub for AI" - it's where the AI community shares their models. This sounds technical, but Jan makes it super easy to use: + +1. Jan has a built-in connection to Hugging Face +2. 
You can download models right from Jan's interface +3. No need to visit the Hugging Face website unless you want to explore more options + + + +### 1. Get Started +Download Jan from [jan.ai](https://jan.ai) - it sets everything up for you. + +### 2. Get an AI Model + +You can get models two ways: + +#### 1. Use Jan Hub (Recommended): + - Click "Download Model" in Jan + - Pick a recommended model + - Choose one that fits your computer + +![AI model parameters explained](./_assets/jan-model-download.jpg "Jan Hub makes it easy to download AI models") +*Use Jan Hub to download AI models* + +#### 2. Use Hugging Face: + + + +##### Step 1: Get the model link +Find and copy a GGUF model link from [Hugging Face](https://huggingface.co) + +![Finding a GGUF model on Hugging Face](./_assets/hugging-face-jan-model-download.jpg "Find GGUF models on Hugging Face") +*Look for models with "GGUF" in their name* + +##### Step 2: Open Jan +Launch Jan and go to the Models tab + +![Opening Jan's model section](./_assets/jan-library-deepseek-r1.jpg "Navigate to the Models section in Jan") +*Navigate to the Models section in Jan* + +##### Step 3: Add the model +Paste your Hugging Face link into Jan + +![Adding a model from Hugging Face](./_assets/jan-hub-deepseek-r1.jpg "Paste your GGUF model link here") +*Paste your GGUF model link here* + +##### Step 4: Download +Select your quantization and start the download + +![Downloading the model](./_assets/jan-hf-model-download.jpg "Choose your preferred model size and download") +*Choose your preferred model size and download* + +### Common Questions + +**"My computer doesn't have a graphics card - can I still use AI?"** + +Yes! It will run slower but still work. Start with 7B models. + +**"Which model should I start with?"** + +Try a 7B model first - it's the best balance of smart and fast. + +**"Will it slow down my computer?"** + +Only while you're using the AI. Close other big programs for better speed. + +## Need help? 
+ + diff --git a/website/src/content/changelog/2023-12-21-faster-inference-across-platform.mdx b/website/src/content/changelog/2023-12-21-faster-inference-across-platform.mdx new file mode 100644 index 000000000..00515bd5b --- /dev/null +++ b/website/src/content/changelog/2023-12-21-faster-inference-across-platform.mdx @@ -0,0 +1,17 @@ +--- +title: "Faster inference across: Mac, Windows, Linux, and GPUs" +version: 0.4.3 +description: "" +date: 2023-12-21 +ogImage: "/assets/images/changelog/Jan_v0.4.3.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +### Highlights 🎉 + +- Custom models: `Trinity`, `Pandora` (great for general use). +- Faster inference across: Mac, Windows, Linux, and GPUs. +- Connect to remote OpenAI models like GPT4 via API key. \ No newline at end of file diff --git a/website/src/content/changelog/2024-01-16-settings-options-right-panel.mdx b/website/src/content/changelog/2024-01-16-settings-options-right-panel.mdx new file mode 100644 index 000000000..b48fd4601 --- /dev/null +++ b/website/src/content/changelog/2024-01-16-settings-options-right-panel.mdx @@ -0,0 +1,20 @@ +--- +title: "Thread settings options in the right panel" +version: 0.4.4 +description: "" +date: 2024-01-16 +ogImage: "" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +### Highlights 🎉 + +- You can now see whether the model is compatible with running on your device. +- You can switch model mid-threads. +- More thread settings options in the right panel. +- CI automation, anti-virus checks. +- Social media access to Jan's Discord & Github from the app for further user support. +- Fixed major bugs, more stability. 
\ No newline at end of file diff --git a/website/src/content/changelog/2024-01-29-local-api-server.mdx b/website/src/content/changelog/2024-01-29-local-api-server.mdx new file mode 100644 index 000000000..d90ae00b2 --- /dev/null +++ b/website/src/content/changelog/2024-01-29-local-api-server.mdx @@ -0,0 +1,23 @@ +--- +title: "Local API server" +version: 0.4.5 +description: "" +date: 2024-01-29 +ogImage: "/assets/images/changelog/Jan_v0.4.5.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +### Highlights 🎉 + +- Local API Server: Experience Jan's dashboard for the local API server to make your data processing smoother and more efficient. +- HTTP Proxy Support: Now, downloading and connecting are seamless, just like browsing Hugging Face in your browser. +- Updated Settings Page: Find what you need faster! We've updated the settings page. + +### Fixes 💫 + +- Auto Update: Enjoy smoother updates. We've fixed the glitches. +- Swagger API Page: Full documentation, no more blanks. +- GPU for Models: Your imported models now fully leverage GPU power. \ No newline at end of file diff --git a/website/src/content/changelog/2024-02-05-jan-data-folder.mdx b/website/src/content/changelog/2024-02-05-jan-data-folder.mdx new file mode 100644 index 000000000..caf9dec98 --- /dev/null +++ b/website/src/content/changelog/2024-02-05-jan-data-folder.mdx @@ -0,0 +1,22 @@ +--- +title: "Jan Data Folder" +version: 0.4.6 +description: "" +date: 2024-02-05 +ogImage: "/assets/images/changelog/jan_product_update_feature.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +### Highlights 🎉 + +- `Changing Jan Data Folder`: Now, moving your data across different folders is just a click away, making organization simpler. So you can even use an external drive. +- Factory Settings: You can reset all of Jan's usage data for a fresh start. 
+ +### Fixes 💫 + +- Easily see each thread's last update time, like WhatsApp, keeping you informed. +- A new loading screen during data migration ensures the app is responsive. +- Enhanced notifications for clearer feedback on model runs or errors. diff --git a/website/src/content/changelog/2024-02-10-jan-is-more-stable.mdx b/website/src/content/changelog/2024-02-10-jan-is-more-stable.mdx new file mode 100644 index 000000000..35e96f35f --- /dev/null +++ b/website/src/content/changelog/2024-02-10-jan-is-more-stable.mdx @@ -0,0 +1,27 @@ +--- +title: "Jan is more stable 👋" +version: 0.5.5 +description: "Jan supports Llama 3.2 and Qwen 2.5" +date: 2024-10-02 +ogImage: "/assets/images/changelog/jan-v0.5.5.jpeg" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +Highlights 🎉 + +- Meta's Llama 3.2 and Alibaba's Qwen 2.5 added to the hub +- Improved starter screen +- Better local vs. cloud model navigation + +Fixes 💫 + +- Solved GPU acceleration for GGUF models +- Improved model caching & threading +- Resolved input & toolbar overlaps + +Update your product or download the latest: https://jan.ai + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.5). 
\ No newline at end of file diff --git a/website/src/content/changelog/2024-02-26-home-servers-with-helm.mdx b/website/src/content/changelog/2024-02-26-home-servers-with-helm.mdx new file mode 100644 index 000000000..768c3f7bd --- /dev/null +++ b/website/src/content/changelog/2024-02-26-home-servers-with-helm.mdx @@ -0,0 +1,24 @@ +--- +title: "Run Jan on your home-servers with Helm" +version: 0.4.7 +description: "" +date: 2024-02-26 +ogImage: "" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +### Highlights 🎉 + +- Run Jan on your `home-servers` with `Helm` +- Use Jan headless or with a Web UI +- `Intel Arc` & `AMD GPU` support through `Vulkan` & `LlamaCPP` + + +### Features & Fixes 💫 + +- 48 fixes, refactoring and stability improvements. +- Conversation threads are auto-summarized & messages are editable. +- Encountering an error? We've replaced vague alerts with a troubleshooting assistant. \ No newline at end of file diff --git a/website/src/content/changelog/2024-03-06-ui-revamp-settings.mdx b/website/src/content/changelog/2024-03-06-ui-revamp-settings.mdx new file mode 100644 index 000000000..dd48c7fbb --- /dev/null +++ b/website/src/content/changelog/2024-03-06-ui-revamp-settings.mdx @@ -0,0 +1,27 @@ +--- +title: "New UI & Codestral Support" +version: 0.5.0 +description: "Revamped Jan's UI to make it clearer and more user-friendly" +date: 2024-06-03 +ogImage: "/assets/images/changelog/jan_v0.5.0.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +Revamped Jan's UI to make it clearer and more user-friendly. + +- Updated Assistant, Model, and Tools sections +- Categorized customization options for easier control +- New settings for models, APIs, and experimental features + +## New Model: Codestral + +Jan now supports Mistral's new model Codestral. Thanks [Bartowski](https://huggingface.co/bartowski) for the GGUF model. You can download the model from the hub. 
+ +## More GGUF models + +More GGUF models can run in Jan - we rebased to llama.cpp b3012. Big thanks to [ggerganov](https://github.com/ggerganov) + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.0). diff --git a/website/src/content/changelog/2024-03-11-import-models.mdx b/website/src/content/changelog/2024-03-11-import-models.mdx new file mode 100644 index 000000000..1d51fe087 --- /dev/null +++ b/website/src/content/changelog/2024-03-11-import-models.mdx @@ -0,0 +1,22 @@ +--- +title: "Import models directly using the UI" +version: 0.4.8 +description: "" +date: 2024-03-11 +ogImage: "" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +### Highlights 🎉 + +- Import models directly using the UI +- A revamped system monitor + +### Features & Fixes 💫 + +- Install Jan using Homebrew on Mac Silicon (thanks to https://github.com/chenrui333 (I'll tag you when I find your Discord handle! :D)). +- Fixed an HTTP Proxy issue causing download errors. +- UI Improvements and more. \ No newline at end of file diff --git a/website/src/content/changelog/2024-03-19-nitro-tensorrt-llm-extension.mdx b/website/src/content/changelog/2024-03-19-nitro-tensorrt-llm-extension.mdx new file mode 100644 index 000000000..817c5a091 --- /dev/null +++ b/website/src/content/changelog/2024-03-19-nitro-tensorrt-llm-extension.mdx @@ -0,0 +1,18 @@ +--- +title: "Nitro-Tensorrt-LLM Extension" +version: 0.4.9 +description: "" +date: 2024-03-19 +ogImage: "" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +### Highlights 🎉 + +- Nitro-Tensorrt-LLM Extension. +- Update models.json. +- Move tensorrt executable to the engine.
+ diff --git a/website/src/content/changelog/2024-04-02-groq-api-integration.mdx b/website/src/content/changelog/2024-04-02-groq-api-integration.mdx new file mode 100644 index 000000000..aabcb0ef1 --- /dev/null +++ b/website/src/content/changelog/2024-04-02-groq-api-integration.mdx @@ -0,0 +1,22 @@ +--- +title: "Groq API Integration" +version: 0.4.10 +description: "" +date: 2024-04-02 +ogImage: "/assets/images/changelog/jan_update_groq.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +### Highlights 🎉 + +- Groq API Integration +- Enhanced hardware troubleshooting guide + +### Features & Fixes 💫 + +- Improved Jan data folder's functionality. +- Fixed URI malformed and `stop` parameter error. +- VRAM-aware model recommendations. \ No newline at end of file diff --git a/website/src/content/changelog/2024-04-15-new-mistral-extension.mdx b/website/src/content/changelog/2024-04-15-new-mistral-extension.mdx new file mode 100644 index 000000000..97fd9541e --- /dev/null +++ b/website/src/content/changelog/2024-04-15-new-mistral-extension.mdx @@ -0,0 +1,18 @@ +--- +title: "New Mistral Extension" +version: 0.4.11 +description: "Jan has a new Mistral Extension letting you chat with larger Mistral models via Mistral API" +date: 2024-04-15 +ogImage: "/assets/images/changelog/jan_mistral_api.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +### Highlights 🎉 + +- Jan has a new `Mistral Extension` letting you chat with larger Mistral models via Mistral API. You can still run smaller Mistral models locally, but now there's a remote option. +- 3rd party extensions can register their own settings pages in the app without any code. +- You can now change set API Prefix for the local API server. +- You can now customize your Assistant's name in Thread Settings. 
diff --git a/website/src/content/changelog/2024-04-25-llama3-command-r-hugginface.mdx b/website/src/content/changelog/2024-04-25-llama3-command-r-hugginface.mdx new file mode 100644 index 000000000..a2cbdc5b3 --- /dev/null +++ b/website/src/content/changelog/2024-04-25-llama3-command-r-hugginface.mdx @@ -0,0 +1,31 @@ +--- +title: 'Jan now supports Llama3 and Command R+' +version: 0.4.12 +description: "Jan has added compatibility with Llama3 & Command R+" +date: 2024-04-25 +ogImage: "/assets/images/changelog/jan_llama3.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +Jan has added compatibility with Meta’s open-source language model, `Llama3`, through the integration with `llamacpp` (thanks to [@ggerganov](https://github.com/ggerganov)). + +Additionally, `Command R+` is now supported. It is the first open-source model to surpass GPT-4 on the [LMSys leaderboard](https://chat.lmsys.org/?leaderboard). + +![Commandr](/assets/images/changelog/jan_cohere_commandr.gif) + +## Import Huggingface models directly + +Users can now import Huggingface models into Jan. Simply copy the model’s link from Huggingface and paste it into the search bar on Jan Hub. + +![HugginFace](/assets/images/changelog/jan_hugging_face.gif) + +## Enhanced LaTeX understanding + +Jan now understands LaTeX, allowing users to process and understand complex mathematical expressions more effectively. + +![Latex](/assets/images/changelog/jan_update_latex.gif) + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.4.12). 
+ diff --git a/website/src/content/changelog/2024-05-20-llamacpp-upgrade-new-remote-models.mdx b/website/src/content/changelog/2024-05-20-llamacpp-upgrade-new-remote-models.mdx new file mode 100644 index 000000000..531e5948e --- /dev/null +++ b/website/src/content/changelog/2024-05-20-llamacpp-upgrade-new-remote-models.mdx @@ -0,0 +1,31 @@ +--- +title: "Jan now supports more GGUF models" +version: 0.4.13 +description: "We rebased to llamacpp b2865." +date: 2024-05-20 +ogImage: "/assets/images/changelog/jan_v0.4.13_update.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +With this release, more GGUF models should work now! We rebased to llamacpp b2865! + +## New remote models: Anthropic & Cohere APIs + +Jan now supports `Anthropic API` models `Claude 3 Opus`, `Claude 3 Sonnet`, and `Claude 3 Haiku`, along with `Cohere`'s `Command R` and `Command R+`. + +## New integrations: Martian and OpenRouter + +Jan supports `Martian`, a dynamic LLM router that routes between multiple models and allows users to reduce costs by 20% to 97%. Jan also supports `OpenRouter`, helping users select the best model for each query. + +![New_Integrations](/assets/images/changelog/jan_v0.4.13_update.gif) + +## GPT-4o Access + +Users can now connect to OpenAI's new model GPT-4o.
+ +![GPT4o](/assets/images/changelog/jan_v0_4_13_openai_gpt4o.gif) + +For more details, see the [GitHub release notes.](https://github.com/menloresearch/jan/releases/tag/v0.4.13) diff --git a/website/src/content/changelog/2024-05-28-cohere-aya-23-8b-35b-phi-3-medium.mdx b/website/src/content/changelog/2024-05-28-cohere-aya-23-8b-35b-phi-3-medium.mdx new file mode 100644 index 000000000..8c976106a --- /dev/null +++ b/website/src/content/changelog/2024-05-28-cohere-aya-23-8b-35b-phi-3-medium.mdx @@ -0,0 +1,19 @@ +--- +title: "Jan now compatible with Aya 23 8B & 35B and Phi-3-Medium" +version: 0.4.14 +description: "Jan now supports Cohere's Aya 23 8B & 35B and Microsoft's Phi-3-Medium." +date: 2024-05-28 +ogImage: "/assets/images/changelog/jan-v0-4-14-phi3.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +Jan now supports `Cohere`'s new models `Aya 23 (8B)` & `Aya 23 (35B)` and `Microsoft`'s `Phi-3-Medium`. + +More GGUF models can run in Jan - we rebased to llama.cpp b2961. + +Huge shoutouts to [ggerganov](https://github.com/ggerganov) and contributors for llama.cpp, and [Bartowski](https://huggingface.co/bartowski) for GGUF models. + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.4.14). diff --git a/website/src/content/changelog/2024-06-21-nvidia-nim-support.mdx b/website/src/content/changelog/2024-06-21-nvidia-nim-support.mdx new file mode 100644 index 000000000..13191a869 --- /dev/null +++ b/website/src/content/changelog/2024-06-21-nvidia-nim-support.mdx @@ -0,0 +1,29 @@ +--- +title: "Jan supports NVIDIA NIM" +version: 0.5.1 +description: "Jan has integrated NVIDIA NIM and supports Qwen 2 7B" +date: 2024-06-21 +ogImage: "/assets/images/changelog/jan_nvidia_nim_support.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +## NVIDIA NIM + +We've integrated NVIDIA NIM support. 
+ +## Qwen 2 7B + +You can now access Qwen 2 7B directly in the Jan Hub. + +We've updated to llama.cpp b3088 for better performance - thanks to [GG](https://github.com/ggerganov) + +## Fixes + +- Fixed Anthropic API error +- Reduced chat font weight (back to normal!) +- Restored the maximize button + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.1). diff --git a/website/src/content/changelog/2024-07-15-claude-3-5-support.mdx b/website/src/content/changelog/2024-07-15-claude-3-5-support.mdx new file mode 100644 index 000000000..d13228a43 --- /dev/null +++ b/website/src/content/changelog/2024-07-15-claude-3-5-support.mdx @@ -0,0 +1,35 @@ +--- +title: "Jan supports Claude 3.5 Sonnet" +version: 0.5.2 +description: "You can run Claude 3.5 Sonnet in Jan" +date: 2024-07-15 +ogImage: "/assets/images/changelog/jan_supports_claude_3_5.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +## Claude 3.5 Sonnet + +We've integrated support for Claude 3.5 Sonnet, you can run Anthropic's latest model in Jan. + +Plus, You can now use optional spell check for chats. There's also a new shortcut for app logs in System Monitor. + +## Fixes + +In this hotfix, we've addressed several issues to improve your Jan experience: + +### Gemma 2B Stability + +Gemma 2B now runs without any issues. + +### Tooltip Hover Functionality + +We've restored the tooltip hover functionality, which makes it easier to access helpful information without any glitches. + +### Right-click Options for Thread Settings + +The right-click options for thread settings are now fully operational again. You can now manage your threads with this fix. + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.2). 
diff --git a/website/src/content/changelog/2024-09-01-llama3-1-gemma2-support.mdx b/website/src/content/changelog/2024-09-01-llama3-1-gemma2-support.mdx new file mode 100644 index 000000000..debe207bf --- /dev/null +++ b/website/src/content/changelog/2024-09-01-llama3-1-gemma2-support.mdx @@ -0,0 +1,26 @@ +--- +title: "v0.5.3 is out with stability improvements!" +version: 0.5.3 +description: "You can run Llama 3.1 and Gemma 2 in Jan" +date: 2024-08-29 +ogImage: "/assets/images/changelog/janv0.5.3.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +## Llama 3.1 and Gemma 2 Support + +Jan finally supports Meta's Llama 3.1 and Google's Gemma 2. Thanks for the patience folks! + +We've been working on stability issues over the last few weeks. Jan is now more stable. + +### Additional Notes + +- Upgraded the inference engine for better performance +- Model settings now persist across new threads +- Fixed the GPU memory utilization bar +- Some UX and copy improvements + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.3). diff --git a/website/src/content/changelog/2024-09-17-improved-cpu-performance.mdx b/website/src/content/changelog/2024-09-17-improved-cpu-performance.mdx new file mode 100644 index 000000000..6951c8647 --- /dev/null +++ b/website/src/content/changelog/2024-09-17-improved-cpu-performance.mdx @@ -0,0 +1,35 @@ +--- +title: "Jan is faster now" +version: 0.5.4 +description: "Jan has faster CPU inference." +date: 2024-09-17 +ogImage: "/assets/images/changelog/jan-v0.5.4.jpg" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +Over the last few weeks, we've been working on improving Jan's stability. Every update helps us make Jan's experience faster and smoother. +With version 0.5.4, you’ll notice AI running quicker on CPU - better performance all around. 
+ +### Model Downloads +You can now download models directly from the model selector in Threads. No more jumping between different tabs – just pick, download, and get started. + +### Fast CPU Inference +We've addressed the slower inference speeds on CPU, so you'll notice faster processing times, especially when using larger models. + +### Model Starts +We tackled the notorious "model can't start / The specified module could not be found" error. +Plus, Phi-3 models are now working smoothly even if you're using an outdated version. + +### Consistent Warnings +Performance warnings are now aligned between Model Hub and Threads, giving you more reliable insights no matter where you're working. + +### Persistent Thread Settings +Switching between threads used to reset your instruction settings. That’s fixed now! Your settings will stay intact as you jump between old and new threads. + +### Minor UI Tweaks & Bug Fixes +We’ve also resolved issues with the input slider on the right panel and tackled several smaller bugs to keep everything running smoothly. + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.4). diff --git a/website/src/content/changelog/2024-10-24-jan-stable.mdx b/website/src/content/changelog/2024-10-24-jan-stable.mdx new file mode 100644 index 000000000..9acc7f31e --- /dev/null +++ b/website/src/content/changelog/2024-10-24-jan-stable.mdx @@ -0,0 +1,26 @@ +--- +title: "Jan has Stable, Beta and Nightly versions" +version: 0.5.7 +description: "This release is mostly focused on bug fixes." 
+date: 2024-10-24 +ogImage: "/assets/images/changelog/jan-v0.5.7.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +Highlights 🎉 + +- Jan has Stable, Beta and Nightly versions +- Saving instructions for new threads is now stable + +Fixes 💫 + +- Fixed broken links, hardware issues, and multi-modal download +- Resolved text overlap, scrolling, and multi-monitor reset problems +- Adjusted LLava model EOS token and context input + +Update your product or download the latest: https://jan.ai + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.7). \ No newline at end of file diff --git a/website/src/content/changelog/2024-11-22-jan-bugs.mdx b/website/src/content/changelog/2024-11-22-jan-bugs.mdx new file mode 100644 index 000000000..3d1020217 --- /dev/null +++ b/website/src/content/changelog/2024-11-22-jan-bugs.mdx @@ -0,0 +1,25 @@ +--- +title: "Model downloads & running issues fixed" +version: 0.5.9 +description: "Jan v0.5.9 is here: fixing what needed fixing." +date: 2024-11-22 +ogImage: "/assets/images/changelog/jan-v0.5.9.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +Jan v0.5.9 is here: fixing what needed fixing + +### Highlights 🎉 + +- Model downloads & running issues fixed +- Document upload bugs resolved +- System glitches addressed: Factory Reset, HTTP Proxy, Hugging Face tokens +- Fixed issues with code blocks in streaming responses +- Improved the UX of the Local API Server page + +Update your product or download the latest: https://jan.ai + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.9). 
\ No newline at end of file diff --git a/website/src/content/changelog/2024-11.14-jan-supports-qwen-coder.mdx b/website/src/content/changelog/2024-11.14-jan-supports-qwen-coder.mdx new file mode 100644 index 000000000..430316da9 --- /dev/null +++ b/website/src/content/changelog/2024-11.14-jan-supports-qwen-coder.mdx @@ -0,0 +1,25 @@ +--- +title: "Jan supports Qwen2.5-Coder 14B & 32B" +version: 0.5.8 +description: "Jan v0.5.8 is out: Jan supports Qwen2.5-Coder 14B & 32B through Cortex" +date: 2024-11-14 +ogImage: "/assets/images/changelog/jan-v0.5.8.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +Jan v0.5.8 is out: Jan supports Qwen2.5-Coder 14B & 32B through Cortex + +### Highlights 🎉 + +- A new engine: Jan now runs models via [Cortex](https://cortex.so) +- Supports Alibaba Qwen's Coder 14B & 32B +- Supports markdown rendering on user messages + +and various UI/UX enhancements 💫 + +Update your product or download the latest: https://jan.ai + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.8). \ No newline at end of file diff --git a/website/src/content/changelog/2024-12-03-jan-is-faster.mdx b/website/src/content/changelog/2024-12-03-jan-is-faster.mdx new file mode 100644 index 000000000..79f05b12d --- /dev/null +++ b/website/src/content/changelog/2024-12-03-jan-is-faster.mdx @@ -0,0 +1,22 @@ +--- +title: "Jan v0.5.10 is live" +version: 0.5.10 +description: "Jan is faster, smoother, and more reliable." +date: 2024-12-03 +ogImage: "/assets/images/changelog/jan-v0.5.10.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +Jan v0.5.10 is live: Jan is faster, smoother, and more reliable. 
+ +### Highlights 🎉 + +- Resolved model startup issues, memory leaks, and improved token limits +- Clearer error messages and subtle UX improvements + +Update your product or download the latest: https://jan.ai + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.10). \ No newline at end of file diff --git a/website/src/content/changelog/2024-12-05-jan-hot-fix-mac.mdx b/website/src/content/changelog/2024-12-05-jan-hot-fix-mac.mdx new file mode 100644 index 000000000..d79c77b03 --- /dev/null +++ b/website/src/content/changelog/2024-12-05-jan-hot-fix-mac.mdx @@ -0,0 +1,26 @@ +--- +title: "Jan v0.5.11 is here!" +version: 0.5.11 +description: "Critical issues fixed, Mac installation updated." +date: 2024-12-05 +ogImage: "/assets/images/changelog/jan-v0.5.11.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +Jan v0.5.11 is here - critical issues fixed, Mac installation updated. + +### Highlights 🎉 + +- Crashes (markdown & code highlighting) +- Thread switching & auto-scroll +- Syntax highlighting bugs +- API issues (Anthropic, OpenRouter) +- Title glitches with special characters +- Model settings inconsistencies + +Update your product or download the latest: https://jan.ai + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.11). 
\ No newline at end of file diff --git a/website/src/content/changelog/2024-12-30-jan-new-privacy.mdx b/website/src/content/changelog/2024-12-30-jan-new-privacy.mdx new file mode 100644 index 000000000..3f29dd245 --- /dev/null +++ b/website/src/content/changelog/2024-12-30-jan-new-privacy.mdx @@ -0,0 +1,28 @@ +--- +title: "Jan gives you full control over your privacy" +version: 0.5.12 +description: "Improved Privacy settings to give full control over analytics" +date: 2024-12-30 +ogImage: "/assets/images/changelog/jan-v0.5.12.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +Jan v0.5.12 is here - improved privacy settings give you full control over analytics. + +### Highlights 🎉 + +- Updated privacy settings with opt-in/out options for Jan Analytics +- Adjustable chat width +- The right sidebar and input box are now optimized for new users + +### Fixes 💫 +- Updated privacy settings with opt-in/out options for Jan Analytics +- Adjustable chat width +- The right sidebar and input box are now optimized for new users + +Update your product or download the latest: https://jan.ai + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.12). \ No newline at end of file diff --git a/website/src/content/changelog/2025-01-06-key-issues-resolved.mdx b/website/src/content/changelog/2025-01-06-key-issues-resolved.mdx new file mode 100644 index 000000000..40e541c2a --- /dev/null +++ b/website/src/content/changelog/2025-01-06-key-issues-resolved.mdx @@ -0,0 +1,23 @@ +--- +title: "A few key issues have been solved!" +version: 0.5.13 +description: "Jan v0.5.13 is here: A few key issues have been solved." +date: 2025-01-06 +ogImage: "/assets/images/changelog/jan-v0-5-13.jpg" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +👋 Jan v0.5.13 is here: A few key issues have been solved! 
+ +### Highlights 🎉 + +- Resolved model loading issues on MacOS Intel +- Fixed app resetting max_tokens to 8192 on new threads - now uses model settings +- Fixed Vulkan settings visibility for some users + +Update your product or download the latest: https://jan.ai + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.13). diff --git a/website/src/content/changelog/2025-01-23-deepseek-r1-jan.mdx b/website/src/content/changelog/2025-01-23-deepseek-r1-jan.mdx new file mode 100644 index 000000000..3006588b8 --- /dev/null +++ b/website/src/content/changelog/2025-01-23-deepseek-r1-jan.mdx @@ -0,0 +1,36 @@ +--- +title: "Run DeepSeek R1 Distills error-free!" +version: 0.5.14 +description: "Jan v0.5.14 is out: Run DeepSeek R1 Distills error-free!" +date: 2025-01-23 +ogImage: "/assets/images/changelog/jan-v0-5-14-deepseek-r1.jpg" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +👋 Jan v0.5.14 is out: Run DeepSeek R1 Distills error-free! + +You can run DeepSeek R1 distills in Jan error-free. Follow our [step-by-step guide to run DeepSeek R1 locally](/blog/deepseek-r1-locally) and get this AI model running on your device in minutes. + +llama.cpp version updated via Cortex—thanks to GG & llama.cpp community! + +- Paste GGUF links into Jan Hub to download +- Already downloaded the model but facing issues? Update Jan. 
+ +Models: + +Qwen +- DeepSeek-R1-Distill-Qwen-1.5B-GGUF: https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF +- DeepSeek-R1-Distill-Qwen-7B-GGUF: https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-7B-GGUF +- DeepSeek-R1-Distill-Qwen-14B-GGUF: https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-14B-GGUF +- DeepSeek-R1-Distill-Qwen-32B-GGUF: https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-32B-GGUF + +Llama +- DeepSeek-R1-Distill-Llama-8B-GGUF: https://huggingface.co/bartowski/DeepSeek-R1-Distill-Llama-8B-GGUF +- DeepSeek-R1-Distill-Llama-70B-GGUF: https://huggingface.co/bartowski/DeepSeek-R1-Distill-Llama-70B-GGUF + +Update your Jan or [download the latest](https://jan.ai/). + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.14). diff --git a/website/src/content/changelog/2025-02-14-enterprise-security.mdx b/website/src/content/changelog/2025-02-14-enterprise-security.mdx new file mode 100644 index 000000000..00d973bb2 --- /dev/null +++ b/website/src/content/changelog/2025-02-14-enterprise-security.mdx @@ -0,0 +1,134 @@ +--- +title: "Jan Enterprise: Security & Compliance" +version: 0.6.2 +description: "Enterprise-grade security features, compliance tools, and team management capabilities" +date: 2025-02-14 +image: "/assets/images/changelog/enterprise-security.png" +featured: false +--- + +## Enterprise Ready 🏢 + +Jan v0.6.2 introduces comprehensive enterprise features designed for organizations that need advanced security, compliance, and management capabilities. Deploy AI with confidence across your entire organization. 
+ +### 🔐 Advanced Security Framework + +Military-grade security for your AI infrastructure: +- **Zero-Trust Architecture**: Verify every request, trust nothing by default +- **End-to-End Encryption**: Data encrypted in transit and at rest +- **Certificate Management**: Full PKI support with automatic rotation +- **Network Isolation**: Separate AI workloads from corporate networks +- **Audit Logging**: Comprehensive logs for security analysis and compliance + +### 👥 Team Management + +Sophisticated user and access control: +- **Role-Based Access**: Granular permissions for different user types +- **Single Sign-On**: Integration with Active Directory, SAML, and OAuth +- **Multi-Factor Authentication**: Required 2FA with hardware token support +- **Session Management**: Control session duration and concurrent logins +- **User Provisioning**: Automated user creation and deactivation + +### 📊 Compliance Dashboard + +Meet regulatory requirements with confidence: +- **SOC 2 Compliance**: Built-in controls for SOC 2 Type II certification +- **GDPR Tools**: Data subject rights management and privacy controls +- **HIPAA Ready**: Healthcare-specific security and privacy features +- **ISO 27001**: Information security management system alignment +- **Custom Frameworks**: Adapt to your specific compliance requirements + +### 🏗️ Infrastructure Management + +Deploy and scale AI across your organization: +- **Containerized Deployment**: Docker and Kubernetes ready +- **Load Balancing**: Distribute AI workloads across multiple instances +- **Auto-Scaling**: Automatically scale based on demand +- **Health Monitoring**: Real-time system health and performance tracking +- **Disaster Recovery**: Automated backups and failover capabilities + +### 🎯 Data Governance + +Complete control over your data: +- **Data Classification**: Automatically tag and classify sensitive information +- **Retention Policies**: Automated data lifecycle management +- **Data Loss Prevention**: Prevent sensitive 
data from leaving your environment +- **Geographic Controls**: Control where data is processed and stored +- **Right to Deletion**: Complete data removal on request + +### 🔍 Advanced Analytics + +Insights into AI usage across your organization: +- **Usage Analytics**: Understand how teams use AI capabilities +- **Cost Analysis**: Track AI costs by department, team, or user +- **Performance Metrics**: Monitor AI performance and quality +- **Adoption Reports**: Measure AI adoption across the organization +- **Custom Dashboards**: Create reports tailored to your needs + +### 🛡️ Threat Protection + +Advanced protection against AI-specific threats: +- **Prompt Injection Detection**: Identify and block malicious prompts +- **Content Filtering**: Prevent inappropriate content generation +- **Rate Limiting**: Protect against abuse and resource exhaustion +- **Anomaly Detection**: Identify unusual usage patterns +- **Incident Response**: Automated response to security events + +### 🌐 Integration Capabilities + +Connect Jan to your existing enterprise systems: +- **API Gateway**: Secure API access with rate limiting and authentication +- **Webhook Support**: Real-time notifications to external systems +- **Database Connections**: Direct integration with enterprise databases +- **Workflow Integration**: Connect to existing business processes +- **Custom Connectors**: Build integrations specific to your needs + +### 📋 Policy Management + +Implement and enforce AI governance policies: +- **Usage Policies**: Define acceptable use of AI capabilities +- **Content Policies**: Control what types of content can be generated +- **Model Policies**: Restrict access to specific AI models +- **Data Policies**: Control how data is processed and stored +- **Approval Workflows**: Require approval for sensitive operations + +### 🔧 Administrative Tools + +Powerful tools for IT administrators: +- **Centralized Configuration**: Manage settings across all deployments +- **Bulk Operations**: 
Perform actions across multiple users or systems +- **Migration Tools**: Move data and settings between environments +- **Backup Management**: Automated and manual backup capabilities +- **System Diagnostics**: Comprehensive troubleshooting tools + +### 📞 Enterprise Support + +Dedicated support for mission-critical deployments: +- **24/7 Support**: Round-the-clock assistance for critical issues +- **Dedicated Success Manager**: Personal point of contact for your organization +- **Training Programs**: Comprehensive training for administrators and users +- **Implementation Services**: Professional deployment and configuration +- **Custom Development**: Tailored features for unique requirements + +## Deployment Options + +Choose the deployment model that fits your needs: +- **On-Premises**: Complete control with on-site deployment +- **Private Cloud**: Dedicated cloud environment just for you +- **Hybrid**: Combine on-premises and cloud capabilities +- **Multi-Region**: Deploy across multiple geographic regions +- **Air-Gapped**: Completely isolated environments for maximum security + +## Getting Started + +Ready to deploy Jan Enterprise? + +1. **Assessment**: Our team evaluates your requirements +2. **Pilot Program**: Start with a small-scale deployment +3. **Training**: Comprehensive training for your team +4. **Full Deployment**: Roll out to your entire organization +5. **Ongoing Support**: Continuous support and optimization + +Transform how your organization uses AI. Contact our enterprise team to learn more. 
+ +[Contact Enterprise Sales](mailto:enterprise@jan.ai) • [Enterprise Documentation](/docs/enterprise) • [Security Whitepaper](/security-whitepaper) \ No newline at end of file diff --git a/website/src/content/changelog/2025-02-18-advanced-llama.cpp-settings.mdx b/website/src/content/changelog/2025-02-18-advanced-llama.cpp-settings.mdx new file mode 100644 index 000000000..b24c72908 --- /dev/null +++ b/website/src/content/changelog/2025-02-18-advanced-llama.cpp-settings.mdx @@ -0,0 +1,28 @@ +--- +title: "You can now tweak llama.cpp settings, and add any cloud model!" +version: 0.5.15 +description: "Jan v0.5.15 is out: Advanced llama.cpp settings and cloud model support" +date: 2025-02-18 +ogImage: "/assets/images/changelog/jan-v0-5-15-llamacpp-settings.jpg" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +👋 Jan v0.5.15 is out with exciting new features and improvements! + +## Highlights 🎉 + +- Improved llama.cpp settings for better model control and performance +- Install and use any cloud model seamlessly +- Enhanced hardware controls for better resource management +- New models supported: + - DeepSeek AI + - Google Gemini + - OpenAI o3-mini + - R1 distills + +Update your Jan or [download the latest](https://jan.ai/). + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.15). 
diff --git a/website/src/content/changelog/2025-03-05-ui-redesign.mdx b/website/src/content/changelog/2025-03-05-ui-redesign.mdx new file mode 100644 index 000000000..fb47b7b21 --- /dev/null +++ b/website/src/content/changelog/2025-03-05-ui-redesign.mdx @@ -0,0 +1,122 @@ +--- +title: "Jan v0.6.3: UI Renaissance" +version: 0.6.3 +description: "Complete interface redesign with improved UX, dark mode, and accessibility features" +date: 2025-03-05 +image: "/assets/images/changelog/ui-redesign.png" +featured: false +--- + +## A Beautiful New Beginning 🎨 + +Jan v0.6.3 introduces our most comprehensive UI overhaul yet. Every pixel has been reconsidered, every interaction refined. The result? A beautiful, intuitive interface that gets out of your way and lets you focus on what matters - conversations with AI. + +### 🎯 Design Philosophy + +Our new design principles: +- **Clarity First**: Remove visual noise, highlight what's important +- **Consistent Language**: Unified design patterns throughout the app +- **Accessibility Focus**: Usable by everyone, regardless of ability +- **Performance Minded**: Beautiful interfaces that are also fast +- **Future Ready**: Scalable design system for upcoming features + +### 🌙 Enhanced Dark Mode + +Dark mode, reimagined: +- **True Black Option**: Perfect for OLED displays and low-light use +- **Adaptive Contrast**: Automatically adjusts based on ambient light +- **Custom Accent Colors**: Choose your preferred highlight color +- **Smart Switching**: Follows system preferences or custom schedule +- **Reduced Eye Strain**: Carefully calibrated colors for long usage sessions + +### 💬 Conversation Experience + +Completely redesigned chat interface: +- **Improved Message Bubbles**: Better readability and visual hierarchy +- **Smart Typography**: Optimal font sizes and spacing for every device +- **Code Highlighting**: Syntax highlighting for 200+ programming languages +- **Math Rendering**: Beautiful LaTeX math equation display +- **Image 
Gallery**: Enhanced image viewing with zoom and navigation + +### 🎛️ Settings Overhaul + +Settings that make sense: +- **Organized Categories**: Logical grouping of related options +- **Search Settings**: Find any setting instantly +- **Visual Previews**: See changes before applying them +- **Quick Actions**: Common tasks accessible with fewer clicks +- **Import/Export**: Backup and restore your entire configuration + +### 📱 Responsive Design + +Perfect on every screen: +- **Mobile Optimized**: Touch-friendly interface for tablets and phones +- **Desktop Polish**: Take advantage of larger screens and precise input +- **Window Management**: Better handling of multiple windows and panels +- **Flexible Layouts**: Adapt to any screen size or orientation +- **High DPI Support**: Crisp on retina and 4K displays + +### ♿ Accessibility Improvements + +Jan for everyone: +- **Screen Reader Support**: Full compatibility with assistive technologies +- **Keyboard Navigation**: Complete interface control without a mouse +- **High Contrast Mode**: Enhanced visibility for low-vision users +- **Font Scaling**: Respect system font size preferences +- **Motion Controls**: Reduced motion options for sensitive users + +### 🎨 Theming System + +Express your style: +- **Built-in Themes**: 12 carefully crafted color schemes +- **Custom Themes**: Create your own with our theme editor +- **Theme Sharing**: Import themes created by the community +- **Seasonal Themes**: Special themes for holidays and events +- **Auto-Theming**: Themes that change based on time of day + +### 🔍 Improved Navigation + +Find everything faster: +- **Global Search**: Search conversations, settings, and help instantly +- **Breadcrumbs**: Always know where you are in the app +- **Quick Switcher**: Jump between conversations with keyboard shortcuts +- **Recent Items**: Quick access to your most-used features +- **Favorites System**: Pin important conversations and tools + +### 🎪 Animation & Transitions + +Delightful 
micro-interactions: +- **Smooth Transitions**: Fluid movement between screens and states +- **Loading Animations**: Engaging feedback during wait times +- **Hover Effects**: Subtle responses to mouse interaction +- **Focus Indicators**: Clear visual feedback for keyboard users +- **Performance Optimized**: 60fps animations that don't drain battery + +### 📊 Visual Data + +Information design that informs: +- **Usage Charts**: Beautiful visualizations of your AI usage +- **Performance Graphs**: Real-time system performance monitoring +- **Progress Indicators**: Clear feedback for long-running operations +- **Status Displays**: At-a-glance system health information +- **Comparison Views**: Side-by-side analysis of models and settings + +### 🚀 Performance Improvements + +Beauty with brains: +- **Faster Rendering**: 40% improvement in interface responsiveness +- **Memory Efficiency**: Reduced RAM usage for smoother operation +- **Bundle Optimization**: Smaller app size, faster loading +- **Asset Loading**: Progressive loading for smoother startup +- **Animation Performance**: Hardware-accelerated animations + +## Migration Guide + +Your existing data and settings are automatically preserved. Some visual elements may look different, but all functionality remains the same or improved. + +**New Users**: Welcome to the most beautiful Jan yet! +**Existing Users**: Your themes and customizations will be migrated automatically. + +Experience the new Jan. Clean, beautiful, and more powerful than ever. 
+ +[Download Jan v0.6.3](https://jan.ai/) • [UI Guide](/docs/interface) • [Accessibility Documentation](/docs/accessibility) \ No newline at end of file diff --git a/website/src/content/changelog/2025-03-14-jan-security-patch.mdx b/website/src/content/changelog/2025-03-14-jan-security-patch.mdx new file mode 100644 index 000000000..19d5bbf9e --- /dev/null +++ b/website/src/content/changelog/2025-03-14-jan-security-patch.mdx @@ -0,0 +1,29 @@ +--- +title: "Security fixes and UI improvements" +version: 0.5.16 +description: "Jan v0.5.16 is out: Security fixes and major improvements to Model Hub and chat experience" +date: 2025-03-14 +ogImage: "/assets/images/changelog/jan-v0-5-16-security-patch.jpg" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +👋 Jan v0.5.16 is out: Security fixes (found in Cortex) and a few improvements! + +## Highlights 🎉 + +- Redesigned Model Hub for better user experience +- Faster chat response times +- Cleaner layout and improved model picker +- New model support: + - GPT-4.5 preview + - Claude 3.7 Sonnet + - Gemma 3 + +⚠️ Important: This release includes critical security fixes. We'll share more details about these security improvements soon. + +Update your Jan or [download the latest](https://jan.ai/). + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.16). 
diff --git a/website/src/content/changelog/2025-04-10-mobile-launch.mdx b/website/src/content/changelog/2025-04-10-mobile-launch.mdx new file mode 100644 index 000000000..1f75b15a9 --- /dev/null +++ b/website/src/content/changelog/2025-04-10-mobile-launch.mdx @@ -0,0 +1,119 @@ +--- +title: "Jan Mobile: AI in Your Pocket" +version: 1.0.0 +description: "Introducing Jan Mobile - full-featured AI assistant for iOS and Android with cloud sync" +date: 2025-04-10 +image: "/assets/images/changelog/jan-mobile.png" +featured: true +--- + +## AI Goes Mobile 📱 + +After months of development, we're thrilled to announce Jan Mobile - bringing the full power of Jan's AI capabilities to your smartphone. Chat with AI anywhere, sync across devices, and never miss a conversation. + +### 📱 Native Mobile Experience + +Built from the ground up for mobile: +- **Native Performance**: Smooth, responsive interface optimized for touch +- **Offline Capable**: Continue conversations even without internet +- **Battery Optimized**: Efficient background processing preserves battery life +- **Dark Mode**: Beautiful dark theme that's easy on the eyes +- **Haptic Feedback**: Tactile responses for better interaction + +### ☁️ Seamless Cloud Sync + +Your conversations follow you everywhere: +- **Real-time Sync**: Conversations update instantly across all devices +- **Conflict Resolution**: Smart merging when editing on multiple devices +- **Selective Sync**: Choose which conversations to sync to mobile +- **End-to-End Encryption**: Your data remains private and secure +- **Offline Queue**: Messages sync when connection returns + +### 🎯 Mobile-First Features + +Designed for how you use your phone: +- **Voice Input**: Speak your messages instead of typing +- **Voice Output**: AI responses read aloud with natural voices +- **Quick Actions**: Swipe gestures for common tasks +- **Share Integration**: Share content to Jan from any app +- **Widget Support**: Quick access from your home screen + +### 🔒 Privacy 
& Security + +Your privacy, protected: +- **Local Processing**: Sensitive conversations can stay on-device +- **Biometric Lock**: Secure Jan with fingerprint or face recognition +- **Auto-Lock**: Automatically locks after inactivity +- **Private Mode**: Conversations that don't sync to cloud +- **Data Controls**: Full control over what data is stored where + +### 🤖 Full Model Support + +Access all your favorite models: +- **Cloud Models**: GPT-4, Claude, Gemini, and more +- **Local Models**: Run smaller models directly on your phone +- **Model Switching**: Change models mid-conversation +- **Smart Routing**: Automatically choose the best model for your query +- **Offline Models**: Basic AI capabilities without internet + +### 📸 Rich Media Support + +Beyond just text: +- **Image Analysis**: Upload photos for AI to analyze and discuss +- **Camera Integration**: Take photos directly in Jan for analysis +- **Voice Messages**: Send and receive voice messages +- **File Sharing**: Share documents, PDFs, and more +- **Link Previews**: Rich previews for shared links + +### 🎨 Personalization + +Make Jan your own: +- **Custom Themes**: Choose colors and appearance +- **Chat Backgrounds**: Personalize your conversation view +- **Notification Settings**: Control when and how you're notified +- **Assistant Personalities**: Different AI personalities for different contexts +- **Quick Replies**: Set up common responses + +### 🔄 Cross-Platform Features + +Unified experience across desktop and mobile: +- **Universal Search**: Find conversations across all devices +- **Shared Assistants**: Use the same AI assistants everywhere +- **Unified Settings**: Preferences sync between devices +- **File Access**: Access files shared from desktop +- **Continuous Conversations**: Start on desktop, continue on mobile + +### 📊 Usage Analytics + +Understand your AI usage: +- **Conversation Stats**: See your most active conversations +- **Model Usage**: Track which models you use most +- **Time 
Analytics**: Understand your usage patterns +- **Export Data**: Download your conversation history +- **Privacy Dashboard**: See exactly what data is stored + +### 🌟 Launch Features + +Available from day one: +- **Free Tier**: Full functionality with generous usage limits +- **Pro Features**: Enhanced models and advanced features +- **Family Sharing**: Share Pro features with family members +- **Student Discount**: Special pricing for students +- **Enterprise Options**: Advanced security and management + +## Platform Availability + +- **iOS**: Available on the App Store (iOS 15.0+) +- **Android**: Available on Google Play (Android 8.0+) +- **Cross-Platform**: Full feature parity between platforms + +## Getting Started + +1. Download Jan Mobile from your app store +2. Sign in with your Jan account (or create one) +3. Your desktop conversations automatically appear +4. Start chatting with AI on the go! + +Your AI assistant is now truly everywhere. Download Jan Mobile today and experience AI without boundaries. + +[Download for iOS](https://apps.apple.com/app/jan-ai) • [Download for Android](https://play.google.com/store/apps/jan) • [Mobile Guide](/docs/mobile) \ No newline at end of file diff --git a/website/src/content/changelog/2025-05-14-jan-qwen3-patch.mdx b/website/src/content/changelog/2025-05-14-jan-qwen3-patch.mdx new file mode 100644 index 000000000..24d784153 --- /dev/null +++ b/website/src/content/changelog/2025-05-14-jan-qwen3-patch.mdx @@ -0,0 +1,23 @@ +--- +title: "Qwen3 support is now more reliable." 
+version: 0.5.17 +description: "Jan v0.5.17 is out: Qwen3 support is now more reliable" +date: 2025-05-14 +ogImage: "/assets/images/changelog/jan-v0-5-17-gemm3-patch.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +👋 Jan v0.5.17 is out: Qwen3 support is now more reliable + +## Highlights 🎉 + +- Improved Qwen3 support with cleaner token output +- Clearer install and quickstart docs +- UI polish and bug fixes throughout + +Update your Jan or [download the latest](https://jan.ai/). + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.5.17). diff --git a/website/src/content/changelog/2025-05-20-performance-boost.mdx b/website/src/content/changelog/2025-05-20-performance-boost.mdx new file mode 100644 index 000000000..489bb8eea --- /dev/null +++ b/website/src/content/changelog/2025-05-20-performance-boost.mdx @@ -0,0 +1,101 @@ +--- +title: "Jan v0.6.4: Performance Powerhouse" +version: 0.6.4 +description: "Massive speed improvements, GPU optimization, and streamlined model management" +date: 2025-05-20 +image: "/assets/images/changelog/performance-boost.png" +featured: false +--- + +## Speed Like Never Before ⚡ + +Jan v0.6.4 delivers our biggest performance update yet. Models load faster, inference is smoother, and memory usage is dramatically reduced. This is the Jan you've been waiting for. 
+ +### 🚀 Inference Speed Improvements + +Dramatic performance gains across the board: +- **3x Faster Model Loading**: Optimized model initialization reduces wait times +- **50% Faster Inference**: Improved CUDA kernels and memory management +- **Instant Model Switching**: Switch between models with near-zero delay +- **Background Preloading**: Frequently used models stay ready in memory +- **Smart Caching**: Intelligent context caching reduces repeated work + +### 🎯 GPU Optimization Revolution + +Completely rewritten GPU acceleration: +- **Auto-GPU Detection**: Automatically finds and uses your best GPU +- **Multi-GPU Support**: Distribute model layers across multiple GPUs +- **Memory Optimization**: 40% reduction in VRAM usage +- **Dynamic Offloading**: Automatically balance between GPU and CPU +- **CUDA 12 Support**: Latest NVIDIA drivers and optimizations + +### 🧠 Smarter Memory Management + +Revolutionary memory handling: +- **Adaptive Memory**: Automatically adjusts to available system memory +- **Memory Pressure Detection**: Gracefully handles low-memory situations +- **Efficient Model Unloading**: Frees memory when models aren't needed +- **Context Length Optimization**: Handle longer conversations without slowdown +- **Memory Usage Dashboard**: Real-time visibility into memory consumption + +### 📱 Startup Speed Breakthrough + +Jan now starts in seconds, not minutes: +- **Cold Start Optimization**: 5x faster first launch +- **Background Services**: Core services start in parallel +- **Lazy Loading**: Only load components when needed +- **Configuration Caching**: Settings load instantly +- **Progressive Initialization**: UI appears immediately, features load progressively + +### 🔧 Model Management Overhaul + +Streamlined model experience: +- **One-Click Downloads**: Simplified model acquisition +- **Download Resume**: Interrupted downloads continue automatically +- **Parallel Downloads**: Download multiple models simultaneously +- **Storage Optimization**: 
Automatic cleanup of unused model files +- **Model Recommendations**: AI suggests optimal models for your hardware + +### 💾 Storage Efficiency + +Dramatic reduction in disk usage: +- **Model Compression**: 30% smaller model files without quality loss +- **Duplicate Detection**: Automatically removes duplicate models +- **Incremental Updates**: Only download model changes, not entire files +- **Smart Cleanup**: Removes temporary files and caches automatically +- **Storage Analytics**: See exactly what's using your disk space + +### 🌐 Network Optimizations + +Faster downloads and better connectivity: +- **CDN Integration**: Download models from the closest server +- **Connection Pooling**: Efficient network resource usage +- **Retry Logic**: Automatic recovery from network interruptions +- **Bandwidth Adaptation**: Adjusts download speed to network conditions +- **Offline Mode**: Better handling when internet is unavailable + +### 🔍 Performance Monitoring + +New tools to understand performance: +- **Real-time Metrics**: See inference speed, memory usage, GPU utilization +- **Performance History**: Track performance over time +- **Bottleneck Detection**: Identify what's slowing down your system +- **Benchmark Tools**: Compare performance across different configurations +- **Performance Profiles**: Save optimal settings for different use cases + +### 🐛 Critical Fixes + +Major stability improvements: +- Fixed memory leaks during long conversations +- Resolved GPU driver compatibility issues +- Eliminated random crashes during model switching +- Fixed model corruption during interrupted downloads +- Resolved race conditions in multi-threaded operations + +## Technical Details + +This release includes fundamental changes to our inference engine, memory management, and GPU acceleration systems. While backwards compatible, you may notice different memory usage patterns and significantly improved performance. + +Experience the fastest Jan ever. 
Download v0.6.4 and feel the difference. + +[Download Jan v0.6.4](https://jan.ai/) • [Performance Guide](/docs/performance) • [Release Notes](https://github.com/menloresearch/jan/releases/tag/v0.6.4) \ No newline at end of file diff --git a/website/src/content/changelog/2025-06-15-mcp-revolution.mdx b/website/src/content/changelog/2025-06-15-mcp-revolution.mdx new file mode 100644 index 000000000..3bf1b03e4 --- /dev/null +++ b/website/src/content/changelog/2025-06-15-mcp-revolution.mdx @@ -0,0 +1,88 @@ +--- +title: "Jan v0.6.5: MCP Revolution - Connect AI to Everything" +version: 0.6.5 +description: "Introducing Model Context Protocol support, browser automation, and powerful tool integrations" +date: 2025-06-15 +image: "../../assets/images/changelog/mcp-revolution.gif" +featured: true +--- + +## The MCP Era Begins 🚀 + +Jan v0.6.5 introduces groundbreaking Model Context Protocol (MCP) support, transforming Jan from a simple chat interface into a powerful AI automation platform. Connect your AI to browsers, APIs, databases, and countless tools. 
+ +### 🔗 Model Context Protocol Integration + +MCP opens up infinite possibilities: +- **Universal Tool Access**: Connect to any service that supports MCP +- **Real-time Data**: Access live information from APIs and databases +- **Browser Automation**: Control web browsers directly through AI commands +- **File System Access**: Read, write, and manipulate files with AI assistance +- **Custom Tools**: Build your own MCP servers for specialized workflows + +### 🌐 Built-in MCP Servers + +Launch with powerful integrations: +- **Browser Control**: Automate web tasks, scrape data, fill forms +- **File Management**: AI-powered file operations and organization +- **API Integration**: Connect to REST APIs, GraphQL endpoints +- **Database Access**: Query and update databases through natural language +- **Git Operations**: Manage repositories with AI assistance + +### 🎯 Smart Tool Discovery + +Jan automatically discovers and configures MCP tools: +- **Auto-detection**: Finds available MCP servers on your system +- **One-click Setup**: Enable tools with simple toggle switches +- **Permission Control**: Fine-grained control over tool access +- **Usage Analytics**: Track which tools your AI uses most + +### 🛡️ Enhanced Security Framework + +Built with security as a priority: +- **Sandboxed Execution**: Tools run in isolated environments +- **Permission System**: Explicit approval for sensitive operations +- **Audit Logging**: Complete history of tool usage and permissions +- **Safe Defaults**: Conservative permissions that you can expand + +### 🎨 Redesigned Tool Interface + +Beautiful new interface for tool management: +- **Visual Tool Cards**: See available tools at a glance +- **Real-time Status**: Know when tools are active or inactive +- **Interactive Setup**: Guided configuration for complex tools +- **Usage Insights**: Understand how your AI uses different tools + +### 🔧 Developer Experience + +For MCP server developers: +- **Local Development**: Test MCP servers directly 
in Jan +- **Debug Tools**: Built-in logging and error reporting +- **Hot Reload**: Changes to MCP servers update instantly +- **Protocol Validation**: Ensure your servers follow MCP standards + +### 🚀 Performance Improvements + +Under the hood optimizations: +- **Faster Tool Loading**: MCP servers start 3x faster +- **Memory Efficiency**: Reduced memory usage for tool operations +- **Connection Pooling**: Efficient management of tool connections +- **Async Operations**: Non-blocking tool execution + +### 🌟 Coming Next + +The MCP ecosystem is just getting started: +- More built-in integrations (Slack, Discord, GitHub) +- Visual workflow builder for complex automations +- Community marketplace for sharing MCP servers +- Enterprise-grade security and compliance features + +## Breaking Changes + +- Tool permissions now require explicit user approval +- Some legacy integrations have been migrated to MCP +- Configuration format updated for better security + +Transform how you work with AI. Download Jan v0.6.5 and enter the MCP era. + +For technical details, see our [MCP documentation](/docs/mcp) and [GitHub release](https://github.com/menloresearch/jan/releases/tag/v0.6.5). diff --git a/website/src/content/changelog/2025-06-19-jan-ui-revamp.mdx b/website/src/content/changelog/2025-06-19-jan-ui-revamp.mdx new file mode 100644 index 000000000..ae9e2d4f0 --- /dev/null +++ b/website/src/content/changelog/2025-06-19-jan-ui-revamp.mdx @@ -0,0 +1,21 @@ +--- +title: "Jan v0.6.1 is here: It's a whole new vibe!" +version: 0.6.1 +description: "Are you ready for the sexiest UI ever?" +date: 2025-06-19 +ogImage: "/assets/images/changelog/jan-v0.6.1-ui-revamp.png" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +## Highlights 🎉 + +- Jan's been redesigned to be faster, cleaner, and easier to use. +- You can now create assistants with custom instructions and settings from a dedicated tab. +- You can now use Jan with Menlo's models. 
+ +Update your Jan or [download the latest](https://jan.ai/). + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.6.1). \ No newline at end of file diff --git a/website/src/content/changelog/2025-06-26-jan-nano-mcp.mdx b/website/src/content/changelog/2025-06-26-jan-nano-mcp.mdx new file mode 100644 index 000000000..9380c2272 --- /dev/null +++ b/website/src/content/changelog/2025-06-26-jan-nano-mcp.mdx @@ -0,0 +1,21 @@ +--- +title: "Jan v0.6.3 brings new features and models!" +version: 0.6.3 +description: "Unlocking MCP for everyone and bringing our latest model to Jan!" +date: 2025-06-26 +ogImage: "/assets/images/changelog/jn128.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +## Highlights 🎉 + +- We have added Model Context Protocol (MCP) support to the stable build of Jan. It needs to be enabled in the General Settings tab. +- Jan now supports Menlo's latest model, Jan-Nano-128k. +- Some hot fixes and improvements. + +Update your Jan or [download the latest](https://jan.ai/). + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.6.3). \ No newline at end of file diff --git a/website/src/content/changelog/2025-07-17-responsive-ui.mdx b/website/src/content/changelog/2025-07-17-responsive-ui.mdx new file mode 100644 index 000000000..ac27d3630 --- /dev/null +++ b/website/src/content/changelog/2025-07-17-responsive-ui.mdx @@ -0,0 +1,26 @@ +--- +title: "Jan v0.6.5 brings responsive UI and MCP examples!" +version: 0.6.5 +description: "New MCP examples, updated pages, and bug fixes!" +date: 2025-07-17 +ogImage: "/assets/images/changelog/release_v0_6_5.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +## Highlights 🎉 + +Jan v0.6.5 brings responsive UI improvements, enhanced model provider management, and better Linux compatibility alongside +new MCP examples. 
+ +- Support responsive UI on Jan +- Rework of Model Providers UI +- Bump version of llama.cpp +- Fix the bug where fetching models from custom provider can cause app to crash +- AppImage can now render on wayland + mesa + +Update your Jan or [download the latest](https://jan.ai/). + +For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.6.5). \ No newline at end of file diff --git a/website/src/content/changelog/2025-07-31-llamacpp-tutorials.mdx b/website/src/content/changelog/2025-07-31-llamacpp-tutorials.mdx new file mode 100644 index 000000000..fc0310174 --- /dev/null +++ b/website/src/content/changelog/2025-07-31-llamacpp-tutorials.mdx @@ -0,0 +1,119 @@ +--- +title: "Jan v0.6.6: Enhanced llama.cpp integration and smarter model management" +version: 0.6.6 +description: "Major llama.cpp improvements, Hugging Face provider support, and refined MCP experience" +date: 2025-07-31 +ogImage: "/assets/images/changelog/changelog0.6.6.gif" +--- + +import ChangelogHeader from '@/components/Changelog/ChangelogHeader.astro' + + + +## Highlights 🎉 + +Jan v0.6.6 delivers significant improvements to the llama.cpp backend, introduces Hugging Face as a +built-in provider, and brings smarter model management with auto-unload capabilities. This release +also includes numerous MCP refinements and platform-specific enhancements. 
+ +### 🚀 Major llama.cpp Backend Overhaul + +We've completely revamped the llama.cpp integration with: +- **Smart Backend Management**: The backend now auto-updates and persists your settings properly +- **Device Detection**: Jan automatically detects available GPUs and hardware capabilities +- **Direct llama.cpp Access**: Models now interface directly with llama.cpp (previously hidden behind Cortex) +- **Automatic Migration**: Your existing models seamlessly move from Cortex to direct llama.cpp management +- **Better Error Handling**: Clear error messages when models fail to load, with actionable solutions +- **Per-Model Overrides**: Configure specific settings for individual models + +### 🤗 Hugging Face Cloud Router Integration + +Connect to Hugging Face's new cloud inference service: +- Access pre-configured models running on various providers (Fireworks, Together AI, and more) +- Hugging Face handles the routing to the best available provider +- Simplified setup with just your HF token +- Non-deletable provider status to prevent accidental removal +- Note: Direct model ID search in Hub remains available as before + +### 🧠 Smarter Model Management + +New intelligent features to optimize your system resources: +- **Auto-Unload Old Models**: Automatically free up memory by unloading unused models +- **Persistent Settings**: Your model capabilities and settings now persist across app restarts +- **Zero GPU Layers Support**: Set N-GPU Layers to 0 for CPU-only inference +- **Memory Calculation Improvements**: More accurate memory usage reporting + +### 🎯 MCP Refinements + +Enhanced MCP experience with: +- Tool approval dialog improvements with scrollable parameters +- Better experimental feature edge case handling +- Fixed tool call button disappearing issue +- JSON editing tooltips for easier configuration +- Auto-focus on "Always Allow" action for smoother workflows + +### 📚 New MCP Integration Tutorials + +Comprehensive guides for powerful MCP integrations: +- 
**Canva MCP**: Create and manage designs through natural language - generate logos, presentations, and marketing materials directly from chat +- **Browserbase MCP**: Control cloud browsers with AI - automate web tasks, extract data, and monitor sites without complex scripting +- **Octagon Deep Research MCP**: Access finance-focused research capabilities - analyze markets, investigate companies, and generate investment insights + +### 🖥️ Platform-Specific Improvements + +**Windows:** +- Fixed terminal windows popping up during model loading +- Better process termination handling +- VCRuntime included in installer for compatibility +- Improved NSIS installer with app running checks + +**Linux:** +- AppImage now works properly with newest Tauri version and it went from almost 1GB to less than 200MB +- Better Wayland compatibility + +**macOS:** +- Improved build process and artifact naming + +### 🎨 UI/UX Enhancements + +Quality of life improvements throughout: +- Fixed rename thread dialog showing incorrect thread names +- Assistant instructions now have proper defaults +- Download progress indicators remain visible when scrolling +- Better error pages with clearer messaging +- GPU detection now shows accurate backend information +- Improved clickable areas for better usability + +### 🔧 Developer Experience + +Behind the scenes improvements: +- New automated QA system using CUA (Computer Use Automation) +- Standardized build process across platforms +- Enhanced error stream handling and parsing +- Better proxy support for the new downloader +- Reasoning format support for advanced models + +### 🐛 Bug Fixes + +Notable fixes include: +- Factory reset no longer fails with access denied errors +- OpenRouter provider stays selected properly +- Model search in Hub shows latest data only +- Temporary download files are cleaned up on cancel +- Legacy threads no longer appear above new threads +- Fixed encoding issues on various platforms + +## Breaking Changes + +- Models 
previously managed by Cortex now interface directly with llama.cpp (automatic migration included) +- Some sampling parameters have been removed from the llama.cpp extension for consistency +- Cortex extension is deprecated in favor of direct llama.cpp integration + +## Coming Next + +We're working on expanding MCP capabilities, improving model download speeds, and adding more provider +integrations. Stay tuned! + +Update your Jan or [download the latest](https://jan.ai/). + +For the complete list of changes, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.6.6). diff --git a/website/src/content/docs/handbook/brand/index.mdx b/website/src/content/docs/handbook/brand/index.mdx new file mode 100644 index 000000000..74f723dc7 --- /dev/null +++ b/website/src/content/docs/handbook/brand/index.mdx @@ -0,0 +1,4 @@ +--- +title: Brand & Identity +asIndexPage: true +--- \ No newline at end of file diff --git a/website/src/content/docs/handbook/comp/bonus.mdx b/website/src/content/docs/handbook/comp/bonus.mdx new file mode 100644 index 000000000..e30321579 --- /dev/null +++ b/website/src/content/docs/handbook/comp/bonus.mdx @@ -0,0 +1,5 @@ +--- +title: "Handbook" +description: "Jan Team Handbook" +--- + diff --git a/website/src/content/docs/handbook/comp/esop.mdx b/website/src/content/docs/handbook/comp/esop.mdx new file mode 100644 index 000000000..53f719ee7 --- /dev/null +++ b/website/src/content/docs/handbook/comp/esop.mdx @@ -0,0 +1,21 @@ +--- +title: "ESOP" +description: "Employee Stock Options" +--- + + +## Shares and Equity + +Jan is [employee-owned](/handbook/why/ownership). The people building Jan own a meaningful percentage of the company over time. 
+ +### Distributions + +Every 6 months, Jan distributes 1% of company equity among its team members: + +- Dec 2023: 1% distributed among 10 team members (1 part-time) +- June 2024: 1% distributed among 15 team members (4 part-time) +- Dec 2024: 1% distributed among 18 team members (5 part-time) + +Distributions are performance-based, and cover both full-time and part-time team members and open source contributors. + +This schedule is subject to change based on the discretion of the board. diff --git a/website/src/content/docs/handbook/comp/index.mdx b/website/src/content/docs/handbook/comp/index.mdx new file mode 100644 index 000000000..63e59206a --- /dev/null +++ b/website/src/content/docs/handbook/comp/index.mdx @@ -0,0 +1,34 @@ +--- +title: "Compensation" +description: "Payment and benefits schemes." +asIndexPage: true +--- + + +### Pay + +Everyone at Jan is on the same payscale, with cost of living adjustments based on [location](#location). + +Your base cash pay is determined based on realized impact, not titles. Workstream owners, responsible for the success/failure of a project & team, are compensated more than individual contributors. Folks at the same contribution levels receive equitable base pay. + +Based on your [progression](progression) speed, we have been known to adjust pay frequently and aggressively. + +### Location + +We provide a location adjustment for folks living in a high cost of living area. We use the [Numbeo](https://www.numbeo.com/cost-of-living/) index to arrive at a fair & equitable total. + +### Bonus + +Depending on location, we pay a discretionary cash bonus twice per year. The bonus typically ranges from 5-17% of your base pay, and is based on the company's performance and your individual contribution. + +### ESOP + +We offer an employee stock ownership plan to all full-time employees. The ESOP is a key part of our compensation package and is designed to align everyone's incentives with the long-term success of the company. 
Read our [ESOP policy](esop). + +### Benefits + +- **Equipment**: After the probation period, you get a $1000-$1500 USD budget for equipment. Eligible items include: laptop, monitors, keyboard, mouse, and noise cancelling headphones. Please see our [spending policy](spending) for more details. + +- **AI Subscriptions**: We are an AI native team - the expectation is to use AI to 100x your productivity. You get $100/month towards AI subscriptions and tools. Search `AI API Key Access Instructions` in Discord to get access. + +- **Language & Presentation Skills**: We grant a $100 yearly budget to improve language skills (e.g., [Italki](https://www.italki.com/), [Elsa](https://elsaspeak.com/en/), [Cambly](https://www.cambly.com/english?lang=en) tutors, classes and books) diff --git a/website/src/content/docs/handbook/culture/communicate.mdx b/website/src/content/docs/handbook/culture/communicate.mdx new file mode 100644 index 000000000..c651ad38f --- /dev/null +++ b/website/src/content/docs/handbook/culture/communicate.mdx @@ -0,0 +1,219 @@ +--- +title: How We Communicate +description: Building a global AI platform requires clear, open communication +--- + +We're a fully-remote team building open superintelligence across continents +and cultures. Clear communication isn't just nice to have—it's how we ship fast, +stay aligned, and build trust with millions of users.
+ +## Core Communication Principles + +### Default to Open + +- **Build in Public**: Development discussions happen in open GitHub issues +- **Share Progress**: Daily updates in Discord keep everyone informed +- **Document Decisions**: Important choices are recorded for future reference +- **Learn Together**: Share discoveries, failures, and insights with the team + +### Async First, Sync When Needed + +As a global team, we optimize for asynchronous communication: + +- **GitHub**: Technical discussions, code reviews, feature planning +- **Discord**: Quick questions, daily updates, community engagement +- **Documentation**: Long-form thinking, architectural decisions +- **Meetings**: Only when real-time collaboration adds value + +### Write for Clarity + +With team members from different linguistic backgrounds: + +- **Simple English**: Clear over clever, direct over diplomatic +- **Context Included**: Assume readers lack your specific context +- **Examples Help**: Show, don't just tell +- **Visual Aids**: Diagrams, screenshots, and code samples + +> Good: "The model fails to load on M1 Macs. Here's the error log and steps to reproduce..." +> Bad: "It's broken on Mac." + +## Communication Channels + +### Where to Communicate What + +| Type | Channel | Examples | +|------|---------|----------| +| **Feature Development** | GitHub Issues | New features, bug reports, technical discussions | +| **Daily Updates** | Discord #daily-updates | What you worked on, blockers, discoveries | +| **Quick Questions** | Discord team channels | "Anyone know why X happens?" 
| +| **Long-form Thinking** | GitHub Discussions / Docs | Architecture proposals, post-mortems | +| **User Support** | Discord #support | Helping users with Jan | +| **Urgent Issues** | Discord + @mention | Production bugs, security issues | + +### Response Time Expectations + +- **GitHub Issues**: Within 24-48 hours +- **Discord Questions**: Best effort, timezone dependent +- **User Bug Reports**: Acknowledge within 24 hours +- **Security Issues**: Immediate escalation + +## Communication Best Practices + +### For Engineers + +``` +✅ DO: +- Comment your code like someone will read it at 3am +- Update documentation when you change behavior +- Share WIP early for feedback +- Close the loop: report back on solutions + +❌ DON'T: +- Assume context that isn't written down +- DM technical discussions (keep them public) +- Wait until perfect to share progress +- Leave PRs without description +``` + +### For Everyone + +**Assume Positive Intent** +- We're all working toward the same goal +- Language barriers can cause misunderstandings +- Ask for clarification before assuming + +**Be Specific** +- "The download button doesn't work" → "The download button on Windows returns 404 for model X" +- "It's slow" → "Model loading takes 45 seconds on 8GB RAM" +- "Users want this" → "15 users requested this in issue #123" + +**Close Loops** +- Follow up on questions you ask +- Update issues with solutions +- Thank people who help you +- Share outcomes of discussions + +## Meetings That Work + +We minimize meetings but when we have them: + +### Before the Meeting +- Share agenda 24 hours prior +- Include pre-read materials +- State desired outcome clearly +- Invite only necessary people + +### During the Meeting +- Start with 5-minute silent reading of materials +- Stick to agenda +- Assign action items with owners +- End 5 minutes early + +### After the Meeting +- Post summary in relevant channel +- Create GitHub issues for action items +- Share recording if applicable + +## Writing 
Culture + +### Pull Requests + +```markdown +## What +Brief description of changes + +## Why +Context and motivation + +## How +Technical approach + +## Testing +How to verify it works + +## Screenshots +If UI changes +``` + +### Daily Updates + +Keep them brief but informative: + +``` +**Yesterday**: Shipped GGUF loader optimization (30% faster) +**Today**: Working on Windows installer bug #456 +**Blockers**: Need review on PR #789 +**TIL**: Quantization affects different models differently +``` + +### Documentation + +- Write docs as you code +- Include examples users can copy +- Explain the why, not just the how +- Keep it up to date or delete it + +## Community Communication + +When engaging with our open-source community: + +### Be Helpful +- No question is too basic +- Provide context and examples +- Point to relevant documentation +- Follow up on issues + +### Be Humble +- We don't have all the answers +- Users often have great ideas +- Mistakes happen, own them +- Thank contributors publicly + +### Be Human +- Personality is welcome +- Celebrate wins together +- Share the journey +- Build relationships + +## Global Team Considerations + +### Time Zones +- Post updates at consistent times +- Record important discussions +- Rotate meeting times fairly +- Respect off hours + +### Cultural Awareness +- Direct feedback styles vary by culture +- Silence doesn't mean agreement +- Ask if unsure about interpretation +- Celebrate diverse perspectives + +### Language +- English is second language for many +- Avoid idioms and slang +- Use simple, clear language +- Be patient with communication + +## Red Flags to Avoid + +- **Information Hoarding**: Share knowledge freely +- **Private Discussions**: Keep technical talk public +- **Assuming Context**: Document everything +- **Delayed Responses**: Acknowledge even if you can't solve immediately +- **Unclear Communication**: If confused, ask for clarification + +## The Jan Way + +Our communication style reflects our 
values: +- **Open**: Like our code +- **Inclusive**: Like our community +- **Clear**: Like our mission +- **Async**: Like our architecture +- **Global**: Like our vision + +Good communication at Jan isn't about perfection—it's about clarity, openness, and building together across any distance. + +--- + +*"The best code is documented code. The best decisions are documented decisions. The best team is one that communicates openly."* diff --git a/website/src/content/docs/handbook/culture/fight.mdx b/website/src/content/docs/handbook/culture/fight.mdx new file mode 100644 index 000000000..86fae1b20 --- /dev/null +++ b/website/src/content/docs/handbook/culture/fight.mdx @@ -0,0 +1,3 @@ +--- +title: How We Fight +--- \ No newline at end of file diff --git a/website/src/content/docs/handbook/how/sprints.mdx b/website/src/content/docs/handbook/how/sprints.mdx new file mode 100644 index 000000000..f5588ee7d --- /dev/null +++ b/website/src/content/docs/handbook/how/sprints.mdx @@ -0,0 +1,9 @@ +--- +title: "Sprints" +description: "Jan Team Handbook" +--- + +[Jan sprints](https://github.com/orgs/menloresearch/projects/5/views/25) are every 2 weeks. + +Releases are QA'd prior to release. We never release on Fridays, unless y'all +wanna come back on Saturday. diff --git a/website/src/content/docs/handbook/how/tools.mdx b/website/src/content/docs/handbook/how/tools.mdx new file mode 100644 index 000000000..b54947305 --- /dev/null +++ b/website/src/content/docs/handbook/how/tools.mdx @@ -0,0 +1,37 @@ +--- +title: "Internal Tools" +description: "Jan Team Handbook" +--- + +We like to make AI do the boring stuff. We also automate and dogfood our own tools as much as possible. + +### Tools + +- **GitHub**: For engineering specs, sprint planning, and documentation. +- **Discord**: For chat, voice, and video calls. +- **Google Hangouts**: For a short, daily standup. +- **Gmail**: For external communication. +- **HackMD/Obsidian**: For ad hoc ideas and meeting notes.
+- **Excalidraw**: For whiteboarding. +- **Password Manager**: You will be invited. +- **Jan/Cortex**: A personal, private copilot. + +### Infrastructure + +We use a mix of cloud providers and build our own on-premises hardware to cut costs and optimize performance. + +- Singapore Cluster: For training and inference. +- Hanoi Cluster: For CI/CD and data pipelines. +- Burst Compute: We like Runpod for larger training runs. + +See [Infra](/handbook/infra/infra) for more details. + +### CI/CD + +We have a dedicated DevOps/MLOps team. Ping in the `infra-internal` channel for any requests. + +### Automations + +We have a dedicated automations engineer. Her goal is to automate yesterday's tasks today, so that you are doing something different tomorrow. + +Ping in the `#automations` channel for any requests. diff --git a/website/src/content/docs/handbook/hr/1-on-1s.mdx b/website/src/content/docs/handbook/hr/1-on-1s.mdx new file mode 100644 index 000000000..e19e83fd1 --- /dev/null +++ b/website/src/content/docs/handbook/hr/1-on-1s.mdx @@ -0,0 +1,20 @@ +--- +title: "One on Ones" +description: "One on Ones." +--- + +We don't schedule recurring 1-on-1s, because in our experience, engineers and researchers [hate it](https://www.reddit.com/r/cscareerquestions/comments/14rkbwv/does_anybody_else_hate_their_11_meetings/). We believe it's better to address issues and provide feedback as they arise. + +If you need to discuss something with us, simply book a time on the founder or team lead's calendar. **We're always available to chat.** + +✅ Do: +- Schedule check-ins as soon as you need them +- Provide honest feedback and criticism (we appreciate it!) + +❌ Don't: +- Wait for a scheduled one-on-one to raise issues +- Ask for confirmation before scheduling a check-in + +### Exceptions + +If you'd like to establish regular check-ins, please let us know and we're very happy to set one up.
diff --git a/website/src/content/docs/handbook/hr/interviewing.mdx b/website/src/content/docs/handbook/hr/interviewing.mdx new file mode 100644 index 000000000..696f44f8c --- /dev/null +++ b/website/src/content/docs/handbook/hr/interviewing.mdx @@ -0,0 +1,46 @@ +--- +title: "Interviewing" +description: "How to interview and hire outliers." +--- + +Our talent pipeline is an organizational moat. This guide walks you through our interviewing process. + +### Process + +Jan's interview process is designed to be self-serve. + +1. **Application**: Candidates apply through our [careers page](https://homebrew.bamboohr.com/careers). We use a combination of AI and human review to shortlist candidates. +2. **Initial Screening**: Shortlisted candidates are invited to a 30-minute initial screening, usually with HR/founder. +3. **2-3 Interviews**: Candidates are interviewed by 2-3 team members. Each interview is 30-45 minutes long. +4. **Founder interview**: The final interview is with one of the founders. This is a 30-minute interview. +5. **Deliberation**: Candidates are evaluated based on their interviews and portfolio. We aim to make a decision within 1 week. + +We aim to be data-driven, and each open role is tracked in our ATS (BambooHR). We use this data to improve our hiring process. + +> Historically, our acceptance rate has been less than 1% of all applications. For direct referrals, this rate is higher at >10%. + +### Preparing for Interviews + +To start interviewing, please follow these steps: + +- [ ] Indicate your interest in helping with interviews to HR. +- [ ] Shadow existing interviews (2-3) to understand our process and ask questions. +- [ ] Create a [Google Appointments link](https://workspace.google.com/intl/en_sg/resources/appointment-scheduling/) for your interviews. Make sure to set a default Google Hangouts link. Share the link with HR. +- [ ] New candidates will automatically schedule interviews with you using the Google Appointments link. 
+- [ ] If you have an upcoming interview, review their resume and prepare thoughtful questions. +- [ ] Input **detailed evaluations in BambooHR ATS** after each interview. + +### Evaluation + +We do not use a scoring system for interviews and prefer to encourage natural conversation. + +However, if you do need an evaluation template, you can use the following: + +| Criteria | Description | Response | Notes | +|--------------|----------------------------------------|--------------|--------------------------------------------| +| Technical Skills | Proficiency in relevant technical areas (AI, robotics, programming, etc.) indicated in the resume ||| +| Fundamentals | Understanding of core concepts in the candidate's field of expertise (math, statistics, ML, physics, etc.) | | | +| Problem-Solving Ability | Ability to approach and solve complex problems ||| +| Communication Skills | Clarity and effectiveness in communication ||| +| Cultural Fit | Alignment with company [values](/handbook/philosophy/humanity) and [culture](/handbook/who/curiosity) ||| +| Overall Impression | General impression of the candidate's suitability for the role ||| diff --git a/website/src/content/docs/handbook/hr/leave.mdx b/website/src/content/docs/handbook/hr/leave.mdx new file mode 100644 index 000000000..ffbe11cb2 --- /dev/null +++ b/website/src/content/docs/handbook/hr/leave.mdx @@ -0,0 +1,23 @@ +--- +title: "Time Off" +description: "Vacation and unplanned leave policy." +--- + +We offer 14 base vacation days, unlimited sick leave, and additional public holidays based on your respective region. + +### Planning Leave + +1. Submit the days in BambooHR at least 1-2 weeks in advance +2. Inform your team as early as possible + +### Public Holidays + +We observe public holidays in our respective countries of residence. You do not need to use vacation days. + +### Illness & Unplanned Leave + +Please submit all sick leave requests in BambooHR with a medical certificate (when applicable). 
For sick leave longer than 1 week, please inform HR and your team. + +### Parental + +We offer parental leave for eligible staff. Please inform HR to coordinate parental leave. diff --git a/website/src/content/docs/handbook/hr/progression.mdx b/website/src/content/docs/handbook/hr/progression.mdx new file mode 100644 index 000000000..d4105e07b --- /dev/null +++ b/website/src/content/docs/handbook/hr/progression.mdx @@ -0,0 +1,20 @@ +--- +title: "Progression" +description: "Promotions, progression, and perks." +--- + +### Promotions + +We don't believe in titles. We believe in fair compensation and fast progression. + +> Previously, we have been known to organically promote team members 2x within 6 months of joining. + +### Perks + +- Your contributions are Open Source and public +- Speaking opportunities at conferences (we get invited often) +- Direct access to a large network of advisors +- No shortage of hard problems (you don't need to "prove" yourself before working on something interesting) +- Visibility across our hardware, software, and research projects +- Real, blunt feedback, at scale (our users are not shy!) +- Hard, tough, evergreen problems diff --git a/website/src/content/docs/handbook/hr/retreats.mdx b/website/src/content/docs/handbook/hr/retreats.mdx new file mode 100644 index 000000000..2560a7bd8 --- /dev/null +++ b/website/src/content/docs/handbook/hr/retreats.mdx @@ -0,0 +1,17 @@ +--- +title: "Retreats" +description: "Annual Team Retreat." +--- + +## Retreats + +We host an annual team retreat to bring the team together, bond, and plan for the year ahead. + +Previous retreats have been held in: +- 2023 (8 people): Ho Chi Minh City, Vietnam +- 2024 (16 people): Singapore +- 2025: TBA! + +## Meetups + +We encourage team members to meet up in person whenever possible. Remote work can get lonely, and we offer a small budget for regional get-togethers. 
diff --git a/website/src/content/docs/handbook/hr/side-gigs.mdx b/website/src/content/docs/handbook/hr/side-gigs.mdx new file mode 100644 index 000000000..a5e9a73dc --- /dev/null +++ b/website/src/content/docs/handbook/hr/side-gigs.mdx @@ -0,0 +1,26 @@ +--- +title: "Side Gigs" +description: "Best practices around external engagements." +--- + +We have a permissive policy regarding side gigs, similar to [Posthog's](https://posthog.com/handbook/people/side-gigs). + +However, our main ask is for Jan to be your "main gig", as you will be taking a crucial role in success/failure of Jan. Manage your time in a way that is fair to the company and your colleagues. + +At the end of the day, you all own (or will own) shares of the company. Handle company IP as you would your own property. + +### Declaration + +**For work in related industries, we do ask for an upfront declaration**, so that we can clear potential conflicts of interest. + +Please fill out the following form and submit it to: `hr@menlo.ai` + +| Dates | Organization | Nature of Work | Hours/week | +|-------------|--------------|----------------|------------| +| | | | | +| | | | | +| | | | | + +### A Note on IP + +Startups & careers die because of IP disputes from moonlighting employees. Please respect all party's IP - never intermingle assets (like laptops and logins) between engagements. diff --git a/website/src/content/docs/handbook/hr/spending.mdx b/website/src/content/docs/handbook/hr/spending.mdx new file mode 100644 index 000000000..48e72dabf --- /dev/null +++ b/website/src/content/docs/handbook/hr/spending.mdx @@ -0,0 +1,36 @@ +--- +title: "Spending" +description: "Spending and reimbursement policy." +--- + + +We are incredibly frugal and intend to be around in 50 years. However, we avoid being "penny-wise & pound-foolish" and spend when it matters. 
+ +### Expenses + +#### Software + +For software subscriptions and other expenses incurred as a part of [benefits](compensation#benefits), please save receipts and submit it as a reimbursement [request](#reimbursements). + +#### Large Purchases + +For larger purchases like equipment, please first submit a purchase request through `#admin-requests` in Discord. + +We can directly order for you or reimburse you for the purchase. + +#### Incidentals + +We don't have a policy for incidentals. We trust you to make the right decisions, while keeping in mind that we are a startup and every dollar counts. + +### Reimbursements + +Reimbursements are processed at the end of every month with payroll. + +Email all receipts (required) & the following form to `finance@menlo.ai`: + +| Date | Description | Amount | Currency | Exchange Rate | Total (USD) | +|------------|-------------|--------|----------|---------------|-------------| +| | | | | | | +| | | | | | | +| | | | | | | +| | | | | Grand Total | | diff --git a/website/src/content/docs/handbook/hr/travel.mdx b/website/src/content/docs/handbook/hr/travel.mdx new file mode 100644 index 000000000..a446435cc --- /dev/null +++ b/website/src/content/docs/handbook/hr/travel.mdx @@ -0,0 +1,19 @@ +--- +title: "Travel" +description: "Travel policy." +--- + + +We encourage team members to represent Jan at conferences and travel for in-person alignment + +For all work travel, we ask that you get approval from HR before confirming your trip & booking flights/hotels. + +### Bookings + +We prefer to directly book flights & hotels on your behalf, through our travel portals. + +If you need to arrange travel by yourself, please obtain a travel budget first. Save all receipts for reimbursement. + +### Per Diem + +Depending on the location, we provide a $20 USD per diem for meals and incidentals. This amount is adjustable based on the location. 
diff --git a/website/src/content/docs/handbook/index.mdx b/website/src/content/docs/handbook/index.mdx new file mode 100644 index 000000000..f023d7bc3 --- /dev/null +++ b/website/src/content/docs/handbook/index.mdx @@ -0,0 +1,82 @@ +--- +title: "Jan Team Handbook" +description: "Building superintelligence that you can own and run anywhere." +--- + +{/* TODO: Replace with Astro-compatible icons: FaDiscord, FaMap */} +import Steps from '@/components/Steps.astro'; + +## Jan Handbook + +> Jan's Handbook is inspired by [Posthog](https://posthog.com/handbook) and [Gitlab](https://handbook.gitlab.com/). +> Thank you for showing us the way 🙏 + +### Welcome! + +This handbook explains how [Jan](https://jan.ai) works, and is public. + +We're building superintelligence that you can self-host and use locally. Not as a limitation, but as a feature. Your AI should work wherever you need it - on your laptop during a flight, on your company's servers for compliance, or in the cloud for scale. + +Jan's Handbook is a [living document](https://en.wikipedia.org/wiki/Living_document), constantly evolving as we build the future of AI ownership. + +## 1. Chapter 1: Why does Jan exist? + +- [What problem are we solving?](/handbook/why/problem) - Why AI needs to be ownable, not just rentable +- [Who we are building for](/handbook/why/icp) - People who want flexibility and control +- [Our vision for open superintelligence](/handbook/why/ownership) - An ecosystem you can own + +## 2. Chapter 2: How we make money + +- [How we make money](/handbook/money/how) - Open core + optional services +- [What is our moat](/handbook/money/moat) - Community, trust, and aligned incentives + +## 3. 
Chapter 3: Who we hire + +- [The Fast and the Curious](/handbook/who/curiosity) - Ship today, learn for tomorrow +- [Underdogs Welcome](/handbook/who/underdogs) - Talent is everywhere, opportunity isn't +- [dy/dx > y-intercept](/handbook/who/dydx) - Growth rate beats starting position +- [Obsession](/handbook/who/obsession) - We seek those who can't not build this +- [Team, not family](/handbook/who/team) - Pro sports team approach + +## 4. Chapter 4: How we work + +- [Remote-first](/handbook/how/remote) - Global team, local impact +- [Open-source](/handbook/how/open-source) - Build in public +- [How we coordinate](/handbook/how/tools) - Tools and processes +- [Sprints](/handbook/how/sprints) - Ship every sprint +- [How we communicate](/handbook/culture/communicate) - Clear, open, async-first + +## 5. Chapter 5: Jan's Philosophy + +- [Open Superintelligence Platform](/handbook/philosophy/open-superintelligence) - Not one model, but an ecosystem +- [Lateral Thinking with Proven Technology](/handbook/philosophy/lateral) - Innovation through integration +- [Humanity-aligned](/handbook/philosophy/humanity) - Technology that unites +- [Perpetual Optimism](/handbook/philosophy/optimism) - Building the future we want to live in + +## 6. 
Chapter 6: Team & Operations + +- [Team Structure](/handbook/team) - How we're organized +- [Compensation](/handbook/comp) - Fair pay, meaningful equity +- [HR & Culture](/handbook/hr/onboarding) - Joining and growing with Jan + +--- + +## Quick Links + +- **For new team members**: Start with [Onboarding](/handbook/hr/onboarding) +- **For contributors**: Check out our [GitHub](https://github.com/janhq) and [Discord](https://discord.gg/FTk2MvZwJH) +- **For the curious**: Read about [our vision](/handbook/why/ownership) + +## Our North Star + +We're building superintelligence that: +- **Works anywhere**: From your laptop to your data center +- **Belongs to you**: Download it, own it, modify it +- **Scales infinitely**: One person or ten thousand, same platform +- **Improves constantly**: Community-driven development + +This isn't just about making AI accessible. It's about ensuring the most transformative technology in human history can be owned by those who use it. + +--- + +*"The future of AI isn't about choosing between local or cloud. It's about having both, and everything in between, working perfectly together."* \ No newline at end of file diff --git a/website/src/content/docs/handbook/lifecycle/onboarding.mdx b/website/src/content/docs/handbook/lifecycle/onboarding.mdx new file mode 100644 index 000000000..9bf1bbcea --- /dev/null +++ b/website/src/content/docs/handbook/lifecycle/onboarding.mdx @@ -0,0 +1,73 @@ +--- +title: "Onboarding to Jan" +description: "Onboarding Checklist for New Hires." +asIndexPage: true +--- + +Welcome to Jan! We are excited to have you join our team. This guide walks you through the onboarding process. + +> You will receive an onboarding invitation morning of your first day. Prior to the scheduled onboarding call, please send your Github & Discord handles to `hr@menlo.ai`. + +### Expectations + +| Expectation | Description | +|-------------|-------------| +| **Take Initiative** | Take ownership of an area. 
If you see a problem, take it and own it to completion. Your work will often not be defined. Take the initiative to figure out what needs to be done, seek clarification, and then communicate your plan to the team. | +| **Bias to Action** | There are many problems to solve. Don't ask for permission or try to build consensus - just take action. After 2-3 months, you should be able to show clear results having turned around a problem-filled area. | +| **Communication** | Clear and timely communication is key. If unsure, ask questions. We are a global team and respect is paramount. Disrespectful behavior is not tolerated. Focus on problem-solving, not personal attacks. Assume Hanlon’s Razor: “Never attribute to malice that which is adequately explained by lack of ~~stupidity~~ communication or ambiguity.” | +| **Mastery** | In this frontier industry, expertise comes from doing. Own your area and build mastery. | + +For more details, see our guides on [remote work](/handbook/how/remote). + +### Code of Conduct + +- **Availability and Communication**: Ensure you are available and engaged during designated work hours and scheduled meetings. +- **Work Environment**: Strive to create a quiet and distraction-free workspace whenever possible, especially on video calls. +- **Video Participation**: Video participation is expected unless there are exceptional circumstances. +- **Dress Code**: Casual attire is perfectly acceptable for meetings, but please exercise good judgment (e.g. no pajamas). +- **Vacations:** Communicate leave at least 2 weeks (1 sprint) in advance via Bamboo. +- **Emergency Leave:** Please inform Daniel, Nicole and HR in your #hr-channel if you require emergency leave. +- **Side Gigs Policy**: See [side-gigs](side-gigs). Please consult with HR on engagements with potential legal & IP implications. 
+- **Sexual harassment:** We have a zero tolerance policy against behavior of a sexual nature that could reasonably be expected to cause offense or humiliation, e.g. verbal, nonverbal, or physical conduct, via written and electronic communications. + +### Tools + +As much as possible, we build-in-public and use the following tools to asynchronously collaborate: + +- [Github](https://github.com/menloresearch) +- [Jan Discord](https://discord.gg/VSbRN3vwCD) +- [Google Workspace](https://workspace.google.com) +- [Hugging Face](https://huggingface.co/menloresearch) +- Password Manager: You will be invited +- AI Tools and API keys: Coding with AI is heavily encouraged + +### Checklists + +#### Day 1 + +- Sign all HR documents. +- Download and access all tools. +- Check calendar invites: daily standups and TGIF. +- Introduce yourself in the [`#welcome`](https://discord.gg/VSbRN3vwCD) Discord channel. +- Set up your [BambooHR](https://homebrew.bamboohr.com/home) account. +- Set up VPN. Search `VPN access instructions` in Discord for the latest instructions. +- Check out the current sprint in [Github](https://github.com/orgs/menloresearch/projects/5) +- Ask questions in your private `#hr-NAME` channel. + +
+Import **Who's out** (on leave) calendar from BambooHR + +- Go to https://homebrew.bamboohr.com/calendar. Login if needed. +- Click on the gear icon, select **iCal Feeds...** +- Select **Create Calendar Feed** under **Who's Out**. Copy the generated link. +- In Google Calendar, you can import the new calendar from URL. + +
+ +#### Week 1 + +- Post daily updates in the [`#daily-updates`](https://discord.gg/AxypHJRQxd) channel prior to standup. +- Review this [Jan Handbook](https://menlo.ai/handbook). +- Push 1 PR into this Handbook. This a living document! +- Disclose side gigs with potential legal & IP implications to HR. +- Attend TGIF demos on Friday afternoon (6PM GMT+8). diff --git a/website/src/content/docs/handbook/money/how.mdx b/website/src/content/docs/handbook/money/how.mdx new file mode 100644 index 000000000..a49701b1c --- /dev/null +++ b/website/src/content/docs/handbook/money/how.mdx @@ -0,0 +1,144 @@ +--- +title: "How we make money" +description: "Building a sustainable business around open superintelligence" +--- + +Jan is building a sustainable business that aligns our incentives with our users' +success. We believe the best way to build beneficial AI is to ensure our business +model reinforces our values: privacy, ownership, and universal access. + +## Our Business Model + +### 1. Open Core Foundation + +**Jan Desktop is free forever** +- Download and run AI models locally +- No subscriptions, no accounts required +- Full privacy and ownership of your data +- Community-driven development + +This isn't a loss leader—it's our foundation. A thriving open-source community creates: +- Trust through transparency +- Innovation through collaboration +- Distribution without marketing spend +- Feedback loops that improve the product + +### 2. 
Premium Features & Services + +We monetize through optional add-ons that enhance the Jan experience: + +**Jan Cloud (Coming Soon)** +- Optional cloud sync across devices +- Managed model hosting for teams +- Advanced collaboration features +- Pay only for what you use + +**Enterprise Support** +- Priority support and SLAs +- Custom model deployment +- Air-gapped installations +- Compliance and audit tools + +**Professional Tools** +- Advanced model fine-tuning interfaces +- Team management and permissions +- Analytics and usage insights +- API rate limit increases + +### 3. Model Marketplace + +**Curated Model Hub** +- Premium access to specialized models +- Early access to new model releases +- Commercial licensing for enterprise models +- Revenue sharing with model creators + +We use **Jan Exam** to ensure quality—whether models are free or paid, they must meet our standards. + +### 4. Hardware Integration + +**Optimized Hardware Solutions** +- Pre-configured devices for AI inference +- Plug-and-play AI workstations +- Edge deployment solutions +- Partnership with hardware manufacturers + +### 5. Training & Certification + +**Jan Certified Program** +- Professional certification for developers +- Enterprise training workshops +- Educational institution partnerships +- Community education initiatives + +## Revenue Philosophy + +### What We'll Never Do + +- **Sell your data**: Your conversations remain private +- **Lock you in**: Always provide export and migration tools +- **Paywall core features**: Local AI remains free +- **Compromise on privacy**: No ads, no tracking, no surveillance + +### What We Always Do + +- **Align incentives**: We succeed when you succeed with AI +- **Transparent pricing**: Clear, predictable costs +- **Community first**: Free tier remains fully functional +- **Open standards**: No proprietary lock-in + +## Unit Economics + +Our approach creates sustainable unit economics: + +1. **Zero marginal cost** for open-source users +2. 
**High-margin** cloud and enterprise services +3. **Network effects** as community grows +4. **Reduced CAC** through open-source distribution + +## The Flywheel + +``` +Open Source Adoption → Community Growth → Better Models → +More Users → Enterprise Adoption → Revenue → Investment in Open Source +``` + +Each turn of the wheel strengthens the next: +- More users improve models through feedback +- Better models attract enterprise customers +- Enterprise revenue funds open development +- Open development attracts more users + +## Long-term Vision + +We're building for a future where: + +- Every organization runs their own AI +- Privacy is the default, not premium +- Open models outperform closed ones +- AI infrastructure is as common as web servers + +Our business model ensures we'll be here to see it through. + +## Current Status + +As of 2024: +- ✅ Sustainable open-source project +- ✅ Growing community adoption +- 🚧 Enterprise features in development +- 🚧 Cloud services in beta +- 📅 Hardware partnerships planned + +## Join Our Mission + +Whether you're a: +- **User**: Your adoption drives our mission +- **Contributor**: Your code shapes our platform +- **Customer**: Your success funds development +- **Partner**: Your integration expands possibilities + +You're part of building the open superintelligence economy. + +--- + +*"The best business model is one where doing the right thing is also the profitable thing. That's what we're building at Jan."* diff --git a/website/src/content/docs/handbook/money/moat.mdx b/website/src/content/docs/handbook/money/moat.mdx new file mode 100644 index 000000000..7faedf2db --- /dev/null +++ b/website/src/content/docs/handbook/money/moat.mdx @@ -0,0 +1,169 @@ +--- +title: "What is our moat?" +description: "Building defensibility through community, trust, and aligned incentives" +--- + +Traditional moats rely on keeping secrets, locking in users, or maintaining +technical advantages. 
Jan's moat is different—it's built on openness, community, +and aligned values that are impossible to replicate with a closed approach. + +## Community as Competitive Advantage + +Our strongest moat is our community. While others guard their code, we share everything: + +### Network Effects at Scale +- Every user improves Jan for everyone else +- Bug reports, feature requests, and contributions compound +- Community support scales infinitely +- Local communities solve local problems + +### Trust Through Transparency +- Open source code earns trust closed systems can't buy +- Users verify our privacy claims themselves +- No hidden agendas or dark patterns +- Mistakes are public, fixes are collaborative + +### Innovation Velocity +- Thousands of contributors vs hundreds of employees +- Global perspectives vs Silicon Valley groupthink +- 24/7 development across time zones +- Passionate users become passionate builders + +## Technical Moats + +### Local-First Architecture +While others race to build bigger data centers, we're optimizing for edge computing: +- Years of optimization for consumer hardware +- Deep integration with local systems +- Efficient model quantization expertise +- Cross-platform compatibility knowledge + +### Privacy-Preserving Design +Privacy isn't a feature we added—it's our foundation: +- Architecture that makes surveillance impossible +- No user data to monetize or leak +- Local-first eliminates attack surfaces +- Trust that compounds over time + +### Model Agnostic Platform +We're not tied to any single model: +- Support for all open models +- Jan Exam ensures quality across providers +- Users aren't locked to our models +- Best-in-class always available + +## Business Model Alignment + +Our moat strengthens because our business model reinforces our values: + +### No Conflict of Interest +- We don't profit from user data +- No ads means no perverse incentives +- Success comes from user success +- Premium features enhance, not gatekeep + +### 
Sustainable Growth +- Open source distribution = zero CAC +- Community support = reduced support costs +- User contributions = free R&D +- Word of mouth = organic growth + +## Cultural Moats + +### Values Alignment +Users choose Jan because they share our values: +- Privacy is non-negotiable +- Ownership matters +- Local-first is the future +- Community over corporation + +### Mission-Driven Team +- We attract talent that believes in the mission +- Lower salaries offset by meaningful work +- Equity alignment with long-term vision +- Cultural fit over pure technical skills + +## The Anti-Moat Strategy + +Paradoxically, our moat comes from not trying to build traditional moats: + +### No Lock-In Creates Loyalty +- Easy to leave = users choose to stay +- Export everything = trust in the platform +- No switching costs = genuine preference +- Freedom of choice = actual choice + +### Open Source as Defense +- Can't be acquired and shut down +- Can't be feature-frozen by new management +- Community can fork if we lose our way +- Immortal through decentralization + +## Compounding Advantages + +Our moats compound over time: + +``` +Year 1: Build trust through transparency +Year 2: Community starts contributing significantly +Year 3: Network effects accelerate development +Year 4: Ecosystem becomes self-sustaining +Year 5: Platform effect makes leaving costly (by choice) +``` + +## What Can't Be Copied + +Competitors can't replicate: + +### Time and Trust +- Years of consistent privacy-first decisions +- Track record of putting users first +- Community relationships built over time +- Reputation for reliability and openness + +### Community Culture +- Shared values and mission +- Collaborative problem-solving approach +- Global perspective on AI needs +- Bottom-up innovation mindset + +### Architectural Decisions +- Local-first can't be bolted onto cloud-first +- Privacy can't be added to surveillance systems +- Community-driven can't be faked by corporations +- Open source 
commitment can't be half-hearted + +## The Ultimate Moat + +Our ultimate moat is simple: **we're building what we'd want to use**. + +- We're users of our own platform +- We feel the pain points personally +- We can't betray users without betraying ourselves +- Our incentives perfectly align with our community + +## Sustainable Defensibility + +Traditional moats erode: +- Patents expire +- Technical advantages get copied +- Network effects can shift platforms +- Regulations can break monopolies + +Jan's moats strengthen: +- Trust compounds daily +- Community grows stronger +- Values attract like-minded people +- Open source ensures immortality + +## Conclusion + +Our moat isn't about keeping others out—it's about bringing everyone in. By building in the open, aligning our incentives with users, and creating genuine value, we're building defensibility that transcends traditional business strategy. + +The question isn't "How do we prevent competition?" but rather "How do we make competition irrelevant by building something that can't exist any other way?" + +That's our moat: being the platform that puts users first, not because it's good business, but because it's the only way to build the AI future we want to live in. + +--- + +*"The strongest moat is a community that would rebuild you if you disappeared."* diff --git a/website/src/content/docs/handbook/philosophy/humanity.mdx b/website/src/content/docs/handbook/philosophy/humanity.mdx new file mode 100644 index 000000000..c8a1e6807 --- /dev/null +++ b/website/src/content/docs/handbook/philosophy/humanity.mdx @@ -0,0 +1,62 @@ +--- +title: "Humanity-aligned" +description: "Jan exists because technology lets humanity work as one." +--- + + +![Our Blessed Kingdom](../../../../assets/tom_gauld.png) + +> "The imagined community of the nation is always limited and sovereign, yet human connections and cooperation stretch far beyond borders." — Benedict Anderson + +## Why this matters + +- AI is geopolitical. 
Nation-states compete to control it +- We are concerned about those using AI to stoke division, profit from lies, and spur conflict +- Some of us come from places where war isn't distant—it's family history +- Technology should unite, not divide + +## Who we are + +- Remote team across 7 countries +- Diverse backgrounds, united by mission +- Many of us are [third culture kids](https://en.wikipedia.org/wiki/Third_culture_kid)—growing up between worlds +- We often find ourselves misunderstood: an Asian-majority team, English-speaking, influenced by global philosophies +- Crossroad cultures shape us: + - Turkiye: Europe, Asia, Middle East converge + - Singapore: Trade hub bridging worlds +- We respect each other's cultures and build on shared values + +## Jan's stance + +- Humanity first. We build for people, not governments or factions +- AI should enable: + - Shared prosperity + - Universal education + - Peaceful collaboration +- Technology **must** empower humanity to do more—together + +## The bigger picture + +- Human history is one of scaling cooperation: + - Small-scale [kin groups](https://www.sciencedirect.com/science/article/pii/S0960982219303343) → diverse political formations → [modern nation-states](https://en.wikipedia.org/wiki/Westphalian_system) → global networks + - Empires rose and fell. Nationalism united and divided. Globalization connected and excluded +- History doesn't progress. It moves—messy, cyclical, and full of contradiction +- Technology changes the terrain: + - Like ant colonies forming complex systems from simple interactions, humans have always built networks beyond central control + - Complexity emerges from countless small decisions—but unlike ants, we carry ideologies, ambitions, and fears +- AI is another fork in the road. It can reinforce old hierarchies or dismantle them. 
It can be used to surveil or to liberate + +## Why we exist + +- 30 people, from different countries, met online to build together +- The internet enables connections that were impossible a generation ago +- Ideas cross borders: an anthropologist in Turkiye collaborates with a roboticist in Saigon +- Jan exists because technology lets humanity work as one + +## Our vision + +- AI can accelerate global coordination and shared progress +- Our goal: help humanity align, collaborate, and solve collective challenges +- Success = contributing to humanity's long arc toward unity + +If our work helps the world coordinate better—even slightly—we've done something that matters diff --git a/website/src/content/docs/handbook/philosophy/lateral.mdx b/website/src/content/docs/handbook/philosophy/lateral.mdx new file mode 100644 index 000000000..b783a0880 --- /dev/null +++ b/website/src/content/docs/handbook/philosophy/lateral.mdx @@ -0,0 +1,147 @@ +--- +title: "Lateral Thinking with Proven Technology" +description: "Building the future with battle-tested tools" +--- + +> "Innovation is not about using the newest technology, but about using technology in new ways." — Adapted from Nintendo's philosophy + +## Our Approach + +At Jan, we don't chase the bleeding edge for its own sake. We take proven, battle-tested technologies and combine them in innovative ways to solve real problems. This philosophy shapes everything we build. + +## Why Proven Technology Wins + +### Stability Over Novelty +- **GGUF Format**: We didn't invent a new model format. We adopted the community standard that already works across platforms +- **llama.cpp**: Years of optimization by brilliant minds—why reinvent when we can build upon? 
+- **Tauri + Web Tech**: Proven UI stack that works everywhere, letting us focus on AI innovation +- **SQLite**: The world's most deployed database for local data—perfect for privacy-first architecture + +### Accessibility Through Maturity +When we use proven technology: +- Hardware requirements are well understood +- Optimization paths are clear +- Community knowledge exists +- Documentation is comprehensive +- Edge cases are known + +This means Jan works on more devices, for more people, with fewer surprises. + +## Lateral Innovation Examples + +### Local AI That Actually Works +**Traditional approach**: Build massive cloud infrastructure, require constant internet +**Our approach**: Use proven quantization techniques to run powerful models on consumer hardware + +### Privacy Without Compromise +**Traditional approach**: Complex encryption and privacy theater +**Our approach**: Simply don't collect data. Use local SQLite. Problem solved. + +### Universal Model Support +**Traditional approach**: Proprietary model formats and exclusive partnerships +**Our approach**: Support the open GGUF standard that the community already uses + +### Cross-Platform Without Complexity +**Traditional approach**: Native apps for each platform, massive development overhead +**Our approach**: One codebase using Electron, works everywhere, ships faster + +## The Power of Constraints + +Working with proven technology creates healthy constraints: + +### Resource Efficiency +- Can't throw infinite compute at problems +- Must optimize for consumer hardware +- Forces elegant solutions over brute force +- Makes us accessible globally, not just in rich markets + +### Clear Boundaries +- Known limitations guide design decisions +- Realistic about what's possible today +- Focus on solving real problems now +- Build stepping stones to the future + +### Community Alignment +- Use formats the community knows +- Build on protocols people trust +- Contribute improvements back upstream +- Stand on 
the shoulders of giants + +## Innovation Through Integration + +Our innovation comes from how we combine proven pieces: + +``` +llama.cpp (proven inference) + + GGUF (standard format) + + Electron (cross-platform UI) + + SQLite (local data) + + MCP (extensibility) + = Jan (accessible AI for everyone) +``` + +Each component is battle-tested. The magic is in the combination. + +## Real Problems, Real Solutions + +This philosophy keeps us grounded: + +### What We Build +- AI that runs on the laptop you already own +- Privacy that doesn't require a PhD to understand +- Tools that work offline in rural areas +- Features users actually need today + +### What We Don't Build +- Exotic architectures that need exotic hardware +- Complex systems that break in simple ways +- Features that demo well but fail in practice +- Solutions looking for problems + +## The Compound Effect + +Using proven technology creates compounding benefits: + +1. **Faster Development**: Less time debugging novel approaches +2. **Better Reliability**: Years of battle-testing by others +3. **Easier Adoption**: Users already understand the patterns +4. **Stronger Ecosystem**: Can leverage existing tools and knowledge +5. **Lower Costs**: Commodity hardware and proven optimizations + +## Learning from History + +Great innovations often use "old" technology in new ways: +- The iPhone used existing touchscreen tech +- Tesla started with laptop batteries +- SpaceX used proven rocket designs more efficiently +- The web succeeded using simple protocols + +Jan follows this tradition: proven AI inference, standard formats, and simple principles—combined in a way that makes AI accessible to everyone. 
+ +## Building for the Long Term + +Proven technology has staying power: +- Standards that last decades +- Protocols that survive company failures +- Formats that outlive their creators +- Communities that maintain forever + +By building on these foundations, Jan can focus on what matters: making AI useful, private, and accessible to everyone. + +## Our Commitment + +We commit to: +- **Practical over Perfect**: Ship what works today +- **Simple over Sophisticated**: Elegant solutions to real problems +- **Proven over Promising**: Build on solid foundations +- **Accessible over Advanced**: Reach everyone, not just enthusiasts + +## The Future is Already Here + +The technologies needed for private, local AI already exist. They're proven, optimized, and ready. Our job isn't to invent new technology—it's to combine what exists in ways that serve humanity. + +That's lateral thinking. That's how we build the future: not by reaching for tomorrow's technology, but by using today's technology in tomorrow's ways. + +--- + +*"The best technology is invisible. It just works, everywhere, for everyone."* diff --git a/website/src/content/docs/handbook/philosophy/open-superintelligence.mdx b/website/src/content/docs/handbook/philosophy/open-superintelligence.mdx new file mode 100644 index 000000000..2fb1ecd35 --- /dev/null +++ b/website/src/content/docs/handbook/philosophy/open-superintelligence.mdx @@ -0,0 +1,213 @@ +--- +title: "Open Superintelligence Platform" +description: "Building superintelligence as an ecosystem you can own and deploy anywhere" +--- + + +> "Superintelligence isn't one massive model. It's an ecosystem of specialized models, tools, and applications working together - and you should own all of it." — Jan Philosophy + +## What is Open Superintelligence? + +Open superintelligence is AI that adapts to how you want to work, not the other way around. It's not about forcing you to choose between local or cloud, powerful or private, easy or extensible. 
It's about having it all, and owning it. + +### The Ecosystem Approach + +We're not trying to build GPT-5. We're building something better: specialized models that excel at specific tasks, tools that extend their capabilities, and applications that tie it all together. + +``` +Models (specialized for tasks) + + Tools (extend capabilities) + + Applications (work everywhere) + = Superintelligence you own +``` + +## Why Not One Giant Model? + +The "one model to rule them all" approach has fundamental flaws: + +- **Inefficient**: Using the same weights for poetry and mathematics +- **Inflexible**: Can't optimize for specific use cases +- **Expensive**: Massive compute for simple tasks +- **Monolithic**: Single point of failure and control + +### Our Approach: Specialized Excellence + +- **Jan-Search**: Knows how to find and synthesize information +- **Jan-Write**: Understands structure, tone, and creativity +- **Jan-Analyze**: Excels at reasoning and data interpretation +- **Jan-Code**: Optimized for programming tasks + +Each model does one thing brilliantly. Together, they form superintelligence. + +## The Three Pillars + +### 1. Models That Adapt + +Not just our models, but any model you need: +- **Jan Models**: Purpose-built for specific tasks +- **Community Models**: Any GGUF from Hugging Face +- **Cloud Models**: Connect to OpenAI, Anthropic when needed +- **Your Models**: Fine-tune and deploy your own + +### 2. Tools That Extend + +Models need capabilities beyond conversation: +- **Search**: Get answers, not just links +- **DeepResearch**: Autonomous multi-step investigation +- **BrowserUse**: Let AI interact with the web +- **MCP Protocol**: Connect any data source or API + +### 3. 
Applications That Scale + +Same experience, infinite deployment options: +- **Jan Desktop**: Your local AI workstation +- **Jan.ai**: Browser-based, no setup required +- **Jan Mobile**: AI that follows you (coming soon) +- **Jan Server**: Self-hosted for teams (coming soon) + +## Ownership Through Flexibility + +True ownership means having choices: + +### Choose Your Deployment +- **Full Local**: Everything on your hardware, works offline +- **Full Cloud**: We host it, you just use it +- **Hybrid**: Local for sensitive work, cloud for heavy compute +- **Self-Hosted**: Your servers, your control + +### Choose Your Models +- Use our models when they're best +- Use community models when they fit +- Use cloud models when you need them +- Train your own when you want to + +### Choose Your Scale +- **Personal**: Run on your laptop +- **Team**: Deploy on your server +- **Enterprise**: Scale across infrastructure +- **Global**: Distribute to the edge + +## Building in the Open + +We don't just open source our code. 
We open source our entire development process: + +### Watch Us Train +Live at [train.jan.ai](https://train.jan.ai): +- See models training in real-time +- View our datasets and methods +- Learn from our failures +- Track actual progress + +### Test With Us +Evaluate at [eval.jan.ai](https://eval.jan.ai): +- Compare model outputs +- Vote on what works +- Suggest improvements +- Access all evaluation data + +### Build With Us +Everything on [GitHub](https://github.com/janhq): +- Contribute features +- Report issues +- Fork and modify +- Join discussions + +## The Network Effect of Openness + +Open systems create compound benefits: + +### For Users +- More models to choose from +- More tools to extend capabilities +- More deployment options +- More control over your AI + +### For Developers +- Build on a stable platform +- Access to growing ecosystem +- No vendor lock-in fears +- Community support + +### For Organizations +- Deploy how you need +- Customize for your use case +- Control your costs +- Own your infrastructure + +## Quality Without Compromise + +**Jan Exam** ensures excellence across the ecosystem: +- Objective benchmarks +- Real-world testing +- Community validation +- Transparent results + +Whether it's our model or someone else's, if it performs well, it belongs on Jan. + +## The Path Forward + +### Today (Available Now) +- Jan Desktop with local and cloud models +- Basic tools via MCP +- Growing model ecosystem +- Active community + +### Next 12 Months +- Jan v1 specialized models +- Advanced tools (search, research, browser) +- Jan Server for teams +- Mobile applications + +### The Vision +- Models that understand your context +- Tools that act autonomously +- Applications that work everywhere +- An ecosystem owned by its users + +## Why This Wins + +### Against Closed Platforms +- No vendor lock-in vs. total dependence +- Own forever vs. rent forever +- Infinite flexibility vs. their way only +- Community innovation vs. 
corporate roadmap + +### Against DIY Open Source +- Complete ecosystem vs. fragmented tools +- Works out of box vs. endless configuration +- Unified experience vs. duct-tape integration +- Professional polish vs. research prototypes + +## Join the Revolution + +This isn't just about building better AI. It's about ensuring AI serves humanity, not the other way around. + +When you use Jan, you're not just a user. You're part of a movement building: +- AI that works anywhere +- Intelligence you can own +- Tools that adapt to you +- A future without gatekeepers + +## Our Promise + +We promise to build superintelligence that: + +1. **Works Everywhere**: From laptop to data center to edge +2. **Belongs to You**: Download it, modify it, own it forever +3. **Stays Open**: Core will always be open source +4. **Keeps Improving**: Community-driven development +5. **Respects Choice**: Your deployment, your rules + +## The Bottom Line + +Superintelligence shouldn't be controlled by a few companies. It shouldn't force you to choose between power and ownership. It shouldn't lock you into one way of working. + +With Jan, it doesn't. + +You get superintelligence that adapts to how you work, runs where you need it, and belongs to you completely. + +That's open superintelligence. That's what we're building. + +--- + +*"The best AI platform is one where choosing local or cloud is like choosing between laptop or desktop - same experience, different form factor, your choice."* diff --git a/website/src/content/docs/handbook/philosophy/optimism.mdx b/website/src/content/docs/handbook/philosophy/optimism.mdx new file mode 100644 index 000000000..a0221e026 --- /dev/null +++ b/website/src/content/docs/handbook/philosophy/optimism.mdx @@ -0,0 +1,38 @@ +--- +title: "Perpetual Optimism is a Force Multiplier" +description: "We believe in perpetual optimism." 
+--- + +import YouTube from '@/components/YouTube.astro' + + + +> "Success consists of going from failure to failure without loss of enthusiasm." — Winston Churchill + +In 1903, [we barely left the ground](https://en.wikipedia.org/wiki/Wright_Flyer). By 1969, [we stood on the moon](https://en.wikipedia.org/wiki/Apollo_11). What once seemed impossible became reality through persistence and optimism. + +## Augmenting Humanity + +- We reject AI doomerism. Focus on possibilities, not fear +- Children represent our hope. We build for their future +- Humanity progresses faster than it feels +- AI is a tool—like electricity or the telephone. It's not the first revolution, nor the last +- History shows resilience. We adapt, mitigate risks, and move forward +- Airplanes once terrified—yet they helped humanity reach the moon and connect the world + +## AI Safety + +- Safety is non-negotiable. Protecting people is the baseline + - AI safety == human safety. If we haven’t solved human safety, we haven’t solved AI safety. + - AI alignment == human alignment. Misaligned societies can’t build aligned systems. +- AI safety requires human alignment first +- Fear and progress must be balanced—panic stalls; awareness guides +- Building for our kids ensures safety is built-in, purpose drives caution +- Airplanes once terrified—yet they helped humanity reach the moon and connect the world + +## Why we believe in optimism + +- Optimism drives solutions; fear paralyzes +- Hope fuels persistence. Failures aren't endpoints +- Every breakthrough began as a dream. 
We build toward better because we believe in it +- Perpetual optimism multiplies effort and impact diff --git a/website/src/content/docs/handbook/sell/marketing.mdx b/website/src/content/docs/handbook/sell/marketing.mdx new file mode 100644 index 000000000..71b4bd77b --- /dev/null +++ b/website/src/content/docs/handbook/sell/marketing.mdx @@ -0,0 +1,94 @@ +--- +title: "Marketing" +description: "How we structure marketing to drive revenue and growth" +asIndexPage: true +--- + +import YouTube from '@/components/YouTube.astro'; + +> "Marketing is about values." - Steve Jobs + + + +Without a strategy, marketing is: +- throwing social media posts with reach anxiety +- starting podcasts that no one listens +- writing newsletter that even team members don't read + +Marketing is telling your own words in some channels for a purpose. Without a purpose, it's just noise - like how some do. + +Before starting we should align on some terms: + +- **Framework**: The blueprint that organizes our marketing efforts +- **Purpose**: The fundamental reason behind our marketing activities +- **Campaign**: Marketing actions +- **Goal**: The specific, measurable result we aim to achieve through our marketing activities +- **Brief**: The document outlining how we're executing a campaign + +## Framework(s) + +Marketings looks like art, must be managed like math. At Jan, we follow some frameworks for alignment. + +Our marketing efforts consist of 3 purposes and each marketing action must target at least one: + +1. Grow the market +2. Increase the market share +3. Capture market share in a more efficient way + +Each purpose requires campaigns with clear goals. 
Goal types: + +- KPI Goals +- Project & Campaign Goals +- Experiment Goals +- Hiring Goals + +Campaign executions must leave no questions, so each marketing campaign requires a brief format: + +- **Goals**: KPIs, timeline, relevant OKRs +- **Audience**: Who we're targeting +- **Creatives**: Messaging & creative assets +- **Channels**: Distribution +- **Ownership**: Who is involved + +## Positioning + +Marketing starts with positioning - we always think thorough where to sit in the market before new launches. + +No one cares about product functions, it's all about what you provide. If your positioning requires explanation, it isn't working. We never talk about what our product does until we've established what problem it eliminates. + +We start with a positioning: + +- What is our product/service/platform? + - In customer language, what is it? + - What pain point do we eliminate? + - What we improve? +- Who is this for? + - Who benefits most from this solution? + - What characteristics define this segment? +- Why is it better? + - What are the other players? + - How do we outperform alternatives? + - What makes us uniquely valuable here? + +## Big no's on marketing + +We're playing our game, not theirs. 
+ +- Throwing out marketing activities to see what sticks +- Burning money at Ads +- Random posts +- Copying what others do +- Actions without planning or goals +- Prioritizing paid activities over organic +- Jumping on hypes over strategy + +## Big yes's on marketing + +- Growing together with others +- Playing our game in the highest level +- Listening selectively - Absorb market feedback, but filter it through strategy +- Adding value to what we're working on +- Repurposing content +- Being opinionated about the differentiation and why we're doing +- Understanding the technical aspects at a level that explains a child +- Being aware of the target audience and the messaging diff --git a/website/src/content/docs/handbook/team/index.mdx b/website/src/content/docs/handbook/team/index.mdx new file mode 100644 index 000000000..1465f6e86 --- /dev/null +++ b/website/src/content/docs/handbook/team/index.mdx @@ -0,0 +1,70 @@ +--- +title: "Team Structure" +description: "How Jan's team is organized" +--- + +Jan is an open-source AI platform built by a distributed team focused on making AI +accessible, private, and locally-run. + +Jan is currently ~8 people working across different areas to build the open +superintelligence platform. + +## Jan Desktop & Platform + +Building the core Jan application and platform infrastructure. + +### Engineering +- **Core Platform**: Building Jan Desktop and core infrastructure +- **Dev Relations**: Ramon - Community engagement and developer experience +- **QA & Release**: Ensuring quality and stability across releases + +### Model Team + +Focused on making the best models available to Jan users. 
+ +| Focus Area | Team Members | Description | +|------------|--------------|-------------| +| **Model Hub** | Thien | Leading model quantization and evaluation for Jan's Local Model Hub - ensuring Jan supports the latest and best open source models | +| **Jan Models** | Alan, Alex, Bách, Warren | Developing Jan's flagship model to be the default in Jan Desktop | +| **Advanced Features** | Thinh, Norapat | Working with Ramon to spec and implement:
• Jan Attachments
• Jan DeepResearch
• Jan Voice Mode | +| **Engineering** | Akarshan, Louis, Dinh | Building the core of Jan | +| **Design** | Faisal | Making Jan look gooood, function well, and be accessible to everyone | +| **Product & DevRel** | Yuuki and Ramon | Thinking about the future of Jan and how to best communicate it to users | +| **Management** | Yuuki | Keeping people in check by threatening everyone with corporate speak | +| **Marketing** | Emre | Spreading the word about Jan and its capabilities | +| **Infra** | Minh and Hien| They make things run smoothly | + + +## Infrastructure & Operations + +### Technical Infrastructure +- **Jan Cloud**: Cloud infrastructure for optional services +- **Hardware**: Optimizing for local hardware performance +- **CI/CD**: Continuous integration and deployment + +### Business Operations + +| Team | Focus | Members | +|------|-------|---------| +| **Marketing** | Brand, growth, and community | Emre | +| **Product** | Product strategy and roadmap | Ramon | +| **Finance & HR** | Operations and people | Nicole, finance team | + +## Working Groups + +Cross-functional teams that form around specific initiatives: + +- **Model Evaluation**: Jan Exam benchmarking and quality assurance +- **Community Features**: Building based on user feedback +- **Open Standards**: MCP and other protocol development + +## Our Approach + +- **Open Development**: Build in public with community input +- **Quality First**: Jan Exam as our north star for model quality +- **User Focused**: Every role ultimately serves our users' need for private, local AI +- **Flexible Structure**: Teams adapt based on current priorities and user needs + +## Join Us + +Interested in joining our mission to build open superintelligence? Check out our [careers page](https://jan.ai/careers) or contribute to our [open source projects](https://github.com/janhq). 
diff --git a/website/src/content/docs/handbook/who/curiosity.mdx b/website/src/content/docs/handbook/who/curiosity.mdx new file mode 100644 index 000000000..9a0e8378d --- /dev/null +++ b/website/src/content/docs/handbook/who/curiosity.mdx @@ -0,0 +1,139 @@ +--- +title: "The Fast and the Curious" +description: "We hire people who move quickly and never stop learning" +--- + +> "In the AI age, the ability to learn faster than the rate of change is the only sustainable advantage." — Adapted from Eric Ries + +We hire people who are both fast and curious—those who ship today while learning for tomorrow. + +## Ship Fast, Ship Often + +Speed is our advantage in the rapidly evolving AI landscape: + +- **Weekly Releases**: We ship updates to Jan every week, not every quarter +- **Rapid Experimentation**: Test ideas with real users, not focus groups +- **Fail Fast, Learn Faster**: Kill features that don't work, double down on what does +- **User Feedback Loop**: From idea to user's hands in days, not months + +### What Fast Means at Jan + +- **Bias for Action**: See a problem? Fix it. Don't wait for permission. 
+- **MVP Mindset**: Launch at 80% perfect, iterate to 100% +- **Quick Decisions**: Make reversible decisions quickly, deliberate only on irreversible ones +- **Async by Default**: Don't let time zones slow us down + +We've shipped: +- Major features in days that others debate for months +- Model support hours after release, not weeks +- Bug fixes while users are still typing the report + +## Stay Endlessly Curious + +In AI, yesterday's breakthrough is today's baseline: + +### Learning is Non-Negotiable + +- **New Models Weekly**: Understand and integrate the latest AI advances +- **Cross-Domain Knowledge**: From quantization techniques to UI design +- **Community Learning**: Our users teach us as much as we teach them +- **Open Source Study**: Learn from the best codebases in the world + +### Curiosity in Practice + +- **Why Over What**: Don't just implement—understand the reasoning +- **Question Everything**: "Why do we collect user data?" led to our privacy-first approach +- **Learn in Public**: Share discoveries with the community +- **Teach to Learn**: Explaining concepts deepens understanding + +## The Jan Learning Culture + +### Everyone is a Student + +- **No Experts**: In a field moving this fast, everyone is learning +- **Share Knowledge**: Daily discoveries in our Discord channels +- **Document Learning**: Today's experiment is tomorrow's documentation +- **Celebrate Questions**: The "stupid" question often reveals the biggest insight + +### Everyone is a Teacher + +- **Onboarding**: New hires teach us fresh perspectives +- **Community Education**: Blog posts, tutorials, and demos +- **Code as Teaching**: Well-commented code educates future contributors +- **Failure Stories**: Share what didn't work and why + +## What We Look For + +### Signs of Speed + +- **GitHub Velocity**: Frequent commits, quick iterations +- **Project Completion**: Finished projects, not just started ones +- **Response Time**: Quick to engage, quick to deliver +- **Adaptation 
Speed**: How fast do you integrate feedback? + +### Signs of Curiosity + +- **Side Projects**: What do you build for fun? +- **Learning Artifacts**: Blogs, notes, or projects showing learning +- **Question Quality**: Do you ask insightful questions? +- **Knowledge Breadth**: Interests beyond your specialty + +## Why This Matters for Jan + +### AI Moves Too Fast for Slow + +- Models improve monthly +- User expectations evolve weekly +- Competition ships daily +- Standards change quarterly + +If we're not fast and curious, we're obsolete. + +### Local-First Demands Both + +- **Fast**: Users expect immediate responses, not cloud latency +- **Curious**: Supporting every model requires understanding each one +- **Fast**: Privacy bugs need instant fixes +- **Curious**: New quantization methods need quick adoption + +## The Compound Effect + +Fast + Curious creates exponential growth: + +``` +Ship Fast → User Feedback → Learn → +Ship Smarter → More Users → More Learning → +Ship Even Faster → Compound Growth +``` + +Each cycle makes us: +- Faster at shipping +- Better at learning +- More valuable to users +- More attractive to talent + +## Join Us If... + +- You've shipped something this week (not this year) +- You've learned something new today (not last month) +- You see a Jan issue and think "I could fix that" +- You read our codebase and think "I could improve that" +- You use Jan and think "It could also do this" + +## The Promise + +If you join Jan as someone fast and curious, in a year you'll be: +- **Faster**: Shipping features you can't imagine today +- **Smarter**: Understanding AI at a level that surprises you +- **Connected**: Part of a global community of builders +- **Impactful**: Your code running on millions of devices + +## The Bottom Line + +We don't hire for what you know today. We hire for how fast you'll know what matters tomorrow. + +In the race to build open superintelligence, the fast and curious don't just keep up—they set the pace. 
+ +--- + +*"At Jan, we measure progress in iterations per week, not years of experience."* diff --git a/website/src/content/docs/handbook/who/dydx.mdx b/website/src/content/docs/handbook/who/dydx.mdx new file mode 100644 index 000000000..93c39d274 --- /dev/null +++ b/website/src/content/docs/handbook/who/dydx.mdx @@ -0,0 +1,28 @@ +--- +title: "dy/dx > y-intercept" +description: "The rate of a function is more important than its starting point." +--- + +> "A little bit of slope makes up for a lot of Y-intercept." — John Ousterhout + +The rate of a function is more important than its starting point. We value growth +rate over initial advantage. + +## Why slope matters + +- Knowledge fades, but the ability to learn compounds +- A fast learner will outpace someone with more initial experience over time +- Slow, steady improvement outperforms quick starts with no growth + +## What it means day-to-day + +- Don't hesitate to try something new—even if you start clueless. Learning speed matters +- Mistakes are expected. Learning from them is required +- We'll invest in helping you grow, but the drive has to come from you +- Your trajectory is more important than where you begin + +## Why we believe in this + +- Building something lasting requires patience and commitment to improvement +- We're not interested in shortcuts. We value the work that compounds quietly until it becomes obvious +- If Jan's greatest impact is helping people and ideas grow steadily over time—that's the kind of success we stand for diff --git a/website/src/content/docs/handbook/who/obsession.mdx b/website/src/content/docs/handbook/who/obsession.mdx new file mode 100644 index 000000000..3b14c2da2 --- /dev/null +++ b/website/src/content/docs/handbook/who/obsession.mdx @@ -0,0 +1,35 @@ +--- +title: "Obsession" +description: "We seek obsession." +--- + +> "If you want to build a ship, don't drum up the men to gather wood, divide the work, and give orders. 
Instead, teach them to yearn for the vast and endless sea." — Antoine de Saint-Exupéry + +We don't hire for skills alone. We hire for obsession. + +## Find the obsessed + +- Breakthroughs require long-term focus, not fleeting interest +- Failure isn't the end—just another step in relentless experimentation +- People who obsess push beyond limits others accept + +## We seek those already consumed + +- You're already working on the problem—because you can't not +- We don't assign you a job; we support your life's work +- Obsession isn't just personal—it becomes more powerful in a team +- Together, a group of obsessives becomes greater than the sum of its parts + +## The best hires share common traits + +- Desperation: Solving the problem feels like a need, not a choice +- Relentlessness: You try, fail, adapt—again and again +- Defiance: Others call it crazy; we call it genius + +## We're looking for the dreamers + +- That deep, persistent song you've been trying to sing? We hear it +- Maybe you've been mocked, dismissed, or misunderstood +- We seek those lonely, weird dreamers who refuse to give up + +Wherever you are in the world, if this feels like you—apply here diff --git a/website/src/content/docs/handbook/who/team.mdx b/website/src/content/docs/handbook/who/team.mdx new file mode 100644 index 000000000..952780265 --- /dev/null +++ b/website/src/content/docs/handbook/who/team.mdx @@ -0,0 +1,26 @@ +--- +title: "Jan is a Sports Team" +description: "Our approach is super simple: Retain. Build trust. Win together." +--- + +> "First, you need to learn how to play. You need 2-3 years together to learn how to play with each other." — [Nikola Jokić](https://www.reddit.com/r/NBASpurs/comments/1cdscti/wise_words_from_jokic_first_you_need_to_learn_how/) + +Jan is a sports team, not unlike a NBA team or a NFL team. We focus on putting the best players on the court, focusing on their 1% strengths, and building a team that can win together. + +## Team vs. 
Family + +We learn the best principles from [Netflix Culture Deck](https://www.slideshare.net/slideshow/culture-1798664/1798664): + +- We operate like a pro sports team +- There are limited spots on every team — every roster spot counts +- We hire, develop and cut smartly + +## Continuity + +However, we balance this with our belief in Continuity and Stability: + +- Teams with psychological safety take better, long-term risks +- Teams need 2–3 years to truly gel +- Continuity matters; balance change carefully +- Cohesive teams outperform constant reshuffles +- Time builds trust, speed, and better decisions diff --git a/website/src/content/docs/handbook/who/underdogs.mdx b/website/src/content/docs/handbook/who/underdogs.mdx new file mode 100644 index 000000000..f2db21c51 --- /dev/null +++ b/website/src/content/docs/handbook/who/underdogs.mdx @@ -0,0 +1,34 @@ +--- +title: "Underdogs" +description: "We hire from unconventional backgrounds." +--- + +> "I am, somehow, less interested in the weight and convolutions of Einstein's brain than in the near certainty that people of equal talent have lived and died in cotton fields and sweatshops." — Stephen Jay Gould + +We hire from unconventional backgrounds. + +## Opportunity isn't equal + +- Where you're born shapes your chances—but it shouldn't define them +- Talent is everywhere. Opportunity isn't +- We believe the internet has changed how we work and live together. Our team—spread across 10 cities worldwide—couldn't have existed just five years ago +- Fast connections (5G and beyond) mean geography no longer decides your future. We'll find you—wherever you are + +## We seek the overlooked + +- Lonely geniuses, relentless tinkerers, people with potential others missed +- We don't care where you went to school—or if you went at all +- We bet on people like startups: high risk, high potential +- Skills can be taught; drive cannot. 
We'll teach you to communicate and collaborate +- We trust you'll prove yourself through your work and integrity + +## Meaningful work, close to home + +- We create jobs that matter, in the places people call home +- If Jan's greatest achievement is enabling people to build meaningful careers in their hometowns—that would be success worth celebrating + +## This is your shot + +- Underdogs, dreamers, the overlooked—this place is for you +- We don't just welcome you. We're looking for you +- Wherever you are in the world—apply here diff --git a/website/src/content/docs/handbook/why/icp.mdx b/website/src/content/docs/handbook/why/icp.mdx new file mode 100644 index 000000000..0393dfc41 --- /dev/null +++ b/website/src/content/docs/handbook/why/icp.mdx @@ -0,0 +1,112 @@ +--- +title: "Who we are building for" +description: "Building for people who want to own their AI and choose how to run it" +--- + +Jan is built for people who want control over their AI experience. Our users span from individual developers to global enterprises, united by a desire to own their AI infrastructure and choose how they deploy it. 
+ +## Primary Users + +### Developers Who Want Flexibility +Engineers and creators who need: +- The freedom to run models locally or in the cloud +- Complete control over their AI stack +- The ability to switch providers without losing their work +- Integration options that fit their existing workflows + +### Individuals Who Value Ownership +People who understand that AI should be like any other software: +- Download it once, use it forever +- Your conversations and data belong to you +- No vendor lock-in or subscription traps +- The freedom to modify and extend as needed + +### Teams That Need Options +Organizations that require: +- The flexibility to run on-premises or in the cloud +- The ability to scale from laptop to server cluster +- Control over costs and infrastructure +- Options that adapt to changing requirements + +### Global Users Who Need Access +Millions of potential AI users who face: +- Expensive or unreliable cloud services +- The need for AI that works offline +- Hardware limitations that require optimization +- Different deployment needs for different situations + +## Our North Star: Best Experience, Your Way + +We use **Jan Exam** as our benchmark to ensure quality across all deployment options. 
Whether you're running locally on a laptop or scaled across a server farm, you get: + +- The same powerful AI capabilities +- Consistent performance for your hardware +- The flexibility to change your setup anytime +- No compromise between local and cloud + +## The Flexibility Spectrum + +Our users choose their own balance: + +### Full Local +- Everything runs on your hardware +- Complete offline capability +- Total data ownership +- Zero recurring costs + +### Hybrid Approach +- Local for sensitive work +- Cloud for heavy compute +- Seamless switching between modes +- Optimize cost vs performance + +### Full Cloud +- Jan.ai for zero setup +- Team collaboration features +- Managed infrastructure +- Scale without limits + +## What Sets Our Users Apart + +They understand that: +- **Ownership matters**: Your AI tools should be assets, not rentals +- **Flexibility is power**: Different tasks need different approaches +- **Control creates value**: The ability to customize and extend is crucial +- **Choice prevents lock-in**: Multiple options keep you free + +## Real User Scenarios + +### The Solo Developer +Runs Jan locally during development, uses cloud for production deployments. Owns their entire stack. + +### The Research Team +Uses local models for sensitive data, cloud models for general research. Switches seamlessly based on needs. + +### The Enterprise +Deploys Jan Server on-premises for compliance, uses Jan.ai for non-critical workloads. One platform, multiple deployment options. + +### The Student +Runs lightweight models locally on their laptop, accesses more powerful models via cloud when needed. Learns AI without breaking the bank. 
+ +## Growing With Our Users + +We build for users at every stage: + +**Day 1**: Download Jan, start chatting locally +**Week 1**: Connect to cloud providers for more power +**Month 1**: Deploy your own server for team access +**Year 1**: Scale across infrastructure you control + +## The Future User + +We're building for the next billion AI users who will expect: +- Software they can own, not just rent +- The freedom to run AI anywhere +- Tools that adapt to their needs +- No artificial limitations + +They'll start with Jan because it gives them choices, and they'll stay because those choices grow with them. + +--- + +*"The best AI is the one that works where you need it, how you need it, when you need it."* diff --git a/website/src/content/docs/handbook/why/ownership.mdx b/website/src/content/docs/handbook/why/ownership.mdx new file mode 100644 index 000000000..67e90b34c --- /dev/null +++ b/website/src/content/docs/handbook/why/ownership.mdx @@ -0,0 +1,180 @@ +--- +title: "Our Vision for Open Superintelligence" +description: "Building superintelligence that you can own, modify, and deploy however you want" +--- + +Jan believes the path to superintelligence isn't through one massive model controlled by a few companies. It's through an ecosystem of specialized models, tools, and applications that anyone can own and deploy. + +## What is Open Superintelligence? 
+ +Open superintelligence is AI that: + +- **Works Everywhere**: From your laptop to your data center to the edge +- **Belongs to You**: Download it, modify it, deploy it - it's yours +- **Scales Infinitely**: Same AI whether you're one person or ten thousand +- **Evolves with Community**: Thousands of contributors, not hundreds of employees + +## The Ecosystem Approach + +Superintelligence isn't one thing - it's many things working together: + +### Specialized Models +Instead of one model trying to do everything: +- **Jan-Search** excels at finding and synthesizing information +- **Jan-Write** understands creativity and structure +- **Jan-Analyze** specializes in reasoning and data +- **Jan-Code** focuses on programming tasks + +Each model does one thing brilliantly, and they hand off tasks to each other. + +### Extensible Tools +Models alone aren't enough. Our tools make them useful: +- Search the web and get answers, not links +- Automate browser tasks naturally +- Parse documents and understand context +- Connect to your data sources via MCP + +### Flexible Deployment +One codebase, infinite configurations: +- **Local Mode**: Everything on your hardware +- **Server Mode**: Self-host for your team +- **Cloud Mode**: We host it for you +- **Hybrid Mode**: Mix and match as needed + +## Why Ownership Matters + +When you own your AI infrastructure: + +### You Control the Experience +- Customize models for your specific needs +- Build workflows that match how you work +- Integrate with your existing tools +- Scale based on your requirements + +### You Keep What You Build +- Your prompts and conversations +- Your fine-tuned models +- Your custom workflows +- Your accumulated knowledge + +### You Choose How to Run +- Offline when you need privacy +- Cloud when you need power +- Local when you need speed +- Distributed when you need scale + +## Building in Public + +We're not building this in secret. 
Everything is open: + +### Watch Us Train +See our models being trained in real-time at [train.jan.ai](https://train.jan.ai): +- Live loss curves +- Training datasets +- Failed experiments +- Actual progress + +### Test With Us +Help evaluate models at [eval.jan.ai](https://eval.jan.ai): +- Compare outputs side-by-side +- Vote on what actually works +- Suggest improvements +- See all the data + +### Build With Us +Everything on [GitHub](https://github.com/janhq): +- Contribute code +- Report issues +- Suggest features +- Fork and modify + +## The Path to Superintelligence + +### Today: Foundation (✅) +- Jan Desktop works with local and cloud models +- Basic tools via MCP +- Community growing rapidly + +### Next 12 Months: Ecosystem +- Jan v1 models optimized for specific tasks +- Jan Server for self-hosted deployments +- Advanced tools (browser automation, deep research) +- Cross-device synchronization + +### Future: True Superintelligence +- Models that understand your context +- Tools that act on your behalf +- Applications that work everywhere +- An ecosystem that belongs to everyone + +## Our Commitment + +We commit to building superintelligence that: + +### Stays Open +- Core will always be open source +- Models will always be downloadable +- Standards will always be public +- Development will always be transparent + +### Stays Flexible +- Run it anywhere you have compute +- Mix local and cloud as needed +- Scale up or down instantly +- Switch providers without friction + +### Stays Yours +- No vendor lock-in +- No forced updates +- No subscription traps +- No data exploitation + +## Why This Approach Wins + +### Network Effects +Every user makes Jan better: +- Bug reports improve stability +- Feature requests guide development +- Model feedback enhances quality +- Community support helps everyone + +### Compound Innovation +Open development accelerates progress: +- Best ideas come from anywhere +- Solutions shared instantly +- Problems solved collectively +- 
Innovation happens 24/7 + +### Aligned Incentives +We succeed when you succeed: +- Your productivity is our metric +- Your ownership is our philosophy +- Your freedom is our product +- Your success is our business model + +## Join the Movement + +This isn't just about building better AI. It's about ensuring the most powerful technology in human history belongs to humanity, not corporations. + +Whether you: +- Use Jan for daily work +- Contribute code or ideas +- Share it with others +- Build on top of it +- Deploy it in your organization + +You're part of building superintelligence that everyone can own. + +## The Choice is Yours + +In five years, AI will be everywhere. The question is: + +**Will you rent intelligence from a monopoly, or will you own your own superintelligence?** + +With Jan, you don't have to choose between powerful and private, between cloud and local, between easy and extensible. + +You get it all. And it's yours. + +--- + +*"The best superintelligence is the one you can run anywhere, modify for anything, and own forever."* diff --git a/website/src/content/docs/handbook/why/problem.mdx b/website/src/content/docs/handbook/why/problem.mdx new file mode 100644 index 000000000..26a7b08d1 --- /dev/null +++ b/website/src/content/docs/handbook/why/problem.mdx @@ -0,0 +1,106 @@ +--- +title: "What problem are we solving?" 
+description: "Building superintelligence that you can own and run anywhere" +--- + +Current AI platforms force unnecessary trade-offs: + +- **All or Nothing**: Either use their cloud or build everything yourself from scratch +- **Vendor Lock-in**: Your prompts, workflows, and knowledge trapped in one platform +- **One-Size-Fits-All**: Same solution whether you're a student or an enterprise +- **Rent Forever**: Monthly subscriptions for software that should be yours +- **Limited Deployment**: Can't run where you need it - offline, on-premises, or at the edge + +## Mission + +Jan is building a complete AI ecosystem that adapts to how you want to work. We believe AI should be: + +- **Truly Owned**: Like any software, download it and it's yours forever +- **Infinitely Flexible**: Run locally, self-host, or use our cloud - same experience +- **Professionally Complete**: Not just models, but the full stack of tools and applications +- **Universally Deployable**: From your laptop to your data center to the edge + +## The Full Stack Approach + +We're not building another chatbot. We're building superintelligence you can own: + +### Models That Specialize +- **Jan Models**: Purpose-built for specific tasks (search, analysis, writing) +- **Community Models**: Any GGUF model from Hugging Face +- **Cloud Models**: Connect to OpenAI, Anthropic, or others when needed +- **Your Models**: Fine-tune and deploy your own + +### Tools That Extend +- **Search**: Get answers, not just links +- **DeepResearch**: Multi-step autonomous research +- **BrowserUse**: Let AI interact with the web for you +- **MCP Integration**: Connect to any data source or service + +### Applications That Scale +- **Jan Desktop**: Your local AI workstation +- **Jan.ai**: Web access with no setup +- **Jan Mobile**: AI that follows you (coming soon) +- **Jan Server**: Your own private AI cloud (coming soon) + +## Why This Matters + +In 5 years, AI will be embedded in everything you do. 
The question isn't whether you'll use AI, but how: + +**Option A**: Rent access from tech giants, accept their limitations, lose your data when you stop paying + +**Option B**: Own your AI infrastructure, deploy it anywhere, keep everything you build + +We're making Option B not just possible, but better than Option A. + +## The Problem With Status Quo + +### Cloud-Only Platforms +- Can't work offline +- Can't handle sensitive data +- Can't customize deeply +- Can't control costs + +### DIY Open Source +- Requires deep technical knowledge +- Fragmented tools and models +- No unified experience +- Massive integration overhead + +### Jan's Solution +- Download and run in minutes +- Complete ecosystem out of the box +- Scale from laptop to cluster +- Switch between local and cloud seamlessly + +## Real Problems We Solve Today + +### For Developers +"I want to use AI in my app but don't want vendor lock-in" +→ OpenAI-compatible API that runs anywhere + +### For Teams +"We need AI but can't send data to the cloud" +→ Self-host Jan Server on your infrastructure + +### For Individuals +"I want ChatGPT capabilities without the subscription" +→ Download Jan Desktop and own your AI + +### For Enterprises +"We need to scale AI across different deployment scenarios" +→ One platform that works from edge to cloud + +## Vision + +We envision a future where: + +- AI runs wherever you have compute - laptop, phone, server, edge device +- You own your AI infrastructure like you own your other software +- Models, tools, and applications work together seamlessly +- The best AI isn't gated behind corporate APIs + +Jan is the platform that makes this future real, today. + +--- + +*"The future of AI isn't about choosing between local or cloud. 
It's about having both, and everything in between, working perfectly together."* diff --git a/website/src/content/docs/index.mdx b/website/src/content/docs/index.mdx deleted file mode 100644 index c4caf13d0..000000000 --- a/website/src/content/docs/index.mdx +++ /dev/null @@ -1,171 +0,0 @@ ---- -title: Jan -description: Jan is an open-source ChatGPT-alternative and self-hosted AI platform - build and run AI on your own desktop or server. -keywords: - [ - Jan, - Jan AI, - ChatGPT alternative, - OpenAI platform alternative, - local API, - local AI, - private AI, - conversational AI, - no-subscription fee, - large language model, - LLM, - ] ---- - -import { Aside } from '@astrojs/starlight/components'; - -![Jan's Cover Image](../../assets/jan-app-new.png) - - -Jan is a ChatGPT alternative that runs 100% offline on your desktop and (*soon*) on mobile. Our goal is to -make it easy for anyone, with or without coding skills, to download and use AI models with full control and -[privacy](https://www.reuters.com/legal/legalindustry/privacy-paradox-with-ai-2023-10-31/). - -Jan is powered by [Llama.cpp](https://github.com/ggerganov/llama.cpp), a local AI engine that provides an OpenAI-compatible -API that can run in the background by default at `https://localhost:1337` (or your custom port). This enables you to power all sorts of -applications with AI capabilities from your laptop/PC. For example, you can connect local tools like [Continue](https://jan.ai/docs/server-examples/continue-dev) -and [Cline](https://cline.bot/) to Jan and power them using your favorite models. - -Jan doesn't limit you to locally hosted models, meaning, you can create an API key from your favorite model provider, -add it to Jan via the configuration's page and start talking to your favorite models. 
- -### Features - -- Download popular open-source LLMs (Llama3, Gemma3, Qwen3, and more) from the HuggingFace [Model Hub](./jan/manage-models) -or import any GGUF files (the model format used by llama.cpp) available locally -- Connect to [cloud services](./jan/remote-models/openai) (OpenAI, Anthropic, Mistral, Groq, etc.) -- [Chat](./jan/threads) with AI models & [customize their parameters](./jan/explanation/model-parameters) via our -intuitive interface -- Use our [local API server](https://jan.ai/api-reference) with an OpenAI-equivalent API to power other apps. - -### Philosophy - -Jan is built to be [user-owned](about#-user-owned), this means that Jan is: -- Truly open source via the [Apache 2.0 license](https://github.com/menloresearch/jan/blob/dev/LICENSE) -- [Data is stored locally, following one of the many local-first principles](https://www.inkandswitch.com/local-first) -- Internet is optional, Jan can run 100% offline -- Free choice of AI models, both local and cloud-based -- We do not collect or sell user data. See our [Privacy Policy](./privacy). - - - -### Inspirations - -Jan is inspired by the concepts of [Calm Computing](https://en.wikipedia.org/wiki/Calm_technology), and the Disappearing Computer. - -## Acknowledgements - -Jan is built on the shoulders of many open-source projects like: - -- [Llama.cpp](https://github.com/ggerganov/llama.cpp/blob/master/LICENSE) -- [Scalar](https://github.com/scalar/scalar) - -## FAQs - -
-What is Jan? - -Jan is a customizable AI assistant that can run offline on your computer - a privacy-focused alternative to tools like ChatGPT, Anthropic's Claude, and Google Gemini, with optional cloud AI support. - -
- -
-How do I get started with Jan? - -Download Jan on your computer, download a model or add API key for a cloud-based one, and start chatting. For detailed setup instructions, see our installation guides. - -
- -
-Is Jan compatible with my system? - -Jan supports all major operating systems: -- [Mac](./jan/installation/mac#compatibility) -- [Windows](./jan/installation/windows#compatibility) -- [Linux](./jan/installation/linux) - -Hardware compatibility includes: -- NVIDIA GPUs (CUDA) -- AMD GPUs (Vulkan) -- Intel Arc GPUs (Vulkan) -- Any GPU with Vulkan support - -
- -
-How does Jan protect my privacy? - -Jan prioritizes privacy by: -- Running 100% offline with locally-stored data -- Using open-source models that keep your conversations private -- Storing all files and chat history on your device in the [Jan Data Folder](./jan/data-folder) -- Never collecting or selling your data - - - -You can optionally share anonymous usage statistics to help improve Jan, but your conversations are never shared. See our complete [Privacy Policy](./jan/privacy). - -
- -
-What models can I use with Jan? - -- Download optimized models from the [Jan Hub](./jan/manage-models) -- Import GGUF models from Hugging Face or your local files -- Connect to cloud providers like OpenAI, Anthropic, Mistral and Groq (requires your own API keys) - -
- -
-Is Jan really free? What's the catch? - -Jan is completely free and open-source with no subscription fees for local models and features. When using cloud-based models (like GPT-4o or Claude Sonnet 3.7), you'll only pay the standard rates to those providers—we add no markup. - -
- -
-Can I use Jan offline? - -Yes! Once you've downloaded a local model, Jan works completely offline with no internet connection needed. - -
- -
-How can I contribute or get community help? - -- Join our [Discord community](https://discord.gg/qSwXFx6Krr) to connect with other users -- Contribute through [GitHub](https://github.com/menloresearch/jan) (no permission needed!) -- Get troubleshooting help in our [Discord](https://discord.com/invite/FTk2MvZwJH) channel [#🆘|jan-help](https://discord.com/channels/1107178041848909847/1192090449725358130) -- Check our [Troubleshooting](./jan/troubleshooting) guide for common issues - -
- -
-Can I self-host Jan? - -Yes! We fully support the self-hosted movement. Either download Jan directly or fork it on [GitHub repository](https://github.com/menloresearch/jan) and build it from source. - -
- -
-What does Jan stand for? - -Jan stands for "Just a Name". We are, admittedly, bad at marketing 😂. - -
- -
-Are you hiring? - -Yes! We love hiring from our community. Check out our open positions at [Careers](https://menlo.bamboohr.com/careers). - -
diff --git a/website/src/content/docs/jan/index.mdx b/website/src/content/docs/jan/index.mdx new file mode 100644 index 000000000..fce7a8db0 --- /dev/null +++ b/website/src/content/docs/jan/index.mdx @@ -0,0 +1,172 @@ +--- +title: Jan +description: Build, run, and own your AI. From laptop to superintelligence. +keywords: + [ + Jan, + self-hosted AI, + local AI, + open superintelligence, + AI ecosystem, + llama.cpp, + GGUF models, + MCP tools + ] +--- + +import { Aside } from '@astrojs/starlight/components'; + +![Jan Desktop](../../../assets/jan-app-new.png) + +## Jan's Goal + +> Jan's goal is to build superintelligence that you can self-host and use locally. + +## What is Jan? + +Jan is an open-source AI ecosystem that runs on your hardware. + +- **Available Today**: Jan Desktop - run AI models on your computer with zero setup +- **Coming Soon**: Complete ecosystem with specialized models, tools, and cross-device sync + +### The Stack + +**Models**: Specialized AI models trained for real tasks (search, analysis, writing) +**Tools**: Browser automation, web search, memory systems via MCP +**Applications**: Desktop (now), Web/Mobile/Server (coming 2025) + +Everything works together. Everything runs where you want it. + + + +## Core Features + +### Run Models Locally +- Download any GGUF model from Hugging Face +- Use our Jan models optimized for specific tasks +- Automatic GPU acceleration (NVIDIA/AMD/Intel) +- OpenAI-compatible API at `localhost:1337` + +### Connect to Cloud (Optional) +- Your API keys for OpenAI, Anthropic, etc. +- Jan.ai cloud models (coming soon) +- Self-hosted Jan Server + +### Extend with Tools +- Web search via MCP +- Browser automation +- File parsing and analysis +- Custom tool development + +## Architecture + +Jan is built on: +- [Llama.cpp](https://github.com/ggerganov/llama.cpp) for inference +- [Model Context Protocol](https://modelcontextprotocol.io) for tools +- Local-first data storage in `~/jan` + +## Why Jan? 
+ +| Feature | Other AI Platforms | Jan | +|:--------|:-------------------|:----| +| **Deployment** | Their servers only | Your device, your servers, or our cloud | +| **Models** | One-size-fits-all | Specialized models for specific tasks | +| **Data** | Stored on their servers | Stays on your hardware | +| **Cost** | Monthly subscription | Free locally, pay for cloud | +| **Extensibility** | Limited APIs | Full ecosystem with MCP tools | + +## Development Philosophy + +1. **Local First**: Everything works offline. Cloud is optional. +2. **User Owned**: Your data, your models, your compute. +3. **Built in Public**: Watch our models train. See our code. Track our progress. + + + +## System Requirements + +**Minimum**: 8GB RAM, 10GB storage +**Recommended**: 16GB RAM, NVIDIA GPU, 50GB storage +**Supported**: Windows 10+, macOS 12+, Linux (Ubuntu 20.04+) + +## What's Next? + +
+When will mobile/web versions launch? + +- **Jan Web**: Beta Q1 2025 +- **Jan Mobile**: Q4 2025 +- **Jan Server**: Q3 2025 + +All versions will sync seamlessly with your desktop. +
+ +
+What models are available? + +**Now**: +- Any GGUF model from Hugging Face +- Cloud models via API keys (OpenAI, Anthropic, etc.) + +**Coming Q1 2025**: +- Jan-Search: Optimized for web search and synthesis +- Jan-Write: Creative and technical writing +- Jan-Analyze: Data analysis and reasoning + +[Watch live training progress →](https://train.jan.ai) +
+ +
+How does Jan make money? + +- **Local use**: Always free +- **Cloud features**: Optional paid services (coming 2025) +- **Enterprise**: Self-hosted deployment and support + +We don't sell your data. We sell software and services. +
+ +
+Can I contribute? + +Yes. Everything is open: +- [GitHub](https://github.com/janhq/jan) - Code contributions +- [Model Training](https://jan.ai/docs/models) - See how we train +- [Discord](https://discord.gg/FTk2MvZwJH) - Join discussions +- [Model Testing](https://eval.jan.ai) - Help evaluate models +
+ +
+Is this just another ChatGPT wrapper? + +No. We're building: +- Our own models trained for specific tasks +- Complete local AI infrastructure +- Tools that extend model capabilities +- An ecosystem that works offline + +ChatGPT is one model behind an API. Jan is a complete AI platform you own. +
+ +
+What about privacy? + +**Local mode**: Your data never leaves your device. Period. +**Cloud mode**: You choose when to use cloud features. Clear separation. + +See our [Privacy Policy](./privacy). +
+ +## Get Started + +1. [Install Jan Desktop](./jan/installation) - Your AI workstation +2. [Explore Models](./jan/models) - Download and configure +3. [Learn the API](./api-reference) - Build with Jan + +--- + +**Questions?** Join our [Discord](https://discord.gg/FTk2MvZwJH) or check [GitHub roadmap](https://github.com/janhq/jan/). diff --git a/website/src/content/docs/jan/installation/windows.mdx b/website/src/content/docs/jan/installation/windows.mdx index b50abf65f..45e80fad2 100644 --- a/website/src/content/docs/jan/installation/windows.mdx +++ b/website/src/content/docs/jan/installation/windows.mdx @@ -23,9 +23,6 @@ keywords: import { Aside, Tabs, TabItem } from '@astrojs/starlight/components'; - -# Windows Installation - ## Compatibility **System requirements:** diff --git a/website/src/content/products/index.mdx b/website/src/content/products/index.mdx index 3dab3e086..fdc2dfdaa 100644 --- a/website/src/content/products/index.mdx +++ b/website/src/content/products/index.mdx @@ -6,7 +6,6 @@ sidebar: --- import { Aside, Card, CardGrid } from '@astrojs/starlight/components'; -**Jan's Goal is** > **to build a superintelligence that you can self-host and use locally on your own devices.** diff --git a/website/src/layouts/Layout.astro b/website/src/layouts/Layout.astro index 591effc34..f13a41ebf 100644 --- a/website/src/layouts/Layout.astro +++ b/website/src/layouts/Layout.astro @@ -18,31 +18,84 @@ const { title } = Astro.props; - + - - + + - - + // Apply the correct theme immediately + if (theme === 'auto') { + const actualTheme = window.matchMedia('(prefers-color-scheme: light)').matches ? 
'light' : 'dark'; + document.documentElement.setAttribute('data-theme', actualTheme); + } else { + document.documentElement.setAttribute('data-theme', theme); + } + + // Store non-auto preference + if (storedTheme && storedTheme !== 'auto') { + document.documentElement.setAttribute('data-theme', storedTheme); + } + })(); + + + + diff --git a/website/src/pages/blog.astro b/website/src/pages/blog.astro new file mode 100644 index 000000000..b4b846b46 --- /dev/null +++ b/website/src/pages/blog.astro @@ -0,0 +1,363 @@ +--- +import { getCollection } from 'astro:content'; +import Layout from '../layouts/Layout.astro'; +import CustomNav from '../components/CustomNav.astro'; + +// Get all blog entries and sort by date (newest first) +const blogEntries = await getCollection('blog'); +const sortedEntries = blogEntries.sort((a, b) => + new Date(b.data.date).getTime() - new Date(a.data.date).getTime() +); + +// Extract unique categories +// const allCategories = [...new Set(sortedEntries.flatMap(entry => +// entry.data.categories ? entry.data.categories.split(',').map(cat => cat.trim()) : [] +// ))]; + +const title = 'Blog'; +const description = 'The latest updates from Jan. See Changelog for more product updates.'; + +// Define gradient colors for cards +const gradients = [ + 'from-purple-500 to-pink-500', + 'from-blue-500 to-cyan-400', + 'from-purple-600 to-blue-500', + 'from-cyan-400 to-blue-500', + 'from-pink-500 to-purple-600', + 'from-blue-600 to-purple-600' +]; +--- + + + +
+
+ +
+

Blog

+

The latest updates from Jan. See Changelog for more product updates.

+
+ + +
+ + + + +
+ + +
+ {sortedEntries.map((entry, index) => { + const date = new Date(entry.data.date); + const formattedDate = date.toLocaleDateString('en-US', { + year: 'numeric', + month: 'long', + day: 'numeric' + }); + const gradientClass = gradients[index % gradients.length]; + const category = entry.data.categories || 'guides'; + + return ( + + ); + })} +
+
+
+
+ + + + + diff --git a/website/src/pages/blog/[slug].astro b/website/src/pages/blog/[slug].astro new file mode 100644 index 000000000..ac5abf4ce --- /dev/null +++ b/website/src/pages/blog/[slug].astro @@ -0,0 +1,656 @@ +--- +import { getCollection } from 'astro:content'; +import Layout from '../../layouts/Layout.astro'; +import CustomNav from '../../components/CustomNav.astro'; +import '../../styles/blog.css'; + +export async function getStaticPaths() { + const blogEntries = await getCollection('blog'); + return blogEntries.map(entry => ({ + params: { slug: entry.slug }, + props: { entry }, + })); +} + +const { entry } = Astro.props; +const { Content } = await entry.render(); + +const formattedDate = new Date(entry.data.date).toLocaleDateString('en-US', { + year: 'numeric', + month: 'long', + day: 'numeric' +}); + +const tags = entry.data.tags ? entry.data.tags.split(',').map(tag => tag.trim()) : []; +--- + + + +
+
+ + + + +
+ + +

{entry.data.title}

+ +

{entry.data.description}

+ + {tags.length > 0 && ( + + )} +
+ + +
+ +
+ + + +
+
+
+ + + + + + + diff --git a/website/src/pages/changelog.astro b/website/src/pages/changelog.astro new file mode 100644 index 000000000..d4824afee --- /dev/null +++ b/website/src/pages/changelog.astro @@ -0,0 +1,469 @@ +--- +import { getCollection } from 'astro:content'; +import Layout from '../layouts/Layout.astro'; +import CustomNav from '../components/CustomNav.astro'; + +// Get all changelog entries and sort by date (newest first) +const changelogEntries = await getCollection('changelog'); +const sortedEntries = changelogEntries.sort((a, b) => + new Date(b.data.date).getTime() - new Date(a.data.date).getTime() +); + +const title = 'Changelog'; +const description = 'Latest release updates from the Jan team. Check out our Roadmap to see what\'s next.'; +--- + + + +
+
+ +
+

Changelog

+

Latest release updates from the Jan team. Check out our Roadmap to see what's next.

+ + + +
+ + +
+ {sortedEntries.map((entry, index) => { + const date = new Date(entry.data.date); + const formattedDate = date.toLocaleDateString('en-US', { + year: 'numeric', + month: 'long', + day: 'numeric' + }); + + return ( +
+ +
+
+ +
+ + +
+ {entry.data.image && ( +
+ {entry.data.title} +
+ )} + +
+

+ {entry.data.title} +

+ +

{entry.data.description}

+ + {entry.data.version && ( + New release Jan App v{entry.data.version} + )} +
+
+
+ ); + })} +
+
+
+
+ + + + + diff --git a/website/src/pages/changelog/[slug].astro b/website/src/pages/changelog/[slug].astro new file mode 100644 index 000000000..48dc1ab9d --- /dev/null +++ b/website/src/pages/changelog/[slug].astro @@ -0,0 +1,306 @@ +--- +import { getCollection, type CollectionEntry } from 'astro:content'; +import Layout from '../../layouts/Layout.astro'; +import CustomNav from '../../components/CustomNav.astro'; + +export async function getStaticPaths() { + const changelogEntries = await getCollection('changelog'); + return changelogEntries.map((entry) => ({ + params: { slug: entry.slug }, + props: { entry }, + })); +} + +interface Props { + entry: CollectionEntry<'changelog'>; +} + +const { entry } = Astro.props; +const { Content } = await entry.render(); + +const date = new Date(entry.data.date); +const formattedDate = date.toLocaleDateString('en-US', { + year: 'numeric', + month: 'long', + day: 'numeric' +}); +--- + + + +
+
+ + + + +
+ +

{entry.data.title}

+ + {entry.data.version && ( +
+ v{entry.data.version} +
+ )} + + {entry.data.image && ( +
+ {entry.data.title} +
+ )} +
+ + +
+ +
+ + + +
+
+
+ + \ No newline at end of file diff --git a/website/src/pages/index.astro b/website/src/pages/index.astro new file mode 100644 index 000000000..ff150ee2b --- /dev/null +++ b/website/src/pages/index.astro @@ -0,0 +1,198 @@ +--- +import Layout from '@/layouts/Layout.astro'; +import CustomNav from '@/components/CustomNav.astro'; +import DownloadButton from '@/components/DownloadButton.astro'; + +// Placeholder data - to be fetched from GitHub API later +const latestVersion = 'v0.6.7'; +const downloadCount = '3.8M+'; +--- + + + +
+
+
+ + + + + +
+
+ +

+ Chat with AI
+ without privacy concerns +

+
+ +

+ Jan is an open source ChatGPT-alternative that runs 100% offline. +

+ +
+ +
+
+ +
+
+
+
+ + diff --git a/website/src/pages/products/index.astro b/website/src/pages/products/index.astro index 03cca8501..4d6ecb8ae 100644 --- a/website/src/pages/products/index.astro +++ b/website/src/pages/products/index.astro @@ -6,7 +6,7 @@ import { Content } from '../../content/products/index.mdx';