diff --git a/.github/workflows/jan-server-web-ci-dev.yml b/.github/workflows/jan-server-web-ci-dev.yml index 59515f443..be2243864 100644 --- a/.github/workflows/jan-server-web-ci-dev.yml +++ b/.github/workflows/jan-server-web-ci-dev.yml @@ -12,7 +12,7 @@ jobs: build-and-preview: runs-on: [ubuntu-24-04-docker] env: - JAN_API_BASE: "https://api-dev.menlo.ai/v1" + MENLO_PLATFORM_BASE_URL: "https://api-dev.menlo.ai/v1" permissions: pull-requests: write contents: write @@ -52,7 +52,7 @@ jobs: - name: Build docker image run: | - docker build --build-arg JAN_API_BASE=${{ env.JAN_API_BASE }} -t ${{ steps.vars.outputs.FULL_IMAGE }} . + docker build --build-arg MENLO_PLATFORM_BASE_URL=${{ env.MENLO_PLATFORM_BASE_URL }} -t ${{ steps.vars.outputs.FULL_IMAGE }} . - name: Push docker image if: github.event_name == 'push' diff --git a/.github/workflows/jan-server-web-ci-prod.yml b/.github/workflows/jan-server-web-ci-prod.yml index 1477fea32..cb5b597b7 100644 --- a/.github/workflows/jan-server-web-ci-prod.yml +++ b/.github/workflows/jan-server-web-ci-prod.yml @@ -13,7 +13,7 @@ jobs: deployments: write pull-requests: write env: - JAN_API_BASE: "https://api.menlo.ai/v1" + MENLO_PLATFORM_BASE_URL: "https://api.menlo.ai/v1" GA_MEASUREMENT_ID: "G-YK53MX8M8M" CLOUDFLARE_PROJECT_NAME: "jan-server-web" steps: @@ -43,7 +43,7 @@ jobs: - name: Install dependencies run: make config-yarn && yarn install && yarn build:core && make build-web-app env: - JAN_API_BASE: ${{ env.JAN_API_BASE }} + MENLO_PLATFORM_BASE_URL: ${{ env.MENLO_PLATFORM_BASE_URL }} GA_MEASUREMENT_ID: ${{ env.GA_MEASUREMENT_ID }} - name: Publish to Cloudflare Pages Production diff --git a/.github/workflows/jan-server-web-ci-stag.yml b/.github/workflows/jan-server-web-ci-stag.yml index b1851ebdd..3c2581952 100644 --- a/.github/workflows/jan-server-web-ci-stag.yml +++ b/.github/workflows/jan-server-web-ci-stag.yml @@ -12,7 +12,7 @@ jobs: build-and-preview: runs-on: [ubuntu-24-04-docker] env: - JAN_API_BASE: "https://api-stag.menlo.ai/v1" + MENLO_PLATFORM_BASE_URL: "https://api-stag.menlo.ai/v1" permissions: pull-requests: write contents: write @@ -52,7 +52,7 @@ jobs: - name: Build docker image run: | - docker build --build-arg JAN_API_BASE=${{ env.JAN_API_BASE }} -t ${{ steps.vars.outputs.FULL_IMAGE }} . + docker build --build-arg MENLO_PLATFORM_BASE_URL=${{ env.MENLO_PLATFORM_BASE_URL }} -t ${{ steps.vars.outputs.FULL_IMAGE }} . 
- name: Push docker image if: github.event_name == 'push' diff --git a/.github/workflows/template-tauri-build-windows-x64-external.yml b/.github/workflows/template-tauri-build-windows-x64-external.yml index 16895de65..dbd5fd7eb 100644 --- a/.github/workflows/template-tauri-build-windows-x64-external.yml +++ b/.github/workflows/template-tauri-build-windows-x64-external.yml @@ -49,6 +49,8 @@ jobs: # Update tauri.conf.json jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json + mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json jq '.bundle.windows.signCommand = "echo External build - skipping signature: %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json @@ -80,6 +82,36 @@ jobs: echo "---------./src-tauri/Cargo.toml---------" cat ./src-tauri/Cargo.toml + generate_build_version() { + ### Example + ### input 0.5.6 output will be 0.5.6 and 0.5.6.0 + ### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2 + ### input 0.5.6-1213 output will be 0.5.6 and 0.5.6.1213 + local new_version="$1" + local base_version + local t_value + # Check if it has a "-" + if [[ "$new_version" == *-* ]]; then + base_version="${new_version%%-*}" # part before - + suffix="${new_version#*-}" # part after - + # Check if it is rcX-beta + if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then + t_value="${BASH_REMATCH[1]}" + else + t_value="$suffix" + fi + else + base_version="$new_version" + t_value="0" + fi + # Export two values + new_base_version="$base_version" + new_build_version="${base_version}.${t_value}" + } + generate_build_version ${{ inputs.new_version }} + sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template + if [ "${{ inputs.channel }}" != "stable" ]; then jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json @@ -103,7 +135,14 @@ jobs: chmod +x .github/scripts/rename-workspace.sh .github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }} cat ./package.json + sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template + else + sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template fi + echo "---------nsis.template---------" + cat ./src-tauri/tauri.bundle.windows.nsis.template - name: Build app shell: bash run: | diff --git a/.github/workflows/template-tauri-build-windows-x64.yml b/.github/workflows/template-tauri-build-windows-x64.yml index ed00ef90f..edf8d88aa 100644 --- a/.github/workflows/template-tauri-build-windows-x64.yml +++ b/.github/workflows/template-tauri-build-windows-x64.yml @@ -98,9 +98,15 @@ jobs: # Update tauri.conf.json jq --arg version "${{ inputs.new_version
}}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json + mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json mv /tmp/package.json web-app/package.json + # Add sign commands to tauri.windows.conf.json + jq '.bundle.windows.signCommand = "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json + mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json + # Update tauri plugin versions jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json @@ -127,9 +133,35 @@ jobs: echo "---------./src-tauri/Cargo.toml---------" cat ./src-tauri/Cargo.toml - # Add sign commands to tauri.windows.conf.json - jq '.bundle.windows.signCommand = "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json - mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json + generate_build_version() { + ### Example + ### input 0.5.6 output will be 0.5.6 and 0.5.6.0 + ### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2 + ### input 0.5.6-1213 output will be 0.5.6 and and 0.5.6.1213 + local new_version="$1" + local base_version + local t_value + # Check if it has a "-" + if [[ "$new_version" == *-* ]]; then + base_version="${new_version%%-*}" # part before - + suffix="${new_version#*-}" # part after - + # Check if it is rcX-beta + if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then + t_value="${BASH_REMATCH[1]}" + else + t_value="$suffix" + fi + else + base_version="$new_version" + t_value="0" + fi + # Export two values + new_base_version="$base_version" + new_build_version="${base_version}.${t_value}" + } + generate_build_version ${{ inputs.new_version }} + sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template echo "---------tauri.windows.conf.json---------" cat ./src-tauri/tauri.windows.conf.json @@ -163,7 +195,14 @@ jobs: chmod +x .github/scripts/rename-workspace.sh .github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }} cat ./package.json + sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template + else + sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template fi + echo "---------nsis.template---------" + cat ./src-tauri/tauri.bundle.windows.nsis.template - name: Install AzureSignTool run: | @@ -234,8 +273,6 @@ jobs: # Upload for tauri updater aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }} aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }}.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}.sig - - aws s3 cp 
./src-tauri/target/release/bundle/msi/${{ steps.metadata.outputs.MSI_FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.MSI_FILE_NAME }} env: AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }} @@ -252,13 +289,3 @@ jobs: asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }} asset_name: ${{ steps.metadata.outputs.FILE_NAME }} asset_content_type: application/octet-stream - - name: Upload release assert if public provider is github - if: inputs.public_provider == 'github' - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - uses: actions/upload-release-asset@v1.0.1 - with: - upload_url: ${{ inputs.upload_url }} - asset_path: ./src-tauri/target/release/bundle/msi/${{ steps.metadata.outputs.MSI_FILE_NAME }} - asset_name: ${{ steps.metadata.outputs.MSI_FILE_NAME }} - asset_content_type: application/octet-stream diff --git a/Dockerfile b/Dockerfile index 8431f6115..2768a6b23 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,8 @@ # Stage 1: Build stage with Node.js and Yarn v4 FROM node:20-alpine AS builder -ARG JAN_API_BASE=https://api-dev.jan.ai/v1 -ENV JAN_API_BASE=$JAN_API_BASE +ARG MENLO_PLATFORM_BASE_URL=https://api-dev.menlo.ai/v1 +ENV MENLO_PLATFORM_BASE_URL=$MENLO_PLATFORM_BASE_URL # Install build dependencies RUN apk add --no-cache \ diff --git a/Makefile b/Makefile index 890f612c8..a3fa63665 100644 --- a/Makefile +++ b/Makefile @@ -117,7 +117,6 @@ lint: install-and-build test: lint yarn download:bin ifeq ($(OS),Windows_NT) - yarn download:windows-installer endif yarn test yarn copy:assets:tauri diff --git a/WEB_VERSION_TRACKER.md b/WEB_VERSION_TRACKER.md index f9f7aa416..7b16c8280 100644 --- a/WEB_VERSION_TRACKER.md +++ b/WEB_VERSION_TRACKER.md @@ -2,7 +2,17 @@ Internal tracker for web component changes and features. 
-## v0.0.12 (Current) +## v0.0.13 (Current) +**Release Date**: 2025-10-24 +**Commit SHA**: 22645549cea48b1ae24b5b9dc70411fd3bfc9935 + +**Main Features**: +- Migrate auth to the Menlo platform +- Remove the conv route prefix +- Disable Projects for web +- Model capabilities are fetched correctly from the model catalog + +## v0.0.12 **Release Date**: 2025-10-02 **Commit SHA**: df145d63a93bd27336b5b539ce0719fe9c7719e3 diff --git a/autoqa/scripts/setup-android-env.sh b/autoqa/scripts/setup-android-env.sh index 62adc079f..2cf18ae8f 100755 --- a/autoqa/scripts/setup-android-env.sh +++ b/autoqa/scripts/setup-android-env.sh @@ -25,8 +25,8 @@ export RANLIB_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x # Additional environment variables for Rust cross-compilation export CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang" -# Only set global CC and AR for Android builds (when TAURI_ANDROID_BUILD is set) -if [ "$TAURI_ANDROID_BUILD" = "true" ]; then +# Only set global CC and AR for Android builds (when IS_ANDROID is set) +if [ "$IS_ANDROID" = "true" ]; then export CC="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang" export AR="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar" echo "Global CC and AR set for Android build" diff --git a/core/package.json b/core/package.json index 203eaf293..df9302210 100644 --- a/core/package.json +++ b/core/package.json @@ -31,7 +31,7 @@ "@vitest/coverage-v8": "^2.1.8", "@vitest/ui": "^2.1.8", "eslint": "8.57.0", - "happy-dom": "^15.11.6", + "happy-dom": "^20.0.0", "pacote": "^21.0.0", "react": "19.0.0", "request": "^2.88.2", diff --git a/core/src/browser/extension.ts b/core/src/browser/extension.ts index 78f90ba16..d562bb9ea 100644 --- a/core/src/browser/extension.ts +++ b/core/src/browser/extension.ts @@ -11,6 +11,8 @@ export enum ExtensionTypeEnum { HuggingFace = 'huggingFace', Engine = 'engine', Hardware = 'hardware', + RAG = 'rag', + VectorDB = 'vectorDB', } export interface ExtensionType { diff --git a/core/src/browser/extensions/engines/AIEngine.ts b/core/src/browser/extensions/engines/AIEngine.ts index a4f98e71c..1be977034 100644 --- a/core/src/browser/extensions/engines/AIEngine.ts +++ b/core/src/browser/extensions/engines/AIEngine.ts @@ -182,6 +182,7 @@ export interface SessionInfo { port: number // llama-server output port (corrected from portid) model_id: string //name of the model model_path: string // path of the loaded model + is_embedding: boolean api_key: string mmproj_path?: string } diff --git a/core/src/browser/extensions/index.ts b/core/src/browser/extensions/index.ts index 30c7de216..6284ad719 100644 --- a/core/src/browser/extensions/index.ts +++ b/core/src/browser/extensions/index.ts @@ -23,3 +23,8 @@ export { MCPExtension } from './mcp' * Base AI Engines.
*/ export * from './engines' + +export { RAGExtension, RAG_INTERNAL_SERVER } from './rag' +export type { AttachmentInput, IngestAttachmentsResult } from './rag' +export { VectorDBExtension } from './vector-db' +export type { SearchMode, VectorDBStatus, VectorChunkInput, VectorSearchResult, AttachmentFileInfo, VectorDBFileInput, VectorDBIngestOptions } from './vector-db' diff --git a/core/src/browser/extensions/rag.ts b/core/src/browser/extensions/rag.ts new file mode 100644 index 000000000..251d0fcad --- /dev/null +++ b/core/src/browser/extensions/rag.ts @@ -0,0 +1,36 @@ +import { BaseExtension, ExtensionTypeEnum } from '../extension' +import type { MCPTool, MCPToolCallResult } from '../../types' +import type { AttachmentFileInfo } from './vector-db' + +export interface AttachmentInput { + path: string + name?: string + type?: string + size?: number +} + +export interface IngestAttachmentsResult { + filesProcessed: number + chunksInserted: number + files: AttachmentFileInfo[] +} + +export const RAG_INTERNAL_SERVER = 'rag-internal' + +/** + * RAG extension base: exposes RAG tools and orchestration API. + */ +export abstract class RAGExtension extends BaseExtension { + type(): ExtensionTypeEnum | undefined { + return ExtensionTypeEnum.RAG + } + + abstract getTools(): Promise + /** + * Lightweight list of tool names for quick routing/lookup. + */ + abstract getToolNames(): Promise + abstract callTool(toolName: string, args: Record): Promise + + abstract ingestAttachments(threadId: string, files: AttachmentInput[]): Promise +} diff --git a/core/src/browser/extensions/vector-db.ts b/core/src/browser/extensions/vector-db.ts new file mode 100644 index 000000000..27dec0367 --- /dev/null +++ b/core/src/browser/extensions/vector-db.ts @@ -0,0 +1,82 @@ +import { BaseExtension, ExtensionTypeEnum } from '../extension' + +export type SearchMode = 'auto' | 'ann' | 'linear' + +export interface VectorDBStatus { + ann_available: boolean +} + +export interface VectorChunkInput { + text: string + embedding: number[] +} + +export interface VectorSearchResult { + id: string + text: string + score?: number + file_id: string + chunk_file_order: number +} + +export interface AttachmentFileInfo { + id: string + name?: string + path?: string + type?: string + size?: number + chunk_count: number +} + +// High-level input types for file ingestion +export interface VectorDBFileInput { + path: string + name?: string + type?: string + size?: number +} + +export interface VectorDBIngestOptions { + chunkSize: number + chunkOverlap: number +} + +/** + * Vector DB extension base: abstraction over local vector storage and search. 
+ */ +export abstract class VectorDBExtension extends BaseExtension { + type(): ExtensionTypeEnum | undefined { + return ExtensionTypeEnum.VectorDB + } + + abstract getStatus(): Promise + abstract createCollection(threadId: string, dimension: number): Promise + abstract insertChunks( + threadId: string, + fileId: string, + chunks: VectorChunkInput[] + ): Promise + abstract ingestFile( + threadId: string, + file: VectorDBFileInput, + opts: VectorDBIngestOptions + ): Promise + abstract searchCollection( + threadId: string, + query_embedding: number[], + limit: number, + threshold: number, + mode?: SearchMode, + fileIds?: string[] + ): Promise + abstract deleteChunks(threadId: string, ids: string[]): Promise + abstract deleteFile(threadId: string, fileId: string): Promise + abstract deleteCollection(threadId: string): Promise + abstract listAttachments(threadId: string, limit?: number): Promise + abstract getChunks( + threadId: string, + fileId: string, + startOrder: number, + endOrder: number + ): Promise +} diff --git a/core/src/types/setting/settingComponent.ts b/core/src/types/setting/settingComponent.ts index 9dfd9b597..57b222d87 100644 --- a/core/src/types/setting/settingComponent.ts +++ b/core/src/types/setting/settingComponent.ts @@ -12,6 +12,8 @@ export type SettingComponentProps = { extensionName?: string requireModelReload?: boolean configType?: ConfigType + titleKey?: string + descriptionKey?: string } export type ConfigType = 'runtime' | 'setting' diff --git a/docs/public/assets/images/changelog/jan-release-v0.7.0.jpeg b/docs/public/assets/images/changelog/jan-release-v0.7.0.jpeg new file mode 100644 index 000000000..cb0d4a3a9 Binary files /dev/null and b/docs/public/assets/images/changelog/jan-release-v0.7.0.jpeg differ diff --git a/docs/src/pages/changelog/2025-10-02-jan-projects.mdx b/docs/src/pages/changelog/2025-10-02-jan-projects.mdx new file mode 100644 index 000000000..851e26403 --- /dev/null +++ b/docs/src/pages/changelog/2025-10-02-jan-projects.mdx @@ -0,0 +1,28 @@ +--- +title: "Jan v0.7.0: Jan Projects" +version: 0.7.0 +description: "Jan v0.7.0 introduces Projects, model renaming, llama.cpp auto-tuning, model stats, and Azure support." +date: 2025-10-02 +ogImage: "/assets/images/changelog/jan-release-v0.7.0.jpeg" +--- + +import ChangelogHeader from "@/components/Changelog/ChangelogHeader" +import { Callout } from 'nextra/components' + + + +## Jan v0.7.0: Jan Projects + +Jan v0.7.0 is live! This release focuses on helping you organize your workspace and better understand how models run. + +### What’s new +- **Projects**: Group related chats under one project for a cleaner workflow. +- **Rename models**: Give your models custom names for easier identification. +- **Model context stats**: See context usage when a model runs. +- **Auto-loaded cloud models**: Cloud model names now appear automatically. + +--- + +Update your Jan or [download the latest version](https://jan.ai/). + +For the complete list of changes, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.7.0). 
diff --git a/docs/src/pages/changelog/2025-10-03-jan-stability-improvements.mdx b/docs/src/pages/changelog/2025-10-03-jan-stability-improvements.mdx new file mode 100644 index 000000000..df756ccfc --- /dev/null +++ b/docs/src/pages/changelog/2025-10-03-jan-stability-improvements.mdx @@ -0,0 +1,26 @@ +--- +title: "Jan v0.7.1: Fixes Windows Version Revert & OpenRouter Models" +version: 0.7.1 +description: "Jan v0.7.1 focuses on bug fixes, including a Windows version revert and improvements to OpenRouter models." +date: 2025-10-03 +--- + +import ChangelogHeader from "@/components/Changelog/ChangelogHeader" +import { Callout } from 'nextra/components' + + + +### Bug Fixes: Windows Version Revert & OpenRouter Models + +#### Quick fixes: +- Jan no longer reverts to an older version on load +- OpenRouter can now add models again +- Anthropic requests now include the headers needed to fetch models + +--- + +Update your Jan or [download the latest version](https://jan.ai/). + +For the complete list of changes, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.7.1). + + diff --git a/docs/src/pages/changelog/2025-10-16-jan-security-update.mdx b/docs/src/pages/changelog/2025-10-16-jan-security-update.mdx new file mode 100644 index 000000000..3437cb66a --- /dev/null +++ b/docs/src/pages/changelog/2025-10-16-jan-security-update.mdx @@ -0,0 +1,25 @@ +--- +title: "Jan v0.7.2: Security Update" +version: 0.7.2 +description: "Jan v0.7.2 updates the happy-dom dependency to v20.0.0 to address a recently disclosed sandbox vulnerability." +date: 2025-10-16 +--- + +import ChangelogHeader from "@/components/Changelog/ChangelogHeader" +import { Callout } from 'nextra/components' + + + +## Jan v0.7.2: Security Update (happy-dom v20) + +This release focuses on **security and stability improvements**. +It updates the `happy-dom` dependency to the latest version to address a recently disclosed vulnerability. + +### Security Fix +- Updated `happy-dom` to **^20.0.0**, preventing untrusted JavaScript executed within Happy DOM from accessing process-level functions and executing arbitrary code outside the intended sandbox. + +--- + +Update your Jan or [download the latest version](https://jan.ai/). + +For the complete list of changes, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.7.2).
diff --git a/docs/src/pages/docs/desktop/index.mdx b/docs/src/pages/docs/desktop/index.mdx index 852f097a5..3c225abb3 100644 --- a/docs/src/pages/docs/desktop/index.mdx +++ b/docs/src/pages/docs/desktop/index.mdx @@ -41,7 +41,7 @@ Jan is an open-source replacement for ChatGPT: Jan is a full [product suite](https://en.wikipedia.org/wiki/Software_suite) that offers an alternative to Big AI: - [Jan Desktop](/docs/desktop/quickstart): macOS, Windows, and Linux apps with offline mode -- [Jan Web](https://chat.jan.ai): Jan on browser, a direct alternative to chatgpt.com +- [Jan Web](https://chat.menlo.ai): Jan on browser, a direct alternative to chatgpt.com - Jan Mobile: iOS and Android apps (Coming Soon) - [Jan Server](/docs/server): deploy locally, in your cloud, or on-prem - [Jan Models](/docs/models): Open-source models optimized for deep research, tool use, and reasoning diff --git a/extensions-web/src/conversational-web/api.ts b/extensions-web/src/conversational-web/api.ts index 0e398eb05..bdd147edd 100644 --- a/extensions-web/src/conversational-web/api.ts +++ b/extensions-web/src/conversational-web/api.ts @@ -16,7 +16,7 @@ import { ListConversationItemsResponse } from './types' -declare const JAN_API_BASE: string +declare const MENLO_PLATFORM_BASE_URL: string export class RemoteApi { private authService: JanAuthService @@ -28,7 +28,7 @@ export class RemoteApi { async createConversation( data: Conversation ): Promise { - const url = `${JAN_API_BASE}${CONVERSATION_API_ROUTES.CONVERSATIONS}` + const url = `${MENLO_PLATFORM_BASE_URL}${CONVERSATION_API_ROUTES.CONVERSATIONS}` return this.authService.makeAuthenticatedRequest( url, @@ -43,12 +43,12 @@ export class RemoteApi { conversationId: string, data: Conversation ): Promise { - const url = `${JAN_API_BASE}${CONVERSATION_API_ROUTES.CONVERSATION_BY_ID(conversationId)}` + const url = `${MENLO_PLATFORM_BASE_URL}${CONVERSATION_API_ROUTES.CONVERSATION_BY_ID(conversationId)}` return this.authService.makeAuthenticatedRequest( url, { - method: 'PATCH', + method: 'POST', body: JSON.stringify(data), } ) @@ -70,7 +70,7 @@ export class RemoteApi { } const queryString = queryParams.toString() - const url = `${JAN_API_BASE}${CONVERSATION_API_ROUTES.CONVERSATIONS}${queryString ? `?${queryString}` : ''}` + const url = `${MENLO_PLATFORM_BASE_URL}${CONVERSATION_API_ROUTES.CONVERSATIONS}${queryString ? `?${queryString}` : ''}` return this.authService.makeAuthenticatedRequest( url, @@ -114,7 +114,7 @@ export class RemoteApi { } async deleteConversation(conversationId: string): Promise { - const url = `${JAN_API_BASE}${CONVERSATION_API_ROUTES.CONVERSATION_BY_ID(conversationId)}` + const url = `${MENLO_PLATFORM_BASE_URL}${CONVERSATION_API_ROUTES.CONVERSATION_BY_ID(conversationId)}` await this.authService.makeAuthenticatedRequest( url, @@ -141,7 +141,7 @@ export class RemoteApi { } const queryString = queryParams.toString() - const url = `${JAN_API_BASE}${CONVERSATION_API_ROUTES.CONVERSATION_ITEMS(conversationId)}${queryString ? `?${queryString}` : ''}` + const url = `${MENLO_PLATFORM_BASE_URL}${CONVERSATION_API_ROUTES.CONVERSATION_ITEMS(conversationId)}${queryString ? 
`?${queryString}` : ''}` return this.authService.makeAuthenticatedRequest( url, diff --git a/extensions-web/src/conversational-web/types.ts b/extensions-web/src/conversational-web/types.ts index a6057da5d..ceb994808 100644 --- a/extensions-web/src/conversational-web/types.ts +++ b/extensions-web/src/conversational-web/types.ts @@ -31,7 +31,7 @@ export interface ConversationResponse { id: string object: 'conversation' title?: string - created_at: number + created_at: number | string metadata: ConversationMetadata } @@ -50,6 +50,7 @@ export interface ConversationItemAnnotation { } export interface ConversationItemContent { + type?: string file?: { file_id?: string mime_type?: string @@ -62,23 +63,50 @@ export interface ConversationItemContent { file_id?: string url?: string } + image_file?: { + file_id?: string + mime_type?: string + } input_text?: string output_text?: { annotations?: ConversationItemAnnotation[] text?: string } - reasoning_content?: string text?: { value?: string + text?: string } - type?: string + reasoning_content?: string + tool_calls?: Array<{ + id?: string + type?: string + function?: { + name?: string + arguments?: string + } + }> + tool_call_id?: string + tool_result?: { + content?: Array<{ + type?: string + text?: string + output_text?: { + text?: string + } + }> + output_text?: { + text?: string + } + } + text_result?: string } export interface ConversationItem { content?: ConversationItemContent[] - created_at: number + created_at: number | string id: string object: string + metadata?: Record role: string status?: string type?: string diff --git a/extensions-web/src/conversational-web/utils.ts b/extensions-web/src/conversational-web/utils.ts index 6448d9f4d..ad2f6fde9 100644 --- a/extensions-web/src/conversational-web/utils.ts +++ b/extensions-web/src/conversational-web/utils.ts @@ -1,5 +1,5 @@ import { Thread, ThreadAssistantInfo, ThreadMessage, ContentType } from '@janhq/core' -import { Conversation, ConversationResponse, ConversationItem } from './types' +import { Conversation, ConversationResponse, ConversationItem, ConversationItemContent, ConversationMetadata } from './types' import { DEFAULT_ASSISTANT } from './const' export class ObjectParser { @@ -7,7 +7,7 @@ export class ObjectParser { const modelName = thread.assistants?.[0]?.model?.id || undefined const modelProvider = thread.assistants?.[0]?.model?.engine || undefined const isFavorite = thread.metadata?.is_favorite?.toString() || 'false' - let metadata = {} + let metadata: ConversationMetadata = {} if (modelName && modelProvider) { metadata = { model_id: modelName, @@ -23,15 +23,14 @@ export class ObjectParser { static conversationToThread(conversation: ConversationResponse): Thread { const assistants: ThreadAssistantInfo[] = [] - if ( - conversation.metadata?.model_id && - conversation.metadata?.model_provider - ) { + const metadata: ConversationMetadata = conversation.metadata || {} + + if (metadata.model_id && metadata.model_provider) { assistants.push({ ...DEFAULT_ASSISTANT, model: { - id: conversation.metadata.model_id, - engine: conversation.metadata.model_provider, + id: metadata.model_id, + engine: metadata.model_provider, }, }) } else { @@ -44,16 +43,18 @@ export class ObjectParser { }) } - const isFavorite = conversation.metadata?.is_favorite === 'true' + const isFavorite = metadata.is_favorite === 'true' + const createdAtMs = parseTimestamp(conversation.created_at) + return { id: conversation.id, title: conversation.title || '', assistants, - created: conversation.created_at, - updated: 
conversation.created_at, + created: createdAtMs, + updated: createdAtMs, model: { - id: conversation.metadata.model_id, - provider: conversation.metadata.model_provider, + id: metadata.model_id, + provider: metadata.model_provider, }, isFavorite, metadata: { is_favorite: isFavorite }, @@ -65,74 +66,70 @@ export class ObjectParser { threadId: string ): ThreadMessage { // Extract text content and metadata from the item - let textContent = '' - let reasoningContent = '' + const textSegments: string[] = [] + const reasoningSegments: string[] = [] const imageUrls: string[] = [] let toolCalls: any[] = [] - let finishReason = '' if (item.content && item.content.length > 0) { for (const content of item.content) { - // Handle text content - if (content.text?.value) { - textContent = content.text.value - } - // Handle output_text for assistant messages - if (content.output_text?.text) { - textContent = content.output_text.text - } - // Handle reasoning content - if (content.reasoning_content) { - reasoningContent = content.reasoning_content - } - // Handle image content - if (content.image?.url) { - imageUrls.push(content.image.url) - } - // Extract finish_reason - if (content.finish_reason) { - finishReason = content.finish_reason - } - } - } - - // Handle tool calls parsing for assistant messages - if (item.role === 'assistant' && finishReason === 'tool_calls') { - try { - // Tool calls are embedded as JSON string in textContent - const toolCallMatch = textContent.match(/\[.*\]/) - if (toolCallMatch) { - const toolCallsData = JSON.parse(toolCallMatch[0]) - toolCalls = toolCallsData.map((toolCall: any) => ({ - tool: { - id: toolCall.id || 'unknown', - function: { - name: toolCall.function?.name || 'unknown', - arguments: toolCall.function?.arguments || '{}' - }, - type: toolCall.type || 'function' - }, - response: { - error: '', - content: [] - }, - state: 'ready' - })) - // Remove tool calls JSON from text content, keep only reasoning - textContent = '' - } - } catch (error) { - console.error('Failed to parse tool calls:', error) + extractContentByType(content, { + onText: (value) => { + if (value) { + textSegments.push(value) + } + }, + onReasoning: (value) => { + if (value) { + reasoningSegments.push(value) + } + }, + onImage: (url) => { + if (url) { + imageUrls.push(url) + } + }, + onToolCalls: (calls) => { + toolCalls = calls.map((toolCall) => { + const callId = toolCall.id || 'unknown' + const rawArgs = toolCall.function?.arguments + const normalizedArgs = + typeof rawArgs === 'string' + ? rawArgs + : JSON.stringify(rawArgs ?? 
{}) + return { + id: callId, + tool_call_id: callId, + tool: { + id: callId, + function: { + name: toolCall.function?.name || 'unknown', + arguments: normalizedArgs, + }, + type: toolCall.type || 'function', + }, + response: { + error: '', + content: [], + }, + state: 'pending', + } + }) + }, + }) } } // Format final content with reasoning if present let finalTextValue = '' - if (reasoningContent) { - finalTextValue = `${reasoningContent}` + if (reasoningSegments.length > 0) { + finalTextValue += `${reasoningSegments.join('\n')}` } - if (textContent) { - finalTextValue += textContent + if (textSegments.length > 0) { + if (finalTextValue) { + finalTextValue += '\n' + } + finalTextValue += textSegments.join('\n') } // Build content array for ThreadMessage @@ -157,22 +154,26 @@ export class ObjectParser { } // Build metadata - const metadata: any = {} + const metadata: any = { ...(item.metadata || {}) } if (toolCalls.length > 0) { metadata.tool_calls = toolCalls } + const createdAtMs = parseTimestamp(item.created_at) + // Map status from server format to frontend format const mappedStatus = item.status === 'completed' ? 'ready' : item.status || 'ready' + const role = item.role === 'user' || item.role === 'assistant' ? item.role : 'assistant' + return { type: 'text', id: item.id, object: 'thread.message', thread_id: threadId, - role: item.role as 'user' | 'assistant', + role, content: messageContent, - created_at: item.created_at * 1000, // Convert to milliseconds + created_at: createdAtMs, completed_at: 0, status: mappedStatus, metadata, @@ -201,25 +202,46 @@ export const combineConversationItemsToMessages = ( ): ThreadMessage[] => { const messages: ThreadMessage[] = [] const toolResponseMap = new Map() + const sortedItems = [...items].sort( + (a, b) => parseTimestamp(a.created_at) - parseTimestamp(b.created_at) + ) // First pass: collect tool responses - for (const item of items) { + for (const item of sortedItems) { if (item.role === 'tool') { - const toolContent = item.content?.[0]?.text?.value || '' - toolResponseMap.set(item.id, { - error: '', - content: [ - { - type: 'text', - text: toolContent - } - ] - }) + for (const content of item.content ?? []) { + const toolCallId = content.tool_call_id || item.id + const toolResultText = + content.tool_result?.output_text?.text || + (Array.isArray(content.tool_result?.content) + ? 
content.tool_result?.content + ?.map((entry) => entry.text || entry.output_text?.text) + .filter((text): text is string => Boolean(text)) + .join('\n') + : undefined) + const toolContent = + content.text?.text || + content.text?.value || + content.output_text?.text || + content.input_text || + content.text_result || + toolResultText || + '' + toolResponseMap.set(toolCallId, { + error: '', + content: [ + { + type: 'text', + text: toolContent, + }, + ], + }) + } } } // Second pass: build messages and merge tool responses - for (const item of items) { + for (const item of sortedItems) { // Skip tool messages as they will be merged into assistant messages if (item.role === 'tool') { continue @@ -228,14 +250,35 @@ export const combineConversationItemsToMessages = ( const message = ObjectParser.conversationItemToThreadMessage(item, threadId) // If this is an assistant message with tool calls, merge tool responses - if (message.role === 'assistant' && message.metadata?.tool_calls && Array.isArray(message.metadata.tool_calls)) { + if ( + message.role === 'assistant' && + message.metadata?.tool_calls && + Array.isArray(message.metadata.tool_calls) + ) { const toolCalls = message.metadata.tool_calls as any[] - let toolResponseIndex = 0 - for (const [responseId, responseData] of toolResponseMap.entries()) { - if (toolResponseIndex < toolCalls.length) { - toolCalls[toolResponseIndex].response = responseData - toolResponseIndex++ + for (const toolCall of toolCalls) { + const callId = toolCall.tool_call_id || toolCall.id || toolCall.tool?.id + let responseKey: string | undefined + let response: any = null + + if (callId && toolResponseMap.has(callId)) { + responseKey = callId + response = toolResponseMap.get(callId) + } else { + const iterator = toolResponseMap.entries().next() + if (!iterator.done) { + responseKey = iterator.value[0] + response = iterator.value[1] + } + } + + if (response) { + toolCall.response = response + toolCall.state = 'succeeded' + if (responseKey) { + toolResponseMap.delete(responseKey) + } } } } @@ -245,3 +288,79 @@ export const combineConversationItemsToMessages = ( return messages } + +const parseTimestamp = (value: number | string | undefined): number => { + if (typeof value === 'number') { + // Distinguish between seconds and milliseconds + return value > 1e12 ? value : value * 1000 + } + if (typeof value === 'string') { + const parsed = Date.parse(value) + return Number.isNaN(parsed) ? 
Date.now() : parsed + } + return Date.now() +} + +const extractContentByType = ( + content: ConversationItemContent, + handlers: { + onText: (value: string) => void + onReasoning: (value: string) => void + onImage: (url: string) => void + onToolCalls: (calls: NonNullable) => void + } +) => { + const type = content.type || '' + + switch (type) { + case 'input_text': + handlers.onText(content.input_text || '') + break + case 'text': + handlers.onText(content.text?.text || content.text?.value || '') + break + case 'output_text': + handlers.onText(content.output_text?.text || '') + break + case 'reasoning_content': + handlers.onReasoning(content.reasoning_content || '') + break + case 'image': + case 'image_url': + if (content.image?.url) { + handlers.onImage(content.image.url) + } + break + case 'tool_calls': + if (content.tool_calls && Array.isArray(content.tool_calls)) { + handlers.onToolCalls(content.tool_calls) + } + break + case 'tool_result': + if (content.tool_result?.output_text?.text) { + handlers.onText(content.tool_result.output_text.text) + } + break + default: + // Fallback for legacy fields without explicit type + if (content.text?.value || content.text?.text) { + handlers.onText(content.text.value || content.text.text || '') + } + if (content.text_result) { + handlers.onText(content.text_result) + } + if (content.output_text?.text) { + handlers.onText(content.output_text.text) + } + if (content.reasoning_content) { + handlers.onReasoning(content.reasoning_content) + } + if (content.image?.url) { + handlers.onImage(content.image.url) + } + if (content.tool_calls && Array.isArray(content.tool_calls)) { + handlers.onToolCalls(content.tool_calls) + } + break + } +} diff --git a/extensions-web/src/jan-provider-web/api.ts b/extensions-web/src/jan-provider-web/api.ts index 97a9608f2..ded8f3214 100644 --- a/extensions-web/src/jan-provider-web/api.ts +++ b/extensions-web/src/jan-provider-web/api.ts @@ -4,10 +4,11 @@ */ import { getSharedAuthService, JanAuthService } from '../shared' -import { JanModel, janProviderStore } from './store' import { ApiError } from '../shared/types/errors' +import { JAN_API_ROUTES } from './const' +import { JanModel, janProviderStore } from './store' -// JAN_API_BASE is defined in vite.config.ts +// MENLO_PLATFORM_BASE_URL is defined in vite.config.ts // Constants const TEMPORARY_CHAT_ID = 'temporary-chat' @@ -19,12 +20,7 @@ const TEMPORARY_CHAT_ID = 'temporary-chat' */ function getChatCompletionConfig(request: JanChatCompletionRequest, stream: boolean = false) { const isTemporaryChat = request.conversation_id === TEMPORARY_CHAT_ID - - // For temporary chats, use the stateless /chat/completions endpoint - // For regular conversations, use the stateful /conv/chat/completions endpoint - const endpoint = isTemporaryChat - ? 
`${JAN_API_BASE}/chat/completions` - : `${JAN_API_BASE}/conv/chat/completions` + const endpoint = `${MENLO_PLATFORM_BASE_URL}${JAN_API_ROUTES.CHAT_COMPLETIONS}` const payload = { ...request, @@ -44,9 +40,30 @@ function getChatCompletionConfig(request: JanChatCompletionRequest, stream: bool return { endpoint, payload, isTemporaryChat } } -export interface JanModelsResponse { +interface JanModelSummary { + id: string object: string - data: JanModel[] + owned_by: string + created?: number +} + +interface JanModelsResponse { + object: string + data: JanModelSummary[] +} + +interface JanModelCatalogResponse { + id: string + supported_parameters?: { + names?: string[] + default?: Record + } + extras?: { + supported_parameters?: string[] + default_parameters?: Record + [key: string]: unknown + } + [key: string]: unknown } export interface JanChatMessage { @@ -112,6 +129,8 @@ export interface JanChatCompletionChunk { export class JanApiClient { private static instance: JanApiClient private authService: JanAuthService + private modelsCache: JanModel[] | null = null + private modelsFetchPromise: Promise | null = null private constructor() { this.authService = getSharedAuthService() @@ -124,25 +143,64 @@ export class JanApiClient { return JanApiClient.instance } - async getModels(): Promise { + async getModels(options?: { forceRefresh?: boolean }): Promise { try { + const forceRefresh = options?.forceRefresh ?? false + + if (forceRefresh) { + this.modelsCache = null + } else if (this.modelsCache) { + return this.modelsCache + } + + if (this.modelsFetchPromise) { + return this.modelsFetchPromise + } + janProviderStore.setLoadingModels(true) janProviderStore.clearError() - const response = await this.authService.makeAuthenticatedRequest( - `${JAN_API_BASE}/conv/models` - ) + this.modelsFetchPromise = (async () => { + const response = await this.authService.makeAuthenticatedRequest( + `${MENLO_PLATFORM_BASE_URL}${JAN_API_ROUTES.MODELS}` + ) - const models = response.data || [] - janProviderStore.setModels(models) - - return models + const summaries = response.data || [] + + const models: JanModel[] = await Promise.all( + summaries.map(async (summary) => { + const supportedParameters = await this.fetchSupportedParameters(summary.id) + const capabilities = this.deriveCapabilitiesFromParameters(supportedParameters) + + return { + id: summary.id, + object: summary.object, + owned_by: summary.owned_by, + created: summary.created, + capabilities, + supportedParameters, + } + }) + ) + + this.modelsCache = models + janProviderStore.setModels(models) + + return models + })() + + return await this.modelsFetchPromise } catch (error) { + this.modelsCache = null + this.modelsFetchPromise = null + const errorMessage = error instanceof ApiError ? error.message : error instanceof Error ? 
error.message : 'Failed to fetch models' janProviderStore.setError(errorMessage) janProviderStore.setLoadingModels(false) throw error + } finally { + this.modelsFetchPromise = null } } @@ -254,7 +312,7 @@ export class JanApiClient { async initialize(): Promise { try { janProviderStore.setAuthenticated(true) - // Fetch initial models + // Fetch initial models (cached for subsequent calls) await this.getModels() console.log('Jan API client initialized successfully') } catch (error) { @@ -266,6 +324,52 @@ export class JanApiClient { janProviderStore.setInitializing(false) } } + + private async fetchSupportedParameters(modelId: string): Promise { + try { + const endpoint = `${MENLO_PLATFORM_BASE_URL}${JAN_API_ROUTES.MODEL_CATALOGS}/${this.encodeModelIdForCatalog(modelId)}` + const catalog = await this.authService.makeAuthenticatedRequest(endpoint) + return this.extractSupportedParameters(catalog) + } catch (error) { + console.warn(`Failed to fetch catalog metadata for model "${modelId}":`, error) + return [] + } + } + + private encodeModelIdForCatalog(modelId: string): string { + return modelId + .split('/') + .map((segment) => encodeURIComponent(segment)) + .join('/') + } + + private extractSupportedParameters(catalog: JanModelCatalogResponse | null | undefined): string[] { + if (!catalog) { + return [] + } + + const primaryNames = catalog.supported_parameters?.names + if (Array.isArray(primaryNames) && primaryNames.length > 0) { + return [...new Set(primaryNames)] + } + + const extraNames = catalog.extras?.supported_parameters + if (Array.isArray(extraNames) && extraNames.length > 0) { + return [...new Set(extraNames)] + } + + return [] + } + + private deriveCapabilitiesFromParameters(parameters: string[]): string[] { + const capabilities = new Set() + + if (parameters.includes('tools')) { + capabilities.add('tools') + } + + return Array.from(capabilities) + } } export const janApiClient = JanApiClient.getInstance() diff --git a/extensions-web/src/jan-provider-web/const.ts b/extensions-web/src/jan-provider-web/const.ts new file mode 100644 index 000000000..8f691551d --- /dev/null +++ b/extensions-web/src/jan-provider-web/const.ts @@ -0,0 +1,7 @@ +export const JAN_API_ROUTES = { + MODELS: '/models', + CHAT_COMPLETIONS: '/chat/completions', + MODEL_CATALOGS: '/models/catalogs', +} as const + +export const MODEL_PROVIDER_STORAGE_KEY = 'model-provider' diff --git a/extensions-web/src/jan-provider-web/helpers.ts b/extensions-web/src/jan-provider-web/helpers.ts new file mode 100644 index 000000000..09edb9867 --- /dev/null +++ b/extensions-web/src/jan-provider-web/helpers.ts @@ -0,0 +1,122 @@ +import type { JanModel } from './store' +import { MODEL_PROVIDER_STORAGE_KEY } from './const' + +type StoredModel = { + id?: string + capabilities?: unknown + [key: string]: unknown +} + +type StoredProvider = { + provider?: string + models?: StoredModel[] + [key: string]: unknown +} + +type StoredState = { + state?: { + providers?: StoredProvider[] + [key: string]: unknown + } + version?: number + [key: string]: unknown +} + +const normalizeCapabilities = (capabilities: unknown): string[] => { + if (!Array.isArray(capabilities)) { + return [] + } + + return [...new Set(capabilities.filter((item): item is string => typeof item === 'string'))].sort( + (a, b) => a.localeCompare(b) + ) +} + +/** + * Synchronize Jan models stored in localStorage with the latest server state. + * Returns true if the stored data was modified (including being cleared). 
+ */ +export function syncJanModelsLocalStorage( + remoteModels: JanModel[], + storageKey: string = MODEL_PROVIDER_STORAGE_KEY +): boolean { + const rawStorage = localStorage.getItem(storageKey) + if (!rawStorage) { + return false + } + + let storedState: StoredState + try { + storedState = JSON.parse(rawStorage) as StoredState + } catch (error) { + console.warn('Failed to parse Jan model storage; clearing entry.', error) + localStorage.removeItem(storageKey) + return true + } + + const providers = storedState?.state?.providers + if (!Array.isArray(providers)) { + return false + } + + const remoteModelMap = new Map(remoteModels.map((model) => [model.id, model])) + let storageUpdated = false + + for (const provider of providers) { + if (provider.provider !== 'jan' || !Array.isArray(provider.models)) { + continue + } + + const updatedModels: StoredModel[] = [] + + for (const model of provider.models) { + const modelId = typeof model.id === 'string' ? model.id : null + if (!modelId) { + storageUpdated = true + continue + } + + const remoteModel = remoteModelMap.get(modelId) + if (!remoteModel) { + console.log(`Removing unknown Jan model from localStorage: ${modelId}`) + storageUpdated = true + continue + } + + const storedCapabilities = normalizeCapabilities(model.capabilities) + const remoteCapabilities = normalizeCapabilities(remoteModel.capabilities) + + const capabilitiesMatch = + storedCapabilities.length === remoteCapabilities.length && + storedCapabilities.every((cap, index) => cap === remoteCapabilities[index]) + + if (!capabilitiesMatch) { + console.log( + `Updating capabilities for Jan model ${modelId}:`, + storedCapabilities, + '=>', + remoteCapabilities + ) + updatedModels.push({ + ...model, + capabilities: remoteModel.capabilities, + }) + storageUpdated = true + } else { + updatedModels.push(model) + } + } + + if (updatedModels.length !== provider.models.length) { + storageUpdated = true + } + + provider.models = updatedModels + } + + if (storageUpdated) { + localStorage.setItem(storageKey, JSON.stringify(storedState)) + } + + return storageUpdated +} diff --git a/extensions-web/src/jan-provider-web/provider.ts b/extensions-web/src/jan-provider-web/provider.ts index 3375fd351..a535b0fa0 100644 --- a/extensions-web/src/jan-provider-web/provider.ts +++ b/extensions-web/src/jan-provider-web/provider.ts @@ -14,12 +14,10 @@ import { ImportOptions, } from '@janhq/core' // cspell: disable-line import { janApiClient, JanChatMessage } from './api' +import { syncJanModelsLocalStorage } from './helpers' import { janProviderStore } from './store' import { ApiError } from '../shared/types/errors' -// Jan models support tools via MCP -const JAN_MODEL_CAPABILITIES = ['tools'] as const - export default class JanProviderWeb extends AIEngine { readonly provider = 'jan' private activeSessions: Map = new Map() @@ -28,11 +26,11 @@ export default class JanProviderWeb extends AIEngine { console.log('Loading Jan Provider Extension...') try { - // Check and clear invalid Jan models (capabilities mismatch) - this.validateJanModelsLocalStorage() - - // Initialize authentication and fetch models + // Initialize authentication await janApiClient.initialize() + // Check and sync stored Jan models against latest catalog data + await this.validateJanModelsLocalStorage() + console.log('Jan Provider Extension loaded successfully') } catch (error) { console.error('Failed to load Jan Provider Extension:', error) @@ -43,46 +41,17 @@ export default class JanProviderWeb extends AIEngine { } // Verify Jan models 
capabilities in localStorage - private validateJanModelsLocalStorage() { + private async validateJanModelsLocalStorage(): Promise { try { - console.log("Validating Jan models in localStorage...") - const storageKey = 'model-provider' - const data = localStorage.getItem(storageKey) - if (!data) return + console.log('Validating Jan models in localStorage...') - const parsed = JSON.parse(data) - if (!parsed?.state?.providers) return + const remoteModels = await janApiClient.getModels() + const storageUpdated = syncJanModelsLocalStorage(remoteModels) - // Check if any Jan model has incorrect capabilities - let hasInvalidModel = false - - for (const provider of parsed.state.providers) { - if (provider.provider === 'jan' && provider.models) { - for (const model of provider.models) { - console.log(`Checking Jan model: ${model.id}`, model.capabilities) - if (JSON.stringify(model.capabilities) !== JSON.stringify(JAN_MODEL_CAPABILITIES)) { - hasInvalidModel = true - console.log(`Found invalid Jan model: ${model.id}, clearing localStorage`) - break - } - } - } - if (hasInvalidModel) break - } - - // If any invalid model found, just clear the storage - if (hasInvalidModel) { - // Force clear the storage - localStorage.removeItem(storageKey) - // Verify it's actually removed - const afterRemoval = localStorage.getItem(storageKey) - // If still present, try setting to empty state - if (afterRemoval) { - // Try alternative clearing method - localStorage.setItem(storageKey, JSON.stringify({ state: { providers: [] }, version: parsed.version || 3 })) - } - console.log('Cleared model-provider from localStorage due to invalid Jan capabilities') - // Force a page reload to ensure clean state + if (storageUpdated) { + console.log( + 'Synchronized Jan models in localStorage with server capabilities; reloading...' + ) window.location.reload() } } catch (error) { @@ -119,7 +88,7 @@ export default class JanProviderWeb extends AIEngine { path: undefined, // Remote model, no local path owned_by: model.owned_by, object: model.object, - capabilities: [...JAN_MODEL_CAPABILITIES], + capabilities: [...model.capabilities], } : undefined ) @@ -140,7 +109,7 @@ export default class JanProviderWeb extends AIEngine { path: undefined, // Remote model, no local path owned_by: model.owned_by, object: model.object, - capabilities: [...JAN_MODEL_CAPABILITIES], + capabilities: [...model.capabilities], })) } catch (error) { console.error('Failed to list Jan models:', error) @@ -159,6 +128,7 @@ export default class JanProviderWeb extends AIEngine { port: 443, // HTTPS port model_id: modelId, model_path: `remote:${modelId}`, // Indicate this is a remote model + is_embedding: false, // assume false here, TODO: might need further implementation api_key: '', // API key handled by auth service } @@ -193,8 +163,12 @@ export default class JanProviderWeb extends AIEngine { console.error(`Failed to unload Jan session ${sessionId}:`, error) return { success: false, - error: error instanceof ApiError ? error.message : - error instanceof Error ? error.message : 'Unknown error', + error: + error instanceof ApiError + ? error.message + : error instanceof Error + ? 
error.message + : 'Unknown error', } } } diff --git a/extensions-web/src/jan-provider-web/store.ts b/extensions-web/src/jan-provider-web/store.ts index 2ff341147..887a72246 100644 --- a/extensions-web/src/jan-provider-web/store.ts +++ b/extensions-web/src/jan-provider-web/store.ts @@ -9,6 +9,9 @@ export interface JanModel { id: string object: string owned_by: string + created?: number + capabilities: string[] + supportedParameters?: string[] } export interface JanProviderState { diff --git a/extensions-web/src/mcp-web/index.ts b/extensions-web/src/mcp-web/index.ts index 3d588753f..6545fd426 100644 --- a/extensions-web/src/mcp-web/index.ts +++ b/extensions-web/src/mcp-web/index.ts @@ -12,8 +12,8 @@ import { JanMCPOAuthProvider } from './oauth-provider' import { WebSearchButton } from './components' import type { ComponentType } from 'react' -// JAN_API_BASE is defined in vite.config.ts (defaults to 'https://api-dev.jan.ai/jan/v1') -declare const JAN_API_BASE: string +// MENLO_PLATFORM_BASE_URL is defined in vite.config.ts (defaults to 'https://api-dev.menlo.ai/jan/v1') +declare const MENLO_PLATFORM_BASE_URL: string export default class MCPExtensionWeb extends MCPExtension { private mcpEndpoint = '/mcp' @@ -77,7 +77,7 @@ export default class MCPExtensionWeb extends MCPExtension { // Create transport with OAuth provider (handles token refresh automatically) const transport = new StreamableHTTPClientTransport( - new URL(`${JAN_API_BASE}${this.mcpEndpoint}`), + new URL(`${MENLO_PLATFORM_BASE_URL}${this.mcpEndpoint}`), { authProvider: this.oauthProvider // No sessionId needed - server will generate one automatically diff --git a/extensions-web/src/shared/auth/api.ts b/extensions-web/src/shared/auth/api.ts index 61163984b..a14617190 100644 --- a/extensions-web/src/shared/auth/api.ts +++ b/extensions-web/src/shared/auth/api.ts @@ -6,13 +6,13 @@ import { AuthTokens } from './types' import { AUTH_ENDPOINTS } from './const' -declare const JAN_API_BASE: string +declare const MENLO_PLATFORM_BASE_URL: string /** * Logout user on server */ export async function logoutUser(): Promise { - const response = await fetch(`${JAN_API_BASE}${AUTH_ENDPOINTS.LOGOUT}`, { + const response = await fetch(`${MENLO_PLATFORM_BASE_URL}${AUTH_ENDPOINTS.LOGOUT}`, { method: 'GET', credentials: 'include', headers: { @@ -29,7 +29,7 @@ export async function logoutUser(): Promise { * Guest login */ export async function guestLogin(): Promise { - const response = await fetch(`${JAN_API_BASE}${AUTH_ENDPOINTS.GUEST_LOGIN}`, { + const response = await fetch(`${MENLO_PLATFORM_BASE_URL}${AUTH_ENDPOINTS.GUEST_LOGIN}`, { method: 'POST', credentials: 'include', headers: { @@ -51,7 +51,7 @@ export async function guestLogin(): Promise { */ export async function refreshToken(): Promise { const response = await fetch( - `${JAN_API_BASE}${AUTH_ENDPOINTS.REFRESH_TOKEN}`, + `${MENLO_PLATFORM_BASE_URL}${AUTH_ENDPOINTS.REFRESH_TOKEN}`, { method: 'GET', credentials: 'include', diff --git a/extensions-web/src/shared/auth/providers/api.ts b/extensions-web/src/shared/auth/providers/api.ts index f2830e911..f63b5a915 100644 --- a/extensions-web/src/shared/auth/providers/api.ts +++ b/extensions-web/src/shared/auth/providers/api.ts @@ -5,10 +5,10 @@ import { AuthTokens, LoginUrlResponse } from './types' -declare const JAN_API_BASE: string +declare const MENLO_PLATFORM_BASE_URL: string export async function getLoginUrl(endpoint: string): Promise { - const response: Response = await fetch(`${JAN_API_BASE}${endpoint}`, { + const response: Response = await 
fetch(`${MENLO_PLATFORM_BASE_URL}${endpoint}`, { method: 'GET', credentials: 'include', headers: { @@ -30,7 +30,7 @@ export async function handleOAuthCallback( code: string, state?: string ): Promise { - const response: Response = await fetch(`${JAN_API_BASE}${endpoint}`, { + const response: Response = await fetch(`${MENLO_PLATFORM_BASE_URL}${endpoint}`, { method: 'POST', headers: { 'Content-Type': 'application/json', diff --git a/extensions-web/src/shared/auth/service.ts b/extensions-web/src/shared/auth/service.ts index eb15c4893..fc12b5ffd 100644 --- a/extensions-web/src/shared/auth/service.ts +++ b/extensions-web/src/shared/auth/service.ts @@ -3,9 +3,9 @@ * Handles authentication flows for any OAuth provider */ -declare const JAN_API_BASE: string +declare const MENLO_PLATFORM_BASE_URL: string -import { User, AuthState, AuthBroadcastMessage } from './types' +import { User, AuthState, AuthBroadcastMessage, AuthTokens } from './types' import { AUTH_STORAGE_KEYS, AUTH_ENDPOINTS, @@ -115,7 +115,7 @@ export class JanAuthService { // Store tokens and set authenticated state this.accessToken = tokens.access_token - this.tokenExpiryTime = Date.now() + tokens.expires_in * 1000 + this.tokenExpiryTime = this.computeTokenExpiry(tokens) this.setAuthProvider(providerId) this.authBroadcast.broadcastLogin() @@ -158,7 +158,7 @@ export class JanAuthService { const tokens = await refreshToken() this.accessToken = tokens.access_token - this.tokenExpiryTime = Date.now() + tokens.expires_in * 1000 + this.tokenExpiryTime = this.computeTokenExpiry(tokens) } catch (error) { console.error('Failed to refresh access token:', error) if (error instanceof ApiError && error.isStatus(401)) { @@ -343,6 +343,23 @@ export class JanAuthService { localStorage.removeItem(AUTH_STORAGE_KEYS.AUTH_PROVIDER) } + private computeTokenExpiry(tokens: AuthTokens): number { + if (tokens.expires_at) { + const expiresAt = new Date(tokens.expires_at).getTime() + if (!Number.isNaN(expiresAt)) { + return expiresAt + } + console.warn('Invalid expires_at format in auth tokens:', tokens.expires_at) + } + + if (typeof tokens.expires_in === 'number') { + return Date.now() + tokens.expires_in * 1000 + } + + console.warn('Auth tokens missing expiry information; defaulting to immediate expiry') + return Date.now() + } + /** * Ensure guest access is available */ @@ -352,7 +369,7 @@ export class JanAuthService { if (!this.accessToken || Date.now() > this.tokenExpiryTime) { const tokens = await guestLogin() this.accessToken = tokens.access_token - this.tokenExpiryTime = Date.now() + tokens.expires_in * 1000 + this.tokenExpiryTime = this.computeTokenExpiry(tokens) } } catch (error) { console.error('Failed to ensure guest access:', error) @@ -387,7 +404,6 @@ export class JanAuthService { case AUTH_EVENTS.LOGOUT: // Another tab logged out, clear our state this.clearAuthState() - this.ensureGuestAccess().catch(console.error) break } }) @@ -413,7 +429,7 @@ export class JanAuthService { private async fetchUserProfile(): Promise { try { return await this.makeAuthenticatedRequest( - `${JAN_API_BASE}${AUTH_ENDPOINTS.ME}` + `${MENLO_PLATFORM_BASE_URL}${AUTH_ENDPOINTS.ME}` ) } catch (error) { console.error('Failed to fetch user profile:', error) diff --git a/extensions-web/src/shared/auth/types.ts b/extensions-web/src/shared/auth/types.ts index 65f2dd06a..3e5df6e3c 100644 --- a/extensions-web/src/shared/auth/types.ts +++ b/extensions-web/src/shared/auth/types.ts @@ -16,7 +16,8 @@ export type AuthType = ProviderType | 'guest' export interface AuthTokens { 
access_token: string - expires_in: number + expires_in?: number + expires_at?: string object: string } diff --git a/extensions-web/src/types/global.d.ts b/extensions-web/src/types/global.d.ts index 8d70d398b..22dea22d0 100644 --- a/extensions-web/src/types/global.d.ts +++ b/extensions-web/src/types/global.d.ts @@ -1,5 +1,5 @@ export {} declare global { - declare const JAN_API_BASE: string + declare const MENLO_PLATFORM_BASE_URL: string } diff --git a/extensions-web/vite.config.ts b/extensions-web/vite.config.ts index 8c144b0ab..b68fc4d5a 100644 --- a/extensions-web/vite.config.ts +++ b/extensions-web/vite.config.ts @@ -14,6 +14,6 @@ export default defineConfig({ emptyOutDir: false // Don't clean the output directory }, define: { - JAN_API_BASE: JSON.stringify(process.env.JAN_API_BASE || 'https://api-dev.jan.ai/v1'), + MENLO_PLATFORM_BASE_URL: JSON.stringify(process.env.MENLO_PLATFORM_BASE_URL || 'https://api-dev.menlo.ai/v1'), } }) diff --git a/extensions/llamacpp-extension/src/backend.ts b/extensions/llamacpp-extension/src/backend.ts index a313e01c6..bd0543227 100644 --- a/extensions/llamacpp-extension/src/backend.ts +++ b/extensions/llamacpp-extension/src/backend.ts @@ -156,8 +156,13 @@ export async function listSupportedBackends(): Promise< supportedBackends.push('macos-arm64') } // get latest backends from Github - const remoteBackendVersions = + let remoteBackendVersions = [] + try { + remoteBackendVersions = await fetchRemoteSupportedBackends(supportedBackends) + } catch (e) { + console.debug(`Unable to fetch remote backends; Jan may be offline or there may be a network problem: ${String(e)}`) + } // Get locally installed versions const localBackendVersions = await getLocalInstalledBackends() diff --git a/extensions/llamacpp-extension/src/index.ts b/extensions/llamacpp-extension/src/index.ts index 07d49cd53..8d4f277b6 100644 --- a/extensions/llamacpp-extension/src/index.ts +++ b/extensions/llamacpp-extension/src/index.ts @@ -39,7 +39,6 @@ import { getProxyConfig } from './util' import { basename } from '@tauri-apps/api/path' import { readGgufMetadata, - estimateKVCacheSize, getModelSize, isModelSupported, planModelLoadInternal, @@ -58,6 +57,8 @@ type LlamacppConfig = { chat_template: string n_gpu_layers: number offload_mmproj: boolean + cpu_moe: boolean + n_cpu_moe: number override_tensor_buffer_t: string ctx_size: number threads: number @@ -1527,6 +1528,7 @@ export default class llamacpp_extension extends AIEngine { if ( this.autoUnload && + !isEmbedding && (loadedModels.length > 0 || otherLoadingPromises.length > 0) ) { // Wait for OTHER loading models to finish, then unload everything @@ -1534,10 +1536,33 @@ await Promise.all(otherLoadingPromises) } - // Now unload all loaded models + // Now unload all loaded text models, excluding embedding models const allLoadedModels = await this.getLoadedModels() if (allLoadedModels.length > 0) { - await Promise.all(allLoadedModels.map((model) => this.unload(model))) + const sessionInfos: (SessionInfo | null)[] = await Promise.all( + allLoadedModels.map(async (modelId) => { + try { + return await this.findSessionByModel(modelId) + } catch (e) { + logger.warn(`Unable to find session for model "${modelId}": ${e}`) + return null // treat as "not eligible for unload" + } + }) + ) + + logger.info(JSON.stringify(sessionInfos)) + + const nonEmbeddingModels: string[] = sessionInfos + .filter( + (s): s is SessionInfo => s !== null && s.is_embedding === false + ) + .map((s) => s.model_id) + + if 
(nonEmbeddingModels.length > 0) { + await Promise.all( + nonEmbeddingModels.map((modelId) => this.unload(modelId)) + ) + } } } const args: string[] = [] @@ -1581,6 +1606,10 @@ export default class llamacpp_extension extends AIEngine { ]) args.push('--jinja') args.push('-m', modelPath) + if (cfg.cpu_moe) args.push('--cpu-moe') + if (cfg.n_cpu_moe && cfg.n_cpu_moe > 0) { + args.push('--n-cpu-moe', String(cfg.n_cpu_moe)) + } // For overriding tensor buffer type, useful where // massive MOE models can be made faster by keeping attention on the GPU // and offloading the expert FFNs to the CPU. @@ -1631,7 +1660,7 @@ export default class llamacpp_extension extends AIEngine { if (cfg.no_kv_offload) args.push('--no-kv-offload') if (isEmbedding) { args.push('--embedding') - args.push('--pooling mean') + args.push('--pooling', 'mean') } else { if (cfg.ctx_size > 0) args.push('--ctx-size', String(cfg.ctx_size)) if (cfg.n_predict > 0) args.push('--n-predict', String(cfg.n_predict)) @@ -1670,6 +1699,7 @@ export default class llamacpp_extension extends AIEngine { libraryPath, args, envs, + isEmbedding, } ) return sInfo @@ -2005,6 +2035,69 @@ export default class llamacpp_extension extends AIEngine { libraryPath, envs, }) + // On Linux with AMD GPUs, llama.cpp via Vulkan may report UMA (shared) memory as device-local. + // For clearer UX, override with dedicated VRAM from the hardware plugin when available. + try { + const sysInfo = await getSystemInfo() + if (sysInfo?.os_type === 'linux' && Array.isArray(sysInfo.gpus)) { + const usage = await getSystemUsage() + if (usage && Array.isArray(usage.gpus)) { + const uuidToUsage: Record = {} + for (const u of usage.gpus as any[]) { + if (u && typeof u.uuid === 'string') { + uuidToUsage[u.uuid] = u + } + } + + const indexToAmdUuid = new Map() + for (const gpu of sysInfo.gpus as any[]) { + const vendorStr = + typeof gpu?.vendor === 'string' + ? gpu.vendor + : typeof gpu?.vendor === 'object' && gpu.vendor !== null + ? 
String(gpu.vendor) + : '' + if ( + vendorStr.toUpperCase().includes('AMD') && + gpu?.vulkan_info && + typeof gpu.vulkan_info.index === 'number' && + typeof gpu.uuid === 'string' + ) { + indexToAmdUuid.set(gpu.vulkan_info.index, gpu.uuid) + } + } + + if (indexToAmdUuid.size > 0) { + const adjusted = dList.map((dev) => { + if (dev.id?.startsWith('Vulkan')) { + const match = /^Vulkan(\d+)/.exec(dev.id) + if (match) { + const vIdx = Number(match[1]) + const uuid = indexToAmdUuid.get(vIdx) + if (uuid) { + const u = uuidToUsage[uuid] + if ( + u && + typeof u.total_memory === 'number' && + typeof u.used_memory === 'number' + ) { + const total = Math.max(0, Math.floor(u.total_memory)) + const free = Math.max(0, Math.floor(u.total_memory - u.used_memory)) + return { ...dev, mem: total, free } + } + } + } + } + return dev + }) + return adjusted + } + } + } + } catch (e) { + logger.warn('Device memory override (AMD/Linux) failed:', e) + } + return dList } catch (error) { logger.error('Failed to query devices:\n', error) @@ -2013,6 +2106,7 @@ export default class llamacpp_extension extends AIEngine { } async embed(text: string[]): Promise { + // Ensure the sentence-transformer model is present let sInfo = await this.findSessionByModel('sentence-transformer-mini') if (!sInfo) { const downloadedModelList = await this.list() @@ -2026,30 +2120,45 @@ export default class llamacpp_extension extends AIEngine { 'https://huggingface.co/second-state/All-MiniLM-L6-v2-Embedding-GGUF/resolve/main/all-MiniLM-L6-v2-ggml-model-f16.gguf?download=true', }) } - sInfo = await this.load('sentence-transformer-mini') + // Load specifically in embedding mode + sInfo = await this.load('sentence-transformer-mini', undefined, true) } - const baseUrl = `http://localhost:${sInfo.port}/v1/embeddings` - const headers = { - 'Content-Type': 'application/json', - 'Authorization': `Bearer ${sInfo.api_key}`, + + const attemptRequest = async (session: SessionInfo) => { + const baseUrl = `http://localhost:${session.port}/v1/embeddings` + const headers = { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${session.api_key}`, + } + const body = JSON.stringify({ + input: text, + model: session.model_id, + encoding_format: 'float', + }) + const response = await fetch(baseUrl, { + method: 'POST', + headers, + body, + }) + return response + } + + // First try with the existing session (may have been started without --embedding previously) + let response = await attemptRequest(sInfo) + + // If embeddings endpoint is not available (501), reload with embedding mode and retry once + if (response.status === 501) { + try { + await this.unload('sentence-transformer-mini') + } catch {} + sInfo = await this.load('sentence-transformer-mini', undefined, true) + response = await attemptRequest(sInfo) } - const body = JSON.stringify({ - input: text, - model: sInfo.model_id, - encoding_format: 'float', - }) - const response = await fetch(baseUrl, { - method: 'POST', - headers, - body, - }) if (!response.ok) { const errorData = await response.json().catch(() => null) throw new Error( - `API request failed with status ${response.status}: ${JSON.stringify( - errorData - )}` + `API request failed with status ${response.status}: ${JSON.stringify(errorData)}` ) } const responseData = await response.json() @@ -2151,7 +2260,12 @@ export default class llamacpp_extension extends AIEngine { if (mmprojPath && !this.isAbsolutePath(mmprojPath)) mmprojPath = await joinPath([await getJanDataFolderPath(), path]) try { - const result = await 
planModelLoadInternal(path, this.memoryMode, mmprojPath, requestedCtx) + const result = await planModelLoadInternal( + path, + this.memoryMode, + mmprojPath, + requestedCtx + ) return result } catch (e) { throw new Error(String(e)) @@ -2279,12 +2393,18 @@ export default class llamacpp_extension extends AIEngine { } // Calculate text tokens - const messages = JSON.stringify({ messages: opts.messages }) + // Use chat_template_kwargs from opts if provided, otherwise default to disable enable_thinking + const tokenizeRequest = { + messages: opts.messages, + chat_template_kwargs: opts.chat_template_kwargs || { + enable_thinking: false, + }, + } let parseResponse = await fetch(`${baseUrl}/apply-template`, { method: 'POST', headers: headers, - body: messages, + body: JSON.stringify(tokenizeRequest), }) if (!parseResponse.ok) { diff --git a/extensions/rag-extension/package.json b/extensions/rag-extension/package.json new file mode 100644 index 000000000..5634d5416 --- /dev/null +++ b/extensions/rag-extension/package.json @@ -0,0 +1,33 @@ +{ + "name": "@janhq/rag-extension", + "productName": "RAG Tools", + "version": "0.1.0", + "description": "Registers RAG tools and orchestrates retrieval across parser, embeddings, and vector DB", + "main": "dist/index.js", + "module": "dist/module.js", + "author": "Jan ", + "license": "AGPL-3.0", + "scripts": { + "build": "rolldown -c rolldown.config.mjs", + "build:publish": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install" + }, + "devDependencies": { + "cpx": "1.5.0", + "rimraf": "6.0.1", + "rolldown": "1.0.0-beta.1", + "typescript": "5.9.2" + }, + "dependencies": { + "@janhq/core": "../../core/package.tgz", + "@janhq/tauri-plugin-rag-api": "link:../../src-tauri/plugins/tauri-plugin-rag", + "@janhq/tauri-plugin-vector-db-api": "link:../../src-tauri/plugins/tauri-plugin-vector-db" + }, + "files": [ + "dist/*", + "package.json" + ], + "installConfig": { + "hoistingLimits": "workspaces" + }, + "packageManager": "yarn@4.5.3" +} diff --git a/extensions/rag-extension/rolldown.config.mjs b/extensions/rag-extension/rolldown.config.mjs new file mode 100644 index 000000000..e9b190546 --- /dev/null +++ b/extensions/rag-extension/rolldown.config.mjs @@ -0,0 +1,14 @@ +import { defineConfig } from 'rolldown' +import settingJson from './settings.json' with { type: 'json' } + +export default defineConfig({ + input: 'src/index.ts', + output: { + format: 'esm', + file: 'dist/index.js', + }, + platform: 'browser', + define: { + SETTINGS: JSON.stringify(settingJson), + }, +}) diff --git a/extensions/rag-extension/settings.json b/extensions/rag-extension/settings.json new file mode 100644 index 000000000..1462db771 --- /dev/null +++ b/extensions/rag-extension/settings.json @@ -0,0 +1,58 @@ +[ + { + "key": "enabled", + "titleKey": "settings:attachments.enable", + "descriptionKey": "settings:attachments.enableDesc", + "controllerType": "checkbox", + "controllerProps": { "value": true } + }, + { + "key": "max_file_size_mb", + "titleKey": "settings:attachments.maxFile", + "descriptionKey": "settings:attachments.maxFileDesc", + "controllerType": "input", + "controllerProps": { "value": 20, "type": "number", "min": 1, "max": 200, "step": 1, "textAlign": "right" } + }, + { + "key": "retrieval_limit", + "titleKey": "settings:attachments.topK", + "descriptionKey": "settings:attachments.topKDesc", + "controllerType": "input", + "controllerProps": { "value": 3, "type": "number", "min": 1, "max": 20, "step": 1, "textAlign": "right" } + }, + { + "key": 
"retrieval_threshold", + "titleKey": "settings:attachments.threshold", + "descriptionKey": "settings:attachments.thresholdDesc", + "controllerType": "input", + "controllerProps": { "value": 0.3, "type": "number", "min": 0, "max": 1, "step": 0.01, "textAlign": "right" } + }, + { + "key": "chunk_size_tokens", + "titleKey": "settings:attachments.chunkSize", + "descriptionKey": "settings:attachments.chunkSizeDesc", + "controllerType": "input", + "controllerProps": { "value": 512, "type": "number", "min": 64, "max": 8192, "step": 64, "textAlign": "right" } + }, + { + "key": "overlap_tokens", + "titleKey": "settings:attachments.chunkOverlap", + "descriptionKey": "settings:attachments.chunkOverlapDesc", + "controllerType": "input", + "controllerProps": { "value": 64, "type": "number", "min": 0, "max": 1024, "step": 16, "textAlign": "right" } + }, + { + "key": "search_mode", + "titleKey": "settings:attachments.searchMode", + "descriptionKey": "settings:attachments.searchModeDesc", + "controllerType": "dropdown", + "controllerProps": { + "value": "auto", + "options": [ + { "name": "Auto (recommended)", "value": "auto" }, + { "name": "ANN (sqlite-vec)", "value": "ann" }, + { "name": "Linear", "value": "linear" } + ] + } + } +] diff --git a/extensions/rag-extension/src/env.d.ts b/extensions/rag-extension/src/env.d.ts new file mode 100644 index 000000000..512ce0505 --- /dev/null +++ b/extensions/rag-extension/src/env.d.ts @@ -0,0 +1,5 @@ +import type { SettingComponentProps } from '@janhq/core' +declare global { + const SETTINGS: SettingComponentProps[] +} +export {} diff --git a/extensions/rag-extension/src/global.d.ts b/extensions/rag-extension/src/global.d.ts new file mode 100644 index 000000000..f6fa6968e --- /dev/null +++ b/extensions/rag-extension/src/global.d.ts @@ -0,0 +1,14 @@ +import type { BaseExtension, ExtensionTypeEnum } from '@janhq/core' + +declare global { + interface Window { + core?: { + extensionManager: { + get(type: ExtensionTypeEnum): T | undefined + getByName(name: string): BaseExtension | undefined + } + } + } +} + +export {} diff --git a/extensions/rag-extension/src/index.ts b/extensions/rag-extension/src/index.ts new file mode 100644 index 000000000..4a2edc2ea --- /dev/null +++ b/extensions/rag-extension/src/index.ts @@ -0,0 +1,305 @@ +import { RAGExtension, MCPTool, MCPToolCallResult, ExtensionTypeEnum, VectorDBExtension, type AttachmentInput, type SettingComponentProps, AIEngine, type AttachmentFileInfo } from '@janhq/core' +import './env.d' +import { getRAGTools, RETRIEVE, LIST_ATTACHMENTS, GET_CHUNKS } from './tools' + +export default class RagExtension extends RAGExtension { + private config = { + enabled: true, + retrievalLimit: 3, + retrievalThreshold: 0.3, + chunkSizeTokens: 512, + overlapTokens: 64, + searchMode: 'auto' as 'auto' | 'ann' | 'linear', + maxFileSizeMB: 20, + } + + async onLoad(): Promise { + const settings = structuredClone(SETTINGS) as SettingComponentProps[] + await this.registerSettings(settings) + this.config.enabled = await this.getSetting('enabled', this.config.enabled) + this.config.maxFileSizeMB = await this.getSetting('max_file_size_mb', this.config.maxFileSizeMB) + this.config.retrievalLimit = await this.getSetting('retrieval_limit', this.config.retrievalLimit) + this.config.retrievalThreshold = await this.getSetting('retrieval_threshold', this.config.retrievalThreshold) + this.config.chunkSizeTokens = await this.getSetting('chunk_size_tokens', this.config.chunkSizeTokens) + this.config.overlapTokens = await this.getSetting('overlap_tokens', 
this.config.overlapTokens) + this.config.searchMode = await this.getSetting('search_mode', this.config.searchMode) + + // Check ANN availability on load + try { + const vec = window.core?.extensionManager.get(ExtensionTypeEnum.VectorDB) as unknown as VectorDBExtension + if (vec?.getStatus) { + const status = await vec.getStatus() + console.log('[RAG] Vector DB ANN support:', status.ann_available ? '✓ AVAILABLE' : '✗ NOT AVAILABLE') + if (!status.ann_available) { + console.warn('[RAG] Warning: sqlite-vec not loaded. Collections will use slower linear search.') + } + } + } catch (e) { + console.error('[RAG] Failed to check ANN status:', e) + } + } + + onUnload(): void {} + + async getTools(): Promise { + return getRAGTools(this.config.retrievalLimit) + } + + async getToolNames(): Promise { + // Keep this in sync with getTools() but without building full schemas + return [LIST_ATTACHMENTS, RETRIEVE, GET_CHUNKS] + } + + async callTool(toolName: string, args: Record): Promise { + switch (toolName) { + case LIST_ATTACHMENTS: + return this.listAttachments(args) + case RETRIEVE: + return this.retrieve(args) + case GET_CHUNKS: + return this.getChunks(args) + default: + return { + error: `Unknown tool: ${toolName}`, + content: [{ type: 'text', text: `Unknown tool: ${toolName}` }], + } + } + } + + private async listAttachments(args: Record): Promise { + const threadId = String(args['thread_id'] || '') + if (!threadId) { + return { error: 'Missing thread_id', content: [{ type: 'text', text: 'Missing thread_id' }] } + } + try { + const vec = window.core?.extensionManager.get(ExtensionTypeEnum.VectorDB) as unknown as VectorDBExtension + if (!vec?.listAttachments) { + return { error: 'Vector DB extension missing listAttachments', content: [{ type: 'text', text: 'Vector DB extension missing listAttachments' }] } + } + const files = await vec.listAttachments(threadId) + return { + error: '', + content: [ + { + type: 'text', + text: JSON.stringify({ thread_id: threadId, attachments: files || [] }), + }, + ], + } + } catch (e) { + const msg = e instanceof Error ? e.message : String(e) + return { error: msg, content: [{ type: 'text', text: `List attachments failed: ${msg}` }] } + } + } + + private async retrieve(args: Record): Promise { + const threadId = String(args['thread_id'] || '') + const query = String(args['query'] || '') + const fileIds = args['file_ids'] as string[] | undefined + + const s = this.config + const topK = (args['top_k'] as number) || s.retrievalLimit || 3 + const threshold = s.retrievalThreshold ?? 0.3 + const mode: 'auto' | 'ann' | 'linear' = s.searchMode || 'auto' + + if (s.enabled === false) { + return { + error: 'Attachments feature disabled', + content: [ + { + type: 'text', + text: 'Attachments are disabled in Settings. 
Enable them to use retrieval.', + }, + ], + } + } + if (!threadId || !query) { + return { + error: 'Missing thread_id or query', + content: [{ type: 'text', text: 'Missing required parameters' }], + } + } + + try { + // Resolve extensions + const vec = window.core?.extensionManager.get(ExtensionTypeEnum.VectorDB) as unknown as VectorDBExtension + if (!vec?.searchCollection) { + return { + error: 'RAG dependencies not available', + content: [ + { type: 'text', text: 'Vector DB extension not available' }, + ], + } + } + + const queryEmb = (await this.embedTexts([query]))?.[0] + if (!queryEmb) { + return { + error: 'Failed to compute embeddings', + content: [{ type: 'text', text: 'Failed to compute embeddings' }], + } + } + + const results = await vec.searchCollection( + threadId, + queryEmb, + topK, + threshold, + mode, + fileIds + ) + + const payload = { + thread_id: threadId, + query, + citations: results?.map((r: any) => ({ + id: r.id, + text: r.text, + score: r.score, + file_id: r.file_id, + chunk_file_order: r.chunk_file_order + })) ?? [], + mode, + } + return { error: '', content: [{ type: 'text', text: JSON.stringify(payload) }] } + } catch (e) { + console.error('[RAG] Retrieve error:', e) + let msg = 'Unknown error' + if (e instanceof Error) { + msg = e.message + } else if (typeof e === 'string') { + msg = e + } else if (e && typeof e === 'object') { + msg = JSON.stringify(e) + } + return { error: msg, content: [{ type: 'text', text: `Retrieve failed: ${msg}` }] } + } + } + + private async getChunks(args: Record): Promise { + const threadId = String(args['thread_id'] || '') + const fileId = String(args['file_id'] || '') + const startOrder = args['start_order'] as number | undefined + const endOrder = args['end_order'] as number | undefined + + if (!threadId || !fileId || startOrder === undefined || endOrder === undefined) { + return { + error: 'Missing thread_id, file_id, start_order, or end_order', + content: [{ type: 'text', text: 'Missing required parameters' }], + } + } + + try { + const vec = window.core?.extensionManager.get(ExtensionTypeEnum.VectorDB) as unknown as VectorDBExtension + if (!vec?.getChunks) { + return { + error: 'Vector DB extension not available', + content: [{ type: 'text', text: 'Vector DB extension not available' }], + } + } + + const chunks = await vec.getChunks(threadId, fileId, startOrder, endOrder) + + const payload = { + thread_id: threadId, + file_id: fileId, + chunks: chunks || [], + } + return { error: '', content: [{ type: 'text', text: JSON.stringify(payload) }] } + } catch (e) { + const msg = e instanceof Error ? e.message : String(e) + return { error: msg, content: [{ type: 'text', text: `Get chunks failed: ${msg}` }] } + } + } + + // Desktop-only ingestion by file paths + async ingestAttachments( + threadId: string, + files: AttachmentInput[] + ): Promise<{ filesProcessed: number; chunksInserted: number; files: AttachmentFileInfo[] }> { + if (!threadId || !Array.isArray(files) || files.length === 0) { + return { filesProcessed: 0, chunksInserted: 0, files: [] } + } + + // Respect feature flag: do nothing when disabled + if (this.config.enabled === false) { + return { filesProcessed: 0, chunksInserted: 0, files: [] } + } + + const vec = window.core?.extensionManager.get(ExtensionTypeEnum.VectorDB) as unknown as VectorDBExtension + if (!vec?.createCollection || !vec?.insertChunks) { + throw new Error('Vector DB extension not available') + } + + // Load settings + const s = this.config + const maxSize = (s?.enabled === false ? 
0 : s?.maxFileSizeMB) || undefined + const chunkSize = s?.chunkSizeTokens as number | undefined + const chunkOverlap = s?.overlapTokens as number | undefined + + let totalChunks = 0 + const processedFiles: AttachmentFileInfo[] = [] + + for (const f of files) { + if (!f?.path) continue + if (maxSize && f.size && f.size > maxSize * 1024 * 1024) { + throw new Error(`File '${f.name}' exceeds size limit (${f.size} bytes > ${maxSize} MB).`) + } + + const fileName = f.name || f.path.split(/[\\/]/).pop() + // Preferred/required path: let Vector DB extension handle full file ingestion + const canIngestFile = typeof (vec as any)?.ingestFile === 'function' + if (!canIngestFile) { + console.error('[RAG] Vector DB extension missing ingestFile; cannot ingest document') + continue + } + const info = await (vec as VectorDBExtension).ingestFile( + threadId, + { path: f.path, name: fileName, type: f.type, size: f.size }, + { chunkSize: chunkSize ?? 512, chunkOverlap: chunkOverlap ?? 64 } + ) + totalChunks += Number(info?.chunk_count || 0) + processedFiles.push(info) + } + + // Return files we ingested with real IDs directly from ingestFile + return { filesProcessed: processedFiles.length, chunksInserted: totalChunks, files: processedFiles } + } + + onSettingUpdate(key: string, value: T): void { + switch (key) { + case 'enabled': + this.config.enabled = Boolean(value) + break + case 'max_file_size_mb': + this.config.maxFileSizeMB = Number(value) + break + case 'retrieval_limit': + this.config.retrievalLimit = Number(value) + break + case 'retrieval_threshold': + this.config.retrievalThreshold = Number(value) + break + case 'chunk_size_tokens': + this.config.chunkSizeTokens = Number(value) + break + case 'overlap_tokens': + this.config.overlapTokens = Number(value) + break + case 'search_mode': + this.config.searchMode = String(value) as 'auto' | 'ann' | 'linear' + break + } + } + + // Locally implement embedding logic (previously in embeddings-extension) + private async embedTexts(texts: string[]): Promise { + const llm = window.core?.extensionManager.getByName('@janhq/llamacpp-extension') as AIEngine & { embed?: (texts: string[]) => Promise<{ data: Array<{ embedding: number[]; index: number }> }> } + if (!llm?.embed) throw new Error('llamacpp extension not available') + const res = await llm.embed(texts) + const data: Array<{ embedding: number[]; index: number }> = res?.data || [] + const out: number[][] = new Array(texts.length) + for (const item of data) out[item.index] = item.embedding + return out + } +} diff --git a/extensions/rag-extension/src/tools.ts b/extensions/rag-extension/src/tools.ts new file mode 100644 index 000000000..a881891b4 --- /dev/null +++ b/extensions/rag-extension/src/tools.ts @@ -0,0 +1,58 @@ +import { MCPTool, RAG_INTERNAL_SERVER } from '@janhq/core' + +// Tool names +export const RETRIEVE = 'retrieve' +export const LIST_ATTACHMENTS = 'list_attachments' +export const GET_CHUNKS = 'get_chunks' + +export function getRAGTools(retrievalLimit: number): MCPTool[] { + const maxTopK = Math.max(1, Number(retrievalLimit ?? 3)) + + return [ + { + name: LIST_ATTACHMENTS, + description: + 'List files attached to the current thread. Thread is inferred automatically; you may optionally provide {"scope":"thread"}. 
Returns basic file info (name/path).', + inputSchema: { + type: 'object', + properties: { + scope: { type: 'string', enum: ['thread'], description: 'Retrieval scope; currently only thread is supported' }, + }, + required: ['scope'], + }, + server: RAG_INTERNAL_SERVER, + }, + { + name: RETRIEVE, + description: + 'Retrieve relevant snippets from locally attached, indexed documents. Use query only; do not pass raw document content. Thread context is inferred automatically; you may optionally provide {"scope":"thread"}. Use file_ids to search within specific files only.', + inputSchema: { + type: 'object', + properties: { + query: { type: 'string', description: 'User query to search for' }, + top_k: { type: 'number', description: 'Optional: Max citations to return. Adjust as needed.', minimum: 1, maximum: maxTopK, default: retrievalLimit ?? 3 }, + scope: { type: 'string', enum: ['thread'], description: 'Retrieval scope; currently only thread is supported' }, + file_ids: { type: 'array', items: { type: 'string' }, description: 'Optional: Filter search to specific file IDs from list_attachments' }, + }, + required: ['query', 'scope'], + }, + server: RAG_INTERNAL_SERVER, + }, + { + name: GET_CHUNKS, + description: + 'Retrieve chunks from a file by their order range. For a single chunk, use start_order = end_order. Thread context is inferred automatically; you may optionally provide {"scope":"thread"}. Use sparingly; intended for advanced usage. Prefer using retrieve instead for relevance-based fetching.', + inputSchema: { + type: 'object', + properties: { + file_id: { type: 'string', description: 'File ID from list_attachments' }, + start_order: { type: 'number', description: 'Start of chunk range (inclusive, 0-indexed)' }, + end_order: { type: 'number', description: 'End of chunk range (inclusive, 0-indexed). For single chunk, use start_order = end_order.' 
}, + scope: { type: 'string', enum: ['thread'], description: 'Retrieval scope; currently only thread is supported' }, + }, + required: ['file_id', 'start_order', 'end_order', 'scope'], + }, + server: RAG_INTERNAL_SERVER, + }, + ] +} diff --git a/extensions/vector-db-extension/package.json b/extensions/vector-db-extension/package.json new file mode 100644 index 000000000..ba3a8d439 --- /dev/null +++ b/extensions/vector-db-extension/package.json @@ -0,0 +1,33 @@ +{ + "name": "@janhq/vector-db-extension", + "productName": "Vector DB", + "version": "0.1.0", + "description": "Vector DB integration using sqlite-vec if available with linear fallback", + "main": "dist/index.js", + "module": "dist/module.js", + "author": "Jan ", + "license": "AGPL-3.0", + "scripts": { + "build": "rolldown -c rolldown.config.mjs", + "build:publish": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install" + }, + "devDependencies": { + "cpx": "1.5.0", + "rimraf": "6.0.1", + "rolldown": "1.0.0-beta.1", + "typescript": "5.9.2" + }, + "dependencies": { + "@janhq/core": "../../core/package.tgz", + "@janhq/tauri-plugin-rag-api": "link:../../src-tauri/plugins/tauri-plugin-rag", + "@janhq/tauri-plugin-vector-db-api": "link:../../src-tauri/plugins/tauri-plugin-vector-db" + }, + "files": [ + "dist/*", + "package.json" + ], + "installConfig": { + "hoistingLimits": "workspaces" + }, + "packageManager": "yarn@4.5.3" +} diff --git a/extensions/vector-db-extension/rolldown.config.mjs b/extensions/vector-db-extension/rolldown.config.mjs new file mode 100644 index 000000000..7c4b94017 --- /dev/null +++ b/extensions/vector-db-extension/rolldown.config.mjs @@ -0,0 +1,11 @@ +import { defineConfig } from 'rolldown' + +export default defineConfig({ + input: 'src/index.ts', + output: { + format: 'esm', + file: 'dist/index.js', + }, + platform: 'browser', + define: {}, +}) diff --git a/extensions/vector-db-extension/src/index.ts b/extensions/vector-db-extension/src/index.ts new file mode 100644 index 000000000..f55710f07 --- /dev/null +++ b/extensions/vector-db-extension/src/index.ts @@ -0,0 +1,107 @@ +import { VectorDBExtension, type SearchMode, type VectorDBStatus, type VectorChunkInput, type VectorSearchResult, type AttachmentFileInfo, type VectorDBFileInput, type VectorDBIngestOptions, AIEngine } from '@janhq/core' +import * as vecdb from '@janhq/tauri-plugin-vector-db-api' +import * as ragApi from '@janhq/tauri-plugin-rag-api' + +export default class VectorDBExt extends VectorDBExtension { + async onLoad(): Promise { + // no-op + } + + onUnload(): void {} + + async getStatus(): Promise { + return await vecdb.getStatus() as VectorDBStatus + } + + private collectionForThread(threadId: string): string { + return `attachments_${threadId}` + } + + async createCollection(threadId: string, dimension: number): Promise { + return await vecdb.createCollection(this.collectionForThread(threadId), dimension) + } + + async insertChunks(threadId: string, fileId: string, chunks: VectorChunkInput[]): Promise { + return await vecdb.insertChunks(this.collectionForThread(threadId), fileId, chunks) + } + + async searchCollection( + threadId: string, + query_embedding: number[], + limit: number, + threshold: number, + mode?: SearchMode, + fileIds?: string[] + ): Promise { + return await vecdb.searchCollection(this.collectionForThread(threadId), query_embedding, limit, threshold, mode, fileIds) as VectorSearchResult[] + } + + async deleteChunks(threadId: string, ids: string[]): Promise { + return await 
vecdb.deleteChunks(this.collectionForThread(threadId), ids) + } + + async deleteCollection(threadId: string): Promise { + return await vecdb.deleteCollection(this.collectionForThread(threadId)) + } + + // Optional helper for chunking + private async chunkText(text: string, chunkSize: number, chunkOverlap: number): Promise { + return await vecdb.chunkText(text, chunkSize, chunkOverlap) + } + + private async embedTexts(texts: string[]): Promise { + const llm = window.core?.extensionManager.getByName('@janhq/llamacpp-extension') as AIEngine & { embed?: (texts: string[]) => Promise<{ data: Array<{ embedding: number[]; index: number }> }> } + if (!llm?.embed) throw new Error('llamacpp extension not available') + const res = await llm.embed(texts) + const data: Array<{ embedding: number[]; index: number }> = res?.data || [] + const out: number[][] = new Array(texts.length) + for (const item of data) out[item.index] = item.embedding + return out + } + + async ingestFile(threadId: string, file: VectorDBFileInput, opts: VectorDBIngestOptions): Promise { + // Check for duplicate file (same name + path) + const existingFiles = await vecdb.listAttachments(this.collectionForThread(threadId)).catch(() => []) + const duplicate = existingFiles.find((f: any) => f.name === file.name && f.path === file.path) + if (duplicate) { + throw new Error(`File '${file.name}' has already been attached to this thread`) + } + + const text = await ragApi.parseDocument(file.path, file.type || 'application/octet-stream') + const chunks = await this.chunkText(text, opts.chunkSize, opts.chunkOverlap) + if (!chunks.length) { + const fi = await vecdb.createFile(this.collectionForThread(threadId), file) + return fi + } + const embeddings = await this.embedTexts(chunks) + const dimension = embeddings[0]?.length || 0 + if (dimension <= 0) throw new Error('Embedding dimension not available') + await this.createCollection(threadId, dimension) + const fi = await vecdb.createFile(this.collectionForThread(threadId), file) + await vecdb.insertChunks( + this.collectionForThread(threadId), + fi.id, + chunks.map((t, i) => ({ text: t, embedding: embeddings[i] })) + ) + const infos = await vecdb.listAttachments(this.collectionForThread(threadId)) + const updated = infos.find((e) => e.id === fi.id) + return updated || { ...fi, chunk_count: chunks.length } + } + + async listAttachments(threadId: string, limit?: number): Promise { + return await vecdb.listAttachments(this.collectionForThread(threadId), limit) as AttachmentFileInfo[] + } + + async getChunks( + threadId: string, + fileId: string, + startOrder: number, + endOrder: number + ): Promise { + return await vecdb.getChunks(this.collectionForThread(threadId), fileId, startOrder, endOrder) as VectorSearchResult[] + } + + async deleteFile(threadId: string, fileId: string): Promise { + return await vecdb.deleteFile(this.collectionForThread(threadId), fileId) + } +} diff --git a/package.json b/package.json index cf3767e66..386721def 100644 --- a/package.json +++ b/package.json @@ -26,17 +26,16 @@ "serve:web-app": "yarn workspace @janhq/web-app serve:web", "build:serve:web-app": "yarn build:web-app && yarn serve:web-app", "dev:tauri": "yarn build:icon && yarn copy:assets:tauri && cross-env IS_CLEAN=true tauri dev", - "dev:ios": "yarn build:extensions-web && yarn copy:assets:mobile && RUSTC_WRAPPER= yarn tauri ios dev --features mobile", - "dev:android": "yarn build:extensions-web && yarn copy:assets:mobile && cross-env IS_CLEAN=true TAURI_ANDROID_BUILD=true yarn tauri android dev --features 
mobile", - "build:android": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_CLEAN=true TAURI_ANDROID_BUILD=true yarn tauri android build -- --no-default-features --features mobile", - "build:ios": "yarn copy:assets:mobile && yarn tauri ios build -- --no-default-features --features mobile", - "build:ios:device": "yarn build:icon && yarn copy:assets:mobile && yarn tauri ios build -- --no-default-features --features mobile --export-method debugging", + "dev:ios": "yarn copy:assets:mobile && RUSTC_WRAPPER= cross-env IS_IOS=true yarn tauri ios dev --features mobile", + "dev:android": "yarn copy:assets:mobile && cross-env IS_ANDROID=true yarn tauri android dev --features mobile", + "build:android": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_CLEAN=true yarn tauri android build -- --no-default-features --features mobile", + "build:ios": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_IOS=true yarn tauri ios build -- --no-default-features --features mobile", + "build:ios:device": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_IOS=true yarn tauri ios build -- --no-default-features --features mobile --export-method debugging", "copy:assets:tauri": "cpx \"pre-install/*.tgz\" \"src-tauri/resources/pre-install/\" && cpx \"LICENSE\" \"src-tauri/resources/\"", "copy:assets:mobile": "cpx \"pre-install/*.tgz\" \"src-tauri/resources/pre-install/\" && cpx \"LICENSE\" \"src-tauri/resources/\"", "download:lib": "node ./scripts/download-lib.mjs", "download:bin": "node ./scripts/download-bin.mjs", - "download:windows-installer": "node ./scripts/download-win-installer-deps.mjs", - "build:tauri:win32": "yarn download:bin && yarn download:windows-installer && yarn tauri build", + "build:tauri:win32": "yarn download:bin && yarn tauri build", "build:tauri:linux": "yarn download:bin && NO_STRIP=1 ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build && ./src-tauri/build-utils/buildAppImage.sh", "build:tauri:darwin": "yarn download:bin && yarn tauri build --target universal-apple-darwin", "build:tauri": "yarn build:icon && yarn copy:assets:tauri && run-script-os", diff --git a/scripts/download-bin.mjs b/scripts/download-bin.mjs index 68f09bf5f..b6ef81bb2 100644 --- a/scripts/download-bin.mjs +++ b/scripts/download-bin.mjs @@ -56,6 +56,75 @@ async function decompress(filePath, targetDir) { } } +async function getJson(url, headers = {}) { + return new Promise((resolve, reject) => { + const opts = new URL(url) + opts.headers = { + 'User-Agent': 'jan-app', + 'Accept': 'application/vnd.github+json', + ...headers, + } + https + .get(opts, (res) => { + if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) { + return getJson(res.headers.location, headers).then(resolve, reject) + } + if (res.statusCode !== 200) { + reject(new Error(`GET ${url} failed with status ${res.statusCode}`)) + return + } + let data = '' + res.on('data', (chunk) => (data += chunk)) + res.on('end', () => { + try { + resolve(JSON.parse(data)) + } catch (e) { + reject(e) + } + }) + }) + .on('error', reject) + }) +} + +function matchSqliteVecAsset(assets, platform, arch) { + const osHints = + platform === 'darwin' + ? ['darwin', 'macos', 'apple-darwin'] + : platform === 'win32' + ? ['windows', 'win', 'msvc'] + : ['linux'] + + const archHints = arch === 'arm64' ? 
['arm64', 'aarch64'] : ['x86_64', 'x64', 'amd64'] + const extHints = ['zip', 'tar.gz'] + + const lc = (s) => s.toLowerCase() + const candidates = assets + .filter((a) => a && a.browser_download_url && a.name) + .map((a) => ({ name: lc(a.name), url: a.browser_download_url })) + + // Prefer exact OS + arch matches + let matches = candidates.filter((c) => osHints.some((o) => c.name.includes(o)) && archHints.some((h) => c.name.includes(h)) && extHints.some((e) => c.name.endsWith(e))) + if (matches.length) return matches[0].url + // Fallback: OS only + matches = candidates.filter((c) => osHints.some((o) => c.name.includes(o)) && extHints.some((e) => c.name.endsWith(e))) + if (matches.length) return matches[0].url + // Last resort: any asset with shared library extension inside is unknown here, so pick any zip/tar.gz + matches = candidates.filter((c) => extHints.some((e) => c.name.endsWith(e))) + return matches.length ? matches[0].url : null +} + +async function fetchLatestSqliteVecUrl(platform, arch) { + try { + const rel = await getJson('https://api.github.com/repos/asg017/sqlite-vec/releases/latest') + const url = matchSqliteVecAsset(rel.assets || [], platform, arch) + return url + } catch (e) { + console.log('Failed to query sqlite-vec latest release:', e.message) + return null + } +} + function getPlatformArch() { const platform = os.platform() // 'darwin', 'linux', 'win32' const arch = os.arch() // 'x64', 'arm64', etc. @@ -266,6 +335,64 @@ async function main() { } console.log('UV downloaded.') + // ----- sqlite-vec (optional, ANN acceleration) ----- + try { + const binDir = 'src-tauri/resources/bin' + const platform = os.platform() + const ext = platform === 'darwin' ? 'dylib' : platform === 'win32' ? 'dll' : 'so' + const targetLibPath = path.join(binDir, `sqlite-vec.${ext}`) + + if (fs.existsSync(targetLibPath)) { + console.log(`sqlite-vec already present at ${targetLibPath}`) + } else { + let sqlvecUrl = await fetchLatestSqliteVecUrl(platform, os.arch()) + // Allow override via env if needed + if ((process.env.SQLVEC_URL || process.env.JAN_SQLITE_VEC_URL) && !sqlvecUrl) { + sqlvecUrl = process.env.SQLVEC_URL || process.env.JAN_SQLITE_VEC_URL + } + if (!sqlvecUrl) { + console.log('Could not determine sqlite-vec download URL; skipping (linear fallback will be used).') + } else { + console.log(`Downloading sqlite-vec from ${sqlvecUrl}...`) + const sqlvecArchive = path.join(tempBinDir, `sqlite-vec-download`) + const guessedExt = sqlvecUrl.endsWith('.zip') ? '.zip' : sqlvecUrl.endsWith('.tar.gz') ? '.tar.gz' : '' + const archivePath = sqlvecArchive + guessedExt + await download(sqlvecUrl, archivePath) + if (!guessedExt) { + console.log('Unknown archive type for sqlite-vec; expecting .zip or .tar.gz') + } else { + await decompress(archivePath, tempBinDir) + // Try to find a shared library in the extracted files + const candidates = [] + function walk(dir) { + for (const entry of fs.readdirSync(dir)) { + const full = path.join(dir, entry) + const stat = fs.statSync(full) + if (stat.isDirectory()) walk(full) + else if (full.endsWith(`.${ext}`)) candidates.push(full) + } + } + walk(tempBinDir) + if (candidates.length === 0) { + console.log('No sqlite-vec shared library found in archive; skipping copy.') + } else { + // Pick the first match and copy/rename to sqlite-vec. 
+ const libSrc = candidates[0] + // Ensure we copy the FILE, not a directory (fs-extra copySync can copy dirs) + if (fs.statSync(libSrc).isFile()) { + fs.copyFileSync(libSrc, targetLibPath) + console.log(`sqlite-vec installed at ${targetLibPath}`) + } else { + console.log(`Found non-file at ${libSrc}; skipping.`) + } + } + } + } + } + } catch (err) { + console.log('sqlite-vec download step failed (non-fatal):', err) + } + console.log('Downloads completed.') } diff --git a/scripts/download-win-installer-deps.mjs b/scripts/download-win-installer-deps.mjs deleted file mode 100644 index 33bbbe04b..000000000 --- a/scripts/download-win-installer-deps.mjs +++ /dev/null @@ -1,83 +0,0 @@ -console.log('Downloading Windows installer dependencies...') -// scripts/download-win-installer-deps.mjs -import https from 'https' -import fs, { mkdirSync } from 'fs' -import os from 'os' -import path from 'path' -import { copySync } from 'cpx' - -function download(url, dest) { - return new Promise((resolve, reject) => { - console.log(`Downloading ${url} to ${dest}`) - const file = fs.createWriteStream(dest) - https - .get(url, (response) => { - console.log(`Response status code: ${response.statusCode}`) - if ( - response.statusCode >= 300 && - response.statusCode < 400 && - response.headers.location - ) { - // Handle redirect - const redirectURL = response.headers.location - console.log(`Redirecting to ${redirectURL}`) - download(redirectURL, dest).then(resolve, reject) // Recursive call - return - } else if (response.statusCode !== 200) { - reject(`Failed to get '${url}' (${response.statusCode})`) - return - } - response.pipe(file) - file.on('finish', () => { - file.close(resolve) - }) - }) - .on('error', (err) => { - fs.unlink(dest, () => reject(err.message)) - }) - }) -} - -async function main() { - console.log('Starting Windows installer dependencies download') - const platform = os.platform() // 'darwin', 'linux', 'win32' - const arch = os.arch() // 'x64', 'arm64', etc. 
- - if (arch != 'x64') return - - - const libDir = 'src-tauri/resources/lib' - const tempDir = 'scripts/dist' - - try { - mkdirSync('scripts/dist') - } catch (err) { - // Expect EEXIST error if the directory already exists - } - - // Download VC++ Redistributable 17 - if (platform == 'win32') { - const vcFilename = 'vc_redist.x64.exe' - const vcUrl = 'https://aka.ms/vs/17/release/vc_redist.x64.exe' - - console.log(`Downloading VC++ Redistributable...`) - const vcSavePath = path.join(tempDir, vcFilename) - if (!fs.existsSync(vcSavePath)) { - await download(vcUrl, vcSavePath) - } - - // copy to tauri resources - try { - copySync(vcSavePath, libDir) - } catch (err) { - // Expect EEXIST error - } - } - - console.log('Windows installer dependencies downloads completed.') -} - -main().catch((err) => { - console.error('Error:', err) - process.exit(1) -}) diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock index da2ca059e..1f2850c2f 100644 --- a/src-tauri/Cargo.lock +++ b/src-tauri/Cargo.lock @@ -14,7 +14,7 @@ dependencies = [ "hyper 0.14.32", "jan-utils", "libc", - "libloading 0.8.8", + "libloading 0.8.9", "log", "nix", "once_cell", @@ -23,6 +23,7 @@ dependencies = [ "serde", "serde_json", "serde_yaml", + "sqlx", "tar", "tauri", "tauri-build", @@ -34,12 +35,14 @@ dependencies = [ "tauri-plugin-log", "tauri-plugin-opener", "tauri-plugin-os", + "tauri-plugin-rag", "tauri-plugin-shell", "tauri-plugin-single-instance", "tauri-plugin-store", "tauri-plugin-updater", + "tauri-plugin-vector-db", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-util", "url", @@ -50,9 +53,9 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.24.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" dependencies = [ "gimli", ] @@ -63,6 +66,15 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" +[[package]] +name = "adobe-cmap-parser" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae8abfa9a4688de8fc9f42b3f013b6fffec18ed8a554f5f113577e0b9b3212a3" +dependencies = [ + "pom", +] + [[package]] name = "aes" version = "0.8.4" @@ -108,10 +120,10 @@ dependencies = [ ] [[package]] -name = "android-tzdata" -version = "0.1.1" +name = "allocator-api2" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "android_log-sys" @@ -141,15 +153,15 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.98" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = "arbitrary" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" dependencies = [ "derive_arbitrary", ] @@ -216,9 +228,9 @@ dependencies = [ [[package]] name = "async-executor" 
-version = "1.13.2" +version = "1.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb812ffb58524bdd10860d7d974e2f01cc0950c2438a74ee5ec2e2280c6c4ffa" +checksum = "497c00e0fd83a72a79a39fcbd8e3e2f055d6f6c7e025f3b3d91f4f8e76527fb8" dependencies = [ "async-task", "concurrent-queue", @@ -230,11 +242,11 @@ dependencies = [ [[package]] name = "async-io" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19634d6336019ef220f09fd31168ce5c184b295cbf80345437cc36094ef223ca" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" dependencies = [ - "async-lock", + "autocfg", "cfg-if", "concurrent-queue", "futures-io", @@ -243,7 +255,7 @@ dependencies = [ "polling", "rustix", "slab", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -259,9 +271,9 @@ dependencies = [ [[package]] name = "async-process" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65daa13722ad51e6ab1a1b9c01299142bc75135b337923cfa10e79bbbd669f00" +checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75" dependencies = [ "async-channel", "async-io", @@ -283,14 +295,14 @@ checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "async-signal" -version = "0.2.12" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f567af260ef69e1d52c2b560ce0ea230763e6fbb9214a85d768760a920e3e3c1" +checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" dependencies = [ "async-io", "async-lock", @@ -301,7 +313,7 @@ dependencies = [ "rustix", "signal-hook-registry", "slab", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -312,13 +324,13 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-trait" -version = "0.1.88" +version = "0.1.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -344,6 +356,15 @@ dependencies = [ "system-deps", ] +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + [[package]] name = "atomic-waker" version = "1.1.2" @@ -358,9 +379,9 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "backtrace" -version = "0.3.75" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" +checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" dependencies = [ "addr2line", "cfg-if", @@ -368,7 +389,7 @@ dependencies = [ "miniz_oxide", "object", "rustc-demangle", - "windows-targets 0.52.6", + "windows-link 0.2.1", ] [[package]] @@ -397,9 +418,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.1" +version = "2.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" +checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" dependencies = [ "serde", ] @@ -436,11 +457,11 @@ dependencies = [ [[package]] name = "block2" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "340d2f0bdb2a43c1d3cd40513185b2bd7def0aa1052f956455114bc98f82dcf2" +checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5" dependencies = [ - "objc2 0.6.1", + "objc2 0.6.3", ] [[package]] @@ -473,10 +494,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdd1d3c0c2f5833f22386f252fe8ed005c7f59fdcddeef025c01b4c3b9fd9ac3" dependencies = [ "once_cell", - "proc-macro-crate 3.3.0", + "proc-macro-crate 3.4.0", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -520,22 +541,22 @@ dependencies = [ [[package]] name = "bytemuck" -version = "1.23.1" +version = "1.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c76a5792e44e4abe34d3abf15636779261d45a7450612059293d1d2cfc63422" +checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" dependencies = [ "bytemuck_derive", ] [[package]] name = "bytemuck_derive" -version = "1.10.1" +version = "1.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f154e572231cb6ba2bd1176980827e3d5dc04cc183a75dea38109fbdd672d29" +checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -579,7 +600,7 @@ version = "0.18.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ca26ef0159422fb77631dc9d17b102f253b876fe1586b03b803e63a309b4ee2" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "cairo-sys-rs", "glib", "libc", @@ -599,12 +620,27 @@ dependencies = [ ] [[package]] -name = "camino" -version = "1.1.10" +name = "calamine" +version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0da45bc31171d8d6960122e222a67740df867c1dd53b4d51caa297084c185cab" +checksum = "47a4d6ea525ea187df1e3a1c4b23469b1cbe60c5bafc1c0ef14b2b8738a8303d" dependencies = [ + "byteorder", + "codepage", + "encoding_rs", + "log", + "quick-xml 0.31.0", "serde", + "zip 0.6.6", +] + +[[package]] +name = "camino" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609" +dependencies = [ + "serde_core", ] [[package]] @@ -627,7 +663,7 @@ dependencies = [ "semver", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -637,15 +673,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "374b7c592d9c00c1f4972ea58390ac6b18cbb6ab79011f3bdc90a0b82ca06b77" dependencies = [ "serde", - "toml 0.9.5", + "toml 0.9.7", ] [[package]] name = "cc" -version = "1.2.31" +version = "1.2.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3a42d84bb6b69d3a8b3eaacf0d88f179e1929695e1ad012b6cf64d9caaa5fd2" +checksum = "e1d05d92f4b1fd76aad469d46cdd858ca761576082cd37df81416691e50199fb" dependencies = [ + "find-msvc-tools", "jobserver", "libc", "shlex", @@ -680,9 +717,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.1" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" +checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" [[package]] name = "cfg_aliases" @@ -691,18 +728,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] -name = "chrono" -version = "0.4.41" +name = "chardetng" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +checksum = "14b8f0b65b7b08ae3c8187e8d77174de20cb6777864c6b832d8ad365999cf1ea" +dependencies = [ + "cfg-if", + "encoding_rs", + "memchr", +] + +[[package]] +name = "chrono" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ - "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", "wasm-bindgen", - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -715,6 +762,15 @@ dependencies = [ "inout", ] +[[package]] +name = "codepage" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48f68d061bc2828ae826206326e61251aca94c1e4a5305cf52d9138639c918b4" +dependencies = [ + "encoding_rs", +] + [[package]] name = "combine" version = "4.6.7" @@ -734,6 +790,12 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + [[package]] name = "const-random" version = "0.1.18" @@ -827,7 +889,7 @@ version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa95a34622365fa5bbf40b20b75dba8dfa8c94c734aea8ac9a5ca38af14316f1" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "core-foundation 0.10.1", "core-graphics-types 0.2.0", "foreign-types 0.5.0", @@ -851,7 +913,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d44a101f213f6c4cdc1853d4b78aef6db6bdfa3468798cc1d9912f4735013eb" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "core-foundation 0.10.1", "libc", ] @@ -865,6 +927,21 @@ dependencies = [ "libc", ] +[[package]] +name = "crc" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + [[package]] name = "crc32fast" version = "1.5.0" @@ -938,7 +1015,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.104", + "syn 2.0.106", +] + +[[package]] +name = "csv" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d02f3b0da4c6504f86e9cd789d8dbafab48c2321be74e9987593de5a894d93d" +dependencies = [ + "memchr", ] 
[[package]] @@ -948,7 +1046,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" dependencies = [ "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -963,12 +1061,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.21.2" +version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08440b3dd222c3d0433e63e097463969485f112baff337dfdaca043a0d760570" +checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" dependencies = [ - "darling_core 0.21.2", - "darling_macro 0.21.2", + "darling_core 0.21.3", + "darling_macro 0.21.3", ] [[package]] @@ -982,21 +1080,21 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "darling_core" -version = "0.21.2" +version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d25b7912bc28a04ab1b7715a68ea03aaa15662b43a1a4b2c480531fd19f8bf7e" +checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1007,45 +1105,69 @@ checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core 0.20.11", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "darling_macro" -version = "0.21.2" +version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce154b9bea7fb0c8e8326e62d00354000c36e79770ff21b8c84e3aa267d9d531" +checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" dependencies = [ - "darling_core 0.21.2", + "darling_core 0.21.3", "quote", - "syn 2.0.104", + "syn 2.0.106", +] + +[[package]] +name = "dashmap" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +dependencies = [ + "cfg-if", + "hashbrown 0.12.3", + "lock_api", + "once_cell", + "parking_lot_core", ] [[package]] name = "data-url" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a" +checksum = "be1e0bca6c3637f992fc1cc7cbc52a78c1ef6db076dbf1059c4323d6a2048376" + +[[package]] +name = "der" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] [[package]] name = "deranged" -version = "0.4.0" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +checksum = "a41953f86f8a05768a6cda24def994fd2f424b04ec5c719cf89989779f199071" dependencies = [ "powerfmt", - "serde", + "serde_core", ] [[package]] name = "derive_arbitrary" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" +checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1058,7 +1180,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version", - "syn 2.0.104", + "syn 2.0.106", ] 
[[package]] @@ -1068,6 +1190,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", + "const-oid", "crypto-common", "subtle", ] @@ -1090,7 +1213,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -1105,10 +1228,10 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" dependencies = [ - "bitflags 2.9.1", - "block2 0.6.1", + "bitflags 2.9.4", + "block2 0.6.2", "libc", - "objc2 0.6.1", + "objc2 0.6.3", ] [[package]] @@ -1119,7 +1242,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1128,7 +1251,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "330c60081dcc4c72131f8eb70510f1ac07223e5d4163db481a04a0befcffa412" dependencies = [ - "libloading 0.8.8", + "libloading 0.8.9", ] [[package]] @@ -1151,7 +1274,7 @@ checksum = "788160fb30de9cdd857af31c6a2675904b16ece8fc2737b2c7127ba368c9d0f4" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1172,6 +1295,12 @@ dependencies = [ "litrs", ] +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + [[package]] name = "downcast-rs" version = "1.2.1" @@ -1215,15 +1344,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" [[package]] -name = "embed-resource" -version = "3.0.5" +name = "either" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6d81016d6c977deefb2ef8d8290da019e27cc26167e102185da528e6c0ab38" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] + +[[package]] +name = "embed-resource" +version = "3.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55a075fc573c64510038d7ee9abc7990635863992f83ebc52c8b433b8411a02e" dependencies = [ "cc", "memchr", "rustc_version", - "toml 0.9.5", + "toml 0.9.7", "vswhom", "winreg 0.55.0", ] @@ -1267,7 +1405,7 @@ checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1297,22 +1435,43 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "erased-serde" -version = "0.4.6" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7" +checksum = "259d404d09818dec19332e31d94558aeb442fea04c817006456c24b5460bbd4b" dependencies = [ "serde", + "serde_core", "typeid", ] [[package]] name = "errno" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.60.2", + "windows-sys 0.61.2", +] + +[[package]] +name = "etcetera" +version = "0.8.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + +[[package]] +name = "euclid" +version = "0.20.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bb7ef65b3777a325d1eeefefab5b6d4959da54747e33bd6258e789640f307ad" +dependencies = [ + "num-traits", ] [[package]] @@ -1336,6 +1495,18 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "fallible-iterator" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" + +[[package]] +name = "fallible-streaming-iterator" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" + [[package]] name = "fastrand" version = "2.3.0" @@ -1372,20 +1543,26 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.25" +version = "0.2.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" +checksum = "bc0505cd1b6fa6580283f6bdf70a73fcf4aba1184038c90902b92b3dd0df63ed" dependencies = [ "cfg-if", "libc", "libredox", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] +[[package]] +name = "find-msvc-tools" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0399f9d26e5191ce32c498bebd31e7a3ceabc2745f0ac54af3f335126c3f24b3" + [[package]] name = "fix-path-env" version = "0.0.0" -source = "git+https://github.com/tauri-apps/fix-path-env-rs#0e479e2804edc1a7e5f15ece2b48ee30858c2838" +source = "git+https://github.com/tauri-apps/fix-path-env-rs#c4c45d503ea115a839aae718d02f79e7c7f0f673" dependencies = [ "home", "strip-ansi-escapes", @@ -1394,20 +1571,37 @@ dependencies = [ [[package]] name = "flate2" -version = "1.1.2" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" +checksum = "dc5a4e564e38c699f2880d3fda590bedc2e69f3f84cd48b457bd892ce61d0aa9" dependencies = [ "crc32fast", "miniz_oxide", ] +[[package]] +name = "flume" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" +dependencies = [ + "futures-core", + "futures-sink", + "spin", +] + [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "foreign-types" version = "0.3.2" @@ -1435,7 +1629,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1452,9 +1646,9 @@ checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b" [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = 
"cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] @@ -1517,6 +1711,17 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + [[package]] name = "futures-io" version = "0.3.31" @@ -1544,7 +1749,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1739,15 +1944,15 @@ dependencies = [ "js-sys", "libc", "r-efi", - "wasi 0.14.2+wasi-0.2.4", + "wasi 0.14.7+wasi-0.2.4", "wasm-bindgen", ] [[package]] name = "gimli" -version = "0.31.1" +version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" [[package]] name = "gio" @@ -1787,7 +1992,7 @@ version = "0.18.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "233daaf6e83ae6a12a52055f568f9d7cf4671dabb78ff9560ab6da230ce00ee5" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "futures-channel", "futures-core", "futures-executor", @@ -1811,11 +2016,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bb0228f477c0900c880fd78c8759b95c7636dbd7842707f49e132378aa2acdc" dependencies = [ "heck 0.4.1", - "proc-macro-crate 2.0.0", + "proc-macro-crate 2.0.2", "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1830,9 +2035,9 @@ dependencies = [ [[package]] name = "glob" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "gobject-sys" @@ -1894,7 +2099,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1909,7 +2114,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.10.0", + "indexmap 2.11.4", "slab", "tokio", "tokio-util", @@ -1928,7 +2133,7 @@ dependencies = [ "futures-core", "futures-sink", "http 1.3.1", - "indexmap 2.10.0", + "indexmap 2.11.4", "slab", "tokio", "tokio-util", @@ -1960,12 +2165,44 @@ name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash 0.8.12", +] [[package]] name = "hashbrown" -version = "0.15.4" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" + +[[package]] +name = "hashlink" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" +dependencies = [ + "hashbrown 0.14.5", +] + +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.5", +] [[package]] name = "heck" @@ -1991,6 +2228,15 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + [[package]] name = "hmac" version = "0.12.1" @@ -2009,6 +2255,35 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "html2text" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d21a727ee791bce84d364a69b0f84a5d99f06278adfe4dbd431d475ea28e338" +dependencies = [ + "dashmap", + "html5ever 0.26.0", + "markup5ever 0.11.0", + "tendril", + "thiserror 1.0.69", + "unicode-width", + "xml5ever", +] + +[[package]] +name = "html5ever" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7" +dependencies = [ + "log", + "mac", + "markup5ever 0.11.0", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "html5ever" version = "0.29.1" @@ -2017,7 +2292,7 @@ checksum = "3b7410cae13cbc75623c98ac4cbfd1f0bedddf3227afc24f370cf0f50a44a11c" dependencies = [ "log", "mac", - "markup5ever", + "markup5ever 0.14.1", "match_token", ] @@ -2121,24 +2396,40 @@ dependencies = [ [[package]] name = "hyper" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" dependencies = [ + "atomic-waker", "bytes", "futures-channel", - "futures-util", + "futures-core", "h2 0.4.12", "http 1.3.1", "http-body 1.0.1", "httparse", "itoa", "pin-project-lite", + "pin-utils", "smallvec", "tokio", "want", ] +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.32", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + [[package]] name = "hyper-rustls" version = "0.27.7" @@ -2146,14 +2437,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ "http 1.3.1", - "hyper 1.6.0", + "hyper 1.7.0", "hyper-util", - "rustls", + "rustls 0.23.32", "rustls-pki-types", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.4", "tower-service", - "webpki-roots", + "webpki-roots 1.0.3", ] [[package]] @@ -2171,9 +2462,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e" +checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8" dependencies = [ "base64 0.22.1", "bytes", @@ -2182,7 
+2473,7 @@ dependencies = [ "futures-util", "http 1.3.1", "http-body 1.0.1", - "hyper 1.6.0", + "hyper 1.7.0", "ipnet", "libc", "percent-encoding", @@ -2197,9 +2488,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.63" +version = "0.1.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -2207,7 +2498,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core 0.61.2", + "windows-core 0.62.2", ] [[package]] @@ -2323,9 +2614,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -2355,13 +2646,23 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.10.0" +version = "2.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", - "hashbrown 0.15.4", + "hashbrown 0.16.0", "serde", + "serde_core", +] + +[[package]] +name = "infer" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb33622da908807a06f9513c19b3c1ad50fab3e4137d82a78107d502075aa199" +dependencies = [ + "cfb", ] [[package]] @@ -2384,11 +2685,11 @@ dependencies = [ [[package]] name = "io-uring" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" +checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "cfg-if", "libc", ] @@ -2508,9 +2809,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.77" +version = "0.3.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +checksum = "ec48937a97411dcb524a265206ccd4c90bb711fca92b2792c407f268825b9305" dependencies = [ "once_cell", "wasm-bindgen", @@ -2544,7 +2845,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b750dcadc39a09dbadd74e118f6dd6598df77fa01df0cfcdc52c28dece74528a" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "serde", "unicode-segmentation", ] @@ -2556,8 +2857,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02cb977175687f33fa4afa0c95c112b987ea1443e5a51c8f8ff27dc618270cc2" dependencies = [ "cssparser", - "html5ever", - "indexmap 2.10.0", + "html5ever 0.29.1", + "indexmap 2.11.4", "selectors", ] @@ -2566,6 +2867,9 @@ name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] [[package]] name = "libappindicator" @@ -2593,9 +2897,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.174" +version = "0.2.176" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" +checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" [[package]] name = "libloading" @@ -2609,30 +2913,47 @@ dependencies = [ [[package]] name = "libloading" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" +checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" dependencies = [ "cfg-if", - "windows-targets 0.53.3", + "windows-link 0.2.1", ] [[package]] -name = "libredox" -version = "0.1.9" +name = "libm" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3" +checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" + +[[package]] +name = "libredox" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "libc", "redox_syscall", ] [[package]] -name = "linux-raw-sys" -version = "0.9.4" +name = "libsqlite3-sys" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" [[package]] name = "litemap" @@ -2648,23 +2969,40 @@ checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed" [[package]] name = "lock_api" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" dependencies = [ - "autocfg", "scopeguard", ] [[package]] name = "log" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" dependencies = [ "value-bag", ] +[[package]] +name = "lopdf" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5c8ecfc6c72051981c0459f75ccc585e7ff67c70829560cda8e647882a9abff" +dependencies = [ + "encoding_rs", + "flate2", + "indexmap 2.11.4", + "itoa", + "log", + "md-5", + "nom", + "rangemap", + "time", + "weezl", +] + [[package]] name = "lru-slab" version = "0.1.2" @@ -2686,6 +3024,20 @@ dependencies = [ "libc", ] +[[package]] +name = "markup5ever" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016" +dependencies = [ + "log", + "phf 0.10.1", + "phf_codegen 0.10.0", + "string_cache", + "string_cache_codegen", + "tendril", +] + [[package]] name = "markup5ever" version = "0.14.1" @@ -2708,7 +3060,7 @@ checksum = "88a9689d8d44bf9964484516275f5cd4c9b59457a6940c1d5d0ecbb94510a36b" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -2718,10 +3070,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" [[package]] -name = "memchr" -version = "2.7.5" +name = "md-5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "memchr" +version = "2.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] name = "memoffset" @@ -2738,6 +3100,12 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + [[package]] name = "minisign-verify" version = "0.2.4" @@ -2775,14 +3143,14 @@ dependencies = [ "dpi", "gtk", "keyboard-types", - "objc2 0.6.1", + "objc2 0.6.3", "objc2-app-kit", "objc2-core-foundation", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", "once_cell", "png", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "windows-sys 0.60.2", ] @@ -2809,7 +3177,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3f42e7bbe13d351b6bead8286a43aac9534b82bd3cc43e47037f012ebfd62d4" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "jni-sys", "log", "ndk-sys", @@ -2845,7 +3213,7 @@ version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "cfg-if", "cfg_aliases", "libc", @@ -2858,6 +3226,16 @@ version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + [[package]] name = "ntapi" version = "0.4.1" @@ -2867,12 +3245,49 @@ dependencies = [ "winapi", ] +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + [[package]] name = "num-conv" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.19" @@ -2880,6 +3295,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -2898,10 +3314,10 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77e878c846a8abae00dd069496dbe8751b16ac1c3d6bd2a7283a938e8228f90d" dependencies = [ - "proc-macro-crate 3.3.0", + "proc-macro-crate 3.4.0", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -2919,8 +3335,8 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c9bff0aa1d48904a1385ea2a8b97576fbdcbc9a3cfccd0d31fe978e1c4038c5" dependencies = [ - "bitflags 2.9.1", - "libloading 0.8.8", + "bitflags 2.9.4", + "libloading 0.8.9", "nvml-wrapper-sys", "static_assertions", "thiserror 1.0.69", @@ -2933,7 +3349,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "698d45156f28781a4e79652b6ebe2eaa0589057d588d3aec1333f6466f13fcb5" dependencies = [ - "libloading 0.8.8", + "libloading 0.8.9", ] [[package]] @@ -2963,9 +3379,9 @@ dependencies = [ [[package]] name = "objc2" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88c6597e14493ab2e44ce58f2fdecf095a51f12ca57bec060a11c57332520551" +checksum = "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05" dependencies = [ "objc2-encode", "objc2-exception-helper", @@ -2973,77 +3389,104 @@ dependencies = [ [[package]] name = "objc2-app-kit" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6f29f568bec459b0ddff777cec4fe3fd8666d82d5a40ebd0ff7e66134f89bcc" +checksum = "d49e936b501e5c5bf01fda3a9452ff86dc3ea98ad5f283e1455153142d97518c" dependencies = [ - "bitflags 2.9.1", - "block2 0.6.1", + "bitflags 2.9.4", + "block2 0.6.2", "libc", - "objc2 0.6.1", + "objc2 0.6.3", "objc2-cloud-kit", "objc2-core-data", "objc2-core-foundation", "objc2-core-graphics", "objc2-core-image", - "objc2-foundation 0.3.1", - "objc2-quartz-core 0.3.1", + "objc2-core-text", + "objc2-core-video", + "objc2-foundation 0.3.2", + "objc2-quartz-core 0.3.2", ] [[package]] name = "objc2-cloud-kit" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17614fdcd9b411e6ff1117dfb1d0150f908ba83a7df81b1f118005fe0a8ea15d" +checksum = "73ad74d880bb43877038da939b7427bba67e9dd42004a18b809ba7d87cee241c" dependencies = [ - "bitflags 2.9.1", - "objc2 0.6.1", - "objc2-foundation 0.3.1", + "bitflags 2.9.4", + "objc2 0.6.3", + "objc2-foundation 0.3.2", ] [[package]] name = "objc2-core-data" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291fbbf7d29287518e8686417cf7239c74700fd4b607623140a7d4a3c834329d" +checksum = "0b402a653efbb5e82ce4df10683b6b28027616a2715e90009947d50b8dd298fa" dependencies = [ - "bitflags 2.9.1", - "objc2 0.6.1", - "objc2-foundation 0.3.1", + "bitflags 2.9.4", + "objc2 0.6.3", + "objc2-foundation 0.3.2", ] [[package]] name = "objc2-core-foundation" -version = "0.3.1" +version = "0.3.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c10c2894a6fed806ade6027bcd50662746363a9589d3ec9d9bef30a4e4bc166" +checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "dispatch2", - "objc2 0.6.1", + "objc2 0.6.3", ] [[package]] name = "objc2-core-graphics" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "989c6c68c13021b5c2d6b71456ebb0f9dc78d752e86a98da7c716f4f9470f5a4" +checksum = "e022c9d066895efa1345f8e33e584b9f958da2fd4cd116792e15e07e4720a807" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "dispatch2", - "objc2 0.6.1", + "objc2 0.6.3", "objc2-core-foundation", "objc2-io-surface", ] [[package]] name = "objc2-core-image" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79b3dc0cc4386b6ccf21c157591b34a7f44c8e75b064f85502901ab2188c007e" +checksum = "e5d563b38d2b97209f8e861173de434bd0214cf020e3423a52624cd1d989f006" dependencies = [ - "objc2 0.6.1", - "objc2-foundation 0.3.1", + "objc2 0.6.3", + "objc2-foundation 0.3.2", +] + +[[package]] +name = "objc2-core-text" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cde0dfb48d25d2b4862161a4d5fcc0e3c24367869ad306b0c9ec0073bfed92d" +dependencies = [ + "bitflags 2.9.4", + "objc2 0.6.3", + "objc2-core-foundation", + "objc2-core-graphics", +] + +[[package]] +name = "objc2-core-video" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d425caf1df73233f29fd8a5c3e5edbc30d2d4307870f802d18f00d83dc5141a6" +dependencies = [ + "bitflags 2.9.4", + "objc2 0.6.3", + "objc2-core-foundation", + "objc2-core-graphics", + "objc2-io-surface", ] [[package]] @@ -3067,7 +3510,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ee638a5da3799329310ad4cfa62fbf045d5f56e3ef5ba4149e7452dcf89d5a8" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "block2 0.5.1", "libc", "objc2 0.5.2", @@ -3075,35 +3518,35 @@ dependencies = [ [[package]] name = "objc2-foundation" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900831247d2fe1a09a683278e5384cfb8c80c79fe6b166f9d14bfdde0ea1b03c" +checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272" dependencies = [ - "bitflags 2.9.1", - "block2 0.6.1", + "bitflags 2.9.4", + "block2 0.6.2", "libc", - "objc2 0.6.1", + "objc2 0.6.3", "objc2-core-foundation", ] [[package]] name = "objc2-io-surface" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7282e9ac92529fa3457ce90ebb15f4ecbc383e8338060960760fa2cf75420c3c" +checksum = "180788110936d59bab6bd83b6060ffdfffb3b922ba1396b312ae795e1de9d81d" dependencies = [ - "bitflags 2.9.1", - "objc2 0.6.1", + "bitflags 2.9.4", + "objc2 0.6.3", "objc2-core-foundation", ] [[package]] name = "objc2-javascript-core" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9052cb1bb50a4c161d934befcf879526fb87ae9a68858f241e693ca46225cf5a" +checksum = "2a1e6550c4caed348956ce3370c9ffeca70bb1dbed4fa96112e7c6170e074586" dependencies = [ - "objc2 0.6.1", + "objc2 0.6.3", "objc2-core-foundation", ] @@ -3113,7 +3556,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dd0cba1276f6023976a406a14ffa85e1fdd19df6b0f737b063b95f6c8c7aadd6" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "block2 0.5.1", "objc2 0.5.2", "objc2-foundation 0.2.2", @@ -3121,14 +3564,14 @@ dependencies = [ [[package]] name = "objc2-osa-kit" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26bb88504b5a050dbba515d2414607bf5e57dd56b107bc5f0351197a3e7bdc5d" +checksum = "f112d1746737b0da274ef79a23aac283376f335f4095a083a267a082f21db0c0" dependencies = [ - "bitflags 2.9.1", - "objc2 0.6.1", + "bitflags 2.9.4", + "objc2 0.6.3", "objc2-app-kit", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", ] [[package]] @@ -3137,7 +3580,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e42bee7bff906b14b167da2bac5efe6b6a07e6f7c0a21a7308d40c960242dc7a" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "block2 0.5.1", "objc2 0.5.2", "objc2-foundation 0.2.2", @@ -3146,59 +3589,59 @@ dependencies = [ [[package]] name = "objc2-quartz-core" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ffb6a0cd5f182dc964334388560b12a57f7b74b3e2dec5e2722aa2dfb2ccd5" +checksum = "96c1358452b371bf9f104e21ec536d37a650eb10f7ee379fff67d2e08d537f1f" dependencies = [ - "bitflags 2.9.1", - "objc2 0.6.1", - "objc2-foundation 0.3.1", + "bitflags 2.9.4", + "objc2 0.6.3", + "objc2-foundation 0.3.2", ] [[package]] name = "objc2-security" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1f8e0ef3ab66b08c42644dcb34dba6ec0a574bbd8adbb8bdbdc7a2779731a44" +checksum = "709fe137109bd1e8b5a99390f77a7d8b2961dafc1a1c5db8f2e60329ad6d895a" dependencies = [ - "bitflags 2.9.1", - "objc2 0.6.1", + "bitflags 2.9.4", + "objc2 0.6.3", "objc2-core-foundation", ] [[package]] name = "objc2-ui-kit" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25b1312ad7bc8a0e92adae17aa10f90aae1fb618832f9b993b022b591027daed" +checksum = "d87d638e33c06f577498cbcc50491496a3ed4246998a7fbba7ccb98b1e7eab22" dependencies = [ - "bitflags 2.9.1", - "objc2 0.6.1", + "bitflags 2.9.4", + "objc2 0.6.3", "objc2-core-foundation", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", ] [[package]] name = "objc2-web-kit" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91672909de8b1ce1c2252e95bbee8c1649c9ad9d14b9248b3d7b4c47903c47ad" +checksum = "b2e5aaab980c433cf470df9d7af96a7b46a9d892d521a2cbbb2f8a4c16751e7f" dependencies = [ - "bitflags 2.9.1", - "block2 0.6.1", - "objc2 0.6.1", + "bitflags 2.9.4", + "block2 0.6.2", + "objc2 0.6.3", "objc2-app-kit", "objc2-core-foundation", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", "objc2-javascript-core", "objc2-security", ] [[package]] name = "object" -version = "0.36.7" +version = "0.37.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" dependencies = [ "memchr", ] @@ -3227,7 +3670,7 @@ version = "0.10.73" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "cfg-if", "foreign-types 0.3.2", "libc", @@ -3244,7 +3687,7 
@@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -3255,9 +3698,9 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-src" -version = "300.5.2+3.5.2" +version = "300.5.3+3.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d270b79e2926f5150189d475bc7e9d2c69f9c4697b185fa917d5a32b792d21b4" +checksum = "dc6bad8cd0233b63971e232cc9c5e83039375b8586d2312f31fda85db8f888c2" dependencies = [ "cc", ] @@ -3329,12 +3772,12 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "732c71caeaa72c065bb69d7ea08717bd3f4863a4f451402fc9513e29dbd5261b" dependencies = [ - "objc2 0.6.1", - "objc2-foundation 0.3.1", + "objc2 0.6.3", + "objc2-foundation 0.3.2", "objc2-osa-kit", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -3370,9 +3813,9 @@ checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" dependencies = [ "lock_api", "parking_lot_core", @@ -3380,15 +3823,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.11" +version = "0.9.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-targets 0.52.6", + "windows-link 0.2.1", ] [[package]] @@ -3427,10 +3870,34 @@ dependencies = [ ] [[package]] -name = "percent-encoding" -version = "2.3.1" +name = "pdf-extract" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "cbb3a5387b94b9053c1e69d8abfd4dd6dae7afda65a5c5279bc1f42ab39df575" +dependencies = [ + "adobe-cmap-parser", + "encoding_rs", + "euclid", + "lopdf", + "postscript", + "type1-encoding-parser", + "unicode-normalization", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "phf" @@ -3472,6 +3939,16 @@ dependencies = [ "phf_shared 0.8.0", ] +[[package]] +name = "phf_codegen" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd" +dependencies = [ + "phf_generator 0.10.0", + "phf_shared 0.10.0", +] + [[package]] name = "phf_codegen" version = "0.11.3" @@ -3536,7 +4013,7 @@ dependencies = [ "phf_shared 0.11.3", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -3589,6 +4066,27 @@ dependencies = [ "futures-io", ] +[[package]] +name = "pkcs1" +version = "0.7.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + [[package]] name = "pkg-config" version = "0.3.32" @@ -3597,13 +4095,13 @@ checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "plist" -version = "1.7.4" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3af6b589e163c5a788fab00ce0c0366f6efbb9959c2f9874b224936af7fce7e1" +checksum = "740ebea15c5d1428f910cd1a5f52cebf8d25006245ed8ade92702f4943d91e07" dependencies = [ "base64 0.22.1", - "indexmap 2.10.0", - "quick-xml 0.38.1", + "indexmap 2.11.4", + "quick-xml 0.38.3", "serde", "time", ] @@ -3623,23 +4121,35 @@ dependencies = [ [[package]] name = "polling" -version = "3.10.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5bd19146350fe804f7cb2669c851c03d69da628803dab0d98018142aaa5d829" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" dependencies = [ "cfg-if", "concurrent-queue", "hermit-abi", "pin-project-lite", "rustix", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] -name = "potential_utf" -version = "0.1.2" +name = "pom" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +checksum = "60f6ce597ecdcc9a098e7fddacb1065093a3d66446fa16c675e7e71d1b5c28e6" + +[[package]] +name = "postscript" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78451badbdaebaf17f053fd9152b3ffb33b516104eacb45e7864aaa9c712f306" + +[[package]] +name = "potential_utf" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a" dependencies = [ "zerovec", ] @@ -3677,20 +4187,21 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "2.0.0" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" +checksum = "b00f26d3400549137f92511a46ac1cd8ce37cb5598a96d382381458b992a5d24" dependencies = [ - "toml_edit 0.20.7", + "toml_datetime 0.6.3", + "toml_edit 0.20.2", ] [[package]] name = "proc-macro-crate" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" dependencies = [ - "toml_edit 0.22.27", + "toml_edit 0.23.6", ] [[package]] @@ -3725,9 +4236,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.95" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" dependencies = [ "unicode-ident", ] @@ -3739,7 +4250,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum 
= "a3ef4f2f0422f23a82ec9f628ea2acd12871c81a9362b02c43c1aa86acfc3ba1" dependencies = [ "futures", - "indexmap 2.10.0", + "indexmap 2.11.4", "nix", "tokio", "tracing", @@ -3782,6 +4293,17 @@ dependencies = [ "psl-types", ] +[[package]] +name = "quick-xml" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33" +dependencies = [ + "encoding_rs", + "memchr", + "serde", +] + [[package]] name = "quick-xml" version = "0.37.5" @@ -3793,18 +4315,18 @@ dependencies = [ [[package]] name = "quick-xml" -version = "0.38.1" +version = "0.38.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9845d9dccf565065824e69f9f235fafba1587031eda353c1f1561cd6a6be78f4" +checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89" dependencies = [ "memchr", ] [[package]] name = "quinn" -version = "0.11.8" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" dependencies = [ "bytes", "cfg_aliases", @@ -3812,9 +4334,9 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash", - "rustls", - "socket2 0.5.10", - "thiserror 2.0.12", + "rustls 0.23.32", + "socket2 0.6.0", + "thiserror 2.0.17", "tokio", "tracing", "web-time", @@ -3822,9 +4344,9 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.11.12" +version = "0.11.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e" +checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" dependencies = [ "bytes", "getrandom 0.3.3", @@ -3832,10 +4354,10 @@ dependencies = [ "rand 0.9.2", "ring", "rustc-hash", - "rustls", + "rustls 0.23.32", "rustls-pki-types", "slab", - "thiserror 2.0.12", + "thiserror 2.0.17", "tinyvec", "tracing", "web-time", @@ -3843,23 +4365,23 @@ dependencies = [ [[package]] name = "quinn-udp" -version = "0.5.13" +version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcebb1209ee276352ef14ff8732e24cc2b02bbac986cd74a4c81bcb2f9881970" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" dependencies = [ "cfg_aliases", "libc", "once_cell", - "socket2 0.5.10", + "socket2 0.6.0", "tracing", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] name = "quote" -version = "1.0.40" +version = "1.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1" dependencies = [ "proc-macro2", ] @@ -3986,6 +4508,12 @@ dependencies = [ "rand_core 0.5.1", ] +[[package]] +name = "rangemap" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93e7e49bb0bf967717f7bd674458b3d6b0c5f48ec7e3038166026a69fc22223" + [[package]] name = "raw-window-handle" version = "0.5.2" @@ -4000,11 +4528,11 @@ checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539" [[package]] name = "redox_syscall" -version = "0.5.17" +version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" +checksum = 
"ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", ] [[package]] @@ -4015,34 +4543,34 @@ checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" dependencies = [ "getrandom 0.2.16", "libredox", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] name = "ref-cast" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "regex" -version = "1.11.1" +version = "1.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "8b5288124840bee7b386bc413c487869b360b2b4ec421ea56425128692f2a82c" dependencies = [ "aho-corasick", "memchr", @@ -4052,9 +4580,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "833eb9ce86d40ef33cb1306d8accf7bc8ec2bfea4355cbdebb3df68b40925cad" dependencies = [ "aho-corasick", "memchr", @@ -4063,9 +4591,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" [[package]] name = "rend" @@ -4091,6 +4619,7 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "hyper 0.14.32", + "hyper-rustls 0.24.2", "hyper-tls", "ipnet", "js-sys", @@ -4100,6 +4629,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", + "rustls 0.21.12", "rustls-pemfile", "serde", "serde_json", @@ -4108,6 +4638,7 @@ dependencies = [ "system-configuration 0.5.1", "tokio", "tokio-native-tls", + "tokio-rustls 0.24.1", "tokio-util", "tower-service", "url", @@ -4115,14 +4646,15 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", + "webpki-roots 0.25.4", "winreg 0.50.0", ] [[package]] name = "reqwest" -version = "0.12.22" +version = "0.12.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531" +checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb" dependencies = [ "base64 0.22.1", "bytes", @@ -4135,8 +4667,8 @@ dependencies = [ "http 1.3.1", "http-body 1.0.1", "http-body-util", - "hyper 1.6.0", - "hyper-rustls", + "hyper 1.7.0", + "hyper-rustls 0.27.7", "hyper-util", "js-sys", "log", @@ -4144,14 +4676,14 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls", + "rustls 0.23.32", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper 1.0.2", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.4", "tokio-util", "tower", "tower-http", @@ -4161,7 
+4693,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots", + "webpki-roots 1.0.3", ] [[package]] @@ -4171,17 +4703,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef2bee61e6cffa4635c72d7d81a84294e28f0930db0ddcb0f66d10244674ebed" dependencies = [ "ashpd", - "block2 0.6.1", + "block2 0.6.2", "dispatch2", "glib-sys", "gobject-sys", "gtk-sys", "js-sys", "log", - "objc2 0.6.1", + "objc2 0.6.3", "objc2-app-kit", "objc2-core-foundation", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", "raw-window-handle 0.6.2", "wasm-bindgen", "wasm-bindgen-futures", @@ -4234,9 +4766,9 @@ dependencies = [ [[package]] name = "rmcp" -version = "0.6.0" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb21cd3555f1059f27e4813827338dec44429a08ecd0011acc41d9907b160c00" +checksum = "41ab0892f4938752b34ae47cb53910b1b0921e55e77ddb6e44df666cab17939f" dependencies = [ "base64 0.22.1", "chrono", @@ -4245,13 +4777,13 @@ dependencies = [ "paste", "pin-project-lite", "process-wrap", - "reqwest 0.12.22", + "reqwest 0.12.23", "rmcp-macros", "schemars 1.0.4", "serde", "serde_json", "sse-stream", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-stream", "tokio-util", @@ -4261,22 +4793,56 @@ dependencies = [ [[package]] name = "rmcp-macros" -version = "0.6.0" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5d16ae1ff3ce2c5fd86c37047b2869b75bec795d53a4b1d8257b15415a2354" +checksum = "1827cd98dab34cade0513243c6fe0351f0f0b2c9d6825460bcf45b42804bdda0" dependencies = [ - "darling 0.21.2", + "darling 0.21.3", "proc-macro2", "quote", "serde_json", - "syn 2.0.104", + "syn 2.0.106", +] + +[[package]] +name = "rsa" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78928ac1ed176a5ca1d17e578a1825f3d81ca54cf41053a592584b020cfd691b" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "signature", + "spki", + "subtle", + "zeroize", +] + +[[package]] +name = "rusqlite" +version = "0.32.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7753b721174eb8ff87a9a0e799e2d7bc3749323e773db92e0984debb00019d6e" +dependencies = [ + "bitflags 2.9.4", + "fallible-iterator", + "fallible-streaming-iterator", + "hashlink 0.9.1", + "libsqlite3-sys", + "smallvec", ] [[package]] name = "rust-ini" -version = "0.21.2" +version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7295b7ce3bf4806b419dc3420745998b447178b7005e2011947b38fc5aa6791" +checksum = "796e8d2b6696392a43bea58116b667fb4c29727dc5abd27d6acf338bb4f688c7" dependencies = [ "cfg-if", "ordered-multimap", @@ -4284,9 +4850,9 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.37.2" +version = "1.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b203a6425500a03e0919c42d3c47caca51e79f1132046626d2c8871c5092035d" +checksum = "c8975fc98059f365204d635119cf9c5a60ae67b841ed49b5422a9a7e56cdfac0" dependencies = [ "arrayvec", "borsh", @@ -4321,27 +4887,39 @@ dependencies = [ [[package]] name = "rustix" -version = "1.0.8" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" +checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ - 
"bitflags 2.9.1", + "bitflags 2.9.4", "errno", "libc", "linux-raw-sys", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] name = "rustls" -version = "0.23.31" +version = "0.21.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ebcbd2f03de0fc1122ad9bb24b127a5a6cd51d72604a3f3c50ac459762b6cc" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki 0.101.7", + "sct", +] + +[[package]] +name = "rustls" +version = "0.23.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd3c25631629d034ce7cd9940adc9d45762d46de2b0f57193c4443b92c6d4d40" dependencies = [ "once_cell", "ring", "rustls-pki-types", - "rustls-webpki", + "rustls-webpki 0.103.7", "subtle", "zeroize", ] @@ -4367,9 +4945,19 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.4" +version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e10b3f4191e8a80e6b43eebabfac91e5dcecebb27a71f04e820c47ec41d314bf" dependencies = [ "ring", "rustls-pki-types", @@ -4378,9 +4966,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" @@ -4399,11 +4987,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -4456,7 +5044,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4468,7 +5056,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4483,6 +5071,16 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "seahash" version = "4.1.0" @@ -4495,7 +5093,7 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "core-foundation 0.9.4", "core-foundation-sys", "libc", @@ -4504,9 +5102,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.14.0" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +checksum = 
"cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ "core-foundation-sys", "libc", @@ -4532,42 +5130,54 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.26" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" dependencies = [ "serde", + "serde_core", ] [[package]] name = "serde" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ + "serde_core", "serde_derive", ] [[package]] name = "serde-untagged" -version = "0.1.7" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "299d9c19d7d466db4ab10addd5703e4c615dec2a5a16dbbafe191045e87ee66e" +checksum = "f9faf48a4a2d2693be24c6289dbe26552776eb7737074e6722891fadbe6c5058" dependencies = [ "erased-serde", "serde", + "serde_core", "typeid", ] [[package]] -name = "serde_derive" -version = "1.0.219" +name = "serde_core" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4578,19 +5188,20 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "serde_json" -version = "1.0.142" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", + "serde_core", ] [[package]] @@ -4601,7 +5212,7 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4615,11 +5226,11 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83" +checksum = "5417783452c2be558477e104686f7de5dae53dba813c28435e0e70f82d9b04ee" dependencies = [ - "serde", + "serde_core", ] [[package]] @@ -4636,19 +5247,18 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.14.0" +version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5" +checksum = "6093cd8c01b25262b84927e0f7151692158fab02d961e04c979d3903eba7ecc5" dependencies = [ "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.10.0", + "indexmap 2.11.4", "schemars 0.9.0", "schemars 1.0.4", - "serde", - "serde_derive", + "serde_core", "serde_json", 
"serde_with_macros", "time", @@ -4656,14 +5266,14 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.14.0" +version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f" +checksum = "a7e6c180db0816026a61afa1cff5344fb7ebded7e4d3062772179f2501481c27" dependencies = [ - "darling 0.20.11", + "darling 0.21.3", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4672,7 +5282,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.10.0", + "indexmap 2.11.4", "itoa", "ryu", "serde", @@ -4698,7 +5308,7 @@ checksum = "772ee033c0916d670af7860b6e1ef7d658a4629a6d0b4c8c3e67f09b3765b75d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4780,6 +5390,16 @@ dependencies = [ "libc", ] +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core 0.6.4", +] + [[package]] name = "simd-adler32" version = "0.3.7" @@ -4806,15 +5426,18 @@ checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d" +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "smallvec" version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +dependencies = [ + "serde", +] [[package]] name = "socket2" @@ -4884,6 +5507,213 @@ dependencies = [ "system-deps", ] +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "sqlx" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" +dependencies = [ + "base64 0.22.1", + "bytes", + "crc", + "crossbeam-queue", + "either", + "event-listener", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashbrown 0.15.5", + "hashlink 0.10.0", + "indexmap 2.11.4", + "log", + "memchr", + "once_cell", + "percent-encoding", + "serde", + "serde_json", + "sha2", + "smallvec", + "thiserror 2.0.17", + "tokio", + "tokio-stream", + "tracing", + "url", +] + +[[package]] +name = "sqlx-macros" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn 2.0.106", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" +dependencies = [ + "dotenvy", + "either", + "heck 0.5.0", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn 2.0.106", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.9.4", + "byteorder", + "bytes", + "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa", + "serde", + "sha1", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.17", + "tracing", + "whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.9.4", + "byteorder", + "crc", + "dotenvy", + "etcetera", + "futures-channel", + "futures-core", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "rand 0.8.5", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.17", + "tracing", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea" +dependencies = [ + "atoi", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "serde_urlencoded", + "sqlx-core", + "thiserror 2.0.17", + "tracing", + "url", +] + [[package]] name = "sse-stream" version = "0.2.1" @@ -4934,6 +5764,17 @@ dependencies = [ "quote", ] +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + [[package]] name = "strip-ansi-escapes" version = "0.2.1" @@ -4979,9 +5820,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.104" +version = "2.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" dependencies = [ "proc-macro2", "quote", @@ -5011,7 +5852,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -5053,7 +5894,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum 
= "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "core-foundation 0.9.4", "system-configuration-sys 0.6.0", ] @@ -5087,7 +5928,7 @@ dependencies = [ "cfg-expr", "heck 0.5.0", "pkg-config", - "toml 0.8.23", + "toml 0.8.2", "version-compare", ] @@ -5097,8 +5938,8 @@ version = "0.34.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "959469667dbcea91e5485fc48ba7dd6023face91bb0f1a14681a70f99847c3f7" dependencies = [ - "bitflags 2.9.1", - "block2 0.6.1", + "bitflags 2.9.4", + "block2 0.6.2", "core-foundation 0.10.1", "core-graphics", "crossbeam-channel", @@ -5115,9 +5956,9 @@ dependencies = [ "ndk", "ndk-context", "ndk-sys", - "objc2 0.6.1", + "objc2 0.6.3", "objc2-app-kit", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", "once_cell", "parking_lot", "raw-window-handle 0.6.2", @@ -5139,7 +5980,7 @@ checksum = "f4e16beb8b2ac17db28eab8bca40e62dbfbb34c0fcdc6d9826b11b7b5d047dfd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -5188,15 +6029,15 @@ dependencies = [ "log", "mime", "muda", - "objc2 0.6.1", + "objc2 0.6.3", "objc2-app-kit", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", "objc2-ui-kit", "objc2-web-kit", "percent-encoding", "plist", "raw-window-handle 0.6.2", - "reqwest 0.12.22", + "reqwest 0.12.23", "serde", "serde_json", "serde_repr", @@ -5207,7 +6048,7 @@ dependencies = [ "tauri-runtime", "tauri-runtime-wry", "tauri-utils", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tray-icon", "url", @@ -5236,7 +6077,7 @@ dependencies = [ "serde_json", "tauri-utils", "tauri-winres", - "toml 0.9.5", + "toml 0.9.7", "walkdir", ] @@ -5257,9 +6098,9 @@ dependencies = [ "serde", "serde_json", "sha2", - "syn 2.0.104", + "syn 2.0.106", "tauri-utils", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "url", "uuid", @@ -5275,7 +6116,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "tauri-codegen", "tauri-utils", ] @@ -5293,7 +6134,7 @@ dependencies = [ "serde", "serde_json", "tauri-utils", - "toml 0.9.5", + "toml 0.9.7", "walkdir", ] @@ -5311,7 +6152,7 @@ dependencies = [ "tauri", "tauri-plugin", "tauri-utils", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "url", "windows-registry", @@ -5320,9 +6161,9 @@ dependencies = [ [[package]] name = "tauri-plugin-dialog" -version = "2.3.2" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37e5858cc7b455a73ab4ea2ebc08b5be33682c00ff1bf4cad5537d4fb62499d9" +checksum = "0beee42a4002bc695550599b011728d9dfabf82f767f134754ed6655e434824e" dependencies = [ "log", "raw-window-handle 0.6.2", @@ -5332,15 +6173,15 @@ dependencies = [ "tauri", "tauri-plugin", "tauri-plugin-fs", - "thiserror 2.0.12", + "thiserror 2.0.17", "url", ] [[package]] name = "tauri-plugin-fs" -version = "2.4.1" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c6ef84ee2f2094ce093e55106d90d763ba343fad57566992962e8f76d113f99" +checksum = "315784ec4be45e90a987687bae7235e6be3d6e9e350d2b75c16b8a4bf22c1db7" dependencies = [ "anyhow", "dunce", @@ -5353,8 +6194,8 @@ dependencies = [ "tauri", "tauri-plugin", "tauri-utils", - "thiserror 2.0.12", - "toml 0.8.23", + "thiserror 2.0.17", + "toml 0.9.7", "url", ] @@ -5364,7 +6205,7 @@ version = "0.6.599" dependencies = [ "ash", "libc", - "libloading 0.8.8", + "libloading 0.8.9", "log", "nvml-wrapper", "serde", @@ -5377,23 +6218,23 @@ dependencies = [ 
[[package]] name = "tauri-plugin-http" -version = "2.5.1" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcde333d97e565a7765aad82f32d8672458f7bd77b6ee653830d5dded9d7b5c2" +checksum = "938a3d7051c9a82b431e3a0f3468f85715b3442b3c3a3913095e9fa509e2652c" dependencies = [ "bytes", "cookie_store", "data-url", "http 1.3.1", "regex", - "reqwest 0.12.22", + "reqwest 0.12.23", "schemars 0.8.22", "serde", "serde_json", "tauri", "tauri-plugin", "tauri-plugin-fs", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "url", "urlpattern", @@ -5417,49 +6258,49 @@ dependencies = [ "tauri", "tauri-plugin", "tauri-plugin-hardware", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", ] [[package]] name = "tauri-plugin-log" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a59139183e0907cec1499dddee4e085f5a801dc659efa0848ee224f461371426" +checksum = "61c1438bc7662acd16d508c919b3c087efd63669a4c75625dff829b1c75975ec" dependencies = [ "android_logger", "byte-unit", "fern", "log", - "objc2 0.6.1", - "objc2-foundation 0.3.1", + "objc2 0.6.3", + "objc2-foundation 0.3.2", "serde", "serde_json", "serde_repr", "swift-rs", "tauri", "tauri-plugin", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", ] [[package]] name = "tauri-plugin-opener" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecee219f11cdac713ab32959db5d0cceec4810ba4f4458da992292ecf9660321" +checksum = "786156aa8e89e03d271fbd3fe642207da8e65f3c961baa9e2930f332bf80a1f5" dependencies = [ "dunce", "glob", "objc2-app-kit", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", "open", "schemars 0.8.22", "serde", "serde_json", "tauri", "tauri-plugin", - "thiserror 2.0.12", + "thiserror 2.0.17", "url", "windows 0.61.3", "zbus", @@ -5480,14 +6321,36 @@ dependencies = [ "sys-locale", "tauri", "tauri-plugin", - "thiserror 2.0.12", + "thiserror 2.0.17", +] + +[[package]] +name = "tauri-plugin-rag" +version = "0.1.0" +dependencies = [ + "calamine", + "chardetng", + "csv", + "encoding_rs", + "html2text", + "infer 0.15.0", + "log", + "pdf-extract", + "quick-xml 0.31.0", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "thiserror 2.0.17", + "tokio", + "zip 0.6.6", ] [[package]] name = "tauri-plugin-shell" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b9ffadec5c3523f11e8273465cacb3d86ea7652a28e6e2a2e9b5c182f791d25" +checksum = "54777d0c0d8add34eea3ced84378619ef5b97996bd967d3038c668feefd21071" dependencies = [ "encoding_rs", "log", @@ -5500,7 +6363,7 @@ dependencies = [ "shared_child", "tauri", "tauri-plugin", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", ] @@ -5514,7 +6377,7 @@ dependencies = [ "serde_json", "tauri", "tauri-plugin-deep-link", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "windows-sys 0.60.2", "zbus", @@ -5522,16 +6385,16 @@ dependencies = [ [[package]] name = "tauri-plugin-store" -version = "2.3.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5916c609664a56c82aeaefffca9851fd072d4d41f73d63f22ee3ee451508194f" +checksum = "d85dd80d60a76ee2c2fdce09e9ef30877b239c2a6bb76e6d7d03708aa5f13a19" dependencies = [ "dunce", "serde", "serde_json", "tauri", "tauri-plugin", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tracing", ] @@ -5547,12 +6410,12 @@ dependencies = [ "flate2", "futures-util", "http 1.3.1", - "infer", + "infer 0.19.0", 
"log", "minisign-verify", "osakit", "percent-encoding", - "reqwest 0.12.22", + "reqwest 0.12.23", "semver", "serde", "serde_json", @@ -5560,12 +6423,28 @@ dependencies = [ "tauri", "tauri-plugin", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "url", "windows-sys 0.60.2", - "zip 4.3.0", + "zip 4.6.1", +] + +[[package]] +name = "tauri-plugin-vector-db" +version = "0.1.0" +dependencies = [ + "dirs", + "log", + "rusqlite", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "thiserror 2.0.17", + "tokio", + "uuid", ] [[package]] @@ -5579,14 +6458,14 @@ dependencies = [ "gtk", "http 1.3.1", "jni", - "objc2 0.6.1", + "objc2 0.6.3", "objc2-ui-kit", "objc2-web-kit", "raw-window-handle 0.6.2", "serde", "serde_json", "tauri-utils", - "thiserror 2.0.12", + "thiserror 2.0.17", "url", "webkit2gtk", "webview2-com", @@ -5603,9 +6482,9 @@ dependencies = [ "http 1.3.1", "jni", "log", - "objc2 0.6.1", + "objc2 0.6.3", "objc2-app-kit", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", "once_cell", "percent-encoding", "raw-window-handle 0.6.2", @@ -5631,9 +6510,9 @@ dependencies = [ "ctor", "dunce", "glob", - "html5ever", + "html5ever 0.29.1", "http 1.3.1", - "infer", + "infer 0.19.0", "json-patch", "kuchikiki", "log", @@ -5649,8 +6528,8 @@ dependencies = [ "serde_json", "serde_with", "swift-rs", - "thiserror 2.0.12", - "toml 0.9.5", + "thiserror 2.0.17", + "toml 0.9.7", "url", "urlpattern", "uuid", @@ -5659,26 +6538,25 @@ dependencies = [ [[package]] name = "tauri-winres" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c6d9028d41d4de835e3c482c677a8cb88137ac435d6ff9a71f392d4421576c9" +checksum = "fd21509dd1fa9bd355dc29894a6ff10635880732396aa38c0066c1e6c1ab8074" dependencies = [ "embed-resource", - "indexmap 2.10.0", - "toml 0.9.5", + "toml 0.9.7", ] [[package]] name = "tempfile" -version = "3.20.0" +version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ "fastrand", "getrandom 0.3.3", "once_cell", "rustix", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -5703,11 +6581,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl 2.0.12", + "thiserror-impl 2.0.17", ] [[package]] @@ -5718,18 +6596,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -5743,9 +6621,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.41" +version = "0.3.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +checksum = 
"91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", "itoa", @@ -5760,15 +6638,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" [[package]] name = "time-macros" -version = "0.2.22" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" dependencies = [ "num-conv", "time-core", @@ -5795,9 +6673,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" dependencies = [ "tinyvec_macros", ] @@ -5837,7 +6715,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -5852,11 +6730,21 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.2" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls", + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls 0.23.32", "tokio", ] @@ -5886,47 +6774,47 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.23" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +checksum = "185d8ab0dfbb35cf1399a6344d8484209c088f75f8f68230da55d48d95d43e3d" dependencies = [ "serde", "serde_spanned 0.6.9", - "toml_datetime 0.6.11", - "toml_edit 0.22.27", + "toml_datetime 0.6.3", + "toml_edit 0.20.2", ] [[package]] name = "toml" -version = "0.9.5" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75129e1dc5000bfbaa9fee9d1b21f974f9fbad9daec557a521ee6e080825f6e8" +checksum = "00e5e5d9bf2475ac9d4f0d9edab68cc573dc2fd644b0dba36b0c30a92dd9eaa0" dependencies = [ - "indexmap 2.10.0", - "serde", - "serde_spanned 1.0.0", - "toml_datetime 0.7.0", + "indexmap 2.11.4", + "serde_core", + "serde_spanned 1.0.2", + "toml_datetime 0.7.2", "toml_parser", "toml_writer", - "winnow 0.7.12", + "winnow 0.7.13", ] [[package]] name = "toml_datetime" -version = "0.6.11" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" dependencies = [ "serde", ] [[package]] name = "toml_datetime" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" +checksum 
= "32f1085dec27c2b6632b04c80b3bb1b4300d6495d1e129693bdda7d91e72eec1" dependencies = [ - "serde", + "serde_core", ] [[package]] @@ -5935,56 +6823,50 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.10.0", - "toml_datetime 0.6.11", + "indexmap 2.11.4", + "toml_datetime 0.6.3", "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.20.7" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" +checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338" dependencies = [ - "indexmap 2.10.0", - "toml_datetime 0.6.11", - "winnow 0.5.40", -] - -[[package]] -name = "toml_edit" -version = "0.22.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" -dependencies = [ - "indexmap 2.10.0", + "indexmap 2.11.4", "serde", "serde_spanned 0.6.9", - "toml_datetime 0.6.11", - "toml_write", - "winnow 0.7.12", + "toml_datetime 0.6.3", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.23.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3effe7c0e86fdff4f69cdd2ccc1b96f933e24811c5441d44904e8683e27184b" +dependencies = [ + "indexmap 2.11.4", + "toml_datetime 0.7.2", + "toml_parser", + "winnow 0.7.13", ] [[package]] name = "toml_parser" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b551886f449aa90d4fe2bdaa9f4a2577ad2dde302c61ecf262d80b116db95c10" +checksum = "4cf893c33be71572e0e9aa6dd15e6677937abd686b066eac3f8cd3531688a627" dependencies = [ - "winnow 0.7.12", + "winnow 0.7.13", ] -[[package]] -name = "toml_write" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" - [[package]] name = "toml_writer" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64" +checksum = "d163a63c116ce562a22cda521fcc4d79152e7aba014456fb5eb442f6d6a10109" [[package]] name = "tower" @@ -6007,7 +6889,7 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "bytes", "futures-util", "http 1.3.1", @@ -6037,6 +6919,7 @@ version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ + "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -6050,7 +6933,7 @@ checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -6072,15 +6955,15 @@ dependencies = [ "dirs", "libappindicator", "muda", - "objc2 0.6.1", + "objc2 0.6.3", "objc2-app-kit", "objc2-core-foundation", "objc2-core-graphics", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", "once_cell", "png", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "windows-sys 0.59.0", ] @@ -6090,6 +6973,15 @@ version = "0.2.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +[[package]] +name = "type1-encoding-parser" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3d6cc09e1a99c7e01f2afe4953789311a1c50baebbdac5b477ecf78e2e92a5b" +dependencies = [ + "pom", +] + [[package]] name = "typeid" version = "1.0.3" @@ -6098,9 +6990,9 @@ checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" [[package]] name = "typenum" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "uds_windows" @@ -6155,10 +7047,31 @@ dependencies = [ ] [[package]] -name = "unicode-ident" -version = "1.0.18" +name = "unicode-bidi" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-ident" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" [[package]] name = "unicode-segmentation" @@ -6166,6 +7079,12 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -6180,9 +7099,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", @@ -6222,9 +7141,9 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "uuid" -version = "1.17.0" +version = "1.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" +checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" dependencies = [ "getrandom 0.3.3", "js-sys", @@ -6307,8 +7226,8 @@ dependencies = [ "crossbeam-queue", "half", "heck 0.4.1", - "indexmap 2.10.0", - "libloading 0.8.8", + "indexmap 2.11.4", + "libloading 0.8.9", "objc", "once_cell", "parking_lot", @@ -6369,44 +7288,60 @@ checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasi" 
-version = "0.14.2+wasi-0.2.4" +version = "0.14.7+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" dependencies = [ - "wit-bindgen-rt", + "wasip2", ] [[package]] -name = "wasm-bindgen" -version = "0.2.100" +name = "wasip2" +version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1da10c01ae9f1ae40cbfac0bac3b1e724b320abfcf52229f80b547c0d250e2d" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", + "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +checksum = "671c9a5a66f49d8a47345ab942e2cb93c7d1d0339065d4f8139c486121b43b19" dependencies = [ "bumpalo", "log", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.50" +version = "0.4.54" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +checksum = "7e038d41e478cc73bae0ff9b36c60cff1c98b8f38f8d7e8061e79ee63608ac5c" dependencies = [ "cfg-if", "js-sys", @@ -6417,9 +7352,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +checksum = "7ca60477e4c59f5f2986c50191cd972e3a50d8a95603bc9434501cf156a9a119" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6427,22 +7362,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +checksum = "9f07d2f20d4da7b26400c9f4a0511e6e0345b040694e8a75bd41d578fa4421d7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +checksum = "bad67dc8b2a1a6e5448428adec4c3e84c43e561d8c9ee8a9e5aabeb193ec41d1" dependencies = [ "unicode-ident", ] @@ -6480,7 +7415,7 @@ version = "0.31.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c66a47e840dc20793f2264eb4b3e4ecb4b75d91c0dd4af04b456128e0bdd449d" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "rustix", "wayland-backend", "wayland-scanner", @@ -6492,7 +7427,7 @@ version = "0.32.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "efa790ed75fbfd71283bd2521a1cfdc022aabcc28bdcff00851f9e4ae88d9901" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "wayland-backend", "wayland-client", "wayland-scanner", @@ -6522,9 +7457,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.77" +version = "0.3.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +checksum = "9367c417a924a74cae129e6a2ae3b47fabb1f8995595ab474029da749a8be120" dependencies = [ "js-sys", "wasm-bindgen", @@ -6586,9 +7521,15 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "1.0.2" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + +[[package]] +name = "webpki-roots" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b130c0d2d49f8b6889abc456e795e82525204f27c42cf767cf0d7734e089b8" dependencies = [ "rustls-pki-types", ] @@ -6603,8 +7544,8 @@ dependencies = [ "webview2-com-sys", "windows 0.61.3", "windows-core 0.61.2", - "windows-implement 0.60.0", - "windows-interface 0.59.1", + "windows-implement 0.60.2", + "windows-interface 0.59.3", ] [[package]] @@ -6615,7 +7556,7 @@ checksum = "1d228f15bba3b9d56dde8bddbee66fa24545bd17b48d5128ccf4a8742b18e431" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -6624,11 +7565,27 @@ version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36695906a1b53a3bf5c4289621efedac12b73eeb0b89e7e1a89b517302d5d75c" dependencies = [ - "thiserror 2.0.12", + "thiserror 2.0.17", "windows 0.61.3", "windows-core 0.61.2", ] +[[package]] +name = "weezl" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3" + +[[package]] +name = "whoami" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" +dependencies = [ + "libredox", + "wasite", +] + [[package]] name = "winapi" version = "0.3.9" @@ -6647,11 +7604,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -6666,10 +7623,10 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9bec5a31f3f9362f2258fd0e9c9dd61a9ca432e7306cc78c444258f0dce9a9c" dependencies = [ - "objc2 0.6.1", + "objc2 0.6.3", "objc2-app-kit", "objc2-core-foundation", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", "raw-window-handle 0.6.2", "windows-sys 0.59.0", "windows-version", @@ -6694,7 +7651,7 @@ dependencies = [ "windows-collections", "windows-core 0.61.2", "windows-future", - "windows-link", + "windows-link 0.1.3", "windows-numerics", ] @@ -6725,11 +7682,24 @@ version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" dependencies = [ - "windows-implement 0.60.0", - "windows-interface 0.59.1", - "windows-link", + "windows-implement 0.60.2", + "windows-interface 0.59.3", + "windows-link 0.1.3", "windows-result 0.3.4", - "windows-strings", + "windows-strings 0.4.2", +] + +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement 0.60.2", + "windows-interface 0.59.3", + "windows-link 0.2.1", + "windows-result 0.4.1", + "windows-strings 0.5.1", ] [[package]] @@ -6739,7 +7709,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" dependencies = [ "windows-core 0.61.2", - "windows-link", + "windows-link 0.1.3", "windows-threading", ] @@ -6751,18 +7721,18 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "windows-implement" -version = "0.60.0" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -6773,18 +7743,18 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "windows-interface" -version = "0.59.1" +version = "0.59.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -6793,6 +7763,12 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-numerics" version = "0.2.0" @@ -6800,7 +7776,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" dependencies = [ "windows-core 0.61.2", - "windows-link", + "windows-link 0.1.3", ] [[package]] @@ -6809,9 +7785,9 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" dependencies = [ - "windows-link", + "windows-link 0.1.3", "windows-result 0.3.4", - "windows-strings", + "windows-strings 0.4.2", ] [[package]] @@ -6829,7 +7805,16 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ - "windows-link", + "windows-link 0.1.3", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link 0.2.1", ] [[package]] @@ -6838,7 +7823,16 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" dependencies = [ - "windows-link", + "windows-link 0.1.3", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link 0.2.1", ] [[package]] @@ -6883,7 +7877,16 @@ version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets 0.53.3", + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link 0.2.1", ] [[package]] @@ -6934,19 +7937,19 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.53.3" +version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ - "windows-link", - "windows_aarch64_gnullvm 0.53.0", - "windows_aarch64_msvc 0.53.0", - "windows_i686_gnu 0.53.0", - "windows_i686_gnullvm 0.53.0", - "windows_i686_msvc 0.53.0", - "windows_x86_64_gnu 0.53.0", - "windows_x86_64_gnullvm 0.53.0", - "windows_x86_64_msvc 0.53.0", + "windows-link 0.2.1", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", ] [[package]] @@ -6955,16 +7958,16 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" dependencies = [ - "windows-link", + "windows-link 0.1.3", ] [[package]] name = "windows-version" -version = "0.1.4" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e04a5c6627e310a23ad2358483286c7df260c964eb2d003d8efd6d0f4e79265c" +checksum = "e4060a1da109b9d0326b7262c8e12c84df67cc0dbc9e33cf49e01ccc2eb63631" dependencies = [ - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -6987,9 +7990,9 @@ checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" [[package]] name = "windows_aarch64_msvc" @@ -7011,9 +8014,9 @@ checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" 
[[package]] name = "windows_i686_gnu" @@ -7035,9 +8038,9 @@ checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" @@ -7047,9 +8050,9 @@ checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" [[package]] name = "windows_i686_msvc" @@ -7071,9 +8074,9 @@ checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" [[package]] name = "windows_x86_64_gnu" @@ -7095,9 +8098,9 @@ checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" [[package]] name = "windows_x86_64_gnullvm" @@ -7119,9 +8122,9 @@ checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" [[package]] name = "windows_x86_64_msvc" @@ -7143,9 +8146,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] name = "winnow" @@ -7158,9 +8161,9 @@ dependencies = [ [[package]] name = "winnow" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" dependencies = [ "memchr", ] @@ -7186,13 +8189,10 @@ dependencies = [ ] [[package]] -name = "wit-bindgen-rt" -version = "0.39.0" +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" -dependencies = [ - "bitflags 2.9.1", -] +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "wrapcenum-derive" @@ -7203,7 +8203,7 @@ dependencies = [ "darling 0.20.11", 
"proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -7214,12 +8214,12 @@ checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "wry" -version = "0.53.3" +version = "0.53.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f0e9642a0d061f6236c54ccae64c2722a7879ad4ec7dff59bd376d446d8e90" +checksum = "6d78ec082b80fa088569a970d043bb3050abaabf4454101d44514ee8d9a8c9f6" dependencies = [ "base64 0.22.1", - "block2 0.6.1", + "block2 0.6.2", "cookie", "crossbeam-channel", "dirs", @@ -7227,17 +8227,17 @@ dependencies = [ "dunce", "gdkx11", "gtk", - "html5ever", + "html5ever 0.29.1", "http 1.3.1", "javascriptcore-rs", "jni", "kuchikiki", "libc", "ndk", - "objc2 0.6.1", + "objc2 0.6.3", "objc2-app-kit", "objc2-core-foundation", - "objc2-foundation 0.3.1", + "objc2-foundation 0.3.2", "objc2-ui-kit", "objc2-web-kit", "once_cell", @@ -7246,7 +8246,7 @@ dependencies = [ "sha2", "soup3", "tao-macros", - "thiserror 2.0.12", + "thiserror 2.0.17", "url", "webkit2gtk", "webkit2gtk-sys", @@ -7289,9 +8289,9 @@ dependencies = [ [[package]] name = "xattr" -version = "1.5.1" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af3a19837351dc82ba89f8a125e22a3c475f05aba604acc023d62b2739ae2909" +checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" dependencies = [ "libc", "rustix", @@ -7303,6 +8303,17 @@ version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fd8403733700263c6eb89f192880191f1b83e332f7a20371ddcf421c4a337c7" +[[package]] +name = "xml5ever" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4034e1d05af98b51ad7214527730626f019682d797ba38b51689212118d8e650" +dependencies = [ + "log", + "mac", + "markup5ever 0.11.0", +] + [[package]] name = "yoke" version = "0.8.0" @@ -7323,15 +8334,15 @@ checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "synstructure", ] [[package]] name = "zbus" -version = "5.9.0" +version = "5.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bb4f9a464286d42851d18a605f7193b8febaf5b0919d71c6399b7b26e5b0aad" +checksum = "2d07e46d035fb8e375b2ce63ba4e4ff90a7f73cf2ffb0138b29e1158d2eaadf7" dependencies = [ "async-broadcast", "async-executor", @@ -7354,8 +8365,8 @@ dependencies = [ "tokio", "tracing", "uds_windows", - "windows-sys 0.59.0", - "winnow 0.7.12", + "windows-sys 0.60.2", + "winnow 0.7.13", "zbus_macros", "zbus_names", "zvariant", @@ -7363,14 +8374,14 @@ dependencies = [ [[package]] name = "zbus_macros" -version = "5.9.0" +version = "5.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef9859f68ee0c4ee2e8cde84737c78e3f4c54f946f2a38645d0d4c7a95327659" +checksum = "57e797a9c847ed3ccc5b6254e8bcce056494b375b511b3d6edcec0aeb4defaca" dependencies = [ - "proc-macro-crate 3.3.0", + "proc-macro-crate 3.4.0", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "zbus_names", "zvariant", "zvariant_utils", @@ -7384,28 +8395,28 @@ checksum = "7be68e64bf6ce8db94f63e72f0c7eb9a60d733f7e0499e628dfab0f84d6bcb97" dependencies = [ "serde", "static_assertions", - "winnow 0.7.12", + "winnow 0.7.13", "zvariant", ] [[package]] name = "zerocopy" -version = "0.8.26" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" +checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.26" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" +checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -7425,15 +8436,15 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "synstructure", ] [[package]] name = "zeroize" -version = "1.8.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" [[package]] name = "zerotrie" @@ -7465,7 +8476,7 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -7490,13 +8501,13 @@ dependencies = [ [[package]] name = "zip" -version = "4.3.0" +version = "4.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aed4ac33e8eb078c89e6cbb1d5c4c7703ec6d299fc3e7c3695af8f8b423468b" +checksum = "caa8cd6af31c3b31c6631b8f483848b91589021b28fffe50adada48d4f4d2ed1" dependencies = [ "arbitrary", "crc32fast", - "indexmap 2.10.0", + "indexmap 2.11.4", "memchr", ] @@ -7531,42 +8542,41 @@ dependencies = [ [[package]] name = "zvariant" -version = "5.6.0" +version = "5.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91b3680bb339216abd84714172b5138a4edac677e641ef17e1d8cb1b3ca6e6f" +checksum = "999dd3be73c52b1fccd109a4a81e4fcd20fab1d3599c8121b38d04e1419498db" dependencies = [ "endi", "enumflags2", "serde", "url", - "winnow 0.7.12", + "winnow 0.7.13", "zvariant_derive", "zvariant_utils", ] [[package]] name = "zvariant_derive" -version = "5.6.0" +version = "5.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a8c68501be459a8dbfffbe5d792acdd23b4959940fc87785fb013b32edbc208" +checksum = "6643fd0b26a46d226bd90d3f07c1b5321fe9bb7f04673cb37ac6d6883885b68e" dependencies = [ - "proc-macro-crate 3.3.0", + "proc-macro-crate 3.4.0", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "zvariant_utils", ] [[package]] name = "zvariant_utils" -version = "3.2.0" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16edfee43e5d7b553b77872d99bc36afdda75c223ca7ad5e3fbecd82ca5fc34" +checksum = "c6949d142f89f6916deca2232cf26a8afacf2b9fdc35ce766105e104478be599" dependencies = [ "proc-macro2", "quote", "serde", - "static_assertions", - "syn 2.0.104", - "winnow 0.7.12", + "syn 2.0.106", + "winnow 0.7.13", ] diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index 43738b032..7407bfa87 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -35,6 +35,7 @@ mobile = [ "tauri/protocol-asset", "tauri/test", "tauri/wry", + "dep:sqlx", ] test-tauri = [ "tauri/wry", @@ -59,11 +60,12 @@ hyper = { version = "0.14", features = ["server"] } jan-utils = { path = "./utils" } libloading = "0.8.7" log = "0.4" -reqwest = { version = "0.11", features = ["json", "blocking", "stream", 
"native-tls-vendored"] } rmcp = { version = "0.6.0", features = [ "client", "transport-sse-client", + "transport-sse-client-reqwest", "transport-streamable-http-client", + "transport-streamable-http-client-reqwest", "transport-child-process", "tower", "reqwest", @@ -77,12 +79,15 @@ tauri-plugin-dialog = "2.2.1" tauri-plugin-deep-link = { version = "2", optional = true } tauri-plugin-hardware = { path = "./plugins/tauri-plugin-hardware", optional = true } tauri-plugin-llamacpp = { path = "./plugins/tauri-plugin-llamacpp" } +tauri-plugin-vector-db = { path = "./plugins/tauri-plugin-vector-db" } +tauri-plugin-rag = { path = "./plugins/tauri-plugin-rag" } tauri-plugin-http = { version = "2", features = ["unsafe-headers"] } tauri-plugin-log = "2.0.0-rc" tauri-plugin-opener = "2.2.7" tauri-plugin-os = "2.2.1" tauri-plugin-shell = "2.2.0" tauri-plugin-store = "2" +sqlx = { version = "0.8", features = ["runtime-tokio", "sqlite"], optional = true } thiserror = "2.0.12" tokio = { version = "1", features = ["full"] } tokio-util = "0.7.14" @@ -105,11 +110,13 @@ libc = "0.2.172" windows-sys = { version = "0.60.2", features = ["Win32_Storage_FileSystem"] } [target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies] +reqwest = { version = "0.11", features = ["json", "blocking", "stream", "native-tls-vendored"] } tauri-plugin-updater = "2" once_cell = "1.18" tauri-plugin-single-instance = { version = "2", features = ["deep-link"] } [target.'cfg(any(target_os = "android", target_os = "ios"))'.dependencies] +reqwest = { version = "0.11", features = ["json", "blocking", "stream", "rustls-tls"], default-features = false } tauri-plugin-dialog = { version = "2.2.1", default-features = false } tauri-plugin-http = { version = "2", default-features = false } tauri-plugin-log = { version = "2.0.0-rc", default-features = false } diff --git a/src-tauri/capabilities/default.json b/src-tauri/capabilities/default.json index 5c5e7d48d..8d054b0c1 100644 --- a/src-tauri/capabilities/default.json +++ b/src-tauri/capabilities/default.json @@ -22,6 +22,8 @@ "core:webview:allow-create-webview-window", "opener:allow-open-url", "store:default", + "vector-db:default", + "rag:default", { "identifier": "http:default", "allow": [ diff --git a/src-tauri/capabilities/desktop.json b/src-tauri/capabilities/desktop.json index 41be646d3..2e2eaa109 100644 --- a/src-tauri/capabilities/desktop.json +++ b/src-tauri/capabilities/desktop.json @@ -12,6 +12,8 @@ "core:webview:allow-set-webview-zoom", "core:window:allow-start-dragging", "core:window:allow-set-theme", + "core:window:allow-get-all-windows", + "core:event:allow-listen", "shell:allow-spawn", "shell:allow-open", "core:app:allow-set-app-theme", @@ -23,6 +25,8 @@ "core:webview:allow-create-webview-window", "opener:allow-open-url", "store:default", + "vector-db:default", + "rag:default", "llamacpp:default", "deep-link:default", "hardware:default", @@ -60,4 +64,4 @@ ] } ] -} \ No newline at end of file +} diff --git a/src-tauri/capabilities/log-app-window.json b/src-tauri/capabilities/log-app-window.json index 9f95d1bb9..1bc329ab4 100644 --- a/src-tauri/capabilities/log-app-window.json +++ b/src-tauri/capabilities/log-app-window.json @@ -1,14 +1,18 @@ { "$schema": "../gen/schemas/desktop-schema.json", - "identifier": "logs-app-window", + "identifier": "log-app-window", "description": "enables permissions for the logs app window", "windows": ["logs-app-window"], + "platforms": ["linux", "macOS", "windows"], "permissions": [ "core:default", 
"core:window:allow-start-dragging", "core:window:allow-set-theme", + "core:window:allow-get-all-windows", + "core:event:allow-listen", "log:default", "core:webview:allow-create-webview-window", + "core:webview:allow-get-all-webviews", "core:window:allow-set-focus" ] } diff --git a/src-tauri/capabilities/logs-window.json b/src-tauri/capabilities/logs-window.json index ef56e6f75..1a166f503 100644 --- a/src-tauri/capabilities/logs-window.json +++ b/src-tauri/capabilities/logs-window.json @@ -3,12 +3,16 @@ "identifier": "logs-window", "description": "enables permissions for the logs window", "windows": ["logs-window-local-api-server"], + "platforms": ["linux", "macOS", "windows"], "permissions": [ "core:default", "core:window:allow-start-dragging", "core:window:allow-set-theme", + "core:window:allow-get-all-windows", + "core:event:allow-listen", "log:default", "core:webview:allow-create-webview-window", + "core:webview:allow-get-all-webviews", "core:window:allow-set-focus" ] } diff --git a/src-tauri/capabilities/system-monitor-window.json b/src-tauri/capabilities/system-monitor-window.json index 68a75e9fb..cec43f8d8 100644 --- a/src-tauri/capabilities/system-monitor-window.json +++ b/src-tauri/capabilities/system-monitor-window.json @@ -8,13 +8,28 @@ "core:default", "core:window:allow-start-dragging", "core:window:allow-set-theme", + "core:window:allow-get-all-windows", + "core:event:allow-listen", "log:default", "core:webview:allow-create-webview-window", + "core:webview:allow-get-all-webviews", "core:window:allow-set-focus", "hardware:allow-get-system-info", "hardware:allow-get-system-usage", "llamacpp:allow-get-devices", "llamacpp:allow-read-gguf-metadata", - "deep-link:allow-get-current" + "deep-link:allow-get-current", + { + "identifier": "http:default", + "allow": [ + { + "url": "https://*:*" + }, + { + "url": "http://*:*" + } + ], + "deny": [] + } ] } diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml b/src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml index e1a57b962..38f7de3bd 100644 --- a/src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml +++ b/src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml @@ -23,9 +23,14 @@ sysinfo = "0.34.2" tauri = { version = "2.5.0", default-features = false, features = [] } thiserror = "2.0.12" tokio = { version = "1", features = ["full"] } -reqwest = { version = "0.11", features = ["json", "blocking", "stream"] } tauri-plugin-hardware = { path = "../tauri-plugin-hardware" } +[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies] +reqwest = { version = "0.11", features = ["json", "blocking", "stream", "native-tls"] } + +[target.'cfg(any(target_os = "android", target_os = "ios"))'.dependencies] +reqwest = { version = "0.11", features = ["json", "blocking", "stream", "rustls-tls"], default-features = false } + # Unix-specific dependencies [target.'cfg(unix)'.dependencies] nix = { version = "=0.30.1", features = ["signal", "process"] } diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/guest-js/index.ts b/src-tauri/plugins/tauri-plugin-llamacpp/guest-js/index.ts index 957839a63..7c0e3e4be 100644 --- a/src-tauri/plugins/tauri-plugin-llamacpp/guest-js/index.ts +++ b/src-tauri/plugins/tauri-plugin-llamacpp/guest-js/index.ts @@ -30,12 +30,14 @@ export async function cleanupLlamaProcesses(): Promise { export async function loadLlamaModel( backendPath: string, libraryPath?: string, - args: string[] = [] + args: string[] = [], + isEmbedding: boolean = false ): Promise { return await invoke('plugin:llamacpp|load_llama_model', 
{ backendPath, libraryPath, args, + isEmbedding, }) } diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/commands.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/commands.rs index 96ecb36bc..1d898b4d9 100644 --- a/src-tauri/plugins/tauri-plugin-llamacpp/src/commands.rs +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/commands.rs @@ -44,6 +44,7 @@ pub async fn load_llama_model( library_path: Option<&str>, mut args: Vec, envs: HashMap, + is_embedding: bool, ) -> ServerResult { let state: State = app_handle.state(); let mut process_map = state.llama_server_process.lock().await; @@ -223,6 +224,7 @@ pub async fn load_llama_model( port: port, model_id: model_id, model_path: model_path_pb.display().to_string(), + is_embedding: is_embedding, api_key: api_key, mmproj_path: mmproj_path_string, }; diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/commands.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/commands.rs index 03e949eba..5af92f91d 100644 --- a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/commands.rs +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/commands.rs @@ -87,19 +87,25 @@ pub async fn is_model_supported( ); const RESERVE_BYTES: u64 = 2288490189; - let total_system_memory = system_info.total_memory * 1024 * 1024; + let total_system_memory: u64 = match system_info.gpus.is_empty() { + // on MacOS with unified memory, treat RAM = 0 for now + true => 0, + false => system_info.total_memory * 1024 * 1024, + }; + // Calculate total VRAM from all GPUs - let total_vram: u64 = if system_info.gpus.is_empty() { + let total_vram: u64 = match system_info.gpus.is_empty() { // On macOS with unified memory, GPU info may be empty // Use total RAM as VRAM since memory is shared - log::info!("No GPUs detected (likely unified memory system), using total RAM as VRAM"); - total_system_memory - } else { - system_info + true => { + log::info!("No GPUs detected (likely unified memory system), using total RAM as VRAM"); + system_info.total_memory * 1024 * 1024 + } + false => system_info .gpus .iter() .map(|g| g.total_memory * 1024 * 1024) - .sum::() + .sum::(), }; log::info!("Total VRAM reported/calculated (in bytes): {}", &total_vram); @@ -113,7 +119,7 @@ pub async fn is_model_supported( let usable_total_memory = if total_system_memory > RESERVE_BYTES { (total_system_memory - RESERVE_BYTES) + usable_vram } else { - 0 + usable_vram }; log::info!("System RAM: {} bytes", &total_system_memory); log::info!("Total VRAM: {} bytes", &total_vram); diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/model_planner.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/model_planner.rs index 14642af60..aad9dfe16 100644 --- a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/model_planner.rs +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/model_planner.rs @@ -80,25 +80,25 @@ pub async fn plan_model_load( log::info!("Got GPUs:\n{:?}", &sys_info.gpus); - let total_ram: u64 = sys_info.total_memory * 1024 * 1024; - log::info!( - "Total system memory reported from tauri_plugin_hardware(in bytes): {}", - &total_ram - ); + let total_ram: u64 = match sys_info.gpus.is_empty() { + // Consider RAM as 0 for unified memory + true => 0, + false => sys_info.total_memory * 1024 * 1024, + }; - let total_vram: u64 = if sys_info.gpus.is_empty() { - // On macOS with unified memory, GPU info may be empty - // Use total RAM as VRAM since memory is shared - log::info!("No GPUs detected (likely unified memory system), using total RAM as VRAM"); - total_ram - } else { - sys_info + // Calculate 
total VRAM from all GPUs + let total_vram: u64 = match sys_info.gpus.is_empty() { + true => { + log::info!("No GPUs detected (likely unified memory system), using total RAM as VRAM"); + sys_info.total_memory * 1024 * 1024 + } + false => sys_info .gpus .iter() .map(|g| g.total_memory * 1024 * 1024) - .sum::() + .sum::(), }; - + log::info!("Total RAM reported/calculated (in bytes): {}", &total_ram); log::info!("Total VRAM reported/calculated (in bytes): {}", &total_vram); let usable_vram: u64 = if total_vram > RESERVE_BYTES { (((total_vram - RESERVE_BYTES) as f64) * multiplier) as u64 diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/state.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/state.rs index 2aad02ecf..a299ec9c5 100644 --- a/src-tauri/plugins/tauri-plugin-llamacpp/src/state.rs +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/state.rs @@ -10,6 +10,7 @@ pub struct SessionInfo { pub port: i32, // llama-server output port pub model_id: String, pub model_path: String, // path of the loaded model + pub is_embedding: bool, pub api_key: String, #[serde(default)] pub mmproj_path: Option, diff --git a/src-tauri/plugins/tauri-plugin-rag/.gitignore b/src-tauri/plugins/tauri-plugin-rag/.gitignore new file mode 100644 index 000000000..50d8e32e8 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/.gitignore @@ -0,0 +1,17 @@ +/.vs +.DS_Store +.Thumbs.db +*.sublime* +.idea/ +debug.log +package-lock.json +.vscode/settings.json +yarn.lock + +/.tauri +/target +Cargo.lock +node_modules/ + +dist-js +dist diff --git a/src-tauri/plugins/tauri-plugin-rag/Cargo.toml b/src-tauri/plugins/tauri-plugin-rag/Cargo.toml new file mode 100644 index 000000000..340873551 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "tauri-plugin-rag" +version = "0.1.0" +authors = ["Jan "] +description = "Tauri plugin for RAG utilities (document parsing, types)" +license = "MIT" +repository = "https://github.com/menloresearch/jan" +edition = "2021" +rust-version = "1.77.2" +exclude = ["/examples", "/dist-js", "/guest-js", "/node_modules"] +links = "tauri-plugin-rag" + +[dependencies] +tauri = { version = "2.8.5", default-features = false } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +thiserror = "2.0" +tokio = { version = "1", features = ["full"] } +log = "0.4" +pdf-extract = "0.7" +zip = "0.6" +quick-xml = { version = "0.31", features = ["serialize"] } +csv = "1.3" +calamine = "0.23" +html2text = "0.11" +chardetng = "0.1" +encoding_rs = "0.8" +infer = "0.15" + +[build-dependencies] +tauri-plugin = { version = "2.3.1", features = ["build"] } diff --git a/src-tauri/plugins/tauri-plugin-rag/build.rs b/src-tauri/plugins/tauri-plugin-rag/build.rs new file mode 100644 index 000000000..30c58872d --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/build.rs @@ -0,0 +1,7 @@ +fn main() { + tauri_plugin::Builder::new(&[ + "parse_document", + ]) + .build(); +} + diff --git a/src-tauri/plugins/tauri-plugin-rag/guest-js/index.ts b/src-tauri/plugins/tauri-plugin-rag/guest-js/index.ts new file mode 100644 index 000000000..9f7965159 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/guest-js/index.ts @@ -0,0 +1,6 @@ +import { invoke } from '@tauri-apps/api/core' + +export async function parseDocument(filePath: string, fileType: string): Promise { + // Send both snake_case and camelCase for compatibility across runtimes/builds + return await invoke('plugin:rag|parse_document', { filePath, fileType }) +} diff --git 
a/src-tauri/plugins/tauri-plugin-rag/package.json b/src-tauri/plugins/tauri-plugin-rag/package.json new file mode 100644 index 000000000..bac28917d --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/package.json @@ -0,0 +1,33 @@ +{ + "name": "@janhq/tauri-plugin-rag-api", + "version": "0.1.0", + "private": true, + "description": "Guest JS API for Jan RAG plugin", + "type": "module", + "types": "./dist-js/index.d.ts", + "main": "./dist-js/index.cjs", + "module": "./dist-js/index.js", + "exports": { + "types": "./dist-js/index.d.ts", + "import": "./dist-js/index.js", + "require": "./dist-js/index.cjs" + }, + "files": [ + "dist-js", + "README.md" + ], + "scripts": { + "build": "rollup -c", + "prepublishOnly": "yarn build", + "pretest": "yarn build" + }, + "dependencies": { + "@tauri-apps/api": ">=2.0.0-beta.6" + }, + "devDependencies": { + "@rollup/plugin-typescript": "^12.0.0", + "rollup": "^4.9.6", + "tslib": "^2.6.2", + "typescript": "^5.3.3" + } +} diff --git a/src-tauri/plugins/tauri-plugin-rag/permissions/autogenerated/commands/parse_document.toml b/src-tauri/plugins/tauri-plugin-rag/permissions/autogenerated/commands/parse_document.toml new file mode 100644 index 000000000..5cb5da40f --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/permissions/autogenerated/commands/parse_document.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-parse-document" +description = "Enables the parse_document command without any pre-configured scope." +commands.allow = ["parse_document"] + +[[permission]] +identifier = "deny-parse-document" +description = "Denies the parse_document command without any pre-configured scope." +commands.deny = ["parse_document"] diff --git a/src-tauri/plugins/tauri-plugin-rag/permissions/autogenerated/reference.md b/src-tauri/plugins/tauri-plugin-rag/permissions/autogenerated/reference.md new file mode 100644 index 000000000..148c91dfa --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/permissions/autogenerated/reference.md @@ -0,0 +1,43 @@ +## Default Permission + +Default permissions for the rag plugin + +#### This default permission set includes the following: + +- `allow-parse-document` + +## Permission Table + + + + + + + + + + + + + + + + + +
| Identifier | Description |
| --- | --- |
| `rag:allow-parse-document` | Enables the parse_document command without any pre-configured scope. |
| `rag:deny-parse-document` | Denies the parse_document command without any pre-configured scope. |
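As a companion to the permission table above, here is a minimal usage sketch of the guest-js `parseDocument` API added in this plugin. It is illustrative only: it assumes the web app can import the private `@janhq/tauri-plugin-rag-api` workspace package and that the command resolves to the extracted plain text (the Rust side returns `Result<String, RagError>`); the error handling is not part of the plugin.

```ts
// Sketch, not part of this diff. Assumes the private workspace package
// @janhq/tauri-plugin-rag-api is importable from the web app.
import { parseDocument } from '@janhq/tauri-plugin-rag-api'

export async function extractText(filePath: string): Promise<string> {
  try {
    // The second argument accepts an extension ("pdf") or a MIME type
    // ("application/pdf"); unrecognized values fall back to MIME sniffing
    // via `infer` on the Rust side.
    return await parseDocument(filePath, 'pdf')
  } catch (err) {
    // Image-only/scanned PDFs and unsupported types reject with a RagError.
    console.error('rag parse_document failed:', err)
    throw err
  }
}
```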
diff --git a/src-tauri/plugins/tauri-plugin-rag/permissions/default.toml b/src-tauri/plugins/tauri-plugin-rag/permissions/default.toml new file mode 100644 index 000000000..3c8dd7537 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/permissions/default.toml @@ -0,0 +1,6 @@ +[default] +description = "Default permissions for the rag plugin" +permissions = [ + "allow-parse-document", +] + diff --git a/src-tauri/plugins/tauri-plugin-rag/permissions/schemas/schema.json b/src-tauri/plugins/tauri-plugin-rag/permissions/schemas/schema.json new file mode 100644 index 000000000..a4b5488ac --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/permissions/schemas/schema.json @@ -0,0 +1,318 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "PermissionFile", + "description": "Permission file that can define a default permission, a set of permissions or a list of inlined permissions.", + "type": "object", + "properties": { + "default": { + "description": "The default permission set for the plugin", + "anyOf": [ + { + "$ref": "#/definitions/DefaultPermission" + }, + { + "type": "null" + } + ] + }, + "set": { + "description": "A list of permissions sets defined", + "type": "array", + "items": { + "$ref": "#/definitions/PermissionSet" + } + }, + "permission": { + "description": "A list of inlined permissions", + "default": [], + "type": "array", + "items": { + "$ref": "#/definitions/Permission" + } + } + }, + "definitions": { + "DefaultPermission": { + "description": "The default permission set of the plugin.\n\nWorks similarly to a permission with the \"default\" identifier.", + "type": "object", + "required": [ + "permissions" + ], + "properties": { + "version": { + "description": "The version of the permission.", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 1.0 + }, + "description": { + "description": "Human-readable description of what the permission does. Tauri convention is to use `
<h4>
` headings in markdown content for Tauri documentation generation purposes.", + "type": [ + "string", + "null" + ] + }, + "permissions": { + "description": "All permissions this set contains.", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "PermissionSet": { + "description": "A set of direct permissions grouped together under a new name.", + "type": "object", + "required": [ + "description", + "identifier", + "permissions" + ], + "properties": { + "identifier": { + "description": "A unique identifier for the permission.", + "type": "string" + }, + "description": { + "description": "Human-readable description of what the permission does.", + "type": "string" + }, + "permissions": { + "description": "All permissions this set contains.", + "type": "array", + "items": { + "$ref": "#/definitions/PermissionKind" + } + } + } + }, + "Permission": { + "description": "Descriptions of explicit privileges of commands.\n\nIt can enable commands to be accessible in the frontend of the application.\n\nIf the scope is defined it can be used to fine grain control the access of individual or multiple commands.", + "type": "object", + "required": [ + "identifier" + ], + "properties": { + "version": { + "description": "The version of the permission.", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 1.0 + }, + "identifier": { + "description": "A unique identifier for the permission.", + "type": "string" + }, + "description": { + "description": "Human-readable description of what the permission does. Tauri internal convention is to use `
<h4>
` headings in markdown content for Tauri documentation generation purposes.", + "type": [ + "string", + "null" + ] + }, + "commands": { + "description": "Allowed or denied commands when using this permission.", + "default": { + "allow": [], + "deny": [] + }, + "allOf": [ + { + "$ref": "#/definitions/Commands" + } + ] + }, + "scope": { + "description": "Allowed or denied scoped when using this permission.", + "allOf": [ + { + "$ref": "#/definitions/Scopes" + } + ] + }, + "platforms": { + "description": "Target platforms this permission applies. By default all platforms are affected by this permission.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Target" + } + } + } + }, + "Commands": { + "description": "Allowed and denied commands inside a permission.\n\nIf two commands clash inside of `allow` and `deny`, it should be denied by default.", + "type": "object", + "properties": { + "allow": { + "description": "Allowed command.", + "default": [], + "type": "array", + "items": { + "type": "string" + } + }, + "deny": { + "description": "Denied command, which takes priority.", + "default": [], + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "Scopes": { + "description": "An argument for fine grained behavior control of Tauri commands.\n\nIt can be of any serde serializable type and is used to allow or prevent certain actions inside a Tauri command. The configured scope is passed to the command and will be enforced by the command implementation.\n\n## Example\n\n```json { \"allow\": [{ \"path\": \"$HOME/**\" }], \"deny\": [{ \"path\": \"$HOME/secret.txt\" }] } ```", + "type": "object", + "properties": { + "allow": { + "description": "Data that defines what is allowed by the scope.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Value" + } + }, + "deny": { + "description": "Data that defines what is denied by the scope. 
This should be prioritized by validation logic.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Value" + } + } + } + }, + "Value": { + "description": "All supported ACL values.", + "anyOf": [ + { + "description": "Represents a null JSON value.", + "type": "null" + }, + { + "description": "Represents a [`bool`].", + "type": "boolean" + }, + { + "description": "Represents a valid ACL [`Number`].", + "allOf": [ + { + "$ref": "#/definitions/Number" + } + ] + }, + { + "description": "Represents a [`String`].", + "type": "string" + }, + { + "description": "Represents a list of other [`Value`]s.", + "type": "array", + "items": { + "$ref": "#/definitions/Value" + } + }, + { + "description": "Represents a map of [`String`] keys to [`Value`]s.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/Value" + } + } + ] + }, + "Number": { + "description": "A valid ACL number.", + "anyOf": [ + { + "description": "Represents an [`i64`].", + "type": "integer", + "format": "int64" + }, + { + "description": "Represents a [`f64`].", + "type": "number", + "format": "double" + } + ] + }, + "Target": { + "description": "Platform target.", + "oneOf": [ + { + "description": "MacOS.", + "type": "string", + "enum": [ + "macOS" + ] + }, + { + "description": "Windows.", + "type": "string", + "enum": [ + "windows" + ] + }, + { + "description": "Linux.", + "type": "string", + "enum": [ + "linux" + ] + }, + { + "description": "Android.", + "type": "string", + "enum": [ + "android" + ] + }, + { + "description": "iOS.", + "type": "string", + "enum": [ + "iOS" + ] + } + ] + }, + "PermissionKind": { + "type": "string", + "oneOf": [ + { + "description": "Enables the parse_document command without any pre-configured scope.", + "type": "string", + "const": "allow-parse-document", + "markdownDescription": "Enables the parse_document command without any pre-configured scope." + }, + { + "description": "Denies the parse_document command without any pre-configured scope.", + "type": "string", + "const": "deny-parse-document", + "markdownDescription": "Denies the parse_document command without any pre-configured scope." 
+ }, + { + "description": "Default permissions for the rag plugin\n#### This default permission set includes:\n\n- `allow-parse-document`", + "type": "string", + "const": "default", + "markdownDescription": "Default permissions for the rag plugin\n#### This default permission set includes:\n\n- `allow-parse-document`" + } + ] + } + } +} \ No newline at end of file diff --git a/src-tauri/plugins/tauri-plugin-rag/rollup.config.js b/src-tauri/plugins/tauri-plugin-rag/rollup.config.js new file mode 100644 index 000000000..5047bf72d --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/rollup.config.js @@ -0,0 +1,32 @@ +import { readFileSync } from 'node:fs' +import { dirname, join } from 'node:path' +import { cwd } from 'node:process' +import typescript from '@rollup/plugin-typescript' + +const pkg = JSON.parse(readFileSync(join(cwd(), 'package.json'), 'utf8')) + +export default { + input: 'guest-js/index.ts', + output: [ + { + file: pkg.exports.import, + format: 'esm' + }, + { + file: pkg.exports.require, + format: 'cjs' + } + ], + plugins: [ + typescript({ + declaration: true, + declarationDir: dirname(pkg.exports.import) + }) + ], + external: [ + /^@tauri-apps\/api/, + ...Object.keys(pkg.dependencies || {}), + ...Object.keys(pkg.peerDependencies || {}) + ] +} + diff --git a/src-tauri/plugins/tauri-plugin-rag/src/commands.rs b/src-tauri/plugins/tauri-plugin-rag/src/commands.rs new file mode 100644 index 000000000..6f2a0f112 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/src/commands.rs @@ -0,0 +1,12 @@ +use crate::{RagError, parser}; + +#[tauri::command] +pub async fn parse_document( + _app: tauri::AppHandle, + file_path: String, + file_type: String, +) -> Result { + log::info!("Parsing document: {} (type: {})", file_path, file_type); + let res = parser::parse_document(&file_path, &file_type); + res +} diff --git a/src-tauri/plugins/tauri-plugin-rag/src/error.rs b/src-tauri/plugins/tauri-plugin-rag/src/error.rs new file mode 100644 index 000000000..fe693130b --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/src/error.rs @@ -0,0 +1,20 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, thiserror::Error, Serialize, Deserialize)] +pub enum RagError { + #[error("Failed to parse document: {0}")] + ParseError(String), + + #[error("Unsupported file type: {0}")] + UnsupportedFileType(String), + + #[error("IO error: {0}")] + IoError(String), +} + +impl From for RagError { + fn from(err: std::io::Error) -> Self { + RagError::IoError(err.to_string()) + } +} + diff --git a/src-tauri/plugins/tauri-plugin-rag/src/lib.rs b/src-tauri/plugins/tauri-plugin-rag/src/lib.rs new file mode 100644 index 000000000..1c66e3388 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/src/lib.rs @@ -0,0 +1,20 @@ +use tauri::{ + plugin::{Builder, TauriPlugin}, + Runtime, +}; + +mod parser; +mod error; +mod commands; + +pub use error::RagError; + +pub fn init() -> TauriPlugin { + Builder::new("rag") + .invoke_handler(tauri::generate_handler![ + commands::parse_document, + ]) + .setup(|_app, _api| Ok(())) + .build() +} + diff --git a/src-tauri/plugins/tauri-plugin-rag/src/parser.rs b/src-tauri/plugins/tauri-plugin-rag/src/parser.rs new file mode 100644 index 000000000..d21c1de5d --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/src/parser.rs @@ -0,0 +1,274 @@ +use crate::RagError; +use std::fs; +use std::io::{Read, Cursor}; +use zip::read::ZipArchive; +use quick_xml::events::Event; +use quick_xml::Reader; +use csv as csv_crate; +use calamine::{Reader as _, open_workbook_auto, DataType}; +use 
html2text; +use chardetng::EncodingDetector; +use infer; +use std::borrow::Cow; + +pub fn parse_pdf(file_path: &str) -> Result { + let bytes = fs::read(file_path)?; + let text = pdf_extract::extract_text_from_mem(&bytes) + .map_err(|e| RagError::ParseError(format!("PDF parse error: {}", e)))?; + + // Validate that the PDF has extractable text (not image-based/scanned) + // Count meaningful characters (excluding whitespace) + let meaningful_chars = text.chars() + .filter(|c| !c.is_whitespace()) + .count(); + + // Require at least 50 non-whitespace characters to consider it a text PDF + // This threshold filters out PDFs that are purely images or scanned documents + if meaningful_chars < 50 { + return Err(RagError::ParseError( + "PDF appears to be image-based or scanned. OCR is not supported yet. Please use a text-based PDF.".to_string() + )); + } + + Ok(text) +} + +pub fn parse_text(file_path: &str) -> Result { + read_text_auto(file_path) +} + +pub fn parse_document(file_path: &str, file_type: &str) -> Result { + match file_type.to_lowercase().as_str() { + "pdf" | "application/pdf" => parse_pdf(file_path), + "txt" | "text/plain" | "md" | "text/markdown" => parse_text(file_path), + "csv" | "text/csv" => parse_csv(file_path), + // Excel family via calamine + "xlsx" + | "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" + | "xls" + | "application/vnd.ms-excel" + | "ods" + | "application/vnd.oasis.opendocument.spreadsheet" => parse_spreadsheet(file_path), + // PowerPoint + "pptx" + | "application/vnd.openxmlformats-officedocument.presentationml.presentation" => parse_pptx(file_path), + // HTML + "html" | "htm" | "text/html" => parse_html(file_path), + "docx" + | "application/vnd.openxmlformats-officedocument.wordprocessingml.document" => { + parse_docx(file_path) + } + other => { + // Try MIME sniffing when extension or MIME is unknown + if let Ok(Some(k)) = infer::get_from_path(file_path) { + let mime = k.mime_type(); + return parse_document(file_path, mime); + } + Err(RagError::UnsupportedFileType(other.to_string())) + } + } +} + +fn parse_docx(file_path: &str) -> Result { + let file = std::fs::File::open(file_path)?; + let mut zip = ZipArchive::new(file).map_err(|e| RagError::ParseError(e.to_string()))?; + + // Standard DOCX stores document text at word/document.xml + let mut doc_xml = match zip.by_name("word/document.xml") { + Ok(f) => f, + Err(_) => return Err(RagError::ParseError("document.xml not found".into())), + }; + let mut xml_content = String::new(); + doc_xml + .read_to_string(&mut xml_content) + .map_err(|e| RagError::ParseError(e.to_string()))?; + + // Parse XML and extract text from w:t nodes; add newlines on w:p boundaries + let mut reader = Reader::from_str(&xml_content); + reader.trim_text(true); + let mut buf = Vec::new(); + let mut result = String::new(); + let mut in_text = false; + + loop { + match reader.read_event_into(&mut buf) { + Ok(Event::Start(e)) => { + let name: String = reader + .decoder() + .decode(e.name().as_ref()) + .unwrap_or(Cow::Borrowed("")) + .into_owned(); + if name.ends_with(":t") || name == "w:t" || name == "t" { + in_text = true; + } + } + Ok(Event::End(e)) => { + let name: String = reader + .decoder() + .decode(e.name().as_ref()) + .unwrap_or(Cow::Borrowed("")) + .into_owned(); + if name.ends_with(":t") || name == "w:t" || name == "t" { + in_text = false; + result.push(' '); + } + if name.ends_with(":p") || name == "w:p" || name == "p" { + // Paragraph end – add newline + result.push_str("\n\n"); + } + } + Ok(Event::Text(t)) => { 
+ if in_text { + let text = t.unescape().unwrap_or_default(); + result.push_str(&text); + } + } + Ok(Event::Eof) => break, + Err(e) => return Err(RagError::ParseError(e.to_string())), + _ => {} + } + } + + // Normalize whitespace + let normalized = result + .lines() + .map(|l| l.trim()) + .filter(|l| !l.is_empty()) + .collect::>() + .join("\n"); + Ok(normalized) +} + +fn parse_csv(file_path: &str) -> Result { + let mut rdr = csv_crate::ReaderBuilder::new() + .has_headers(false) + .flexible(true) + .from_path(file_path) + .map_err(|e| RagError::ParseError(e.to_string()))?; + let mut out = String::new(); + for rec in rdr.records() { + let rec = rec.map_err(|e| RagError::ParseError(e.to_string()))?; + out.push_str(&rec.iter().collect::>().join(", ")); + out.push('\n'); + } + Ok(out) +} + +fn parse_spreadsheet(file_path: &str) -> Result { + let mut workbook = open_workbook_auto(file_path) + .map_err(|e| RagError::ParseError(e.to_string()))?; + let mut out = String::new(); + for sheet_name in workbook.sheet_names().to_owned() { + if let Ok(range) = workbook.worksheet_range(&sheet_name) { + out.push_str(&format!("# Sheet: {}\n", sheet_name)); + for row in range.rows() { + let cells = row + .iter() + .map(|c| match c { + DataType::Empty => "".to_string(), + DataType::String(s) => s.to_string(), + DataType::Float(f) => format!("{}", f), + DataType::Int(i) => i.to_string(), + DataType::Bool(b) => b.to_string(), + DataType::DateTime(f) => format!("{}", f), + other => other.to_string(), + }) + .collect::>() + .join("\t"); + out.push_str(&cells); + out.push('\n'); + } + out.push_str("\n"); + } + } + Ok(out) +} + +fn parse_pptx(file_path: &str) -> Result { + let file = std::fs::File::open(file_path)?; + let mut zip = ZipArchive::new(file).map_err(|e| RagError::ParseError(e.to_string()))?; + + // Collect slide files: ppt/slides/slide*.xml + let mut slides = Vec::new(); + for i in 0..zip.len() { + let name = zip.by_index(i).map(|f| f.name().to_string()).unwrap_or_default(); + if name.starts_with("ppt/slides/") && name.ends_with(".xml") { + slides.push(name); + } + } + slides.sort(); + + let mut output = String::new(); + for slide_name in slides { + let mut file = zip.by_name(&slide_name).map_err(|e| RagError::ParseError(e.to_string()))?; + let mut xml = String::new(); + file.read_to_string(&mut xml).map_err(|e| RagError::ParseError(e.to_string()))?; + output.push_str(&extract_pptx_text(&xml)); + output.push_str("\n\n"); + } + Ok(output) +} + +fn extract_pptx_text(xml: &str) -> String { + let mut reader = Reader::from_str(xml); + reader.trim_text(true); + let mut buf = Vec::new(); + let mut result = String::new(); + let mut in_text = false; + loop { + match reader.read_event_into(&mut buf) { + Ok(Event::Start(e)) => { + let name: String = reader + .decoder() + .decode(e.name().as_ref()) + .unwrap_or(Cow::Borrowed("")) + .into_owned(); + if name.ends_with(":t") || name == "a:t" || name == "t" { + in_text = true; + } + } + Ok(Event::End(e)) => { + let name: String = reader + .decoder() + .decode(e.name().as_ref()) + .unwrap_or(Cow::Borrowed("")) + .into_owned(); + if name.ends_with(":t") || name == "a:t" || name == "t" { + in_text = false; + result.push(' '); + } + } + Ok(Event::Text(t)) => { + if in_text { + let text = t.unescape().unwrap_or_default(); + result.push_str(&text); + } + } + Ok(Event::Eof) => break, + Err(_) => break, + _ => {} + } + } + result +} + +fn parse_html(file_path: &str) -> Result { + let html = read_text_auto(file_path)?; + // 80-column wrap default + 
Ok(html2text::from_read(Cursor::new(html), 80)) +} + +fn read_text_auto(file_path: &str) -> Result { + let bytes = fs::read(file_path)?; + // Detect encoding + let mut detector = EncodingDetector::new(); + detector.feed(&bytes, true); + let enc = detector.guess(None, true); + let (decoded, _, had_errors) = enc.decode(&bytes); + if had_errors { + // fallback to UTF-8 lossy + Ok(String::from_utf8_lossy(&bytes).to_string()) + } else { + Ok(decoded.to_string()) + } +} diff --git a/src-tauri/plugins/tauri-plugin-rag/tsconfig.json b/src-tauri/plugins/tauri-plugin-rag/tsconfig.json new file mode 100644 index 000000000..60bc6a8eb --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-rag/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "es2021", + "module": "esnext", + "moduleResolution": "bundler", + "skipLibCheck": true, + "strict": true, + "noUnusedLocals": true, + "noImplicitAny": true, + "noEmit": true + }, + "include": ["guest-js/*.ts"], + "exclude": ["dist-js", "node_modules"] +} + diff --git a/src-tauri/plugins/tauri-plugin-vector-db/.gitignore b/src-tauri/plugins/tauri-plugin-vector-db/.gitignore new file mode 100644 index 000000000..50d8e32e8 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/.gitignore @@ -0,0 +1,17 @@ +/.vs +.DS_Store +.Thumbs.db +*.sublime* +.idea/ +debug.log +package-lock.json +.vscode/settings.json +yarn.lock + +/.tauri +/target +Cargo.lock +node_modules/ + +dist-js +dist diff --git a/src-tauri/plugins/tauri-plugin-vector-db/Cargo.toml b/src-tauri/plugins/tauri-plugin-vector-db/Cargo.toml new file mode 100644 index 000000000..eb377c157 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "tauri-plugin-vector-db" +version = "0.1.0" +authors = ["Jan "] +description = "Tauri plugin for vector storage and similarity search" +license = "MIT" +repository = "https://github.com/menloresearch/jan" +edition = "2021" +rust-version = "1.77.2" +exclude = ["/examples", "/dist-js", "/guest-js", "/node_modules"] +links = "tauri-plugin-vector-db" + +[dependencies] +tauri = { version = "2.8.5", default-features = false } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +thiserror = "2.0" +tokio = { version = "1", features = ["full"] } +log = "0.4" +rusqlite = { version = "0.32", features = ["bundled", "load_extension"] } +uuid = { version = "1.7", features = ["v4", "serde"] } +dirs = "6.0.0" + +[build-dependencies] +tauri-plugin = { version = "2.3.1", features = ["build"] } diff --git a/src-tauri/plugins/tauri-plugin-vector-db/build.rs b/src-tauri/plugins/tauri-plugin-vector-db/build.rs new file mode 100644 index 000000000..0f2f3f4fd --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/build.rs @@ -0,0 +1,16 @@ +fn main() { + tauri_plugin::Builder::new(&[ + "create_collection", + "create_file", + "insert_chunks", + "search_collection", + "delete_chunks", + "delete_file", + "delete_collection", + "chunk_text", + "get_status", + "list_attachments", + "get_chunks", + ]) + .build(); +} diff --git a/src-tauri/plugins/tauri-plugin-vector-db/guest-js/index.ts b/src-tauri/plugins/tauri-plugin-vector-db/guest-js/index.ts new file mode 100644 index 000000000..d66af5cb4 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/guest-js/index.ts @@ -0,0 +1,114 @@ +import { invoke } from '@tauri-apps/api/core' + +export type SearchMode = 'auto' | 'ann' | 'linear' + +export interface SearchResult { + id: string + text: string + score?: number + file_id: string + chunk_file_order: 
number +} + +export interface Status { + ann_available: boolean +} + +export interface AttachmentFileInfo { + id: string + name?: string + path?: string + type?: string + size?: number + chunk_count: number +} + +// Events +// Events are not exported in guest-js to keep API minimal + +export async function getStatus(): Promise { + return await invoke('plugin:vector-db|get_status') +} + +export async function createCollection(name: string, dimension: number): Promise { + // Use camelCase param name `dimension` to match Tauri v2 argument keys + return await invoke('plugin:vector-db|create_collection', { name, dimension }) +} + +export async function createFile( + collection: string, + file: { path: string; name?: string; type?: string; size?: number } +): Promise { + return await invoke('plugin:vector-db|create_file', { collection, file }) +} + +export async function insertChunks( + collection: string, + fileId: string, + chunks: Array<{ text: string; embedding: number[] }> +): Promise { + return await invoke('plugin:vector-db|insert_chunks', { collection, fileId, chunks }) +} + +export async function deleteFile( + collection: string, + fileId: string +): Promise { + return await invoke('plugin:vector-db|delete_file', { collection, fileId }) +} + +export async function searchCollection( + collection: string, + queryEmbedding: number[], + limit: number, + threshold: number, + mode?: SearchMode, + fileIds?: string[] +): Promise { + return await invoke('plugin:vector-db|search_collection', { + collection, + queryEmbedding, + limit, + threshold, + mode, + fileIds, + }) +} + +export async function deleteChunks(collection: string, ids: string[]): Promise { + return await invoke('plugin:vector-db|delete_chunks', { collection, ids }) +} + +export async function deleteCollection(collection: string): Promise { + return await invoke('plugin:vector-db|delete_collection', { collection }) +} + +export async function chunkText( + text: string, + chunkSize: number, + chunkOverlap: number +): Promise { + // Use snake_case to match Rust command parameter names + return await invoke('plugin:vector-db|chunk_text', { text, chunkSize, chunkOverlap }) +} + +export async function listAttachments( + collection: string, + limit?: number +): Promise { + return await invoke('plugin:vector-db|list_attachments', { collection, limit }) +} + +export async function getChunks( + collection: string, + fileId: string, + startOrder: number, + endOrder: number +): Promise { + return await invoke('plugin:vector-db|get_chunks', { + collection, + fileId, + startOrder, + endOrder, + }) +} diff --git a/src-tauri/plugins/tauri-plugin-vector-db/package.json b/src-tauri/plugins/tauri-plugin-vector-db/package.json new file mode 100644 index 000000000..d2db2bbbe --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/package.json @@ -0,0 +1,33 @@ +{ + "name": "@janhq/tauri-plugin-vector-db-api", + "version": "0.1.0", + "private": true, + "description": "Guest JS API for Jan vector DB plugin", + "type": "module", + "types": "./dist-js/index.d.ts", + "main": "./dist-js/index.cjs", + "module": "./dist-js/index.js", + "exports": { + "types": "./dist-js/index.d.ts", + "import": "./dist-js/index.js", + "require": "./dist-js/index.cjs" + }, + "files": [ + "dist-js", + "README.md" + ], + "scripts": { + "build": "rollup -c", + "prepublishOnly": "yarn build", + "pretest": "yarn build" + }, + "dependencies": { + "@tauri-apps/api": ">=2.0.0-beta.6" + }, + "devDependencies": { + "@rollup/plugin-typescript": "^12.0.0", + "rollup": "^4.9.6", + 
"tslib": "^2.6.2", + "typescript": "^5.3.3" + } +} diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/chunk_text.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/chunk_text.toml new file mode 100644 index 000000000..341a0a194 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/chunk_text.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-chunk-text" +description = "Enables the chunk_text command without any pre-configured scope." +commands.allow = ["chunk_text"] + +[[permission]] +identifier = "deny-chunk-text" +description = "Denies the chunk_text command without any pre-configured scope." +commands.deny = ["chunk_text"] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/create_collection.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/create_collection.toml new file mode 100644 index 000000000..402644497 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/create_collection.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-create-collection" +description = "Enables the create_collection command without any pre-configured scope." +commands.allow = ["create_collection"] + +[[permission]] +identifier = "deny-create-collection" +description = "Denies the create_collection command without any pre-configured scope." +commands.deny = ["create_collection"] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/create_file.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/create_file.toml new file mode 100644 index 000000000..7fc6c3ff8 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/create_file.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-create-file" +description = "Enables the create_file command without any pre-configured scope." +commands.allow = ["create_file"] + +[[permission]] +identifier = "deny-create-file" +description = "Denies the create_file command without any pre-configured scope." +commands.deny = ["create_file"] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/delete_chunks.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/delete_chunks.toml new file mode 100644 index 000000000..ecf2055a8 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/delete_chunks.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-delete-chunks" +description = "Enables the delete_chunks command without any pre-configured scope." +commands.allow = ["delete_chunks"] + +[[permission]] +identifier = "deny-delete-chunks" +description = "Denies the delete_chunks command without any pre-configured scope." 
+commands.deny = ["delete_chunks"] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/delete_collection.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/delete_collection.toml new file mode 100644 index 000000000..5a24329cb --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/delete_collection.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-delete-collection" +description = "Enables the delete_collection command without any pre-configured scope." +commands.allow = ["delete_collection"] + +[[permission]] +identifier = "deny-delete-collection" +description = "Denies the delete_collection command without any pre-configured scope." +commands.deny = ["delete_collection"] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/delete_file.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/delete_file.toml new file mode 100644 index 000000000..aafe069a6 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/delete_file.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-delete-file" +description = "Enables the delete_file command without any pre-configured scope." +commands.allow = ["delete_file"] + +[[permission]] +identifier = "deny-delete-file" +description = "Denies the delete_file command without any pre-configured scope." +commands.deny = ["delete_file"] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/get_chunks.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/get_chunks.toml new file mode 100644 index 000000000..6dc03e311 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/get_chunks.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-get-chunks" +description = "Enables the get_chunks command without any pre-configured scope." +commands.allow = ["get_chunks"] + +[[permission]] +identifier = "deny-get-chunks" +description = "Denies the get_chunks command without any pre-configured scope." +commands.deny = ["get_chunks"] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/get_status.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/get_status.toml new file mode 100644 index 000000000..ff573a743 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/get_status.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-get-status" +description = "Enables the get_status command without any pre-configured scope." +commands.allow = ["get_status"] + +[[permission]] +identifier = "deny-get-status" +description = "Denies the get_status command without any pre-configured scope." 
+commands.deny = ["get_status"] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/insert_chunks.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/insert_chunks.toml new file mode 100644 index 000000000..c83e268d2 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/insert_chunks.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-insert-chunks" +description = "Enables the insert_chunks command without any pre-configured scope." +commands.allow = ["insert_chunks"] + +[[permission]] +identifier = "deny-insert-chunks" +description = "Denies the insert_chunks command without any pre-configured scope." +commands.deny = ["insert_chunks"] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/list_attachments.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/list_attachments.toml new file mode 100644 index 000000000..bbf2d996f --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/list_attachments.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-list-attachments" +description = "Enables the list_attachments command without any pre-configured scope." +commands.allow = ["list_attachments"] + +[[permission]] +identifier = "deny-list-attachments" +description = "Denies the list_attachments command without any pre-configured scope." +commands.deny = ["list_attachments"] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/search_collection.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/search_collection.toml new file mode 100644 index 000000000..e408b935c --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/commands/search_collection.toml @@ -0,0 +1,13 @@ +# Automatically generated - DO NOT EDIT! + +"$schema" = "../../schemas/schema.json" + +[[permission]] +identifier = "allow-search-collection" +description = "Enables the search_collection command without any pre-configured scope." +commands.allow = ["search_collection"] + +[[permission]] +identifier = "deny-search-collection" +description = "Denies the search_collection command without any pre-configured scope." +commands.deny = ["search_collection"] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/reference.md b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/reference.md new file mode 100644 index 000000000..b859ecb87 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/autogenerated/reference.md @@ -0,0 +1,313 @@ +## Default Permission + +Default permissions for the vector-db plugin + +#### This default permission set includes the following: + +- `allow-get-status` +- `allow-create-collection` +- `allow-insert-chunks` +- `allow-create-file` +- `allow-search-collection` +- `allow-delete-chunks` +- `allow-delete-file` +- `allow-delete-collection` +- `allow-chunk-text` +- `allow-list-attachments` +- `allow-get-chunks` + +## Permission Table + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
| Identifier | Description |
| --- | --- |
| `vector-db:allow-chunk-text` | Enables the chunk_text command without any pre-configured scope. |
| `vector-db:deny-chunk-text` | Denies the chunk_text command without any pre-configured scope. |
| `vector-db:allow-create-collection` | Enables the create_collection command without any pre-configured scope. |
| `vector-db:deny-create-collection` | Denies the create_collection command without any pre-configured scope. |
| `vector-db:allow-create-file` | Enables the create_file command without any pre-configured scope. |
| `vector-db:deny-create-file` | Denies the create_file command without any pre-configured scope. |
| `vector-db:allow-delete-chunks` | Enables the delete_chunks command without any pre-configured scope. |
| `vector-db:deny-delete-chunks` | Denies the delete_chunks command without any pre-configured scope. |
| `vector-db:allow-delete-collection` | Enables the delete_collection command without any pre-configured scope. |
| `vector-db:deny-delete-collection` | Denies the delete_collection command without any pre-configured scope. |
| `vector-db:allow-delete-file` | Enables the delete_file command without any pre-configured scope. |
| `vector-db:deny-delete-file` | Denies the delete_file command without any pre-configured scope. |
| `vector-db:allow-get-chunks` | Enables the get_chunks command without any pre-configured scope. |
| `vector-db:deny-get-chunks` | Denies the get_chunks command without any pre-configured scope. |
| `vector-db:allow-get-status` | Enables the get_status command without any pre-configured scope. |
| `vector-db:deny-get-status` | Denies the get_status command without any pre-configured scope. |
| `vector-db:allow-insert-chunks` | Enables the insert_chunks command without any pre-configured scope. |
| `vector-db:deny-insert-chunks` | Denies the insert_chunks command without any pre-configured scope. |
| `vector-db:allow-list-attachments` | Enables the list_attachments command without any pre-configured scope. |
| `vector-db:deny-list-attachments` | Denies the list_attachments command without any pre-configured scope. |
| `vector-db:allow-search-collection` | Enables the search_collection command without any pre-configured scope. |
| `vector-db:deny-search-collection` | Denies the search_collection command without any pre-configured scope. |
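To show how the individual vector-db commands in the table above fit together, here is a hedged end-to-end ingest-then-search sketch built on the guest-js API from this diff. `readDocumentText()` and `embed()` are hypothetical app-side helpers (the plugin itself neither parses documents nor computes embeddings), the dimension and chunking numbers are placeholders, and the resolved types of `createFile`, `chunkText`, and `searchCollection` (file id string, `string[]`, `SearchResult[]`) are assumptions rather than confirmed by this diff.

```ts
// Sketch, not part of this diff. Assumes the private workspace package
// @janhq/tauri-plugin-vector-db-api is importable from the web app.
import {
  createCollection,
  createFile,
  chunkText,
  insertChunks,
  searchCollection,
} from '@janhq/tauri-plugin-vector-db-api'

// Hypothetical helpers provided by the app, not by this plugin.
declare function readDocumentText(path: string): Promise<string>
declare function embed(text: string): Promise<number[]>

export async function indexAndQuery(collection: string, path: string, query: string) {
  // Placeholder dimension; it must match the embedding model in use.
  await createCollection(collection, 768)
  // Assumed to resolve to the new file id used by insert/search below.
  const fileId = await createFile(collection, { path, type: 'pdf' })

  // Placeholder chunk size (512) and overlap (64).
  const chunks: string[] = await chunkText(await readDocumentText(path), 512, 64)
  const withEmbeddings = await Promise.all(
    chunks.map(async (text) => ({ text, embedding: await embed(text) }))
  )
  await insertChunks(collection, fileId, withEmbeddings)

  // 'auto' lets the plugin pick ANN when available and fall back to linear scan.
  return searchCollection(collection, await embed(query), 5, 0.3, 'auto', [fileId])
}
```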
diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/default.toml b/src-tauri/plugins/tauri-plugin-vector-db/permissions/default.toml new file mode 100644 index 000000000..a29998640 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/default.toml @@ -0,0 +1,15 @@ +[default] +description = "Default permissions for the vector-db plugin" +permissions = [ + "allow-get-status", + "allow-create-collection", + "allow-insert-chunks", + "allow-create-file", + "allow-search-collection", + "allow-delete-chunks", + "allow-delete-file", + "allow-delete-collection", + "allow-chunk-text", + "allow-list-attachments", + "allow-get-chunks", +] diff --git a/src-tauri/plugins/tauri-plugin-vector-db/permissions/schemas/schema.json b/src-tauri/plugins/tauri-plugin-vector-db/permissions/schemas/schema.json new file mode 100644 index 000000000..6410337c5 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/permissions/schemas/schema.json @@ -0,0 +1,438 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "PermissionFile", + "description": "Permission file that can define a default permission, a set of permissions or a list of inlined permissions.", + "type": "object", + "properties": { + "default": { + "description": "The default permission set for the plugin", + "anyOf": [ + { + "$ref": "#/definitions/DefaultPermission" + }, + { + "type": "null" + } + ] + }, + "set": { + "description": "A list of permissions sets defined", + "type": "array", + "items": { + "$ref": "#/definitions/PermissionSet" + } + }, + "permission": { + "description": "A list of inlined permissions", + "default": [], + "type": "array", + "items": { + "$ref": "#/definitions/Permission" + } + } + }, + "definitions": { + "DefaultPermission": { + "description": "The default permission set of the plugin.\n\nWorks similarly to a permission with the \"default\" identifier.", + "type": "object", + "required": [ + "permissions" + ], + "properties": { + "version": { + "description": "The version of the permission.", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 1.0 + }, + "description": { + "description": "Human-readable description of what the permission does. Tauri convention is to use `
<h4>
` headings in markdown content for Tauri documentation generation purposes.", + "type": [ + "string", + "null" + ] + }, + "permissions": { + "description": "All permissions this set contains.", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "PermissionSet": { + "description": "A set of direct permissions grouped together under a new name.", + "type": "object", + "required": [ + "description", + "identifier", + "permissions" + ], + "properties": { + "identifier": { + "description": "A unique identifier for the permission.", + "type": "string" + }, + "description": { + "description": "Human-readable description of what the permission does.", + "type": "string" + }, + "permissions": { + "description": "All permissions this set contains.", + "type": "array", + "items": { + "$ref": "#/definitions/PermissionKind" + } + } + } + }, + "Permission": { + "description": "Descriptions of explicit privileges of commands.\n\nIt can enable commands to be accessible in the frontend of the application.\n\nIf the scope is defined it can be used to fine grain control the access of individual or multiple commands.", + "type": "object", + "required": [ + "identifier" + ], + "properties": { + "version": { + "description": "The version of the permission.", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 1.0 + }, + "identifier": { + "description": "A unique identifier for the permission.", + "type": "string" + }, + "description": { + "description": "Human-readable description of what the permission does. Tauri internal convention is to use `
<h4>
` headings in markdown content for Tauri documentation generation purposes.", + "type": [ + "string", + "null" + ] + }, + "commands": { + "description": "Allowed or denied commands when using this permission.", + "default": { + "allow": [], + "deny": [] + }, + "allOf": [ + { + "$ref": "#/definitions/Commands" + } + ] + }, + "scope": { + "description": "Allowed or denied scoped when using this permission.", + "allOf": [ + { + "$ref": "#/definitions/Scopes" + } + ] + }, + "platforms": { + "description": "Target platforms this permission applies. By default all platforms are affected by this permission.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Target" + } + } + } + }, + "Commands": { + "description": "Allowed and denied commands inside a permission.\n\nIf two commands clash inside of `allow` and `deny`, it should be denied by default.", + "type": "object", + "properties": { + "allow": { + "description": "Allowed command.", + "default": [], + "type": "array", + "items": { + "type": "string" + } + }, + "deny": { + "description": "Denied command, which takes priority.", + "default": [], + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "Scopes": { + "description": "An argument for fine grained behavior control of Tauri commands.\n\nIt can be of any serde serializable type and is used to allow or prevent certain actions inside a Tauri command. The configured scope is passed to the command and will be enforced by the command implementation.\n\n## Example\n\n```json { \"allow\": [{ \"path\": \"$HOME/**\" }], \"deny\": [{ \"path\": \"$HOME/secret.txt\" }] } ```", + "type": "object", + "properties": { + "allow": { + "description": "Data that defines what is allowed by the scope.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Value" + } + }, + "deny": { + "description": "Data that defines what is denied by the scope. 
This should be prioritized by validation logic.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/Value" + } + } + } + }, + "Value": { + "description": "All supported ACL values.", + "anyOf": [ + { + "description": "Represents a null JSON value.", + "type": "null" + }, + { + "description": "Represents a [`bool`].", + "type": "boolean" + }, + { + "description": "Represents a valid ACL [`Number`].", + "allOf": [ + { + "$ref": "#/definitions/Number" + } + ] + }, + { + "description": "Represents a [`String`].", + "type": "string" + }, + { + "description": "Represents a list of other [`Value`]s.", + "type": "array", + "items": { + "$ref": "#/definitions/Value" + } + }, + { + "description": "Represents a map of [`String`] keys to [`Value`]s.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/Value" + } + } + ] + }, + "Number": { + "description": "A valid ACL number.", + "anyOf": [ + { + "description": "Represents an [`i64`].", + "type": "integer", + "format": "int64" + }, + { + "description": "Represents a [`f64`].", + "type": "number", + "format": "double" + } + ] + }, + "Target": { + "description": "Platform target.", + "oneOf": [ + { + "description": "MacOS.", + "type": "string", + "enum": [ + "macOS" + ] + }, + { + "description": "Windows.", + "type": "string", + "enum": [ + "windows" + ] + }, + { + "description": "Linux.", + "type": "string", + "enum": [ + "linux" + ] + }, + { + "description": "Android.", + "type": "string", + "enum": [ + "android" + ] + }, + { + "description": "iOS.", + "type": "string", + "enum": [ + "iOS" + ] + } + ] + }, + "PermissionKind": { + "type": "string", + "oneOf": [ + { + "description": "Enables the chunk_text command without any pre-configured scope.", + "type": "string", + "const": "allow-chunk-text", + "markdownDescription": "Enables the chunk_text command without any pre-configured scope." + }, + { + "description": "Denies the chunk_text command without any pre-configured scope.", + "type": "string", + "const": "deny-chunk-text", + "markdownDescription": "Denies the chunk_text command without any pre-configured scope." + }, + { + "description": "Enables the create_collection command without any pre-configured scope.", + "type": "string", + "const": "allow-create-collection", + "markdownDescription": "Enables the create_collection command without any pre-configured scope." + }, + { + "description": "Denies the create_collection command without any pre-configured scope.", + "type": "string", + "const": "deny-create-collection", + "markdownDescription": "Denies the create_collection command without any pre-configured scope." + }, + { + "description": "Enables the create_file command without any pre-configured scope.", + "type": "string", + "const": "allow-create-file", + "markdownDescription": "Enables the create_file command without any pre-configured scope." + }, + { + "description": "Denies the create_file command without any pre-configured scope.", + "type": "string", + "const": "deny-create-file", + "markdownDescription": "Denies the create_file command without any pre-configured scope." + }, + { + "description": "Enables the delete_chunks command without any pre-configured scope.", + "type": "string", + "const": "allow-delete-chunks", + "markdownDescription": "Enables the delete_chunks command without any pre-configured scope." 
+ }, + { + "description": "Denies the delete_chunks command without any pre-configured scope.", + "type": "string", + "const": "deny-delete-chunks", + "markdownDescription": "Denies the delete_chunks command without any pre-configured scope." + }, + { + "description": "Enables the delete_collection command without any pre-configured scope.", + "type": "string", + "const": "allow-delete-collection", + "markdownDescription": "Enables the delete_collection command without any pre-configured scope." + }, + { + "description": "Denies the delete_collection command without any pre-configured scope.", + "type": "string", + "const": "deny-delete-collection", + "markdownDescription": "Denies the delete_collection command without any pre-configured scope." + }, + { + "description": "Enables the delete_file command without any pre-configured scope.", + "type": "string", + "const": "allow-delete-file", + "markdownDescription": "Enables the delete_file command without any pre-configured scope." + }, + { + "description": "Denies the delete_file command without any pre-configured scope.", + "type": "string", + "const": "deny-delete-file", + "markdownDescription": "Denies the delete_file command without any pre-configured scope." + }, + { + "description": "Enables the get_chunks command without any pre-configured scope.", + "type": "string", + "const": "allow-get-chunks", + "markdownDescription": "Enables the get_chunks command without any pre-configured scope." + }, + { + "description": "Denies the get_chunks command without any pre-configured scope.", + "type": "string", + "const": "deny-get-chunks", + "markdownDescription": "Denies the get_chunks command without any pre-configured scope." + }, + { + "description": "Enables the get_status command without any pre-configured scope.", + "type": "string", + "const": "allow-get-status", + "markdownDescription": "Enables the get_status command without any pre-configured scope." + }, + { + "description": "Denies the get_status command without any pre-configured scope.", + "type": "string", + "const": "deny-get-status", + "markdownDescription": "Denies the get_status command without any pre-configured scope." + }, + { + "description": "Enables the insert_chunks command without any pre-configured scope.", + "type": "string", + "const": "allow-insert-chunks", + "markdownDescription": "Enables the insert_chunks command without any pre-configured scope." + }, + { + "description": "Denies the insert_chunks command without any pre-configured scope.", + "type": "string", + "const": "deny-insert-chunks", + "markdownDescription": "Denies the insert_chunks command without any pre-configured scope." + }, + { + "description": "Enables the list_attachments command without any pre-configured scope.", + "type": "string", + "const": "allow-list-attachments", + "markdownDescription": "Enables the list_attachments command without any pre-configured scope." + }, + { + "description": "Denies the list_attachments command without any pre-configured scope.", + "type": "string", + "const": "deny-list-attachments", + "markdownDescription": "Denies the list_attachments command without any pre-configured scope." + }, + { + "description": "Enables the search_collection command without any pre-configured scope.", + "type": "string", + "const": "allow-search-collection", + "markdownDescription": "Enables the search_collection command without any pre-configured scope." 
+ }, + { + "description": "Denies the search_collection command without any pre-configured scope.", + "type": "string", + "const": "deny-search-collection", + "markdownDescription": "Denies the search_collection command without any pre-configured scope." + }, + { + "description": "Default permissions for the vector-db plugin\n#### This default permission set includes:\n\n- `allow-get-status`\n- `allow-create-collection`\n- `allow-insert-chunks`\n- `allow-create-file`\n- `allow-search-collection`\n- `allow-delete-chunks`\n- `allow-delete-file`\n- `allow-delete-collection`\n- `allow-chunk-text`\n- `allow-list-attachments`\n- `allow-get-chunks`", + "type": "string", + "const": "default", + "markdownDescription": "Default permissions for the vector-db plugin\n#### This default permission set includes:\n\n- `allow-get-status`\n- `allow-create-collection`\n- `allow-insert-chunks`\n- `allow-create-file`\n- `allow-search-collection`\n- `allow-delete-chunks`\n- `allow-delete-file`\n- `allow-delete-collection`\n- `allow-chunk-text`\n- `allow-list-attachments`\n- `allow-get-chunks`" + } + ] + } + } +} \ No newline at end of file diff --git a/src-tauri/plugins/tauri-plugin-vector-db/rollup.config.js b/src-tauri/plugins/tauri-plugin-vector-db/rollup.config.js new file mode 100644 index 000000000..5047bf72d --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/rollup.config.js @@ -0,0 +1,32 @@ +import { readFileSync } from 'node:fs' +import { dirname, join } from 'node:path' +import { cwd } from 'node:process' +import typescript from '@rollup/plugin-typescript' + +const pkg = JSON.parse(readFileSync(join(cwd(), 'package.json'), 'utf8')) + +export default { + input: 'guest-js/index.ts', + output: [ + { + file: pkg.exports.import, + format: 'esm' + }, + { + file: pkg.exports.require, + format: 'cjs' + } + ], + plugins: [ + typescript({ + declaration: true, + declarationDir: dirname(pkg.exports.import) + }) + ], + external: [ + /^@tauri-apps\/api/, + ...Object.keys(pkg.dependencies || {}), + ...Object.keys(pkg.peerDependencies || {}) + ] +} + diff --git a/src-tauri/plugins/tauri-plugin-vector-db/src/commands.rs b/src-tauri/plugins/tauri-plugin-vector-db/src/commands.rs new file mode 100644 index 000000000..9996a9083 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/src/commands.rs @@ -0,0 +1,206 @@ +use crate::{VectorDBError, VectorDBState}; +use crate::db::{ + self, AttachmentFileInfo, SearchResult, MinimalChunkInput, +}; +use serde::{Deserialize, Serialize}; +use tauri::State; + +#[derive(Debug, Serialize, Deserialize)] +pub struct Status { + pub ann_available: bool, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct FileInput { + pub path: String, + pub name: Option, + #[serde(rename = "type")] + pub file_type: Option, + pub size: Option, +} + +// ============================================================================ +// Tauri Command Handlers +// ============================================================================ + +#[tauri::command] +pub async fn get_status(state: State<'_, VectorDBState>) -> Result { + println!("[VectorDB] Checking ANN availability..."); + let temp = db::collection_path(&state.base_dir, "__status__"); + let conn = db::open_or_init_conn(&temp)?; + + // Verbose version for startup diagnostics + let ann = { + if conn.execute("CREATE VIRTUAL TABLE IF NOT EXISTS temp.temp_vec USING vec0(embedding float[1])", []).is_ok() { + let _ = conn.execute("DROP TABLE IF EXISTS temp.temp_vec", []); + println!("[VectorDB] ✓ sqlite-vec already loaded"); + true + 
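// Editor's note: if the probe above succeeded, sqlite-vec is already linked into this
// connection; otherwise the else branch below enables extension loading and walks the
// bundled library paths from possible_sqlite_vec_paths().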
} else { + unsafe { let _ = conn.load_extension_enable(); } + let paths = db::possible_sqlite_vec_paths(); + println!("[VectorDB] Trying {} bundled paths...", paths.len()); + let mut found = false; + for p in paths { + println!("[VectorDB] Trying: {}", p); + unsafe { + if let Ok(_) = conn.load_extension(&p, Some("sqlite3_vec_init")) { + if conn.execute("CREATE VIRTUAL TABLE IF NOT EXISTS temp.temp_vec USING vec0(embedding float[1])", []).is_ok() { + let _ = conn.execute("DROP TABLE IF EXISTS temp.temp_vec", []); + println!("[VectorDB] ✓ sqlite-vec loaded from: {}", p); + found = true; + break; + } + } + } + } + if !found { + println!("[VectorDB] ✗ Failed to load sqlite-vec from all paths"); + } + found + } + }; + + println!("[VectorDB] ANN status: {}", if ann { "AVAILABLE ✓" } else { "NOT AVAILABLE ✗" }); + Ok(Status { ann_available: ann }) +} + +#[tauri::command] +pub async fn create_collection( + _app: tauri::AppHandle, + state: State<'_, VectorDBState>, + name: String, + dimension: usize, +) -> Result<(), VectorDBError> { + let path = db::collection_path(&state.base_dir, &name); + let conn = db::open_or_init_conn(&path)?; + + let has_ann = db::create_schema(&conn, dimension)?; + if has_ann { + println!("[VectorDB] ✓ Collection '{}' created with ANN support", name); + } else { + println!("[VectorDB] ⚠ Collection '{}' created WITHOUT ANN support (will use linear search)", name); + } + Ok(()) +} + +#[tauri::command] +pub async fn create_file( + _app: tauri::AppHandle, + state: State<'_, VectorDBState>, + collection: String, + file: FileInput, +) -> Result<AttachmentFileInfo, VectorDBError> { + let path = db::collection_path(&state.base_dir, &collection); + let conn = db::open_or_init_conn(&path)?; + db::create_file( + &conn, + &file.path, + file.name.as_deref(), + file.file_type.as_deref(), + file.size, + ) +} + +#[tauri::command] +pub async fn insert_chunks( + _app: tauri::AppHandle, + state: State<'_, VectorDBState>, + collection: String, + file_id: String, + chunks: Vec<MinimalChunkInput>, +) -> Result<(), VectorDBError> { + let path = db::collection_path(&state.base_dir, &collection); + let conn = db::open_or_init_conn(&path)?; + let vec_loaded = db::try_load_sqlite_vec(&conn); + db::insert_chunks(&conn, &file_id, chunks, vec_loaded) +} + +#[tauri::command] +pub async fn delete_file( + _app: tauri::AppHandle, + state: State<'_, VectorDBState>, + collection: String, + file_id: String, +) -> Result<(), VectorDBError> { + let path = db::collection_path(&state.base_dir, &collection); + let conn = db::open_or_init_conn(&path)?; + db::delete_file(&conn, &file_id) +} + +#[tauri::command] +pub async fn search_collection( + _app: tauri::AppHandle, + state: State<'_, VectorDBState>, + collection: String, + query_embedding: Vec<f32>, + limit: usize, + threshold: f32, + mode: Option<String>, + file_ids: Option<Vec<String>>, +) -> Result<Vec<SearchResult>, VectorDBError> { + let path = db::collection_path(&state.base_dir, &collection); + let conn = db::open_or_init_conn(&path)?; + let vec_loaded = db::try_load_sqlite_vec(&conn); + db::search_collection(&conn, &query_embedding, limit, threshold, mode, vec_loaded, file_ids) +} + +#[tauri::command] +pub async fn list_attachments( + _app: tauri::AppHandle, + state: State<'_, VectorDBState>, + collection: String, + limit: Option<i64>, +) -> Result<Vec<AttachmentFileInfo>, VectorDBError> { + let path = db::collection_path(&state.base_dir, &collection); + let conn = db::open_or_init_conn(&path)?; + db::list_attachments(&conn, limit) +} + +#[tauri::command] +pub async fn delete_chunks( + _app: tauri::AppHandle, + state: State<'_, VectorDBState>, + collection: String, + ids: Vec<String>,
+) -> Result<(), VectorDBError> { + let path = db::collection_path(&state.base_dir, &collection); + let conn = db::open_or_init_conn(&path)?; + db::delete_chunks(&conn, ids) +} + +#[tauri::command] +pub async fn delete_collection( + _app: tauri::AppHandle, + state: State<'_, VectorDBState>, + collection: String, +) -> Result<(), VectorDBError> { + let path = db::collection_path(&state.base_dir, &collection); + if path.exists() { + std::fs::remove_file(path).ok(); + } + Ok(()) +} + +#[tauri::command] +pub async fn chunk_text( + _app: tauri::AppHandle, + text: String, + chunk_size: usize, + chunk_overlap: usize, +) -> Result, VectorDBError> { + Ok(db::chunk_text(text, chunk_size, chunk_overlap)) +} + +#[tauri::command] +pub async fn get_chunks( + _app: tauri::AppHandle, + state: State<'_, VectorDBState>, + collection: String, + file_id: String, + start_order: i64, + end_order: i64, +) -> Result, VectorDBError> { + let path = db::collection_path(&state.base_dir, &collection); + let conn = db::open_or_init_conn(&path)?; + db::get_chunks(&conn, file_id, start_order, end_order) +} diff --git a/src-tauri/plugins/tauri-plugin-vector-db/src/db.rs b/src-tauri/plugins/tauri-plugin-vector-db/src/db.rs new file mode 100644 index 000000000..c050a82ae --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/src/db.rs @@ -0,0 +1,630 @@ +use crate::VectorDBError; +use crate::utils::{cosine_similarity, from_le_bytes_vec, to_le_bytes_vec}; +use rusqlite::{params, Connection, OptionalExtension}; +use serde::{Deserialize, Serialize}; +use std::fs; +use std::path::PathBuf; +use uuid::Uuid; + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct FileMetadata { + pub name: Option, + pub path: String, + #[serde(rename = "type")] + pub file_type: Option, + pub size: Option, +} + + +#[derive(Debug, Serialize, Deserialize)] +pub struct SearchResult { + pub id: String, + pub text: String, + pub score: Option, + pub file_id: String, + pub chunk_file_order: i64, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct AttachmentFileInfo { + pub id: String, + pub name: Option, + pub path: Option, + #[serde(rename = "type")] + pub file_type: Option, + pub size: Option, + pub chunk_count: i64, +} + +// New minimal chunk input (no id/metadata) for file-scoped insertion +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct MinimalChunkInput { + pub text: String, + pub embedding: Vec, +} + +// ============================================================================ +// Connection & Path Management +// ============================================================================ + +pub fn collection_path(base: &PathBuf, name: &str) -> PathBuf { + let mut p = base.clone(); + let clean = name.replace(['/', '\\'], "_"); + let filename = format!("{}.db", clean); + p.push(&filename); + p +} + +pub fn open_or_init_conn(path: &PathBuf) -> Result { + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).ok(); + } + let conn = Connection::open(path)?; + Ok(conn) +} + +// ============================================================================ +// SQLite-vec Extension Loading +// ============================================================================ + +pub fn try_load_sqlite_vec(conn: &Connection) -> bool { + // Check if vec0 module is already available + if conn.execute("CREATE VIRTUAL TABLE IF NOT EXISTS temp.temp_vec USING vec0(embedding float[1])", []).is_ok() { + let _ = conn.execute("DROP TABLE IF EXISTS temp.temp_vec", []); + return true; + } + + unsafe { + let _ = 
conn.load_extension_enable(); + } + + let paths = possible_sqlite_vec_paths(); + for p in paths { + unsafe { + if let Ok(_) = conn.load_extension(&p, Some("sqlite3_vec_init")) { + if conn.execute("CREATE VIRTUAL TABLE IF NOT EXISTS temp.temp_vec USING vec0(embedding float[1])", []).is_ok() { + let _ = conn.execute("DROP TABLE IF EXISTS temp.temp_vec", []); + return true; + } + } + } + } + + false +} + +pub fn possible_sqlite_vec_paths() -> Vec { + let mut paths = Vec::new(); + + // Dev paths + paths.push("./src-tauri/resources/bin/sqlite-vec".to_string()); + paths.push("./resources/bin/sqlite-vec".to_string()); + + // Exe-relative paths + if let Ok(exe) = std::env::current_exe() { + if let Some(dir) = exe.parent() { + let mut d = dir.to_path_buf(); + d.push("resources"); + d.push("bin"); + d.push("sqlite-vec"); + paths.push(d.to_string_lossy().to_string()); + } + + #[cfg(target_os = "macos")] + { + if let Some(mac_dir) = exe.parent().and_then(|p| p.parent()) { + let mut r = mac_dir.to_path_buf(); + r.push("Resources"); + r.push("bin"); + r.push("sqlite-vec"); + paths.push(r.to_string_lossy().to_string()); + } + } + } + paths +} + +pub fn ensure_vec_table(conn: &Connection, dimension: usize) -> bool { + if try_load_sqlite_vec(conn) { + let create = format!( + "CREATE VIRTUAL TABLE IF NOT EXISTS chunks_vec USING vec0(embedding float[{}])", + dimension + ); + match conn.execute(&create, []) { + Ok(_) => return true, + Err(e) => { + println!("[VectorDB] ✗ Failed to create chunks_vec: {}", e); + } + } + } + false +} + +// ============================================================================ +// Schema Creation +// ============================================================================ + +pub fn create_schema(conn: &Connection, dimension: usize) -> Result { + // Files table + conn.execute( + "CREATE TABLE IF NOT EXISTS files ( + id TEXT PRIMARY KEY, + path TEXT UNIQUE NOT NULL, + name TEXT, + type TEXT, + size INTEGER, + chunk_count INTEGER DEFAULT 0 + )", + [], + )?; + + // Chunks table + conn.execute( + "CREATE TABLE IF NOT EXISTS chunks ( + id TEXT PRIMARY KEY, + text TEXT NOT NULL, + embedding BLOB NOT NULL, + file_id TEXT, + chunk_file_order INTEGER, + FOREIGN KEY (file_id) REFERENCES files(id) + )", + [], + )?; + + conn.execute("CREATE INDEX IF NOT EXISTS idx_chunks_id ON chunks(id)", [])?; + conn.execute("CREATE INDEX IF NOT EXISTS idx_chunks_file_id ON chunks(file_id)", [])?; + conn.execute("CREATE INDEX IF NOT EXISTS idx_chunks_file_order ON chunks(file_id, chunk_file_order)", [])?; + + // Try to create vec virtual table + let has_ann = ensure_vec_table(conn, dimension); + Ok(has_ann) +} + +// ============================================================================ +// Insert Operations +// ============================================================================ + +pub fn create_file( + conn: &Connection, + path: &str, + name: Option<&str>, + file_type: Option<&str>, + size: Option, +) -> Result { + let tx = conn.unchecked_transaction()?; + + // Try get existing by path + if let Ok(Some(id)) = tx + .prepare("SELECT id FROM files WHERE path = ?1") + .and_then(|mut s| s.query_row(params![path], |r| r.get::<_, String>(0)).optional()) + { + let row: AttachmentFileInfo = { + let mut stmt = tx.prepare( + "SELECT id, path, name, type, size, chunk_count FROM files WHERE id = ?1", + )?; + stmt.query_row(params![id.as_str()], |r| { + Ok(AttachmentFileInfo { + id: r.get(0)?, + path: r.get(1)?, + name: r.get(2)?, + file_type: r.get(3)?, + size: r.get(4)?, + chunk_count: 
r.get(5)?, + }) + })? + }; + tx.commit()?; + return Ok(row); + } + + let new_id = Uuid::new_v4().to_string(); + // Determine file size if not provided + let computed_size: Option = match size { + Some(s) if s > 0 => Some(s), + _ => { + match std::fs::metadata(path) { + Ok(meta) => Some(meta.len() as i64), + Err(_) => None, + } + } + }; + tx.execute( + "INSERT INTO files (id, path, name, type, size, chunk_count) VALUES (?1, ?2, ?3, ?4, ?5, 0)", + params![new_id, path, name, file_type, computed_size], + )?; + + let row: AttachmentFileInfo = { + let mut stmt = tx.prepare( + "SELECT id, path, name, type, size, chunk_count FROM files WHERE path = ?1", + )?; + stmt.query_row(params![path], |r| { + Ok(AttachmentFileInfo { + id: r.get(0)?, + path: r.get(1)?, + name: r.get(2)?, + file_type: r.get(3)?, + size: r.get(4)?, + chunk_count: r.get(5)?, + }) + })? + }; + + tx.commit()?; + Ok(row) +} + +pub fn insert_chunks( + conn: &Connection, + file_id: &str, + chunks: Vec, + vec_loaded: bool, +) -> Result<(), VectorDBError> { + let tx = conn.unchecked_transaction()?; + + // Check if vec table exists + let has_vec = if vec_loaded { + conn + .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='chunks_vec'") + .and_then(|mut s| s.query_row([], |r| r.get::<_, String>(0)).optional()) + .ok() + .flatten() + .is_some() + } else { + false + }; + + // Determine current max order + let mut current_order: i64 = tx + .query_row( + "SELECT COALESCE(MAX(chunk_file_order), -1) FROM chunks WHERE file_id = ?1", + params![file_id], + |row| row.get::<_, i64>(0), + ) + .unwrap_or(-1); + + for ch in chunks.into_iter() { + current_order += 1; + let emb = to_le_bytes_vec(&ch.embedding); + let chunk_id = Uuid::new_v4().to_string(); + tx.execute( + "INSERT OR REPLACE INTO chunks (id, text, embedding, file_id, chunk_file_order) VALUES (?1, ?2, ?3, ?4, ?5)", + params![chunk_id, ch.text, emb, file_id, current_order], + )?; + + if has_vec { + let rowid: i64 = tx + .prepare("SELECT rowid FROM chunks WHERE id=?1")? 
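// Editor's note: the vec0 table is keyed by the base table's rowid, so the rowid of the
// chunk just inserted is looked up here and reused when mirroring the embedding into
// chunks_vec; search_ann later joins chunks_vec back to chunks on that same rowid.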
+ .query_row(params![chunk_id], |r| r.get(0))?; + let json_vec = serde_json::to_string(&ch.embedding).unwrap_or("[]".to_string()); + let _ = tx.execute( + "INSERT OR REPLACE INTO chunks_vec(rowid, embedding) VALUES (?1, ?2)", + params![rowid, json_vec], + ); + } + } + + // Update chunk_count + let count: i64 = tx.query_row( + "SELECT COUNT(*) FROM chunks WHERE file_id = ?1", + params![file_id], + |row| row.get(0), + )?; + tx.execute( + "UPDATE files SET chunk_count = ?1 WHERE id = ?2", + params![count, file_id], + )?; + + tx.commit()?; + Ok(()) +} + +pub fn delete_file(conn: &Connection, file_id: &str) -> Result<(), VectorDBError> { + let tx = conn.unchecked_transaction()?; + tx.execute("DELETE FROM chunks WHERE file_id = ?1", params![file_id])?; + tx.execute("DELETE FROM files WHERE id = ?1", params![file_id])?; + tx.commit()?; + Ok(()) +} + +// ============================================================================ +// Search Operations +// ============================================================================ + +pub fn search_collection( + conn: &Connection, + query_embedding: &[f32], + limit: usize, + threshold: f32, + mode: Option, + vec_loaded: bool, + file_ids: Option>, +) -> Result, VectorDBError> { + let has_vec = if vec_loaded { + conn + .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='chunks_vec'") + .and_then(|mut s| s.query_row([], |r| r.get::<_, String>(0)).optional()) + .ok() + .flatten() + .is_some() + } else { + false + }; + + let prefer_ann = match mode.as_deref() { + Some("ann") => true, + Some("linear") => false, + _ => true, // auto prefers ANN when available + }; + + if has_vec && prefer_ann { + search_ann(conn, query_embedding, limit, file_ids) + } else { + search_linear(conn, query_embedding, limit, threshold, file_ids) + } +} + +fn search_ann( + conn: &Connection, + query_embedding: &[f32], + limit: usize, + file_ids: Option>, +) -> Result, VectorDBError> { + let json_vec = serde_json::to_string(&query_embedding).unwrap_or("[]".to_string()); + + // Build query with optional file_id filtering + let query = if let Some(ref ids) = file_ids { + let placeholders = ids.iter().map(|_| "?").collect::>().join(","); + format!( + "SELECT c.id, c.text, c.file_id, c.chunk_file_order, v.distance + FROM chunks_vec v + JOIN chunks c ON c.rowid = v.rowid + WHERE v.embedding MATCH ?1 AND k = ?2 AND c.file_id IN ({}) + ORDER BY v.distance", + placeholders + ) + } else { + "SELECT c.id, c.text, c.file_id, c.chunk_file_order, v.distance + FROM chunks_vec v + JOIN chunks c ON c.rowid = v.rowid + WHERE v.embedding MATCH ?1 AND k = ?2 + ORDER BY v.distance".to_string() + }; + + let mut stmt = match conn.prepare(&query) { + Ok(s) => s, + Err(e) => { + println!("[VectorDB] ✗ Failed to prepare ANN query: {}", e); + return Err(e.into()); + } + }; + + let mut rows = if let Some(ids) = file_ids { + let mut params: Vec> = vec![ + Box::new(json_vec), + Box::new(limit as i64), + ]; + for id in ids { + params.push(Box::new(id)); + } + let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect(); + match stmt.query(&*param_refs) { + Ok(r) => r, + Err(e) => { + println!("[VectorDB] ✗ Failed to execute ANN query: {}", e); + return Err(e.into()); + } + } + } else { + match stmt.query(params![json_vec, limit as i64]) { + Ok(r) => r, + Err(e) => { + println!("[VectorDB] ✗ Failed to execute ANN query: {}", e); + return Err(e.into()); + } + } + }; + + let mut results = Vec::new(); + while let Some(row) = rows.next()? 
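// Editor's note: vec0 reports a distance (smaller is better), which is passed through as
// `score` unchanged; the linear fallback below instead reports cosine similarity (larger is
// better) and applies `threshold`, so callers should interpret `score` according to the mode.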
{ + let id: String = row.get(0)?; + let text: String = row.get(1)?; + let file_id: String = row.get(2)?; + let chunk_file_order: i64 = row.get(3)?; + let distance: f32 = row.get(4)?; + + results.push(SearchResult { + id, + text, + score: Some(distance), + file_id, + chunk_file_order, + }); + } + + println!("[VectorDB] ANN search returned {} results", results.len()); + Ok(results) +} + +fn search_linear( + conn: &Connection, + query_embedding: &[f32], + limit: usize, + threshold: f32, + file_ids: Option>, +) -> Result, VectorDBError> { + let (query, params_vec): (String, Vec>) = if let Some(ids) = file_ids { + let placeholders = ids.iter().map(|_| "?").collect::>().join(","); + let query_str = format!( + "SELECT c.id, c.text, c.embedding, c.file_id, c.chunk_file_order + FROM chunks c + WHERE c.file_id IN ({})", + placeholders + ); + let mut params: Vec> = Vec::new(); + for id in ids { + params.push(Box::new(id)); + } + (query_str, params) + } else { + ( + "SELECT c.id, c.text, c.embedding, c.file_id, c.chunk_file_order + FROM chunks c".to_string(), + Vec::new() + ) + }; + + let mut stmt = conn.prepare(&query)?; + let param_refs: Vec<&dyn rusqlite::ToSql> = params_vec.iter().map(|p| p.as_ref()).collect(); + let mut rows = if param_refs.is_empty() { + stmt.query([])? + } else { + stmt.query(&*param_refs)? + }; + let mut results: Vec = Vec::new(); + + while let Some(row) = rows.next()? { + let id: String = row.get(0)?; + let text: String = row.get(1)?; + let embedding_bytes: Vec = row.get(2)?; + let file_id: String = row.get(3)?; + let chunk_file_order: i64 = row.get(4)?; + + let emb = from_le_bytes_vec(&embedding_bytes); + let score = cosine_similarity(query_embedding, &emb)?; + + if score >= threshold { + results.push(SearchResult { + id, + text, + score: Some(score), + file_id, + chunk_file_order, + }); + } + } + + results.sort_by(|a, b| { + match (b.score, a.score) { + (Some(b_score), Some(a_score)) => b_score.partial_cmp(&a_score).unwrap_or(std::cmp::Ordering::Equal), + (Some(_), None) => std::cmp::Ordering::Less, + (None, Some(_)) => std::cmp::Ordering::Greater, + (None, None) => std::cmp::Ordering::Equal, + } + }); + let take: Vec = results.into_iter().take(limit).collect(); + println!("[VectorDB] Linear search returned {} results", take.len()); + Ok(take) +} + +// ============================================================================ +// List Operations +// ============================================================================ + +pub fn list_attachments( + conn: &Connection, + limit: Option, +) -> Result, VectorDBError> { + let query = if let Some(lim) = limit { + format!("SELECT id, path, name, type, size, chunk_count FROM files LIMIT {}", lim) + } else { + "SELECT id, path, name, type, size, chunk_count FROM files".to_string() + }; + + let mut stmt = conn.prepare(&query)?; + let mut rows = stmt.query([])?; + let mut out = Vec::new(); + + while let Some(row) = rows.next()? 
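// Editor's note: column order mirrors the SELECT above (id, path, name, type, size, chunk_count).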
{ + let id: String = row.get(0)?; + let path: Option = row.get(1)?; + let name: Option = row.get(2)?; + let file_type: Option = row.get(3)?; + let size: Option = row.get(4)?; + let chunk_count: i64 = row.get(5)?; + out.push(AttachmentFileInfo { + id, + name, + path, + file_type, + size, + chunk_count, + }); + } + + Ok(out) +} + +// ============================================================================ +// Delete Operations +// ============================================================================ + +pub fn delete_chunks(conn: &Connection, ids: Vec) -> Result<(), VectorDBError> { + let tx = conn.unchecked_transaction()?; + for id in ids { + tx.execute("DELETE FROM chunks WHERE id = ?1", params![id])?; + } + tx.commit()?; + Ok(()) +} + +// ============================================================================ +// Get Chunks by Order +// ============================================================================ + +pub fn get_chunks( + conn: &Connection, + file_id: String, + start_order: i64, + end_order: i64, +) -> Result, VectorDBError> { + let mut stmt = conn.prepare( + "SELECT id, text, chunk_file_order FROM chunks + WHERE file_id = ?1 AND chunk_file_order >= ?2 AND chunk_file_order <= ?3 + ORDER BY chunk_file_order" + )?; + let mut rows = stmt.query(params![&file_id, start_order, end_order])?; + + let mut results = Vec::new(); + while let Some(row) = rows.next()? { + results.push(SearchResult { + id: row.get(0)?, + text: row.get(1)?, + score: None, + file_id: file_id.clone(), + chunk_file_order: row.get(2)?, + }); + } + + Ok(results) +} + +// ============================================================================ +// Utility Operations +// ============================================================================ + +pub fn chunk_text(text: String, chunk_size: usize, chunk_overlap: usize) -> Vec { + if chunk_size == 0 { + return vec![]; + } + + let mut chunks = Vec::new(); + let chars: Vec = text.chars().collect(); + let mut start = 0usize; + + while start < chars.len() { + let end = (start + chunk_size).min(chars.len()); + let ch: String = chars[start..end].iter().collect(); + chunks.push(ch); + if end >= chars.len() { + break; + } + let advance = if chunk_overlap >= chunk_size { + 1 + } else { + chunk_size - chunk_overlap + }; + start += advance; + } + + chunks +} diff --git a/src-tauri/plugins/tauri-plugin-vector-db/src/error.rs b/src-tauri/plugins/tauri-plugin-vector-db/src/error.rs new file mode 100644 index 000000000..6c2fdcb3a --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/src/error.rs @@ -0,0 +1,23 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, thiserror::Error, Serialize, Deserialize)] +pub enum VectorDBError { + #[error("Database error: {0}")] + DatabaseError(String), + + #[error("Invalid input: {0}")] + InvalidInput(String), +} + +impl From for VectorDBError { + fn from(err: rusqlite::Error) -> Self { + VectorDBError::DatabaseError(err.to_string()) + } +} + +impl From for VectorDBError { + fn from(err: serde_json::Error) -> Self { + VectorDBError::DatabaseError(err.to_string()) + } +} + diff --git a/src-tauri/plugins/tauri-plugin-vector-db/src/lib.rs b/src-tauri/plugins/tauri-plugin-vector-db/src/lib.rs new file mode 100644 index 000000000..9c5d72c02 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/src/lib.rs @@ -0,0 +1,36 @@ +use tauri::{ + plugin::{Builder, TauriPlugin}, + Runtime, + Manager, +}; + +mod commands; +mod db; +mod error; +mod state; +mod utils; + +pub use error::VectorDBError; +pub use 
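// Editor's sketch (not part of this diff): a host application would typically register the
// plugin on its Tauri builder before invoking any of the commands above, for example:
//
//     tauri::Builder::default()
//         .plugin(tauri_plugin_vector_db::init())
//         .run(tauri::generate_context!())
//         .expect("error while running tauri application");
//
// The exact call site inside Jan is an assumption; only init() itself is defined in this file.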
state::VectorDBState; + +pub fn init() -> TauriPlugin { + Builder::new("vector-db") + .invoke_handler(tauri::generate_handler![ + commands::create_collection, + commands::insert_chunks, + commands::create_file, + commands::search_collection, + commands::delete_chunks, + commands::delete_file, + commands::delete_collection, + commands::chunk_text, + commands::get_status, + commands::list_attachments, + commands::get_chunks, + ]) + .setup(|app, _api| { + app.manage(state::VectorDBState::new()); + Ok(()) + }) + .build() +} diff --git a/src-tauri/plugins/tauri-plugin-vector-db/src/state.rs b/src-tauri/plugins/tauri-plugin-vector-db/src/state.rs new file mode 100644 index 000000000..8813625e2 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/src/state.rs @@ -0,0 +1,17 @@ +use std::path::PathBuf; + +pub struct VectorDBState { + pub base_dir: PathBuf, +} + +impl VectorDBState { + pub fn new() -> Self { + // Default vector db path: /Jan/data/db + let mut base = dirs::data_dir().unwrap_or_else(|| PathBuf::from(".")); + base.push("Jan"); + base.push("data"); + base.push("db"); + std::fs::create_dir_all(&base).ok(); + Self { base_dir: base } + } +} diff --git a/src-tauri/plugins/tauri-plugin-vector-db/src/utils.rs b/src-tauri/plugins/tauri-plugin-vector-db/src/utils.rs new file mode 100644 index 000000000..be0b54796 --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/src/utils.rs @@ -0,0 +1,27 @@ +use crate::VectorDBError; + +pub fn cosine_similarity(a: &[f32], b: &[f32]) -> Result { + if a.len() != b.len() { + return Err(VectorDBError::InvalidInput( + "Vector dimensions don't match".to_string(), + )); + } + + let dot: f32 = a.iter().zip(b.iter()).map(|(x, y)| x * y).sum(); + let mag_a: f32 = a.iter().map(|x| x * x).sum::().sqrt(); + let mag_b: f32 = b.iter().map(|x| x * x).sum::().sqrt(); + if mag_a == 0.0 || mag_b == 0.0 { return Ok(0.0); } + Ok(dot / (mag_a * mag_b)) +} + +pub fn to_le_bytes_vec(v: &[f32]) -> Vec { + v.iter().flat_map(|f| f.to_le_bytes()).collect::>() +} + +pub fn from_le_bytes_vec(bytes: &[u8]) -> Vec { + bytes + .chunks_exact(4) + .map(|b| f32::from_le_bytes([b[0], b[1], b[2], b[3]])) + .collect::>() +} + diff --git a/src-tauri/plugins/tauri-plugin-vector-db/tsconfig.json b/src-tauri/plugins/tauri-plugin-vector-db/tsconfig.json new file mode 100644 index 000000000..60bc6a8eb --- /dev/null +++ b/src-tauri/plugins/tauri-plugin-vector-db/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "es2021", + "module": "esnext", + "moduleResolution": "bundler", + "skipLibCheck": true, + "strict": true, + "noUnusedLocals": true, + "noImplicitAny": true, + "noEmit": true + }, + "include": ["guest-js/*.ts"], + "exclude": ["dist-js", "node_modules"] +} + diff --git a/src-tauri/src/core/app/commands.rs b/src-tauri/src/core/app/commands.rs index 0d9c66c12..18e746869 100644 --- a/src-tauri/src/core/app/commands.rs +++ b/src-tauri/src/core/app/commands.rs @@ -19,10 +19,7 @@ pub fn get_app_configurations(app_handle: tauri::AppHandle) -> Ap let default_data_folder = default_data_folder_path(app_handle.clone()); if !configuration_file.exists() { - log::info!( - "App config not found, creating default config at {:?}", - configuration_file - ); + log::info!("App config not found, creating default config at {configuration_file:?}"); app_default_configuration.data_folder = default_data_folder; @@ -30,7 +27,7 @@ pub fn get_app_configurations(app_handle: tauri::AppHandle) -> Ap &configuration_file, serde_json::to_string(&app_default_configuration).unwrap(), ) { - 
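// Editor's note on the mechanical changes in the remaining hunks (not part of the diff):
// they adopt Rust's inlined format arguments, where a bare identifier can be captured
// directly in the format string, e.g.
//     format!("Failed to create default config: {}", err)
// becomes
//     format!("Failed to create default config: {err}")
// Only plain identifiers can be captured this way; expressions such as item.url or
// save_path.display() keep their positional {} arguments, which is why some call sites
// are left unchanged.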
log::error!("Failed to create default config: {}", err); + log::error!("Failed to create default config: {err}"); } return app_default_configuration; @@ -40,18 +37,12 @@ pub fn get_app_configurations(app_handle: tauri::AppHandle) -> Ap Ok(content) => match serde_json::from_str::(&content) { Ok(app_configurations) => app_configurations, Err(err) => { - log::error!( - "Failed to parse app config, returning default config instead. Error: {}", - err - ); + log::error!("Failed to parse app config, returning default config instead. Error: {err}"); app_default_configuration } }, Err(err) => { - log::error!( - "Failed to read app config, returning default config instead. Error: {}", - err - ); + log::error!("Failed to read app config, returning default config instead. Error: {err}"); app_default_configuration } } @@ -63,10 +54,7 @@ pub fn update_app_configuration( configuration: AppConfiguration, ) -> Result<(), String> { let configuration_file = get_configuration_file_path(app_handle); - log::info!( - "update_app_configuration, configuration_file: {:?}", - configuration_file - ); + log::info!("update_app_configuration, configuration_file: {configuration_file:?}"); fs::write( configuration_file, @@ -95,8 +83,7 @@ pub fn get_jan_data_folder_path(app_handle: tauri::AppHandle) -> pub fn get_configuration_file_path(app_handle: tauri::AppHandle) -> PathBuf { let app_path = app_handle.path().app_data_dir().unwrap_or_else(|err| { log::error!( - "Failed to get app data directory: {}. Using home directory instead.", - err + "Failed to get app data directory: {err}. Using home directory instead." ); let home_dir = std::env::var(if cfg!(target_os = "windows") { @@ -130,9 +117,9 @@ pub fn get_configuration_file_path(app_handle: tauri::AppHandle) .join(package_name); if old_data_dir.exists() { - return old_data_dir.join(CONFIGURATION_FILE_NAME); + old_data_dir.join(CONFIGURATION_FILE_NAME) } else { - return app_path.join(CONFIGURATION_FILE_NAME); + app_path.join(CONFIGURATION_FILE_NAME) } } @@ -156,7 +143,7 @@ pub fn default_data_folder_path(app_handle: tauri::AppHandle) -> #[tauri::command] pub fn get_user_home_path(app: AppHandle) -> String { - return get_app_configurations(app.clone()).data_folder; + get_app_configurations(app.clone()).data_folder } #[tauri::command] @@ -171,16 +158,12 @@ pub fn change_app_data_folder( // Create the new data folder if it doesn't exist if !new_data_folder_path.exists() { fs::create_dir_all(&new_data_folder_path) - .map_err(|e| format!("Failed to create new data folder: {}", e))?; + .map_err(|e| format!("Failed to create new data folder: {e}"))?; } // Copy all files from the old folder to the new one if current_data_folder.exists() { - log::info!( - "Copying data from {:?} to {:?}", - current_data_folder, - new_data_folder_path - ); + log::info!("Copying data from {current_data_folder:?} to {new_data_folder_path:?}"); // Check if this is a parent directory to avoid infinite recursion if new_data_folder_path.starts_with(¤t_data_folder) { @@ -193,7 +176,7 @@ pub fn change_app_data_folder( &new_data_folder_path, &[".uvx", ".npx"], ) - .map_err(|e| format!("Failed to copy data to new folder: {}", e))?; + .map_err(|e| format!("Failed to copy data to new folder: {e}"))?; } else { log::info!("Current data folder does not exist, nothing to copy"); } diff --git a/src-tauri/src/core/downloads/commands.rs b/src-tauri/src/core/downloads/commands.rs index 6d50ed1a3..a24ae32f0 100644 --- a/src-tauri/src/core/downloads/commands.rs +++ b/src-tauri/src/core/downloads/commands.rs @@ -19,7 
+19,7 @@ pub async fn download_files( { let mut download_manager = state.download_manager.lock().await; if download_manager.cancel_tokens.contains_key(task_id) { - return Err(format!("task_id {} exists", task_id)); + return Err(format!("task_id {task_id} exists")); } download_manager .cancel_tokens @@ -60,9 +60,9 @@ pub async fn cancel_download_task(state: State<'_, AppState>, task_id: &str) -> let mut download_manager = state.download_manager.lock().await; if let Some(token) = download_manager.cancel_tokens.remove(task_id) { token.cancel(); - log::info!("Cancelled download task: {}", task_id); + log::info!("Cancelled download task: {task_id}"); Ok(()) } else { - Err(format!("No download task: {}", task_id)) + Err(format!("No download task: {task_id}")) } } diff --git a/src-tauri/src/core/downloads/helpers.rs b/src-tauri/src/core/downloads/helpers.rs index d3d8f6b7c..3ce1d89fa 100644 --- a/src-tauri/src/core/downloads/helpers.rs +++ b/src-tauri/src/core/downloads/helpers.rs @@ -15,7 +15,7 @@ use url::Url; // ===== UTILITY FUNCTIONS ===== pub fn err_to_string(e: E) -> String { - format!("Error: {}", e) + format!("Error: {e}") } @@ -55,7 +55,7 @@ async fn validate_downloaded_file( ) .unwrap(); - log::info!("Starting validation for model: {}", model_id); + log::info!("Starting validation for model: {model_id}"); // Validate size if provided (fast check first) if let Some(expected_size) = &item.size { @@ -73,8 +73,7 @@ async fn validate_downloaded_file( actual_size ); return Err(format!( - "Size verification failed. Expected {} bytes but got {} bytes.", - expected_size, actual_size + "Size verification failed. Expected {expected_size} bytes but got {actual_size} bytes." )); } @@ -90,7 +89,7 @@ async fn validate_downloaded_file( save_path.display(), e ); - return Err(format!("Failed to verify file size: {}", e)); + return Err(format!("Failed to verify file size: {e}")); } } } @@ -115,9 +114,7 @@ async fn validate_downloaded_file( computed_sha256 ); - return Err(format!( - "Hash verification failed. The downloaded file is corrupted or has been tampered with." - )); + return Err("Hash verification failed. 
The downloaded file is corrupted or has been tampered with.".to_string()); } log::info!("Hash verification successful for {}", item.url); @@ -128,7 +125,7 @@ async fn validate_downloaded_file( save_path.display(), e ); - return Err(format!("Failed to verify file integrity: {}", e)); + return Err(format!("Failed to verify file integrity: {e}")); } } } @@ -140,14 +137,14 @@ async fn validate_downloaded_file( pub fn validate_proxy_config(config: &ProxyConfig) -> Result<(), String> { // Validate proxy URL format if let Err(e) = Url::parse(&config.url) { - return Err(format!("Invalid proxy URL '{}': {}", config.url, e)); + return Err(format!("Invalid proxy URL '{}': {e}", config.url)); } // Check if proxy URL has valid scheme let url = Url::parse(&config.url).unwrap(); // Safe to unwrap as we just validated it match url.scheme() { "http" | "https" | "socks4" | "socks5" => {} - scheme => return Err(format!("Unsupported proxy scheme: {}", scheme)), + scheme => return Err(format!("Unsupported proxy scheme: {scheme}")), } // Validate authentication credentials @@ -167,7 +164,7 @@ pub fn validate_proxy_config(config: &ProxyConfig) -> Result<(), String> { } // Basic validation for wildcard patterns if entry.starts_with("*.") && entry.len() < 3 { - return Err(format!("Invalid wildcard pattern: {}", entry)); + return Err(format!("Invalid wildcard pattern: {entry}")); } } } @@ -214,8 +211,7 @@ pub fn should_bypass_proxy(url: &str, no_proxy: &[String]) -> bool { } // Simple wildcard matching - if entry.starts_with("*.") { - let domain = &entry[2..]; + if let Some(domain) = entry.strip_prefix("*.") { if host.ends_with(domain) { return true; } @@ -305,7 +301,7 @@ pub async fn _download_files_internal( resume: bool, cancel_token: CancellationToken, ) -> Result<(), String> { - log::info!("Start download task: {}", task_id); + log::info!("Start download task: {task_id}"); let header_map = _convert_headers(headers).map_err(err_to_string)?; @@ -320,9 +316,9 @@ pub async fn _download_files_internal( } let total_size: u64 = file_sizes.values().sum(); - log::info!("Total download size: {}", total_size); + log::info!("Total download size: {total_size}"); - let evt_name = format!("download-{}", task_id); + let evt_name = format!("download-{task_id}"); // Create progress tracker let progress_tracker = ProgressTracker::new(items, file_sizes.clone()); @@ -352,7 +348,7 @@ pub async fn _download_files_internal( let cancel_token_clone = cancel_token.clone(); let evt_name_clone = evt_name.clone(); let progress_tracker_clone = progress_tracker.clone(); - let file_id = format!("{}-{}", task_id, index); + let file_id = format!("{task_id}-{index}"); let file_size = file_sizes.get(&item.url).copied().unwrap_or(0); let task = tokio::spawn(async move { @@ -377,7 +373,7 @@ pub async fn _download_files_internal( // Wait for all downloads to complete let mut validation_tasks = Vec::new(); for (task, item) in download_tasks.into_iter().zip(items.iter()) { - let result = task.await.map_err(|e| format!("Task join error: {}", e))?; + let result = task.await.map_err(|e| format!("Task join error: {e}"))?; match result { Ok(downloaded_path) => { @@ -399,7 +395,7 @@ pub async fn _download_files_internal( for (validation_task, save_path, _item) in validation_tasks { let validation_result = validation_task .await - .map_err(|e| format!("Validation task join error: {}", e))?; + .map_err(|e| format!("Validation task join error: {e}"))?; if let Err(validation_error) = validation_result { // Clean up the file if validation fails @@ -448,7 +444,7 
@@ async fn download_single_file( if current_extension.is_empty() { ext.to_string() } else { - format!("{}.{}", current_extension, ext) + format!("{current_extension}.{ext}") } }; let tmp_save_path = save_path.with_extension(append_extension("tmp")); @@ -469,8 +465,8 @@ async fn download_single_file( let decoded_url = url::Url::parse(&item.url) .map(|u| u.to_string()) .unwrap_or_else(|_| item.url.clone()); - log::info!("Started downloading: {}", decoded_url); - let client = _get_client_for_item(item, &header_map).map_err(err_to_string)?; + log::info!("Started downloading: {decoded_url}"); + let client = _get_client_for_item(item, header_map).map_err(err_to_string)?; let mut download_delta = 0u64; let mut initial_progress = 0u64; @@ -503,7 +499,7 @@ async fn download_single_file( } Err(e) => { // fallback to normal download - log::warn!("Failed to resume download: {}", e); + log::warn!("Failed to resume download: {e}"); should_resume = false; _get_maybe_resume(&client, &item.url, 0).await? } @@ -592,7 +588,7 @@ async fn download_single_file( let decoded_url = url::Url::parse(&item.url) .map(|u| u.to_string()) .unwrap_or_else(|_| item.url.clone()); - log::info!("Finished downloading: {}", decoded_url); + log::info!("Finished downloading: {decoded_url}"); Ok(save_path.to_path_buf()) } @@ -606,7 +602,7 @@ pub async fn _get_maybe_resume( if start_bytes > 0 { let resp = client .get(url) - .header("Range", format!("bytes={}-", start_bytes)) + .header("Range", format!("bytes={start_bytes}-")) .send() .await .map_err(err_to_string)?; diff --git a/src-tauri/src/core/extensions/commands.rs b/src-tauri/src/core/extensions/commands.rs index 4c5a44a53..e416a03a3 100644 --- a/src-tauri/src/core/extensions/commands.rs +++ b/src-tauri/src/core/extensions/commands.rs @@ -13,41 +13,51 @@ pub fn get_jan_extensions_path(app_handle: tauri::AppHandle) -> P #[tauri::command] pub fn install_extensions(app: AppHandle) { if let Err(err) = setup::install_extensions(app, true) { - log::error!("Failed to install extensions: {}", err); + log::error!("Failed to install extensions: {err}"); } } #[tauri::command] pub fn get_active_extensions(app: AppHandle) -> Vec { - let mut path = get_jan_extensions_path(app); - path.push("extensions.json"); - log::info!("get jan extensions, path: {:?}", path); + // On mobile platforms, extensions are pre-bundled in the frontend + // Return empty array so frontend's MobileCoreService handles it + #[cfg(any(target_os = "android", target_os = "ios"))] + { + return vec![]; + } - let contents = fs::read_to_string(path); - let contents: Vec = match contents { - Ok(data) => match serde_json::from_str::>(&data) { - Ok(exts) => exts - .into_iter() - .map(|ext| { - serde_json::json!({ - "url": ext["url"], - "name": ext["name"], - "productName": ext["productName"], - "active": ext["_active"], - "description": ext["description"], - "version": ext["version"] + #[cfg(not(any(target_os = "android", target_os = "ios")))] + { + let mut path = get_jan_extensions_path(app); + path.push("extensions.json"); + log::info!("get jan extensions, path: {path:?}"); + + let contents = fs::read_to_string(path); + let contents: Vec = match contents { + Ok(data) => match serde_json::from_str::>(&data) { + Ok(exts) => exts + .into_iter() + .map(|ext| { + serde_json::json!({ + "url": ext["url"], + "name": ext["name"], + "productName": ext["productName"], + "active": ext["_active"], + "description": ext["description"], + "version": ext["version"] + }) }) - }) - .collect(), + .collect(), + Err(error) => { + 
log::error!("Failed to parse extensions.json: {error}"); + vec![] + } + }, Err(error) => { - log::error!("Failed to parse extensions.json: {}", error); + log::error!("Failed to read extensions.json: {error}"); vec![] } - }, - Err(error) => { - log::error!("Failed to read extensions.json: {}", error); - vec![] - } - }; - return contents; + }; + return contents; + } } diff --git a/src-tauri/src/core/filesystem/tests.rs b/src-tauri/src/core/filesystem/tests.rs index b4e96e994..b89b834d6 100644 --- a/src-tauri/src/core/filesystem/tests.rs +++ b/src-tauri/src/core/filesystem/tests.rs @@ -9,7 +9,7 @@ fn test_rm() { let app = mock_app(); let path = "test_rm_dir"; fs::create_dir_all(get_jan_data_folder_path(app.handle().clone()).join(path)).unwrap(); - let args = vec![format!("file://{}", path).to_string()]; + let args = vec![format!("file://{path}").to_string()]; let result = rm(app.handle().clone(), args); assert!(result.is_ok()); assert!(!get_jan_data_folder_path(app.handle().clone()) @@ -21,7 +21,7 @@ fn test_rm() { fn test_mkdir() { let app = mock_app(); let path = "test_mkdir_dir"; - let args = vec![format!("file://{}", path).to_string()]; + let args = vec![format!("file://{path}").to_string()]; let result = mkdir(app.handle().clone(), args); assert!(result.is_ok()); assert!(get_jan_data_folder_path(app.handle().clone()) @@ -39,7 +39,7 @@ fn test_join_path() { assert_eq!( result, get_jan_data_folder_path(app.handle().clone()) - .join(&format!("test_dir{}test_file", std::path::MAIN_SEPARATOR)) + .join(format!("test_dir{}test_file", std::path::MAIN_SEPARATOR)) .to_string_lossy() .to_string() ); diff --git a/src-tauri/src/core/mcp/commands.rs b/src-tauri/src/core/mcp/commands.rs index a86db598e..6eb6dab40 100644 --- a/src-tauri/src/core/mcp/commands.rs +++ b/src-tauri/src/core/mcp/commands.rs @@ -30,28 +30,28 @@ pub async fn activate_mcp_server( #[tauri::command] pub async fn deactivate_mcp_server(state: State<'_, AppState>, name: String) -> Result<(), String> { - log::info!("Deactivating MCP server: {}", name); + log::info!("Deactivating MCP server: {name}"); // First, mark server as manually deactivated to prevent restart // Remove from active servers list to prevent restart { let mut active_servers = state.mcp_active_servers.lock().await; active_servers.remove(&name); - log::info!("Removed MCP server {} from active servers list", name); + log::info!("Removed MCP server {name} from active servers list"); } // Mark as not successfully connected to prevent restart logic { let mut connected = state.mcp_successfully_connected.lock().await; connected.insert(name.clone(), false); - log::info!("Marked MCP server {} as not successfully connected", name); + log::info!("Marked MCP server {name} as not successfully connected"); } // Reset restart count { let mut counts = state.mcp_restart_counts.lock().await; counts.remove(&name); - log::info!("Reset restart count for MCP server {}", name); + log::info!("Reset restart count for MCP server {name}"); } // Now remove and stop the server @@ -60,7 +60,7 @@ pub async fn deactivate_mcp_server(state: State<'_, AppState>, name: String) -> let service = servers_map .remove(&name) - .ok_or_else(|| format!("Server {} not found", name))?; + .ok_or_else(|| format!("Server {name} not found"))?; // Release the lock before calling cancel drop(servers_map); @@ -89,7 +89,7 @@ pub async fn restart_mcp_servers(app: AppHandle, state: State<'_, restart_active_mcp_servers(&app, servers).await?; app.emit("mcp-update", "MCP servers updated") - .map_err(|e| format!("Failed to 
emit event: {}", e))?; + .map_err(|e| format!("Failed to emit event: {e}"))?; Ok(()) } @@ -110,9 +110,7 @@ pub async fn reset_mcp_restart_count( let old_count = *count; *count = 0; log::info!( - "MCP server {} restart count reset from {} to 0.", - server_name, - old_count + "MCP server {server_name} restart count reset from {old_count} to 0." ); Ok(()) } @@ -219,7 +217,7 @@ pub async fn call_tool( continue; // Tool not found in this server, try next } - println!("Found tool {} in server", tool_name); + println!("Found tool {tool_name} in server"); // Call the tool with timeout and cancellation support let tool_call = service.call_tool(CallToolRequestParam { @@ -234,22 +232,20 @@ pub async fn call_tool( match result { Ok(call_result) => call_result.map_err(|e| e.to_string()), Err(_) => Err(format!( - "Tool call '{}' timed out after {} seconds", - tool_name, + "Tool call '{tool_name}' timed out after {} seconds", MCP_TOOL_CALL_TIMEOUT.as_secs() )), } } _ = cancel_rx => { - Err(format!("Tool call '{}' was cancelled", tool_name)) + Err(format!("Tool call '{tool_name}' was cancelled")) } } } else { match timeout(MCP_TOOL_CALL_TIMEOUT, tool_call).await { Ok(call_result) => call_result.map_err(|e| e.to_string()), Err(_) => Err(format!( - "Tool call '{}' timed out after {} seconds", - tool_name, + "Tool call '{tool_name}' timed out after {} seconds", MCP_TOOL_CALL_TIMEOUT.as_secs() )), } @@ -264,7 +260,7 @@ pub async fn call_tool( return result; } - Err(format!("Tool {} not found", tool_name)) + Err(format!("Tool {tool_name} not found")) } /// Cancels a running tool call by its cancellation token @@ -285,10 +281,10 @@ pub async fn cancel_tool_call( if let Some(cancel_tx) = cancellations.remove(&cancellation_token) { // Send cancellation signal - ignore if receiver is already dropped let _ = cancel_tx.send(()); - println!("Tool call with token {} cancelled", cancellation_token); + println!("Tool call with token {cancellation_token} cancelled"); Ok(()) } else { - Err(format!("Cancellation token {} not found", cancellation_token)) + Err(format!("Cancellation token {cancellation_token} not found")) } } @@ -301,7 +297,7 @@ pub async fn get_mcp_configs(app: AppHandle) -> Result(app: AppHandle) -> Result(app: AppHandle, configs: String) -> Result<(), String> { let mut path = get_jan_data_folder_path(app); path.push("mcp_config.json"); - log::info!("save mcp configs, path: {:?}", path); + log::info!("save mcp configs, path: {path:?}"); fs::write(path, configs).map_err(|e| e.to_string()) } diff --git a/src-tauri/src/core/mcp/helpers.rs b/src-tauri/src/core/mcp/helpers.rs index 48c92ba2c..fe914c79a 100644 --- a/src-tauri/src/core/mcp/helpers.rs +++ b/src-tauri/src/core/mcp/helpers.rs @@ -56,22 +56,13 @@ pub fn calculate_exponential_backoff_delay(attempt: u32) -> u64 { let hash = hasher.finish(); // Convert hash to jitter value in range [-jitter_range, +jitter_range] - let jitter_offset = (hash % (jitter_range * 2)) as i64 - jitter_range as i64; - jitter_offset + (hash % (jitter_range * 2)) as i64 - jitter_range as i64 } else { 0 }; // Apply jitter while ensuring delay stays positive and within bounds - let final_delay = cmp::max( - 100, // Minimum 100ms delay - cmp::min( - MCP_MAX_RESTART_DELAY_MS, - (capped_delay as i64 + jitter) as u64, - ), - ); - - final_delay + ((capped_delay as i64 + jitter) as u64).clamp(100, MCP_MAX_RESTART_DELAY_MS) } /// Runs MCP commands by reading configuration from a JSON file and initializing servers @@ -135,9 +126,7 @@ pub async fn run_mcp_commands( // If initial startup 
failed, we still want to continue with other servers if let Err(e) = &result { log::error!( - "Initial startup failed for MCP server {}: {}", - name_clone, - e + "Initial startup failed for MCP server {name_clone}: {e}" ); } @@ -155,25 +144,23 @@ pub async fn run_mcp_commands( match handle.await { Ok((name, result)) => match result { Ok(_) => { - log::info!("MCP server {} initialized successfully", name); + log::info!("MCP server {name} initialized successfully"); successful_count += 1; } Err(e) => { - log::error!("MCP server {} failed to initialize: {}", name, e); + log::error!("MCP server {name} failed to initialize: {e}"); failed_count += 1; } }, Err(e) => { - log::error!("Failed to join startup task: {}", e); + log::error!("Failed to join startup task: {e}"); failed_count += 1; } } } log::info!( - "MCP server initialization complete: {} successful, {} failed", - successful_count, - failed_count + "MCP server initialization complete: {successful_count} successful, {failed_count} failed" ); Ok(()) @@ -184,7 +171,7 @@ pub async fn monitor_mcp_server_handle( servers_state: SharedMcpServers, name: String, ) -> Option { - log::info!("Monitoring MCP server {} health", name); + log::info!("Monitoring MCP server {name} health"); // Monitor server health with periodic checks loop { @@ -202,17 +189,17 @@ pub async fn monitor_mcp_server_handle( true } Ok(Err(e)) => { - log::warn!("MCP server {} health check failed: {}", name, e); + log::warn!("MCP server {name} health check failed: {e}"); false } Err(_) => { - log::warn!("MCP server {} health check timed out", name); + log::warn!("MCP server {name} health check timed out"); false } } } else { // Server was removed from HashMap (e.g., by deactivate_mcp_server) - log::info!("MCP server {} no longer in running services", name); + log::info!("MCP server {name} no longer in running services"); return Some(rmcp::service::QuitReason::Closed); } }; @@ -220,8 +207,7 @@ pub async fn monitor_mcp_server_handle( if !health_check_result { // Server failed health check - remove it and return log::error!( - "MCP server {} failed health check, removing from active servers", - name + "MCP server {name} failed health check, removing from active servers" ); let mut servers = servers_state.lock().await; if let Some(service) = servers.remove(&name) { @@ -262,7 +248,7 @@ pub async fn start_mcp_server_with_restart( let max_restarts = max_restarts.unwrap_or(5); // Try the first start attempt and return its result - log::info!("Starting MCP server {} (Initial attempt)", name); + log::info!("Starting MCP server {name} (Initial attempt)"); let first_start_result = schedule_mcp_start_task( app.clone(), servers_state.clone(), @@ -273,7 +259,7 @@ pub async fn start_mcp_server_with_restart( match first_start_result { Ok(_) => { - log::info!("MCP server {} started successfully on first attempt", name); + log::info!("MCP server {name} started successfully on first attempt"); reset_restart_count(&restart_counts, &name).await; // Check if server was marked as successfully connected (passed verification) @@ -298,18 +284,15 @@ pub async fn start_mcp_server_with_restart( Ok(()) } else { // Server failed verification, don't monitor for restarts - log::error!("MCP server {} failed verification after startup", name); + log::error!("MCP server {name} failed verification after startup"); Err(format!( - "MCP server {} failed verification after startup", - name + "MCP server {name} failed verification after startup" )) } } Err(e) => { log::error!( - "Failed to start MCP server {} on first 
attempt: {}", - name, - e + "Failed to start MCP server {name} on first attempt: {e}" ); Err(e) } @@ -336,9 +319,7 @@ pub async fn start_restart_loop( if current_restart_count > max_restarts { log::error!( - "MCP server {} reached maximum restart attempts ({}). Giving up.", - name, - max_restarts + "MCP server {name} reached maximum restart attempts ({max_restarts}). Giving up." ); if let Err(e) = app.emit( "mcp_max_restarts_reached", @@ -353,19 +334,13 @@ pub async fn start_restart_loop( } log::info!( - "Restarting MCP server {} (Attempt {}/{})", - name, - current_restart_count, - max_restarts + "Restarting MCP server {name} (Attempt {current_restart_count}/{max_restarts})" ); // Calculate exponential backoff delay let delay_ms = calculate_exponential_backoff_delay(current_restart_count); log::info!( - "Waiting {}ms before restart attempt {} for MCP server {}", - delay_ms, - current_restart_count, - name + "Waiting {delay_ms}ms before restart attempt {current_restart_count} for MCP server {name}" ); sleep(Duration::from_millis(delay_ms)).await; @@ -380,7 +355,7 @@ pub async fn start_restart_loop( match start_result { Ok(_) => { - log::info!("MCP server {} restarted successfully.", name); + log::info!("MCP server {name} restarted successfully."); // Check if server passed verification (was marked as successfully connected) let passed_verification = { @@ -390,8 +365,7 @@ pub async fn start_restart_loop( if !passed_verification { log::error!( - "MCP server {} failed verification after restart - stopping permanently", - name + "MCP server {name} failed verification after restart - stopping permanently" ); break; } @@ -402,9 +376,7 @@ pub async fn start_restart_loop( if let Some(count) = counts.get_mut(&name) { if *count > 0 { log::info!( - "MCP server {} restarted successfully, resetting restart count from {} to 0.", - name, - *count + "MCP server {name} restarted successfully, resetting restart count from {count} to 0." 
); *count = 0; } @@ -415,7 +387,7 @@ pub async fn start_restart_loop( let quit_reason = monitor_mcp_server_handle(servers_state.clone(), name.clone()).await; - log::info!("MCP server {} quit with reason: {:?}", name, quit_reason); + log::info!("MCP server {name} quit with reason: {quit_reason:?}"); // Check if server was marked as successfully connected let was_connected = { @@ -426,8 +398,7 @@ pub async fn start_restart_loop( // Only continue restart loop if server was previously connected if !was_connected { log::error!( - "MCP server {} failed before establishing successful connection - stopping permanently", - name + "MCP server {name} failed before establishing successful connection - stopping permanently" ); break; } @@ -435,11 +406,11 @@ pub async fn start_restart_loop( // Determine if we should restart based on quit reason let should_restart = match quit_reason { Some(reason) => { - log::warn!("MCP server {} terminated unexpectedly: {:?}", name, reason); + log::warn!("MCP server {name} terminated unexpectedly: {reason:?}"); true } None => { - log::info!("MCP server {} was manually stopped - not restarting", name); + log::info!("MCP server {name} was manually stopped - not restarting"); false } }; @@ -450,7 +421,7 @@ pub async fn start_restart_loop( // Continue the loop for another restart attempt } Err(e) => { - log::error!("Failed to restart MCP server {}: {}", name, e); + log::error!("Failed to restart MCP server {name}: {e}"); // Check if server was marked as successfully connected before let was_connected = { @@ -461,8 +432,7 @@ pub async fn start_restart_loop( // Only continue restart attempts if server was previously connected if !was_connected { log::error!( - "MCP server {} failed restart and was never successfully connected - stopping permanently", - name + "MCP server {name} failed restart and was never successfully connected - stopping permanently" ); break; } @@ -526,10 +496,13 @@ async fn schedule_mcp_start_task( client_info: Implementation { name: "Jan Streamable Client".to_string(), version: "0.0.1".to_string(), + title: None, + website_url: None, + icons: None, }, }; let client = client_info.serve(transport).await.inspect_err(|e| { - log::error!("client error: {:?}", e); + log::error!("client error: {e:?}"); }); match client { @@ -545,12 +518,12 @@ async fn schedule_mcp_start_task( let app_state = app.state::(); let mut connected = app_state.mcp_successfully_connected.lock().await; connected.insert(name.clone(), true); - log::info!("Marked MCP server {} as successfully connected", name); + log::info!("Marked MCP server {name} as successfully connected"); } } Err(e) => { - log::error!("Failed to connect to server: {}", e); - return Err(format!("Failed to connect to server: {}", e)); + log::error!("Failed to connect to server: {e}"); + return Err(format!("Failed to connect to server: {e}")); } } } else if config_params.transport_type.as_deref() == Some("sse") && config_params.url.is_some() @@ -587,8 +560,8 @@ async fn schedule_mcp_start_task( ) .await .map_err(|e| { - log::error!("transport error: {:?}", e); - format!("Failed to start SSE transport: {}", e) + log::error!("transport error: {e:?}"); + format!("Failed to start SSE transport: {e}") })?; let client_info = ClientInfo { @@ -597,10 +570,13 @@ async fn schedule_mcp_start_task( client_info: Implementation { name: "Jan SSE Client".to_string(), version: "0.0.1".to_string(), + title: None, + website_url: None, + icons: None, }, }; let client = client_info.serve(transport).await.map_err(|e| { - log::error!("client 
error: {:?}", e); + log::error!("client error: {e:?}"); e.to_string() }); @@ -617,12 +593,12 @@ async fn schedule_mcp_start_task( let app_state = app.state::(); let mut connected = app_state.mcp_successfully_connected.lock().await; connected.insert(name.clone(), true); - log::info!("Marked MCP server {} as successfully connected", name); + log::info!("Marked MCP server {name} as successfully connected"); } } Err(e) => { - log::error!("Failed to connect to server: {}", e); - return Err(format!("Failed to connect to server: {}", e)); + log::error!("Failed to connect to server: {e}"); + return Err(format!("Failed to connect to server: {e}")); } } } else { @@ -639,7 +615,7 @@ async fn schedule_mcp_start_task( cache_dir.push(".npx"); cmd = Command::new(bun_x_path.display().to_string()); cmd.arg("x"); - cmd.env("BUN_INSTALL", cache_dir.to_str().unwrap().to_string()); + cmd.env("BUN_INSTALL", cache_dir.to_str().unwrap()); } let uv_path = if cfg!(windows) { @@ -654,7 +630,7 @@ async fn schedule_mcp_start_task( cmd = Command::new(uv_path); cmd.arg("tool"); cmd.arg("run"); - cmd.env("UV_CACHE_DIR", cache_dir.to_str().unwrap().to_string()); + cmd.env("UV_CACHE_DIR", cache_dir.to_str().unwrap()); } #[cfg(windows)] { @@ -726,8 +702,7 @@ async fn schedule_mcp_start_task( if !server_still_running { return Err(format!( - "MCP server {} quit immediately after starting", - name + "MCP server {name} quit immediately after starting" )); } // Mark server as successfully connected (for restart policy) @@ -735,7 +710,7 @@ async fn schedule_mcp_start_task( let app_state = app.state::(); let mut connected = app_state.mcp_successfully_connected.lock().await; connected.insert(name.clone(), true); - log::info!("Marked MCP server {} as successfully connected", name); + log::info!("Marked MCP server {name} as successfully connected"); } } Ok(()) @@ -792,7 +767,7 @@ pub async fn restart_active_mcp_servers( ); for (name, config) in active_servers.iter() { - log::info!("Restarting MCP server: {}", name); + log::info!("Restarting MCP server: {name}"); // Start server with restart monitoring - spawn async task let app_clone = app.clone(); @@ -891,9 +866,7 @@ pub async fn spawn_server_monitoring_task( monitor_mcp_server_handle(servers_clone.clone(), name_clone.clone()).await; log::info!( - "MCP server {} quit with reason: {:?}", - name_clone, - quit_reason + "MCP server {name_clone} quit with reason: {quit_reason:?}" ); // Check if we should restart based on connection status and quit reason @@ -928,8 +901,7 @@ pub async fn should_restart_server( // Only restart if server was previously connected if !was_connected { log::error!( - "MCP server {} failed before establishing successful connection - stopping permanently", - name + "MCP server {name} failed before establishing successful connection - stopping permanently" ); return false; } @@ -937,11 +909,11 @@ pub async fn should_restart_server( // Determine if we should restart based on quit reason match quit_reason { Some(reason) => { - log::warn!("MCP server {} terminated unexpectedly: {:?}", name, reason); + log::warn!("MCP server {name} terminated unexpectedly: {reason:?}"); true } None => { - log::info!("MCP server {} was manually stopped - not restarting", name); + log::info!("MCP server {name} was manually stopped - not restarting"); false } } diff --git a/src-tauri/src/core/mcp/tests.rs b/src-tauri/src/core/mcp/tests.rs index d973ce647..71967cd96 100644 --- a/src-tauri/src/core/mcp/tests.rs +++ b/src-tauri/src/core/mcp/tests.rs @@ -70,7 +70,7 @@ fn 
test_add_server_config_new_file() { Some("mcp_config_test_new.json"), ); - assert!(result.is_ok(), "Failed to add server config: {:?}", result); + assert!(result.is_ok(), "Failed to add server config: {result:?}"); // Verify the config was added correctly let config_content = std::fs::read_to_string(&config_path) @@ -128,7 +128,7 @@ fn test_add_server_config_existing_servers() { Some("mcp_config_test_existing.json"), ); - assert!(result.is_ok(), "Failed to add server config: {:?}", result); + assert!(result.is_ok(), "Failed to add server config: {result:?}"); // Verify both servers exist let config_content = std::fs::read_to_string(&config_path) diff --git a/src-tauri/src/core/server/proxy.rs b/src-tauri/src/core/server/proxy.rs index 12398ac02..b832b03a2 100644 --- a/src-tauri/src/core/server/proxy.rs +++ b/src-tauri/src/core/server/proxy.rs @@ -67,7 +67,7 @@ async fn proxy_request( .any(|&method| method.eq_ignore_ascii_case(requested_method)); if !method_allowed { - log::warn!("CORS preflight: Method '{}' not allowed", requested_method); + log::warn!("CORS preflight: Method '{requested_method}' not allowed"); return Ok(Response::builder() .status(StatusCode::METHOD_NOT_ALLOWED) .body(Body::from("Method not allowed")) @@ -80,14 +80,12 @@ async fn proxy_request( let is_trusted = if is_whitelisted_path { log::debug!( - "CORS preflight: Bypassing host check for whitelisted path: {}", - request_path + "CORS preflight: Bypassing host check for whitelisted path: {request_path}" ); true } else if !host.is_empty() { log::debug!( - "CORS preflight: Host is '{}', trusted hosts: {:?}", - host, + "CORS preflight: Host is '{host}', trusted hosts: {:?}", &config.trusted_hosts ); is_valid_host(host, &config.trusted_hosts) @@ -98,9 +96,7 @@ async fn proxy_request( if !is_trusted { log::warn!( - "CORS preflight: Host '{}' not trusted for path '{}'", - host, - request_path + "CORS preflight: Host '{host}' not trusted for path '{request_path}'" ); return Ok(Response::builder() .status(StatusCode::FORBIDDEN) @@ -158,8 +154,7 @@ async fn proxy_request( if !headers_valid { log::warn!( - "CORS preflight: Some requested headers not allowed: {}", - requested_headers + "CORS preflight: Some requested headers not allowed: {requested_headers}" ); return Ok(Response::builder() .status(StatusCode::FORBIDDEN) @@ -186,9 +181,7 @@ async fn proxy_request( } log::debug!( - "CORS preflight response: host_trusted={}, origin='{}'", - is_trusted, - origin + "CORS preflight response: host_trusted={is_trusted}, origin='{origin}'" ); return Ok(response.body(Body::empty()).unwrap()); } @@ -252,7 +245,7 @@ async fn proxy_request( .unwrap()); } } else { - log::debug!("Bypassing host validation for whitelisted path: {}", path); + log::debug!("Bypassing host validation for whitelisted path: {path}"); } if !is_whitelisted_path && !config.proxy_api_key.is_empty() { @@ -285,8 +278,7 @@ async fn proxy_request( } } else if is_whitelisted_path { log::debug!( - "Bypassing authorization check for whitelisted path: {}", - path + "Bypassing authorization check for whitelisted path: {path}" ); } @@ -312,8 +304,7 @@ async fn proxy_request( | (hyper::Method::POST, "/completions") | (hyper::Method::POST, "/embeddings") => { log::debug!( - "Handling POST request to {} requiring model lookup in body", - destination_path + "Handling POST request to {destination_path} requiring model lookup in body", ); let body_bytes = match hyper::body::to_bytes(body).await { Ok(bytes) => bytes, @@ -336,13 +327,12 @@ async fn proxy_request( match 
serde_json::from_slice::(&body_bytes) { Ok(json_body) => { if let Some(model_id) = json_body.get("model").and_then(|v| v.as_str()) { - log::debug!("Extracted model_id: {}", model_id); + log::debug!("Extracted model_id: {model_id}"); let sessions_guard = sessions.lock().await; if sessions_guard.is_empty() { log::warn!( - "Request for model '{}' but no models are running.", - model_id + "Request for model '{model_id}' but no models are running." ); let mut error_response = Response::builder().status(StatusCode::SERVICE_UNAVAILABLE); @@ -363,9 +353,9 @@ async fn proxy_request( { target_port = Some(session.info.port); session_api_key = Some(session.info.api_key.clone()); - log::debug!("Found session for model_id {}", model_id,); + log::debug!("Found session for model_id {model_id}"); } else { - log::warn!("No running session found for model_id: {}", model_id); + log::warn!("No running session found for model_id: {model_id}"); let mut error_response = Response::builder().status(StatusCode::NOT_FOUND); error_response = add_cors_headers_with_host_and_origin( @@ -376,15 +366,13 @@ async fn proxy_request( ); return Ok(error_response .body(Body::from(format!( - "No running session found for model '{}'", - model_id + "No running session found for model '{model_id}'" ))) .unwrap()); } } else { log::warn!( - "POST body for {} is missing 'model' field or it's not a string", - destination_path + "POST body for {destination_path} is missing 'model' field or it's not a string" ); let mut error_response = Response::builder().status(StatusCode::BAD_REQUEST); @@ -401,9 +389,7 @@ async fn proxy_request( } Err(e) => { log::warn!( - "Failed to parse POST body for {} as JSON: {}", - destination_path, - e + "Failed to parse POST body for {destination_path} as JSON: {e}" ); let mut error_response = Response::builder().status(StatusCode::BAD_REQUEST); error_response = add_cors_headers_with_host_and_origin( @@ -535,7 +521,7 @@ async fn proxy_request( let is_explicitly_whitelisted_get = method == hyper::Method::GET && whitelisted_paths.contains(&destination_path.as_str()); if is_explicitly_whitelisted_get { - log::debug!("Handled whitelisted GET path: {}", destination_path); + log::debug!("Handled whitelisted GET path: {destination_path}"); let mut error_response = Response::builder().status(StatusCode::NOT_FOUND); error_response = add_cors_headers_with_host_and_origin( error_response, @@ -546,9 +532,7 @@ async fn proxy_request( return Ok(error_response.body(Body::from("Not Found")).unwrap()); } else { log::warn!( - "Unhandled method/path for dynamic routing: {} {}", - method, - destination_path + "Unhandled method/path for dynamic routing: {method} {destination_path}" ); let mut error_response = Response::builder().status(StatusCode::NOT_FOUND); error_response = add_cors_headers_with_host_and_origin( @@ -581,7 +565,7 @@ async fn proxy_request( } }; - let upstream_url = format!("http://127.0.0.1:{}{}", port, destination_path); + let upstream_url = format!("http://127.0.0.1:{port}{destination_path}"); let mut outbound_req = client.request(method.clone(), &upstream_url); @@ -593,13 +577,14 @@ async fn proxy_request( if let Some(key) = session_api_key { log::debug!("Adding session Authorization header"); - outbound_req = outbound_req.header("Authorization", format!("Bearer {}", key)); + outbound_req = outbound_req.header("Authorization", format!("Bearer {key}")); } else { log::debug!("No session API key available for this request"); } let outbound_req_with_body = if let Some(bytes) = buffered_body { - 
log::debug!("Sending buffered body ({} bytes)", bytes.len()); + let bytes_len = bytes.len(); + log::debug!("Sending buffered body ({bytes_len} bytes)"); outbound_req.body(bytes) } else { log::error!("Internal logic error: Request reached proxy stage without a buffered body."); @@ -618,7 +603,7 @@ async fn proxy_request( match outbound_req_with_body.send().await { Ok(response) => { let status = response.status(); - log::debug!("Received response with status: {}", status); + log::debug!("Received response with status: {status}"); let mut builder = Response::builder().status(status); @@ -648,7 +633,7 @@ async fn proxy_request( } } Err(e) => { - log::error!("Stream error: {}", e); + log::error!("Stream error: {e}"); break; } } @@ -659,8 +644,8 @@ async fn proxy_request( Ok(builder.body(body).unwrap()) } Err(e) => { - let error_msg = format!("Proxy request to model failed: {}", e); - log::error!("{}", error_msg); + let error_msg = format!("Proxy request to model failed: {e}"); + log::error!("{error_msg}"); let mut error_response = Response::builder().status(StatusCode::BAD_GATEWAY); error_response = add_cors_headers_with_host_and_origin( error_response, @@ -675,14 +660,12 @@ async fn proxy_request( fn add_cors_headers_with_host_and_origin( builder: hyper::http::response::Builder, - host: &str, + _host: &str, origin: &str, - trusted_hosts: &[Vec], + _trusted_hosts: &[Vec], ) -> hyper::http::response::Builder { let mut builder = builder; - let allow_origin_header = if !origin.is_empty() && is_valid_host(host, trusted_hosts) { - origin.to_string() - } else if !origin.is_empty() { + let allow_origin_header = if !origin.is_empty() { origin.to_string() } else { "*".to_string() @@ -706,6 +689,7 @@ pub async fn is_server_running(server_handle: Arc>>) handle_guard.is_some() } +#[allow(clippy::too_many_arguments)] pub async fn start_server( server_handle: Arc>>, sessions: Arc>>, @@ -721,9 +705,9 @@ pub async fn start_server( return Err("Server is already running".into()); } - let addr: SocketAddr = format!("{}:{}", host, port) + let addr: SocketAddr = format!("{host}:{port}") .parse() - .map_err(|e| format!("Invalid address: {}", e))?; + .map_err(|e| format!("Invalid address: {e}"))?; let config = ProxyConfig { prefix, @@ -752,15 +736,15 @@ pub async fn start_server( let server = match Server::try_bind(&addr) { Ok(builder) => builder.serve(make_svc), Err(e) => { - log::error!("Failed to bind to {}: {}", addr, e); + log::error!("Failed to bind to {addr}: {e}"); return Err(Box::new(e)); } }; - log::info!("Jan API server started on http://{}", addr); + log::info!("Jan API server started on http://{addr}"); let server_task = tokio::spawn(async move { if let Err(e) = server.await { - log::error!("Server error: {}", e); + log::error!("Server error: {e}"); return Err(Box::new(e) as Box); } Ok(()) @@ -768,7 +752,7 @@ pub async fn start_server( *handle_guard = Some(server_task); let actual_port = addr.port(); - log::info!("Jan API server started successfully on port {}", actual_port); + log::info!("Jan API server started successfully on port {actual_port}"); Ok(actual_port) } diff --git a/src-tauri/src/core/setup.rs b/src-tauri/src/core/setup.rs index 38eca440e..7ba8f2f74 100644 --- a/src-tauri/src/core/setup.rs +++ b/src-tauri/src/core/setup.rs @@ -7,7 +7,7 @@ use std::{ }; use tar::Archive; use tauri::{ - App, Emitter, Manager, Runtime, Wry + App, Emitter, Manager, Runtime, Wry, WindowEvent }; #[cfg(desktop)] @@ -24,6 +24,13 @@ use super::{ }; pub fn install_extensions(app: tauri::AppHandle, force: bool) -> 
Result<(), String> { + // Skip extension installation on mobile platforms + // Mobile uses pre-bundled extensions loaded via MobileCoreService in the frontend + #[cfg(any(target_os = "android", target_os = "ios"))] + { + return Ok(()); + } + let extensions_path = get_jan_extensions_path(app.clone()); let pre_install_path = app .path() @@ -38,7 +45,7 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> if std::env::var("IS_CLEAN").is_ok() { clean_up = true; } - log::info!("Installing extensions. Clean up: {}", clean_up); + log::info!("Installing extensions. Clean up: {clean_up}"); if !clean_up && extensions_path.exists() { return Ok(()); } @@ -68,7 +75,7 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> let entry = entry.map_err(|e| e.to_string())?; let path = entry.path(); - if path.extension().map_or(false, |ext| ext == "tgz") { + if path.extension().is_some_and(|ext| ext == "tgz") { let tar_gz = File::open(&path).map_err(|e| e.to_string())?; let gz_decoder = GzDecoder::new(tar_gz); let mut archive = Archive::new(gz_decoder); @@ -134,7 +141,7 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> extensions_list.push(new_extension); - log::info!("Installed extension to {:?}", extension_dir); + log::info!("Installed extension to {extension_dir:?}"); } } fs::write( @@ -154,7 +161,7 @@ pub fn migrate_mcp_servers( let mcp_version = store .get("mcp_version") .and_then(|v| v.as_i64()) - .unwrap_or_else(|| 0); + .unwrap_or(0); if mcp_version < 1 { log::info!("Migrating MCP schema version 1"); let result = add_server_config( @@ -168,7 +175,7 @@ pub fn migrate_mcp_servers( }), ); if let Err(e) = result { - log::error!("Failed to add server config: {}", e); + log::error!("Failed to add server config: {e}"); } } store.set("mcp_version", 1); @@ -212,7 +219,7 @@ pub fn setup_mcp(app: &App) { let app_handle = app.handle().clone(); tauri::async_runtime::spawn(async move { if let Err(e) = run_mcp_commands(&app_handle, servers).await { - log::error!("Failed to run mcp commands: {}", e); + log::error!("Failed to run mcp commands: {e}"); } app_handle .emit("mcp-update", "MCP servers updated") @@ -258,8 +265,37 @@ pub fn setup_tray(app: &App) -> tauri::Result { app.exit(0); } other => { - println!("menu item {} not handled", other); + println!("menu item {other} not handled"); } }) .build(app) } + +pub fn setup_theme_listener(app: &App) -> tauri::Result<()> { + // Setup theme listener for main window + if let Some(window) = app.get_webview_window("main") { + setup_window_theme_listener(app.handle().clone(), window); + } + + Ok(()) +} + +fn setup_window_theme_listener( + app_handle: tauri::AppHandle, + window: tauri::WebviewWindow, +) { + let window_label = window.label().to_string(); + let app_handle_clone = app_handle.clone(); + + window.on_window_event(move |event| { + if let WindowEvent::ThemeChanged(theme) = event { + let theme_str = match theme { + tauri::Theme::Light => "light", + tauri::Theme::Dark => "dark", + _ => "auto", + }; + log::info!("System theme changed to: {} for window: {}", theme_str, window_label); + let _ = app_handle_clone.emit("theme-changed", theme_str); + } + }); +} diff --git a/src-tauri/src/core/system/commands.rs b/src-tauri/src/core/system/commands.rs index f5e9d7618..9c72fd4da 100644 --- a/src-tauri/src/core/system/commands.rs +++ b/src-tauri/src/core/system/commands.rs @@ -18,12 +18,12 @@ pub fn factory_reset(app_handle: tauri::AppHandle, state: State<' let windows = app_handle.webview_windows(); for (label, window) in 
windows.iter() { window.close().unwrap_or_else(|_| { - log::warn!("Failed to close window: {:?}", label); + log::warn!("Failed to close window: {label:?}"); }); } } let data_folder = get_jan_data_folder_path(app_handle.clone()); - log::info!("Factory reset, removing data folder: {:?}", data_folder); + log::info!("Factory reset, removing data folder: {data_folder:?}"); tauri::async_runtime::block_on(async { clean_up_mcp_servers(state.clone()).await; @@ -31,7 +31,7 @@ pub fn factory_reset(app_handle: tauri::AppHandle, state: State<' if data_folder.exists() { if let Err(e) = fs::remove_dir_all(&data_folder) { - log::error!("Failed to remove data folder: {}", e); + log::error!("Failed to remove data folder: {e}"); return; } } @@ -59,17 +59,17 @@ pub fn open_app_directory(app: AppHandle) { if cfg!(target_os = "windows") { std::process::Command::new("explorer") .arg(app_path) - .spawn() + .status() .expect("Failed to open app directory"); } else if cfg!(target_os = "macos") { std::process::Command::new("open") .arg(app_path) - .spawn() + .status() .expect("Failed to open app directory"); } else { std::process::Command::new("xdg-open") .arg(app_path) - .spawn() + .status() .expect("Failed to open app directory"); } } @@ -80,17 +80,17 @@ pub fn open_file_explorer(path: String) { if cfg!(target_os = "windows") { std::process::Command::new("explorer") .arg(path) - .spawn() + .status() .expect("Failed to open file explorer"); } else if cfg!(target_os = "macos") { std::process::Command::new("open") .arg(path) - .spawn() + .status() .expect("Failed to open file explorer"); } else { std::process::Command::new("xdg-open") .arg(path) - .spawn() + .status() .expect("Failed to open file explorer"); } } @@ -102,7 +102,7 @@ pub async fn read_logs(app: AppHandle) -> Result let content = fs::read_to_string(log_path).map_err(|e| e.to_string())?; Ok(content) } else { - Err(format!("Log file not found")) + Err("Log file not found".to_string()) } } @@ -112,8 +112,9 @@ pub fn is_library_available(library: &str) -> bool { match unsafe { libloading::Library::new(library) } { Ok(_) => true, Err(e) => { - log::info!("Library {} is not available: {}", library, e); + log::info!("Library {library} is not available: {e}"); false } } } + diff --git a/src-tauri/src/core/threads/commands.rs b/src-tauri/src/core/threads/commands.rs index 44ac1964d..07bf46094 100644 --- a/src-tauri/src/core/threads/commands.rs +++ b/src-tauri/src/core/threads/commands.rs @@ -3,8 +3,11 @@ use std::io::Write; use tauri::Runtime; use uuid::Uuid; +#[cfg(any(target_os = "android", target_os = "ios"))] +use super::db; use super::helpers::{ - get_lock_for_thread, read_messages_from_file, update_thread_metadata, write_messages_to_file, + get_lock_for_thread, read_messages_from_file, should_use_sqlite, update_thread_metadata, + write_messages_to_file, }; use super::{ constants::THREADS_FILE, @@ -14,12 +17,19 @@ use super::{ }, }; -/// Lists all threads by reading their metadata from the threads directory. +/// Lists all threads by reading their metadata from the threads directory or database. /// Returns a vector of thread metadata as JSON values. 
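The thread commands that follow all share one dispatch shape: a runtime `should_use_sqlite()` check (a plain `cfg!` test) paired with an inner `#[cfg(...)]` block, so the SQLite path is only compiled into Android/iOS builds while desktop keeps the existing file-based storage. A minimal, self-contained sketch of that shape, with `load_from_db` and `load_from_files` as hypothetical stand-ins for the real backends:

use serde_json::json;

// Compile-time platform check; mirrors the `should_use_sqlite` helper added in
// threads/helpers.rs later in this diff.
fn should_use_sqlite() -> bool {
    cfg!(any(target_os = "android", target_os = "ios"))
}

// Hypothetical stand-ins for the real backends (SQLite on mobile, JSONL files on desktop).
#[allow(dead_code)] // only referenced on Android/iOS builds in this sketch
fn load_from_db() -> Result<Vec<serde_json::Value>, String> {
    Ok(vec![json!({ "id": "t1", "source": "sqlite" })])
}

fn load_from_files() -> Result<Vec<serde_json::Value>, String> {
    Ok(vec![json!({ "id": "t1", "source": "files" })])
}

// One public signature per command; the mobile branch is additionally gated by
// `#[cfg(...)]` so it is compiled out of desktop binaries entirely.
fn list_threads_sketch() -> Result<Vec<serde_json::Value>, String> {
    if should_use_sqlite() {
        #[cfg(any(target_os = "android", target_os = "ios"))]
        return load_from_db();
    }
    load_from_files()
}

fn main() {
    println!("{:?}", list_threads_sketch());
}

Because `should_use_sqlite()` is a constant `cfg!` expression, the compiler can drop the unreachable branch on each platform while the command keeps a single signature for the frontend.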
#[tauri::command] pub async fn list_threads( app_handle: tauri::AppHandle, ) -> Result, String> { + if should_use_sqlite() { + // Use SQLite on mobile platforms + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_list_threads(app_handle).await; + } + + // Use file-based storage on desktop ensure_data_dirs(app_handle.clone())?; let data_dir = get_data_dir(app_handle.clone()); let mut threads = Vec::new(); @@ -38,7 +48,7 @@ pub async fn list_threads( match serde_json::from_str(&data) { Ok(thread) => threads.push(thread), Err(e) => { - println!("Failed to parse thread file: {}", e); + println!("Failed to parse thread file: {e}"); continue; // skip invalid thread files } } @@ -56,6 +66,12 @@ pub async fn create_thread( app_handle: tauri::AppHandle, mut thread: serde_json::Value, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_create_thread(app_handle, thread).await; + } + + // Use file-based storage on desktop ensure_data_dirs(app_handle.clone())?; let uuid = Uuid::new_v4().to_string(); thread["id"] = serde_json::Value::String(uuid.clone()); @@ -76,6 +92,12 @@ pub async fn modify_thread( app_handle: tauri::AppHandle, thread: serde_json::Value, ) -> Result<(), String> { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_modify_thread(app_handle, thread).await; + } + + // Use file-based storage on desktop let thread_id = thread .get("id") .and_then(|id| id.as_str()) @@ -96,6 +118,12 @@ pub async fn delete_thread( app_handle: tauri::AppHandle, thread_id: String, ) -> Result<(), String> { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_delete_thread(app_handle, &thread_id).await; + } + + // Use file-based storage on desktop let thread_dir = get_thread_dir(app_handle.clone(), &thread_id); if thread_dir.exists() { let _ = fs::remove_dir_all(thread_dir); @@ -110,6 +138,12 @@ pub async fn list_messages( app_handle: tauri::AppHandle, thread_id: String, ) -> Result, String> { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_list_messages(app_handle, &thread_id).await; + } + + // Use file-based storage on desktop read_messages_from_file(app_handle, &thread_id) } @@ -120,6 +154,12 @@ pub async fn create_message( app_handle: tauri::AppHandle, mut message: serde_json::Value, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_create_message(app_handle, message).await; + } + + // Use file-based storage on desktop let thread_id = { let id = message .get("thread_id") @@ -149,7 +189,7 @@ pub async fn create_message( .map_err(|e| e.to_string())?; let data = serde_json::to_string(&message).map_err(|e| e.to_string())?; - writeln!(file, "{}", data).map_err(|e| e.to_string())?; + writeln!(file, "{data}").map_err(|e| e.to_string())?; // Explicitly flush to ensure data is written before returning file.flush().map_err(|e| e.to_string())?; @@ -166,6 +206,12 @@ pub async fn modify_message( app_handle: tauri::AppHandle, message: serde_json::Value, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_modify_message(app_handle, message).await; + } + + // Use file-based storage on desktop let thread_id = message .get("thread_id") .and_then(|v| v.as_str()) @@ -204,6 +250,12 @@ pub async fn delete_message( thread_id: String, message_id: String, ) -> Result<(), String> { + if 
should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_delete_message(app_handle, &thread_id, &message_id).await; + } + + // Use file-based storage on desktop // Acquire per-thread lock before modifying { let lock = get_lock_for_thread(&thread_id).await; @@ -227,6 +279,12 @@ pub async fn get_thread_assistant( app_handle: tauri::AppHandle, thread_id: String, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_get_thread_assistant(app_handle, &thread_id).await; + } + + // Use file-based storage on desktop let path = get_thread_metadata_path(app_handle, &thread_id); if !path.exists() { return Err("Thread not found".to_string()); @@ -234,7 +292,7 @@ pub async fn get_thread_assistant( let data = fs::read_to_string(&path).map_err(|e| e.to_string())?; let thread: serde_json::Value = serde_json::from_str(&data).map_err(|e| e.to_string())?; if let Some(assistants) = thread.get("assistants").and_then(|a| a.as_array()) { - if let Some(first) = assistants.get(0) { + if let Some(first) = assistants.first() { Ok(first.clone()) } else { Err("Assistant not found".to_string()) @@ -252,6 +310,12 @@ pub async fn create_thread_assistant( thread_id: String, assistant: serde_json::Value, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_create_thread_assistant(app_handle, &thread_id, assistant).await; + } + + // Use file-based storage on desktop let path = get_thread_metadata_path(app_handle.clone(), &thread_id); if !path.exists() { return Err("Thread not found".to_string()); @@ -277,6 +341,12 @@ pub async fn modify_thread_assistant( thread_id: String, assistant: serde_json::Value, ) -> Result { + if should_use_sqlite() { + #[cfg(any(target_os = "android", target_os = "ios"))] + return db::db_modify_thread_assistant(app_handle, &thread_id, assistant).await; + } + + // Use file-based storage on desktop let path = get_thread_metadata_path(app_handle.clone(), &thread_id); if !path.exists() { return Err("Thread not found".to_string()); diff --git a/src-tauri/src/core/threads/db.rs b/src-tauri/src/core/threads/db.rs new file mode 100644 index 000000000..b888b94bb --- /dev/null +++ b/src-tauri/src/core/threads/db.rs @@ -0,0 +1,397 @@ +/*! + SQLite Database Module for Mobile Thread Storage + + This module provides SQLite-based storage for threads and messages on mobile platforms. + It ensures data persistence and retrieval work correctly on Android and iOS devices. + + Note: This module is only compiled and used on mobile platforms (Android/iOS). + On desktop, the file-based storage in helpers.rs is used instead. 
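For illustration, a minimal round-trip of the blob-per-row model this module uses, where each thread is one row whose `data` column holds the full JSON document. This sketch assumes sqlx's SQLite driver with a Tokio runtime feature enabled and uses an in-memory database purely for demonstration; it is not the module's own initialization path.

use sqlx::sqlite::SqlitePoolOptions;
use sqlx::Row;

#[tokio::main]
async fn main() -> Result<(), String> {
    // In-memory database for the sketch; the real module opens jan.db on disk.
    let pool = SqlitePoolOptions::new()
        .connect("sqlite::memory:")
        .await
        .map_err(|e| e.to_string())?;

    sqlx::query("CREATE TABLE threads (id TEXT PRIMARY KEY, data TEXT NOT NULL)")
        .execute(&pool)
        .await
        .map_err(|e| e.to_string())?;

    // Store the whole thread object as a JSON string in the `data` column.
    let thread = serde_json::json!({ "id": "t1", "title": "Demo" });
    sqlx::query("INSERT INTO threads (id, data) VALUES (?1, ?2)")
        .bind("t1")
        .bind(thread.to_string())
        .execute(&pool)
        .await
        .map_err(|e| e.to_string())?;

    // Read it back and parse the blob into serde_json::Value.
    let row = sqlx::query("SELECT data FROM threads WHERE id = ?1")
        .bind("t1")
        .fetch_one(&pool)
        .await
        .map_err(|e| e.to_string())?;
    let data: String = row.get("data");
    let parsed: serde_json::Value = serde_json::from_str(&data).map_err(|e| e.to_string())?;
    assert_eq!(parsed["title"], "Demo");
    Ok(())
}

The blob-per-row layout keeps the schema stable even as the thread JSON evolves, at the cost of not being able to query individual fields without parsing.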
+*/ + +#![allow(dead_code)] // Functions only used on mobile platforms + +use serde_json::Value; +use sqlx::sqlite::{SqliteConnectOptions, SqlitePool, SqlitePoolOptions}; +use sqlx::Row; +use std::str::FromStr; +use std::sync::OnceLock; +use tauri::{AppHandle, Manager, Runtime}; +use tokio::sync::Mutex; + +const DB_NAME: &str = "jan.db"; + +/// Global database pool for mobile platforms +static DB_POOL: OnceLock>> = OnceLock::new(); + +/// Initialize database with connection pool and run migrations +pub async fn init_database(app: &AppHandle) -> Result<(), String> { + // Get app data directory + let app_data_dir = app + .path() + .app_data_dir() + .map_err(|e| format!("Failed to get app data dir: {}", e))?; + + // Ensure directory exists + std::fs::create_dir_all(&app_data_dir) + .map_err(|e| format!("Failed to create app data dir: {}", e))?; + + // Create database path + let db_path = app_data_dir.join(DB_NAME); + let db_url = format!("sqlite:{}", db_path.display()); + + log::info!("Initializing SQLite database at: {}", db_url); + + // Create connection options + let connect_options = SqliteConnectOptions::from_str(&db_url) + .map_err(|e| format!("Failed to parse connection options: {}", e))? + .create_if_missing(true); + + // Create connection pool + let pool = SqlitePoolOptions::new() + .max_connections(5) + .connect_with(connect_options) + .await + .map_err(|e| format!("Failed to create connection pool: {}", e))?; + + // Run migrations + sqlx::query( + r#" + CREATE TABLE IF NOT EXISTS threads ( + id TEXT PRIMARY KEY, + data TEXT NOT NULL, + created_at INTEGER DEFAULT (strftime('%s', 'now')), + updated_at INTEGER DEFAULT (strftime('%s', 'now')) + ); + "#, + ) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create threads table: {}", e))?; + + sqlx::query( + r#" + CREATE TABLE IF NOT EXISTS messages ( + id TEXT PRIMARY KEY, + thread_id TEXT NOT NULL, + data TEXT NOT NULL, + created_at INTEGER DEFAULT (strftime('%s', 'now')), + FOREIGN KEY (thread_id) REFERENCES threads(id) ON DELETE CASCADE + ); + "#, + ) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create messages table: {}", e))?; + + // Create indexes + sqlx::query( + "CREATE INDEX IF NOT EXISTS idx_messages_thread_id ON messages(thread_id);", + ) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create thread_id index: {}", e))?; + + sqlx::query( + "CREATE INDEX IF NOT EXISTS idx_messages_created_at ON messages(created_at);", + ) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create created_at index: {}", e))?; + + // Store pool globally + DB_POOL + .get_or_init(|| Mutex::new(None)) + .lock() + .await + .replace(pool); + + log::info!("SQLite database initialized successfully for mobile platform"); + Ok(()) +} + +/// Get database pool +async fn get_pool() -> Result { + let pool_mutex = DB_POOL + .get() + .ok_or("Database not initialized")?; + + let pool_guard = pool_mutex.lock().await; + pool_guard + .clone() + .ok_or("Database pool not available".to_string()) +} + +/// List all threads from database +pub async fn db_list_threads( + _app_handle: AppHandle, +) -> Result, String> { + let pool = get_pool().await?; + + let rows = sqlx::query("SELECT data FROM threads ORDER BY updated_at DESC") + .fetch_all(&pool) + .await + .map_err(|e| format!("Failed to list threads: {}", e))?; + + let threads: Result, _> = rows + .iter() + .map(|row| { + let data: String = row.get("data"); + serde_json::from_str(&data).map_err(|e| e.to_string()) + }) + .collect(); + + threads +} + +/// Create a new 
thread in database +pub async fn db_create_thread( + _app_handle: AppHandle, + thread: Value, +) -> Result { + let pool = get_pool().await?; + + let thread_id = thread + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing thread id")?; + + let data = serde_json::to_string(&thread).map_err(|e| e.to_string())?; + + sqlx::query("INSERT INTO threads (id, data) VALUES (?1, ?2)") + .bind(thread_id) + .bind(&data) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create thread: {}", e))?; + + Ok(thread) +} + +/// Modify an existing thread in database +pub async fn db_modify_thread( + _app_handle: AppHandle, + thread: Value, +) -> Result<(), String> { + let pool = get_pool().await?; + + let thread_id = thread + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing thread id")?; + + let data = serde_json::to_string(&thread).map_err(|e| e.to_string())?; + + sqlx::query("UPDATE threads SET data = ?1, updated_at = strftime('%s', 'now') WHERE id = ?2") + .bind(&data) + .bind(thread_id) + .execute(&pool) + .await + .map_err(|e| format!("Failed to modify thread: {}", e))?; + + Ok(()) +} + +/// Delete a thread from database +pub async fn db_delete_thread( + _app_handle: AppHandle, + thread_id: &str, +) -> Result<(), String> { + let pool = get_pool().await?; + + // Messages will be auto-deleted via CASCADE + sqlx::query("DELETE FROM threads WHERE id = ?1") + .bind(thread_id) + .execute(&pool) + .await + .map_err(|e| format!("Failed to delete thread: {}", e))?; + + Ok(()) +} + +/// List all messages for a thread from database +pub async fn db_list_messages( + _app_handle: AppHandle, + thread_id: &str, +) -> Result, String> { + let pool = get_pool().await?; + + let rows = sqlx::query( + "SELECT data FROM messages WHERE thread_id = ?1 ORDER BY created_at ASC", + ) + .bind(thread_id) + .fetch_all(&pool) + .await + .map_err(|e| format!("Failed to list messages: {}", e))?; + + let messages: Result, _> = rows + .iter() + .map(|row| { + let data: String = row.get("data"); + serde_json::from_str(&data).map_err(|e| e.to_string()) + }) + .collect(); + + messages +} + +/// Create a new message in database +pub async fn db_create_message( + _app_handle: AppHandle, + message: Value, +) -> Result { + let pool = get_pool().await?; + + let message_id = message + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing message id")?; + + let thread_id = message + .get("thread_id") + .and_then(|v| v.as_str()) + .ok_or("Missing thread_id")?; + + let data = serde_json::to_string(&message).map_err(|e| e.to_string())?; + + sqlx::query("INSERT INTO messages (id, thread_id, data) VALUES (?1, ?2, ?3)") + .bind(message_id) + .bind(thread_id) + .bind(&data) + .execute(&pool) + .await + .map_err(|e| format!("Failed to create message: {}", e))?; + + Ok(message) +} + +/// Modify an existing message in database +pub async fn db_modify_message( + _app_handle: AppHandle, + message: Value, +) -> Result { + let pool = get_pool().await?; + + let message_id = message + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing message id")?; + + let data = serde_json::to_string(&message).map_err(|e| e.to_string())?; + + sqlx::query("UPDATE messages SET data = ?1 WHERE id = ?2") + .bind(&data) + .bind(message_id) + .execute(&pool) + .await + .map_err(|e| format!("Failed to modify message: {}", e))?; + + Ok(message) +} + +/// Delete a message from database +pub async fn db_delete_message( + _app_handle: AppHandle, + _thread_id: &str, + message_id: &str, +) -> Result<(), String> { + let pool = get_pool().await?; + + 
sqlx::query("DELETE FROM messages WHERE id = ?1") + .bind(message_id) + .execute(&pool) + .await + .map_err(|e| format!("Failed to delete message: {}", e))?; + + Ok(()) +} + +/// Get thread assistant information from thread metadata +pub async fn db_get_thread_assistant( + _app_handle: AppHandle, + thread_id: &str, +) -> Result { + let pool = get_pool().await?; + + let row = sqlx::query("SELECT data FROM threads WHERE id = ?1") + .bind(thread_id) + .fetch_optional(&pool) + .await + .map_err(|e| format!("Failed to get thread: {}", e))? + .ok_or("Thread not found")?; + + let data: String = row.get("data"); + let thread: Value = serde_json::from_str(&data).map_err(|e| e.to_string())?; + + if let Some(assistants) = thread.get("assistants").and_then(|a| a.as_array()) { + assistants + .first() + .cloned() + .ok_or("Assistant not found".to_string()) + } else { + Err("Assistant not found".to_string()) + } +} + +/// Create thread assistant in database +pub async fn db_create_thread_assistant( + app_handle: AppHandle, + thread_id: &str, + assistant: Value, +) -> Result { + let pool = get_pool().await?; + + let row = sqlx::query("SELECT data FROM threads WHERE id = ?1") + .bind(thread_id) + .fetch_optional(&pool) + .await + .map_err(|e| format!("Failed to get thread: {}", e))? + .ok_or("Thread not found")?; + + let data: String = row.get("data"); + let mut thread: Value = serde_json::from_str(&data).map_err(|e| e.to_string())?; + + if let Some(assistants) = thread.get_mut("assistants").and_then(|a| a.as_array_mut()) { + assistants.push(assistant.clone()); + } else { + thread["assistants"] = Value::Array(vec![assistant.clone()]); + } + + db_modify_thread(app_handle, thread).await?; + Ok(assistant) +} + +/// Modify thread assistant in database +pub async fn db_modify_thread_assistant( + app_handle: AppHandle, + thread_id: &str, + assistant: Value, +) -> Result { + let pool = get_pool().await?; + + let row = sqlx::query("SELECT data FROM threads WHERE id = ?1") + .bind(thread_id) + .fetch_optional(&pool) + .await + .map_err(|e| format!("Failed to get thread: {}", e))? 
+ .ok_or("Thread not found")?; + + let data: String = row.get("data"); + let mut thread: Value = serde_json::from_str(&data).map_err(|e| e.to_string())?; + + let assistant_id = assistant + .get("id") + .and_then(|v| v.as_str()) + .ok_or("Missing assistant id")?; + + if let Some(assistants) = thread.get_mut("assistants").and_then(|a| a.as_array_mut()) { + if let Some(index) = assistants + .iter() + .position(|a| a.get("id").and_then(|v| v.as_str()) == Some(assistant_id)) + { + assistants[index] = assistant.clone(); + db_modify_thread(app_handle, thread).await?; + } + } + + Ok(assistant) +} diff --git a/src-tauri/src/core/threads/helpers.rs b/src-tauri/src/core/threads/helpers.rs index 76d2c2e59..1710c5767 100644 --- a/src-tauri/src/core/threads/helpers.rs +++ b/src-tauri/src/core/threads/helpers.rs @@ -13,6 +13,11 @@ use super::utils::{get_messages_path, get_thread_metadata_path}; // Global per-thread locks for message file writes pub static MESSAGE_LOCKS: OnceLock>>>> = OnceLock::new(); +/// Check if the platform should use SQLite (mobile platforms) +pub fn should_use_sqlite() -> bool { + cfg!(any(target_os = "android", target_os = "ios")) +} + /// Get a lock for a specific thread to ensure thread-safe message file operations pub async fn get_lock_for_thread(thread_id: &str) -> Arc> { let locks = MESSAGE_LOCKS.get_or_init(|| Mutex::new(HashMap::new())); @@ -33,7 +38,7 @@ pub fn write_messages_to_file( let mut file = File::create(path).map_err(|e| e.to_string())?; for msg in messages { let data = serde_json::to_string(msg).map_err(|e| e.to_string())?; - writeln!(file, "{}", data).map_err(|e| e.to_string())?; + writeln!(file, "{data}").map_err(|e| e.to_string())?; } Ok(()) } diff --git a/src-tauri/src/core/threads/mod.rs b/src-tauri/src/core/threads/mod.rs index 25225d538..99c00253e 100644 --- a/src-tauri/src/core/threads/mod.rs +++ b/src-tauri/src/core/threads/mod.rs @@ -12,6 +12,8 @@ pub mod commands; mod constants; +#[cfg(any(target_os = "android", target_os = "ios"))] +pub mod db; pub mod helpers; pub mod utils; diff --git a/src-tauri/src/core/threads/tests.rs b/src-tauri/src/core/threads/tests.rs index 8d3524d06..15c91de85 100644 --- a/src-tauri/src/core/threads/tests.rs +++ b/src-tauri/src/core/threads/tests.rs @@ -1,5 +1,7 @@ use super::commands::*; +use super::helpers::should_use_sqlite; +use futures_util::future; use serde_json::json; use std::fs; use std::path::PathBuf; @@ -16,13 +18,39 @@ fn mock_app_with_temp_data_dir() -> (tauri::App, PathBuf) { .as_nanos(); let data_dir = std::env::current_dir() .unwrap_or_else(|_| PathBuf::from(".")) - .join(format!("test-data-{:?}-{}", unique_id, timestamp)); + .join(format!("test-data-{unique_id:?}-{timestamp}")); println!("Mock app data dir: {}", data_dir.display()); // Ensure the unique test directory exists let _ = fs::create_dir_all(&data_dir); (app, data_dir) } +// Helper to create a basic thread +fn create_test_thread(title: &str) -> serde_json::Value { + json!({ + "object": "thread", + "title": title, + "assistants": [], + "created": 123, + "updated": 123, + "metadata": null + }) +} + +// Helper to create a basic message +fn create_test_message(thread_id: &str, content_text: &str) -> serde_json::Value { + json!({ + "object": "message", + "thread_id": thread_id, + "role": "user", + "content": [{"type": "text", "text": content_text}], + "status": "sent", + "created_at": 123, + "completed_at": 123, + "metadata": null + }) +} + #[tokio::test] async fn test_create_and_list_threads() { let (app, data_dir) = mock_app_with_temp_data_dir(); @@ 
-42,7 +70,7 @@ async fn test_create_and_list_threads() { // List threads let threads = list_threads(app.handle().clone()).await.unwrap(); - assert!(threads.len() > 0); + assert!(!threads.is_empty()); // Clean up let _ = fs::remove_dir_all(data_dir); @@ -88,7 +116,7 @@ async fn test_create_and_list_messages() { let messages = list_messages(app.handle().clone(), thread_id.clone()) .await .unwrap(); - assert!(messages.len() > 0, "Expected at least one message, but got none. Thread ID: {}", thread_id); + assert!(!messages.is_empty(), "Expected at least one message, but got none. Thread ID: {thread_id}"); assert_eq!(messages[0]["role"], "user"); // Clean up @@ -137,3 +165,314 @@ async fn test_create_and_get_thread_assistant() { // Clean up let _ = fs::remove_dir_all(data_dir); } + +#[test] +fn test_should_use_sqlite_platform_detection() { + // Test that should_use_sqlite returns correct value based on platform + // On desktop platforms (macOS, Linux, Windows), it should return false + // On mobile platforms (Android, iOS), it should return true + + #[cfg(any(target_os = "android", target_os = "ios"))] + { + assert!(should_use_sqlite(), "should_use_sqlite should return true on mobile platforms"); + } + + #[cfg(not(any(target_os = "android", target_os = "ios")))] + { + assert!(!should_use_sqlite(), "should_use_sqlite should return false on desktop platforms"); + } +} + +#[tokio::test] +async fn test_desktop_storage_backend() { + // This test verifies that on desktop platforms, the file-based storage is used + #[cfg(not(any(target_os = "android", target_os = "ios")))] + { + let (app, _data_dir) = mock_app_with_temp_data_dir(); + + // Create a thread + let thread = json!({ + "object": "thread", + "title": "Desktop Test Thread", + "assistants": [], + "created": 1234567890, + "updated": 1234567890, + "metadata": null + }); + + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + // Verify we can retrieve the thread (which proves file storage works) + let threads = list_threads(app.handle().clone()).await.unwrap(); + let found = threads.iter().any(|t| t["id"] == thread_id); + assert!(found, "Thread should be retrievable from file-based storage"); + + // Create a message + let message = json!({ + "object": "message", + "thread_id": thread_id, + "role": "user", + "content": [], + "status": "sent", + "created_at": 123, + "completed_at": 123, + "metadata": null + }); + + let _created_msg = create_message(app.handle().clone(), message).await.unwrap(); + + // Verify we can retrieve the message (which proves file storage works) + let messages = list_messages(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + assert_eq!(messages.len(), 1, "Message should be retrievable from file-based storage"); + + // Clean up - get the actual data directory used by the app + use super::utils::get_data_dir; + let actual_data_dir = get_data_dir(app.handle().clone()); + let _ = fs::remove_dir_all(actual_data_dir); + } +} + +#[tokio::test] +async fn test_modify_and_delete_thread() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + + // Create a thread + let thread = json!({ + "object": "thread", + "title": "Original Title", + "assistants": [], + "created": 1234567890, + "updated": 1234567890, + "metadata": null + }); + + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + // Modify the thread + let mut 
modified_thread = created.clone(); + modified_thread["title"] = json!("Modified Title"); + + modify_thread(app.handle().clone(), modified_thread.clone()) + .await + .unwrap(); + + // Verify modification by listing threads + let threads = list_threads(app.handle().clone()).await.unwrap(); + let found_thread = threads.iter().find(|t| t["id"] == thread_id); + assert!(found_thread.is_some(), "Modified thread should exist"); + assert_eq!(found_thread.unwrap()["title"], "Modified Title"); + + // Delete the thread + delete_thread(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + + // Verify deletion + #[cfg(not(any(target_os = "android", target_os = "ios")))] + { + let thread_dir = data_dir.join(&thread_id); + assert!(!thread_dir.exists(), "Thread directory should be deleted"); + } + + // Clean up + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_modify_and_delete_message() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + + // Create a thread + let thread = json!({ + "object": "thread", + "title": "Message Test Thread", + "assistants": [], + "created": 123, + "updated": 123, + "metadata": null + }); + + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + // Create a message + let message = json!({ + "object": "message", + "thread_id": thread_id, + "role": "user", + "content": [{"type": "text", "text": "Original content"}], + "status": "sent", + "created_at": 123, + "completed_at": 123, + "metadata": null + }); + + let created_msg = create_message(app.handle().clone(), message).await.unwrap(); + let message_id = created_msg["id"].as_str().unwrap().to_string(); + + // Modify the message + let mut modified_msg = created_msg.clone(); + modified_msg["content"] = json!([{"type": "text", "text": "Modified content"}]); + + modify_message(app.handle().clone(), modified_msg.clone()) + .await + .unwrap(); + + // Verify modification + let messages = list_messages(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + assert_eq!(messages.len(), 1); + assert_eq!(messages[0]["content"][0]["text"], "Modified content"); + + // Delete the message + delete_message(app.handle().clone(), thread_id.clone(), message_id.clone()) + .await + .unwrap(); + + // Verify deletion + let messages = list_messages(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + assert_eq!(messages.len(), 0, "Message should be deleted"); + + // Clean up + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_modify_thread_assistant() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + let app_handle = app.handle().clone(); + + let created = create_thread(app_handle.clone(), create_test_thread("Assistant Mod Thread")) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap(); + + let assistant = json!({ + "id": "assistant-1", + "assistant_name": "Original Assistant", + "model": {"id": "model-1", "name": "Test Model"} + }); + + create_thread_assistant(app_handle.clone(), thread_id.to_string(), assistant.clone()) + .await + .unwrap(); + + let mut modified_assistant = assistant; + modified_assistant["assistant_name"] = json!("Modified Assistant"); + + modify_thread_assistant(app_handle.clone(), thread_id.to_string(), modified_assistant) + .await + .unwrap(); + + let retrieved = get_thread_assistant(app_handle, thread_id.to_string()) + .await + .unwrap(); + assert_eq!(retrieved["assistant_name"], "Modified Assistant"); + + let _ = 
fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_thread_not_found_errors() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + let app_handle = app.handle().clone(); + let fake_thread_id = "non-existent-thread-id".to_string(); + let assistant = json!({"id": "assistant-1", "assistant_name": "Test Assistant"}); + + assert!(get_thread_assistant(app_handle.clone(), fake_thread_id.clone()).await.is_err()); + assert!(create_thread_assistant(app_handle.clone(), fake_thread_id.clone(), assistant.clone()).await.is_err()); + assert!(modify_thread_assistant(app_handle, fake_thread_id, assistant).await.is_err()); + + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_message_without_id_gets_generated() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + let app_handle = app.handle().clone(); + + let created = create_thread(app_handle.clone(), create_test_thread("Message ID Test")) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap(); + + let message = json!({"object": "message", "thread_id": thread_id, "role": "user", "content": [], "status": "sent"}); + let created_msg = create_message(app_handle, message).await.unwrap(); + + assert!(created_msg["id"].as_str().is_some_and(|id| !id.is_empty())); + + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_concurrent_message_operations() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + let app_handle = app.handle().clone(); + + let created = create_thread(app_handle.clone(), create_test_thread("Concurrent Test")) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + let handles: Vec<_> = (0..5) + .map(|i| { + let app_h = app_handle.clone(); + let tid = thread_id.clone(); + tokio::spawn(async move { + create_message(app_h, create_test_message(&tid, &format!("Message {}", i))).await + }) + }) + .collect(); + + let results = future::join_all(handles).await; + assert!(results.iter().all(|r| r.is_ok() && r.as_ref().unwrap().is_ok())); + + let messages = list_messages(app_handle, thread_id).await.unwrap(); + assert_eq!(messages.len(), 5); + + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_empty_thread_list() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + // Clean up any leftover test data + let test_data_threads = std::env::current_dir() + .unwrap_or_else(|_| PathBuf::from(".")) + .join("test-data") + .join("threads"); + let _ = fs::remove_dir_all(&test_data_threads); + + let threads = list_threads(app.handle().clone()).await.unwrap(); + assert_eq!(threads.len(), 0); + let _ = fs::remove_dir_all(data_dir); +} + +#[tokio::test] +async fn test_empty_message_list() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + let app_handle = app.handle().clone(); + + let created = create_thread(app_handle.clone(), create_test_thread("Empty Messages Test")) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap(); + + let messages = list_messages(app_handle, thread_id.to_string()).await.unwrap(); + assert_eq!(messages.len(), 0); + + let _ = fs::remove_dir_all(data_dir); +} diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index abd12ddb7..24da0e807 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -31,7 +31,9 @@ pub fn run() { .plugin(tauri_plugin_http::init()) .plugin(tauri_plugin_store::Builder::new().build()) .plugin(tauri_plugin_shell::init()) - .plugin(tauri_plugin_llamacpp::init()); + .plugin(tauri_plugin_llamacpp::init()) + 
.plugin(tauri_plugin_vector_db::init()) + .plugin(tauri_plugin_rag::init()); #[cfg(feature = "deep-link")] { @@ -151,17 +153,17 @@ pub fn run() { .config() .version .clone() - .unwrap_or_else(|| "".to_string()); + .unwrap_or_default(); // Migrate extensions if let Err(e) = setup::install_extensions(app.handle().clone(), stored_version != app_version) { - log::error!("Failed to install extensions: {}", e); + log::error!("Failed to install extensions: {e}"); } // Migrate MCP servers if let Err(e) = setup::migrate_mcp_servers(app.handle().clone(), store.clone()) { - log::error!("Failed to migrate MCP servers: {}", e); + log::error!("Failed to migrate MCP servers: {e}"); } // Store the new app version @@ -180,15 +182,28 @@ pub fn run() { use tauri_plugin_deep_link::DeepLinkExt; app.deep_link().register_all()?; } + + // Initialize SQLite database for mobile platforms + #[cfg(any(target_os = "android", target_os = "ios"))] + { + let app_handle = app.handle().clone(); + tauri::async_runtime::spawn(async move { + if let Err(e) = crate::core::threads::db::init_database(&app_handle).await { + log::error!("Failed to initialize mobile database: {}", e); + } + }); + } + setup_mcp(app); + setup::setup_theme_listener(app)?; Ok(()) }) .build(tauri::generate_context!()) .expect("error while running tauri application"); // Handle app lifecycle events - app.run(|app, event| match event { - RunEvent::Exit => { + app.run(|app, event| { + if let RunEvent::Exit = event { // This is called when the app is actually exiting (e.g., macOS dock quit) // We can't prevent this, so run cleanup quickly let app_handle = app.clone(); @@ -208,6 +223,5 @@ pub fn run() { }); }); } - _ => {} }); } diff --git a/src-tauri/tauri.android.conf.json b/src-tauri/tauri.android.conf.json index a0b795207..2f1144c20 100644 --- a/src-tauri/tauri.android.conf.json +++ b/src-tauri/tauri.android.conf.json @@ -2,7 +2,9 @@ "identifier": "jan.ai.app", "build": { "devUrl": null, - "frontendDist": "../web-app/dist" + "frontendDist": "../web-app/dist", + "beforeDevCommand": "cross-env IS_DEV=true IS_ANDROID=true yarn build:web", + "beforeBuildCommand": "cross-env IS_ANDROID=true yarn build:web" }, "app": { "security": { @@ -11,7 +13,11 @@ }, "plugins": {}, "bundle": { - "resources": ["resources/LICENSE"], + "active": true, + "resources": [ + "resources/pre-install/**/*", + "resources/LICENSE" + ], "externalBin": [], "android": { "minSdkVersion": 24 diff --git a/src-tauri/tauri.bundle.windows.nsis.template b/src-tauri/tauri.bundle.windows.nsis.template new file mode 100644 index 000000000..8e7602f25 --- /dev/null +++ b/src-tauri/tauri.bundle.windows.nsis.template @@ -0,0 +1,1006 @@ +Unicode true +ManifestDPIAware true +; Add in `dpiAwareness` `PerMonitorV2` to manifest for Windows 10 1607+ (note this should not affect lower versions since they should be able to ignore this and pick up `dpiAware` `true` set by `ManifestDPIAware true`) +; Currently undocumented on NSIS's website but is in the Docs folder of source tree, see +; https://github.com/kichik/nsis/blob/5fc0b87b819a9eec006df4967d08e522ddd651c9/Docs/src/attributes.but#L286-L300 +; https://github.com/tauri-apps/tauri/pull/10106 +ManifestDPIAwareness PerMonitorV2 + +!if "lzma" == "none" + SetCompress off +!else + ; Set the compression algorithm. We default to LZMA. 
+ SetCompressor /SOLID "lzma" +!endif + +!include MUI2.nsh +!include FileFunc.nsh +!include x64.nsh +!include WordFunc.nsh +!include "utils.nsh" +!include "FileAssociation.nsh" +!include "Win\COM.nsh" +!include "Win\Propkey.nsh" +!include "StrFunc.nsh" +${StrCase} +${StrLoc} + + +!define WEBVIEW2APPGUID "{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}" + +!define MANUFACTURER "ai" +!define PRODUCTNAME "jan_productname" +!define VERSION "jan_version" +!define VERSIONWITHBUILD "jan_build" +!define HOMEPAGE "" +!define INSTALLMODE "currentUser" +!define LICENSE "" +!define INSTALLERICON "D:\a\jan\jan\src-tauri\icons\icon.ico" +!define SIDEBARIMAGE "" +!define HEADERIMAGE "" +!define MAINBINARYNAME "jan_mainbinaryname" +!define MAINBINARYSRCPATH "D:\a\jan\jan\src-tauri\target\release\jan_mainbinaryname.exe" +!define BUNDLEID "jan_mainbinaryname.ai.app" +!define COPYRIGHT "" +!define OUTFILE "nsis-output.exe" +!define ARCH "x64" +!define ADDITIONALPLUGINSPATH "D:\a\jan\jan\src-tauri\target\release\nsis\x64\Plugins\x86-unicode\additional" +!define ALLOWDOWNGRADES "true" +!define DISPLAYLANGUAGESELECTOR "false" +!define INSTALLWEBVIEW2MODE "downloadBootstrapper" +!define WEBVIEW2INSTALLERARGS "/silent" +!define WEBVIEW2BOOTSTRAPPERPATH "" +!define WEBVIEW2INSTALLERPATH "" +!define MINIMUMWEBVIEW2VERSION "" +!define UNINSTKEY "Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCTNAME}" +!define MANUKEY "Software\${MANUFACTURER}" +!define MANUPRODUCTKEY "${MANUKEY}\${PRODUCTNAME}" +!define UNINSTALLERSIGNCOMMAND "$\"powershell$\" $\"-ExecutionPolicy$\" $\"Bypass$\" $\"-File$\" $\"./sign.ps1$\" $\"%1$\"" +!define ESTIMATEDSIZE "793795" +!define STARTMENUFOLDER "" +!define VCREDIST_URL "https://aka.ms/vs/17/release/vc_redist.x64.exe" +!define VCREDIST_FILENAME "vc_redist.x64.exe" + +Var PassiveMode +Var UpdateMode +Var NoShortcutMode +Var WixMode +Var OldMainBinaryName + +Name "${PRODUCTNAME}" +BrandingText "${COPYRIGHT}" +OutFile "${OUTFILE}" + +ShowInstDetails nevershow +ShowUninstDetails nevershow + +; We don't actually use this value as default install path, +; it's just for nsis to append the product name folder in the directory selector +; https://nsis.sourceforge.io/Reference/InstallDir +!define PLACEHOLDER_INSTALL_DIR "placeholder\${PRODUCTNAME}" +InstallDir "${PLACEHOLDER_INSTALL_DIR}" + +VIProductVersion "${VERSIONWITHBUILD}" +VIAddVersionKey "ProductName" "${PRODUCTNAME}" +VIAddVersionKey "FileDescription" "${PRODUCTNAME}" +VIAddVersionKey "LegalCopyright" "${COPYRIGHT}" +VIAddVersionKey "FileVersion" "${VERSION}" +VIAddVersionKey "ProductVersion" "${VERSION}" + +# additional plugins +!addplugindir "${ADDITIONALPLUGINSPATH}" + +; Uninstaller signing command +!if "${UNINSTALLERSIGNCOMMAND}" != "" + !uninstfinalize '${UNINSTALLERSIGNCOMMAND}' +!endif + +; Handle install mode, `perUser`, `perMachine` or `both` +!if "${INSTALLMODE}" == "perMachine" + RequestExecutionLevel highest +!endif + +!if "${INSTALLMODE}" == "currentUser" + RequestExecutionLevel user +!endif + +!if "${INSTALLMODE}" == "both" + !define MULTIUSER_MUI + !define MULTIUSER_INSTALLMODE_INSTDIR "${PRODUCTNAME}" + !define MULTIUSER_INSTALLMODE_COMMANDLINE + !if "${ARCH}" == "x64" + !define MULTIUSER_USE_PROGRAMFILES64 + !else if "${ARCH}" == "arm64" + !define MULTIUSER_USE_PROGRAMFILES64 + !endif + !define MULTIUSER_INSTALLMODE_DEFAULT_REGISTRY_KEY "${UNINSTKEY}" + !define MULTIUSER_INSTALLMODE_DEFAULT_REGISTRY_VALUENAME "CurrentUser" + !define MULTIUSER_INSTALLMODEPAGE_SHOWUSERNAME + !define MULTIUSER_INSTALLMODE_FUNCTION 
RestorePreviousInstallLocation + !define MULTIUSER_EXECUTIONLEVEL Highest + !include MultiUser.nsh +!endif + +; Installer icon +!if "${INSTALLERICON}" != "" + !define MUI_ICON "${INSTALLERICON}" +!endif + +; Installer sidebar image +!if "${SIDEBARIMAGE}" != "" + !define MUI_WELCOMEFINISHPAGE_BITMAP "${SIDEBARIMAGE}" +!endif + +; Installer header image +!if "${HEADERIMAGE}" != "" + !define MUI_HEADERIMAGE + !define MUI_HEADERIMAGE_BITMAP "${HEADERIMAGE}" +!endif + +; Define registry key to store installer language +!define MUI_LANGDLL_REGISTRY_ROOT "HKCU" +!define MUI_LANGDLL_REGISTRY_KEY "${MANUPRODUCTKEY}" +!define MUI_LANGDLL_REGISTRY_VALUENAME "Installer Language" + +; Installer pages, must be ordered as they appear +; 1. Welcome Page +!define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive +!insertmacro MUI_PAGE_WELCOME + +; 2. License Page (if defined) +!if "${LICENSE}" != "" + !define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive + !insertmacro MUI_PAGE_LICENSE "${LICENSE}" +!endif + +; 3. Install mode (if it is set to `both`) +!if "${INSTALLMODE}" == "both" + !define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive + !insertmacro MULTIUSER_PAGE_INSTALLMODE +!endif + +; 4. Custom page to ask user if he wants to reinstall/uninstall +; only if a previous installation was detected +Var ReinstallPageCheck +Page custom PageReinstall PageLeaveReinstall +Function PageReinstall + ; Uninstall previous WiX installation if exists. + ; + ; A WiX installer stores the installation info in registry + ; using a UUID and so we have to loop through all keys under + ; `HKLM\SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall` + ; and check if `DisplayName` and `Publisher` keys match ${PRODUCTNAME} and ${MANUFACTURER} + ; + ; This has a potential issue that there maybe another installation that matches + ; our ${PRODUCTNAME} and ${MANUFACTURER} but wasn't installed by our WiX installer, + ; however, this should be fine since the user will have to confirm the uninstallation + ; and they can chose to abort it if doesn't make sense. 
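+  ; Implementation note: the loop below walks the Uninstall registry key one
+  ; sub-key at a time with EnumRegKey, matches DisplayName/Publisher against
+  ; ${PRODUCTNAME}/${MANUFACTURER}, and lower-cases the UninstallString to check
+  ; that it starts with msiexec before treating the entry as a WiX/MSI install
+  ; ($WixMode = 1, with the matched key kept in $R6 for the version check below).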
+ StrCpy $0 0 + wix_loop: + EnumRegKey $1 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall" $0 + StrCmp $1 "" wix_loop_done ; Exit loop if there is no more keys to loop on + IntOp $0 $0 + 1 + ReadRegStr $R0 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" "DisplayName" + ReadRegStr $R1 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" "Publisher" + StrCmp "$R0$R1" "${PRODUCTNAME}${MANUFACTURER}" 0 wix_loop + ReadRegStr $R0 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" "UninstallString" + ${StrCase} $R1 $R0 "L" + ${StrLoc} $R0 $R1 "msiexec" ">" + StrCmp $R0 0 0 wix_loop_done + StrCpy $WixMode 1 + StrCpy $R6 "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" + Goto compare_version + wix_loop_done: + + ; Check if there is an existing installation, if not, abort the reinstall page + ReadRegStr $R0 SHCTX "${UNINSTKEY}" "" + ReadRegStr $R1 SHCTX "${UNINSTKEY}" "UninstallString" + ${IfThen} "$R0$R1" == "" ${|} Abort ${|} + + ; Compare this installar version with the existing installation + ; and modify the messages presented to the user accordingly + compare_version: + StrCpy $R4 "$(older)" + ${If} $WixMode = 1 + ReadRegStr $R0 HKLM "$R6" "DisplayVersion" + ${Else} + ReadRegStr $R0 SHCTX "${UNINSTKEY}" "DisplayVersion" + ${EndIf} + ${IfThen} $R0 == "" ${|} StrCpy $R4 "$(unknown)" ${|} + + nsis_tauri_utils::SemverCompare "${VERSION}" $R0 + Pop $R0 + ; Reinstalling the same version + ${If} $R0 = 0 + StrCpy $R1 "$(alreadyInstalledLong)" + StrCpy $R2 "$(addOrReinstall)" + StrCpy $R3 "$(uninstallApp)" + !insertmacro MUI_HEADER_TEXT "$(alreadyInstalled)" "$(chooseMaintenanceOption)" + ; Upgrading + ${ElseIf} $R0 = 1 + StrCpy $R1 "$(olderOrUnknownVersionInstalled)" + StrCpy $R2 "$(uninstallBeforeInstalling)" + StrCpy $R3 "$(dontUninstall)" + !insertmacro MUI_HEADER_TEXT "$(alreadyInstalled)" "$(choowHowToInstall)" + ; Downgrading + ${ElseIf} $R0 = -1 + StrCpy $R1 "$(newerVersionInstalled)" + StrCpy $R2 "$(uninstallBeforeInstalling)" + !if "${ALLOWDOWNGRADES}" == "true" + StrCpy $R3 "$(dontUninstall)" + !else + StrCpy $R3 "$(dontUninstallDowngrade)" + !endif + !insertmacro MUI_HEADER_TEXT "$(alreadyInstalled)" "$(choowHowToInstall)" + ${Else} + Abort + ${EndIf} + + ; Skip showing the page if passive + ; + ; Note that we don't call this earlier at the begining + ; of this function because we need to populate some variables + ; related to current installed version if detected and whether + ; we are downgrading or not. 
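+  ; Implementation note: in passive mode the UI below is skipped and control
+  ; jumps straight into PageLeaveReinstall; otherwise an nsDialogs page is built
+  ; with a label ($R1) and two radio buttons ($R2/$R3) for the reinstall versus
+  ; uninstall choice, pre-selecting whichever option the user picked the last
+  ; time this page was shown ($ReinstallPageCheck).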
+ ${If} $PassiveMode = 1 + Call PageLeaveReinstall + ${Else} + nsDialogs::Create 1018 + Pop $R4 + ${IfThen} $(^RTL) = 1 ${|} nsDialogs::SetRTL $(^RTL) ${|} + + ${NSD_CreateLabel} 0 0 100% 24u $R1 + Pop $R1 + + ${NSD_CreateRadioButton} 30u 50u -30u 8u $R2 + Pop $R2 + ${NSD_OnClick} $R2 PageReinstallUpdateSelection + + ${NSD_CreateRadioButton} 30u 70u -30u 8u $R3 + Pop $R3 + ; Disable this radio button if downgrading and downgrades are disabled + !if "${ALLOWDOWNGRADES}" == "false" + ${IfThen} $R0 = -1 ${|} EnableWindow $R3 0 ${|} + !endif + ${NSD_OnClick} $R3 PageReinstallUpdateSelection + + ; Check the first radio button if this the first time + ; we enter this page or if the second button wasn't + ; selected the last time we were on this page + ${If} $ReinstallPageCheck <> 2 + SendMessage $R2 ${BM_SETCHECK} ${BST_CHECKED} 0 + ${Else} + SendMessage $R3 ${BM_SETCHECK} ${BST_CHECKED} 0 + ${EndIf} + + ${NSD_SetFocus} $R2 + nsDialogs::Show + ${EndIf} +FunctionEnd +Function PageReinstallUpdateSelection + ${NSD_GetState} $R2 $R1 + ${If} $R1 == ${BST_CHECKED} + StrCpy $ReinstallPageCheck 1 + ${Else} + StrCpy $ReinstallPageCheck 2 + ${EndIf} +FunctionEnd +Function PageLeaveReinstall + ; In passive mode, always uninstall when upgrading + ${If} $PassiveMode = 1 + ${AndIf} $R0 = 1 ; Upgrading + Goto reinst_uninstall + ${EndIf} + + ${NSD_GetState} $R2 $R1 + + ; If migrating from Wix, always uninstall + ${If} $WixMode = 1 + Goto reinst_uninstall + ${EndIf} + + ; In update mode, always proceeds without uninstalling + ${If} $UpdateMode = 1 + Goto reinst_done + ${EndIf} + + ; $R0 holds whether same(0)/upgrading(1)/downgrading(-1) version + ; $R1 holds the radio buttons state: + ; 1 => first choice was selected + ; 0 => second choice was selected + ${If} $R0 = 0 ; Same version, proceed + ${If} $R1 = 1 ; User chose to add/reinstall + Goto reinst_done + ${Else} ; User chose to uninstall + Goto reinst_uninstall + ${EndIf} + ${ElseIf} $R0 = 1 ; Upgrading + ${If} $R1 = 1 ; User chose to uninstall + Goto reinst_uninstall + ${Else} + Goto reinst_done ; User chose NOT to uninstall + ${EndIf} + ${ElseIf} $R0 = -1 ; Downgrading + ${If} $R1 = 1 ; User chose to uninstall + Goto reinst_uninstall + ${Else} + Goto reinst_done ; User chose NOT to uninstall + ${EndIf} + ${EndIf} + + reinst_uninstall: + HideWindow + ClearErrors + + ${If} $WixMode = 1 + ReadRegStr $R1 HKLM "$R6" "UninstallString" + ExecWait '$R1' $0 + ${Else} + ReadRegStr $4 SHCTX "${MANUPRODUCTKEY}" "" + ReadRegStr $R1 SHCTX "${UNINSTKEY}" "UninstallString" + ${IfThen} $UpdateMode = 1 ${|} StrCpy $R1 "$R1 /UPDATE" ${|} ; append /UPDATE + ${IfThen} $PassiveMode = 1 ${|} StrCpy $R1 "$R1 /P" ${|} ; append /P + StrCpy $R1 "$R1 _?=$4" ; append uninstall directory + ExecWait '$R1' $0 + ${EndIf} + + BringToFront + + ${IfThen} ${Errors} ${|} StrCpy $0 2 ${|} ; ExecWait failed, set fake exit code + + ${If} $0 <> 0 + ${OrIf} ${FileExists} "$INSTDIR\${MAINBINARYNAME}.exe" + ; User cancelled wix uninstaller? return to select un/reinstall page + ${If} $WixMode = 1 + ${AndIf} $0 = 1602 + Abort + ${EndIf} + + ; User cancelled NSIS uninstaller? return to select un/reinstall page + ${If} $0 = 1 + Abort + ${EndIf} + + ; Other erros? show generic error message and return to select un/reinstall page + MessageBox MB_ICONEXCLAMATION "$(unableToUninstall)" + Abort + ${EndIf} + reinst_done: +FunctionEnd + +; 5. Choose install directory page +!define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive +!insertmacro MUI_PAGE_DIRECTORY + +; 6. 
Start menu shortcut page +Var AppStartMenuFolder +!if "${STARTMENUFOLDER}" != "" + !define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive + !define MUI_STARTMENUPAGE_DEFAULTFOLDER "${STARTMENUFOLDER}" +!else + !define MUI_PAGE_CUSTOMFUNCTION_PRE Skip +!endif +!insertmacro MUI_PAGE_STARTMENU Application $AppStartMenuFolder + +; 7. Installation page +!insertmacro MUI_PAGE_INSTFILES + +; 8. Finish page +; +; Don't auto jump to finish page after installation page, +; because the installation page has useful info that can be used debug any issues with the installer. +!define MUI_FINISHPAGE_NOAUTOCLOSE +; Use show readme button in the finish page as a button create a desktop shortcut +!define MUI_FINISHPAGE_SHOWREADME +!define MUI_FINISHPAGE_SHOWREADME_TEXT "$(createDesktop)" +!define MUI_FINISHPAGE_SHOWREADME_FUNCTION CreateOrUpdateDesktopShortcut +; Show run app after installation. +!define MUI_FINISHPAGE_RUN +!define MUI_FINISHPAGE_RUN_FUNCTION RunMainBinary +!define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive +!insertmacro MUI_PAGE_FINISH + +Function RunMainBinary + nsis_tauri_utils::RunAsUser "$INSTDIR\${MAINBINARYNAME}.exe" "" +FunctionEnd + +; Uninstaller Pages +; 1. Confirm uninstall page +Var DeleteAppDataCheckbox +Var DeleteAppDataCheckboxState +!define /ifndef WS_EX_LAYOUTRTL 0x00400000 +!define MUI_PAGE_CUSTOMFUNCTION_SHOW un.ConfirmShow +Function un.ConfirmShow ; Add add a `Delete app data` check box + ; $1 inner dialog HWND + ; $2 window DPI + ; $3 style + ; $4 x + ; $5 y + ; $6 width + ; $7 height + FindWindow $1 "#32770" "" $HWNDPARENT ; Find inner dialog + System::Call "user32::GetDpiForWindow(p r1) i .r2" + ${If} $(^RTL) = 1 + StrCpy $3 "${__NSD_CheckBox_EXSTYLE} | ${WS_EX_LAYOUTRTL}" + IntOp $4 50 * $2 + ${Else} + StrCpy $3 "${__NSD_CheckBox_EXSTYLE}" + IntOp $4 0 * $2 + ${EndIf} + IntOp $5 100 * $2 + IntOp $6 400 * $2 + IntOp $7 25 * $2 + IntOp $4 $4 / 96 + IntOp $5 $5 / 96 + IntOp $6 $6 / 96 + IntOp $7 $7 / 96 + System::Call 'user32::CreateWindowEx(i r3, w "${__NSD_CheckBox_CLASS}", w "$(deleteAppData)", i ${__NSD_CheckBox_STYLE}, i r4, i r5, i r6, i r7, p r1, i0, i0, i0) i .s' + Pop $DeleteAppDataCheckbox + SendMessage $HWNDPARENT ${WM_GETFONT} 0 0 $1 + SendMessage $DeleteAppDataCheckbox ${WM_SETFONT} $1 1 +FunctionEnd +!define MUI_PAGE_CUSTOMFUNCTION_LEAVE un.ConfirmLeave +Function un.ConfirmLeave + SendMessage $DeleteAppDataCheckbox ${BM_GETCHECK} 0 0 $DeleteAppDataCheckboxState +FunctionEnd +!define MUI_PAGE_CUSTOMFUNCTION_PRE un.SkipIfPassive +!insertmacro MUI_UNPAGE_CONFIRM + +; 2. 
Uninstalling Page +!insertmacro MUI_UNPAGE_INSTFILES + +;Languages +!insertmacro MUI_LANGUAGE "English" +!insertmacro MUI_RESERVEFILE_LANGDLL + !include "D:\a\jan\jan\src-tauri\target\release\nsis\x64\English.nsh" + +Function .onInit + ${GetOptions} $CMDLINE "/P" $PassiveMode + ${IfNot} ${Errors} + StrCpy $PassiveMode 1 + ${EndIf} + ; always run in passive mode + StrCpy $PassiveMode 1 + + ${GetOptions} $CMDLINE "/NS" $NoShortcutMode + ${IfNot} ${Errors} + StrCpy $NoShortcutMode 1 + ${EndIf} + + ${GetOptions} $CMDLINE "/UPDATE" $UpdateMode + ${IfNot} ${Errors} + StrCpy $UpdateMode 1 + ${EndIf} + + !if "${DISPLAYLANGUAGESELECTOR}" == "true" + !insertmacro MUI_LANGDLL_DISPLAY + !endif + + !insertmacro SetContext + + ${If} $INSTDIR == "${PLACEHOLDER_INSTALL_DIR}" + ; Set default install location + !if "${INSTALLMODE}" == "perMachine" + ${If} ${RunningX64} + !if "${ARCH}" == "x64" + StrCpy $INSTDIR "$PROGRAMFILES64\${PRODUCTNAME}" + !else if "${ARCH}" == "arm64" + StrCpy $INSTDIR "$PROGRAMFILES64\${PRODUCTNAME}" + !else + StrCpy $INSTDIR "$PROGRAMFILES\${PRODUCTNAME}" + !endif + ${Else} + StrCpy $INSTDIR "$PROGRAMFILES\${PRODUCTNAME}" + ${EndIf} + !else if "${INSTALLMODE}" == "currentUser" + StrCpy $INSTDIR "$LOCALAPPDATA\Programs\${PRODUCTNAME}" + !endif + + Call RestorePreviousInstallLocation + ${EndIf} + + ; Remove old Jan if it exists + ${If} ${FileExists} "$INSTDIR\LICENSE.electron.txt" + DeleteRegKey HKLM "Software\${PRODUCTNAME}" + RMDir /r "$INSTDIR" + Delete "$INSTDIR\*.*" + ${EndIf} + + !if "${INSTALLMODE}" == "both" + !insertmacro MULTIUSER_INIT + !endif +FunctionEnd + + +Section EarlyChecks + ; Abort silent installer if downgrades is disabled + !if "${ALLOWDOWNGRADES}" == "false" + ${If} ${Silent} + ; If downgrading + ${If} $R0 = -1 + System::Call 'kernel32::AttachConsole(i -1)i.r0' + ${If} $0 <> 0 + System::Call 'kernel32::GetStdHandle(i -11)i.r0' + System::call 'kernel32::SetConsoleTextAttribute(i r0, i 0x0004)' ; set red color + FileWrite $0 "$(silentDowngrades)" + ${EndIf} + Abort + ${EndIf} + ${EndIf} + !endif + +SectionEnd + +Section WebView2 + ; Check if Webview2 is already installed and skip this section + ${If} ${RunningX64} + ReadRegStr $4 HKLM "SOFTWARE\WOW6432Node\Microsoft\EdgeUpdate\Clients\${WEBVIEW2APPGUID}" "pv" + ${Else} + ReadRegStr $4 HKLM "SOFTWARE\Microsoft\EdgeUpdate\Clients\${WEBVIEW2APPGUID}" "pv" + ${EndIf} + ${If} $4 == "" + ReadRegStr $4 HKCU "SOFTWARE\Microsoft\EdgeUpdate\Clients\${WEBVIEW2APPGUID}" "pv" + ${EndIf} + + ${If} $4 == "" + ; Webview2 installation + ; + ; Skip if updating + ${If} $UpdateMode <> 1 + !if "${INSTALLWEBVIEW2MODE}" == "downloadBootstrapper" + Delete "$TEMP\MicrosoftEdgeWebview2Setup.exe" + DetailPrint "$(webview2Downloading)" + NSISdl::download "https://go.microsoft.com/fwlink/p/?LinkId=2124703" "$TEMP\MicrosoftEdgeWebview2Setup.exe" + Pop $0 + ${If} $0 == "success" + DetailPrint "$(webview2DownloadSuccess)" + ${Else} + DetailPrint "$(webview2DownloadError)" + Abort "$(webview2AbortError)" + ${EndIf} + StrCpy $6 "$TEMP\MicrosoftEdgeWebview2Setup.exe" + Goto install_webview2 + !endif + + !if "${INSTALLWEBVIEW2MODE}" == "embedBootstrapper" + Delete "$TEMP\MicrosoftEdgeWebview2Setup.exe" + File "/oname=$TEMP\MicrosoftEdgeWebview2Setup.exe" "${WEBVIEW2BOOTSTRAPPERPATH}" + DetailPrint "$(installingWebview2)" + StrCpy $6 "$TEMP\MicrosoftEdgeWebview2Setup.exe" + Goto install_webview2 + !endif + + !if "${INSTALLWEBVIEW2MODE}" == "offlineInstaller" + Delete "$TEMP\MicrosoftEdgeWebView2RuntimeInstaller.exe" + File 
"/oname=$TEMP\MicrosoftEdgeWebView2RuntimeInstaller.exe" "${WEBVIEW2INSTALLERPATH}" + DetailPrint "$(installingWebview2)" + StrCpy $6 "$TEMP\MicrosoftEdgeWebView2RuntimeInstaller.exe" + Goto install_webview2 + !endif + + Goto webview2_done + + install_webview2: + DetailPrint "$(installingWebview2)" + ; $6 holds the path to the webview2 installer + ExecWait "$6 ${WEBVIEW2INSTALLERARGS} /install" $1 + ${If} $1 = 0 + DetailPrint "$(webview2InstallSuccess)" + ${Else} + DetailPrint "$(webview2InstallError)" + Abort "$(webview2AbortError)" + ${EndIf} + webview2_done: + ${EndIf} + ${Else} + !if "${MINIMUMWEBVIEW2VERSION}" != "" + ${VersionCompare} "${MINIMUMWEBVIEW2VERSION}" "$4" $R0 + ${If} $R0 = 1 + update_webview: + DetailPrint "$(installingWebview2)" + ${If} ${RunningX64} + ReadRegStr $R1 HKLM "SOFTWARE\WOW6432Node\Microsoft\EdgeUpdate" "path" + ${Else} + ReadRegStr $R1 HKLM "SOFTWARE\Microsoft\EdgeUpdate" "path" + ${EndIf} + ${If} $R1 == "" + ReadRegStr $R1 HKCU "SOFTWARE\Microsoft\EdgeUpdate" "path" + ${EndIf} + ${If} $R1 != "" + ; Chromium updater docs: https://source.chromium.org/chromium/chromium/src/+/main:docs/updater/user_manual.md + ; Modified from "HKEY_LOCAL_MACHINE\SOFTWARE\WOW6432Node\Microsoft\Windows\CurrentVersion\Uninstall\Microsoft EdgeWebView\ModifyPath" + ExecWait `"$R1" /install appguid=${WEBVIEW2APPGUID}&needsadmin=true` $1 + ${If} $1 = 0 + DetailPrint "$(webview2InstallSuccess)" + ${Else} + MessageBox MB_ICONEXCLAMATION|MB_ABORTRETRYIGNORE "$(webview2InstallError)" IDIGNORE ignore IDRETRY update_webview + Quit + ignore: + ${EndIf} + ${EndIf} + ${EndIf} + !endif + ${EndIf} +SectionEnd + +Section VCRedist + ; Check if VC++ Redistributable is already installed + ; Check for Visual Studio 2015-2022 redistributable (14.0 or higher) + ReadRegStr $0 HKLM "SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Version" + ${If} $0 == "" + ; Try alternative registry location + ReadRegStr $0 HKLM "SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Version" + ${EndIf} + ${If} $0 == "" + ; Try checking for any version of Visual C++ 2015-2022 Redistributable + ReadRegStr $0 HKLM "SOFTWARE\Classes\Installer\Dependencies\Microsoft.VS.VC_RuntimeMinimumVSU_amd64,v14" "Version" + ${EndIf} + + ${If} $0 == "" + ; VC++ Redistributable not found, need to install + DetailPrint "Visual C++ Redistributable not found, downloading and installing..." + + ; Download VC++ Redistributable + Delete "$TEMP\${VCREDIST_FILENAME}" + DetailPrint "Downloading Visual C++ Redistributable..." + NSISdl::download "${VCREDIST_URL}" "$TEMP\${VCREDIST_FILENAME}" + Pop $1 + + ${If} $1 == "success" + DetailPrint "Visual C++ Redistributable download successful" + + ; Install VC++ Redistributable silently + DetailPrint "Installing Visual C++ Redistributable..." + ExecWait '"$TEMP\${VCREDIST_FILENAME}" /quiet /norestart' $2 + + ${If} $2 == 0 + DetailPrint "Visual C++ Redistributable installed successfully" + ${ElseIf} $2 == 1638 + DetailPrint "Visual C++ Redistributable already installed (newer version)" + ${ElseIf} $2 == 3010 + DetailPrint "Visual C++ Redistributable installed successfully (restart required)" + ; You might want to handle restart requirement here + ${Else} + DetailPrint "Visual C++ Redistributable installation failed with exit code: $2" + MessageBox MB_ICONEXCLAMATION|MB_YESNO "Visual C++ Redistributable installation failed. Continue anyway?" 
IDYES continue_install + Abort "Installation cancelled due to Visual C++ Redistributable failure" + continue_install: + ${EndIf} + + ; Clean up downloaded file + Delete "$TEMP\${VCREDIST_FILENAME}" + ${Else} + DetailPrint "Failed to download Visual C++ Redistributable: $1" + MessageBox MB_ICONEXCLAMATION|MB_YESNO "Failed to download Visual C++ Redistributable. Continue anyway?" IDYES continue_install_download_fail + Abort "Installation cancelled due to download failure" + continue_install_download_fail: + ${EndIf} + ${Else} + DetailPrint "Visual C++ Redistributable already installed (version: $0)" + ${EndIf} +SectionEnd + +Section Install + SetDetailsPrint none + SetOutPath $INSTDIR + + !ifmacrodef NSIS_HOOK_PREINSTALL + !insertmacro NSIS_HOOK_PREINSTALL + !endif + + !insertmacro CheckIfAppIsRunning "${MAINBINARYNAME}.exe" "${PRODUCTNAME}" + + ; Copy main executable + File "${MAINBINARYSRCPATH}" + + ; Copy resources + CreateDirectory "$INSTDIR\resources" + CreateDirectory "$INSTDIR\resources\pre-install" + SetOutPath $INSTDIR + File /a "/oname=LICENSE" "D:\a\jan\jan\src-tauri\resources\LICENSE" + SetOutPath "$INSTDIR\resources\pre-install" + File /nonfatal /a /r "D:\a\jan\jan\src-tauri\resources\pre-install\" + SetOutPath $INSTDIR + + ; Copy external binaries + File /a "/oname=bun.exe" "D:\a\jan\jan\src-tauri\resources\bin\bun-x86_64-pc-windows-msvc.exe" + File /a "/oname=uv.exe" "D:\a\jan\jan\src-tauri\resources\bin\uv-x86_64-pc-windows-msvc.exe" + + ; Create file associations + + ; Register deep links + + ; Create uninstaller + WriteUninstaller "$INSTDIR\uninstall.exe" + + ; Save $INSTDIR in registry for future installations + WriteRegStr SHCTX "${MANUPRODUCTKEY}" "" $INSTDIR + + !if "${INSTALLMODE}" == "both" + ; Save install mode to be selected by default for the next installation such as updating + ; or when uninstalling + WriteRegStr SHCTX "${UNINSTKEY}" $MultiUser.InstallMode 1 + !endif + + ; Remove old main binary if it doesn't match new main binary name + ReadRegStr $OldMainBinaryName SHCTX "${UNINSTKEY}" "MainBinaryName" + ${If} $OldMainBinaryName != "" + ${AndIf} $OldMainBinaryName != "${MAINBINARYNAME}.exe" + Delete "$INSTDIR\$OldMainBinaryName" + ${EndIf} + + ; Save current MAINBINARYNAME for future updates + WriteRegStr SHCTX "${UNINSTKEY}" "MainBinaryName" "${MAINBINARYNAME}.exe" + + ; Registry information for add/remove programs + WriteRegStr SHCTX "${UNINSTKEY}" "DisplayName" "${PRODUCTNAME}" + WriteRegStr SHCTX "${UNINSTKEY}" "DisplayIcon" "$\"$INSTDIR\${MAINBINARYNAME}.exe$\"" + WriteRegStr SHCTX "${UNINSTKEY}" "DisplayVersion" "${VERSION}" + WriteRegStr SHCTX "${UNINSTKEY}" "Publisher" "${MANUFACTURER}" + WriteRegStr SHCTX "${UNINSTKEY}" "InstallLocation" "$\"$INSTDIR$\"" + WriteRegStr SHCTX "${UNINSTKEY}" "UninstallString" "$\"$INSTDIR\uninstall.exe$\"" + WriteRegDWORD SHCTX "${UNINSTKEY}" "NoModify" "1" + WriteRegDWORD SHCTX "${UNINSTKEY}" "NoRepair" "1" + + ${GetSize} "$INSTDIR" "/M=uninstall.exe /S=0K /G=0" $0 $1 $2 + IntOp $0 $0 + ${ESTIMATEDSIZE} + IntFmt $0 "0x%08X" $0 + WriteRegDWORD SHCTX "${UNINSTKEY}" "EstimatedSize" "$0" + + !if "${HOMEPAGE}" != "" + WriteRegStr SHCTX "${UNINSTKEY}" "URLInfoAbout" "${HOMEPAGE}" + WriteRegStr SHCTX "${UNINSTKEY}" "URLUpdateInfo" "${HOMEPAGE}" + WriteRegStr SHCTX "${UNINSTKEY}" "HelpLink" "${HOMEPAGE}" + !endif + + ; Create start menu shortcut + !insertmacro MUI_STARTMENU_WRITE_BEGIN Application + Call CreateOrUpdateStartMenuShortcut + !insertmacro MUI_STARTMENU_WRITE_END + + ; Create desktop shortcut for silent and passive 
installers + ; because finish page will be skipped + ${If} $PassiveMode = 1 + ${OrIf} ${Silent} + Call CreateOrUpdateDesktopShortcut + ${EndIf} + + !ifmacrodef NSIS_HOOK_POSTINSTALL + !insertmacro NSIS_HOOK_POSTINSTALL + !endif + + ; Auto close this page for passive mode + ${If} $PassiveMode = 1 + SetAutoClose true + ${EndIf} +SectionEnd + +Function .onInstSuccess + ; Check for `/R` flag only in silent and passive installers because + ; GUI installer has a toggle for the user to (re)start the app + ${If} $PassiveMode = 1 + ${OrIf} ${Silent} + ; ${GetOptions} $CMDLINE "/R" $R0 + ; ${IfNot} ${Errors} + ${GetOptions} $CMDLINE "/ARGS" $R0 + nsis_tauri_utils::RunAsUser "$INSTDIR\${MAINBINARYNAME}.exe" "$R0" + ; ${EndIf} + ${EndIf} +FunctionEnd + +Function un.onInit + !insertmacro SetContext + + !if "${INSTALLMODE}" == "both" + !insertmacro MULTIUSER_UNINIT + !endif + + !insertmacro MUI_UNGETLANGUAGE + + ${GetOptions} $CMDLINE "/P" $PassiveMode + ${IfNot} ${Errors} + StrCpy $PassiveMode 1 + ${EndIf} + + ${GetOptions} $CMDLINE "/UPDATE" $UpdateMode + ${IfNot} ${Errors} + StrCpy $UpdateMode 1 + ${EndIf} +FunctionEnd + +Section Uninstall + SetDetailsPrint none + + !ifmacrodef NSIS_HOOK_PREUNINSTALL + !insertmacro NSIS_HOOK_PREUNINSTALL + !endif + + !insertmacro CheckIfAppIsRunning "${MAINBINARYNAME}.exe" "${PRODUCTNAME}" + + ; Delete the app directory and its content from disk + ; Copy main executable + Delete "$INSTDIR\${MAINBINARYNAME}.exe" + + ; Delete LICENSE file + Delete "$INSTDIR\LICENSE" + + ; Delete resources + Delete "$INSTDIR\resources\pre-install\janhq-assistant-extension-1.0.2.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-conversational-extension-1.0.0.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-engine-management-extension-1.0.3.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-hardware-management-extension-1.0.0.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-inference-cortex-extension-1.0.25.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-model-extension-1.0.36.tgz" + + ; Delete external binaries + Delete "$INSTDIR\bun.exe" + Delete "$INSTDIR\uv.exe" + + ; Delete app associations + + ; Delete deep links + + + ; Delete uninstaller + Delete "$INSTDIR\uninstall.exe" + + RMDir /REBOOTOK "$INSTDIR\resources\pre-install" + RMDir /r /REBOOTOK "$INSTDIR\resources" + RMDir /r "$INSTDIR" + + ; Remove shortcuts if not updating + ${If} $UpdateMode <> 1 + !insertmacro DeleteAppUserModelId + + ; Remove start menu shortcut + !insertmacro MUI_STARTMENU_GETFOLDER Application $AppStartMenuFolder + !insertmacro IsShortcutTarget "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Pop $0 + ${If} $0 = 1 + !insertmacro UnpinShortcut "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" + Delete "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" + RMDir "$SMPROGRAMS\$AppStartMenuFolder" + ${EndIf} + !insertmacro IsShortcutTarget "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Pop $0 + ${If} $0 = 1 + !insertmacro UnpinShortcut "$SMPROGRAMS\${PRODUCTNAME}.lnk" + Delete "$SMPROGRAMS\${PRODUCTNAME}.lnk" + ${EndIf} + + ; Remove desktop shortcuts + !insertmacro IsShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Pop $0 + ${If} $0 = 1 + !insertmacro UnpinShortcut "$DESKTOP\${PRODUCTNAME}.lnk" + Delete "$DESKTOP\${PRODUCTNAME}.lnk" + ${EndIf} + ${EndIf} + + ; Remove registry information for add/remove programs + !if "${INSTALLMODE}" == "both" + DeleteRegKey SHCTX "${UNINSTKEY}" + !else if "${INSTALLMODE}" 
== "perMachine" + DeleteRegKey HKLM "${UNINSTKEY}" + !else + DeleteRegKey HKCU "${UNINSTKEY}" + !endif + + ; Removes the Autostart entry for ${PRODUCTNAME} from the HKCU Run key if it exists. + ; This ensures the program does not launch automatically after uninstallation if it exists. + ; If it doesn't exist, it does nothing. + ; We do this when not updating (to preserve the registry value on updates) + ${If} $UpdateMode <> 1 + DeleteRegValue HKCU "Software\Microsoft\Windows\CurrentVersion\Run" "${PRODUCTNAME}" + ${EndIf} + + ; Delete app data if the checkbox is selected + ; and if not updating + ${If} $DeleteAppDataCheckboxState = 1 + ${AndIf} $UpdateMode <> 1 + ; Clear the install location $INSTDIR from registry + DeleteRegKey SHCTX "${MANUPRODUCTKEY}" + DeleteRegKey /ifempty SHCTX "${MANUKEY}" + + ; Clear the install language from registry + DeleteRegValue HKCU "${MANUPRODUCTKEY}" "Installer Language" + DeleteRegKey /ifempty HKCU "${MANUPRODUCTKEY}" + DeleteRegKey /ifempty HKCU "${MANUKEY}" + + SetShellVarContext current + RmDir /r "$APPDATA\${BUNDLEID}" + RmDir /r "$LOCALAPPDATA\${BUNDLEID}" + ${EndIf} + + !ifmacrodef NSIS_HOOK_POSTUNINSTALL + !insertmacro NSIS_HOOK_POSTUNINSTALL + !endif + + ; Auto close if passive mode or updating + ${If} $PassiveMode = 1 + ${OrIf} $UpdateMode = 1 + SetAutoClose true + ${EndIf} +SectionEnd + +Function RestorePreviousInstallLocation + ReadRegStr $4 SHCTX "${MANUPRODUCTKEY}" "" + StrCmp $4 "" +2 0 + StrCpy $INSTDIR $4 +FunctionEnd + +Function Skip + Abort +FunctionEnd + +Function SkipIfPassive + ${IfThen} $PassiveMode = 1 ${|} Abort ${|} +FunctionEnd +Function un.SkipIfPassive + ${IfThen} $PassiveMode = 1 ${|} Abort ${|} +FunctionEnd + +Function CreateOrUpdateStartMenuShortcut + ; We used to use product name as MAINBINARYNAME + ; migrate old shortcuts to target the new MAINBINARYNAME + StrCpy $R0 0 + + !insertmacro IsShortcutTarget "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\$OldMainBinaryName" + Pop $0 + ${If} $0 = 1 + !insertmacro SetShortcutTarget "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + StrCpy $R0 1 + ${EndIf} + + !insertmacro IsShortcutTarget "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\$OldMainBinaryName" + Pop $0 + ${If} $0 = 1 + !insertmacro SetShortcutTarget "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + StrCpy $R0 1 + ${EndIf} + + ${If} $R0 = 1 + Return + ${EndIf} + + ; Skip creating shortcut if in update mode or no shortcut mode + ; but always create if migrating from wix + ${If} $WixMode = 0 + ${If} $UpdateMode = 1 + ${OrIf} $NoShortcutMode = 1 + Return + ${EndIf} + ${EndIf} + + !if "${STARTMENUFOLDER}" != "" + CreateDirectory "$SMPROGRAMS\$AppStartMenuFolder" + CreateShortcut "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + !insertmacro SetLnkAppUserModelId "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" + !else + CreateShortcut "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + !insertmacro SetLnkAppUserModelId "$SMPROGRAMS\${PRODUCTNAME}.lnk" + !endif +FunctionEnd + +Function CreateOrUpdateDesktopShortcut + ; We used to use product name as MAINBINARYNAME + ; migrate old shortcuts to target the new MAINBINARYNAME + !insertmacro IsShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\$OldMainBinaryName" + Pop $0 + ${If} $0 = 1 + !insertmacro SetShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Return + ${EndIf} + + ; Skip creating shortcut if in update mode or no 
shortcut mode + ; but always create if migrating from wix + ${If} $WixMode = 0 + ${If} $UpdateMode = 1 + ${OrIf} $NoShortcutMode = 1 + Return + ${EndIf} + ${EndIf} + + CreateShortcut "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + !insertmacro SetLnkAppUserModelId "$DESKTOP\${PRODUCTNAME}.lnk" +FunctionEnd \ No newline at end of file diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index b0df3fc2f..90f0bcc69 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -13,7 +13,6 @@ "macOSPrivateApi": true, "windows": [ { - "zoomHotkeysEnabled": true, "label": "main", "title": "Jan", "width": 1024, @@ -40,7 +39,12 @@ } ], "security": { - "capabilities": ["default"], + "capabilities": [ + "default", + "logs-app-window", + "logs-window", + "system-monitor-window" + ], "csp": { "default-src": "'self' customprotocol: asset: http://localhost:* http://127.0.0.1:* ws://localhost:* ws://127.0.0.1:*", "connect-src": "ipc: http://ipc.localhost http://127.0.0.1:* ws://localhost:* ws://127.0.0.1:* https: http:", @@ -76,6 +80,7 @@ } }, "bundle": { + "licenseFile": "../LICENSE", "publisher": "Menlo Research Pte. Ltd.", "active": true, "createUpdaterArtifacts": false, diff --git a/src-tauri/tauri.ios.conf.json b/src-tauri/tauri.ios.conf.json index 546cb4950..347f16bbd 100644 --- a/src-tauri/tauri.ios.conf.json +++ b/src-tauri/tauri.ios.conf.json @@ -1,9 +1,11 @@ { + "identifier": "jan.ai.app.ios", "build": { "devUrl": null, - "frontendDist": "../web-app/dist" + "frontendDist": "../web-app/dist", + "beforeDevCommand": "cross-env IS_DEV=true IS_IOS=true yarn build:web", + "beforeBuildCommand": "cross-env IS_IOS=true yarn build:web" }, - "identifier": "jan.ai.app", "app": { "security": { "capabilities": ["mobile"] @@ -15,7 +17,10 @@ "iOS": { "developmentTeam": "" }, - "resources": ["resources/LICENSE"], + "resources": [ + "resources/pre-install/**/*", + "resources/LICENSE" + ], "externalBin": [] } } \ No newline at end of file diff --git a/src-tauri/tauri.linux.conf.json b/src-tauri/tauri.linux.conf.json index 85f39ba50..32f6068a2 100644 --- a/src-tauri/tauri.linux.conf.json +++ b/src-tauri/tauri.linux.conf.json @@ -1,12 +1,18 @@ { "app": { "security": { - "capabilities": ["desktop", "system-monitor-window"] + "capabilities": [ + "desktop", + "system-monitor-window", + "log-app-window", + "logs-window" + ] } }, "bundle": { "targets": ["deb", "appimage"], - "resources": ["resources/LICENSE"], + "resources": ["resources/pre-install/**/*", "resources/LICENSE"], + "externalBin": ["resources/bin/uv"], "linux": { "appimage": { "bundleMediaFramework": false, diff --git a/src-tauri/tauri.macos.conf.json b/src-tauri/tauri.macos.conf.json index 2113bd0fa..5c5b493fa 100644 --- a/src-tauri/tauri.macos.conf.json +++ b/src-tauri/tauri.macos.conf.json @@ -1,11 +1,17 @@ { "app": { "security": { - "capabilities": ["desktop", "system-monitor-window"] + "capabilities": [ + "desktop", + "system-monitor-window", + "log-app-window", + "logs-window" + ] } }, "bundle": { "targets": ["app", "dmg"], - "resources": ["resources/LICENSE"] + "resources": ["resources/pre-install/**/*", "resources/LICENSE"], + "externalBin": ["resources/bin/bun", "resources/bin/uv"] } } diff --git a/src-tauri/tauri.windows.conf.json b/src-tauri/tauri.windows.conf.json index 91e2eb374..bc8ede344 100644 --- a/src-tauri/tauri.windows.conf.json +++ b/src-tauri/tauri.windows.conf.json @@ -1,23 +1,23 @@ { "app": { "security": { - "capabilities": ["desktop"] + "capabilities": [ + "desktop", + 
"system-monitor-window", + "log-app-window", + "logs-window" + ] } }, "bundle": { - "targets": ["nsis"], + "targets": ["nsis", "msi"], "resources": [ "resources/pre-install/**/*", - "resources/lib/vc_redist.x64.exe", "resources/LICENSE" ], "externalBin": ["resources/bin/bun", "resources/bin/uv"], "windows": { - "nsis": { - "installerHooks": "./windows/hooks.nsh", - "installerIcon": "icons/icon.ico" - }, "webviewInstallMode": { "silent": true, "type": "downloadBootstrapper" diff --git a/src-tauri/utils/Cargo.toml b/src-tauri/utils/Cargo.toml index 7d313a42b..691f90a92 100644 --- a/src-tauri/utils/Cargo.toml +++ b/src-tauri/utils/Cargo.toml @@ -8,7 +8,6 @@ base64 = "0.22" hmac = "0.12" log = { version = "0.4", optional = true } rand = "0.8" -reqwest = { version = "0.11", features = ["json"] } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" sha2 = "0.10" @@ -16,6 +15,12 @@ tokio = { version = "1", features = ["process", "fs", "macros", "rt"] } tokio-util = "0.7.14" url = "2.5" +[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies] +reqwest = { version = "0.11", features = ["json", "native-tls"] } + +[target.'cfg(any(target_os = "android", target_os = "ios"))'.dependencies] +reqwest = { version = "0.11", features = ["json", "rustls-tls"], default-features = false } + [target.'cfg(windows)'.dependencies] windows-sys = { version = "0.60.2", features = ["Win32_Storage_FileSystem"] } diff --git a/src-tauri/windows/hooks.nsh b/src-tauri/windows/hooks.nsh deleted file mode 100644 index d1beed199..000000000 --- a/src-tauri/windows/hooks.nsh +++ /dev/null @@ -1,65 +0,0 @@ -!macro NSIS_HOOK_POSTINSTALL - ; Check if Visual C++ Redistributable is already installed - ReadRegStr $0 HKLM "SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Version" - ${If} $0 == "" - ; Try alternative registry location - ReadRegStr $0 HKLM "SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Version" - ${EndIf} - - ${If} $0 == "" - ; VC++ Redistributable not found, need to install - DetailPrint "Visual C++ Redistributable not found, installing from bundled file..." - - ; Install from bundled EXE if not installed - ${If} ${FileExists} "$INSTDIR\resources\lib\vc_redist.x64.exe" - DetailPrint "Installing Visual C++ Redistributable..." 
- ; Copy to TEMP folder and then execute installer - CopyFiles "$INSTDIR\resources\lib\vc_redist.x64.exe" "$TEMP\vc_redist.x64.exe" - ExecWait '"$TEMP\vc_redist.x64.exe" /quiet /norestart' $1 - - ; Check whether installation process exited successfully (code 0) or not - ${If} $1 == 0 - DetailPrint "Visual C++ Redistributable installed successfully" - ${ElseIf} $1 == 1638 - DetailPrint "Visual C++ Redistributable already installed (newer version)" - ${ElseIf} $1 == 3010 - DetailPrint "Visual C++ Redistributable installed successfully (restart required)" - ${Else} - DetailPrint "Visual C++ installation failed with exit code: $1" - ${EndIf} - - ; Clean up setup files from TEMP and your installed app - Delete "$TEMP\vc_redist.x64.exe" - Delete "$INSTDIR\resources\lib\vc_redist.x64.exe" - ${Else} - DetailPrint "Visual C++ Redistributable not found at expected location: $INSTDIR\resources\lib\vc_redist.x64.exe" - ${EndIf} - ${Else} - DetailPrint "Visual C++ Redistributable already installed (version: $0)" - ${EndIf} - - ; ---- Copy LICENSE to install root ---- - ${If} ${FileExists} "$INSTDIR\resources\LICENSE" - CopyFiles /SILENT "$INSTDIR\resources\LICENSE" "$INSTDIR\LICENSE" - DetailPrint "Copied LICENSE to install root" - - ; Optional cleanup - remove from resources folder - Delete "$INSTDIR\resources\LICENSE" - ${Else} - DetailPrint "LICENSE not found at expected location: $INSTDIR\resources\LICENSE" - ${EndIf} - - ; ---- Copy vulkan-1.dll to install root ---- - ${If} ${FileExists} "$INSTDIR\resources\lib\vulkan-1.dll" - CopyFiles /SILENT "$INSTDIR\resources\lib\vulkan-1.dll" "$INSTDIR\vulkan-1.dll" - DetailPrint "Copied vulkan-1.dll to install root" - - ; Optional cleanup - remove from resources folder - Delete "$INSTDIR\resources\lib\vulkan-1.dll" - - ; Only remove the lib directory if it's empty after removing both files - RMDir "$INSTDIR\resources\lib" - ${Else} - DetailPrint "vulkan-1.dll not found at expected location: $INSTDIR\resources\lib\vulkan-1.dll" - ${EndIf} -!macroend \ No newline at end of file diff --git a/tests/checklist.md b/tests/checklist.md index b2e1da7ca..8e9e65d4b 100644 --- a/tests/checklist.md +++ b/tests/checklist.md @@ -16,7 +16,7 @@ Before testing, set-up the following in the old version to make sure that we can - [ ] Change the `App Data` to some other folder - [ ] Create a Custom Provider - [ ] Disable some model providers -- [NEW] Change llama.cpp setting of 2 models +- [ ] Change llama.cpp setting of 2 models #### Validate that the update does not corrupt existing user data or settings (before and after update show the same information): - [ ] Threads - [ ] Previously used model and assistants is shown correctly @@ -73,35 +73,44 @@ Before testing, set-up the following in the old version to make sure that we can - [ ] Ensure that when this value is changed, there is no broken UI caused by it - [ ] Code Block - [ ] Show Line Numbers -- [ENG] Ensure that when click on `Reset` in the `Appearance` section, it reset back to the default values -- [ENG] Ensure that when click on `Reset` in the `Code Block` section, it reset back to the default values +- [ ] [0.7.0] Compact Token Counter will show token counter in side chat input when toggle, if not it will show a small token counter below the chat input +- [ ] [ENG] Ensure that when click on `Reset` in the `Appearance` section, it reset back to the default values +- [ ] [ENG] Ensure that when click on `Reset` in the `Code Block` section, it reset back to the default values #### In `Model Providers`: In `Llama.cpp`: 
- [ ] After downloading a model from hub, the model is listed with the correct name under `Models` - [ ] Can import `gguf` model with no error +- [ ] [0.7.0] While importing, there should be an import indication appear under `Models` - [ ] Imported model will be listed with correct name under the `Models` +- [ ] [0.6.9] Take a `gguf` file and delete the `.gguf` extensions from the file name, import it into Jan and verify that it works. +- [ ] [0.6.10] Can import vlm models and chat with images +- [ ] [0.6.10] Import a file that is not `mmproj` in the `mmproj field` should show validation error +- [ ] [0.6.10] Import `mmproj` from different models should error +- [ ] [0.7.0] Users can customize model display names according to their own preferences. - [ ] Check that when click `delete` the model will be removed from the list - [ ] Deleted model doesn't appear in the selectable models section in chat input (even in old threads that use the model previously) - [ ] Ensure that user can re-import deleted imported models +- [ ] [0.6.8] Ensure that there is a recommended `llama.cpp` for each system and that it works out of the box for users. +- [ ] [0.6.10] Change to an older version of llama.cpp backend. Click on `Check for Llamacpp Updates` it should alert that there is a new version. +- [ ] [0.7.0] Users can cancel a backend download while it is in progress. +- [ ] [0.6.10] Try `Install backend from file` for a backend and it should show as an option for backend +- [ ] [0.7.0] User can install a backend from file in both .tar.gz and .zip formats, and the backend appears in the backend selection menu +- [ ] [0.7.0] A manually installed backend is automatically selected after import, and the backend menu updates to show it as the latest imported backend. - [ ] Enable `Auto-Unload Old Models`, and ensure that only one model can run / start at a time. If there are two model running at the time of enable, both of them will be stopped. - [ ] Disable `Auto-Unload Old Models`, and ensure that multiple models can run at the same time. - [ ] Enable `Context Shift` and ensure that context can run for long without encountering memory error. Use the `banana test` by turn on fetch MCP => ask local model to fetch and summarize the history of banana (banana has a very long history on wiki it turns out). It should run out of context memory sufficiently fast if `Context Shift` is not enabled. + +In `Model Settings`: - [ ] [0.6.8] Ensure that user can change the Jinja chat template of individual model and it doesn't affect the template of other model -- [ ] [0.6.8] Ensure that there is a recommended `llama.cpp` for each system and that it works out of the box for users. - [ ] [0.6.8] Ensure we can override Tensor Buffer Type in the model settings to offload layers between GPU and CPU => Download any MoE Model (i.e., gpt-oss-20b) => Set tensor buffer type as `blk\\.([0-30]*[02468])\\.ffn_.*_exps\\.=CPU` => check if those tensors are in cpu and run inference (you can view the app.log if it contains `--override-tensor", "blk\\\\.([0-30]*[02468])\\\\.ffn_.*_exps\\\\.=CPU`) -- [ ] [0.6.9] Take a `gguf` file and delete the `.gguf` extensions from the file name, import it into Jan and verify that it works. -- [ ] [0.6.10] Can import vlm models and chat with images -- [ ] [0.6.10] Import model on mmproj field should show validation error -- [ ] [0.6.10] Import mmproj from different models should not be able to chat with the models -- [ ] [0.6.10] Change to an older version of llama.cpp backend. 
Click on `Check for Llamacpp Updates` it should alert that there is a new version. -- [ ] [0.6.10] Try `Install backend from file` for a backend and it should show as an option for backend In Remote Model Providers: - [ ] Check that the following providers are presence: - [ ] OpenAI - [ ] Anthropic + - [ ] [0.7.0] Azure - [ ] Cohere - [ ] OpenRouter - [ ] Mistral @@ -113,12 +122,15 @@ In Remote Model Providers: - [ ] Delete a model and ensure that it doesn't show up in the `Models` list view or in the selectable dropdown in chat input. - [ ] Ensure that a deleted model also not selectable or appear in old threads that used it. - [ ] Adding of new model manually works and user can chat with the newly added model without error (you can add back the model you just delete for testing) -- [ ] [0.6.9] Make sure that Ollama set-up as a custom provider work with Jan +- [ ] [0.7.0] Vision capabilities are now automatically detected for vision models +- [ ] [0.7.0] New default models are available for adding to remote providers through a drop down (OpenAI, Mistral, Groq) + In Custom Providers: - [ ] Ensure that user can create a new custom providers with the right baseURL and API key. - [ ] Click `Refresh` should retrieve a list of available models from the Custom Providers. - [ ] User can chat with the custom providers - [ ] Ensure that Custom Providers can be deleted and won't reappear in a new session +- [ ] [0.6.9] Make sure that Ollama set-up as a custom provider work with Jan In general: - [ ] Disabled Model Provider should not show up as selectable in chat input of new thread and old thread alike (old threads' chat input should show `Select Model` instead of disabled model) @@ -162,9 +174,10 @@ Ensure that the following section information show up for hardware - [ ] When the user click `Always Allow` on the pop up, the tool will retain permission and won't ask for confirmation again. (this applied at an individual tool level, not at the MCP server level) - [ ] If `Allow All MCP Tool Permissions` is enabled, in every new thread, there should not be any confirmation dialog pop up when a tool is called. 
- [ ] When the pop-up appear, make sure that the `Tool Parameters` is also shown with detail in the pop-up -- [ ] [0.6.9] Go to Enter JSON configuration when created a new MCp => paste the JSON config inside => click `Save` => server works +- [ ] [0.6.9] Go to Enter JSON configuration when created a new MCP => paste the JSON config inside => click `Save` => server works - [ ] [0.6.9] If individual JSON config format is failed, the MCP server should not be activated - [ ] [0.6.9] Make sure that MCP server can be used with streamable-http transport => connect to Smithery and test MCP server +- [ ] [0.7.0] When deleting an MCP Server, a toast notification is shown #### In `Local API Server`: - [ ] User can `Start Server` and chat with the default endpoint @@ -175,7 +188,8 @@ Ensure that the following section information show up for hardware - [ ] [0.6.9] When the startup configuration, the last used model is also automatically start (users does not have to manually start a model before starting the server) - [ ] [0.6.9] Make sure that you can send an image to a Local API Server and it also works (can set up Local API Server as a Custom Provider in Jan to test) - [ ] [0.6.10] Make sure you are still able to see API key when server local status is running - +- [ ] [0.7.0] Users can see the Jan API Server Swagger UI by opening the following path in their browser `http://:` +- [ ] [0.7.0] Users can set the trusted host to * in the server configuration to accept requests from all host or without host #### In `HTTPS Proxy`: - [ ] Model download request goes through proxy endpoint @@ -188,6 +202,7 @@ Ensure that the following section information show up for hardware - [ ] Clicking download work inside the Model card HTML - [ ] [0.6.9] Check that the model recommendation base on user hardware work as expected in the Model Hub - [ ] [0.6.10] Check that model of the same name but different author can be found in the Hub catalog (test with [https://huggingface.co/unsloth/Qwen3-4B-Thinking-2507-GGUF](https://huggingface.co/unsloth/Qwen3-4B-Thinking-2507-GGUF)) +- [ ] [0.7.0] Support downloading models with the same name from different authors, models not listed on the hub will be prefixed with the author name ## D. Threads @@ -214,19 +229,30 @@ Ensure that the following section information show up for hardware - [ ] User can send message with different type of text content (e.g text, emoji, ...) - [ ] When request model to generate a markdown table, the table is correctly formatted as returned from the model. - [ ] When model generate code, ensure that the code snippets is properly formatted according to the `Appearance -> Code Block` setting. +- [ ] [0.7.0] LaTeX formulas now render correctly in chat. Both inline \(...\) and block \[...\] formats are supported. Code blocks and HTML tags are not affected - [ ] Users can edit their old message and user can regenerate the answer based on the new message - [ ] User can click `Copy` to copy the model response +- [ ] [0.6.10] When click on copy code block from model generation, it will only copy one code-block at a time instead of multiple code block at once - [ ] User can click `Delete` to delete either the user message or the model response. - [ ] The token speed appear when a response from model is being generated and the final value is show under the response. - [ ] Make sure that user when using IME keyboard to type Chinese and Japanese character and they press `Enter`, the `Send` button doesn't trigger automatically after each words. 
-- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a remote model -- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a local model +- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a Remote model & Local model - [ ] [0.6.9] Check that you can paste an image to text box from your system clipboard (Copy - Paste) -- [ ] [0.6.9] Make sure that user can favourite a model in the llama.cpp list and see the favourite model selection in chat input +- [ ] [0.6.10] User can Paste (e.g Ctrl + v) text into chat input when it is a vision model +- [ ] [0.6.9] Make sure that user can favourite a model in the Model list and see the favourite model selection in chat input - [ ] [0.6.10] User can click mode's setting on chat, enable Auto-Optimize Settings, and continue chatting with the model without interruption. - [ ] Verify this works with at least two models of different sizes (e.g., 1B and 7B). -- [ ] [0.6.10] User can Paste (e.g Ctrl + v) text into chat input when it is a vision model -- [ ] [0.6.10] When click on copy code block from model generation, it will only copy one code-block at a time instead of multiple code block at once +- [ ] [0.7.0] When chatting with a model, the UI displays a token usage counter showing the percentage of context consumed. +- [ ] [0.7.0] When chatting with a model, the scroll no longer follows the model’s streaming response; it only auto-scrolls when the user sends a new message +#### In Project + +- [ ] [0.7.0] User can create new project +- [ ] [0.7.0] User can add existing threads to a project +- [ ] [0.7.0] When the user attempts to delete a project, a confirmation dialog must appear warning that this action will permanently delete the project and all its associated threads. +- [ ] [0.7.0] The user can successfully delete a project, and all threads contained within that project are also permanently deleted. +- [ ] [0.7.0] A thread that already belongs to a project cannot be re-added to the same project. +- [ ] [0.7.0] Favorited threads retain their "favorite" status even after being added to a project + ## E. 
Assistants - [ ] There is always at least one default Assistant which is Jan - [ ] The default Jan assistant has `stream = True` by default @@ -238,6 +264,7 @@ Ensure that the following section information show up for hardware In `Settings -> General`: - [ ] Change the location of the `App Data` to some other path that is not the default path +- [ ] [0.7.0] Users cannot set the data location to root directories (e.g., C:\, D:\ on Windows), but can select subfolders within those drives (e.g., C:\data, D:\data) - [ ] Click on `Reset` button in `Other` to factory reset the app: - [ ] All threads deleted - [ ] All Assistant deleted except for default Jan Assistant diff --git a/web-app/index.html b/web-app/index.html index dd2e76ee6..55625d33c 100644 --- a/web-app/index.html +++ b/web-app/index.html @@ -17,7 +17,7 @@ Jan diff --git a/web-app/package.json b/web-app/package.json index 88bbe411a..287756336 100644 --- a/web-app/package.json +++ b/web-app/package.json @@ -82,7 +82,7 @@ "remark-math": "6.0.0", "sonner": "2.0.5", "tailwindcss": "4.1.4", - "token.js": "npm:token.js-fork@0.7.27", + "token.js": "npm:token.js-fork@0.7.29", "tw-animate-css": "1.2.8", "ulidx": "2.4.1", "unified": "11.0.5", diff --git a/web-app/src/components/ui/dropdown-menu.tsx b/web-app/src/components/ui/dropdown-menu.tsx index 7a527aaca..4f92ebe60 100644 --- a/web-app/src/components/ui/dropdown-menu.tsx +++ b/web-app/src/components/ui/dropdown-menu.tsx @@ -41,7 +41,7 @@ function DropdownMenuContent({ data-slot="dropdown-menu-content" sideOffset={sideOffset} className={cn( - 'bg-main-view select-none text-main-view-fg border-main-view-fg/5 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-[51] max-h-(--radix-dropdown-menu-content-available-height) min-w-[8rem] origin-(--radix-dropdown-menu-content-transform-origin) overflow-x-hidden overflow-y-auto rounded-md border p-1 shadow-md', + 'bg-main-view select-none text-main-view-fg border-main-view-fg/5 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 max-h-(--radix-dropdown-menu-content-available-height) min-w-[8rem] origin-(--radix-dropdown-menu-content-transform-origin) overflow-x-hidden overflow-y-auto rounded-md border p-1 shadow-md z-[90]', className )} {...props} @@ -229,7 +229,7 @@ function DropdownMenuSubContent({ - >([]) + const [attachments, setAttachments] = useState([]) const [connectedServers, setConnectedServers] = useState([]) const [isDragOver, setIsDragOver] = useState(false) const [hasMmproj, setHasMmproj] = useState(false) const [hasActiveModels, setHasActiveModels] = useState(false) + const attachmentsEnabled = useAttachments((s) => s.enabled) + // Determine whether to show the Attach documents button (simple gating) + const showAttachmentButton = + attachmentsEnabled && PlatformFeatures[PlatformFeature.ATTACHMENTS] + // Derived: any document currently processing (ingestion in progress) + const ingestingDocs = attachments.some( + (a) => a.type === 'document' && a.processing + ) + const 
ingestingAny = attachments.some((a) => a.processing) // Check for connected MCP servers useEffect(() => { const checkConnectedServers = async () => { try { const servers = await serviceHub.mcp().getConnectedServers() - setConnectedServers(servers) + // Only update state if the servers list has actually changed + setConnectedServers((prev) => { + if (JSON.stringify(prev) === JSON.stringify(servers)) { + return prev + } + return servers + }) } catch (error) { console.error('Failed to get connected servers:', error) - setConnectedServers([]) + setConnectedServers((prev) => { + if (prev.length === 0) return prev + return [] + }) } } @@ -132,10 +155,25 @@ const ChatInput = ({ const activeModels = await serviceHub .models() .getActiveModels('llamacpp') - setHasActiveModels(activeModels.length > 0) + const hasMatchingActiveModel = activeModels.some( + (model) => String(model) === selectedModel?.id + ) + const newHasActiveModels = + activeModels.length > 0 && hasMatchingActiveModel + + // Only update state if the value has actually changed + setHasActiveModels((prev) => { + if (prev === newHasActiveModels) { + return prev + } + return newHasActiveModels + }) } catch (error) { console.error('Failed to get active models:', error) - setHasActiveModels(false) + setHasActiveModels((prev) => { + if (prev === false) return prev + return false + }) } } @@ -145,7 +183,7 @@ const ChatInput = ({ const intervalId = setInterval(checkActiveModels, 3000) return () => clearInterval(intervalId) - }, [serviceHub]) + }, [serviceHub, selectedModel?.id]) // Check for mmproj existence or vision capability when model changes useEffect(() => { @@ -176,23 +214,50 @@ const ChatInput = ({ const mcpExtension = extensionManager.get(ExtensionTypeEnum.MCP) const MCPToolComponent = mcpExtension?.getToolComponent?.() - - const handleSendMesage = async (prompt: string) => { + const handleSendMessage = async (prompt: string) => { if (!selectedModel) { setMessage('Please select a model to start chatting.') return } - if (!prompt.trim() && uploadedFiles.length === 0) { + if (!prompt.trim()) { return } + setMessage('') + + // Callback to update attachment processing state + const updateAttachmentProcessing = ( + fileName: string, + status: 'processing' | 'done' | 'error' | 'clear_docs' | 'clear_all' + ) => { + if (status === 'clear_docs') { + setAttachments((prev) => prev.filter((a) => a.type !== 'document')) + return + } + if (status === 'clear_all') { + setAttachments([]) + return + } + setAttachments((prev) => + prev.map((att) => + att.name === fileName + ? { + ...att, + processing: status === 'processing', + processed: status === 'done' ? true : att.processed, + } + : att + ) + ) + } + sendMessage( prompt, true, - uploadedFiles.length > 0 ? uploadedFiles : undefined, - projectId + attachments.length > 0 ? 
attachments : undefined, + projectId, + updateAttachmentProcessing ) - setUploadedFiles([]) } useEffect(() => { @@ -261,10 +326,160 @@ const ChatInput = ({ fileInputRef.current?.click() } - const handleRemoveFile = (indexToRemove: number) => { - setUploadedFiles((prev) => - prev.filter((_, index) => index !== indexToRemove) - ) + const handleAttachDocsIngest = async () => { + try { + if (!attachmentsEnabled) { + toast.info('Attachments are disabled in Settings') + return + } + const selection = await serviceHub.dialog().open({ + multiple: true, + filters: [ + { + name: 'Documents', + extensions: [ + 'pdf', + 'docx', + 'txt', + 'md', + 'csv', + 'xlsx', + 'xls', + 'ods', + 'pptx', + 'html', + 'htm', + ], + }, + ], + }) + if (!selection) return + const paths = Array.isArray(selection) ? selection : [selection] + if (!paths.length) return + + // Check for duplicates and fetch file sizes + const existingPaths = new Set( + attachments + .filter((a) => a.type === 'document' && a.path) + .map((a) => a.path) + ) + + const duplicates: string[] = [] + const newDocAttachments: Attachment[] = [] + + for (const p of paths) { + if (existingPaths.has(p)) { + duplicates.push(p.split(/[\\/]/).pop() || p) + continue + } + + const name = p.split(/[\\/]/).pop() || p + const fileType = name.split('.').pop()?.toLowerCase() + let size: number | undefined = undefined + try { + const stat = await fs.fileStat(p) + size = stat?.size ? Number(stat.size) : undefined + } catch (e) { + console.warn('Failed to read file size for', p, e) + } + newDocAttachments.push( + createDocumentAttachment({ + name, + path: p, + fileType, + size, + }) + ) + } + + if (duplicates.length > 0) { + toast.warning('Files already attached', { + description: `${duplicates.join(', ')} ${duplicates.length === 1 ? 'is' : 'are'} already in the list`, + }) + } + + if (newDocAttachments.length > 0) { + // Add to state first with processing flag + setAttachments((prev) => [...prev, ...newDocAttachments]) + + // If thread exists, ingest immediately + if (currentThreadId) { + const ragExtension = ExtensionManager.getInstance().get( + ExtensionTypeEnum.RAG + ) as RAGExtension | undefined + if (!ragExtension) { + toast.error('RAG extension not available') + return + } + + // Ingest each document + for (const doc of newDocAttachments) { + try { + // Mark as processing + setAttachments((prev) => + prev.map((a) => + a.path === doc.path && a.type === 'document' + ? { ...a, processing: true } + : a + ) + ) + + const result = await ragExtension.ingestAttachments( + currentThreadId, + [ + { + path: doc.path!, + name: doc.name, + type: doc.fileType, + size: doc.size, + }, + ] + ) + + const fileInfo = result.files?.[0] + if (fileInfo?.id) { + // Mark as processed with ID + setAttachments((prev) => + prev.map((a) => + a.path === doc.path && a.type === 'document' + ? { + ...a, + processing: false, + processed: true, + id: fileInfo.id, + chunkCount: fileInfo.chunk_count, + } + : a + ) + ) + } else { + throw new Error('No file ID returned from ingestion') + } + } catch (error) { + console.error('Failed to ingest document:', error) + // Remove failed document + setAttachments((prev) => + prev.filter( + (a) => !(a.path === doc.path && a.type === 'document') + ) + ) + toast.error(`Failed to ingest ${doc.name}`, { + description: + error instanceof Error ? error.message : String(error), + }) + } + } + } + } + } catch (e) { + console.error('Failed to attach documents:', e) + const desc = e instanceof Error ? 
e.message : String(e) + toast.error('Failed to attach documents', { description: desc }) + } + } + + const handleRemoveAttachment = (indexToRemove: number) => { + setAttachments((prev) => prev.filter((_, index) => index !== indexToRemove)) } const getFileTypeFromExtension = (fileName: string): string => { @@ -280,20 +495,36 @@ const ChatInput = ({ } } + const formatBytes = (bytes?: number): string => { + if (!bytes || bytes <= 0) return '' + const units = ['B', 'KB', 'MB', 'GB'] + let i = 0 + let val = bytes + while (val >= 1024 && i < units.length - 1) { + val /= 1024 + i++ + } + return `${val.toFixed(i === 0 ? 0 : 1)} ${units[i]}` + } + const handleFileChange = (e: React.ChangeEvent) => { const files = e.target.files if (files && files.length > 0) { const maxSize = 10 * 1024 * 1024 // 10MB in bytes - const newFiles: Array<{ - name: string - type: string - size: number - base64: string - dataUrl: string - }> = [] + const newFiles: Attachment[] = [] + const duplicates: string[] = [] + const existingImageNames = new Set( + attachments.filter((a) => a.type === 'image').map((a) => a.name) + ) Array.from(files).forEach((file) => { + // Check for duplicate image names + if (existingImageNames.has(file.name)) { + duplicates.push(file.name) + return + } + // Check file size if (file.size > maxSize) { setMessage(`File is too large. Maximum size is 10MB.`) @@ -327,26 +558,93 @@ const ChatInput = ({ const result = reader.result if (typeof result === 'string') { const base64String = result.split(',')[1] - const fileData = { + const att = createImageAttachment({ name: file.name, size: file.size, - type: actualType, + mimeType: actualType, base64: base64String, dataUrl: result, - } - newFiles.push(fileData) + }) + newFiles.push(att) // Update state if ( newFiles.length === Array.from(files).filter((f) => { const fType = getFileTypeFromExtension(f.name) || f.type - return f.size <= maxSize && allowedTypes.includes(fType) + return ( + f.size <= maxSize && + allowedTypes.includes(fType) && + !existingImageNames.has(f.name) + ) }).length ) { - setUploadedFiles((prev) => { - const updated = [...prev, ...newFiles] - return updated - }) + if (newFiles.length > 0) { + setAttachments((prev) => { + const updated = [...prev, ...newFiles] + return updated + }) + + // If thread exists, ingest images immediately + if (currentThreadId) { + void (async () => { + for (const img of newFiles) { + try { + // Mark as processing + setAttachments((prev) => + prev.map((a) => + a.name === img.name && a.type === 'image' + ? { ...a, processing: true } + : a + ) + ) + + const result = await serviceHub + .uploads() + .ingestImage(currentThreadId, img) + + if (result?.id) { + // Mark as processed with ID + setAttachments((prev) => + prev.map((a) => + a.name === img.name && a.type === 'image' + ? { + ...a, + processing: false, + processed: true, + id: result.id, + } + : a + ) + ) + } else { + throw new Error('No ID returned from image ingestion') + } + } catch (error) { + console.error('Failed to ingest image:', error) + // Remove failed image + setAttachments((prev) => + prev.filter( + (a) => !(a.name === img.name && a.type === 'image') + ) + ) + toast.error(`Failed to ingest ${img.name}`, { + description: + error instanceof Error + ? error.message + : String(error), + }) + } + } + })() + } + } + + if (duplicates.length > 0) { + toast.warning('Some images already attached', { + description: `${duplicates.join(', ')} ${duplicates.length === 1 ? 
'is' : 'are'} already in the list`, + }) + } + // Reset the file input value to allow re-uploading the same file if (fileInputRef.current) { fileInputRef.current.value = '' @@ -560,38 +858,107 @@ const ChatInput = ({ onDragOver={hasMmproj ? handleDragOver : undefined} onDrop={hasMmproj ? handleDrop : undefined} > - {uploadedFiles.length > 0 && ( + {attachments.length > 0 && (
- {uploadedFiles.map((file, index) => { - return ( -
- {file.type.startsWith('image/') && ( - {`${file.name} - )} + {attachments + .map((att, idx) => ({ att, idx })) + .map(({ att, idx }) => { + const isImage = att.type === 'image' + const ext = att.fileType || att.mimeType?.split('/')[1] + return (
handleRemoveFile(index)} + key={`${att.type}-${idx}-${att.name}`} + className="relative" > - + + + +
+ {/* Inner content by state */} + {isImage && att.dataUrl ? ( + {`${att.name}`} + ) : ( +
+ + {ext && ( + + .{ext} + + )} +
+ )} + + {/* Overlay spinner when processing */} + {att.processing && ( +
+ +
+ )} + + {/* Overlay success check when processed */} + {att.processed && !att.processing && ( +
+
+ +
+
+ )} +
+
+ +
+
+ {att.name} +
+
+ {isImage + ? att.mimeType || 'image' + : ext + ? `.${ext}` + : 'document'} + {att.size + ? ` · ${formatBytes(att.size)}` + : ''} +
+
+
+
+
+ + {/* Remove button disabled while processing - outside overflow-hidden container */} + {!att.processing && ( +
handleRemoveAttachment(idx)} + > + +
+ )}
-
- ) - })} + ) + })}
)} )} - {/* File attachment - show only for models with mmproj */} + {/* Vision image attachment - show only for models with mmproj */} {hasMmproj && ( @@ -680,6 +1047,39 @@ const ChatInput = ({ )} + {/* RAG document attachments - desktop-only via dialog; shown when feature enabled */} + {selectedModel?.capabilities?.includes('tools') && + showAttachmentButton && ( + + + +
+ {ingestingDocs ? ( + + ) : ( + + )} +
+
+ +

+ {ingestingDocs + ? 'Indexing documents…' + : 'Attach documents'} +

+
+
+
+ )} {/* Microphone - always available - Temp Hide */} {/*
@@ -703,74 +1103,75 @@ const ChatInput = ({ )} {selectedModel?.capabilities?.includes('tools') && - hasActiveMCPServers && ( - MCPToolComponent ? ( - // Use custom MCP component - - ) : ( - // Use default tools dropdown - - + ) : ( + // Use default tools dropdown + + + - { + setDropdownToolsAvailable(false) + e.stopPropagation() + }} > -
{ - setDropdownToolsAvailable(false) - e.stopPropagation() + { + setDropdownToolsAvailable(isOpen) + if (isOpen) { + setTooltipToolsAvailable(false) + } }} > - { - setDropdownToolsAvailable(isOpen) - if (isOpen) { - setTooltipToolsAvailable(false) - } - }} - > - {(isOpen, toolsCount) => { - return ( -
- - {toolsCount > 0 && ( -
- - {toolsCount > 99 ? '99+' : toolsCount} - -
- )} -
- ) - }} -
-
-
- -

{t('tools')}

-
-
-
- ) - )} + {(isOpen, toolsCount) => { + return ( +
+ + {toolsCount > 0 && ( +
+ + {toolsCount > 99 ? '99+' : toolsCount} + +
+ )} +
+ ) + }} + +
+ + +

{t('tools')}

+
+ + + ))} {selectedModel?.capabilities?.includes('web_search') && ( @@ -818,7 +1219,15 @@ const ChatInput = ({ a.type === 'image' && a.dataUrl) + .map((a) => ({ + name: a.name, + type: a.mimeType || getFileTypeFromExtension(a.name), + size: a.size || 0, + base64: a.base64 || '', + dataUrl: a.dataUrl!, + }))} /> )} @@ -835,17 +1244,13 @@ const ChatInput = ({ ) : ( + )} + + + )} + {folders.length === 0 ? (
@@ -123,9 +169,19 @@ function ProjectContent() { {t('projects.noProjectsYetDesc')}

+ ) : filteredProjects.length === 0 ? ( +
+ +

+ {t('projects.noProjectsFound')} +

+

+ {t('projects.tryDifferentSearch')} +

+
) : (
- {folders + {filteredProjects .slice() .sort((a, b) => b.updated_at - a.updated_at) .map((folder) => { @@ -172,8 +228,8 @@ function ProjectContent() { className="size-8 cursor-pointer flex items-center justify-center rounded-md hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out mr-1" title={ isExpanded - ? t('projects.collapseThreads') - : t('projects.expandThreads') + ? t('projects.collapseProject') + : t('projects.expandProject') } onClick={() => toggleProjectExpansion(folder.id)} > @@ -218,7 +274,9 @@ function ProjectContent() { {/* Thread List */} {isExpanded && projectThreads.length > 0 && ( -
+
+ return { + min: props.min ?? -Infinity, + max: props.max ?? Infinity, + step: props.step ?? 1, + } +} + +// Helper to validate and clamp numeric values +function clampValue(val: unknown, def: SettingComponentProps, currentValue: number): number { + const num = typeof val === 'number' ? val : Number(val) + if (!Number.isFinite(num)) return currentValue + const { min, max, step } = getConstraints(def) + // Floor integer values, preserve decimals for threshold + const adjusted = step >= 1 ? Math.floor(num) : num + return Math.max(min, Math.min(max, adjusted)) +} + +function AttachmentsSettings() { + const { t } = useTranslation() + const hookDefs = useAttachments((s) => s.settingsDefs) + const loadDefs = useAttachments((s) => s.loadSettingsDefs) + const [defs, setDefs] = useState([]) + + // Load schema from extension via the hook once + useEffect(() => { + loadDefs() + }, [loadDefs]) + + // Mirror the hook's defs into local state for display + useEffect(() => { + setDefs(hookDefs) + }, [hookDefs]) + + // Track values for live updates + const sel = useAttachments( + useShallow((s) => ({ + enabled: s.enabled, + maxFileSizeMB: s.maxFileSizeMB, + retrievalLimit: s.retrievalLimit, + retrievalThreshold: s.retrievalThreshold, + chunkSizeTokens: s.chunkSizeTokens, + overlapTokens: s.overlapTokens, + searchMode: s.searchMode, + setEnabled: s.setEnabled, + setMaxFileSizeMB: s.setMaxFileSizeMB, + setRetrievalLimit: s.setRetrievalLimit, + setRetrievalThreshold: s.setRetrievalThreshold, + setChunkSizeTokens: s.setChunkSizeTokens, + setOverlapTokens: s.setOverlapTokens, + setSearchMode: s.setSearchMode, + })) + ) + + // Local state for inputs to allow intermediate values while typing + const [localValues, setLocalValues] = useState>({}) + + // Debounce timers + const timersRef = useRef>>({}) + + // Cleanup timers on unmount + useEffect(() => { + const timers = timersRef.current + return () => { + Object.values(timers).forEach(clearTimeout) + } + }, []) + + // Debounced setter with validation + const debouncedSet = useCallback((key: string, val: unknown, def: SettingComponentProps) => { + // Clear existing timer for this key + if (timersRef.current[key]) { + clearTimeout(timersRef.current[key]) + } + + // Set local value immediately for responsive UI + setLocalValues((prev) => ({ + ...prev, + [key]: val as string | number | boolean | string[] + })) + + // For non-numeric inputs, apply immediately without debounce + if (key === 'enabled' || key === 'search_mode') { + if (key === 'enabled') sel.setEnabled(!!val) + else if (key === 'search_mode') sel.setSearchMode(val as 'auto' | 'ann' | 'linear') + return + } + + // For numeric inputs, debounce the validation and sync + timersRef.current[key] = setTimeout(() => { + const currentStoreValue = (() => { + switch (key) { + case 'max_file_size_mb': return sel.maxFileSizeMB + case 'retrieval_limit': return sel.retrievalLimit + case 'retrieval_threshold': return sel.retrievalThreshold + case 'chunk_size_tokens': return sel.chunkSizeTokens + case 'overlap_tokens': return sel.overlapTokens + default: return 0 + } + })() + + const validated = clampValue(val, def, currentStoreValue) + + switch (key) { + case 'max_file_size_mb': + sel.setMaxFileSizeMB(validated) + break + case 'retrieval_limit': + sel.setRetrievalLimit(validated) + break + case 'retrieval_threshold': + sel.setRetrievalThreshold(validated) + break + case 'chunk_size_tokens': + sel.setChunkSizeTokens(validated) + break + case 'overlap_tokens': + sel.setOverlapTokens(validated) + break + } + + // Update 
local value to validated one + setLocalValues((prev) => ({ + ...prev, + [key]: validated as string | number | boolean | string[] + })) + }, 500) // 500ms debounce + }, [sel]) + + return ( + +
+ +

{t('common:settings')}

+
+
+ +
+
+ + {defs.map((d) => { + // Use local value if typing, else use store value + const storeValue = (() => { + switch (d.key) { + case 'enabled': + return sel.enabled + case 'max_file_size_mb': + return sel.maxFileSizeMB + case 'retrieval_limit': + return sel.retrievalLimit + case 'retrieval_threshold': + return sel.retrievalThreshold + case 'chunk_size_tokens': + return sel.chunkSizeTokens + case 'overlap_tokens': + return sel.overlapTokens + case 'search_mode': + return sel.searchMode + default: + return d?.controllerProps?.value + } + })() + + const currentValue = + localValues[d.key] !== undefined ? localValues[d.key] : storeValue + + // Convert to DynamicControllerSetting compatible props + const baseProps = d.controllerProps + const normalizedValue: string | number | boolean = (() => { + if (Array.isArray(currentValue)) { + return currentValue.join(',') + } + return currentValue as string | number | boolean + })() + + const props = { + value: normalizedValue, + placeholder: 'placeholder' in baseProps ? baseProps.placeholder : undefined, + type: 'type' in baseProps ? baseProps.type : undefined, + options: 'options' in baseProps ? baseProps.options : undefined, + input_actions: 'inputActions' in baseProps ? baseProps.inputActions : undefined, + rows: undefined, + min: 'min' in baseProps ? baseProps.min : undefined, + max: 'max' in baseProps ? baseProps.max : undefined, + step: 'step' in baseProps ? baseProps.step : undefined, + recommended: 'recommended' in baseProps ? baseProps.recommended : undefined, + } + + const title = d.titleKey ? t(d.titleKey) : d.title + const description = d.descriptionKey ? t(d.descriptionKey) : d.description + + return ( + debouncedSet(d.key, val, d)} + /> + } + /> + ) + })} + +
+
+
+
+
+ ) +} diff --git a/web-app/src/routes/settings/local-api-server.tsx b/web-app/src/routes/settings/local-api-server.tsx index 3628a6f8c..3ca2922b6 100644 --- a/web-app/src/routes/settings/local-api-server.tsx +++ b/web-app/src/routes/settings/local-api-server.tsx @@ -15,7 +15,6 @@ import { useLocalApiServer } from '@/hooks/useLocalApiServer' import { useAppState } from '@/hooks/useAppState' import { useModelProvider } from '@/hooks/useModelProvider' import { useServiceHub } from '@/hooks/useServiceHub' -import { localStorageKey } from '@/constants/localStorage' import { IconLogs } from '@tabler/icons-react' import { cn } from '@/lib/utils' import { ApiKeyInput } from '@/containers/ApiKeyInput' @@ -23,6 +22,7 @@ import { useEffect, useState } from 'react' import { PlatformGuard } from '@/lib/platform/PlatformGuard' import { PlatformFeature } from '@/lib/platform' import { toast } from 'sonner' +import { getModelToStart } from '@/utils/getModelToStart' // eslint-disable-next-line @typescript-eslint/no-explicit-any export const Route = createFileRoute(route.settings.local_api_server as any)({ @@ -82,54 +82,6 @@ function LocalAPIServerContent() { setIsApiKeyEmpty(!isValid) } - const getLastUsedModel = (): { provider: string; model: string } | null => { - try { - const stored = localStorage.getItem(localStorageKey.lastUsedModel) - return stored ? JSON.parse(stored) : null - } catch (error) { - console.debug('Failed to get last used model from localStorage:', error) - return null - } - } - - // Helper function to determine which model to start - const getModelToStart = () => { - // Use last used model if available - const lastUsedModel = getLastUsedModel() - if (lastUsedModel) { - const provider = getProviderByName(lastUsedModel.provider) - if ( - provider && - provider.models.some((m) => m.id === lastUsedModel.model) - ) { - return { model: lastUsedModel.model, provider } - } - } - - // Use selected model if available - if (selectedModel && selectedProvider) { - const provider = getProviderByName(selectedProvider) - if (provider) { - return { model: selectedModel.id, provider } - } - } - - // Use first model from llamacpp provider - const llamacppProvider = getProviderByName('llamacpp') - if ( - llamacppProvider && - llamacppProvider.models && - llamacppProvider.models.length > 0 - ) { - return { - model: llamacppProvider.models[0].id, - provider: llamacppProvider, - } - } - - return null - } - const [isModelLoading, setIsModelLoading] = useState(false) const toggleAPIServer = async () => { @@ -137,7 +89,7 @@ function LocalAPIServerContent() { if (serverStatus === 'stopped') { console.log('Starting server with port:', serverPort) toast.info('Starting server...', { - description: `Attempting to start server on port ${serverPort}` + description: `Attempting to start server on port ${serverPort}`, }) if (!apiKey || apiKey.toString().trim().length === 0) { @@ -146,28 +98,47 @@ function LocalAPIServerContent() { } setShowApiKeyError(false) - const modelToStart = getModelToStart() - // Only start server if we have a model to load - if (!modelToStart) { - console.warn( - 'Cannot start Local API Server: No model available to load' - ) - return - } - setServerStatus('pending') - setIsModelLoading(true) // Start loading state - // Start the model first + // Check if there's already a loaded model serviceHub .models() - .startModel(modelToStart.provider, modelToStart.model) - .then(() => { - console.log(`Model ${modelToStart.model} started successfully`) - setIsModelLoading(false) // Model loaded, stop 
loading state + .getActiveModels() + .then((loadedModels) => { + if (loadedModels && loadedModels.length > 0) { + console.log(`Using already loaded model: ${loadedModels[0]}`) + // Model already loaded, just start the server + return Promise.resolve() + } else { + // No loaded model, start one first + const modelToStart = getModelToStart({ + selectedModel, + selectedProvider, + getProviderByName, + }) - // Add a small delay for the backend to update state - return new Promise((resolve) => setTimeout(resolve, 500)) + // Only start server if we have a model to load + if (!modelToStart) { + console.warn( + 'Cannot start Local API Server: No model available to load' + ) + throw new Error('No model available to load') + } + + setIsModelLoading(true) // Start loading state + + // Start the model first + return serviceHub + .models() + .startModel(modelToStart.provider, modelToStart.model) + .then(() => { + console.log(`Model ${modelToStart.model} started successfully`) + setIsModelLoading(false) // Model loaded, stop loading state + + // Add a small delay for the backend to update state + return new Promise((resolve) => setTimeout(resolve, 500)) + }) + } }) .then(() => { // Then start the server @@ -196,31 +167,31 @@ function LocalAPIServerContent() { toast.dismiss() // Extract error message from various error formats - const errorMsg = error && typeof error === 'object' && 'message' in error - ? String(error.message) - : String(error) + const errorMsg = + error && typeof error === 'object' && 'message' in error + ? String(error.message) + : String(error) // Port-related errors (highest priority) if (errorMsg.includes('Address already in use')) { toast.error('Port has been occupied', { - description: `Port ${serverPort} is already in use. Please try a different port.` + description: `Port ${serverPort} is already in use. Please try a different port.`, }) } // Model-related errors else if (errorMsg.includes('Invalid or inaccessible model path')) { toast.error('Invalid or inaccessible model path', { - description: errorMsg + description: errorMsg, }) - } - else if (errorMsg.includes('model')) { + } else if (errorMsg.includes('model')) { toast.error('Failed to start model', { - description: errorMsg + description: errorMsg, }) } // Generic server errors else { toast.error('Failed to start server', { - description: errorMsg + description: errorMsg, }) } }) @@ -258,7 +229,7 @@ function LocalAPIServerContent() { } } - const isServerRunning = serverStatus === 'running' + const isServerRunning = serverStatus !== 'stopped' return (
@@ -294,6 +265,22 @@ function LocalAPIServerContent() {
} > + { + if (!apiKey || apiKey.toString().trim().length === 0) { + setShowApiKeyError(true) + return + } + setEnableOnStartup(checked) + }} + /> + } + /> } /> - - {/* Startup Configuration */} - { - if (!apiKey || apiKey.toString().trim().length === 0) { - setShowApiKeyError(true) - return - } - setEnableOnStartup(checked) - }} - /> + + + } /> diff --git a/web-app/src/routes/settings/providers/index.tsx b/web-app/src/routes/settings/providers/index.tsx index b22487390..7b9afd905 100644 --- a/web-app/src/routes/settings/providers/index.tsx +++ b/web-app/src/routes/settings/providers/index.tsx @@ -39,7 +39,7 @@ function ModelProviders() { toast.error(t('providerAlreadyExists', { name })) return } - const newProvider = { + const newProvider: ProviderObject = { provider: name, active: true, models: [], diff --git a/web-app/src/services/__tests__/serviceHub.integration.test.ts b/web-app/src/services/__tests__/serviceHub.integration.test.ts index 8a8a10344..b39a24831 100644 --- a/web-app/src/services/__tests__/serviceHub.integration.test.ts +++ b/web-app/src/services/__tests__/serviceHub.integration.test.ts @@ -4,7 +4,11 @@ import { isPlatformTauri } from '@/lib/platform/utils' // Mock platform detection vi.mock('@/lib/platform/utils', () => ({ - isPlatformTauri: vi.fn().mockReturnValue(false) + isPlatformTauri: vi.fn().mockReturnValue(false), + isPlatformIOS: vi.fn().mockReturnValue(false), + isPlatformAndroid: vi.fn().mockReturnValue(false), + isIOS: vi.fn().mockReturnValue(false), + isAndroid: vi.fn().mockReturnValue(false) })) // Mock @jan/extensions-web to return empty extensions for testing @@ -213,4 +217,4 @@ describe('ServiceHub Integration Tests', () => { }) }) -}) +}) diff --git a/web-app/src/services/core/mobile.ts b/web-app/src/services/core/mobile.ts new file mode 100644 index 000000000..e5aedefa0 --- /dev/null +++ b/web-app/src/services/core/mobile.ts @@ -0,0 +1,69 @@ +/** + * Mobile Core Service - Android/iOS implementation + * + * This service extends TauriCoreService but provides mobile-specific + * extension loading. Instead of reading extensions from the filesystem, + * it returns pre-bundled web extensions. 
+ */ + +import { TauriCoreService } from './tauri' +import type { ExtensionManifest } from '@/lib/extension' +import JanConversationalExtension from '@janhq/conversational-extension' + +export class MobileCoreService extends TauriCoreService { + /** + * Override: Return pre-bundled extensions instead of reading from filesystem + */ + override async getActiveExtensions(): Promise { + return this.getBundledExtensions() + } + + /** + * Override: No-op on mobile - extensions are pre-bundled in the app + */ + override async installExtensions(): Promise { + console.log('[Mobile] Extensions are pre-bundled, skipping installation') + } + + /** + * Override: No-op on mobile - cannot install additional extensions + */ + override async installExtension(): Promise { + console.log('[Mobile] Cannot install extensions on mobile, they are pre-bundled') + return this.getBundledExtensions() + } + + /** + * Override: No-op on mobile - cannot uninstall bundled extensions + */ + override async uninstallExtension(): Promise { + console.log('[Mobile] Cannot uninstall pre-bundled extensions on mobile') + return false + } + + /** + * Private method to return pre-bundled mobile extensions + */ + private getBundledExtensions(): ExtensionManifest[] { + const conversationalExt = new JanConversationalExtension( + 'built-in', + '@janhq/conversational-extension', + 'Conversational Extension', + true, + 'Manages conversation threads and messages', + '1.0.0' + ) + + return [ + { + name: '@janhq/conversational-extension', + productName: 'Conversational Extension', + url: 'built-in', + active: true, + description: 'Manages conversation threads and messages', + version: '1.0.0', + extensionInstance: conversationalExt, + }, + ] + } +} diff --git a/web-app/src/services/index.ts b/web-app/src/services/index.ts index 0bfba90e6..65a117986 100644 --- a/web-app/src/services/index.ts +++ b/web-app/src/services/index.ts @@ -5,7 +5,7 @@ * then provides synchronous access to service instances throughout the app. 
*/ -import { isPlatformTauri } from '@/lib/platform/utils' +import { isPlatformTauri, isPlatformIOS, isPlatformAndroid } from '@/lib/platform/utils' // Import default services import { DefaultThemeService } from './theme/default' @@ -27,6 +27,10 @@ import { DefaultPathService } from './path/default' import { DefaultCoreService } from './core/default' import { DefaultDeepLinkService } from './deeplink/default' import { DefaultProjectsService } from './projects/default' +import { DefaultRAGService } from './rag/default' +import type { RAGService } from './rag/types' +import { DefaultUploadsService } from './uploads/default' +import type { UploadsService } from './uploads/types' // Import service types import type { ThemeService } from './theme/types' @@ -70,6 +74,8 @@ export interface ServiceHub { core(): CoreService deeplink(): DeepLinkService projects(): ProjectsService + rag(): RAGService + uploads(): UploadsService } class PlatformServiceHub implements ServiceHub { @@ -92,6 +98,8 @@ class PlatformServiceHub implements ServiceHub { private coreService: CoreService = new DefaultCoreService() private deepLinkService: DeepLinkService = new DefaultDeepLinkService() private projectsService: ProjectsService = new DefaultProjectsService() + private ragService: RAGService = new DefaultRAGService() + private uploadsService: UploadsService = new DefaultUploadsService() private initialized = false /** @@ -102,11 +110,14 @@ class PlatformServiceHub implements ServiceHub { console.log( 'Initializing service hub for platform:', - isPlatformTauri() ? 'Tauri' : 'Web' + isPlatformTauri() && !isPlatformIOS() && !isPlatformAndroid() ? 'Tauri' : + isPlatformIOS() ? 'iOS' : + isPlatformAndroid() ? 'Android' : 'Web' ) try { - if (isPlatformTauri()) { + if (isPlatformTauri() && !isPlatformIOS() && !isPlatformAndroid()) { + // Desktop Tauri const [ themeModule, windowModule, @@ -150,6 +161,44 @@ class PlatformServiceHub implements ServiceHub { this.pathService = new pathModule.TauriPathService() this.coreService = new coreModule.TauriCoreService() this.deepLinkService = new deepLinkModule.TauriDeepLinkService() + } else if (isPlatformIOS() || isPlatformAndroid()) { + const [ + themeModule, + windowModule, + eventsModule, + appModule, + mcpModule, + providersModule, + dialogModule, + openerModule, + pathModule, + coreModule, + deepLinkModule, + ] = await Promise.all([ + import('./theme/tauri'), + import('./window/tauri'), + import('./events/tauri'), + import('./app/tauri'), + import('./mcp/tauri'), + import('./providers/tauri'), + import('./dialog/tauri'), + import('./opener/tauri'), + import('./path/tauri'), + import('./core/mobile'), // Use mobile-specific core service + import('./deeplink/tauri'), + ]) + + this.themeService = new themeModule.TauriThemeService() + this.windowService = new windowModule.TauriWindowService() + this.eventsService = new eventsModule.TauriEventsService() + this.appService = new appModule.TauriAppService() + this.mcpService = new mcpModule.TauriMCPService() + this.providersService = new providersModule.TauriProvidersService() + this.dialogService = new dialogModule.TauriDialogService() + this.openerService = new openerModule.TauriOpenerService() + this.pathService = new pathModule.TauriPathService() + this.coreService = new coreModule.MobileCoreService() // Mobile service with pre-loaded extensions + this.deepLinkService = new deepLinkModule.TauriDeepLinkService() } else { const [ themeModule, @@ -302,6 +351,16 @@ class PlatformServiceHub implements ServiceHub { 
this.ensureInitialized() return this.projectsService } + + rag(): RAGService { + this.ensureInitialized() + return this.ragService + } + + uploads(): UploadsService { + this.ensureInitialized() + return this.uploadsService + } } export async function initializeServiceHub(): Promise { diff --git a/web-app/src/services/models/default.ts b/web-app/src/services/models/default.ts index 746f869d1..203ab5ccd 100644 --- a/web-app/src/services/models/default.ts +++ b/web-app/src/services/models/default.ts @@ -578,6 +578,9 @@ export class DefaultModelsService implements ModelsService { } }> }> + chat_template_kwargs?: { + enable_thinking: boolean + } }) => Promise } @@ -654,6 +657,9 @@ export class DefaultModelsService implements ModelsService { return await engine.getTokensCount({ model: modelId, messages: transformedMessages, + chat_template_kwargs: { + enable_thinking: false, + }, }) } diff --git a/web-app/src/services/providers/tauri.ts b/web-app/src/services/providers/tauri.ts index 50f1217da..4e08ee751 100644 --- a/web-app/src/services/providers/tauri.ts +++ b/web-app/src/services/providers/tauri.ts @@ -10,6 +10,7 @@ import { modelSettings } from '@/lib/predefined' import { ExtensionManager } from '@/lib/extension' import { fetch as fetchTauri } from '@tauri-apps/plugin-http' import { DefaultProvidersService } from './default' +import { getModelCapabilities } from '@/lib/models' export class TauriProvidersService extends DefaultProvidersService { fetch(): typeof fetch { @@ -26,32 +27,16 @@ export class TauriProvidersService extends DefaultProvidersService { provider.provider as unknown as keyof typeof providerModels ].models as unknown as string[] - if (Array.isArray(builtInModels)) + if (Array.isArray(builtInModels)) { models = builtInModels.map((model) => { const modelManifest = models.find((e) => e.id === model) // TODO: Check chat_template for tool call support - const capabilities = [ - ModelCapabilities.COMPLETION, - ( - providerModels[ - provider.provider as unknown as keyof typeof providerModels - ]?.supportsToolCalls as unknown as string[] - )?.includes(model) - ? ModelCapabilities.TOOLS - : undefined, - ( - providerModels[ - provider.provider as unknown as keyof typeof providerModels - ]?.supportsImages as unknown as string[] - )?.includes(model) - ? ModelCapabilities.VISION - : undefined, - ].filter(Boolean) as string[] return { ...(modelManifest ?? 
{ id: model, name: model }), - capabilities, + capabilities: getModelCapabilities(provider.provider, model), } as Model }) + } } return { @@ -166,6 +151,12 @@ export class TauriProvidersService extends DefaultProvidersService { headers['Authorization'] = `Bearer ${provider.api_key}` } + if (provider.custom_header) { + provider.custom_header.forEach((header) => { + headers[header.header] = header.value + }) + } + // Always use Tauri's fetch to avoid CORS issues const response = await fetchTauri(`${provider.base_url}/models`, { method: 'GET', diff --git a/web-app/src/services/providers/web.ts b/web-app/src/services/providers/web.ts index 6a7865be8..29d4a9cb7 100644 --- a/web-app/src/services/providers/web.ts +++ b/web-app/src/services/providers/web.ts @@ -11,6 +11,7 @@ import { ExtensionManager } from '@/lib/extension' import type { ProvidersService } from './types' import { PlatformFeatures } from '@/lib/platform/const' import { PlatformFeature } from '@/lib/platform/types' +import { getModelCapabilities } from '@/lib/models' export class WebProvidersService implements ProvidersService { async getProviders(): Promise { @@ -88,19 +89,9 @@ export class WebProvidersService implements ProvidersService { models = builtInModels.map((model) => { const modelManifest = models.find((e) => e.id === model) // TODO: Check chat_template for tool call support - const capabilities = [ - ModelCapabilities.COMPLETION, - ( - providerModels[ - provider.provider as unknown as keyof typeof providerModels - ]?.supportsToolCalls as unknown as string[] - )?.includes(model) - ? ModelCapabilities.TOOLS - : undefined, - ].filter(Boolean) as string[] return { ...(modelManifest ?? { id: model, name: model }), - capabilities, + capabilities: getModelCapabilities(provider.provider, model), } as Model }) } diff --git a/web-app/src/services/rag/default.ts b/web-app/src/services/rag/default.ts new file mode 100644 index 000000000..f4535c4fd --- /dev/null +++ b/web-app/src/services/rag/default.ts @@ -0,0 +1,50 @@ +import type { RAGService } from './types' +import type { MCPTool, MCPToolCallResult, RAGExtension } from '@janhq/core' +import { ExtensionManager } from '@/lib/extension' +import { ExtensionTypeEnum } from '@janhq/core' + +export class DefaultRAGService implements RAGService { + async getTools(): Promise { + const ext = ExtensionManager.getInstance().get(ExtensionTypeEnum.RAG) + if (ext?.getTools) { + try { + return await ext.getTools() + } catch (e) { + console.error('RAG extension getTools failed:', e) + } + } + return [] + } + + async callTool(args: { toolName: string; arguments: Record; threadId?: string }): Promise { + const ext = ExtensionManager.getInstance().get(ExtensionTypeEnum.RAG) + if (!ext?.callTool) { + return { error: 'RAG extension not available', content: [{ type: 'text', text: 'RAG extension not available' }] } + } + try { + // Inject thread context when scope requires it + type ToolCallArgs = Record & { scope?: string; thread_id?: string } + const a: ToolCallArgs = { ...(args.arguments as Record) } + if (!a.scope) a.scope = 'thread' + if (a.scope === 'thread' && !a.thread_id) { + a.thread_id = args.threadId + } + return await ext.callTool(args.toolName, a) + } catch (e) { + const msg = e instanceof Error ? 
e.message : String(e) + return { error: msg, content: [{ type: 'text', text: `RAG tool failed: ${msg}` }] } + } + } + + async getToolNames(): Promise { + try { + const ext = ExtensionManager.getInstance().get(ExtensionTypeEnum.RAG) + if (ext?.getToolNames) return await ext.getToolNames() + // No fallback to full tool list; return empty to save bandwidth + return [] + } catch (e) { + console.error('Failed to fetch RAG tool names:', e) + return [] + } + } +} diff --git a/web-app/src/services/rag/types.ts b/web-app/src/services/rag/types.ts new file mode 100644 index 000000000..085eb240b --- /dev/null +++ b/web-app/src/services/rag/types.ts @@ -0,0 +1,11 @@ +import { MCPTool } from '@janhq/core' +import type { MCPToolCallResult } from '@janhq/core' + +export interface RAGService { + // Return tools exposed by RAG-related extensions (e.g., retrieval, list_attachments) + getTools(): Promise + // Execute a RAG tool call (retrieve, list_attachments) + callTool(args: { toolName: string; arguments: object; threadId?: string }): Promise + // Convenience: return tool names for routing + getToolNames(): Promise +} diff --git a/web-app/src/services/theme/tauri.ts b/web-app/src/services/theme/tauri.ts index e05d60c74..8cd8c4e28 100644 --- a/web-app/src/services/theme/tauri.ts +++ b/web-app/src/services/theme/tauri.ts @@ -2,7 +2,8 @@ * Tauri Theme Service - Desktop implementation */ -import { getCurrentWindow, Theme } from '@tauri-apps/api/window' +import { Theme } from '@tauri-apps/api/window' +import { getAllWebviewWindows, type WebviewWindow } from '@tauri-apps/api/webviewWindow' import type { ThemeMode } from './types' import { DefaultThemeService } from './default' @@ -10,7 +11,27 @@ export class TauriThemeService extends DefaultThemeService { async setTheme(theme: ThemeMode): Promise { try { const tauriTheme = theme as Theme | null - await getCurrentWindow().setTheme(tauriTheme) + + // Update all open windows, not just the current one + const allWindows = await getAllWebviewWindows() + + // Convert to array if it's not already + const windowsArray: WebviewWindow[] = Array.isArray(allWindows) + ? 
allWindows + : Object.values(allWindows) + + await Promise.all( + windowsArray.map(async (window) => { + try { + await window.setTheme(tauriTheme) + } catch (error) { + console.error( + `Failed to set theme for window ${window.label}:`, + error + ) + } + }) + ) } catch (error) { console.error('Error setting theme in Tauri:', error) throw error @@ -21,7 +42,7 @@ export class TauriThemeService extends DefaultThemeService { return { setTheme: (theme: ThemeMode): Promise => { return this.setTheme(theme) - } + }, } } } diff --git a/web-app/src/services/uploads/default.ts b/web-app/src/services/uploads/default.ts new file mode 100644 index 000000000..d1a9b2d3b --- /dev/null +++ b/web-app/src/services/uploads/default.ts @@ -0,0 +1,32 @@ +import type { UploadsService, UploadResult } from './types' +import type { Attachment } from '@/types/attachment' +import { ulid } from 'ulidx' +import { ExtensionManager } from '@/lib/extension' +import { ExtensionTypeEnum, type RAGExtension, type IngestAttachmentsResult } from '@janhq/core' + +export class DefaultUploadsService implements UploadsService { + async ingestImage(_threadId: string, attachment: Attachment): Promise { + if (attachment.type !== 'image') throw new Error('ingestImage: attachment is not image') + // Placeholder upload flow; swap for real API call when backend is ready + await new Promise((r) => setTimeout(r, 100)) + return { id: ulid() } + } + + async ingestFileAttachment(threadId: string, attachment: Attachment): Promise { + if (attachment.type !== 'document') throw new Error('ingestFileAttachment: attachment is not document') + const ext = ExtensionManager.getInstance().get(ExtensionTypeEnum.RAG) + if (!ext?.ingestAttachments) throw new Error('RAG extension not available') + const res: IngestAttachmentsResult = await ext.ingestAttachments(threadId, [ + { path: attachment.path!, name: attachment.name, type: attachment.fileType, size: attachment.size }, + ]) + const files = res.files + if (Array.isArray(files) && files[0]?.id) { + return { + id: files[0].id, + size: typeof files[0].size === 'number' ? Number(files[0].size) : undefined, + chunkCount: typeof files[0].chunk_count === 'number' ? 
Number(files[0].chunk_count) : undefined, + } + } + throw new Error('Failed to resolve ingested attachment id') + } +} diff --git a/web-app/src/services/uploads/types.ts b/web-app/src/services/uploads/types.ts new file mode 100644 index 000000000..4f36b8d51 --- /dev/null +++ b/web-app/src/services/uploads/types.ts @@ -0,0 +1,16 @@ +import type { Attachment } from '@/types/attachment' + +export type UploadResult = { + id: string + url?: string + size?: number + chunkCount?: number +} + +export interface UploadsService { + // Ingest an image attachment (placeholder upload) + ingestImage(threadId: string, attachment: Attachment): Promise + + // Ingest a document attachment in the context of a thread + ingestFileAttachment(threadId: string, attachment: Attachment): Promise +} diff --git a/web-app/src/services/window/tauri.ts b/web-app/src/services/window/tauri.ts index a6dd643c7..a0e976ced 100644 --- a/web-app/src/services/window/tauri.ts +++ b/web-app/src/services/window/tauri.ts @@ -7,8 +7,39 @@ import type { WindowConfig, WebviewWindowInstance } from './types' import { DefaultWindowService } from './default' export class TauriWindowService extends DefaultWindowService { - async createWebviewWindow(config: WindowConfig): Promise { + async createWebviewWindow( + config: WindowConfig + ): Promise { try { + // Get current theme from localStorage + const storedTheme = localStorage.getItem('jan-theme') + let theme: 'light' | 'dark' | undefined = undefined + + if (storedTheme) { + try { + const themeData = JSON.parse(storedTheme) + const activeTheme = themeData?.state?.activeTheme + const isDark = themeData?.state?.isDark + + // Set theme based on stored preference + if (activeTheme === 'auto') { + theme = undefined // Let OS decide + } else if ( + activeTheme === 'dark' || + (activeTheme === 'auto' && isDark) + ) { + theme = 'dark' + } else if ( + activeTheme === 'light' || + (activeTheme === 'auto' && !isDark) + ) { + theme = 'light' + } + } catch (e) { + console.warn('Failed to parse theme from localStorage:', e) + } + } + const webviewWindow = new WebviewWindow(config.label, { url: config.url, title: config.title, @@ -20,8 +51,12 @@ export class TauriWindowService extends DefaultWindowService { maximizable: config.maximizable, closable: config.closable, fullscreen: config.fullscreen, + theme: theme, }) + // Setup theme listener for this window + this.setupThemeListenerForWindow(webviewWindow) + return { label: config.label, async close() { @@ -38,7 +73,7 @@ export class TauriWindowService extends DefaultWindowService { }, async setTitle(title: string) { await webviewWindow.setTitle(title) - } + }, } } catch (error) { console.error('Error creating Tauri window:', error) @@ -46,10 +81,12 @@ export class TauriWindowService extends DefaultWindowService { } } - async getWebviewWindowByLabel(label: string): Promise { + async getWebviewWindowByLabel( + label: string + ): Promise { try { const existingWindow = await WebviewWindow.getByLabel(label) - + if (existingWindow) { return { label: label, @@ -67,10 +104,10 @@ export class TauriWindowService extends DefaultWindowService { }, async setTitle(title: string) { await existingWindow.setTitle(title) - } + }, } } - + return null } catch (error) { console.error('Error getting Tauri window by label:', error) @@ -135,8 +172,35 @@ export class TauriWindowService extends DefaultWindowService { center: true, }) } catch (error) { - console.error('Error opening local API server logs window in Tauri:', error) + console.error( + 'Error opening local API server 
logs window in Tauri:', + error + ) throw error } } + + private setupThemeListenerForWindow(window: WebviewWindow): void { + // Listen to theme change events from Tauri backend + import('@tauri-apps/api/event') + .then(({ listen }) => { + return listen('theme-changed', async (event) => { + const theme = event.payload + try { + if (theme === 'dark') { + await window.setTheme('dark') + } else if (theme === 'light') { + await window.setTheme('light') + } else { + await window.setTheme(null) + } + } catch (err) { + console.error('Failed to update window theme:', err) + } + }) + }) + .catch((err) => { + console.error('Failed to setup theme listener for window:', err) + }) + } } diff --git a/web-app/src/test/setup.ts b/web-app/src/test/setup.ts index b2286c2f3..79045cec0 100644 --- a/web-app/src/test/setup.ts +++ b/web-app/src/test/setup.ts @@ -17,6 +17,7 @@ vi.mock('@/lib/platform/const', () => ({ systemIntegrations: true, httpsProxy: true, defaultProviders: true, + projects: true, analytics: true, webAutoModelSelection: false, modelProviderSettings: true, @@ -25,6 +26,7 @@ vi.mock('@/lib/platform/const', () => ({ extensionsSettings: true, assistants: true, authentication: false, + attachments: true, } })) diff --git a/web-app/src/types/attachment.ts b/web-app/src/types/attachment.ts new file mode 100644 index 000000000..9ae23eccd --- /dev/null +++ b/web-app/src/types/attachment.ts @@ -0,0 +1,57 @@ +/** + * Unified attachment type for both images and documents + */ +export type Attachment = { + name: string + type: 'image' | 'document' + + // Common fields + size?: number + chunkCount?: number + processing?: boolean + processed?: boolean + error?: string + + // For images (before upload) + base64?: string + dataUrl?: string + mimeType?: string + + // For documents (local files) + path?: string + fileType?: string // e.g., 'pdf', 'docx' + + // After processing (images uploaded, documents ingested) + id?: string +} + +/** + * Helper to create image attachment + */ +export function createImageAttachment(data: { + name: string + base64: string + dataUrl: string + mimeType: string + size: number +}): Attachment { + return { + ...data, + type: 'image', + } +} + +/** + * Helper to create document attachment + */ +export function createDocumentAttachment(data: { + name: string + path: string + fileType?: string + size?: number +}): Attachment { + return { + ...data, + type: 'document', + } +} diff --git a/web-app/src/types/modelProviders.d.ts b/web-app/src/types/modelProviders.d.ts index 93cdd0df2..2225adee0 100644 --- a/web-app/src/types/modelProviders.d.ts +++ b/web-app/src/types/modelProviders.d.ts @@ -48,6 +48,7 @@ type ProviderObject = { settings: ProviderSetting[] models: Model[] persist?: boolean + custom_header?: ProviderCustomHeader[] | null } /** @@ -71,3 +72,8 @@ type ProxyOptions = { verifyHostSSL: boolean noProxy: string } + +type ProviderCustomHeader = { + header: string + value: string +} \ No newline at end of file diff --git a/web-app/src/utils/blurSupport.ts b/web-app/src/utils/blurSupport.ts new file mode 100644 index 000000000..6a87be598 --- /dev/null +++ b/web-app/src/utils/blurSupport.ts @@ -0,0 +1,65 @@ +/** + * Utility to check if the system supports blur/acrylic effects + * based on OS information from hardware data + */ + +import type { HardwareData } from '@/hooks/useHardware' + +/** + * Check if Windows supports blur effects based on build number + * Windows 10 build 17134 (version 1803) and later support acrylic effects + */ +function checkWindowsBlurSupport(osName: 
string): boolean { + // os_name format: "Windows 10 Pro (build 22631)" or similar + const buildMatch = osName.match(/build\s+(\d+)/i) + + if (buildMatch && buildMatch[1]) { + const build = parseInt(buildMatch[1], 10) + return build >= 17134 + } + + // If we can't detect build number, assume modern Windows supports blur + return true +} + +/** + * Check if Linux supports blur effects based on desktop environment + */ +function checkLinuxBlurSupport(): boolean { + // Check environment variables (only available in Tauri) + if (typeof window === 'undefined') return false + + // These checks would need to be done on the backend + // For now, we'll assume Linux with common DEs supports blur + return true +} + +/** + * Check if the system supports blur/acrylic effects + * + * @param hardwareData - Hardware data from the hardware plugin + * @returns true if blur effects are supported + */ +export function supportsBlurEffects(hardwareData: HardwareData | null): boolean { + if (!hardwareData) return false + + const { os_type, os_name } = hardwareData + + // macOS always supports blur/vibrancy effects + if (os_type === 'macos') { + return true + } + + // Windows: Check build number + if (os_type === 'windows') { + return checkWindowsBlurSupport(os_name) + } + + // Linux: Check desktop environment (simplified for now) + if (os_type === 'linux') { + return checkLinuxBlurSupport() + } + + // Unknown platforms: assume no blur support + return false +} diff --git a/web-app/src/utils/getModelToStart.ts b/web-app/src/utils/getModelToStart.ts new file mode 100644 index 000000000..bea719ec0 --- /dev/null +++ b/web-app/src/utils/getModelToStart.ts @@ -0,0 +1,69 @@ +import { localStorageKey } from '@/constants/localStorage' +import type { ModelInfo } from '@janhq/core' + +export const getLastUsedModel = (): { + provider: string + model: string +} | null => { + try { + const stored = localStorage.getItem(localStorageKey.lastUsedModel) + return stored ? 
JSON.parse(stored) : null + } catch (error) { + console.debug('Failed to get last used model from localStorage:', error) + return null + } +} + +// Helper function to determine which model to start +export const getModelToStart = (params: { + selectedModel?: ModelInfo | null + selectedProvider?: string | null + getProviderByName: (name: string) => ModelProvider | undefined +}): { model: string; provider: ModelProvider } | null => { + const { selectedModel, selectedProvider, getProviderByName } = params + + // Use last used model if available + const lastUsedModel = getLastUsedModel() + if (lastUsedModel) { + const provider = getProviderByName(lastUsedModel.provider) + if (provider && provider.models.some((m) => m.id === lastUsedModel.model)) { + return { model: lastUsedModel.model, provider } + } else { + // Last used model not found under provider, fallback to first llamacpp model + const llamacppProvider = getProviderByName('llamacpp') + if ( + llamacppProvider && + llamacppProvider.models && + llamacppProvider.models.length > 0 + ) { + return { + model: llamacppProvider.models[0].id, + provider: llamacppProvider, + } + } + } + } + + // Use selected model if available + if (selectedModel && selectedProvider) { + const provider = getProviderByName(selectedProvider) + if (provider) { + return { model: selectedModel.id, provider } + } + } + + // Use first model from llamacpp provider + const llamacppProvider = getProviderByName('llamacpp') + if ( + llamacppProvider && + llamacppProvider.models && + llamacppProvider.models.length > 0 + ) { + return { + model: llamacppProvider.models[0].id, + provider: llamacppProvider, + } + } + + return null +} diff --git a/web-app/src/utils/reasoning.ts b/web-app/src/utils/reasoning.ts index a189639f0..32b2958e6 100644 --- a/web-app/src/utils/reasoning.ts +++ b/web-app/src/utils/reasoning.ts @@ -6,10 +6,42 @@ import { } from '@janhq/core' // Helper function to get reasoning content from an object -function getReasoning(obj: { reasoning_content?: string | null; reasoning?: string | null } | null | undefined): string | null { +function getReasoning( + obj: + | { reasoning_content?: string | null; reasoning?: string | null } + | null + | undefined +): string | null { return obj?.reasoning_content ?? obj?.reasoning ?? null } +/** + * Normalize the content of a message by removing reasoning content. + * This is useful to ensure that reasoning content does not get sent to the model. 
+ * @param content + * @returns + */ +export function removeReasoningContent(content: string): string { + // Reasoning content should not be sent to the model + if (content.includes('')) { + const match = content.match(/([\s\S]*?)<\/think>/) + if (match?.index !== undefined) { + const splitIndex = match.index + match[0].length + content = content.slice(splitIndex).trim() + } + } + if (content.includes('<|channel|>analysis<|message|>')) { + const match = content.match( + /<\|channel\|>analysis<\|message\|>([\s\S]*?)<\|start\|>assistant<\|channel\|>final<\|message\|>/ + ) + if (match?.index !== undefined) { + const splitIndex = match.index + match[0].length + content = content.slice(splitIndex).trim() + } + } + return content +} + // Extract reasoning from a message (for completed responses) export function extractReasoningFromMessage( message: chatCompletionRequestMessage | ChatCompletionMessage diff --git a/web-app/tsconfig.app.json b/web-app/tsconfig.app.json index 0aefd5942..c672a79f1 100644 --- a/web-app/tsconfig.app.json +++ b/web-app/tsconfig.app.json @@ -25,7 +25,8 @@ /* Url */ "baseUrl": ".", "paths": { - "@/*": ["./src/*"] + "@/*": ["./src/*"], + "@janhq/conversational-extension": ["../extensions/conversational-extension/src/index.ts"] } }, "include": ["src"], diff --git a/web-app/tsconfig.json b/web-app/tsconfig.json index fec8c8e5c..ab1a13f13 100644 --- a/web-app/tsconfig.json +++ b/web-app/tsconfig.json @@ -7,7 +7,8 @@ "compilerOptions": { "baseUrl": ".", "paths": { - "@/*": ["./src/*"] + "@/*": ["./src/*"], + "@janhq/conversational-extension": ["../extensions/conversational-extension/src/index.ts"] } } } diff --git a/web-app/vite.config.ts b/web-app/vite.config.ts index befdaae57..298493889 100644 --- a/web-app/vite.config.ts +++ b/web-app/vite.config.ts @@ -64,6 +64,7 @@ export default defineConfig(({ mode }) => { resolve: { alias: { '@': path.resolve(__dirname, './src'), + '@janhq/conversational-extension': path.resolve(__dirname, '../extensions/conversational-extension/src/index.ts'), }, }, optimizeDeps: { diff --git a/web-app/vite.config.web.ts b/web-app/vite.config.web.ts index 4db29761d..05a340329 100644 --- a/web-app/vite.config.web.ts +++ b/web-app/vite.config.web.ts @@ -79,6 +79,7 @@ export default defineConfig({ resolve: { alias: { '@': path.resolve(__dirname, './src'), + '@janhq/conversational-extension': path.resolve(__dirname, '../extensions-web/src/conversational-web/index.ts'), }, }, define: { diff --git a/yarn.lock b/yarn.lock index c167e87f0..0929ec7d2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3491,7 +3491,7 @@ __metadata: "@vitest/coverage-v8": "npm:^2.1.8" "@vitest/ui": "npm:^2.1.8" eslint: "npm:8.57.0" - happy-dom: "npm:^15.11.6" + happy-dom: "npm:^20.0.0" pacote: "npm:^21.0.0" react: "npm:19.0.0" request: "npm:^2.88.2" @@ -3604,7 +3604,7 @@ __metadata: sonner: "npm:2.0.5" tailwind-merge: "npm:3.3.1" tailwindcss: "npm:4.1.4" - token.js: "npm:token.js-fork@0.7.27" + token.js: "npm:token.js-fork@0.7.29" tw-animate-css: "npm:1.2.8" typescript: "npm:5.9.2" typescript-eslint: "npm:8.31.0" @@ -10527,13 +10527,6 @@ __metadata: languageName: node linkType: hard -"entities@npm:^4.5.0": - version: 4.5.0 - resolution: "entities@npm:4.5.0" - checksum: 10c0/5b039739f7621f5d1ad996715e53d964035f75ad3b9a4d38c6b3804bb226e282ffeae2443624d8fdd9c47d8e926ae9ac009c54671243f0c3294c26af7cc85250 - languageName: node - linkType: hard - "entities@npm:^6.0.0": version: 6.0.1 resolution: "entities@npm:6.0.1" @@ -12257,14 +12250,14 @@ __metadata: languageName: node linkType: hard 
-"happy-dom@npm:^15.11.6": - version: 15.11.7 - resolution: "happy-dom@npm:15.11.7" +"happy-dom@npm:^20.0.0": + version: 20.0.1 + resolution: "happy-dom@npm:20.0.1" dependencies: - entities: "npm:^4.5.0" - webidl-conversions: "npm:^7.0.0" + "@types/node": "npm:^20.0.0" + "@types/whatwg-mimetype": "npm:^3.0.2" whatwg-mimetype: "npm:^3.0.0" - checksum: 10c0/22b08cac20192b08edf2e9c857ceeda8333a3301c4b5965a9550787b00db60d6d107c726390bd45a35305cd12ab086abd656bf957a408be0fcdc9fcd389f1973 + checksum: 10c0/fb867fcca270ebb185b6f2031721d3ea43c99e0699069187dceee99b14683baca243157feed2ce0da3ba8905b914262caa7bc8403384175a0ad2c81e19bf2f5a languageName: node linkType: hard @@ -19473,9 +19466,9 @@ __metadata: languageName: node linkType: hard -"token.js@npm:token.js-fork@0.7.27": - version: 0.7.27 - resolution: "token.js-fork@npm:0.7.27" +"token.js@npm:token.js-fork@0.7.29": + version: 0.7.29 + resolution: "token.js-fork@npm:0.7.29" dependencies: "@anthropic-ai/sdk": "npm:0.24.3" "@aws-sdk/client-bedrock-runtime": "npm:3.609.0" @@ -19486,7 +19479,7 @@ __metadata: mime-types: "npm:^2.1.35" nanoid: "npm:^5.0.7" openai: "npm:4.91.1" - checksum: 10c0/ec4e8e441b6747db29eed0d21e364eaf8d4636e3d8376bdd63d836499970de15357e8c0b2ef1e470027e7a2c8bc4924138a86f6d207469b6f0b6fb0f24f6d035 + checksum: 10c0/b045de56e06a1066b1fdfcca24bc57e7b10aa6cd1995b9ded27af699afcf0e72e216c3672cc3a85b10ce5b6ea81e7d1d453859f073861176b0c816e8f91e6627 languageName: node linkType: hard