diff --git a/.github/workflows/jan-server-web-ci.yml b/.github/workflows/jan-server-web-ci-dev.yml
similarity index 92%
rename from .github/workflows/jan-server-web-ci.yml
rename to .github/workflows/jan-server-web-ci-dev.yml
index 921d77bee..95dd5f91f 100644
--- a/.github/workflows/jan-server-web-ci.yml
+++ b/.github/workflows/jan-server-web-ci-dev.yml
@@ -11,6 +11,8 @@ on:
jobs:
build-and-preview:
runs-on: [ubuntu-24-04-docker]
+ env:
+ JAN_API_BASE: "https://api-dev.jan.ai/v1"
permissions:
pull-requests: write
contents: write
@@ -50,7 +52,7 @@ jobs:
- name: Build docker image
run: |
- docker build -t ${{ steps.vars.outputs.FULL_IMAGE }} .
+ docker build --build-arg JAN_API_BASE=${{ env.JAN_API_BASE }} -t ${{ steps.vars.outputs.FULL_IMAGE }} .
- name: Push docker image
if: github.event_name == 'push'
diff --git a/.github/workflows/jan-server-web-cicd-prod.yml b/.github/workflows/jan-server-web-ci-prod.yml
similarity index 89%
rename from .github/workflows/jan-server-web-cicd-prod.yml
rename to .github/workflows/jan-server-web-ci-prod.yml
index 54b776adf..dda1f3672 100644
--- a/.github/workflows/jan-server-web-cicd-prod.yml
+++ b/.github/workflows/jan-server-web-ci-prod.yml
@@ -13,7 +13,8 @@ jobs:
deployments: write
pull-requests: write
env:
- JAN_API_BASE: "https://api.jan.ai/jan/v1"
+ JAN_API_BASE: "https://api.jan.ai/v1"
+ GA_MEASUREMENT_ID: "G-YK53MX8M8M"
CLOUDFLARE_PROJECT_NAME: "jan-server-web"
steps:
- uses: actions/checkout@v4
@@ -41,6 +42,9 @@ jobs:
- name: Install dependencies
run: make config-yarn && yarn install && yarn build:core && make build-web-app
+ env:
+ JAN_API_BASE: ${{ env.JAN_API_BASE }}
+ GA_MEASUREMENT_ID: ${{ env.GA_MEASUREMENT_ID }}
- name: Publish to Cloudflare Pages Production
uses: cloudflare/pages-action@v1
diff --git a/.github/workflows/jan-server-web-ci-stag.yml b/.github/workflows/jan-server-web-ci-stag.yml
new file mode 100644
index 000000000..dda88390b
--- /dev/null
+++ b/.github/workflows/jan-server-web-ci-stag.yml
@@ -0,0 +1,60 @@
+name: Jan Web Server - build image and push to Harbor Registry (staging)
+
+on:
+ push:
+ branches:
+ - stag-web
+ pull_request:
+ branches:
+ - stag-web
+
+jobs:
+ build-and-preview:
+ runs-on: [ubuntu-24-04-docker]
+ env:
+ JAN_API_BASE: "https://api-stag.jan.ai/v1"
+ permissions:
+ pull-requests: write
+ contents: write
+ steps:
+ - name: Checkout source repo
+ uses: actions/checkout@v4
+
+ - name: Login to Harbor Registry
+ uses: docker/login-action@v3
+ with:
+ registry: registry.menlo.ai
+ username: ${{ secrets.HARBOR_USERNAME }}
+ password: ${{ secrets.HARBOR_PASSWORD }}
+
+ - name: Install dependencies
+ run: |
+ (type -p wget >/dev/null || (sudo apt update && sudo apt install wget -y)) \
+ && sudo mkdir -p -m 755 /etc/apt/keyrings \
+ && out=$(mktemp) && wget -nv -O$out https://cli.github.com/packages/githubcli-archive-keyring.gpg \
+ && cat $out | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
+ && sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
+ && sudo mkdir -p -m 755 /etc/apt/sources.list.d \
+ && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
+ && sudo apt update
+ sudo apt-get install -y jq gettext
+
+ - name: Set image tag
+ id: vars
+ run: |
+ if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+ IMAGE_TAG="web:preview-${{ github.sha }}"
+ else
+ IMAGE_TAG="web:stag-${{ github.sha }}"
+ fi
+ echo "IMAGE_TAG=${IMAGE_TAG}" >> $GITHUB_OUTPUT
+ echo "FULL_IMAGE=registry.menlo.ai/jan-server/${IMAGE_TAG}" >> $GITHUB_OUTPUT
+
+ - name: Build docker image
+ run: |
+ docker build --build-arg JAN_API_BASE=${{ env.JAN_API_BASE }} -t ${{ steps.vars.outputs.FULL_IMAGE }} .
+
+ - name: Push docker image
+ if: github.event_name == 'push'
+ run: |
+ docker push ${{ steps.vars.outputs.FULL_IMAGE }}
diff --git a/.github/workflows/template-tauri-build-macos-external.yml b/.github/workflows/template-tauri-build-macos-external.yml
index e69e298a6..8f61b86fa 100644
--- a/.github/workflows/template-tauri-build-macos-external.yml
+++ b/.github/workflows/template-tauri-build-macos-external.yml
@@ -89,7 +89,6 @@ jobs:
- name: Build app
run: |
- rustup target add x86_64-apple-darwin
make build
env:
APP_PATH: '.'
diff --git a/.github/workflows/template-tauri-build-macos.yml b/.github/workflows/template-tauri-build-macos.yml
index 40cf4e839..4646041cf 100644
--- a/.github/workflows/template-tauri-build-macos.yml
+++ b/.github/workflows/template-tauri-build-macos.yml
@@ -92,31 +92,6 @@ jobs:
run: |
cargo install ctoml
- - name: Create bun and uv universal
- run: |
- mkdir -p ./src-tauri/resources/bin/
- cd ./src-tauri/resources/bin/
- curl -L -o bun-darwin-x64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-x64.zip
- curl -L -o bun-darwin-aarch64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-aarch64.zip
- unzip bun-darwin-x64.zip
- unzip bun-darwin-aarch64.zip
- lipo -create -output bun-universal-apple-darwin bun-darwin-x64/bun bun-darwin-aarch64/bun
- cp -f bun-darwin-aarch64/bun bun-aarch64-apple-darwin
- cp -f bun-darwin-x64/bun bun-x86_64-apple-darwin
- cp -f bun-universal-apple-darwin bun
-
- curl -L -o uv-x86_64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-x86_64-apple-darwin.tar.gz
- curl -L -o uv-arm64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-aarch64-apple-darwin.tar.gz
- tar -xzf uv-x86_64.tar.gz
- tar -xzf uv-arm64.tar.gz
- mv uv-x86_64-apple-darwin uv-x86_64
- mv uv-aarch64-apple-darwin uv-aarch64
- lipo -create -output uv-universal-apple-darwin uv-x86_64/uv uv-aarch64/uv
- cp -f uv-x86_64/uv uv-x86_64-apple-darwin
- cp -f uv-aarch64/uv uv-aarch64-apple-darwin
- cp -f uv-universal-apple-darwin uv
- ls -la
-
- name: Update app version based on latest release tag with build number
run: |
echo "Version: ${{ inputs.new_version }}"
@@ -167,7 +142,6 @@ jobs:
- name: Build app
run: |
- rustup target add x86_64-apple-darwin
make build
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/Dockerfile b/Dockerfile
index 4271d06d0..8431f6115 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,9 @@
# Stage 1: Build stage with Node.js and Yarn v4
FROM node:20-alpine AS builder
+ARG JAN_API_BASE=https://api-dev.jan.ai/v1
+ENV JAN_API_BASE=$JAN_API_BASE
+
# Install build dependencies
RUN apk add --no-cache \
make \
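Note on how the new build argument reaches the web app: the workflows pass JAN_API_BASE to `docker build`, the Dockerfile re-exports the ARG as an ENV, and the bundle consumes it as the compile-time constant declared in extensions-web/src/conversational-web/api.ts (`declare const JAN_API_BASE: string`). A minimal sketch of that injection step, assuming an esbuild-style `define`; the actual bundler wiring in the repo may differ:

```ts
// build-config.ts: hypothetical bundler step, shown only to illustrate the flow.
import { build } from 'esbuild'

// Value supplied via `docker build --build-arg JAN_API_BASE=...` and re-exported
// by the Dockerfile as an environment variable for the build stage.
const janApiBase = process.env.JAN_API_BASE ?? 'https://api-dev.jan.ai/v1'

await build({
  entryPoints: ['src/index.ts'],
  bundle: true,
  outfile: 'dist/index.js',
  // Replaces every reference to the declared JAN_API_BASE constant at compile time.
  define: { JAN_API_BASE: JSON.stringify(janApiBase) },
})
```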
diff --git a/Makefile b/Makefile
index d10b26fd9..085e42e74 100644
--- a/Makefile
+++ b/Makefile
@@ -30,6 +30,17 @@ endif
yarn build:core
yarn build:extensions && yarn build:extensions-web
+# Install required Rust targets for macOS universal builds
+install-rust-targets:
+ifeq ($(shell uname -s),Darwin)
+ @echo "Detected macOS, installing universal build targets..."
+ rustup target add x86_64-apple-darwin
+ rustup target add aarch64-apple-darwin
+ @echo "Rust targets installed successfully!"
+else
+ @echo "Not macOS; skipping Rust target installation."
+endif
+
dev: install-and-build
yarn download:bin
yarn download:lib
@@ -69,13 +80,8 @@ test: lint
cargo test --manifest-path src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
cargo test --manifest-path src-tauri/utils/Cargo.toml
-# Builds and publishes the app
-build-and-publish: install-and-build
- yarn build
-
# Build
-build: install-and-build
- yarn download:lib
+build: install-and-build install-rust-targets
yarn build
clean:
diff --git a/core/src/browser/extension.ts b/core/src/browser/extension.ts
index 159d4ac68..78f90ba16 100644
--- a/core/src/browser/extension.ts
+++ b/core/src/browser/extension.ts
@@ -126,16 +126,17 @@ export abstract class BaseExtension implements ExtensionType {
settings.forEach((setting) => {
// Keep setting value
if (setting.controllerProps && Array.isArray(oldSettings))
- setting.controllerProps.value = oldSettings.find(
- (e: any) => e.key === setting.key
- )?.controllerProps?.value
+ setting.controllerProps.value =
+ oldSettings.find((e: any) => e.key === setting.key)?.controllerProps?.value ??
+ setting.controllerProps.value
if ('options' in setting.controllerProps)
setting.controllerProps.options = setting.controllerProps.options?.length
? setting.controllerProps.options
: oldSettings.find((e: any) => e.key === setting.key)?.controllerProps?.options
if ('recommended' in setting.controllerProps) {
- const oldRecommended = oldSettings.find((e: any) => e.key === setting.key)?.controllerProps?.recommended
- if (oldRecommended !== undefined && oldRecommended !== "") {
+ const oldRecommended = oldSettings.find((e: any) => e.key === setting.key)
+ ?.controllerProps?.recommended
+ if (oldRecommended !== undefined && oldRecommended !== '') {
setting.controllerProps.recommended = oldRecommended
}
}
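The change above fixes a settings-migration edge case: previously, a setting with no counterpart in `oldSettings` had its default `value` overwritten with `undefined`; the `??` fallback now keeps the extension's default. A small sketch of the behaviour, with types reduced to the fields the merge touches:

```ts
// Reduced, hypothetical shapes: only what the merge logic reads and writes.
type Setting = { key: string; controllerProps: { value?: unknown } }

const oldSettings: Setting[] = [{ key: 'ctx_len', controllerProps: { value: 8192 } }]
const fresh: Setting = { key: 'flash_attn', controllerProps: { value: true } }

// flash_attn is missing from oldSettings, so the lookup yields undefined and the
// nullish coalescing keeps the new default instead of clobbering it.
fresh.controllerProps.value =
  oldSettings.find((e) => e.key === fresh.key)?.controllerProps?.value ??
  fresh.controllerProps.value

console.log(fresh.controllerProps.value) // true
```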
diff --git a/core/src/browser/extensions/engines/AIEngine.ts b/core/src/browser/extensions/engines/AIEngine.ts
index 7a223e468..0e8a75fca 100644
--- a/core/src/browser/extensions/engines/AIEngine.ts
+++ b/core/src/browser/extensions/engines/AIEngine.ts
@@ -13,7 +13,7 @@ export interface chatCompletionRequestMessage {
}
export interface Content {
- type: 'text' | 'input_image' | 'input_audio'
+ type: 'text' | 'image_url' | 'input_audio'
text?: string
image_url?: string
input_audio?: InputAudio
@@ -54,6 +54,8 @@ export type ToolChoice = 'none' | 'auto' | 'required' | ToolCallSpec
export interface chatCompletionRequest {
model: string // Model ID, though for local it might be implicit via sessionInfo
messages: chatCompletionRequestMessage[]
+ thread_id?: string // Thread/conversation ID for context tracking
+ return_progress?: boolean
tools?: Tool[]
tool_choice?: ToolChoice
// Core sampling parameters
@@ -119,6 +121,13 @@ export interface chatCompletionChunkChoice {
finish_reason?: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call' | null
}
+export interface chatCompletionPromptProgress {
+ cache: number
+ processed: number
+ time_ms: number
+ total: number
+}
+
export interface chatCompletionChunk {
id: string
object: 'chat.completion.chunk'
@@ -126,6 +135,7 @@ export interface chatCompletionChunk {
model: string
choices: chatCompletionChunkChoice[]
system_fingerprint?: string
+ prompt_progress?: chatCompletionPromptProgress
}
export interface chatCompletionChoice {
@@ -173,6 +183,7 @@ export interface SessionInfo {
model_id: string //name of the model
model_path: string // path of the loaded model
api_key: string
+ mmproj_path?: string
}
export interface UnloadResult {
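For context on the new `prompt_progress` field: it lets backends report prompt-processing progress on streamed chunks before any tokens arrive. A hedged sketch of what a consumer might do with it; the payload values and the empty `choices` array are illustrative, not taken from the backend:

```ts
// Shape mirrors the chatCompletionChunk / chatCompletionPromptProgress interfaces above.
const chunk = {
  id: 'chatcmpl-123',
  object: 'chat.completion.chunk' as const,
  created: 1758153600,
  model: 'jan-v1-4b',
  choices: [],
  prompt_progress: { cache: 512, processed: 1024, time_ms: 350, total: 4096 },
}

// A UI could surface prompt processing as a percentage while the context is ingested.
if (chunk.prompt_progress) {
  const { processed, total } = chunk.prompt_progress
  console.log(`Prompt processing: ${((processed / total) * 100).toFixed(0)}%`)
}
```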
diff --git a/docs/public/assets/images/changelog/Jan_v0.4.3.gif b/docs/public/assets/images/changelog/Jan_v0.4.3.gif
deleted file mode 100644
index c6dc943f7..000000000
Binary files a/docs/public/assets/images/changelog/Jan_v0.4.3.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/Jan_v0.4.5.gif b/docs/public/assets/images/changelog/Jan_v0.4.5.gif
deleted file mode 100644
index 17ed2821b..000000000
Binary files a/docs/public/assets/images/changelog/Jan_v0.4.5.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/changelog0.6.6.gif b/docs/public/assets/images/changelog/changelog0.6.6.gif
deleted file mode 100644
index a4e710270..000000000
Binary files a/docs/public/assets/images/changelog/changelog0.6.6.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-images.gif b/docs/public/assets/images/changelog/jan-images.gif
deleted file mode 100644
index eb7731397..000000000
Binary files a/docs/public/assets/images/changelog/jan-images.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-import-vlm-model.gif b/docs/public/assets/images/changelog/jan-import-vlm-model.gif
new file mode 100644
index 000000000..d9f5bb7ba
Binary files /dev/null and b/docs/public/assets/images/changelog/jan-import-vlm-model.gif differ
diff --git a/docs/public/assets/images/changelog/jan-v0-4-14-phi3.gif b/docs/public/assets/images/changelog/jan-v0-4-14-phi3.gif
deleted file mode 100644
index 845554e1c..000000000
Binary files a/docs/public/assets/images/changelog/jan-v0-4-14-phi3.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-v0-5-13.gif b/docs/public/assets/images/changelog/jan-v0-5-13.gif
deleted file mode 100644
index 5466692e8..000000000
Binary files a/docs/public/assets/images/changelog/jan-v0-5-13.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-v0-5-14-deepseek-r1.gif b/docs/public/assets/images/changelog/jan-v0-5-14-deepseek-r1.gif
deleted file mode 100644
index c1732ec60..000000000
Binary files a/docs/public/assets/images/changelog/jan-v0-5-14-deepseek-r1.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-v0-5-17-gemm3-patch.gif b/docs/public/assets/images/changelog/jan-v0-5-17-gemm3-patch.gif
deleted file mode 100644
index f28eebb2c..000000000
Binary files a/docs/public/assets/images/changelog/jan-v0-5-17-gemm3-patch.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-v0.5.10.gif b/docs/public/assets/images/changelog/jan-v0.5.10.gif
deleted file mode 100644
index 31e6ecdc1..000000000
Binary files a/docs/public/assets/images/changelog/jan-v0.5.10.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-v0.5.11.gif b/docs/public/assets/images/changelog/jan-v0.5.11.gif
deleted file mode 100644
index 51cfb70a0..000000000
Binary files a/docs/public/assets/images/changelog/jan-v0.5.11.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-v0.5.12.gif b/docs/public/assets/images/changelog/jan-v0.5.12.gif
deleted file mode 100644
index dfa279ded..000000000
Binary files a/docs/public/assets/images/changelog/jan-v0.5.12.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-v0.5.7.gif b/docs/public/assets/images/changelog/jan-v0.5.7.gif
deleted file mode 100644
index cddbf62b0..000000000
Binary files a/docs/public/assets/images/changelog/jan-v0.5.7.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-v0.5.8.gif b/docs/public/assets/images/changelog/jan-v0.5.8.gif
deleted file mode 100644
index 7a1063c79..000000000
Binary files a/docs/public/assets/images/changelog/jan-v0.5.8.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-v0.5.9.gif b/docs/public/assets/images/changelog/jan-v0.5.9.gif
deleted file mode 100644
index 451e7237c..000000000
Binary files a/docs/public/assets/images/changelog/jan-v0.5.9.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan-v0.6.10-auto-optimize.gif b/docs/public/assets/images/changelog/jan-v0.6.10-auto-optimize.gif
new file mode 100644
index 000000000..cbd0c7a1c
Binary files /dev/null and b/docs/public/assets/images/changelog/jan-v0.6.10-auto-optimize.gif differ
diff --git a/docs/public/assets/images/changelog/jan_cohere_commandr.gif b/docs/public/assets/images/changelog/jan_cohere_commandr.gif
deleted file mode 100644
index d22e1e6ce..000000000
Binary files a/docs/public/assets/images/changelog/jan_cohere_commandr.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_hugging_face.gif b/docs/public/assets/images/changelog/jan_hugging_face.gif
deleted file mode 100644
index de98689b3..000000000
Binary files a/docs/public/assets/images/changelog/jan_hugging_face.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_llama3.gif b/docs/public/assets/images/changelog/jan_llama3.gif
deleted file mode 100644
index aee9415e8..000000000
Binary files a/docs/public/assets/images/changelog/jan_llama3.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_mistral_api.gif b/docs/public/assets/images/changelog/jan_mistral_api.gif
deleted file mode 100644
index cc44ae167..000000000
Binary files a/docs/public/assets/images/changelog/jan_mistral_api.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_nano_128.gif b/docs/public/assets/images/changelog/jan_nano_128.gif
deleted file mode 100644
index a439667da..000000000
Binary files a/docs/public/assets/images/changelog/jan_nano_128.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_nvidia_nim_support.gif b/docs/public/assets/images/changelog/jan_nvidia_nim_support.gif
deleted file mode 100644
index d67edfc95..000000000
Binary files a/docs/public/assets/images/changelog/jan_nvidia_nim_support.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_product_update_feature.gif b/docs/public/assets/images/changelog/jan_product_update_feature.gif
deleted file mode 100644
index da17adbdd..000000000
Binary files a/docs/public/assets/images/changelog/jan_product_update_feature.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_supports_claude_3_5.gif b/docs/public/assets/images/changelog/jan_supports_claude_3_5.gif
deleted file mode 100644
index 9c9ea8c8b..000000000
Binary files a/docs/public/assets/images/changelog/jan_supports_claude_3_5.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_update_groq.gif b/docs/public/assets/images/changelog/jan_update_groq.gif
deleted file mode 100644
index 3527bf3b6..000000000
Binary files a/docs/public/assets/images/changelog/jan_update_groq.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_update_latex.gif b/docs/public/assets/images/changelog/jan_update_latex.gif
deleted file mode 100644
index 11a6fc4f6..000000000
Binary files a/docs/public/assets/images/changelog/jan_update_latex.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_v0.4.13_update.gif b/docs/public/assets/images/changelog/jan_v0.4.13_update.gif
deleted file mode 100644
index 5141eddfc..000000000
Binary files a/docs/public/assets/images/changelog/jan_v0.4.13_update.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_v0.5.0.gif b/docs/public/assets/images/changelog/jan_v0.5.0.gif
deleted file mode 100644
index 305c7d657..000000000
Binary files a/docs/public/assets/images/changelog/jan_v0.5.0.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jan_v0_4_13_openai_gpt4o.gif b/docs/public/assets/images/changelog/jan_v0_4_13_openai_gpt4o.gif
deleted file mode 100644
index 480565cc0..000000000
Binary files a/docs/public/assets/images/changelog/jan_v0_4_13_openai_gpt4o.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/janv0.5.3.gif b/docs/public/assets/images/changelog/janv0.5.3.gif
deleted file mode 100644
index c2226a200..000000000
Binary files a/docs/public/assets/images/changelog/janv0.5.3.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/jn128.gif b/docs/public/assets/images/changelog/jn128.gif
deleted file mode 100644
index 90b77b48f..000000000
Binary files a/docs/public/assets/images/changelog/jn128.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/mcplinear2.gif b/docs/public/assets/images/changelog/mcplinear2.gif
deleted file mode 100644
index c013e8e3b..000000000
Binary files a/docs/public/assets/images/changelog/mcplinear2.gif and /dev/null differ
diff --git a/docs/public/assets/images/changelog/release_v0_6_5.gif b/docs/public/assets/images/changelog/release_v0_6_5.gif
deleted file mode 100644
index 222d8ee80..000000000
Binary files a/docs/public/assets/images/changelog/release_v0_6_5.gif and /dev/null differ
diff --git a/docs/public/assets/videos/enable-tool-call-for-models.mp4 b/docs/public/assets/videos/enable-tool-call-for-models.mp4
deleted file mode 100644
index d6ef37265..000000000
Binary files a/docs/public/assets/videos/enable-tool-call-for-models.mp4 and /dev/null differ
diff --git a/docs/public/assets/videos/jan-nano-demo.mp4 b/docs/public/assets/videos/jan-nano-demo.mp4
deleted file mode 100644
index efcadf999..000000000
Binary files a/docs/public/assets/videos/jan-nano-demo.mp4 and /dev/null differ
diff --git a/docs/public/assets/videos/mcpjupyter.mp4 b/docs/public/assets/videos/mcpjupyter.mp4
deleted file mode 100644
index b9bdf79ed..000000000
Binary files a/docs/public/assets/videos/mcpjupyter.mp4 and /dev/null differ
diff --git a/docs/src/pages/changelog/2023-12-21-faster-inference-across-platform.mdx b/docs/src/pages/changelog/2023-12-21-faster-inference-across-platform.mdx
index d13ff1a96..a4c94b302 100644
--- a/docs/src/pages/changelog/2023-12-21-faster-inference-across-platform.mdx
+++ b/docs/src/pages/changelog/2023-12-21-faster-inference-across-platform.mdx
@@ -3,12 +3,12 @@ title: "Faster inference across: Mac, Windows, Linux, and GPUs"
version: 0.4.3
description: ""
date: 2023-12-21
-ogImage: "/assets/images/changelog/Jan_v0.4.3.gif"
+ogImage: "https://catalog.jan.ai/docs/Jan_v0.4.3.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
### Highlights 🎉
diff --git a/docs/src/pages/changelog/2024-01-29-local-api-server.mdx b/docs/src/pages/changelog/2024-01-29-local-api-server.mdx
index 3a00919fb..8967abffe 100644
--- a/docs/src/pages/changelog/2024-01-29-local-api-server.mdx
+++ b/docs/src/pages/changelog/2024-01-29-local-api-server.mdx
@@ -3,12 +3,12 @@ title: "Local API server"
version: 0.4.5
description: ""
date: 2024-01-29
-ogImage: "/assets/images/changelog/Jan_v0.4.5.gif"
+ogImage: "https://catalog.jan.ai/docs/Jan_v0.4.5.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
### Highlights 🎉
diff --git a/docs/src/pages/changelog/2024-02-05-jan-data-folder.mdx b/docs/src/pages/changelog/2024-02-05-jan-data-folder.mdx
index 5cef9c354..b39a5fd8e 100644
--- a/docs/src/pages/changelog/2024-02-05-jan-data-folder.mdx
+++ b/docs/src/pages/changelog/2024-02-05-jan-data-folder.mdx
@@ -3,12 +3,12 @@ title: "Jan Data Folder"
version: 0.4.6
description: ""
date: 2024-02-05
-ogImage: "/assets/images/changelog/jan_product_update_feature.gif"
+ogImage: "https://catalog.jan.ai/docs/jan_product_update_feature.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
### Highlights 🎉
diff --git a/docs/src/pages/changelog/2024-03-06-ui-revamp-settings.mdx b/docs/src/pages/changelog/2024-03-06-ui-revamp-settings.mdx
index 037d21a8f..db7e73925 100644
--- a/docs/src/pages/changelog/2024-03-06-ui-revamp-settings.mdx
+++ b/docs/src/pages/changelog/2024-03-06-ui-revamp-settings.mdx
@@ -3,12 +3,12 @@ title: "New UI & Codestral Support"
version: 0.5.0
description: "Revamped Jan's UI to make it clearer and more user-friendly"
date: 2024-06-03
-ogImage: "/assets/images/changelog/jan_v0.5.0.gif"
+ogImage: "https://catalog.jan.ai/docs/jan_v0.5.0.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
Revamped Jan's UI to make it clearer and more user-friendly.
diff --git a/docs/src/pages/changelog/2024-04-02-groq-api-integration.mdx b/docs/src/pages/changelog/2024-04-02-groq-api-integration.mdx
index 0a35794ab..1efc1a22b 100644
--- a/docs/src/pages/changelog/2024-04-02-groq-api-integration.mdx
+++ b/docs/src/pages/changelog/2024-04-02-groq-api-integration.mdx
@@ -3,12 +3,12 @@ title: "Groq API Integration"
version: 0.4.10
description: ""
date: 2024-04-02
-ogImage: "/assets/images/changelog/jan_update_groq.gif"
+ogImage: "https://catalog.jan.ai/docs/jan_update_groq.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
### Highlights 🎉
diff --git a/docs/src/pages/changelog/2024-04-15-new-mistral-extension.mdx b/docs/src/pages/changelog/2024-04-15-new-mistral-extension.mdx
index 3ce6cbf81..dc2905596 100644
--- a/docs/src/pages/changelog/2024-04-15-new-mistral-extension.mdx
+++ b/docs/src/pages/changelog/2024-04-15-new-mistral-extension.mdx
@@ -3,12 +3,12 @@ title: "New Mistral Extension"
version: 0.4.11
description: "Jan has a new Mistral Extension letting you chat with larger Mistral models via Mistral API"
date: 2024-04-15
-ogImage: "/assets/images/changelog/jan_mistral_api.gif"
+ogImage: "https://catalog.jan.ai/docs/jan_mistral_api.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
### Highlights 🎉
diff --git a/docs/src/pages/changelog/2024-04-25-llama3-command-r-hugginface.mdx b/docs/src/pages/changelog/2024-04-25-llama3-command-r-hugginface.mdx
index 7c6b01a3e..485a72b74 100644
--- a/docs/src/pages/changelog/2024-04-25-llama3-command-r-hugginface.mdx
+++ b/docs/src/pages/changelog/2024-04-25-llama3-command-r-hugginface.mdx
@@ -3,29 +3,29 @@ title: 'Jan now supports Llama3 and Command R+'
version: 0.4.12
description: "Jan has added compatibility with Llama3 & Command R+"
date: 2024-04-25
-ogImage: "/assets/images/changelog/jan_llama3.gif"
+ogImage: "https://catalog.jan.ai/docs/jan_llama3.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
Jan has added compatibility with Meta’s open-source language model, `Llama3`, through the integration with `llamacpp` (thanks to [@ggerganov](https://github.com/ggerganov)).
Additionally, `Command R+` is now supported. It is the first open-source model to surpass GPT-4 on the [LMSys leaderboard](https://chat.lmsys.org/?leaderboard).
-
+
## Import Huggingface models directly
Users can now import Huggingface models into Jan. Simply copy the model’s link from Huggingface and paste it into the search bar on Jan Hub.
-
+
## Enhanced LaTeX understanding
Jan now understands LaTeX, allowing users to process and understand complex mathematical expressions more effectively.
-
+
For more details, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.4.12).
diff --git a/docs/src/pages/changelog/2024-05-20-llamacpp-upgrade-new-remote-models.mdx b/docs/src/pages/changelog/2024-05-20-llamacpp-upgrade-new-remote-models.mdx
index c3ecb905d..8b2670893 100644
--- a/docs/src/pages/changelog/2024-05-20-llamacpp-upgrade-new-remote-models.mdx
+++ b/docs/src/pages/changelog/2024-05-20-llamacpp-upgrade-new-remote-models.mdx
@@ -3,12 +3,12 @@ title: "Jan now supports more GGUF models"
version: 0.4.13
description: "We rebased to llamacpp b2865."
date: 2024-05-20
-ogImage: "/assets/images/changelog/jan_v0.4.13_update.gif"
+ogImage: "https://catalog.jan.ai/docs/jan_v0.4.13_update.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
With this release, more GGUF models should work now! We rebased to llamacpp b2865!
@@ -20,12 +20,12 @@ Jan now supports `Anthropic API` models `Command R` and `Command R+`, along with
Jan supports `Martian`, a dynamic LLM router that routes between multiple models and allows users to reduce costs by 20% to 97%. Jan also supports `OpenRouter`, helping users select the best model for each query.
-
+
## GPT-4o Access
Users can now connect to OpenAI's new model GPT-4o.
-
+
For more details, see the [GitHub release notes.](https://github.com/menloresearch/jan/releases/tag/v0.4.13)
diff --git a/docs/src/pages/changelog/2024-05-28-cohere-aya-23-8b-35b-phi-3-medium.mdx b/docs/src/pages/changelog/2024-05-28-cohere-aya-23-8b-35b-phi-3-medium.mdx
index a6823050e..9d46987d1 100644
--- a/docs/src/pages/changelog/2024-05-28-cohere-aya-23-8b-35b-phi-3-medium.mdx
+++ b/docs/src/pages/changelog/2024-05-28-cohere-aya-23-8b-35b-phi-3-medium.mdx
@@ -3,12 +3,12 @@ title: "Jan now compatible with Aya 23 8B & 35B and Phi-3-Medium"
version: 0.4.14
description: "Jan now supports Cohere's Aya 23 8B & 35B and Microsoft's Phi-3-Medium."
date: 2024-05-28
-ogImage: "/assets/images/changelog/jan-v0-4-14-phi3.gif"
+ogImage: "https://catalog.jan.ai/docs/jan-v0-4-14-phi3.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
Jan now supports `Cohere`'s new models `Aya 23 (8B)` & `Aya 23 (35B)` and `Microsoft`'s `Phi-3-Medium`.
diff --git a/docs/src/pages/changelog/2024-06-21-nvidia-nim-support.mdx b/docs/src/pages/changelog/2024-06-21-nvidia-nim-support.mdx
index 27ead7ff2..a081945fc 100644
--- a/docs/src/pages/changelog/2024-06-21-nvidia-nim-support.mdx
+++ b/docs/src/pages/changelog/2024-06-21-nvidia-nim-support.mdx
@@ -3,12 +3,12 @@ title: "Jan supports NVIDIA NIM"
version: 0.5.1
description: "Jan has integrated NVIDIA NIM and supports Qwen 2 7B"
date: 2024-06-21
-ogImage: "/assets/images/changelog/jan_nvidia_nim_support.gif"
+ogImage: "https://catalog.jan.ai/docs/jan_nvidia_nim_support.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
## NVIDIA NIM
diff --git a/docs/src/pages/changelog/2024-07-15-claude-3-5-support.mdx b/docs/src/pages/changelog/2024-07-15-claude-3-5-support.mdx
index b80c31cfe..afc6589eb 100644
--- a/docs/src/pages/changelog/2024-07-15-claude-3-5-support.mdx
+++ b/docs/src/pages/changelog/2024-07-15-claude-3-5-support.mdx
@@ -3,12 +3,12 @@ title: "Jan supports Claude 3.5 Sonnet"
version: 0.5.2
description: "You can run Claude 3.5 Sonnet in Jan"
date: 2024-07-15
-ogImage: "/assets/images/changelog/jan_supports_claude_3_5.gif"
+ogImage: "https://catalog.jan.ai/docs/jan_supports_claude_3_5.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
## Claude 3.5 Sonnet
diff --git a/docs/src/pages/changelog/2024-09-01-llama3-1-gemma2-support.mdx b/docs/src/pages/changelog/2024-09-01-llama3-1-gemma2-support.mdx
index 413a7380a..dc7f07dfb 100644
--- a/docs/src/pages/changelog/2024-09-01-llama3-1-gemma2-support.mdx
+++ b/docs/src/pages/changelog/2024-09-01-llama3-1-gemma2-support.mdx
@@ -3,12 +3,12 @@ title: "v0.5.3 is out with stability improvements!"
version: 0.5.3
description: "You can run Llama 3.1 and Gemma 2 in Jan"
date: 2024-08-29
-ogImage: "/assets/images/changelog/janv0.5.3.gif"
+ogImage: "https://catalog.jan.ai/docs/janv0.5.3.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
## Llama 3.1 and Gemma 2 Support
diff --git a/docs/src/pages/changelog/2024-10-24-jan-stable.mdx b/docs/src/pages/changelog/2024-10-24-jan-stable.mdx
index 3abdd568e..ddba96367 100644
--- a/docs/src/pages/changelog/2024-10-24-jan-stable.mdx
+++ b/docs/src/pages/changelog/2024-10-24-jan-stable.mdx
@@ -3,12 +3,12 @@ title: "Jan has Stable, Beta and Nightly versions"
version: 0.5.7
description: "This release is mostly focused on bug fixes."
date: 2024-10-24
-ogImage: "/assets/images/changelog/jan-v0.5.7.gif"
+ogImage: "https://catalog.jan.ai/docs/jan-v0.5.7.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
Highlights 🎉
diff --git a/docs/src/pages/changelog/2024-11-22-jan-bugs.mdx b/docs/src/pages/changelog/2024-11-22-jan-bugs.mdx
index 413d32876..7f8ca962e 100644
--- a/docs/src/pages/changelog/2024-11-22-jan-bugs.mdx
+++ b/docs/src/pages/changelog/2024-11-22-jan-bugs.mdx
@@ -3,12 +3,12 @@ title: "Model downloads & running issues fixed"
version: 0.5.9
description: "Jan v0.5.9 is here: fixing what needed fixing."
date: 2024-11-22
-ogImage: "/assets/images/changelog/jan-v0.5.9.gif"
+ogImage: "https://catalog.jan.ai/docs/jan-v0.5.9.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
Jan v0.5.9 is here: fixing what needed fixing
diff --git a/docs/src/pages/changelog/2024-11.14-jan-supports-qwen-coder.mdx b/docs/src/pages/changelog/2024-11.14-jan-supports-qwen-coder.mdx
index 72f650021..2a7e0953c 100644
--- a/docs/src/pages/changelog/2024-11.14-jan-supports-qwen-coder.mdx
+++ b/docs/src/pages/changelog/2024-11.14-jan-supports-qwen-coder.mdx
@@ -3,12 +3,12 @@ title: "Jan supports Qwen2.5-Coder 14B & 32B"
version: 0.5.8
description: "Jan v0.5.8 is out: Jan supports Qwen2.5-Coder 14B & 32B through Cortex"
date: 2024-11-14
-ogImage: "/assets/images/changelog/jan-v0.5.8.gif"
+ogImage: "https://catalog.jan.ai/docs/jan-v0.5.8.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
Jan v0.5.8 is out: Jan supports Qwen2.5-Coder 14B & 32B through Cortex
diff --git a/docs/src/pages/changelog/2024-12-03-jan-is-faster.mdx b/docs/src/pages/changelog/2024-12-03-jan-is-faster.mdx
index 9eca1ed86..0b00bd8f1 100644
--- a/docs/src/pages/changelog/2024-12-03-jan-is-faster.mdx
+++ b/docs/src/pages/changelog/2024-12-03-jan-is-faster.mdx
@@ -3,12 +3,12 @@ title: "Jan v0.5.10 is live"
version: 0.5.10
description: "Jan is faster, smoother, and more reliable."
date: 2024-12-03
-ogImage: "/assets/images/changelog/jan-v0.5.10.gif"
+ogImage: "https://catalog.jan.ai/docs/jan-v0.5.10.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
Jan v0.5.10 is live: Jan is faster, smoother, and more reliable.
diff --git a/docs/src/pages/changelog/2024-12-05-jan-hot-fix-mac.mdx b/docs/src/pages/changelog/2024-12-05-jan-hot-fix-mac.mdx
index 25283118e..c20033f6b 100644
--- a/docs/src/pages/changelog/2024-12-05-jan-hot-fix-mac.mdx
+++ b/docs/src/pages/changelog/2024-12-05-jan-hot-fix-mac.mdx
@@ -3,12 +3,12 @@ title: "Jan v0.5.11 is here!"
version: 0.5.11
description: "Critical issues fixed, Mac installation updated."
date: 2024-12-05
-ogImage: "/assets/images/changelog/jan-v0.5.11.gif"
+ogImage: "https://catalog.jan.ai/docs/jan-v0.5.11.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
Jan v0.5.11 is here - critical issues fixed, Mac installation updated.
diff --git a/docs/src/pages/changelog/2024-12-30-jan-new-privacy.mdx b/docs/src/pages/changelog/2024-12-30-jan-new-privacy.mdx
index 370ef3efe..84c735b8b 100644
--- a/docs/src/pages/changelog/2024-12-30-jan-new-privacy.mdx
+++ b/docs/src/pages/changelog/2024-12-30-jan-new-privacy.mdx
@@ -3,12 +3,12 @@ title: "Jan gives you full control over your privacy"
version: 0.5.12
description: "Improved Privacy settings to give full control over analytics"
date: 2024-12-30
-ogImage: "/assets/images/changelog/jan-v0.5.12.gif"
+ogImage: "https://catalog.jan.ai/docs/jan-v0.5.12.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
Jan v0.5.11 is here - critical issues fixed, Mac installation updated.
diff --git a/docs/src/pages/changelog/2025-05-14-jan-qwen3-patch.mdx b/docs/src/pages/changelog/2025-05-14-jan-qwen3-patch.mdx
index 874bdd129..1c06e6f97 100644
--- a/docs/src/pages/changelog/2025-05-14-jan-qwen3-patch.mdx
+++ b/docs/src/pages/changelog/2025-05-14-jan-qwen3-patch.mdx
@@ -3,12 +3,12 @@ title: "Qwen3 support is now more reliable."
version: 0.5.17
description: "Jan v0.5.17 is out: Qwen3 support is now more reliable"
date: 2025-05-14
-ogImage: "/assets/images/changelog/jan-v0-5-17-gemm3-patch.gif"
+ogImage: "https://catalog.jan.ai/docs/jan-v0-5-17-gemm3-patch.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
👋 Jan v0.5.17 is out: Qwen3 support is now more reliable
diff --git a/docs/src/pages/changelog/2025-06-26-jan-nano-mcp.mdx b/docs/src/pages/changelog/2025-06-26-jan-nano-mcp.mdx
index 19c3d5f8e..0f16d7afa 100644
--- a/docs/src/pages/changelog/2025-06-26-jan-nano-mcp.mdx
+++ b/docs/src/pages/changelog/2025-06-26-jan-nano-mcp.mdx
@@ -3,12 +3,12 @@ title: "Jan v0.6.3 brings new features and models!"
version: 0.6.3
description: "Unlocking MCP for everyone and bringing our latest model to Jan!"
date: 2025-06-26
-ogImage: "/assets/images/changelog/jn128.gif"
+ogImage: "https://catalog.jan.ai/docs/jn128.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
## Highlights 🎉
diff --git a/docs/src/pages/changelog/2025-07-17-responsive-ui.mdx b/docs/src/pages/changelog/2025-07-17-responsive-ui.mdx
index db7c25249..8cdd7b4bb 100644
--- a/docs/src/pages/changelog/2025-07-17-responsive-ui.mdx
+++ b/docs/src/pages/changelog/2025-07-17-responsive-ui.mdx
@@ -3,12 +3,12 @@ title: "Jan v0.6.5 brings responsive UI and MCP examples!"
version: 0.6.5
description: "New MCP examples, updated pages, and bug fixes!"
date: 2025-07-17
-ogImage: "/assets/images/changelog/release_v0_6_5.gif"
+ogImage: "https://catalog.jan.ai/docs/release_v0_6_5.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
## Highlights 🎉
diff --git a/docs/src/pages/changelog/2025-07-31-llamacpp-tutorials.mdx b/docs/src/pages/changelog/2025-07-31-llamacpp-tutorials.mdx
index 65a16de7e..d9a29a325 100644
--- a/docs/src/pages/changelog/2025-07-31-llamacpp-tutorials.mdx
+++ b/docs/src/pages/changelog/2025-07-31-llamacpp-tutorials.mdx
@@ -3,12 +3,12 @@ title: "Jan v0.6.6: Enhanced llama.cpp integration and smarter model management"
version: 0.6.6
description: "Major llama.cpp improvements, Hugging Face provider support, and refined MCP experience"
date: 2025-07-31
-ogImage: "/assets/images/changelog/changelog0.6.6.gif"
+ogImage: "https://catalog.jan.ai/docs/changelog0.6.6.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
-
+
## Highlights 🎉
diff --git a/docs/src/pages/changelog/2025-08-14-general-improvs.mdx b/docs/src/pages/changelog/2025-08-14-general-improvs.mdx
index 0fbcd555a..4dffecd03 100644
--- a/docs/src/pages/changelog/2025-08-14-general-improvs.mdx
+++ b/docs/src/pages/changelog/2025-08-14-general-improvs.mdx
@@ -3,13 +3,13 @@ title: "Jan v0.6.8: Engine fixes, new MCP tutorials, and cleaner docs"
version: 0.6.8
description: "Llama.cpp stability upgrades, Linear/Todoist MCP tutorials, new model pages (Lucy, Jan‑v1), and docs reorganization"
date: 2025-08-14
-ogImage: "/assets/images/changelog/mcplinear2.gif"
+ogImage: "https://catalog.jan.ai/docs/mcplinear2.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
import { Callout } from 'nextra/components'
-
+
## Highlights 🎉
diff --git a/docs/src/pages/changelog/2025-08-28-image-support.mdx b/docs/src/pages/changelog/2025-08-28-image-support.mdx
index f3f6efda1..92e187f9d 100644
--- a/docs/src/pages/changelog/2025-08-28-image-support.mdx
+++ b/docs/src/pages/changelog/2025-08-28-image-support.mdx
@@ -3,13 +3,13 @@ title: "Jan v0.6.9: Image support, stable MCP, and powerful model tools"
version: 0.6.9
description: "Major multimodal support with image uploads, MCP out of experimental, auto-detect model capabilities, and enhanced tool calling"
date: 2025-08-28
-ogImage: "/assets/images/changelog/jan-images.gif"
+ogImage: "https://catalog.jan.ai/docs/jan-images.gif"
---
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
import { Callout } from 'nextra/components'
-
+
## Highlights 🎉
diff --git a/docs/src/pages/changelog/2025-09-18-auto-optimize-vision-imports.mdx b/docs/src/pages/changelog/2025-09-18-auto-optimize-vision-imports.mdx
new file mode 100644
index 000000000..e9d814e1a
--- /dev/null
+++ b/docs/src/pages/changelog/2025-09-18-auto-optimize-vision-imports.mdx
@@ -0,0 +1,48 @@
+---
+title: "Jan v0.6.10: Auto Optimize, custom backends, and vision model imports"
+version: 0.6.10
+description: "New experimental Auto Optimize feature, custom llama.cpp backend support, vision model imports, and critical bug fixes"
+date: 2025-09-18
+ogImage: "/assets/images/changelog/jan-v0.6.10-auto-optimize.gif"
+---
+
+import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
+import { Callout } from 'nextra/components'
+
+
+
+## Highlights 🎉
+
+- **Auto Optimize**: One-click hardware-aware performance tuning for llama.cpp.
+- **Custom Backend Support**: Import and manage your preferred llama.cpp versions.
+- **Import Vision Models**: Seamlessly import and use vision-capable models.
+
+### 🚀 Auto Optimize (Experimental)
+
+**Intelligent performance tuning** — Jan can now apply the best llama.cpp settings for your specific hardware:
+- **Hardware analysis**: Automatically detects your CPU, GPU, and memory configuration
+- **One-click optimization**: Applies optimal parameters with a single click in model settings
+
+
+Auto Optimize is currently experimental and will be refined based on user feedback. It analyzes your system specs and applies proven configurations for optimal llama.cpp performance.
+
+
+### 👁️ Vision Model Imports
+
+
+
+**Enhanced multimodal support** — Import and use vision models seamlessly:
+- **Direct vision model import**: Import vision-capable models from any source
+- **Improved compatibility**: Better handling of multimodal model formats
+
+### 🔧 Custom Backend Support
+
+**Import your preferred llama.cpp version** — Full control over your AI backend:
+- **Custom llama.cpp versions**: Import and use any llama.cpp build you prefer
+- **Version flexibility**: Use bleeding-edge builds or stable releases
+- **Backup CDN**: New CDN fallback when GitHub downloads fail
+- **User confirmation**: Prompts before auto-updating llama.cpp
+
+Update your Jan or [download the latest](https://jan.ai/).
+
+For the complete list of changes, see the [GitHub release notes](https://github.com/janhq/jan/releases/tag/v0.6.10).
diff --git a/docs/src/pages/docs/_assets/canva2.png b/docs/src/pages/docs/_assets/canva2.png
deleted file mode 100644
index 63f934132..000000000
Binary files a/docs/src/pages/docs/_assets/canva2.png and /dev/null differ
diff --git a/docs/src/pages/docs/_assets/jan-nano-demo.gif b/docs/src/pages/docs/_assets/jan-nano-demo.gif
deleted file mode 100644
index a2b87619f..000000000
Binary files a/docs/src/pages/docs/_assets/jan-nano-demo.gif and /dev/null differ
diff --git a/docs/src/pages/docs/_assets/jan-nano0.png b/docs/src/pages/docs/_assets/jan-nano0.png
deleted file mode 100644
index f2da8b5f7..000000000
Binary files a/docs/src/pages/docs/_assets/jan-nano0.png and /dev/null differ
diff --git a/docs/src/pages/docs/_assets/jan_v1_demo.gif b/docs/src/pages/docs/_assets/jan_v1_demo.gif
deleted file mode 100644
index 9e4298c82..000000000
Binary files a/docs/src/pages/docs/_assets/jan_v1_demo.gif and /dev/null differ
diff --git a/docs/src/pages/docs/_assets/lucy_demo.gif b/docs/src/pages/docs/_assets/lucy_demo.gif
deleted file mode 100644
index 824fa2111..000000000
Binary files a/docs/src/pages/docs/_assets/lucy_demo.gif and /dev/null differ
diff --git a/docs/src/pages/docs/_assets/mcplinear2.gif b/docs/src/pages/docs/_assets/mcplinear2.gif
deleted file mode 100644
index c013e8e3b..000000000
Binary files a/docs/src/pages/docs/_assets/mcplinear2.gif and /dev/null differ
diff --git a/docs/src/pages/docs/_assets/mcptodoist_extreme.gif b/docs/src/pages/docs/_assets/mcptodoist_extreme.gif
deleted file mode 100644
index 6ae64050f..000000000
Binary files a/docs/src/pages/docs/_assets/mcptodoist_extreme.gif and /dev/null differ
diff --git a/docs/src/pages/docs/jan-models/jan-nano-128.mdx b/docs/src/pages/docs/jan-models/jan-nano-128.mdx
index de619fae2..c4a675719 100644
--- a/docs/src/pages/docs/jan-models/jan-nano-128.mdx
+++ b/docs/src/pages/docs/jan-models/jan-nano-128.mdx
@@ -87,7 +87,7 @@ Jan-Nano-128k has been rigorously evaluated on the SimpleQA benchmark using our
### Demo
diff --git a/docs/src/pages/docs/jan-models/jan-nano-32.mdx b/docs/src/pages/docs/jan-models/jan-nano-32.mdx
index 81573de65..b216f3b96 100644
--- a/docs/src/pages/docs/jan-models/jan-nano-32.mdx
+++ b/docs/src/pages/docs/jan-models/jan-nano-32.mdx
@@ -20,7 +20,7 @@ import { Callout } from 'nextra/components'
# Jan Nano
-
+
## Why Jan Nano?
@@ -81,7 +81,7 @@ Add the serper MCP to Jan via the **Settings** > **MCP Servers** tab.
**Step 6**
Open up a new chat and ask Jan-Nano to search the web for you.
-
+
## Queries to Try
diff --git a/docs/src/pages/docs/jan-models/jan-v1.mdx b/docs/src/pages/docs/jan-models/jan-v1.mdx
index f77e727fa..fe34292c1 100644
--- a/docs/src/pages/docs/jan-models/jan-v1.mdx
+++ b/docs/src/pages/docs/jan-models/jan-v1.mdx
@@ -58,7 +58,7 @@ These benchmarks (EQBench, CreativeWriting, and IFBench) measure the model's abi
### Demo
-
+
### Deployment Options
diff --git a/docs/src/pages/docs/jan-models/lucy.mdx b/docs/src/pages/docs/jan-models/lucy.mdx
index ac4006359..3d6b7adb2 100644
--- a/docs/src/pages/docs/jan-models/lucy.mdx
+++ b/docs/src/pages/docs/jan-models/lucy.mdx
@@ -55,7 +55,7 @@ To use Lucy's web search capabilities, you'll need a Serper API key. Get one at
### Demo
-
+
### Deployment Options
diff --git a/docs/src/pages/docs/mcp-examples/data-analysis/jupyter.mdx b/docs/src/pages/docs/mcp-examples/data-analysis/jupyter.mdx
index 4086831f0..64459b268 100644
--- a/docs/src/pages/docs/mcp-examples/data-analysis/jupyter.mdx
+++ b/docs/src/pages/docs/mcp-examples/data-analysis/jupyter.mdx
@@ -204,7 +204,7 @@ Generate synthetic data with numpy, move it to a pandas dataframe and create a p
Watch the complete output unfold:
diff --git a/docs/src/pages/docs/mcp-examples/design/canva.mdx b/docs/src/pages/docs/mcp-examples/design/canva.mdx
index e38b719fe..365ccc6b3 100644
--- a/docs/src/pages/docs/mcp-examples/design/canva.mdx
+++ b/docs/src/pages/docs/mcp-examples/design/canva.mdx
@@ -98,7 +98,7 @@ When you first use Canva tools:
- Canva authentication page appears in your default browser
- Log in with your Canva account
-
+
2. **Team Selection & Permissions**
- Select your team (if you have multiple)
diff --git a/docs/src/pages/docs/mcp-examples/productivity/linear.mdx b/docs/src/pages/docs/mcp-examples/productivity/linear.mdx
index 01423e942..1419ee373 100644
--- a/docs/src/pages/docs/mcp-examples/productivity/linear.mdx
+++ b/docs/src/pages/docs/mcp-examples/productivity/linear.mdx
@@ -128,7 +128,7 @@ You should see all Linear tools in the chat interface:
Watch AI transform mundane tasks into epic narratives:
-
+
## Creative Examples
diff --git a/docs/src/pages/docs/mcp-examples/productivity/todoist.mdx b/docs/src/pages/docs/mcp-examples/productivity/todoist.mdx
index 2d7844bf0..384789d3e 100644
--- a/docs/src/pages/docs/mcp-examples/productivity/todoist.mdx
+++ b/docs/src/pages/docs/mcp-examples/productivity/todoist.mdx
@@ -101,7 +101,7 @@ You should see the Todoist tools in the tools panel:
Now you can manage your todo list through natural conversation:
-
+
## Example Prompts
diff --git a/docs/src/pages/post/benchmarking-nvidia-tensorrt-llm.mdx b/docs/src/pages/post/benchmarking-nvidia-tensorrt-llm.mdx
index fca8ed526..4d0df7cc5 100644
--- a/docs/src/pages/post/benchmarking-nvidia-tensorrt-llm.mdx
+++ b/docs/src/pages/post/benchmarking-nvidia-tensorrt-llm.mdx
@@ -103,7 +103,7 @@ Note: `ngl` is the abbreviation of `Number of GPU Layers` with the range from `0
### NVIDIA GeForce RTX 4090 GPU
-
+
*Jan is built on this Dual-4090 workstation, which recently got upgraded to a nice case*

diff --git a/docs/src/pages/post/jan-v1-for-research.mdx b/docs/src/pages/post/jan-v1-for-research.mdx
index b23f17d2f..7cafd5245 100644
--- a/docs/src/pages/post/jan-v1-for-research.mdx
+++ b/docs/src/pages/post/jan-v1-for-research.mdx
@@ -13,7 +13,7 @@ date: 2025-08-22
This cookbook will transform your Jan-V1 from a basic Q&A tool into a comprehensive research assistant. By the end of this guide, you'll have a custom-configured model that generates detailed reports with proper citations instead of surface-level answers.
-
+
## Key Points
diff --git a/extensions-web/src/conversational-web/api.ts b/extensions-web/src/conversational-web/api.ts
new file mode 100644
index 000000000..0e398eb05
--- /dev/null
+++ b/extensions-web/src/conversational-web/api.ts
@@ -0,0 +1,160 @@
+/**
+ * Conversation API wrapper using the shared JanAuthService
+ */
+
+import { getSharedAuthService, JanAuthService } from '../shared/auth'
+import { CONVERSATION_API_ROUTES } from './const'
+import {
+ Conversation,
+ ConversationResponse,
+ ListConversationsParams,
+ ListConversationsResponse,
+ PaginationParams,
+ PaginatedResponse,
+ ConversationItem,
+ ListConversationItemsParams,
+ ListConversationItemsResponse
+} from './types'
+
+declare const JAN_API_BASE: string
+
+export class RemoteApi {
+ private authService: JanAuthService
+
+ constructor() {
+ this.authService = getSharedAuthService()
+ }
+
+ async createConversation(
+ data: Conversation
+ ): Promise<ConversationResponse> {
+ const url = `${JAN_API_BASE}${CONVERSATION_API_ROUTES.CONVERSATIONS}`
+
+ return this.authService.makeAuthenticatedRequest(
+ url,
+ {
+ method: 'POST',
+ body: JSON.stringify(data),
+ }
+ )
+ }
+
+ async updateConversation(
+ conversationId: string,
+ data: Conversation
+ ): Promise<ConversationResponse> {
+ const url = `${JAN_API_BASE}${CONVERSATION_API_ROUTES.CONVERSATION_BY_ID(conversationId)}`
+
+ return this.authService.makeAuthenticatedRequest(
+ url,
+ {
+ method: 'PATCH',
+ body: JSON.stringify(data),
+ }
+ )
+ }
+
+ async listConversations(
+ params?: ListConversationsParams
+ ): Promise<ListConversationsResponse> {
+ const queryParams = new URLSearchParams()
+
+ if (params?.limit !== undefined) {
+ queryParams.append('limit', params.limit.toString())
+ }
+ if (params?.after) {
+ queryParams.append('after', params.after)
+ }
+ if (params?.order) {
+ queryParams.append('order', params.order)
+ }
+
+ const queryString = queryParams.toString()
+ const url = `${JAN_API_BASE}${CONVERSATION_API_ROUTES.CONVERSATIONS}${queryString ? `?${queryString}` : ''}`
+
+ return this.authService.makeAuthenticatedRequest(
+ url,
+ {
+ method: 'GET',
+ }
+ )
+ }
+
+ /**
+ * Generic method to fetch all pages of paginated data
+ */
+ async fetchAllPaginated<T>(
+ fetchFn: (params: PaginationParams) => Promise<PaginatedResponse<T>>,
+ initialParams?: Partial<PaginationParams>
+ ): Promise<T[]> {
+ const allItems: T[] = []
+ let after: string | undefined = undefined
+ let hasMore = true
+ const limit = initialParams?.limit || 100
+
+ while (hasMore) {
+ const response = await fetchFn({
+ limit,
+ after,
+ ...initialParams,
+ })
+
+ allItems.push(...response.data)
+ hasMore = response.has_more
+ after = response.last_id
+ }
+
+ return allItems
+ }
+
+ async getAllConversations(): Promise<Conversation[]> {
+ return this.fetchAllPaginated(
+ (params) => this.listConversations(params)
+ )
+ }
+
+ async deleteConversation(conversationId: string): Promise<void> {
+ const url = `${JAN_API_BASE}${CONVERSATION_API_ROUTES.CONVERSATION_BY_ID(conversationId)}`
+
+ await this.authService.makeAuthenticatedRequest(
+ url,
+ {
+ method: 'DELETE',
+ }
+ )
+ }
+
+ async listConversationItems(
+ conversationId: string,
+ params?: Omit<ListConversationItemsParams, 'conversation_id'>
+ ): Promise<ListConversationItemsResponse> {
+ const queryParams = new URLSearchParams()
+
+ if (params?.limit !== undefined) {
+ queryParams.append('limit', params.limit.toString())
+ }
+ if (params?.after) {
+ queryParams.append('after', params.after)
+ }
+ if (params?.order) {
+ queryParams.append('order', params.order)
+ }
+
+ const queryString = queryParams.toString()
+ const url = `${JAN_API_BASE}${CONVERSATION_API_ROUTES.CONVERSATION_ITEMS(conversationId)}${queryString ? `?${queryString}` : ''}`
+
+ return this.authService.makeAuthenticatedRequest(
+ url,
+ {
+ method: 'GET',
+ }
+ )
+ }
+
+ async getAllConversationItems(conversationId: string): Promise<ConversationItem[]> {
+ return this.fetchAllPaginated(
+ (params) => this.listConversationItems(conversationId, params),
+ { limit: 100, order: 'asc' }
+ )
+ }
+}
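Usage of the pagination helper, for reference: `fetchAllPaginated` keeps requesting pages (default limit 100) and follows `last_id` as the `after` cursor until `has_more` is false, so callers get one flat array back. A minimal caller sketch, assuming `Conversation` carries an `id` field; everything outside the class above is illustrative:

```ts
import { RemoteApi } from './api'

const api = new RemoteApi()

// All conversations, collected across however many pages the server returns.
const conversations = await api.getAllConversations()
console.log(`Loaded ${conversations.length} conversations`)

// Items for one conversation, oldest first because getAllConversationItems
// passes { limit: 100, order: 'asc' }.
if (conversations.length > 0) {
  const items = await api.getAllConversationItems(conversations[0].id)
  console.log(`First conversation has ${items.length} items`)
}
```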
diff --git a/extensions-web/src/conversational-web/const.ts b/extensions-web/src/conversational-web/const.ts
new file mode 100644
index 000000000..0ad7e9049
--- /dev/null
+++ b/extensions-web/src/conversational-web/const.ts
@@ -0,0 +1,17 @@
+/**
+ * API Constants for Conversational Web
+ */
+
+
+export const CONVERSATION_API_ROUTES = {
+ CONVERSATIONS: '/conversations',
+ CONVERSATION_BY_ID: (id: string) => `/conversations/${id}`,
+ CONVERSATION_ITEMS: (id: string) => `/conversations/${id}/items`,
+} as const
+
+export const DEFAULT_ASSISTANT = {
+ id: 'jan',
+ name: 'Jan',
+ avatar: '👋',
+ created_at: 1747029866.542,
+}
\ No newline at end of file
diff --git a/extensions-web/src/conversational-web/extension.ts b/extensions-web/src/conversational-web/extension.ts
new file mode 100644
index 000000000..7c31f1c31
--- /dev/null
+++ b/extensions-web/src/conversational-web/extension.ts
@@ -0,0 +1,154 @@
+/**
+ * Web Conversational Extension
+ * Implements thread and message management via the Jan conversations API
+ */
+
+import {
+ Thread,
+ ThreadMessage,
+ ConversationalExtension,
+ ThreadAssistantInfo,
+} from '@janhq/core'
+import { RemoteApi } from './api'
+import { getDefaultAssistant, ObjectParser, combineConversationItemsToMessages } from './utils'
+
+export default class ConversationalExtensionWeb extends ConversationalExtension {
+ private remoteApi: RemoteApi | undefined
+
+ async onLoad() {
+ console.log('Loading Web Conversational Extension')
+ this.remoteApi = new RemoteApi()
+ }
+
+ onUnload() {}
+
+ // Thread Management
+ async listThreads(): Promise<Thread[]> {
+ try {
+ if (!this.remoteApi) {
+ throw new Error('RemoteApi not initialized')
+ }
+ const conversations = await this.remoteApi.getAllConversations()
+ console.log('!!!Listed threads:', conversations.map(ObjectParser.conversationToThread))
+ return conversations.map(ObjectParser.conversationToThread)
+ } catch (error) {
+ console.error('Failed to list threads:', error)
+ return []
+ }
+ }
+
+ async createThread(thread: Thread): Promise<Thread> {
+ try {
+ if (!this.remoteApi) {
+ throw new Error('RemoteApi not initialized')
+ }
+ const response = await this.remoteApi.createConversation(
+ ObjectParser.threadToConversation(thread)
+ )
+ // Create a new thread object with the server's ID
+ const createdThread = {
+ ...thread,
+ id: response.id,
+ assistants: thread.assistants.map(getDefaultAssistant)
+ }
+ console.log('!!!Created thread:', createdThread)
+ return createdThread
+ } catch (error) {
+ console.error('Failed to create thread:', error)
+ throw error
+ }
+ }
+
+ async modifyThread(thread: Thread): Promise<void> {
+ try {
+ if (!this.remoteApi) {
+ throw new Error('RemoteApi not initialized')
+ }
+ await this.remoteApi.updateConversation(
+ thread.id,
+ ObjectParser.threadToConversation(thread)
+ )
+ console.log('!!!Modified thread:', thread)
+ } catch (error) {
+ console.error('Failed to modify thread:', error)
+ throw error
+ }
+ }
+
+ async deleteThread(threadId: string): Promise<void> {
+ try {
+ if (!this.remoteApi) {
+ throw new Error('RemoteApi not initialized')
+ }
+ await this.remoteApi.deleteConversation(threadId)
+ console.log('!!!Deleted thread:', threadId)
+ } catch (error) {
+ console.error('Failed to delete thread:', error)
+ throw error
+ }
+ }
+
+ // Message Management
+ async createMessage(message: ThreadMessage): Promise<ThreadMessage> {
+ console.log('!!!Created message:', message)
+ return message
+ }
+
+ async listMessages(threadId: string): Promise<ThreadMessage[]> {
+ try {
+ if (!this.remoteApi) {
+ throw new Error('RemoteApi not initialized')
+ }
+ console.log('!!!Listing messages for thread:', threadId)
+
+ // Fetch all conversation items from the API
+ const items = await this.remoteApi.getAllConversationItems(threadId)
+
+ // Convert and combine conversation items to thread messages
+ const messages = combineConversationItemsToMessages(items, threadId)
+
+ console.log('!!!Fetched messages:', messages)
+ return messages
+ } catch (error) {
+ console.error('Failed to list messages:', error)
+ return []
+ }
+ }
+
+ async modifyMessage(message: ThreadMessage): Promise<ThreadMessage> {
+ console.log('!!!Modified message:', message)
+ return message
+ }
+
+ async deleteMessage(threadId: string, messageId: string): Promise<void> {
+ console.log('!!!Deleted message:', threadId, messageId)
+ }
+
+ async getThreadAssistant(threadId: string): Promise<ThreadAssistantInfo> {
+ console.log('!!!Getting assistant for thread:', threadId)
+ return { id: 'jan', name: 'Jan', model: { id: 'jan-v1-4b' } }
+ }
+
+ async createThreadAssistant(
+ threadId: string,
+ assistant: ThreadAssistantInfo
+ ): Promise<ThreadAssistantInfo> {
+ console.log('!!!Creating assistant for thread:', threadId, assistant)
+ return assistant
+ }
+
+ async modifyThreadAssistant(
+ threadId: string,
+ assistant: ThreadAssistantInfo
+ ): Promise<ThreadAssistantInfo> {
+ console.log('!!!Modifying assistant for thread:', threadId, assistant)
+ return assistant
+ }
+
+ async getThreadAssistantInfo(
+ threadId: string
+ ): Promise<ThreadAssistantInfo> {
+ console.log('!!!Getting assistant info for thread:', threadId)
+ return { id: 'jan', name: 'Jan', model: { id: 'jan-v1-4b' } }
+ }
+}
diff --git a/extensions-web/src/conversational-web/index.ts b/extensions-web/src/conversational-web/index.ts
index 5f9ae260e..7bedfdd80 100644
--- a/extensions-web/src/conversational-web/index.ts
+++ b/extensions-web/src/conversational-web/index.ts
@@ -1,347 +1,3 @@
-/**
- * Web Conversational Extension
- * Implements thread and message management using IndexedDB
- */
+import ConversationalExtensionWeb from './extension'
-import { Thread, ThreadMessage, ConversationalExtension, ThreadAssistantInfo } from '@janhq/core'
-import { getSharedDB } from '../shared/db'
-
-export default class ConversationalExtensionWeb extends ConversationalExtension {
- private db: IDBDatabase | null = null
-
- async onLoad() {
- console.log('Loading Web Conversational Extension')
- this.db = await getSharedDB()
- }
-
- onUnload() {
- // Don't close shared DB, other extensions might be using it
- this.db = null
- }
-
- private ensureDB(): void {
- if (!this.db) {
- throw new Error('Database not initialized. Call onLoad() first.')
- }
- }
-
- // Thread Management
- async listThreads(): Promise<Thread[]> {
- return this.getThreads()
- }
-
- async getThreads(): Promise<Thread[]> {
- this.ensureDB()
-
- return new Promise((resolve, reject) => {
- const transaction = this.db!.transaction(['threads'], 'readonly')
- const store = transaction.objectStore('threads')
- const request = store.getAll()
-
- request.onsuccess = () => {
- const threads = request.result || []
- // Sort by updated desc (most recent first)
- threads.sort((a, b) => (b.updated || 0) - (a.updated || 0))
- resolve(threads)
- }
-
- request.onerror = () => {
- reject(request.error)
- }
- })
- }
-
- async createThread(thread: Thread): Promise<Thread> {
- await this.saveThread(thread)
- return thread
- }
-
- async modifyThread(thread: Thread): Promise<void> {
- await this.saveThread(thread)
- }
-
- async saveThread(thread: Thread): Promise<void> {
- this.ensureDB()
-
- return new Promise((resolve, reject) => {
- const transaction = this.db!.transaction(['threads'], 'readwrite')
- const store = transaction.objectStore('threads')
-
- const threadToStore = {
- ...thread,
- created: thread.created || Date.now() / 1000,
- updated: Date.now() / 1000,
- }
-
- const request = store.put(threadToStore)
-
- request.onsuccess = () => {
- console.log('Thread saved:', thread.id)
- resolve()
- }
-
- request.onerror = () => {
- console.error('Failed to save thread:', request.error)
- reject(request.error)
- }
- })
- }
-
- async deleteThread(threadId: string): Promise<void> {
- this.ensureDB()
-
- return new Promise((resolve, reject) => {
- const transaction = this.db!.transaction(['threads', 'messages'], 'readwrite')
- const threadsStore = transaction.objectStore('threads')
- const messagesStore = transaction.objectStore('messages')
-
- // Delete thread
- const deleteThreadRequest = threadsStore.delete(threadId)
-
- // Delete all messages in the thread
- const messageIndex = messagesStore.index('thread_id')
- const messagesRequest = messageIndex.openCursor(IDBKeyRange.only(threadId))
-
- messagesRequest.onsuccess = (event) => {
- const cursor = (event.target as IDBRequest).result
- if (cursor) {
- cursor.delete()
- cursor.continue()
- }
- }
-
- transaction.oncomplete = () => {
- console.log('Thread and messages deleted:', threadId)
- resolve()
- }
-
- transaction.onerror = () => {
- console.error('Failed to delete thread:', transaction.error)
- reject(transaction.error)
- }
- })
- }
-
- // Message Management
- async createMessage(message: ThreadMessage): Promise<ThreadMessage> {
- await this.addNewMessage(message)
- return message
- }
-
- async listMessages(threadId: string): Promise<ThreadMessage[]> {
- return this.getAllMessages(threadId)
- }
-
- async modifyMessage(message: ThreadMessage): Promise<ThreadMessage> {
- this.ensureDB()
-
- return new Promise((resolve, reject) => {
- const transaction = this.db!.transaction(['messages'], 'readwrite')
- const store = transaction.objectStore('messages')
-
- const messageToStore = {
- ...message,
- updated: Date.now() / 1000,
- }
-
- const request = store.put(messageToStore)
-
- request.onsuccess = () => {
- console.log('Message updated:', message.id)
- resolve(message)
- }
-
- request.onerror = () => {
- console.error('Failed to update message:', request.error)
- reject(request.error)
- }
- })
- }
-
- async deleteMessage(threadId: string, messageId: string): Promise<void> {
- this.ensureDB()
-
- return new Promise((resolve, reject) => {
- const transaction = this.db!.transaction(['messages'], 'readwrite')
- const store = transaction.objectStore('messages')
- const request = store.delete(messageId)
-
- request.onsuccess = () => {
- console.log('Message deleted:', messageId)
- resolve()
- }
-
- request.onerror = () => {
- console.error('Failed to delete message:', request.error)
- reject(request.error)
- }
- })
- }
-
- async addNewMessage(message: ThreadMessage): Promise<void> {
- this.ensureDB()
-
- return new Promise((resolve, reject) => {
- const transaction = this.db!.transaction(['messages'], 'readwrite')
- const store = transaction.objectStore('messages')
-
- const messageToStore = {
- ...message,
- created_at: message.created_at || Date.now() / 1000,
- }
-
- const request = store.add(messageToStore)
-
- request.onsuccess = () => {
- console.log('Message added:', message.id)
- resolve()
- }
-
- request.onerror = () => {
- console.error('Failed to add message:', request.error)
- reject(request.error)
- }
- })
- }
-
- async writeMessages(threadId: string, messages: ThreadMessage[]): Promise<void> {
- this.ensureDB()
-
- return new Promise((resolve, reject) => {
- const transaction = this.db!.transaction(['messages'], 'readwrite')
- const store = transaction.objectStore('messages')
-
- // First, delete existing messages for this thread
- const index = store.index('thread_id')
- const deleteRequest = index.openCursor(IDBKeyRange.only(threadId))
-
- deleteRequest.onsuccess = (event) => {
- const cursor = (event.target as IDBRequest).result
- if (cursor) {
- cursor.delete()
- cursor.continue()
- } else {
- // After deleting old messages, add new ones
- const addPromises = messages.map(message => {
- return new Promise<void>((resolveAdd, rejectAdd) => {
- const messageToStore = {
- ...message,
- thread_id: threadId,
- created_at: message.created_at || Date.now() / 1000,
- }
-
- const addRequest = store.add(messageToStore)
- addRequest.onsuccess = () => resolveAdd()
- addRequest.onerror = () => rejectAdd(addRequest.error)
- })
- })
-
- Promise.all(addPromises)
- .then(() => {
- console.log(`${messages.length} messages written for thread:`, threadId)
- resolve()
- })
- .catch(reject)
- }
- }
-
- deleteRequest.onerror = () => {
- reject(deleteRequest.error)
- }
- })
- }
-
- async getAllMessages(threadId: string): Promise<ThreadMessage[]> {
- this.ensureDB()
-
- return new Promise((resolve, reject) => {
- const transaction = this.db!.transaction(['messages'], 'readonly')
- const store = transaction.objectStore('messages')
- const index = store.index('thread_id')
- const request = index.getAll(threadId)
-
- request.onsuccess = () => {
- const messages = request.result || []
- // Sort by created_at asc (chronological order)
- messages.sort((a, b) => (a.created_at || 0) - (b.created_at || 0))
- resolve(messages)
- }
-
- request.onerror = () => {
- reject(request.error)
- }
- })
- }
-
- // Thread Assistant Info (simplified - stored with thread)
- async getThreadAssistant(threadId: string): Promise<ThreadAssistantInfo> {
- const info = await this.getThreadAssistantInfo(threadId)
- if (!info) {
- throw new Error(`Thread assistant info not found for thread ${threadId}`)
- }
- return info
- }
-
- async createThreadAssistant(threadId: string, assistant: ThreadAssistantInfo): Promise<ThreadAssistantInfo> {
- await this.saveThreadAssistantInfo(threadId, assistant)
- return assistant
- }
-
- async modifyThreadAssistant(threadId: string, assistant: ThreadAssistantInfo): Promise<ThreadAssistantInfo> {
- await this.saveThreadAssistantInfo(threadId, assistant)
- return assistant
- }
-
- async saveThreadAssistantInfo(threadId: string, assistantInfo: ThreadAssistantInfo): Promise<void> {
- this.ensureDB()
-
- return new Promise((resolve, reject) => {
- const transaction = this.db!.transaction(['threads'], 'readwrite')
- const store = transaction.objectStore('threads')
-
- // Get existing thread and update with assistant info
- const getRequest = store.get(threadId)
-
- getRequest.onsuccess = () => {
- const thread = getRequest.result
- if (!thread) {
- reject(new Error(`Thread ${threadId} not found`))
- return
- }
-
- const updatedThread = {
- ...thread,
- assistantInfo,
- updated_at: Date.now() / 1000,
- }
-
- const putRequest = store.put(updatedThread)
- putRequest.onsuccess = () => resolve()
- putRequest.onerror = () => reject(putRequest.error)
- }
-
- getRequest.onerror = () => {
- reject(getRequest.error)
- }
- })
- }
-
- async getThreadAssistantInfo(threadId: string): Promise<ThreadAssistantInfo | undefined> {
- this.ensureDB()
-
- return new Promise((resolve, reject) => {
- const transaction = this.db!.transaction(['threads'], 'readonly')
- const store = transaction.objectStore('threads')
- const request = store.get(threadId)
-
- request.onsuccess = () => {
- const thread = request.result
- resolve(thread?.assistantInfo)
- }
-
- request.onerror = () => {
- reject(request.error)
- }
- })
- }
-}
\ No newline at end of file
+export default ConversationalExtensionWeb
diff --git a/extensions-web/src/conversational-web/types.ts b/extensions-web/src/conversational-web/types.ts
new file mode 100644
index 000000000..a6057da5d
--- /dev/null
+++ b/extensions-web/src/conversational-web/types.ts
@@ -0,0 +1,93 @@
+/**
+ * TypeScript Types for Conversational API
+ */
+
+export interface PaginationParams {
+ limit?: number
+ after?: string
+ order?: 'asc' | 'desc'
+}
+
+export interface PaginatedResponse<T> {
+ data: T[]
+ has_more: boolean
+ object: 'list'
+ first_id?: string
+ last_id?: string
+}
+
+export interface ConversationMetadata {
+ model_provider?: string
+ model_id?: string
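+ // Stored as the strings 'true' / 'false' (see ObjectParser in utils.ts)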
+ is_favorite?: string
+}
+
+export interface Conversation {
+ title?: string
+ metadata?: ConversationMetadata
+}
+
+export interface ConversationResponse {
+ id: string
+ object: 'conversation'
+ title?: string
+ created_at: number
+ metadata: ConversationMetadata
+}
+
+export type ListConversationsParams = PaginationParams
+export type ListConversationsResponse = PaginatedResponse<ConversationResponse>
+
+// Conversation Items types
+export interface ConversationItemAnnotation {
+ end_index?: number
+ file_id?: string
+ index?: number
+ start_index?: number
+ text?: string
+ type?: string
+ url?: string
+}
+
+export interface ConversationItemContent {
+ file?: {
+ file_id?: string
+ mime_type?: string
+ name?: string
+ size?: number
+ }
+ finish_reason?: string
+ image?: {
+ detail?: string
+ file_id?: string
+ url?: string
+ }
+ input_text?: string
+ output_text?: {
+ annotations?: ConversationItemAnnotation[]
+ text?: string
+ }
+ reasoning_content?: string
+ text?: {
+ value?: string
+ }
+ type?: string
+}
+
+export interface ConversationItem {
+ content?: ConversationItemContent[]
+ created_at: number
+ id: string
+ object: string
+ role: string
+ status?: string
+ type?: string
+}
+
+export interface ListConversationItemsParams extends PaginationParams {
+ conversation_id: string
+}
+
+export interface ListConversationItemsResponse extends PaginatedResponse<ConversationItem> {
+ total?: number
+}
diff --git a/extensions-web/src/conversational-web/utils.ts b/extensions-web/src/conversational-web/utils.ts
new file mode 100644
index 000000000..6448d9f4d
--- /dev/null
+++ b/extensions-web/src/conversational-web/utils.ts
@@ -0,0 +1,247 @@
+import { Thread, ThreadAssistantInfo, ThreadMessage, ContentType } from '@janhq/core'
+import { Conversation, ConversationResponse, ConversationItem } from './types'
+import { DEFAULT_ASSISTANT } from './const'
+
+export class ObjectParser {
+ static threadToConversation(thread: Thread): Conversation {
+ const modelName = thread.assistants?.[0]?.model?.id || undefined
+ const modelProvider = thread.assistants?.[0]?.model?.engine || undefined
+ const isFavorite = thread.metadata?.is_favorite?.toString() || 'false'
+ let metadata = {}
+ if (modelName && modelProvider) {
+ metadata = {
+ model_id: modelName,
+ model_provider: modelProvider,
+ is_favorite: isFavorite,
+ }
+ }
+ return {
+ title: shortenConversationTitle(thread.title),
+ metadata,
+ }
+ }
+
+ static conversationToThread(conversation: ConversationResponse): Thread {
+ const assistants: ThreadAssistantInfo[] = []
+ if (
+ conversation.metadata?.model_id &&
+ conversation.metadata?.model_provider
+ ) {
+ assistants.push({
+ ...DEFAULT_ASSISTANT,
+ model: {
+ id: conversation.metadata.model_id,
+ engine: conversation.metadata.model_provider,
+ },
+ })
+ } else {
+ assistants.push({
+ ...DEFAULT_ASSISTANT,
+ model: {
+ id: 'jan-v1-4b',
+ engine: 'jan',
+ },
+ })
+ }
+
+ const isFavorite = conversation.metadata?.is_favorite === 'true'
+ return {
+ id: conversation.id,
+ title: conversation.title || '',
+ assistants,
+ created: conversation.created_at,
+ updated: conversation.created_at,
+ model: {
+ id: conversation.metadata.model_id,
+ provider: conversation.metadata.model_provider,
+ },
+ isFavorite,
+ metadata: { is_favorite: isFavorite },
+ } as unknown as Thread
+ }
+
+ static conversationItemToThreadMessage(
+ item: ConversationItem,
+ threadId: string
+ ): ThreadMessage {
+ // Extract text content and metadata from the item
+ let textContent = ''
+ let reasoningContent = ''
+ const imageUrls: string[] = []
+ let toolCalls: any[] = []
+ let finishReason = ''
+
+ if (item.content && item.content.length > 0) {
+ for (const content of item.content) {
+ // Handle text content
+ if (content.text?.value) {
+ textContent = content.text.value
+ }
+ // Handle output_text for assistant messages
+ if (content.output_text?.text) {
+ textContent = content.output_text.text
+ }
+ // Handle reasoning content
+ if (content.reasoning_content) {
+ reasoningContent = content.reasoning_content
+ }
+ // Handle image content
+ if (content.image?.url) {
+ imageUrls.push(content.image.url)
+ }
+ // Extract finish_reason
+ if (content.finish_reason) {
+ finishReason = content.finish_reason
+ }
+ }
+ }
+
+ // Handle tool calls parsing for assistant messages
+ if (item.role === 'assistant' && finishReason === 'tool_calls') {
+ try {
+ // Tool calls are embedded as JSON string in textContent
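+ // Assumption: a single greedy match from the first '[' to the last ']' captures the whole tool-call array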
+ const toolCallMatch = textContent.match(/\[.*\]/)
+ if (toolCallMatch) {
+ const toolCallsData = JSON.parse(toolCallMatch[0])
+ toolCalls = toolCallsData.map((toolCall: any) => ({
+ tool: {
+ id: toolCall.id || 'unknown',
+ function: {
+ name: toolCall.function?.name || 'unknown',
+ arguments: toolCall.function?.arguments || '{}'
+ },
+ type: toolCall.type || 'function'
+ },
+ response: {
+ error: '',
+ content: []
+ },
+ state: 'ready'
+ }))
+ // Remove tool calls JSON from text content, keep only reasoning
+ textContent = ''
+ }
+ } catch (error) {
+ console.error('Failed to parse tool calls:', error)
+ }
+ }
+
+ // Format final content with reasoning if present
+ let finalTextValue = ''
+ if (reasoningContent) {
+ finalTextValue = `${reasoningContent}`
+ }
+ if (textContent) {
+ finalTextValue += textContent
+ }
+
+ // Build content array for ThreadMessage
+ const messageContent: any[] = [
+ {
+ type: ContentType.Text,
+ text: {
+ value: finalTextValue || '',
+ annotations: [],
+ },
+ },
+ ]
+
+ // Add image content if present
+ for (const imageUrl of imageUrls) {
+ messageContent.push({
+ type: 'image_url' as ContentType,
+ image_url: {
+ url: imageUrl,
+ },
+ })
+ }
+
+ // Build metadata
+ const metadata: any = {}
+ if (toolCalls.length > 0) {
+ metadata.tool_calls = toolCalls
+ }
+
+ // Map status from server format to frontend format
+ const mappedStatus = item.status === 'completed' ? 'ready' : item.status || 'ready'
+
+ return {
+ type: 'text',
+ id: item.id,
+ object: 'thread.message',
+ thread_id: threadId,
+ role: item.role as 'user' | 'assistant',
+ content: messageContent,
+ created_at: item.created_at * 1000, // Convert to milliseconds
+ completed_at: 0,
+ status: mappedStatus,
+ metadata,
+ } as ThreadMessage
+ }
+}
+
+const shortenConversationTitle = (title: string): string => {
+ const maxLength = 50
+ return title.length <= maxLength ? title : title.substring(0, maxLength)
+}
+
+export const getDefaultAssistant = (
+ assistant: ThreadAssistantInfo
+): ThreadAssistantInfo => {
+ return { ...assistant, instructions: undefined }
+}
+
+/**
+ * Utility function to combine conversation items into thread messages
+ * Handles tool response merging and message consolidation
+ */
+export const combineConversationItemsToMessages = (
+ items: ConversationItem[],
+ threadId: string
+): ThreadMessage[] => {
+ const messages: ThreadMessage[] = []
+ const toolResponseMap = new Map()
+
+ // First pass: collect tool responses
+ for (const item of items) {
+ if (item.role === 'tool') {
+ const toolContent = item.content?.[0]?.text?.value || ''
+ toolResponseMap.set(item.id, {
+ error: '',
+ content: [
+ {
+ type: 'text',
+ text: toolContent
+ }
+ ]
+ })
+ }
+ }
+
+ // Second pass: build messages and merge tool responses
+ for (const item of items) {
+ // Skip tool messages as they will be merged into assistant messages
+ if (item.role === 'tool') {
+ continue
+ }
+
+ const message = ObjectParser.conversationItemToThreadMessage(item, threadId)
+
+ // If this is an assistant message with tool calls, merge tool responses
+ if (message.role === 'assistant' && message.metadata?.tool_calls && Array.isArray(message.metadata.tool_calls)) {
+ const toolCalls = message.metadata.tool_calls as any[]
+ let toolResponseIndex = 0
+
+ for (const [responseId, responseData] of toolResponseMap.entries()) {
+ if (toolResponseIndex < toolCalls.length) {
+ toolCalls[toolResponseIndex].response = responseData
+ toolResponseIndex++
+ }
+ }
+ }
+
+ messages.push(message)
+ }
+
+ return messages
+}
diff --git a/extensions-web/src/jan-provider-web/api.ts b/extensions-web/src/jan-provider-web/api.ts
index 16c4dc70e..436ee06b6 100644
--- a/extensions-web/src/jan-provider-web/api.ts
+++ b/extensions-web/src/jan-provider-web/api.ts
@@ -24,6 +24,7 @@ export interface JanChatMessage {
export interface JanChatCompletionRequest {
model: string
messages: JanChatMessage[]
+ conversation_id?: string
temperature?: number
max_tokens?: number
top_p?: number
@@ -93,7 +94,7 @@ export class JanApiClient {
janProviderStore.clearError()
const response = await this.authService.makeAuthenticatedRequest(
- `${JAN_API_BASE}/models`
+ `${JAN_API_BASE}/conv/models`
)
const models = response.data || []
@@ -115,12 +116,16 @@ export class JanApiClient {
janProviderStore.clearError()
return await this.authService.makeAuthenticatedRequest(
- `${JAN_API_BASE}/chat/completions`,
+ `${JAN_API_BASE}/conv/chat/completions`,
{
method: 'POST',
body: JSON.stringify({
...request,
stream: false,
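+ // store / store_reasoning presumably tell the server to persist the exchange (including reasoning);
+ // the endpoint takes the thread id as `conversation`, so the internal `conversation_id` field is unset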
+ store: true,
+ store_reasoning: true,
+ conversation: request.conversation_id,
+ conversation_id: undefined,
}),
}
)
@@ -142,7 +147,7 @@ export class JanApiClient {
const authHeader = await this.authService.getAuthHeader()
- const response = await fetch(`${JAN_API_BASE}/chat/completions`, {
+ const response = await fetch(`${JAN_API_BASE}/conv/chat/completions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@@ -151,6 +156,10 @@ export class JanApiClient {
body: JSON.stringify({
...request,
stream: true,
+ store: true,
+ store_reasoning: true,
+ conversation: request.conversation_id,
+ conversation_id: undefined,
}),
})
diff --git a/extensions-web/src/jan-provider-web/provider.ts b/extensions-web/src/jan-provider-web/provider.ts
index 5c829ed34..216da66c9 100644
--- a/extensions-web/src/jan-provider-web/provider.ts
+++ b/extensions-web/src/jan-provider-web/provider.ts
@@ -144,6 +144,7 @@ export default class JanProviderWeb extends AIEngine {
const janRequest = {
model: modelId,
messages: janMessages,
+ conversation_id: opts.thread_id,
temperature: opts.temperature ?? undefined,
max_tokens: opts.n_predict ?? undefined,
top_p: opts.top_p ?? undefined,
diff --git a/extensions-web/src/shared/auth/api.ts b/extensions-web/src/shared/auth/api.ts
index 1bfdae3c7..61163984b 100644
--- a/extensions-web/src/shared/auth/api.ts
+++ b/extensions-web/src/shared/auth/api.ts
@@ -13,7 +13,7 @@ declare const JAN_API_BASE: string
*/
export async function logoutUser(): Promise {
const response = await fetch(`${JAN_API_BASE}${AUTH_ENDPOINTS.LOGOUT}`, {
- method: 'POST',
+ method: 'GET',
credentials: 'include',
headers: {
'Content-Type': 'application/json',
diff --git a/extensions-web/src/shared/auth/broadcast.ts b/extensions-web/src/shared/auth/broadcast.ts
index 8a2d316d4..37062ee76 100644
--- a/extensions-web/src/shared/auth/broadcast.ts
+++ b/extensions-web/src/shared/auth/broadcast.ts
@@ -1,16 +1,69 @@
/**
* Authentication Broadcast Channel Handler
- * Manages cross-tab communication for auth state changes
+ * Manages both cross-tab and same-tab communication for auth state changes
+ *
+ * Architecture:
+ * - BroadcastChannel API: For cross-tab communication
+ * - LocalBroadcastChannel: For same-tab communication via CustomEvents
*/
-import { AUTH_BROADCAST_CHANNEL, AUTH_EVENTS } from './const'
+import { AUTH_BROADCAST_CHANNEL, AUTH_EVENT_NAME, AUTH_EVENTS } from './const'
import type { AuthBroadcastMessage } from './types'
+/**
+ * LocalBroadcastChannel - Handles same-tab communication via custom events
+ * Mimics the BroadcastChannel API but uses CustomEvents internally
+ * This is needed because BroadcastChannel doesn't deliver messages to the same context
+ */
+class LocalBroadcastChannel {
+ private eventName: string
+
+ constructor(eventName: string) {
+ this.eventName = eventName
+ }
+
+ /**
+ * Post a message via custom event (same-tab only)
+ */
+ postMessage(data: any): void {
+ const customEvent = new CustomEvent(this.eventName, {
+ detail: data
+ })
+ window.dispatchEvent(customEvent)
+ }
+
+ /**
+ * Listen for custom events
+ */
+ addEventListener(type: 'message', listener: (event: MessageEvent) => void): void {
+ const customEventListener = (event: Event) => {
+ const customEvent = event as CustomEvent
+ // Convert CustomEvent to MessageEvent format for consistency
+ const messageEvent = {
+ data: customEvent.detail
+ } as MessageEvent
+ listener(messageEvent)
+ }
+ window.addEventListener(this.eventName, customEventListener)
+ }
+
+ /**
+ * Remove custom event listener
+ */
+ removeEventListener(type: 'message', listener: (event: MessageEvent) => void): void {
+ // Note: This won't work perfectly due to function reference issues
+ // In practice, we handle this with cleanup functions in AuthBroadcast
+ window.removeEventListener(this.eventName, listener as any)
+ }
+}
+
export class AuthBroadcast {
private broadcastChannel: BroadcastChannel | null = null
+ private localBroadcastChannel: LocalBroadcastChannel
constructor() {
this.setupBroadcastChannel()
+ this.localBroadcastChannel = new LocalBroadcastChannel(AUTH_EVENT_NAME)
}
/**
@@ -27,17 +80,22 @@ export class AuthBroadcast {
}
/**
- * Broadcast auth event to other tabs
+ * Broadcast auth event to all tabs (including current)
*/
broadcastEvent(type: AuthBroadcastMessage): void {
+ const message = { type }
+
+ // Broadcast to other tabs via BroadcastChannel
if (this.broadcastChannel) {
try {
- const message = { type }
this.broadcastChannel.postMessage(message)
} catch (error) {
console.warn('Failed to broadcast auth event:', error)
}
}
+
+ // Also broadcast to same tab via LocalBroadcastChannel
+ this.localBroadcastChannel.postMessage(message)
}
/**
@@ -55,22 +113,41 @@ export class AuthBroadcast {
}
/**
- * Subscribe to auth events
+ * Subscribe to auth events (from all sources)
*/
onAuthEvent(
listener: (event: MessageEvent<{ type: AuthBroadcastMessage }>) => void
): () => void {
+ const cleanupFunctions: Array<() => void> = []
+
+ // Subscribe to BroadcastChannel for cross-tab events
if (this.broadcastChannel) {
this.broadcastChannel.addEventListener('message', listener)
-
- // Return cleanup function
- return () => {
+ cleanupFunctions.push(() => {
this.broadcastChannel?.removeEventListener('message', listener)
- }
+ })
}
- // Return no-op cleanup if no broadcast channel
- return () => {}
+ // Subscribe to LocalBroadcastChannel for same-tab events
+ // We need to keep track of the actual listener function for proper cleanup
+ const localEventListener = (event: Event) => {
+ const customEvent = event as CustomEvent
+ const messageEvent = {
+ data: customEvent.detail
+ } as MessageEvent<{ type: AuthBroadcastMessage }>
+ listener(messageEvent)
+ }
+
+ // Add listener directly to window since LocalBroadcastChannel's removeEventListener has limitations
+ window.addEventListener(AUTH_EVENT_NAME, localEventListener)
+ cleanupFunctions.push(() => {
+ window.removeEventListener(AUTH_EVENT_NAME, localEventListener)
+ })
+
+ // Return combined cleanup function
+ return () => {
+ cleanupFunctions.forEach(cleanup => cleanup())
+ }
}
/**
diff --git a/extensions-web/src/shared/auth/const.ts b/extensions-web/src/shared/auth/const.ts
index efd5ad196..5b086e999 100644
--- a/extensions-web/src/shared/auth/const.ts
+++ b/extensions-web/src/shared/auth/const.ts
@@ -19,9 +19,14 @@ export const AUTH_ENDPOINTS = {
// Token expiry buffer
export const TOKEN_EXPIRY_BUFFER = 60 * 1000 // 1 minute buffer before expiry
-// Broadcast channel for cross-tab communication
+// Broadcast channel name for cross-tab communication (BroadcastChannel API)
+// Used to sync auth state between different browser tabs
export const AUTH_BROADCAST_CHANNEL = 'jan_auth_channel'
+// Custom event name for same-tab communication (window.dispatchEvent)
+// Used to notify components within the same tab about auth state changes
+export const AUTH_EVENT_NAME = 'jan-auth-event'
+
// Auth events
export const AUTH_EVENTS = {
LOGIN: 'auth:login',
diff --git a/extensions-web/src/shared/auth/service.ts b/extensions-web/src/shared/auth/service.ts
index c9a15bb33..ecedb4d62 100644
--- a/extensions-web/src/shared/auth/service.ts
+++ b/extensions-web/src/shared/auth/service.ts
@@ -158,7 +158,7 @@ export class JanAuthService {
/**
* Get current authenticated user
*/
- async getCurrentUser(): Promise {
+ async getCurrentUser(forceRefresh: boolean = false): Promise {
await this.ensureInitialized()
const authType = this.getAuthState()
@@ -166,7 +166,8 @@ export class JanAuthService {
return null
}
- if (this.currentUser) {
+ // Force refresh if requested or if cache is cleared
+ if (!forceRefresh && this.currentUser) {
return this.currentUser
}
@@ -200,6 +201,9 @@ export class JanAuthService {
this.clearAuthState()
+ // Ensure guest access after logout
+ await this.ensureGuestAccess()
+
this.authBroadcast.broadcastLogout()
if (window.location.pathname !== '/') {
@@ -208,6 +212,8 @@ export class JanAuthService {
} catch (error) {
console.error('Logout failed:', error)
this.clearAuthState()
+ // Try to ensure guest access even on error
+ this.ensureGuestAccess().catch(console.error)
}
}
@@ -359,8 +365,12 @@ export class JanAuthService {
this.authBroadcast.onAuthEvent((event) => {
switch (event.data.type) {
case AUTH_EVENTS.LOGIN:
- // Another tab logged in, refresh our state
- this.initialize().catch(console.error)
+ // Another tab logged in, clear cached data to force refresh
+ // Clear current user cache so next getCurrentUser() call fetches fresh data
+ this.currentUser = null
+ // Clear token cache so next getValidAccessToken() call refreshes
+ this.accessToken = null
+ this.tokenExpiryTime = 0
break
case AUTH_EVENTS.LOGOUT:
diff --git a/extensions-web/src/shared/db.ts b/extensions-web/src/shared/db.ts
deleted file mode 100644
index 175d6a2b5..000000000
--- a/extensions-web/src/shared/db.ts
+++ /dev/null
@@ -1,105 +0,0 @@
-/**
- * Shared IndexedDB utilities for web extensions
- */
-
-import type { IndexedDBConfig } from '../types'
-
-/**
- * Default database configuration for Jan web extensions
- */
-const DEFAULT_DB_CONFIG: IndexedDBConfig = {
- dbName: 'jan-web-db',
- version: 1,
- stores: [
- {
- name: 'assistants',
- keyPath: 'id',
- indexes: [
- { name: 'name', keyPath: 'name' },
- { name: 'created_at', keyPath: 'created_at' }
- ]
- },
- {
- name: 'threads',
- keyPath: 'id',
- indexes: [
- { name: 'title', keyPath: 'title' },
- { name: 'created_at', keyPath: 'created_at' },
- { name: 'updated_at', keyPath: 'updated_at' }
- ]
- },
- {
- name: 'messages',
- keyPath: 'id',
- indexes: [
- { name: 'thread_id', keyPath: 'thread_id' },
- { name: 'created_at', keyPath: 'created_at' }
- ]
- }
- ]
-}
-
-/**
- * Shared IndexedDB instance
- */
-let sharedDB: IDBDatabase | null = null
-
-/**
- * Get or create the shared IndexedDB instance
- */
-export const getSharedDB = async (config: IndexedDBConfig = DEFAULT_DB_CONFIG): Promise<IDBDatabase> => {
- if (sharedDB && sharedDB.name === config.dbName) {
- return sharedDB
- }
-
- return new Promise((resolve, reject) => {
- const request = indexedDB.open(config.dbName, config.version)
-
- request.onerror = () => {
- reject(new Error(`Failed to open database: ${request.error?.message}`))
- }
-
- request.onsuccess = () => {
- sharedDB = request.result
- resolve(sharedDB)
- }
-
- request.onupgradeneeded = (event) => {
- const db = (event.target as IDBOpenDBRequest).result
-
- // Create object stores
- for (const store of config.stores) {
- let objectStore: IDBObjectStore
-
- if (db.objectStoreNames.contains(store.name)) {
- // Store exists, might need to update indexes
- continue
- } else {
- // Create new store
- objectStore = db.createObjectStore(store.name, { keyPath: store.keyPath })
- }
-
- // Create indexes
- if (store.indexes) {
- for (const index of store.indexes) {
- try {
- objectStore.createIndex(index.name, index.keyPath, { unique: index.unique || false })
- } catch (error) {
- // Index might already exist, ignore
- }
- }
- }
- }
- }
- })
-}
-
-/**
- * Close the shared database connection
- */
-export const closeSharedDB = () => {
- if (sharedDB) {
- sharedDB.close()
- sharedDB = null
- }
-}
\ No newline at end of file
diff --git a/extensions-web/src/shared/index.ts b/extensions-web/src/shared/index.ts
index 447e6e8e1..f140b2ec7 100644
--- a/extensions-web/src/shared/index.ts
+++ b/extensions-web/src/shared/index.ts
@@ -1,3 +1 @@
-export { getSharedDB } from './db'
-
export * from './auth'
diff --git a/extensions/llamacpp-extension/src/index.ts b/extensions/llamacpp-extension/src/index.ts
index a086b74db..7229552a2 100644
--- a/extensions/llamacpp-extension/src/index.ts
+++ b/extensions/llamacpp-extension/src/index.ts
@@ -21,6 +21,7 @@ import {
events,
AppEvent,
DownloadEvent,
+ chatCompletionRequestMessage,
} from '@janhq/core'
import { error, info, warn } from '@tauri-apps/plugin-log'
@@ -36,12 +37,12 @@ import {
import { invoke } from '@tauri-apps/api/core'
import { getProxyConfig } from './util'
import { basename } from '@tauri-apps/api/path'
-import {
- GgufMetadata,
- readGgufMetadata,
-} from '@janhq/tauri-plugin-llamacpp-api'
+import { readGgufMetadata } from '@janhq/tauri-plugin-llamacpp-api'
import { getSystemUsage, getSystemInfo } from '@janhq/tauri-plugin-hardware-api'
+// Error message constant - matches web-app/src/utils/error.ts
+const OUT_OF_CONTEXT_SIZE = 'the request exceeds the available context size.'
+
type LlamacppConfig = {
version_backend: string
auto_update_engine: boolean
@@ -175,7 +176,7 @@ export default class llamacpp_extension extends AIEngine {
provider: string = 'llamacpp'
autoUnload: boolean = true
llamacpp_env: string = ''
- memoryMode: string = 'high'
+ memoryMode: string = ''
readonly providerId: string = 'llamacpp'
private config: LlamacppConfig
@@ -207,7 +208,7 @@ export default class llamacpp_extension extends AIEngine {
this.autoUnload = this.config.auto_unload
this.llamacpp_env = this.config.llamacpp_env
- this.memoryMode = this.config.memory_util
+ this.memoryMode = this.config.memory_util || 'high'
// This sets the base directory where model files for this provider are stored.
this.providerPath = await joinPath([
@@ -1541,7 +1542,7 @@ export default class llamacpp_extension extends AIEngine {
args.push('--main-gpu', String(cfg.main_gpu))
// Boolean flags
- if (!cfg.ctx_shift) args.push('--no-context-shift')
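+ // Context shift is now opt-in: pass --context-shift when enabled (previously it was disabled via --no-context-shift)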
+ if (cfg.ctx_shift) args.push('--context-shift')
if (Number(version.replace(/^b/, '')) >= 6325) {
if (!cfg.flash_attn) args.push('--flash-attn', 'off') //default: auto = ON when supported
} else {
@@ -1739,6 +1740,13 @@ export default class llamacpp_extension extends AIEngine {
try {
const data = JSON.parse(jsonStr)
const chunk = data as chatCompletionChunk
+
+ // Check for out-of-context error conditions
+ if (chunk.choices?.[0]?.finish_reason === 'length') {
+ // finish_reason 'length' indicates context limit was hit
+ throw new Error(OUT_OF_CONTEXT_SIZE)
+ }
+
yield chunk
} catch (e) {
logger.error('Error parsing JSON from stream or server error:', e)
@@ -1795,6 +1803,13 @@ export default class llamacpp_extension extends AIEngine {
'Content-Type': 'application/json',
'Authorization': `Bearer ${sessionInfo.api_key}`,
}
+ // always enable prompt progress return if stream is true
+ // Requires llamacpp version > b6399
+ // Example json returned from server
+ // {"choices":[{"finish_reason":null,"index":0,"delta":{"role":"assistant","content":null}}],"created":1758113912,"id":"chatcmpl-UwZwgxQKyJMo7WzMzXlsi90YTUK2BJro","model":"qwen","system_fingerprint":"b1-e4912fc","object":"chat.completion.chunk","prompt_progress":{"total":36,"cache":0,"processed":36,"time_ms":5706760300}}
+ // (chunk.prompt_progress?.processed / chunk.prompt_progress?.total) * 100
+ // chunk.prompt_progress?.cache is for past tokens already in kv cache
+ opts.return_progress = true
const body = JSON.stringify(opts)
if (opts.stream) {
@@ -1817,7 +1832,15 @@ export default class llamacpp_extension extends AIEngine {
)
}
- return (await response.json()) as chatCompletion
+ const completionResponse = (await response.json()) as chatCompletion
+
+ // Check for out-of-context error conditions
+ if (completionResponse.choices?.[0]?.finish_reason === 'length') {
+ // finish_reason 'length' indicates context limit was hit
+ throw new Error(OUT_OF_CONTEXT_SIZE)
+ }
+
+ return completionResponse
}
override async delete(modelId: string): Promise {
@@ -2018,24 +2041,6 @@ export default class llamacpp_extension extends AIEngine {
totalMemory,
}
}
- private async getKVCachePerToken(
- meta: Record<string, any>
- ): Promise<number> {
- const arch = meta['general.architecture']
- const nLayer = Number(meta[`${arch}.block_count`])
- const nHead = Number(meta[`${arch}.attention.head_count`])
-
- // Get head dimensions
- const nHeadKV = Number(meta[`${arch}.attention.head_count_kv`]) || nHead
- const embeddingLen = Number(meta[`${arch}.embedding_length`])
- const headDim = embeddingLen / nHead
-
- // KV cache uses head_count_kv (for GQA models) or head_count
- // Each token needs K and V, both are fp16 (2 bytes)
- const bytesPerToken = nHeadKV * headDim * 2 * 2 * nLayer // K+V, fp16, all layers
-
- return bytesPerToken
- }
private async getLayerSize(
path: string,
@@ -2082,10 +2087,9 @@ export default class llamacpp_extension extends AIEngine {
gguf.metadata
)
- // Fixed KV cache calculation
- const kvCachePerToken = await this.getKVCachePerToken(gguf.metadata)
+ const kvCachePerToken = (await this.estimateKVCache(gguf.metadata))
+ .perTokenSize
- // Debug logging
logger.info(
`Model size: ${modelSize}, Layer size: ${layerSize}, Total layers: ${totalLayers}, KV cache per token: ${kvCachePerToken}`
)
@@ -2101,33 +2105,25 @@ export default class llamacpp_extension extends AIEngine {
throw new Error(`Invalid layer size: ${layerSize}`)
}
- // GPU overhead factor (20% reserved for GPU operations, alignment, etc.)
- const GPU_OVERHEAD_FACTOR = 0.8
-
- // VRAM budget with overhead consideration
+ // Reserve memory for OS, other applications, and fixed engine overhead.
const VRAM_RESERVE_GB = 0.5
const VRAM_RESERVE_BYTES = VRAM_RESERVE_GB * 1024 * 1024 * 1024
- const usableVRAM = Math.max(
- 0,
- (memoryInfo.totalVRAM - VRAM_RESERVE_BYTES) * GPU_OVERHEAD_FACTOR
- )
+ const ENGINE_FIXED_OVERHEAD_BYTES = 0.2 * 1024 * 1024 * 1024 // For scratch buffers etc.
// Get model's maximum context length
const arch = gguf.metadata['general.architecture']
const modelMaxContextLength =
- Number(gguf.metadata[`${arch}.context_length`]) || 131072 // Default fallback
+ Number(gguf.metadata[`${arch}.context_length`]) || 8192
- // Set minimum context length
- const MIN_CONTEXT_LENGTH = 2048 // Reduced from 4096 for better compatibility
+ const MIN_CONTEXT_LENGTH = 1024
- // System RAM budget
+ // Memory percentages applied to both VRAM and RAM
const memoryPercentages = { high: 0.7, medium: 0.5, low: 0.4 }
logger.info(
`Memory info - Total (VRAM + RAM): ${memoryInfo.totalMemory}, Total VRAM: ${memoryInfo.totalVRAM}, Mode: ${this.memoryMode}`
)
- // Validate memory info
if (!memoryInfo.totalMemory || isNaN(memoryInfo.totalMemory)) {
throw new Error(`Invalid total memory: ${memoryInfo.totalMemory}`)
}
@@ -2140,210 +2136,170 @@ export default class llamacpp_extension extends AIEngine {
)
}
- // Calculate actual system RAM
- const actualSystemRAM = Math.max(
+ // Apply memory mode to both VRAM and RAM separately
+ const memoryModeMultiplier = memoryPercentages[this.memoryMode]
+ const usableVRAM = Math.max(
0,
- memoryInfo.totalMemory - memoryInfo.totalVRAM
+ memoryInfo.totalVRAM * memoryModeMultiplier -
+ VRAM_RESERVE_BYTES -
+ ENGINE_FIXED_OVERHEAD_BYTES
)
- const usableSystemMemory =
- actualSystemRAM * memoryPercentages[this.memoryMode]
+
+ const actualSystemRAM = Math.max(0, memoryInfo.totalRAM)
+ const usableSystemMemory = actualSystemRAM * memoryModeMultiplier
logger.info(
- `Actual System RAM: ${actualSystemRAM}, Usable VRAM: ${usableVRAM}, Usable System Memory: ${usableSystemMemory}`
+ `Actual System RAM: ${actualSystemRAM}, Usable VRAM for plan: ${usableVRAM}, Usable System Memory: ${usableSystemMemory}`
)
- // --- Priority 1: Allocate mmproj (if exists) ---
- let offloadMmproj = false
- let remainingVRAM = usableVRAM
-
- if (mmprojSize > 0) {
- if (mmprojSize <= remainingVRAM) {
- offloadMmproj = true
- remainingVRAM -= mmprojSize
- logger.info(`MMProj allocated to VRAM: ${mmprojSize} bytes`)
- } else {
- logger.info(`MMProj will use CPU RAM: ${mmprojSize} bytes`)
- }
- }
-
- // --- Priority 2: Calculate optimal layer/context balance ---
let gpuLayers = 0
- let maxContextLength = MIN_CONTEXT_LENGTH
+ let maxContextLength = 0
let noOffloadKVCache = false
let mode: ModelPlan['mode'] = 'Unsupported'
+ let offloadMmproj = false
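+ // Planning order, sketched from the logic below:
+ // 1. whole model + target context fit in VRAM -> GPU
+ // 2. whole model fits, context shrunk to whatever VRAM remains -> GPU
+ // 3. some layers plus the minimum context fit in VRAM -> Hybrid
+ // 4. fallback: layers in VRAM, KV cache in system RAM -> Hybrid
+ // 5. everything in system RAM -> CPU, otherwise Unsupported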
- // Calculate how much VRAM we need for different context sizes
- const contextSizes = [2048, 4096, 8192, 16384, 32768, 65536, 131072]
- const targetContext = requestedCtx || modelMaxContextLength
-
- // Find the best balance of layers and context
- let bestConfig = {
- layers: 0,
- context: MIN_CONTEXT_LENGTH,
- vramUsed: 0,
+ let remainingVRAM = usableVRAM
+ if (mmprojSize > 0 && mmprojSize <= remainingVRAM) {
+ offloadMmproj = true
+ remainingVRAM -= mmprojSize
}
+ const vramForMinContext = (
+ await this.estimateKVCache(gguf.metadata, MIN_CONTEXT_LENGTH)
+ ).size
- for (const ctxSize of contextSizes) {
- if (ctxSize > targetContext) break
-
- const kvCacheSize = ctxSize * kvCachePerToken
- const availableForLayers = remainingVRAM - kvCacheSize
-
- if (availableForLayers <= 0) continue
-
- const possibleLayers = Math.min(
- Math.floor(availableForLayers / layerSize),
- totalLayers
+ const ramForModel = modelSize + (offloadMmproj ? 0 : mmprojSize)
+ if (ramForModel + vramForMinContext > usableSystemMemory + usableVRAM) {
+ logger.error(
+ `Model unsupported. Not enough resources for model and min context.`
)
-
- if (possibleLayers > 0) {
- const totalVramNeeded = possibleLayers * layerSize + kvCacheSize
-
- // Verify this fits with some margin
- if (totalVramNeeded <= remainingVRAM * 0.95) {
- bestConfig = {
- layers: possibleLayers,
- context: ctxSize,
- vramUsed: totalVramNeeded,
- }
- }
+ return {
+ gpuLayers: 0,
+ maxContextLength: 0,
+ noOffloadKVCache: true,
+ mode: 'Unsupported',
+ offloadMmproj: false,
}
}
- // Apply the best configuration found
- if (bestConfig.layers > 0) {
- gpuLayers = bestConfig.layers
- maxContextLength = bestConfig.context
+ const targetContext = Math.min(
+ requestedCtx || modelMaxContextLength,
+ modelMaxContextLength
+ )
+
+ let targetContextSize = (
+ await this.estimateKVCache(gguf.metadata, targetContext)
+ ).size
+
+ // Use `kvCachePerToken` for all VRAM calculations
+ if (modelSize + targetContextSize <= remainingVRAM) {
+ mode = 'GPU'
+ gpuLayers = totalLayers
+ maxContextLength = targetContext
noOffloadKVCache = false
- mode = gpuLayers === totalLayers ? 'GPU' : 'Hybrid'
+ logger.info(
+ 'Planning: Ideal case fits. All layers and target context in VRAM.'
+ )
+ } else if (modelSize <= remainingVRAM) {
+ mode = 'GPU'
+ gpuLayers = totalLayers
+ noOffloadKVCache = false
+ const vramLeftForContext = remainingVRAM - modelSize
+ maxContextLength = Math.floor(vramLeftForContext / kvCachePerToken)
+
+ // Add safety check to prevent OOM
+ const safetyBuffer = 0.9 // Use 90% of calculated context to be safe
+ maxContextLength = Math.floor(maxContextLength * safetyBuffer)
logger.info(
- `Best GPU config: ${gpuLayers}/${totalLayers} layers, ${maxContextLength} context, ` +
- `VRAM used: ${bestConfig.vramUsed}/${remainingVRAM} bytes`
+ `Planning: All layers fit in VRAM, but context must be reduced. VRAM left: ${vramLeftForContext}, kvCachePerToken: ${kvCachePerToken}, calculated context: ${maxContextLength}`
)
} else {
- // Fallback: Try minimal GPU layers with KV cache on CPU
- gpuLayers = Math.min(
- Math.floor((remainingVRAM * 0.9) / layerSize), // Use 90% for layers
- totalLayers
- )
+ const vramAvailableForLayers = remainingVRAM - vramForMinContext
- if (gpuLayers > 0) {
- // Calculate available system RAM for KV cache
- const cpuLayers = totalLayers - gpuLayers
- const modelCPUSize = cpuLayers * layerSize
- const mmprojCPUSize = mmprojSize > 0 && !offloadMmproj ? mmprojSize : 0
- const systemRAMUsed = modelCPUSize + mmprojCPUSize
- const availableSystemRAMForKVCache = Math.max(
- 0,
- usableSystemMemory - systemRAMUsed
+ if (vramAvailableForLayers >= layerSize) {
+ mode = 'Hybrid'
+ gpuLayers = Math.min(
+ Math.floor(vramAvailableForLayers / layerSize),
+ totalLayers
)
+ noOffloadKVCache = false
+ const vramUsedByLayers = gpuLayers * layerSize
+ const vramLeftForContext = remainingVRAM - vramUsedByLayers
+ maxContextLength = Math.floor(vramLeftForContext / kvCachePerToken)
- // Calculate context that fits in system RAM
- const systemRAMContext = Math.min(
- Math.floor(availableSystemRAMForKVCache / kvCachePerToken),
- targetContext
+ logger.info(
+ 'Planning: Hybrid mode. Offloading layers to fit context in VRAM.'
)
+ }
+ }
- if (systemRAMContext >= MIN_CONTEXT_LENGTH) {
- maxContextLength = systemRAMContext
- noOffloadKVCache = true
+ // Fallback logic: try different configurations if no VRAM-based plan worked
+ if (mode === 'Unsupported') {
+ logger.info('Planning: Trying fallback configurations...')
+
+ // Try putting some layers on GPU with KV cache in RAM
+ const possibleGpuLayers = Math.floor(remainingVRAM / layerSize)
+ if (possibleGpuLayers > 0) {
+ gpuLayers = Math.min(possibleGpuLayers, totalLayers)
+ const ramUsedByCpuLayers = (totalLayers - gpuLayers) * layerSize
+ const ramUsedByMmproj = !offloadMmproj ? mmprojSize : 0
+ const availableRamForKv =
+ usableSystemMemory - (ramUsedByCpuLayers + ramUsedByMmproj)
+ // Note: Use `kvCachePerToken` for RAM calculation, as the overhead is GPU-specific
+ const contextInRam = Math.floor(availableRamForKv / kvCachePerToken)
+
+ if (contextInRam >= MIN_CONTEXT_LENGTH) {
mode = 'Hybrid'
-
- logger.info(
- `Hybrid mode: ${gpuLayers}/${totalLayers} layers on GPU, ` +
- `${maxContextLength} context on CPU RAM`
- )
- } else {
- // Can't fit reasonable context even with CPU RAM
- // Reduce GPU layers further
- gpuLayers = Math.floor(gpuLayers / 2)
- maxContextLength = MIN_CONTEXT_LENGTH
+ maxContextLength = contextInRam
noOffloadKVCache = true
- mode = gpuLayers > 0 ? 'Hybrid' : 'CPU'
+ logger.info(
+ `Planning: Fallback hybrid - GPU layers: ${gpuLayers}, Context in RAM: ${maxContextLength}`
+ )
}
- } else {
- // Pure CPU mode
+ }
+
+ // If still unsupported, try pure CPU mode
+ if (mode === 'Unsupported') {
gpuLayers = 0
noOffloadKVCache = true
-
- // Calculate context for pure CPU mode
- const totalCPUMemoryNeeded = modelSize + (mmprojSize || 0)
- const availableForKVCache = Math.max(
- 0,
- usableSystemMemory - totalCPUMemoryNeeded
- )
-
- maxContextLength = Math.min(
- Math.max(
- MIN_CONTEXT_LENGTH,
- Math.floor(availableForKVCache / kvCachePerToken)
- ),
- targetContext
- )
-
- mode = maxContextLength >= MIN_CONTEXT_LENGTH ? 'CPU' : 'Unsupported'
- }
- }
-
- // Safety check: Verify total GPU memory usage
- if (gpuLayers > 0 && !noOffloadKVCache) {
- const estimatedGPUUsage =
- gpuLayers * layerSize +
- maxContextLength * kvCachePerToken +
- (offloadMmproj ? mmprojSize : 0)
-
- if (estimatedGPUUsage > memoryInfo.totalVRAM * 0.9) {
- logger.warn(
- `GPU memory usage (${estimatedGPUUsage}) exceeds safe limit. Adjusting...`
- )
-
- // Reduce context first
- while (
- maxContextLength > MIN_CONTEXT_LENGTH &&
- estimatedGPUUsage > memoryInfo.totalVRAM * 0.9
- ) {
- maxContextLength = Math.floor(maxContextLength / 2)
- const newEstimate =
- gpuLayers * layerSize +
- maxContextLength * kvCachePerToken +
- (offloadMmproj ? mmprojSize : 0)
- if (newEstimate <= memoryInfo.totalVRAM * 0.9) break
- }
-
- // If still too much, reduce layers
- if (estimatedGPUUsage > memoryInfo.totalVRAM * 0.9) {
- gpuLayers = Math.floor(gpuLayers * 0.7)
- mode = gpuLayers > 0 ? 'Hybrid' : 'CPU'
- noOffloadKVCache = true // Move KV cache to CPU
+ offloadMmproj = false
+ const ramUsedByModel = modelSize + mmprojSize
+ const availableRamForKv = usableSystemMemory - ramUsedByModel
+ maxContextLength = Math.floor(availableRamForKv / kvCachePerToken)
+ if (maxContextLength >= MIN_CONTEXT_LENGTH) {
+ mode = 'CPU'
+ logger.info(`Planning: CPU mode - Context: ${maxContextLength}`)
}
}
}
- // Apply user-requested context limit if specified
+ if (mode === 'CPU' || noOffloadKVCache) {
+ offloadMmproj = false
+ }
+
if (requestedCtx && requestedCtx > 0) {
maxContextLength = Math.min(maxContextLength, requestedCtx)
- logger.info(
- `User requested context: ${requestedCtx}, final: ${maxContextLength}`
- )
}
- // Ensure we never exceed model's maximum context
maxContextLength = Math.min(maxContextLength, modelMaxContextLength)
- // Final validation
- if (gpuLayers <= 0 && maxContextLength < MIN_CONTEXT_LENGTH) {
+ if (maxContextLength < MIN_CONTEXT_LENGTH) {
mode = 'Unsupported'
}
- // Ensure maxContextLength is valid
- maxContextLength = isNaN(maxContextLength)
- ? MIN_CONTEXT_LENGTH
- : Math.max(MIN_CONTEXT_LENGTH, maxContextLength)
+ if (mode === 'Unsupported') {
+ gpuLayers = 0
+ maxContextLength = 0
+ }
+
+ maxContextLength = isNaN(maxContextLength)
+ ? 0
+ : Math.floor(maxContextLength)
- // Log final plan
const mmprojInfo = mmprojPath
- ? `, mmprojSize=${(mmprojSize / (1024 * 1024)).toFixed(2)}MB, offloadMmproj=${offloadMmproj}`
+ ? `, mmprojSize=${(mmprojSize / (1024 * 1024)).toFixed(
+ 2
+ )}MB, offloadMmproj=${offloadMmproj}`
: ''
logger.info(
@@ -2360,14 +2316,13 @@ export default class llamacpp_extension extends AIEngine {
offloadMmproj,
}
}
-
/**
* estimate KVCache size from a given metadata
*/
private async estimateKVCache(
meta: Record<string, any>,
ctx_size?: number
- ): Promise<number> {
+ ): Promise<{ size: number; perTokenSize: number }> {
const arch = meta['general.architecture']
if (!arch) throw new Error('Invalid metadata: architecture not found')
@@ -2403,12 +2358,14 @@ export default class llamacpp_extension extends AIEngine {
)
}
- let ctxLen: number
- if (!ctx_size) {
- ctxLen = Number(meta[`${arch}.context_length`])
- } else {
- ctxLen = ctx_size
- }
+ const maxCtx = Number(meta[`${arch}.context_length`])
+ if (!maxCtx) throw new Error('Invalid metadata: context_length not found')
+
+ // If the user supplied a value, clamp it to the model's max context length
+ let ctxLen = ctx_size ? Math.min(ctx_size, maxCtx) : maxCtx
+
+ logger.info(`Final context length used for KV size: ${ctxLen}`)
+ logger.info(`nLayer: ${nLayer}, nHead: ${nHead}, headDim (K+V): ${headDim}`)
logger.info(`ctxLen: ${ctxLen}`)
logger.info(`nLayer: ${nLayer}`)
@@ -2421,10 +2378,10 @@ export default class llamacpp_extension extends AIEngine {
// fp16 = 16 bits = 2 bytes per element
const bytesPerElement = 2
- // Total KV cache size per token = nHead * headDim * bytesPerElement
- const kvPerToken = nHead * headDim * bytesPerElement
+ // Total KV cache size per token = nHead * headDim * bytesPerElement * nLayer
+ const kvPerToken = nHead * headDim * bytesPerElement * nLayer
- return ctxLen * nLayer * kvPerToken
+ return { size: ctxLen * kvPerToken, perTokenSize: kvPerToken }
}
private async getModelSize(path: string): Promise {
@@ -2458,9 +2415,9 @@ export default class llamacpp_extension extends AIEngine {
const gguf = await readGgufMetadata(path)
let kvCacheSize: number
if (ctx_size) {
- kvCacheSize = await this.estimateKVCache(gguf.metadata, ctx_size)
+ kvCacheSize = (await this.estimateKVCache(gguf.metadata, ctx_size)).size
} else {
- kvCacheSize = await this.estimateKVCache(gguf.metadata)
+ kvCacheSize = (await this.estimateKVCache(gguf.metadata)).size
}
// Total memory consumption = model weights + kvcache
@@ -2470,14 +2427,15 @@ export default class llamacpp_extension extends AIEngine {
)
// Use 80% of total memory as the usable limit
- const USABLE_MEMORY_PERCENTAGE = 0.8
+ const USABLE_MEMORY_PERCENTAGE = 0.9
const usableTotalMemory =
- memoryInfo.totalMemory * USABLE_MEMORY_PERCENTAGE
+ memoryInfo.totalRAM * USABLE_MEMORY_PERCENTAGE +
+ memoryInfo.totalVRAM * USABLE_MEMORY_PERCENTAGE
const usableVRAM = memoryInfo.totalVRAM * USABLE_MEMORY_PERCENTAGE
- // Check if model fits in total memory at all
- if (modelSize > usableTotalMemory) {
- return 'RED'
+ // Check if model fits in total memory at all (this is the hard limit)
+ if (totalRequired > usableTotalMemory) {
+ return 'RED' // Truly impossible to run
}
// Check if everything fits in VRAM (ideal case)
@@ -2485,14 +2443,11 @@ export default class llamacpp_extension extends AIEngine {
return 'GREEN'
}
- // Check if model fits in VRAM but total requirement exceeds VRAM
- // OR if total requirement fits in total memory but not in VRAM
- if (modelSize <= usableVRAM || totalRequired <= usableTotalMemory) {
- return 'YELLOW'
- }
-
- // If we get here, nothing fits properly
- return 'RED'
+ // If we get here, it means:
+ // - Total requirement fits in combined memory
+ // - But doesn't fit entirely in VRAM
+ // This is the CPU-GPU hybrid scenario
+ return 'YELLOW'
} catch (e) {
throw new Error(String(e))
}
@@ -2537,8 +2492,151 @@ export default class llamacpp_extension extends AIEngine {
logger.error('Failed to validate GGUF file:', error)
return {
isValid: false,
- error: `Failed to read model metadata: ${error instanceof Error ? error.message : 'Unknown error'}`,
+ error: `Failed to read model metadata: ${
+ error instanceof Error ? error.message : 'Unknown error'
+ }`,
}
}
}
+
+ async getTokensCount(opts: chatCompletionRequest): Promise<number> {
+ const sessionInfo = await this.findSessionByModel(opts.model)
+ if (!sessionInfo) {
+ throw new Error(`No active session found for model: ${opts.model}`)
+ }
+
+ // Check if the process is alive
+ const result = await invoke('plugin:llamacpp|is_process_running', {
+ pid: sessionInfo.pid,
+ })
+ if (result) {
+ try {
+ await fetch(`http://localhost:${sessionInfo.port}/health`)
+ } catch (e) {
+ this.unload(sessionInfo.model_id)
+ throw new Error('Model appears to have crashed! Please reload!')
+ }
+ } else {
+ throw new Error('Model has crashed! Please reload!')
+ }
+
+ const baseUrl = `http://localhost:${sessionInfo.port}`
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': `Bearer ${sessionInfo.api_key}`,
+ }
+
+ // Count image tokens first
+ let imageTokens = 0
+ const hasImages = opts.messages.some(
+ (msg) =>
+ Array.isArray(msg.content) &&
+ msg.content.some((content) => content.type === 'image_url')
+ )
+
+ if (hasImages) {
+ logger.info('Conversation has images')
+ try {
+ // Read mmproj metadata to get vision parameters
+ logger.info(`MMPROJ PATH: ${sessionInfo.mmproj_path}`)
+
+ const metadata = await readGgufMetadata(sessionInfo.mmproj_path)
+ logger.info(`mmproj metadata: ${JSON.stringify(metadata.metadata)}`)
+ imageTokens = await this.calculateImageTokens(
+ opts.messages,
+ metadata.metadata
+ )
+ } catch (error) {
+ logger.warn('Failed to calculate image tokens:', error)
+ // Fallback to a rough estimate if metadata reading fails
+ imageTokens = this.estimateImageTokensFallback(opts.messages)
+ }
+ }
+
+ // Calculate text tokens
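+ // Two-step flow against the local llama.cpp server: /apply-template renders the messages
+ // through the model's chat template, then /tokenize counts the tokens in the rendered prompt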
+ const messages = JSON.stringify({ messages: opts.messages })
+
+ let parseResponse = await fetch(`${baseUrl}/apply-template`, {
+ method: 'POST',
+ headers: headers,
+ body: messages,
+ })
+
+ if (!parseResponse.ok) {
+ const errorData = await parseResponse.json().catch(() => null)
+ throw new Error(
+ `API request failed with status ${
+ parseResponse.status
+ }: ${JSON.stringify(errorData)}`
+ )
+ }
+
+ const parsedPrompt = await parseResponse.json()
+
+ const response = await fetch(`${baseUrl}/tokenize`, {
+ method: 'POST',
+ headers: headers,
+ body: JSON.stringify({
+ content: parsedPrompt.prompt,
+ }),
+ })
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => null)
+ throw new Error(
+ `API request failed with status ${response.status}: ${JSON.stringify(
+ errorData
+ )}`
+ )
+ }
+
+ const dataTokens = await response.json()
+ const textTokens = dataTokens.tokens?.length || 0
+
+ return textTokens + imageTokens
+ }
+
+ private async calculateImageTokens(
+ messages: chatCompletionRequestMessage[],
+ metadata: Record<string, any>
+ ): Promise<number> {
+ // Extract vision parameters from metadata
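+ // Heuristic: tokens per image ~= clip.vision.projection_dim / 10, falling back to 256 when the field is missing or unreadable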
+ const projectionDim = Math.floor(Number(metadata['clip.vision.projection_dim']) / 10) || 256
+
+ // Count images in messages
+ let imageCount = 0
+ for (const message of messages) {
+ if (Array.isArray(message.content)) {
+ imageCount += message.content.filter(
+ (content) => content.type === 'image_url'
+ ).length
+ }
+ }
+
+ logger.info(
+ `Calculated ${projectionDim} tokens per image, ${imageCount} images total`
+ )
+ return projectionDim * imageCount - imageCount // remove the lingering <__image__> placeholder token
+ }
+
+ private estimateImageTokensFallback(
+ messages: chatCompletionRequestMessage[]
+ ): number {
+ // Fallback estimation if metadata reading fails
+ const estimatedTokensPerImage = 256 // Gemma's siglip
+
+ let imageCount = 0
+ for (const message of messages) {
+ if (Array.isArray(message.content)) {
+ imageCount += message.content.filter(
+ (content) => content.type === 'image_url'
+ ).length
+ }
+ }
+
+ logger.warn(
+ `Fallback estimation: ${estimatedTokensPerImage} tokens per image, ${imageCount} images total`
+ )
+ return imageCount * estimatedTokensPerImage - imageCount // remove the lingering <__image__> placeholder token
+ }
}
diff --git a/mise.toml b/mise.toml
index 85d87aade..e52d230a8 100644
--- a/mise.toml
+++ b/mise.toml
@@ -124,11 +124,6 @@ run = [
"yarn build"
]
-[tasks.build-and-publish]
-description = "Build and publish the application (matches Makefile)"
-depends = ["install-and-build"]
-run = "yarn build"
-
# ============================================================================
# QUALITY ASSURANCE TASKS
# ============================================================================
diff --git a/package.json b/package.json
index 52791790f..2ec212088 100644
--- a/package.json
+++ b/package.json
@@ -27,9 +27,9 @@
"copy:assets:tauri": "cpx \"pre-install/*.tgz\" \"src-tauri/resources/pre-install/\" && cpx \"LICENSE\" \"src-tauri/resources/\"",
"download:lib": "node ./scripts/download-lib.mjs",
"download:bin": "node ./scripts/download-bin.mjs",
- "build:tauri:win32": "yarn download:bin && yarn tauri build",
- "build:tauri:linux": "yarn download:bin && NO_STRIP=1 ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build --verbose && ./src-tauri/build-utils/buildAppImage.sh",
- "build:tauri:darwin": "yarn tauri build --target universal-apple-darwin",
+ "build:tauri:win32": "yarn download:bin && yarn download:lib && yarn tauri build",
+ "build:tauri:linux": "yarn download:bin && yarn download:lib && NO_STRIP=1 ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build && ./src-tauri/build-utils/buildAppImage.sh",
+ "build:tauri:darwin": "yarn download:bin && yarn tauri build --target universal-apple-darwin",
"build:tauri": "yarn build:icon && yarn copy:assets:tauri && run-script-os",
"build:tauri:plugin:api": "cd src-tauri/plugins && yarn install && yarn workspaces foreach -Apt run build",
"build:icon": "tauri icon ./src-tauri/icons/icon.png",
diff --git a/specs/QA-checklist.md b/specs/QA-checklist.md
deleted file mode 100644
index ddfee32d2..000000000
--- a/specs/QA-checklist.md
+++ /dev/null
@@ -1,191 +0,0 @@
-# Regression test
-
-**Release Version:** v0.6.0
-
-**Operating System:**
-
----
-
-## A. Installation, Update, and Uninstallation
-
-### 1. Users install app (New user flow)
-
-- [ ] :rocket: Installation package is not corrupted and passes all security checks.
-- [ ] :key: App launches successfully after installation.
-
-### 2. Users update app (Existing user flow)
-
-- [ ] :key: Validate that the update does not corrupt user data or settings.
-- [ ] :key: App restarts or prompts the user to restart after an update.
-- [ ] When updating the app, check if the `/models` directory has any JSON/YML files that change according to the update.
-- [ ] Updating the app also updates extensions correctly, test functionality changes.
-
-### 3. Users uninstall / close app
-
-- [ ] :key: After closing the app, all models are unloaded.
-- [ ] :key::warning: Uninstallation process removes the app successfully from the system.
-- [ ] Clean the data folder and open the app to check if it creates all the necessary folders, especially models and extensions.
-
-## B. Overview
-
-### 1. Shortcut key
-
-- [ ] :key: Test each shortcut key to confirm it works as described (My models, navigating, opening, closing, etc.).
-
-### 2. Users check the `active model`
-
-- [ ] :key: The app correctly displays the state of the loading model (e.g., loading, ready, error).
-- [ ] :key: Confirm that the app allows users to switch between models if multiple are available.
-- [ ] Check that the app provides feedback or instructions if the model fails to load.
- [ ] Verify the troubleshooting assistant correctly captures hardware / log info [#1784](https://github.com/menloresearch/jan/issues/1784)
-
-## C. Thread
-
-### 1. Users can chat with Jan, the default assistant
-
-- [ ] :key: Sending a message enables users to receive responses from model.
-- [ ] :key: Conversation thread is maintained without any loss of data upon sending multiple messages.
-- [ ] Users should be able to edit msg and the assistant will re-generate the answer based on the edited version of the message.
-- [ ] Test for the ability to send different types of messages (e.g., text, emojis, code blocks).
-- [ ] Check the output format of the AI (code blocks, JSON, markdown, ...).
-- [ ] :key: Validate the scroll functionality in the chat window for lengthy conversations.
-- [ ] User can copy / delete the response.
-- [ ] :key: Check the `clear message` / `delete entire chat` button works.
-- [ ] Deleting all the chat retains the model instruction and settings.
-- [ ] :key: Appropriate error handling and messaging if the assistant fails to respond.
-- [ ] Test assistant's ability to maintain context over multiple exchanges.
-- [ ] :key: Check the `create new chat` button, and new conversation will have an automatically generated thread title based on users msg.
-- [ ] Changing `models` mid-thread the app can still handle it.
-- [ ] Check the `regenerate` button renews the response (single / multiple times).
-- [ ] Check the `Instructions` update correctly after the user updates it midway (mid-thread).
-
-### 2. Users can customize chat settings like model parameters via both the GUI & model.yml
-
-- [ ] Adjust model parameters (e.g., Temperature, Top K, Top P) from the GUI and verify they are reflected in the chat behavior.
-- [ ] :key: Changes can be saved and persisted between sessions.
-- [ ] Users can access and modify the model.yml file.
-- [ ] :key: Changes made in model.yml are correctly applied to the chat session upon reload or restart.
-- [ ] Check the maximum and minimum limits of the adjustable parameters and how they affect the assistant's responses.
-- [ ] :key: Users switch between threads with different models, the app can handle it.
-
-### 3. Model dropdown
-
-- :key: Model list should highlight recommended based on user RAM (this is not really correct, I think it's based on static formula)
-- [ ] Model size should display (for both installed and imported models)
-
-### 4. Users can click on a history thread
-
-- [ ] Chat window displays the entire conversation from the selected history thread without any missing messages.
-- [ ] Historical threads reflect the exact state of the chat at that time, including settings.
-- [ ] :key: Ability to delete or clean old threads.
-- [ ] Changing the title of the thread updates correctly.
-
-### 5. Users can config instructions for the assistant.
-
-- [ ] Instructions set by the user are being followed by the assistant in subsequent conversations.
-- [ ] :key: Changes to instructions are updated in real time and do not require a restart of the application or session.
-- [ ] :key: Ability to reset instructions to default or clear them completely.
-- [ ] :key: RAG - Users can import documents and the system should process queries about the uploaded file, providing accurate and appropriate responses in the conversation thread.
-- [ ] :key: Jan can see - Users can import image and Model with vision can generate responses (e.g. LLaVa model). [#294](https://github.com/menloresearch/jan/issues/294)
-
-## D. Hub
-
-### 1. Users can discover recommended models
-
-- :key: Each model's recommendations are consistent with the user’s activity and preferences.
-- [ ] Search models and verify results / action on the results
-
-### 2. Users can download models suitable for their devices, e.g. compatible with their RAM
-
-- [ ] Model list should be in order: Featured > Remote > Local
-- [ ] :key: Ensure that models are labeled with RAM requirements.
-- [ ] :key: Check the download model functionality and validate if the cancel download feature works correctly.
-
-### 3. Users can download models via a HuggingFace URL [#1740](https://github.com/menloresearch/jan/issues/1740)
-
-- [ ] :key: Import via Hugging Face Id / full HuggingFace URL, check the progress bar reflects the download process
-- [ ] :key: Test deeplink import [#2876](https://github.com/menloresearch/jan/issues/2876)
-- [ ] :key: Users can use / remove the imported model.
-
-### 4. Users can import new models to the Hub
-
-- [ ] :key: Ensure import successfully via drag / drop or upload GGUF.
-- [ ] :key: Verify Move model binary file / Keep Original Files & Symlink option are working
-- [ ] Users can add more info to the imported model / edit name
-- [ ] :key: Ensure the new model updates after restarting the app.
-
-### 5. Users can use the model as they want
-
-- [ ] :key: Check `start` / `stop` / `delete` button response exactly what it does.
-- [ ] Check if starting another model stops the other model entirely.
-- [ ] :rocket: Navigate to `hub` > Click `Use` button to use model. Expect to jump to thread and see the model in dropdown model selector.
-- [ ] :key: Check when deleting a model it will delete all the files on the user's computer.
-- [ ] :warning:The recommended tags should present right for the user's hardware.
-
-### 6. Users can Integrate With a Remote Server
-
-- [ ] :key: Import openAI GPT model https://jan.ai/guides/using-models/integrate-with-remote-server/ and the model displayed in Hub / Thread dropdown
-- [ ] Users can use the remote model properly (openAI GPT, Groq)
-
-## E. System Monitor
-
-### 1. Users can see disk and RAM utilization
-
-- [ ] :key: Verify that the RAM and VRAM utilization graphs accurately reported in real time.
-- [ ] :key: Validate that the utilization percentages reflect the actual usage compared to the system's total available resources.
-- [ ] :key: Ensure that the system monitors updates dynamically as the models run and stop.
-
-### 2. Users can start and stop models based on system health
-
-- [ ] :key: Verify the `Start/Stop` action for a model, the system resource usage reflects this change.
-- [ ] Confirm that any changes in model status (start/stop) are logged or reported to the user for transparency.
-- [ ] :key: Check the functionality of `App log` to ensure it opens the correct folder in the system file explorer.
-
-## F. Settings
-
-### 1. Appearance
-
-- [ ] :key: Test the `Light`, `Dark`, and `System` theme settings to ensure they are functioning as expected.
-- [ ] Confirm that the application saves the theme preference and persists it across sessions.
-- [ ] Validate that all elements of the UI are compatible with the theme changes and maintain legibility and contrast.
-
-### 2. Extensions [TBU]
-
-- Validate the `Install Extensions` process by selecting and installing a plugin file.
- [ ] Enable / disable extensions and the UI should reflect accordingly
-
-### 3. Extension group
-
-- [ ] :key: Users can set valid Endpoint and API Key to use remote models
-- [ ] Monitoring extension should allow users to enable / disable log and set log Cleaning Interval
-
-### 4. Advanced settings
-
-- [ ] :key: Test the `Experimental Mode` toggle to confirm it enables or disables experimental features as intended.
-- [ ] :key: Check the functionality of `Open App Directory` to ensure it opens the correct folder in the system file explorer.
-- [ ] Users can move **Jan data folder**
-- [ ] Validate that changes in advanced settings are applied immediately or provide appropriate instructions if a restart is needed.
- [ ] Attempt to test downloading a model from the hub using **HTTP Proxy** [guideline](https://github.com/menloresearch/jan/pull/1562)
-- [ ] Logs that are older than 7 days or exceed 1MB in size will be automatically cleared upon starting the application.
-- [ ] Users can click on Reset button to **factory reset** app settings to its original state & delete all usage data.
- - [ ] Keep the current app data location
- - [ ] Reset the current app data location
-- [ ] Users can enable the setting and chat using quick ask.
-
-### 5. Engine
-
-- [ ] :key: TensorRT Engine - Users able to chat with the model
-- [ ] :key: Onnx Engine - Users able to chat with the model
-- [ ] :key: Other remote Engine - Users able to chat with the model
-
-## G. Local API server
-
-### 1. Local Server Usage with Server Options
-
-- [ ] :key: Explore API Reference: Swagger API for sending/receiving requests
- - [ ] Use default server option
- - [ ] Configure and use custom server options
-- [ ] Test starting/stopping the local API server with different Model/Model settings
-- [ ] Server logs captured with correct Server Options provided
-- [ ] Verify functionality of Open logs/Clear feature
-- [ ] Ensure that threads and other functions impacting the model are disabled while the local server is running
diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock
index 8de56a967..1e0d86600 100644
--- a/src-tauri/Cargo.lock
+++ b/src-tauri/Cargo.lock
@@ -1030,9 +1030,9 @@ dependencies = [
[[package]]
name = "dlopen2"
-version = "0.7.0"
+version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e1297103d2bbaea85724fcee6294c2d50b1081f9ad47d0f6f6f61eda65315a6"
+checksum = "b54f373ccf864bf587a89e880fb7610f8d73f3045f13580948ccbcaff26febff"
dependencies = [
"dlopen2_derive",
"libc",
@@ -2946,6 +2946,16 @@ dependencies = [
"objc2-core-foundation",
]
+[[package]]
+name = "objc2-javascript-core"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9052cb1bb50a4c161d934befcf879526fb87ae9a68858f241e693ca46225cf5a"
+dependencies = [
+ "objc2 0.6.1",
+ "objc2-core-foundation",
+]
+
[[package]]
name = "objc2-metal"
version = "0.2.2"
@@ -2994,6 +3004,17 @@ dependencies = [
"objc2-foundation 0.3.1",
]
+[[package]]
+name = "objc2-security"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1f8e0ef3ab66b08c42644dcb34dba6ec0a574bbd8adbb8bdbdc7a2779731a44"
+dependencies = [
+ "bitflags 2.9.1",
+ "objc2 0.6.1",
+ "objc2-core-foundation",
+]
+
[[package]]
name = "objc2-ui-kit"
version = "0.3.1"
@@ -3018,6 +3039,8 @@ dependencies = [
"objc2-app-kit",
"objc2-core-foundation",
"objc2-foundation 0.3.1",
+ "objc2-javascript-core",
+ "objc2-security",
]
[[package]]
@@ -4468,9 +4491,9 @@ dependencies = [
[[package]]
name = "serialize-to-javascript"
-version = "0.1.1"
+version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9823f2d3b6a81d98228151fdeaf848206a7855a7a042bbf9bf870449a66cafb"
+checksum = "04f3666a07a197cdb77cdf306c32be9b7f598d7060d50cfd4d5aa04bfd92f6c5"
dependencies = [
"serde",
"serde_json",
@@ -4479,13 +4502,13 @@ dependencies = [
[[package]]
name = "serialize-to-javascript-impl"
-version = "0.1.1"
+version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "74064874e9f6a15f04c1f3cb627902d0e6b410abbf36668afa873c61889f1763"
+checksum = "772ee033c0916d670af7860b6e1ef7d658a4629a6d0b4c8c3e67f09b3765b75d"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.104",
]
[[package]]
@@ -4869,11 +4892,12 @@ dependencies = [
[[package]]
name = "tao"
-version = "0.34.0"
+version = "0.34.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49c380ca75a231b87b6c9dd86948f035012e7171d1a7c40a9c2890489a7ffd8a"
+checksum = "959469667dbcea91e5485fc48ba7dd6023face91bb0f1a14681a70f99847c3f7"
dependencies = [
"bitflags 2.9.1",
+ "block2 0.6.1",
"core-foundation 0.10.1",
"core-graphics",
"crossbeam-channel",
@@ -4942,12 +4966,13 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1"
[[package]]
name = "tauri"
-version = "2.7.0"
+version = "2.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "352a4bc7bf6c25f5624227e3641adf475a6535707451b09bb83271df8b7a6ac7"
+checksum = "d4d1d3b3dc4c101ac989fd7db77e045cc6d91a25349cd410455cb5c57d510c1c"
dependencies = [
"anyhow",
"bytes",
+ "cookie",
"dirs",
"dunce",
"embed_plist",
@@ -4966,6 +4991,7 @@ dependencies = [
"objc2-app-kit",
"objc2-foundation 0.3.1",
"objc2-ui-kit",
+ "objc2-web-kit",
"percent-encoding",
"plist",
"raw-window-handle",
@@ -4993,9 +5019,9 @@ dependencies = [
[[package]]
name = "tauri-build"
-version = "2.3.1"
+version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "182d688496c06bf08ea896459bf483eb29cdff35c1c4c115fb14053514303064"
+checksum = "9c432ccc9ff661803dab74c6cd78de11026a578a9307610bbc39d3c55be7943f"
dependencies = [
"anyhow",
"cargo_toml",
@@ -5009,15 +5035,15 @@ dependencies = [
"serde_json",
"tauri-utils",
"tauri-winres",
- "toml 0.8.23",
+ "toml 0.9.5",
"walkdir",
]
[[package]]
name = "tauri-codegen"
-version = "2.3.1"
+version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b54a99a6cd8e01abcfa61508177e6096a4fe2681efecee9214e962f2f073ae4a"
+checksum = "1ab3a62cf2e6253936a8b267c2e95839674e7439f104fa96ad0025e149d54d8a"
dependencies = [
"base64 0.22.1",
"ico",
@@ -5041,9 +5067,9 @@ dependencies = [
[[package]]
name = "tauri-macros"
-version = "2.3.2"
+version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7945b14dc45e23532f2ded6e120170bbdd4af5ceaa45784a6b33d250fbce3f9e"
+checksum = "4368ea8094e7045217edb690f493b55b30caf9f3e61f79b4c24b6db91f07995e"
dependencies = [
"heck 0.5.0",
"proc-macro2",
@@ -5055,9 +5081,9 @@ dependencies = [
[[package]]
name = "tauri-plugin"
-version = "2.3.1"
+version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5bd5c1e56990c70a906ef67a9851bbdba9136d26075ee9a2b19c8b46986b3e02"
+checksum = "9946a3cede302eac0c6eb6c6070ac47b1768e326092d32efbb91f21ed58d978f"
dependencies = [
"anyhow",
"glob",
@@ -5066,17 +5092,18 @@ dependencies = [
"serde",
"serde_json",
"tauri-utils",
- "toml 0.8.23",
+ "toml 0.9.5",
"walkdir",
]
[[package]]
name = "tauri-plugin-deep-link"
-version = "2.4.1"
+version = "2.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fec67f32d7a06d80bd3dc009fdb678c35a66116d9cb8cd2bb32e406c2b5bbd2"
+checksum = "cd67112fb1131834c2a7398ffcba520dbbf62c17de3b10329acd1a3554b1a9bb"
dependencies = [
"dunce",
+ "plist",
"rust-ini",
"serde",
"serde_json",
@@ -5237,9 +5264,9 @@ dependencies = [
[[package]]
name = "tauri-plugin-os"
-version = "2.3.0"
+version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05bccb4c6de4299beec5a9b070878a01bce9e2c945aa7a75bcea38bcba4c675d"
+checksum = "77a1c77ebf6f20417ab2a74e8c310820ba52151406d0c80fbcea7df232e3f6ba"
dependencies = [
"gethostname",
"log",
@@ -5276,9 +5303,9 @@ dependencies = [
[[package]]
name = "tauri-plugin-single-instance"
-version = "2.3.2"
+version = "2.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50a0e5a4ce43cb3a733c3aef85e8478bc769dac743c615e26639cbf5d953faf7"
+checksum = "fb9cac815bf11c4a80fb498666bcdad66d65b89e3ae24669e47806febb76389c"
dependencies = [
"serde",
"serde_json",
@@ -5340,9 +5367,9 @@ dependencies = [
[[package]]
name = "tauri-runtime"
-version = "2.7.1"
+version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b1cc885be806ea15ff7b0eb47098a7b16323d9228876afda329e34e2d6c4676"
+checksum = "d4cfc9ad45b487d3fded5a4731a567872a4812e9552e3964161b08edabf93846"
dependencies = [
"cookie",
"dpi",
@@ -5351,20 +5378,23 @@ dependencies = [
"jni",
"objc2 0.6.1",
"objc2-ui-kit",
+ "objc2-web-kit",
"raw-window-handle",
"serde",
"serde_json",
"tauri-utils",
"thiserror 2.0.12",
"url",
+ "webkit2gtk",
+ "webview2-com",
"windows 0.61.3",
]
[[package]]
name = "tauri-runtime-wry"
-version = "2.7.2"
+version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe653a2fbbef19fe898efc774bc52c8742576342a33d3d028c189b57eb1d2439"
+checksum = "c1fe9d48bd122ff002064e88cfcd7027090d789c4302714e68fcccba0f4b7807"
dependencies = [
"gtk",
"http 1.3.1",
@@ -5389,9 +5419,9 @@ dependencies = [
[[package]]
name = "tauri-utils"
-version = "2.6.0"
+version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9330c15cabfe1d9f213478c9e8ec2b0c76dab26bb6f314b8ad1c8a568c1d186e"
+checksum = "41a3852fdf9a4f8fbeaa63dc3e9a85284dd6ef7200751f0bd66ceee30c93f212"
dependencies = [
"anyhow",
"cargo_metadata",
@@ -5417,7 +5447,7 @@ dependencies = [
"serde_with",
"swift-rs",
"thiserror 2.0.12",
- "toml 0.8.23",
+ "toml 0.9.5",
"url",
"urlpattern",
"uuid",
@@ -6921,14 +6951,15 @@ checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
[[package]]
name = "wry"
-version = "0.52.1"
+version = "0.53.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "12a714d9ba7075aae04a6e50229d6109e3d584774b99a6a8c60de1698ca111b9"
+checksum = "31f0e9642a0d061f6236c54ccae64c2722a7879ad4ec7dff59bd376d446d8e90"
dependencies = [
"base64 0.22.1",
"block2 0.6.1",
"cookie",
"crossbeam-channel",
+ "dirs",
"dpi",
"dunce",
"gdkx11",
diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml
index 353c3efff..1152623be 100644
--- a/src-tauri/Cargo.toml
+++ b/src-tauri/Cargo.toml
@@ -22,6 +22,7 @@ default = [
"tauri/macos-private-api",
"tauri/tray-icon",
"tauri/test",
+ "tauri/custom-protocol"
]
test-tauri = [
"tauri/wry",
@@ -58,7 +59,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_yaml = "0.9.34"
tar = "0.4"
-tauri-plugin-deep-link = "2"
+tauri-plugin-deep-link = { version = "2.3.4" }
tauri-plugin-dialog = "2.2.1"
tauri-plugin-hardware = { path = "./plugins/tauri-plugin-hardware" }
tauri-plugin-http = { version = "2", features = ["unsafe-headers"] }
@@ -75,7 +76,7 @@ url = "2.5"
uuid = { version = "1.7", features = ["v4"] }
[dependencies.tauri]
-version = "2.5.0"
+version = "2.8.5"
default-features = false
features = ["protocol-asset", "macos-private-api", "test"]
@@ -92,4 +93,4 @@ windows-sys = { version = "0.60.2", features = ["Win32_Storage_FileSystem"] }
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
tauri-plugin-updater = "2"
once_cell = "1.18"
-tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] }
+tauri-plugin-single-instance = { version = "2.3.4", features = ["deep-link"] }
diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/commands.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/commands.rs
index 79ec81f5a..96ecb36bc 100644
--- a/src-tauri/plugins/tauri-plugin-llamacpp/src/commands.rs
+++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/commands.rs
@@ -12,7 +12,7 @@ use tokio::time::Instant;
use crate::device::{get_devices_from_backend, DeviceInfo};
use crate::error::{ErrorCode, LlamacppError, ServerError, ServerResult};
-use crate::path::{validate_binary_path, validate_model_path, validate_mmproj_path};
+use crate::path::{validate_binary_path, validate_mmproj_path, validate_model_path};
use crate::process::{
find_session_by_model_id, get_all_active_sessions, get_all_loaded_model_ids,
get_random_available_port, is_process_running_by_pid,
@@ -55,7 +55,20 @@ pub async fn load_llama_model(
let port = parse_port_from_args(&args);
let model_path_pb = validate_model_path(&mut args)?;
- let _mmproj_path_pb = validate_mmproj_path(&mut args)?;
+ let mmproj_path_pb = validate_mmproj_path(&mut args)?;
+
+ let mmproj_path_string = if mmproj_path_pb.is_some() {
+ // Find the actual mmproj path from args after validation/conversion;
+ // use .get() so a trailing "--mmproj" flag without a value cannot panic
+ args.iter()
+ .position(|arg| arg == "--mmproj")
+ .and_then(|idx| args.get(idx + 1).cloned())
+ } else {
+ None
+ };
+
+ log::info!("MMPROJ path: {}", mmproj_path_string.as_deref().unwrap_or("None"));
let api_key: String;
@@ -211,6 +224,7 @@ pub async fn load_llama_model(
model_id: model_id,
model_path: model_path_pb.display().to_string(),
api_key: api_key,
+ mmproj_path: mmproj_path_string,
};
// Insert session info to process_map
@@ -265,7 +279,7 @@ pub async fn unload_llama_model(
pub async fn get_devices(
backend_path: &str,
library_path: Option<&str>,
- envs: HashMap<String, String>
+ envs: HashMap<String, String>,
) -> ServerResult<Vec<DeviceInfo>> {
get_devices_from_backend(backend_path, library_path, envs).await
}
diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/error.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/error.rs
index 647b2fead..d26e612fb 100644
--- a/src-tauri/plugins/tauri-plugin-llamacpp/src/error.rs
+++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/error.rs
@@ -48,6 +48,7 @@ impl LlamacppError {
let lower_stderr = stderr.to_lowercase();
// TODO: add others
let is_out_of_memory = lower_stderr.contains("out of memory")
+ || lower_stderr.contains("failed to allocate")
|| lower_stderr.contains("insufficient memory")
|| lower_stderr.contains("erroroutofdevicememory") // vulkan specific
|| lower_stderr.contains("kiogpucommandbuffercallbackerroroutofmemory") // Metal-specific error code
diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/state.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/state.rs
index 359a27951..2aad02ecf 100644
--- a/src-tauri/plugins/tauri-plugin-llamacpp/src/state.rs
+++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/state.rs
@@ -11,6 +11,8 @@ pub struct SessionInfo {
pub model_id: String,
pub model_path: String, // path of the loaded model
pub api_key: String,
+ #[serde(default)]
+ pub mmproj_path: Option<String>,
}
pub struct LLamaBackendSession {
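
If the webview reads the serialized `SessionInfo`, the new optional field surfaces roughly as in this hedged TypeScript sketch; the interface name, helper function, and the absence of other fields are assumptions based only on the fields visible in this hunk.

```ts
// Sketch of the serialized SessionInfo as the webview might receive it.
// Field names mirror the Rust struct; serde keeps snake_case names by default.
interface SessionInfo {
  model_id: string
  model_path: string
  api_key: string
  // Newly added and marked #[serde(default)], so payloads from older
  // sessions simply omit it and it deserializes as undefined/null.
  mmproj_path?: string | null
}

function hasVisionProjector(session: SessionInfo): boolean {
  return typeof session.mmproj_path === 'string' && session.mmproj_path.length > 0
}
```
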
diff --git a/src-tauri/src/core/server/proxy.rs b/src-tauri/src/core/server/proxy.rs
index 9b33d4ba5..d6056e126 100644
--- a/src-tauri/src/core/server/proxy.rs
+++ b/src-tauri/src/core/server/proxy.rs
@@ -215,7 +215,14 @@ async fn proxy_request(
let path = get_destination_path(original_path, &config.prefix);
let method = parts.method.clone();
- let whitelisted_paths = ["/", "/openapi.json", "/favicon.ico"];
+ let whitelisted_paths = [
+ "/",
+ "/openapi.json",
+ "/favicon.ico",
+ "/docs/swagger-ui.css",
+ "/docs/swagger-ui-bundle.js",
+ "/docs/swagger-ui-standalone-preset.js",
+ ];
let is_whitelisted_path = whitelisted_paths.contains(&path.as_str());
if !is_whitelisted_path {
@@ -448,6 +455,82 @@ async fn proxy_request(
return Ok(response_builder.body(Body::from(body_str)).unwrap());
}
+
+ (hyper::Method::GET, "/openapi.json") => {
+ let body = include_str!("../../../static/openapi.json"); // relative to src-tauri/src/
+ return Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header(hyper::header::CONTENT_TYPE, "application/json")
+ .body(Body::from(body))
+ .unwrap());
+ }
+
+ // DOCS route
+ (hyper::Method::GET, "/") => {
+ let html = r#"
+ <!DOCTYPE html>
+ <html>
+ <head>
+ <meta charset="utf-8" />
+ <title>API Docs</title>
+ <link rel="stylesheet" href="/docs/swagger-ui.css" />
+ </head>
+ <body>
+ <div id="swagger-ui"></div>
+ <script src="/docs/swagger-ui-bundle.js"></script>
+ <script>window.ui = SwaggerUIBundle({ url: "/openapi.json", dom_id: "#swagger-ui" });</script>
+ </body>
+ </html>
+ "#;
+
+ let mut response_builder = Response::builder()
+ .status(StatusCode::OK)
+ .header(hyper::header::CONTENT_TYPE, "text/html");
+
+ response_builder = add_cors_headers_with_host_and_origin(
+ response_builder,
+ &host_header,
+ &origin_header,
+ &config.trusted_hosts,
+ );
+
+ return Ok(response_builder.body(Body::from(html)).unwrap());
+ }
+
+ (hyper::Method::GET, "/docs/swagger-ui.css") => {
+ let css = include_str!("../../../static/swagger-ui/swagger-ui.css");
+ return Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header(hyper::header::CONTENT_TYPE, "text/css")
+ .body(Body::from(css))
+ .unwrap());
+ }
+
+ (hyper::Method::GET, "/docs/swagger-ui-bundle.js") => {
+ let js = include_str!("../../../static/swagger-ui/swagger-ui-bundle.js");
+ return Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header(hyper::header::CONTENT_TYPE, "application/javascript")
+ .body(Body::from(js))
+ .unwrap());
+ }
+
+ (hyper::Method::GET, "/favicon.ico") => {
+ let icon = include_bytes!("../../../static/swagger-ui/favicon.ico");
+ return Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header(hyper::header::CONTENT_TYPE, "image/x-icon")
+ .body(Body::from(icon.as_ref()))
+ .unwrap());
+ }
+
_ => {
let is_explicitly_whitelisted_get = method == hyper::Method::GET
&& whitelisted_paths.contains(&destination_path.as_str());
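
As a quick smoke test of the embedded docs endpoints added above, a TypeScript sketch follows; the base URL is an assumption taken from the bundled openapi.json later in this diff, and host, port, prefix, or API-key requirements may differ for a given local configuration.

```ts
// Fetch the embedded spec and the Swagger UI shell from the local server.
const base = 'http://localhost:1337' // assumed default; adjust to your setup

async function checkDocs(): Promise<void> {
  const spec = await fetch(`${base}/openapi.json`).then((r) => r.json())
  console.log(spec.info?.title) // expected: "Jan API Server Endpoints"

  const docsPage = await fetch(`${base}/`).then((r) => r.text())
  console.log(docsPage.slice(0, 80)) // should be the start of the Swagger UI HTML
}

checkDocs().catch(console.error)
```
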
diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs
index 59b1db5c0..185d259db 100644
--- a/src-tauri/src/lib.rs
+++ b/src-tauri/src/lib.rs
@@ -8,7 +8,8 @@ use core::{
};
use jan_utils::generate_app_token;
use std::{collections::HashMap, sync::Arc};
-use tauri::{Manager, RunEvent};
+use tauri_plugin_deep_link::DeepLinkExt;
+use tauri::{Emitter, Manager, RunEvent};
use tauri_plugin_llamacpp::cleanup_llama_processes;
use tokio::sync::Mutex;
@@ -22,6 +23,15 @@ pub fn run() {
builder = builder.plugin(tauri_plugin_single_instance::init(|_app, argv, _cwd| {
println!("a new app instance was opened with {argv:?} and the deep link event was already triggered");
// when defining deep link schemes at runtime, you must also check `argv` here
+ let arg = argv.iter().find(|arg| arg.starts_with("jan://"));
+ if let Some(deep_link) = arg {
+ println!("deep link: {deep_link}");
+ // handle the deep link, e.g., emit an event to the webview;
+ // ignore emit errors instead of panicking inside the callback
+ let _ = _app.app_handle().emit("deep-link", deep_link);
+ if let Some(window) = _app.app_handle().get_webview_window("main") {
+ let _ = window.set_focus();
+ }
+ }
}));
}
@@ -153,7 +163,6 @@ pub fn run() {
#[cfg(any(windows, target_os = "linux"))]
{
- use tauri_plugin_deep_link::DeepLinkExt;
app.deep_link().register_all()?;
}
setup_mcp(app);
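
A webview-side counterpart to the "deep-link" event emitted above could look like the following TypeScript sketch; where Jan's frontend actually registers such a handler is not shown in this diff, so the function name and routing comment are illustrative.

```ts
import { listen } from '@tauri-apps/api/event'

async function registerDeepLinkHandler(): Promise<() => void> {
  // The Rust side emits the raw "jan://..." URL string as the payload.
  return listen<string>('deep-link', (event) => {
    console.log('deep link received:', event.payload)
    // e.g. parse event.payload and navigate to the relevant screen here
  })
}
```
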
diff --git a/src-tauri/static/openapi.json b/src-tauri/static/openapi.json
new file mode 100644
index 000000000..375c31b7d
--- /dev/null
+++ b/src-tauri/static/openapi.json
@@ -0,0 +1,690 @@
+{
+ "openapi": "3.0.0",
+ "info": {
+ "title": "Jan API Server Endpoints",
+ "description": "Jan API server provides a REST API interface for seamless interaction with large language models (LLMs) for third party apps. Compatible with the OpenAI API spec, it enables straightforward API access of models available in Jan.",
+ "version": "1.0"
+ },
+ "servers": [
+ {
+ "url": "http://localhost:1337/v1",
+ "description": "Jan API server"
+ }
+ ],
+ "tags": [
+ {
+ "name": "Models",
+ "description": "Endpoints for model discovery and management"
+ },
+ {
+ "name": "Inference",
+ "description": "Endpoint for generating completions (chat or text) from a model"
+ }
+ ],
+ "paths": {
+ "/models": {
+ "get": {
+ "summary": "List loaded models",
+ "description": "Returns information about the loaded model(s). The list always contains a single element describing the current model. See the OpenAI *Models* API documentation for details.",
+ "operationId": "listModels",
+ "tags": ["Models"],
+ "responses": {
+ "200": {
+ "description": "A list containing a single model object",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ListModelsResponseDto"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/chat/completions": {
+ "post": {
+ "summary": "Create chat completion",
+ "description": "Generates a completion for the supplied prompt. Streaming mode is supported. All extra options described in the documentation are optional and follow the OpenAI‑compatible naming.",
+ "operationId": "createChatCompletion",
+ "tags": ["Inference"],
+ "requestBody": {
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/CreateChatCompletionDto"
+ }
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "Completion result",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ChatCompletionResponseDto"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "components": {
+ "securitySchemes": {
+ "bearerAuth": {
+ "type": "http",
+ "scheme": "bearer",
+ "bearerFormat": "API-Key",
+ "description": "API key required for all endpoints."
+ }
+ },
+ "schemas": {
+ "ModelDto": {
+ "type": "object",
+ "description": "Model metadata as returned by `/v1/models`.",
+ "properties": {
+ "source_url": {
+ "type": "string",
+ "description": "URL to the source of the model."
+ },
+ "id": {
+ "type": "string",
+ "description": "Unique identifier for the model (normally the file path)."
+ },
+ "object": {
+ "type": "string",
+ "enum": ["model"]
+ },
+ "name": {
+ "type": "string",
+ "description": "Human‑readable name of the model."
+ },
+ "version": {
+ "type": "string",
+ "default": "1.0",
+ "description": "Version string of the model."
+ },
+ "description": {
+ "type": "string",
+ "description": "Long description of the model."
+ },
+ "format": {
+ "type": "string",
+ "description": "File format (e.g., gguf)."
+ },
+ "ctx_len": {
+ "type": "integer",
+ "description": "Context length the model was trained with."
+ },
+ "prompt_template": {
+ "type": "string",
+ "description": "Template used to build prompts."
+ },
+ "temperature": {
+ "type": "number",
+ "description": "Default temperature for generation."
+ },
+ "top_p": {
+ "type": "number",
+ "description": "Default nucleus‑sampling probability."
+ },
+ "stream": {
+ "type": "boolean",
+ "description": "Whether streaming is enabled by default."
+ },
+ "max_tokens": {
+ "type": "integer",
+ "description": "Maximum tokens the model can generate."
+ },
+ "stop": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "Default stop sequences."
+ },
+ "frequency_penalty": {
+ "type": "number",
+ "description": "Default frequency penalty."
+ },
+ "presence_penalty": {
+ "type": "number",
+ "description": "Default presence penalty."
+ },
+ "author": {
+ "type": "string"
+ },
+ "tags": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "size": {
+ "type": "integer"
+ },
+ "cover": {
+ "type": "string"
+ },
+ "engine": {
+ "type": "string"
+ }
+ },
+ "required": [
+ "source_url",
+ "id",
+ "object",
+ "name",
+ "version",
+ "description",
+ "format",
+ "ctx_len",
+ "prompt_template",
+ "temperature",
+ "top_p",
+ "stream",
+ "max_tokens",
+ "stop",
+ "frequency_penalty",
+ "presence_penalty",
+ "author",
+ "tags",
+ "size",
+ "cover",
+ "engine"
+ ]
+ },
+ "ListModelsResponseDto": {
+ "type": "object",
+ "description": "Response for `GET /v1/models` – a list that always contains a single model entry.",
+ "properties": {
+ "object": {
+ "type": "string",
+ "enum": ["list"]
+ },
+ "data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ModelDto"
+ }
+ }
+ },
+ "required": ["object", "data"]
+ },
+ "CreateChatCompletionDto": {
+ "type": "object",
+ "description": "Request body for `POST /v1/chat/completion`. All fields follow the OpenAI naming; fields marked *optional* are not required.",
+ "properties": {
+ "model": {
+ "type": "string",
+ "description": "The unique identifier of the model to use."
+ },
+ "messages": {
+ "type": "array",
+ "description": "Array of chat messages that form the prompt.",
+ "items": {
+ "$ref": "#/components/schemas/ChatCompletionMessage"
+ }
+ },
+ "stream": {
+ "type": "boolean",
+ "description": "If true, the server streams tokens as they are generated.",
+ "default": false
+ },
+ "max_tokens": {
+ "type": "integer",
+ "description": "Maximum number of tokens to generate."
+ },
+ "stop": {
+ "type": "array",
+ "description": "Stop sequences – generation stops before emitting any of these strings.",
+ "items": {
+ "type": "string"
+ },
+ "default": []
+ },
+ "temperature": {
+ "type": "number",
+ "description": "Adjusts randomness of the output.",
+ "default": 0.8
+ },
+ "dynatemp_range": {
+ "type": "number",
+ "description": "Dynamic‑temperature range – final temperature is sampled from `[temperature‑range, temperature+range]`.",
+ "default": 0
+ },
+ "dynatemp_exponent": {
+ "type": "number",
+ "description": "Exponent for dynamic‑temperature scaling.",
+ "default": 1
+ },
+ "top_k": {
+ "type": "integer",
+ "description": "Restricts sampling to the K most probable tokens.",
+ "default": 40
+ },
+ "top_p": {
+ "type": "number",
+ "description": "Nucleus sampling cutoff (cumulative probability).",
+ "default": 0.95
+ },
+ "min_p": {
+ "type": "number",
+ "description": "Minimum probability for a token to be considered relative to the most likely token.",
+ "default": 0.05
+ },
+ "typical_p": {
+ "type": "number",
+ "description": "Enable locally typical sampling with parameter p.",
+ "default": 1.0
+ },
+ "n_predict": {
+ "type": "integer",
+ "description": "Maximum number of tokens to predict; -1 = unlimited, 0 = evaluate prompt only.",
+ "default": -1
+ },
+ "n_indent": {
+ "type": "integer",
+ "description": "Minimum line indentation for generated code.",
+ "default": 0
+ },
+ "n_keep": {
+ "type": "integer",
+ "description": "Tokens from the prompt to retain when context is exceeded. 0 = none, -1 = all.",
+ "default": 0
+ },
+ "presence_penalty": {
+ "type": "number",
+ "description": "Presence penalty (0.0 = disabled).",
+ "default": 0
+ },
+ "frequency_penalty": {
+ "type": "number",
+ "description": "Frequency penalty (0.0 = disabled).",
+ "default": 0
+ },
+ "repeat_penalty": {
+ "type": "number",
+ "description": "Repetition penalty for token sequences.",
+ "default": 1.1
+ },
+ "repeat_last_n": {
+ "type": "integer",
+ "description": "How many last tokens to consider for repeat penalty (0 = disabled, -1 = context size).",
+ "default": 64
+ },
+ "dry_multiplier": {
+ "type": "number",
+ "description": "DRY (Don’t Repeat Yourself) multiplier (0.0 = disabled).",
+ "default": 0
+ },
+ "dry_base": {
+ "type": "number",
+ "description": "DRY base value.",
+ "default": 1.75
+ },
+ "dry_allowed_length": {
+ "type": "integer",
+ "description": "Length after which DRY penalty grows exponentially.",
+ "default": 2
+ },
+ "dry_penalty_last_n": {
+ "type": "integer",
+ "description": "How many tokens to scan for DRY repetitions (0 = disabled, -1 = context size).",
+ "default": -1
+ },
+ "dry_sequence_breakers": {
+ "type": "array",
+ "description": "Sequence breakers for DRY sampling.",
+ "items": {
+ "type": "string"
+ },
+ "default": ["\\n", ":", "\"", "*"]
+ },
+ "xtc_probability": {
+ "type": "number",
+ "description": "Probability for token removal via XTC sampler (0.0 = disabled).",
+ "default": 0
+ },
+ "xtc_threshold": {
+ "type": "number",
+ "description": "Minimum probability threshold for XTC.",
+ "default": 0.1
+ },
+ "mirostat": {
+ "type": "integer",
+ "description": "Enable Mirostat sampling (0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0).",
+ "default": 0
+ },
+ "mirostat_tau": {
+ "type": "number",
+ "description": "Target entropy for Mirostat.",
+ "default": 5
+ },
+ "mirostat_eta": {
+ "type": "number",
+ "description": "Learning rate for Mirostat.",
+ "default": 0.1
+ },
+ "grammar": {
+ "type": "string",
+ "description": "Grammar for grammar‑based sampling."
+ },
+ "json_schema": {
+ "type": "object",
+ "description": "JSON schema for grammar‑based sampling."
+ },
+ "seed": {
+ "type": "integer",
+ "description": "RNG seed; -1 = random.",
+ "default": -1
+ },
+ "ignore_eos": {
+ "type": "boolean",
+ "description": "Continue generating after EOS token.",
+ "default": false
+ },
+ "logit_bias": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "boolean"
+ }
+ ]
+ },
+ "description": "Modify the likelihood of a token appearing. Accepts a JSON object mapping token IDs to a bias value from -100 to 100, or `false` to ban a token.",
+ "default": {}
+ },
+ "n_probs": {
+ "type": "integer",
+ "description": "If > 0, top-N token probabilities are returned.",
+ "default": 0
+ },
+ "min_keep": {
+ "type": "integer",
+ "description": "Force samplers to return at least N tokens.",
+ "default": 0
+ },
+ "t_max_predict_ms": {
+ "type": "integer",
+ "description": "Maximum generation time in milliseconds (0 = disabled).",
+ "default": 0
+ },
+ "id_slot": {
+ "type": "integer",
+ "description": "Assign the request to a specific inference slot (-1 = auto).",
+ "default": -1
+ },
+ "cache_prompt": {
+ "type": "boolean",
+ "description": "Reuse KV cache from previous requests when possible.",
+ "default": true
+ },
+ "return_tokens": {
+ "type": "boolean",
+ "description": "Include raw token IDs in the response.",
+ "default": false
+ },
+ "samplers": {
+ "type": "array",
+ "description": "Ordered list of samplers to apply.",
+ "items": {
+ "type": "string"
+ },
+ "default": [
+ "dry",
+ "top_k",
+ "typ_p",
+ "top_p",
+ "min_p",
+ "xtc",
+ "temperature"
+ ]
+ },
+ "timings_per_token": {
+ "type": "boolean",
+ "description": "Include timing information per token.",
+ "default": false
+ },
+ "return_progress": {
+ "type": "boolean",
+ "description": "Include prompt‑processing progress when streaming.",
+ "default": false
+ },
+ "post_sampling_probs": {
+ "type": "boolean",
+ "description": "Return top‑N probabilities *after* sampling.",
+ "default": false
+ },
+ "response_fields": {
+ "type": "array",
+ "description": "Select which fields to include in the response.",
+ "items": {
+ "type": "string"
+ }
+ },
+ "lora": {
+ "type": "array",
+ "description": "LoRA adapters to apply for this request.",
+ "items": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer"
+ },
+ "scale": {
+ "type": "number"
+ }
+ },
+ "required": ["id", "scale"]
+ }
+ },
+ "multimodal_data": {
+ "type": "array",
+ "description": "Base64‑encoded multimodal data (images, audio, …). Must match the number of `<__media__>` markers in the prompt.",
+ "items": {
+ "type": "string"
+ }
+ }
+ },
+ "required": ["model", "messages"]
+ },
+ "ChatCompletionResponseDto": {
+ "type": "object",
+ "description": "Response from `POST /v1/chat/completion`.",
+ "properties": {
+ "id": {
+ "type": "string",
+ "description": "Unique identifier for the completion."
+ },
+ "object": {
+ "type": "string",
+ "enum": ["chat.completion"]
+ },
+ "created": {
+ "type": "integer",
+ "description": "Unix timestamp of creation."
+ },
+ "model": {
+ "type": "string",
+ "description": "Model used for the completion."
+ },
+ "choices": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ChoiceDto"
+ },
+ "description": "List of generated choices."
+ },
+ "usage": {
+ "$ref": "#/components/schemas/UsageDto"
+ },
+ "system_fingerprint": {
+ "type": "string",
+ "description": "Fingerprint of the system configuration used."
+ },
+ "generation_settings": {
+ "type": "object",
+ "description": "The generation settings used for the completion."
+ },
+ "prompt": {
+ "type": "string",
+ "description": "The processed prompt."
+ },
+ "truncated": {
+ "type": "boolean",
+ "description": "True if the context size was exceeded."
+ },
+ "tokens_cached": {
+ "type": "integer",
+ "description": "Number of tokens from the prompt which were reused from a cache."
+ },
+ "tokens_evaluated": {
+ "type": "integer",
+ "description": "Number of tokens evaluated in total from the prompt."
+ }
+ },
+ "required": ["id", "object", "created", "model", "choices", "usage"]
+ },
+ "ChatCompletionMessage": {
+ "type": "object",
+ "description": "A single turn in a chat conversation.",
+ "properties": {
+ "role": {
+ "type": "string",
+ "enum": ["system", "assistant", "user"],
+ "description": "Who sent the message."
+ },
+ "content": {
+ "type": "string",
+ "description": "The textual content of the message."
+ }
+ },
+ "required": ["role", "content"]
+ },
+ "ChoiceDto": {
+ "type": "object",
+ "properties": {
+ "index": {
+ "type": "integer"
+ },
+ "message": {
+ "$ref": "#/components/schemas/ChatCompletionMessage"
+ },
+ "finish_reason": {
+ "type": "string",
+ "description": "Why the generation stopped (e.g., `stop`, `length`, `model`)."
+ },
+ "logprobs": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ChatChoiceLogprobs"
+ },
+ "description": "Log probability information for the choice, if requested.",
+ "nullable": true
+ }
+ },
+ "required": ["index", "message", "finish_reason"]
+ },
+ "UsageDto": {
+ "type": "object",
+ "description": "Token usage statistics.",
+ "properties": {
+ "prompt_tokens": {
+ "type": "integer"
+ },
+ "completion_tokens": {
+ "type": "integer"
+ },
+ "total_tokens": {
+ "type": "integer"
+ }
+ },
+ "required": ["prompt_tokens", "completion_tokens", "total_tokens"]
+ },
+ "LogprobContent": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "description": "The token ID."
+ },
+ "token": {
+ "type": "string",
+ "description": "The token string."
+ },
+ "logprob": {
+ "type": "number",
+ "description": "The log probability of this token."
+ },
+ "prob": {
+ "type": "number",
+ "description": "The probability of this token (if post_sampling_probs is true)."
+ },
+ "bytes": {
+ "type": "array",
+ "items": {
+ "type": "integer"
+ },
+ "description": "The token represented as a list of bytes."
+ }
+ }
+ },
+ "ChatChoiceLogprobs": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "description": "The token ID."
+ },
+ "token": {
+ "type": "string",
+ "description": "The most likely token."
+ },
+ "logprob": {
+ "type": "number",
+ "description": "The log probability of the most likely token."
+ },
+ "prob": {
+ "type": "number",
+ "description": "The probability of the most likely token (if post_sampling_probs is true)."
+ },
+ "bytes": {
+ "type": "array",
+ "items": {
+ "type": "integer"
+ },
+ "description": "The most likely token represented as a list of bytes."
+ },
+ "top_logprobs": {
+ "type": "array",
+ "description": "List of the most likely tokens and their log probs.",
+ "items": {
+ "$ref": "#/components/schemas/LogprobContent"
+ }
+ },
+ "top_probs": {
+ "type": "array",
+ "description": "List of the most likely tokens and their probs (if post_sampling_probs is true).",
+ "items": {
+ "$ref": "#/components/schemas/LogprobContent"
+ }
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ]
+}
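
A minimal TypeScript client for the `/chat/completions` endpoint described by the spec above is sketched below; the model id and API key are placeholders, and the URL is taken from the spec's `servers` entry.

```ts
// Minimal chat-completions client against the bundled OpenAPI spec.
async function chat(prompt: string): Promise<string> {
  const res = await fetch('http://localhost:1337/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: 'Bearer <your-api-key>', // placeholder; see bearerAuth in the spec
    },
    body: JSON.stringify({
      model: '<model-id>', // placeholder model identifier
      messages: [{ role: 'user', content: prompt }],
      temperature: 0.8,
      stream: false,
    }),
  })
  if (!res.ok) throw new Error(`HTTP ${res.status}`)
  const data = await res.json()
  // ChatCompletionResponseDto: the reply text lives in choices[0].message.content.
  return data.choices[0].message.content
}
```
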
diff --git a/src-tauri/static/swagger-ui/favicon.ico b/src-tauri/static/swagger-ui/favicon.ico
new file mode 100644
index 000000000..ca006fa7d
Binary files /dev/null and b/src-tauri/static/swagger-ui/favicon.ico differ
diff --git a/src-tauri/static/swagger-ui/swagger-ui-bundle.js b/src-tauri/static/swagger-ui/swagger-ui-bundle.js
new file mode 100644
index 000000000..2ef29f201
--- /dev/null
+++ b/src-tauri/static/swagger-ui/swagger-ui-bundle.js
@@ -0,0 +1,2 @@
+/*! For license information please see swagger-ui-bundle.js.LICENSE.txt */
+!function webpackUniversalModuleDefinition(s,o){"object"==typeof exports&&"object"==typeof module?module.exports=o():"function"==typeof define&&define.amd?define([],o):"object"==typeof exports?exports.SwaggerUIBundle=o():s.SwaggerUIBundle=o()}(this,(()=>(()=>{var s={251:(s,o)=>{o.read=function(s,o,i,a,u){var _,w,x=8*u-a-1,C=(1<>1,L=-7,B=i?u-1:0,$=i?-1:1,U=s[o+B];for(B+=$,_=U&(1<<-L)-1,U>>=-L,L+=x;L>0;_=256*_+s[o+B],B+=$,L-=8);for(w=_&(1<<-L)-1,_>>=-L,L+=a;L>0;w=256*w+s[o+B],B+=$,L-=8);if(0===_)_=1-j;else{if(_===C)return w?NaN:1/0*(U?-1:1);w+=Math.pow(2,a),_-=j}return(U?-1:1)*w*Math.pow(2,_-a)},o.write=function(s,o,i,a,u,_){var w,x,C,j=8*_-u-1,L=(1<>1,$=23===u?Math.pow(2,-24)-Math.pow(2,-77):0,U=a?0:_-1,V=a?1:-1,z=o<0||0===o&&1/o<0?1:0;for(o=Math.abs(o),isNaN(o)||o===1/0?(x=isNaN(o)?1:0,w=L):(w=Math.floor(Math.log(o)/Math.LN2),o*(C=Math.pow(2,-w))<1&&(w--,C*=2),(o+=w+B>=1?$/C:$*Math.pow(2,1-B))*C>=2&&(w++,C/=2),w+B>=L?(x=0,w=L):w+B>=1?(x=(o*C-1)*Math.pow(2,u),w+=B):(x=o*Math.pow(2,B-1)*Math.pow(2,u),w=0));u>=8;s[i+U]=255&x,U+=V,x/=256,u-=8);for(w=w<0;s[i+U]=255&w,U+=V,w/=256,j-=8);s[i+U-V]|=128*z}},462:(s,o,i)=>{"use strict";var a=i(40975);s.exports=a},659:(s,o,i)=>{var a=i(51873),u=Object.prototype,_=u.hasOwnProperty,w=u.toString,x=a?a.toStringTag:void 0;s.exports=function getRawTag(s){var o=_.call(s,x),i=s[x];try{s[x]=void 0;var a=!0}catch(s){}var u=w.call(s);return a&&(o?s[x]=i:delete s[x]),u}},694:(s,o,i)=>{"use strict";i(91599);var a=i(37257);i(12560),s.exports=a},953:(s,o,i)=>{"use strict";s.exports=i(53375)},1733:s=>{var o=/[^\x00-\x2f\x3a-\x40\x5b-\x60\x7b-\x7f]+/g;s.exports=function asciiWords(s){return s.match(o)||[]}},1882:(s,o,i)=>{var a=i(72552),u=i(23805);s.exports=function isFunction(s){if(!u(s))return!1;var o=a(s);return"[object Function]"==o||"[object GeneratorFunction]"==o||"[object AsyncFunction]"==o||"[object Proxy]"==o}},1907:(s,o,i)=>{"use strict";var a=i(41505),u=Function.prototype,_=u.call,w=a&&u.bind.bind(_,_);s.exports=a?w:function(s){return function(){return _.apply(s,arguments)}}},2205:function(s,o,i){var a;a=void 0!==i.g?i.g:this,s.exports=function(s){if(s.CSS&&s.CSS.escape)return s.CSS.escape;var cssEscape=function(s){if(0==arguments.length)throw new TypeError("`CSS.escape` requires an argument.");for(var o,i=String(s),a=i.length,u=-1,_="",w=i.charCodeAt(0);++u=1&&o<=31||127==o||0==u&&o>=48&&o<=57||1==u&&o>=48&&o<=57&&45==w?"\\"+o.toString(16)+" ":0==u&&1==a&&45==o||!(o>=128||45==o||95==o||o>=48&&o<=57||o>=65&&o<=90||o>=97&&o<=122)?"\\"+i.charAt(u):i.charAt(u):_+="�";return _};return s.CSS||(s.CSS={}),s.CSS.escape=cssEscape,cssEscape}(a)},2209:(s,o,i)=>{"use strict";var a,u=i(9404),_=function productionTypeChecker(){invariant(!1,"ImmutablePropTypes type checking code is stripped in production.")};_.isRequired=_;var w=function getProductionTypeChecker(){return _};function getPropType(s){var o=typeof s;return Array.isArray(s)?"array":s instanceof RegExp?"object":s instanceof u.Iterable?"Immutable."+s.toSource().split(" ")[0]:o}function createChainableTypeChecker(s){function checkType(o,i,a,u,_,w){for(var x=arguments.length,C=Array(x>6?x-6:0),j=6;j>",null!=i[a]?s.apply(void 0,[i,a,u,_,w].concat(C)):o?new Error("Required "+_+" `"+w+"` was not specified in `"+u+"`."):void 0}var o=checkType.bind(null,!1);return o.isRequired=checkType.bind(null,!0),o}function createIterableSubclassTypeChecker(s,o){return function createImmutableTypeChecker(s,o){return createChainableTypeChecker((function validate(i,a,u,_,w){var x=i[a];if(!o(x)){var C=getPropType(x);return new 
Error("Invalid "+_+" `"+w+"` of type `"+C+"` supplied to `"+u+"`, expected `"+s+"`.")}return null}))}("Iterable."+s,(function(s){return u.Iterable.isIterable(s)&&o(s)}))}(a={listOf:w,mapOf:w,orderedMapOf:w,setOf:w,orderedSetOf:w,stackOf:w,iterableOf:w,recordOf:w,shape:w,contains:w,mapContains:w,orderedMapContains:w,list:_,map:_,orderedMap:_,set:_,orderedSet:_,stack:_,seq:_,record:_,iterable:_}).iterable.indexed=createIterableSubclassTypeChecker("Indexed",u.Iterable.isIndexed),a.iterable.keyed=createIterableSubclassTypeChecker("Keyed",u.Iterable.isKeyed),s.exports=a},2404:(s,o,i)=>{var a=i(60270);s.exports=function isEqual(s,o){return a(s,o)}},2523:s=>{s.exports=function baseFindIndex(s,o,i,a){for(var u=s.length,_=i+(a?1:-1);a?_--:++_{"use strict";var a=i(45951),u=Object.defineProperty;s.exports=function(s,o){try{u(a,s,{value:o,configurable:!0,writable:!0})}catch(i){a[s]=o}return o}},2694:(s,o,i)=>{"use strict";var a=i(6925);function emptyFunction(){}function emptyFunctionWithReset(){}emptyFunctionWithReset.resetWarningCache=emptyFunction,s.exports=function(){function shim(s,o,i,u,_,w){if(w!==a){var x=new Error("Calling PropTypes validators directly is not supported by the `prop-types` package. Use PropTypes.checkPropTypes() to call them. Read more at http://fb.me/use-check-prop-types");throw x.name="Invariant Violation",x}}function getShim(){return shim}shim.isRequired=shim;var s={array:shim,bigint:shim,bool:shim,func:shim,number:shim,object:shim,string:shim,symbol:shim,any:shim,arrayOf:getShim,element:shim,elementType:shim,instanceOf:getShim,node:shim,objectOf:getShim,oneOf:getShim,oneOfType:getShim,shape:getShim,exact:getShim,checkPropTypes:emptyFunctionWithReset,resetWarningCache:emptyFunction};return s.PropTypes=s,s}},2874:s=>{s.exports={}},2875:(s,o,i)=>{"use strict";var a=i(23045),u=i(80376);s.exports=Object.keys||function keys(s){return a(s,u)}},2955:(s,o,i)=>{"use strict";var a,u=i(65606);function _defineProperty(s,o,i){return(o=function _toPropertyKey(s){var o=function _toPrimitive(s,o){if("object"!=typeof s||null===s)return s;var i=s[Symbol.toPrimitive];if(void 0!==i){var a=i.call(s,o||"default");if("object"!=typeof a)return a;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===o?String:Number)(s)}(s,"string");return"symbol"==typeof o?o:String(o)}(o))in s?Object.defineProperty(s,o,{value:i,enumerable:!0,configurable:!0,writable:!0}):s[o]=i,s}var _=i(86238),w=Symbol("lastResolve"),x=Symbol("lastReject"),C=Symbol("error"),j=Symbol("ended"),L=Symbol("lastPromise"),B=Symbol("handlePromise"),$=Symbol("stream");function createIterResult(s,o){return{value:s,done:o}}function readAndResolve(s){var o=s[w];if(null!==o){var i=s[$].read();null!==i&&(s[L]=null,s[w]=null,s[x]=null,o(createIterResult(i,!1)))}}function onReadable(s){u.nextTick(readAndResolve,s)}var U=Object.getPrototypeOf((function(){})),V=Object.setPrototypeOf((_defineProperty(a={get stream(){return this[$]},next:function next(){var s=this,o=this[C];if(null!==o)return Promise.reject(o);if(this[j])return Promise.resolve(createIterResult(void 0,!0));if(this[$].destroyed)return new Promise((function(o,i){u.nextTick((function(){s[C]?i(s[C]):o(createIterResult(void 0,!0))}))}));var i,a=this[L];if(a)i=new Promise(function wrapForNext(s,o){return function(i,a){s.then((function(){o[j]?i(createIterResult(void 0,!0)):o[B](i,a)}),a)}}(a,this));else{var _=this[$].read();if(null!==_)return Promise.resolve(createIterResult(_,!1));i=new Promise(this[B])}return 
this[L]=i,i}},Symbol.asyncIterator,(function(){return this})),_defineProperty(a,"return",(function _return(){var s=this;return new Promise((function(o,i){s[$].destroy(null,(function(s){s?i(s):o(createIterResult(void 0,!0))}))}))})),a),U);s.exports=function createReadableStreamAsyncIterator(s){var o,i=Object.create(V,(_defineProperty(o={},$,{value:s,writable:!0}),_defineProperty(o,w,{value:null,writable:!0}),_defineProperty(o,x,{value:null,writable:!0}),_defineProperty(o,C,{value:null,writable:!0}),_defineProperty(o,j,{value:s._readableState.endEmitted,writable:!0}),_defineProperty(o,B,{value:function value(s,o){var a=i[$].read();a?(i[L]=null,i[w]=null,i[x]=null,s(createIterResult(a,!1))):(i[w]=s,i[x]=o)},writable:!0}),o));return i[L]=null,_(s,(function(s){if(s&&"ERR_STREAM_PREMATURE_CLOSE"!==s.code){var o=i[x];return null!==o&&(i[L]=null,i[w]=null,i[x]=null,o(s)),void(i[C]=s)}var a=i[w];null!==a&&(i[L]=null,i[w]=null,i[x]=null,a(createIterResult(void 0,!0))),i[j]=!0})),s.on("readable",onReadable.bind(null,i)),i}},3110:(s,o,i)=>{const a=i(5187),u=i(85015),_=i(98023),w=i(53812),x=i(23805),C=i(85105),j=i(86804);class Namespace{constructor(s){this.elementMap={},this.elementDetection=[],this.Element=j.Element,this.KeyValuePair=j.KeyValuePair,s&&s.noDefault||this.useDefault(),this._attributeElementKeys=[],this._attributeElementArrayKeys=[]}use(s){return s.namespace&&s.namespace({base:this}),s.load&&s.load({base:this}),this}useDefault(){return this.register("null",j.NullElement).register("string",j.StringElement).register("number",j.NumberElement).register("boolean",j.BooleanElement).register("array",j.ArrayElement).register("object",j.ObjectElement).register("member",j.MemberElement).register("ref",j.RefElement).register("link",j.LinkElement),this.detect(a,j.NullElement,!1).detect(u,j.StringElement,!1).detect(_,j.NumberElement,!1).detect(w,j.BooleanElement,!1).detect(Array.isArray,j.ArrayElement,!1).detect(x,j.ObjectElement,!1),this}register(s,o){return this._elements=void 0,this.elementMap[s]=o,this}unregister(s){return this._elements=void 0,delete this.elementMap[s],this}detect(s,o,i){return void 0===i||i?this.elementDetection.unshift([s,o]):this.elementDetection.push([s,o]),this}toElement(s){if(s instanceof this.Element)return s;let o;for(let i=0;i{const o=s[0].toUpperCase()+s.substr(1);this._elements[o]=this.elementMap[s]}))),this._elements}get serialiser(){return new C(this)}}C.prototype.Namespace=Namespace,s.exports=Namespace},3121:(s,o,i)=>{"use strict";var a=i(65482),u=Math.min;s.exports=function(s){var o=a(s);return o>0?u(o,9007199254740991):0}},3209:(s,o,i)=>{var a=i(91596),u=i(53320),_=i(36306),w="__lodash_placeholder__",x=128,C=Math.min;s.exports=function mergeData(s,o){var i=s[1],j=o[1],L=i|j,B=L<131,$=j==x&&8==i||j==x&&256==i&&s[7].length<=o[8]||384==j&&o[7].length<=o[8]&&8==i;if(!B&&!$)return s;1&j&&(s[2]=o[2],L|=1&i?0:4);var U=o[3];if(U){var V=s[3];s[3]=V?a(V,U,o[4]):U,s[4]=V?_(s[3],w):o[4]}return(U=o[5])&&(V=s[5],s[5]=V?u(V,U,o[6]):U,s[6]=V?_(s[5],w):o[6]),(U=o[7])&&(s[7]=U),j&x&&(s[8]=null==s[8]?o[8]:C(s[8],o[8])),null==s[9]&&(s[9]=o[9]),s[0]=o[0],s[1]=L,s}},3650:(s,o,i)=>{var a=i(74335)(Object.keys,Object);s.exports=a},3656:(s,o,i)=>{s=i.nmd(s);var a=i(9325),u=i(89935),_=o&&!o.nodeType&&o,w=_&&s&&!s.nodeType&&s,x=w&&w.exports===_?a.Buffer:void 0,C=(x?x.isBuffer:void 0)||u;s.exports=C},4509:(s,o,i)=>{var a=i(12651);s.exports=function mapCacheHas(s){return a(this,s).has(s)}},4640:s=>{"use strict";var o=String;s.exports=function(s){try{return 
o(s)}catch(s){return"Object"}}},4664:(s,o,i)=>{var a=i(79770),u=i(63345),_=Object.prototype.propertyIsEnumerable,w=Object.getOwnPropertySymbols,x=w?function(s){return null==s?[]:(s=Object(s),a(w(s),(function(o){return _.call(s,o)})))}:u;s.exports=x},4901:(s,o,i)=>{var a=i(72552),u=i(30294),_=i(40346),w={};w["[object Float32Array]"]=w["[object Float64Array]"]=w["[object Int8Array]"]=w["[object Int16Array]"]=w["[object Int32Array]"]=w["[object Uint8Array]"]=w["[object Uint8ClampedArray]"]=w["[object Uint16Array]"]=w["[object Uint32Array]"]=!0,w["[object Arguments]"]=w["[object Array]"]=w["[object ArrayBuffer]"]=w["[object Boolean]"]=w["[object DataView]"]=w["[object Date]"]=w["[object Error]"]=w["[object Function]"]=w["[object Map]"]=w["[object Number]"]=w["[object Object]"]=w["[object RegExp]"]=w["[object Set]"]=w["[object String]"]=w["[object WeakMap]"]=!1,s.exports=function baseIsTypedArray(s){return _(s)&&u(s.length)&&!!w[a(s)]}},4993:(s,o,i)=>{"use strict";var a=i(16946),u=i(74239);s.exports=function(s){return a(u(s))}},5187:s=>{s.exports=function isNull(s){return null===s}},5419:s=>{s.exports=function(s,o,i,a){var u=new Blob(void 0!==a?[a,s]:[s],{type:i||"application/octet-stream"});if(void 0!==window.navigator.msSaveBlob)window.navigator.msSaveBlob(u,o);else{var _=window.URL&&window.URL.createObjectURL?window.URL.createObjectURL(u):window.webkitURL.createObjectURL(u),w=document.createElement("a");w.style.display="none",w.href=_,w.setAttribute("download",o),void 0===w.download&&w.setAttribute("target","_blank"),document.body.appendChild(w),w.click(),setTimeout((function(){document.body.removeChild(w),window.URL.revokeObjectURL(_)}),200)}}},5556:(s,o,i)=>{s.exports=i(2694)()},5861:(s,o,i)=>{var a=i(55580),u=i(68223),_=i(32804),w=i(76545),x=i(28303),C=i(72552),j=i(47473),L="[object Map]",B="[object Promise]",$="[object Set]",U="[object WeakMap]",V="[object DataView]",z=j(a),Y=j(u),Z=j(_),ee=j(w),ie=j(x),ae=C;(a&&ae(new a(new ArrayBuffer(1)))!=V||u&&ae(new u)!=L||_&&ae(_.resolve())!=B||w&&ae(new w)!=$||x&&ae(new x)!=U)&&(ae=function(s){var o=C(s),i="[object Object]"==o?s.constructor:void 0,a=i?j(i):"";if(a)switch(a){case z:return V;case Y:return L;case Z:return B;case ee:return $;case ie:return U}return o}),s.exports=ae},6048:s=>{s.exports=function negate(s){if("function"!=typeof s)throw new TypeError("Expected a function");return function(){var o=arguments;switch(o.length){case 0:return!s.call(this);case 1:return!s.call(this,o[0]);case 2:return!s.call(this,o[0],o[1]);case 3:return!s.call(this,o[0],o[1],o[2])}return!s.apply(this,o)}}},6188:s=>{"use strict";s.exports=Math.max},6205:s=>{s.exports={ROOT:0,GROUP:1,POSITION:2,SET:3,RANGE:4,REPETITION:5,REFERENCE:6,CHAR:7}},6233:(s,o,i)=>{const a=i(6048),u=i(10316),_=i(92340);class ArrayElement extends u{constructor(s,o,i){super(s||[],o,i),this.element="array"}primitive(){return"array"}get(s){return this.content[s]}getValue(s){const o=this.get(s);if(o)return o.toValue()}getIndex(s){return this.content[s]}set(s,o){return this.content[s]=this.refract(o),this}remove(s){const o=this.content.splice(s,1);return o.length?o[0]:null}map(s,o){return this.content.map(s,o)}flatMap(s,o){return this.map(s,o).reduce(((s,o)=>s.concat(o)),[])}compactMap(s,o){const i=[];return this.forEach((a=>{const u=s.bind(o)(a);u&&i.push(u)})),i}filter(s,o){return new _(this.content.filter(s,o))}reject(s,o){return this.filter(a(s),o)}reduce(s,o){let i,a;void 0!==o?(i=0,a=this.refract(o)):(i=1,a="object"===this.primitive()?this.first.value:this.first);for(let 
o=i;o{s.bind(o)(i,this.refract(a))}))}shift(){return this.content.shift()}unshift(s){this.content.unshift(this.refract(s))}push(s){return this.content.push(this.refract(s)),this}add(s){this.push(s)}findElements(s,o){const i=o||{},a=!!i.recursive,u=void 0===i.results?[]:i.results;return this.forEach(((o,i,_)=>{a&&void 0!==o.findElements&&o.findElements(s,{results:u,recursive:a}),s(o,i,_)&&u.push(o)})),u}find(s){return new _(this.findElements(s,{recursive:!0}))}findByElement(s){return this.find((o=>o.element===s))}findByClass(s){return this.find((o=>o.classes.includes(s)))}getById(s){return this.find((o=>o.id.toValue()===s)).first}includes(s){return this.content.some((o=>o.equals(s)))}contains(s){return this.includes(s)}empty(){return new this.constructor([])}"fantasy-land/empty"(){return this.empty()}concat(s){return new this.constructor(this.content.concat(s.content))}"fantasy-land/concat"(s){return this.concat(s)}"fantasy-land/map"(s){return new this.constructor(this.map(s))}"fantasy-land/chain"(s){return this.map((o=>s(o)),this).reduce(((s,o)=>s.concat(o)),this.empty())}"fantasy-land/filter"(s){return new this.constructor(this.content.filter(s))}"fantasy-land/reduce"(s,o){return this.content.reduce(s,o)}get length(){return this.content.length}get isEmpty(){return 0===this.content.length}get first(){return this.getIndex(0)}get second(){return this.getIndex(1)}get last(){return this.getIndex(this.length-1)}}ArrayElement.empty=function empty(){return new this},ArrayElement["fantasy-land/empty"]=ArrayElement.empty,"undefined"!=typeof Symbol&&(ArrayElement.prototype[Symbol.iterator]=function symbol(){return this.content[Symbol.iterator]()}),s.exports=ArrayElement},6499:(s,o,i)=>{"use strict";var a=i(1907),u=0,_=Math.random(),w=a(1..toString);s.exports=function(s){return"Symbol("+(void 0===s?"":s)+")_"+w(++u+_,36)}},6549:s=>{"use strict";s.exports=Object.getOwnPropertyDescriptor},6925:s=>{"use strict";s.exports="SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED"},7057:(s,o,i)=>{"use strict";var a=i(11470).charAt,u=i(90160),_=i(64932),w=i(60183),x=i(59550),C="String Iterator",j=_.set,L=_.getterFor(C);w(String,"String",(function(s){j(this,{type:C,string:u(s),index:0})}),(function next(){var s,o=L(this),i=o.string,u=o.index;return u>=i.length?x(void 0,!0):(s=a(i,u),o.index+=s.length,x(s,!1))}))},7176:(s,o,i)=>{"use strict";var a,u=i(73126),_=i(75795);try{a=[].__proto__===Array.prototype}catch(s){if(!s||"object"!=typeof s||!("code"in s)||"ERR_PROTO_ACCESS"!==s.code)throw s}var w=!!a&&_&&_(Object.prototype,"__proto__"),x=Object,C=x.getPrototypeOf;s.exports=w&&"function"==typeof w.get?u([w.get]):"function"==typeof C&&function getDunder(s){return C(null==s?s:x(s))}},7309:(s,o,i)=>{var a=i(62006)(i(24713));s.exports=a},7376:s=>{"use strict";s.exports=!0},7463:(s,o,i)=>{"use strict";var a=i(98828),u=i(62250),_=/#|\.prototype\./,isForced=function(s,o){var i=x[w(s)];return i===j||i!==C&&(u(o)?a(o):!!o)},w=isForced.normalize=function(s){return String(s).replace(_,".").toLowerCase()},x=isForced.data={},C=isForced.NATIVE="N",j=isForced.POLYFILL="P";s.exports=isForced},7666:(s,o,i)=>{var a=i(84851),u=i(953);function _extends(){var o;return s.exports=_extends=a?u(o=a).call(o):function(s){for(var o=1;o{const a=i(6205);o.wordBoundary=()=>({type:a.POSITION,value:"b"}),o.nonWordBoundary=()=>({type:a.POSITION,value:"B"}),o.begin=()=>({type:a.POSITION,value:"^"}),o.end=()=>({type:a.POSITION,value:"$"})},8068:s=>{"use strict";var o=(()=>{var 
s=Object.defineProperty,o=Object.getOwnPropertyDescriptor,i=Object.getOwnPropertyNames,a=Object.getOwnPropertySymbols,u=Object.prototype.hasOwnProperty,_=Object.prototype.propertyIsEnumerable,__defNormalProp=(o,i,a)=>i in o?s(o,i,{enumerable:!0,configurable:!0,writable:!0,value:a}):o[i]=a,__spreadValues=(s,o)=>{for(var i in o||(o={}))u.call(o,i)&&__defNormalProp(s,i,o[i]);if(a)for(var i of a(o))_.call(o,i)&&__defNormalProp(s,i,o[i]);return s},__publicField=(s,o,i)=>__defNormalProp(s,"symbol"!=typeof o?o+"":o,i),w={};((o,i)=>{for(var a in i)s(o,a,{get:i[a],enumerable:!0})})(w,{DEFAULT_OPTIONS:()=>C,DEFAULT_UUID_LENGTH:()=>x,default:()=>B});var x=6,C={dictionary:"alphanum",shuffle:!0,debug:!1,length:x,counter:0},j=class _ShortUniqueId{constructor(s={}){__publicField(this,"counter"),__publicField(this,"debug"),__publicField(this,"dict"),__publicField(this,"version"),__publicField(this,"dictIndex",0),__publicField(this,"dictRange",[]),__publicField(this,"lowerBound",0),__publicField(this,"upperBound",0),__publicField(this,"dictLength",0),__publicField(this,"uuidLength"),__publicField(this,"_digit_first_ascii",48),__publicField(this,"_digit_last_ascii",58),__publicField(this,"_alpha_lower_first_ascii",97),__publicField(this,"_alpha_lower_last_ascii",123),__publicField(this,"_hex_last_ascii",103),__publicField(this,"_alpha_upper_first_ascii",65),__publicField(this,"_alpha_upper_last_ascii",91),__publicField(this,"_number_dict_ranges",{digits:[this._digit_first_ascii,this._digit_last_ascii]}),__publicField(this,"_alpha_dict_ranges",{lowerCase:[this._alpha_lower_first_ascii,this._alpha_lower_last_ascii],upperCase:[this._alpha_upper_first_ascii,this._alpha_upper_last_ascii]}),__publicField(this,"_alpha_lower_dict_ranges",{lowerCase:[this._alpha_lower_first_ascii,this._alpha_lower_last_ascii]}),__publicField(this,"_alpha_upper_dict_ranges",{upperCase:[this._alpha_upper_first_ascii,this._alpha_upper_last_ascii]}),__publicField(this,"_alphanum_dict_ranges",{digits:[this._digit_first_ascii,this._digit_last_ascii],lowerCase:[this._alpha_lower_first_ascii,this._alpha_lower_last_ascii],upperCase:[this._alpha_upper_first_ascii,this._alpha_upper_last_ascii]}),__publicField(this,"_alphanum_lower_dict_ranges",{digits:[this._digit_first_ascii,this._digit_last_ascii],lowerCase:[this._alpha_lower_first_ascii,this._alpha_lower_last_ascii]}),__publicField(this,"_alphanum_upper_dict_ranges",{digits:[this._digit_first_ascii,this._digit_last_ascii],upperCase:[this._alpha_upper_first_ascii,this._alpha_upper_last_ascii]}),__publicField(this,"_hex_dict_ranges",{decDigits:[this._digit_first_ascii,this._digit_last_ascii],alphaDigits:[this._alpha_lower_first_ascii,this._hex_last_ascii]}),__publicField(this,"_dict_ranges",{_number_dict_ranges:this._number_dict_ranges,_alpha_dict_ranges:this._alpha_dict_ranges,_alpha_lower_dict_ranges:this._alpha_lower_dict_ranges,_alpha_upper_dict_ranges:this._alpha_upper_dict_ranges,_alphanum_dict_ranges:this._alphanum_dict_ranges,_alphanum_lower_dict_ranges:this._alphanum_lower_dict_ranges,_alphanum_upper_dict_ranges:this._alphanum_upper_dict_ranges,_hex_dict_ranges:this._hex_dict_ranges}),__publicField(this,"log",((...s)=>{const o=[...s];o[0]="[short-unique-id] ".concat(s[0]),!0!==this.debug||"undefined"==typeof console||null===console||console.log(...o)})),__publicField(this,"_normalizeDictionary",((s,o)=>{let i;if(s&&Array.isArray(s)&&s.length>1)i=s;else{i=[],this.dictIndex=0;const o="_".concat(s,"_dict_ranges"),a=this._dict_ranges[o];let u=0;for(const[,s]of 
Object.entries(a)){const[o,i]=s;u+=Math.abs(i-o)}i=new Array(u);let _=0;for(const[,s]of Object.entries(a)){this.dictRange=s,this.lowerBound=this.dictRange[0],this.upperBound=this.dictRange[1];const o=this.lowerBound<=this.upperBound,a=this.lowerBound,u=this.upperBound;if(o)for(let s=a;su;s--)i[_++]=String.fromCharCode(s),this.dictIndex=s}i.length=_}if(o){for(let s=i.length-1;s>0;s--){const o=Math.floor(Math.random()*(s+1));[i[s],i[o]]=[i[o],i[s]]}}return i})),__publicField(this,"setDictionary",((s,o)=>{this.dict=this._normalizeDictionary(s,o),this.dictLength=this.dict.length,this.setCounter(0)})),__publicField(this,"seq",(()=>this.sequentialUUID())),__publicField(this,"sequentialUUID",(()=>{const s=this.dictLength,o=this.dict;let i=this.counter;const a=[];do{const u=i%s;i=Math.trunc(i/s),a.push(o[u])}while(0!==i);const u=a.join("");return this.counter+=1,u})),__publicField(this,"rnd",((s=this.uuidLength||x)=>this.randomUUID(s))),__publicField(this,"randomUUID",((s=this.uuidLength||x)=>{if(null==s||s<1)throw new Error("Invalid UUID Length Provided");const o=new Array(s),i=this.dictLength,a=this.dict;for(let u=0;uthis.formattedUUID(s,o))),__publicField(this,"formattedUUID",((s,o)=>{const i={$r:this.randomUUID,$s:this.sequentialUUID,$t:this.stamp};return s.replace(/\$[rs]\d{0,}|\$t0|\$t[1-9]\d{1,}/g,(s=>{const a=s.slice(0,2),u=Number.parseInt(s.slice(2),10);return"$s"===a?i[a]().padStart(u,"0"):"$t"===a&&o?i[a](u,o):i[a](u)}))})),__publicField(this,"availableUUIDs",((s=this.uuidLength)=>Number.parseFloat(([...new Set(this.dict)].length**s).toFixed(0)))),__publicField(this,"_collisionCache",new Map),__publicField(this,"approxMaxBeforeCollision",((s=this.availableUUIDs(this.uuidLength))=>{const o=s,i=this._collisionCache.get(o);if(void 0!==i)return i;const a=Number.parseFloat(Math.sqrt(Math.PI/2*s).toFixed(20));return this._collisionCache.set(o,a),a})),__publicField(this,"collisionProbability",((s=this.availableUUIDs(this.uuidLength),o=this.uuidLength)=>Number.parseFloat((this.approxMaxBeforeCollision(s)/this.availableUUIDs(o)).toFixed(20)))),__publicField(this,"uniqueness",((s=this.availableUUIDs(this.uuidLength))=>{const o=Number.parseFloat((1-this.approxMaxBeforeCollision(s)/s).toFixed(20));return o>1?1:o<0?0:o})),__publicField(this,"getVersion",(()=>this.version)),__publicField(this,"stamp",((s,o)=>{const i=Math.floor(+(o||new Date)/1e3).toString(16);if("number"==typeof s&&0===s)return i;if("number"!=typeof s||s<10)throw new Error(["Param finalLength must be a number greater than or equal to 10,","or 0 if you want the raw hexadecimal timestamp"].join("\n"));const a=s-9,u=Math.round(Math.random()*(a>15?15:a)),_=this.randomUUID(a);return"".concat(_.substring(0,u)).concat(i).concat(_.substring(u)).concat(u.toString(16))})),__publicField(this,"parseStamp",((s,o)=>{if(o&&!/t0|t[1-9]\d{1,}/.test(o))throw new Error("Cannot extract date from a formated UUID with no timestamp in the format");const i=o?o.replace(/\$[rs]\d{0,}|\$t0|\$t[1-9]\d{1,}/g,(s=>{const o={$r:s=>[...Array(s)].map((()=>"r")).join(""),$s:s=>[...Array(s)].map((()=>"s")).join(""),$t:s=>[...Array(s)].map((()=>"t")).join("")},i=s.slice(0,2),a=Number.parseInt(s.slice(2),10);return o[i](a)})).replace(/^(.*?)(t{8,})(.*)$/g,((o,i,a)=>s.substring(i.length,i.length+a.length))):s;if(8===i.length)return new Date(1e3*Number.parseInt(i,16));if(i.length<10)throw new Error("Stamp length invalid");const a=Number.parseInt(i.substring(i.length-1),16);return new 
Date(1e3*Number.parseInt(i.substring(a,a+8),16))})),__publicField(this,"setCounter",(s=>{this.counter=s})),__publicField(this,"validate",((s,o)=>{const i=o?this._normalizeDictionary(o):this.dict;return s.split("").every((s=>i.includes(s)))}));const o=__spreadValues(__spreadValues({},C),s);this.counter=0,this.debug=!1,this.dict=[],this.version="5.3.2";const{dictionary:i,shuffle:a,length:u,counter:_}=o;this.uuidLength=u,this.setDictionary(i,a),this.setCounter(_),this.debug=o.debug,this.log(this.dict),this.log("Generator instantiated with Dictionary Size ".concat(this.dictLength," and counter set to ").concat(this.counter)),this.log=this.log.bind(this),this.setDictionary=this.setDictionary.bind(this),this.setCounter=this.setCounter.bind(this),this.seq=this.seq.bind(this),this.sequentialUUID=this.sequentialUUID.bind(this),this.rnd=this.rnd.bind(this),this.randomUUID=this.randomUUID.bind(this),this.fmt=this.fmt.bind(this),this.formattedUUID=this.formattedUUID.bind(this),this.availableUUIDs=this.availableUUIDs.bind(this),this.approxMaxBeforeCollision=this.approxMaxBeforeCollision.bind(this),this.collisionProbability=this.collisionProbability.bind(this),this.uniqueness=this.uniqueness.bind(this),this.getVersion=this.getVersion.bind(this),this.stamp=this.stamp.bind(this),this.parseStamp=this.parseStamp.bind(this)}};__publicField(j,"default",j);var L,B=j;return L=w,((a,_,w,x)=>{if(_&&"object"==typeof _||"function"==typeof _)for(let C of i(_))u.call(a,C)||C===w||s(a,C,{get:()=>_[C],enumerable:!(x=o(_,C))||x.enumerable});return a})(s({},"__esModule",{value:!0}),L)})();s.exports=o.default,"undefined"!=typeof window&&(o=o.default)},9325:(s,o,i)=>{var a=i(34840),u="object"==typeof self&&self&&self.Object===Object&&self,_=a||u||Function("return this")();s.exports=_},9404:function(s){s.exports=function(){"use strict";var s=Array.prototype.slice;function createClass(s,o){o&&(s.prototype=Object.create(o.prototype)),s.prototype.constructor=s}function Iterable(s){return isIterable(s)?s:Seq(s)}function KeyedIterable(s){return isKeyed(s)?s:KeyedSeq(s)}function IndexedIterable(s){return isIndexed(s)?s:IndexedSeq(s)}function SetIterable(s){return isIterable(s)&&!isAssociative(s)?s:SetSeq(s)}function isIterable(s){return!(!s||!s[o])}function isKeyed(s){return!(!s||!s[i])}function isIndexed(s){return!(!s||!s[a])}function isAssociative(s){return isKeyed(s)||isIndexed(s)}function isOrdered(s){return!(!s||!s[u])}createClass(KeyedIterable,Iterable),createClass(IndexedIterable,Iterable),createClass(SetIterable,Iterable),Iterable.isIterable=isIterable,Iterable.isKeyed=isKeyed,Iterable.isIndexed=isIndexed,Iterable.isAssociative=isAssociative,Iterable.isOrdered=isOrdered,Iterable.Keyed=KeyedIterable,Iterable.Indexed=IndexedIterable,Iterable.Set=SetIterable;var o="@@__IMMUTABLE_ITERABLE__@@",i="@@__IMMUTABLE_KEYED__@@",a="@@__IMMUTABLE_INDEXED__@@",u="@@__IMMUTABLE_ORDERED__@@",_="delete",w=5,x=1<>>0;if(""+i!==o||4294967295===i)return NaN;o=i}return o<0?ensureSize(s)+o:o}function returnTrue(){return!0}function wholeSlice(s,o,i){return(0===s||void 0!==i&&s<=-i)&&(void 0===o||void 0!==i&&o>=i)}function resolveBegin(s,o){return resolveIndex(s,o,0)}function resolveEnd(s,o){return resolveIndex(s,o,o)}function resolveIndex(s,o,i){return void 0===s?i:s<0?Math.max(0,o+s):void 0===o?s:Math.min(o,s)}var $=0,U=1,V=2,z="function"==typeof Symbol&&Symbol.iterator,Y="@@iterator",Z=z||Y;function Iterator(s){this.next=s}function iteratorValue(s,o,i,a){var u=0===s?o:1===s?i:[o,i];return a?a.value=u:a={value:u,done:!1},a}function 
iteratorDone(){return{value:void 0,done:!0}}function hasIterator(s){return!!getIteratorFn(s)}function isIterator(s){return s&&"function"==typeof s.next}function getIterator(s){var o=getIteratorFn(s);return o&&o.call(s)}function getIteratorFn(s){var o=s&&(z&&s[z]||s[Y]);if("function"==typeof o)return o}function isArrayLike(s){return s&&"number"==typeof s.length}function Seq(s){return null==s?emptySequence():isIterable(s)?s.toSeq():seqFromValue(s)}function KeyedSeq(s){return null==s?emptySequence().toKeyedSeq():isIterable(s)?isKeyed(s)?s.toSeq():s.fromEntrySeq():keyedSeqFromValue(s)}function IndexedSeq(s){return null==s?emptySequence():isIterable(s)?isKeyed(s)?s.entrySeq():s.toIndexedSeq():indexedSeqFromValue(s)}function SetSeq(s){return(null==s?emptySequence():isIterable(s)?isKeyed(s)?s.entrySeq():s:indexedSeqFromValue(s)).toSetSeq()}Iterator.prototype.toString=function(){return"[Iterator]"},Iterator.KEYS=$,Iterator.VALUES=U,Iterator.ENTRIES=V,Iterator.prototype.inspect=Iterator.prototype.toSource=function(){return this.toString()},Iterator.prototype[Z]=function(){return this},createClass(Seq,Iterable),Seq.of=function(){return Seq(arguments)},Seq.prototype.toSeq=function(){return this},Seq.prototype.toString=function(){return this.__toString("Seq {","}")},Seq.prototype.cacheResult=function(){return!this._cache&&this.__iterateUncached&&(this._cache=this.entrySeq().toArray(),this.size=this._cache.length),this},Seq.prototype.__iterate=function(s,o){return seqIterate(this,s,o,!0)},Seq.prototype.__iterator=function(s,o){return seqIterator(this,s,o,!0)},createClass(KeyedSeq,Seq),KeyedSeq.prototype.toKeyedSeq=function(){return this},createClass(IndexedSeq,Seq),IndexedSeq.of=function(){return IndexedSeq(arguments)},IndexedSeq.prototype.toIndexedSeq=function(){return this},IndexedSeq.prototype.toString=function(){return this.__toString("Seq [","]")},IndexedSeq.prototype.__iterate=function(s,o){return seqIterate(this,s,o,!1)},IndexedSeq.prototype.__iterator=function(s,o){return seqIterator(this,s,o,!1)},createClass(SetSeq,Seq),SetSeq.of=function(){return SetSeq(arguments)},SetSeq.prototype.toSetSeq=function(){return this},Seq.isSeq=isSeq,Seq.Keyed=KeyedSeq,Seq.Set=SetSeq,Seq.Indexed=IndexedSeq;var ee,ie,ae,ce="@@__IMMUTABLE_SEQ__@@";function ArraySeq(s){this._array=s,this.size=s.length}function ObjectSeq(s){var o=Object.keys(s);this._object=s,this._keys=o,this.size=o.length}function IterableSeq(s){this._iterable=s,this.size=s.length||s.size}function IteratorSeq(s){this._iterator=s,this._iteratorCache=[]}function isSeq(s){return!(!s||!s[ce])}function emptySequence(){return ee||(ee=new ArraySeq([]))}function keyedSeqFromValue(s){var o=Array.isArray(s)?new ArraySeq(s).fromEntrySeq():isIterator(s)?new IteratorSeq(s).fromEntrySeq():hasIterator(s)?new IterableSeq(s).fromEntrySeq():"object"==typeof s?new ObjectSeq(s):void 0;if(!o)throw new TypeError("Expected Array or iterable object of [k, v] entries, or keyed object: "+s);return o}function indexedSeqFromValue(s){var o=maybeIndexedSeqFromValue(s);if(!o)throw new TypeError("Expected Array or iterable object of values: "+s);return o}function seqFromValue(s){var o=maybeIndexedSeqFromValue(s)||"object"==typeof s&&new ObjectSeq(s);if(!o)throw new TypeError("Expected Array or iterable object of values, or keyed object: "+s);return o}function maybeIndexedSeqFromValue(s){return isArrayLike(s)?new ArraySeq(s):isIterator(s)?new IteratorSeq(s):hasIterator(s)?new IterableSeq(s):void 0}function seqIterate(s,o,i,a){var u=s._cache;if(u){for(var 
_=u.length-1,w=0;w<=_;w++){var x=u[i?_-w:w];if(!1===o(x[1],a?x[0]:w,s))return w+1}return w}return s.__iterateUncached(o,i)}function seqIterator(s,o,i,a){var u=s._cache;if(u){var _=u.length-1,w=0;return new Iterator((function(){var s=u[i?_-w:w];return w++>_?iteratorDone():iteratorValue(o,a?s[0]:w-1,s[1])}))}return s.__iteratorUncached(o,i)}function fromJS(s,o){return o?fromJSWith(o,s,"",{"":s}):fromJSDefault(s)}function fromJSWith(s,o,i,a){return Array.isArray(o)?s.call(a,i,IndexedSeq(o).map((function(i,a){return fromJSWith(s,i,a,o)}))):isPlainObj(o)?s.call(a,i,KeyedSeq(o).map((function(i,a){return fromJSWith(s,i,a,o)}))):o}function fromJSDefault(s){return Array.isArray(s)?IndexedSeq(s).map(fromJSDefault).toList():isPlainObj(s)?KeyedSeq(s).map(fromJSDefault).toMap():s}function isPlainObj(s){return s&&(s.constructor===Object||void 0===s.constructor)}function is(s,o){if(s===o||s!=s&&o!=o)return!0;if(!s||!o)return!1;if("function"==typeof s.valueOf&&"function"==typeof o.valueOf){if((s=s.valueOf())===(o=o.valueOf())||s!=s&&o!=o)return!0;if(!s||!o)return!1}return!("function"!=typeof s.equals||"function"!=typeof o.equals||!s.equals(o))}function deepEqual(s,o){if(s===o)return!0;if(!isIterable(o)||void 0!==s.size&&void 0!==o.size&&s.size!==o.size||void 0!==s.__hash&&void 0!==o.__hash&&s.__hash!==o.__hash||isKeyed(s)!==isKeyed(o)||isIndexed(s)!==isIndexed(o)||isOrdered(s)!==isOrdered(o))return!1;if(0===s.size&&0===o.size)return!0;var i=!isAssociative(s);if(isOrdered(s)){var a=s.entries();return o.every((function(s,o){var u=a.next().value;return u&&is(u[1],s)&&(i||is(u[0],o))}))&&a.next().done}var u=!1;if(void 0===s.size)if(void 0===o.size)"function"==typeof s.cacheResult&&s.cacheResult();else{u=!0;var _=s;s=o,o=_}var w=!0,x=o.__iterate((function(o,a){if(i?!s.has(o):u?!is(o,s.get(a,j)):!is(s.get(a,j),o))return w=!1,!1}));return w&&s.size===x}function Repeat(s,o){if(!(this instanceof Repeat))return new Repeat(s,o);if(this._value=s,this.size=void 0===o?1/0:Math.max(0,o),0===this.size){if(ie)return ie;ie=this}}function invariant(s,o){if(!s)throw new Error(o)}function Range(s,o,i){if(!(this instanceof Range))return new Range(s,o,i);if(invariant(0!==i,"Cannot step a Range by 0"),s=s||0,void 0===o&&(o=1/0),i=void 0===i?1:Math.abs(i),oa?iteratorDone():iteratorValue(s,u,i[o?a-u++:u++])}))},createClass(ObjectSeq,KeyedSeq),ObjectSeq.prototype.get=function(s,o){return void 0===o||this.has(s)?this._object[s]:o},ObjectSeq.prototype.has=function(s){return this._object.hasOwnProperty(s)},ObjectSeq.prototype.__iterate=function(s,o){for(var i=this._object,a=this._keys,u=a.length-1,_=0;_<=u;_++){var w=a[o?u-_:_];if(!1===s(i[w],w,this))return _+1}return _},ObjectSeq.prototype.__iterator=function(s,o){var i=this._object,a=this._keys,u=a.length-1,_=0;return new Iterator((function(){var w=a[o?u-_:_];return _++>u?iteratorDone():iteratorValue(s,w,i[w])}))},ObjectSeq.prototype[u]=!0,createClass(IterableSeq,IndexedSeq),IterableSeq.prototype.__iterateUncached=function(s,o){if(o)return this.cacheResult().__iterate(s,o);var i=getIterator(this._iterable),a=0;if(isIterator(i))for(var u;!(u=i.next()).done&&!1!==s(u.value,a++,this););return a},IterableSeq.prototype.__iteratorUncached=function(s,o){if(o)return this.cacheResult().__iterator(s,o);var i=getIterator(this._iterable);if(!isIterator(i))return new Iterator(iteratorDone);var a=0;return new Iterator((function(){var o=i.next();return o.done?o:iteratorValue(s,a++,o.value)}))},createClass(IteratorSeq,IndexedSeq),IteratorSeq.prototype.__iterateUncached=function(s,o){if(o)return 
this.cacheResult().__iterate(s,o);for(var i,a=this._iterator,u=this._iteratorCache,_=0;_=a.length){var o=i.next();if(o.done)return o;a[u]=o.value}return iteratorValue(s,u,a[u++])}))},createClass(Repeat,IndexedSeq),Repeat.prototype.toString=function(){return 0===this.size?"Repeat []":"Repeat [ "+this._value+" "+this.size+" times ]"},Repeat.prototype.get=function(s,o){return this.has(s)?this._value:o},Repeat.prototype.includes=function(s){return is(this._value,s)},Repeat.prototype.slice=function(s,o){var i=this.size;return wholeSlice(s,o,i)?this:new Repeat(this._value,resolveEnd(o,i)-resolveBegin(s,i))},Repeat.prototype.reverse=function(){return this},Repeat.prototype.indexOf=function(s){return is(this._value,s)?0:-1},Repeat.prototype.lastIndexOf=function(s){return is(this._value,s)?this.size:-1},Repeat.prototype.__iterate=function(s,o){for(var i=0;i=0&&o=0&&ii?iteratorDone():iteratorValue(s,_++,w)}))},Range.prototype.equals=function(s){return s instanceof Range?this._start===s._start&&this._end===s._end&&this._step===s._step:deepEqual(this,s)},createClass(Collection,Iterable),createClass(KeyedCollection,Collection),createClass(IndexedCollection,Collection),createClass(SetCollection,Collection),Collection.Keyed=KeyedCollection,Collection.Indexed=IndexedCollection,Collection.Set=SetCollection;var le="function"==typeof Math.imul&&-2===Math.imul(4294967295,2)?Math.imul:function imul(s,o){var i=65535&(s|=0),a=65535&(o|=0);return i*a+((s>>>16)*a+i*(o>>>16)<<16>>>0)|0};function smi(s){return s>>>1&1073741824|3221225471&s}function hash(s){if(!1===s||null==s)return 0;if("function"==typeof s.valueOf&&(!1===(s=s.valueOf())||null==s))return 0;if(!0===s)return 1;var o=typeof s;if("number"===o){if(s!=s||s===1/0)return 0;var i=0|s;for(i!==s&&(i^=4294967295*s);s>4294967295;)i^=s/=4294967295;return smi(i)}if("string"===o)return s.length>Se?cachedHashString(s):hashString(s);if("function"==typeof s.hashCode)return s.hashCode();if("object"===o)return hashJSObj(s);if("function"==typeof s.toString)return hashString(s.toString());throw new Error("Value type "+o+" cannot be hashed.")}function cachedHashString(s){var o=Pe[s];return void 0===o&&(o=hashString(s),xe===we&&(xe=0,Pe={}),xe++,Pe[s]=o),o}function hashString(s){for(var o=0,i=0;i0)switch(s.nodeType){case 1:return s.uniqueID;case 9:return s.documentElement&&s.documentElement.uniqueID}}var fe,ye="function"==typeof WeakMap;ye&&(fe=new WeakMap);var be=0,_e="__immutablehash__";"function"==typeof Symbol&&(_e=Symbol(_e));var Se=16,we=255,xe=0,Pe={};function assertNotInfinite(s){invariant(s!==1/0,"Cannot perform this action with an infinite size.")}function Map(s){return null==s?emptyMap():isMap(s)&&!isOrdered(s)?s:emptyMap().withMutations((function(o){var i=KeyedIterable(s);assertNotInfinite(i.size),i.forEach((function(s,i){return o.set(i,s)}))}))}function isMap(s){return!(!s||!s[Re])}createClass(Map,KeyedCollection),Map.of=function(){var o=s.call(arguments,0);return emptyMap().withMutations((function(s){for(var i=0;i=o.length)throw new Error("Missing value for key: "+o[i]);s.set(o[i],o[i+1])}}))},Map.prototype.toString=function(){return this.__toString("Map {","}")},Map.prototype.get=function(s,o){return this._root?this._root.get(0,void 0,s,o):o},Map.prototype.set=function(s,o){return updateMap(this,s,o)},Map.prototype.setIn=function(s,o){return this.updateIn(s,j,(function(){return o}))},Map.prototype.remove=function(s){return updateMap(this,s,j)},Map.prototype.deleteIn=function(s){return this.updateIn(s,(function(){return 
j}))},Map.prototype.update=function(s,o,i){return 1===arguments.length?s(this):this.updateIn([s],o,i)},Map.prototype.updateIn=function(s,o,i){i||(i=o,o=void 0);var a=updateInDeepMap(this,forceIterator(s),o,i);return a===j?void 0:a},Map.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._root=null,this.__hash=void 0,this.__altered=!0,this):emptyMap()},Map.prototype.merge=function(){return mergeIntoMapWith(this,void 0,arguments)},Map.prototype.mergeWith=function(o){return mergeIntoMapWith(this,o,s.call(arguments,1))},Map.prototype.mergeIn=function(o){var i=s.call(arguments,1);return this.updateIn(o,emptyMap(),(function(s){return"function"==typeof s.merge?s.merge.apply(s,i):i[i.length-1]}))},Map.prototype.mergeDeep=function(){return mergeIntoMapWith(this,deepMerger,arguments)},Map.prototype.mergeDeepWith=function(o){var i=s.call(arguments,1);return mergeIntoMapWith(this,deepMergerWith(o),i)},Map.prototype.mergeDeepIn=function(o){var i=s.call(arguments,1);return this.updateIn(o,emptyMap(),(function(s){return"function"==typeof s.mergeDeep?s.mergeDeep.apply(s,i):i[i.length-1]}))},Map.prototype.sort=function(s){return OrderedMap(sortFactory(this,s))},Map.prototype.sortBy=function(s,o){return OrderedMap(sortFactory(this,o,s))},Map.prototype.withMutations=function(s){var o=this.asMutable();return s(o),o.wasAltered()?o.__ensureOwner(this.__ownerID):this},Map.prototype.asMutable=function(){return this.__ownerID?this:this.__ensureOwner(new OwnerID)},Map.prototype.asImmutable=function(){return this.__ensureOwner()},Map.prototype.wasAltered=function(){return this.__altered},Map.prototype.__iterator=function(s,o){return new MapIterator(this,s,o)},Map.prototype.__iterate=function(s,o){var i=this,a=0;return this._root&&this._root.iterate((function(o){return a++,s(o[1],o[0],i)}),o),a},Map.prototype.__ensureOwner=function(s){return s===this.__ownerID?this:s?makeMap(this.size,this._root,s,this.__hash):(this.__ownerID=s,this.__altered=!1,this)},Map.isMap=isMap;var Te,Re="@@__IMMUTABLE_MAP__@@",$e=Map.prototype;function ArrayMapNode(s,o){this.ownerID=s,this.entries=o}function BitmapIndexedNode(s,o,i){this.ownerID=s,this.bitmap=o,this.nodes=i}function HashArrayMapNode(s,o,i){this.ownerID=s,this.count=o,this.nodes=i}function HashCollisionNode(s,o,i){this.ownerID=s,this.keyHash=o,this.entries=i}function ValueNode(s,o,i){this.ownerID=s,this.keyHash=o,this.entry=i}function MapIterator(s,o,i){this._type=o,this._reverse=i,this._stack=s._root&&mapIteratorFrame(s._root)}function mapIteratorValue(s,o){return iteratorValue(s,o[0],o[1])}function mapIteratorFrame(s,o){return{node:s,index:0,__prev:o}}function makeMap(s,o,i,a){var u=Object.create($e);return u.size=s,u._root=o,u.__ownerID=i,u.__hash=a,u.__altered=!1,u}function emptyMap(){return Te||(Te=makeMap(0))}function updateMap(s,o,i){var a,u;if(s._root){var _=MakeRef(L),w=MakeRef(B);if(a=updateNode(s._root,s.__ownerID,0,void 0,o,i,_,w),!w.value)return s;u=s.size+(_.value?i===j?-1:1:0)}else{if(i===j)return s;u=1,a=new ArrayMapNode(s.__ownerID,[[o,i]])}return s.__ownerID?(s.size=u,s._root=a,s.__hash=void 0,s.__altered=!0,s):a?makeMap(u,a):emptyMap()}function updateNode(s,o,i,a,u,_,w,x){return s?s.update(o,i,a,u,_,w,x):_===j?s:(SetRef(x),SetRef(w),new ValueNode(o,a,[u,_]))}function isLeafNode(s){return s.constructor===ValueNode||s.constructor===HashCollisionNode}function mergeIntoNode(s,o,i,a,u){if(s.keyHash===a)return new HashCollisionNode(o,a,[s.entry,u]);var _,x=(0===i?s.keyHash:s.keyHash>>>i)&C,j=(0===i?a:a>>>i)&C;return new 
BitmapIndexedNode(o,1<>>=1)w[C]=1&i?o[_++]:void 0;return w[a]=u,new HashArrayMapNode(s,_+1,w)}function mergeIntoMapWith(s,o,i){for(var a=[],u=0;u>1&1431655765))+(s>>2&858993459))+(s>>4)&252645135,s+=s>>8,127&(s+=s>>16)}function setIn(s,o,i,a){var u=a?s:arrCopy(s);return u[o]=i,u}function spliceIn(s,o,i,a){var u=s.length+1;if(a&&o+1===u)return s[o]=i,s;for(var _=new Array(u),w=0,x=0;x=qe)return createNodes(s,C,a,u);var U=s&&s===this.ownerID,V=U?C:arrCopy(C);return $?x?L===B-1?V.pop():V[L]=V.pop():V[L]=[a,u]:V.push([a,u]),U?(this.entries=V,this):new ArrayMapNode(s,V)}},BitmapIndexedNode.prototype.get=function(s,o,i,a){void 0===o&&(o=hash(i));var u=1<<((0===s?o:o>>>s)&C),_=this.bitmap;return _&u?this.nodes[popCount(_&u-1)].get(s+w,o,i,a):a},BitmapIndexedNode.prototype.update=function(s,o,i,a,u,_,x){void 0===i&&(i=hash(a));var L=(0===o?i:i>>>o)&C,B=1<=ze)return expandNodes(s,z,$,L,Z);if(U&&!Z&&2===z.length&&isLeafNode(z[1^V]))return z[1^V];if(U&&Z&&1===z.length&&isLeafNode(Z))return Z;var ee=s&&s===this.ownerID,ie=U?Z?$:$^B:$|B,ae=U?Z?setIn(z,V,Z,ee):spliceOut(z,V,ee):spliceIn(z,V,Z,ee);return ee?(this.bitmap=ie,this.nodes=ae,this):new BitmapIndexedNode(s,ie,ae)},HashArrayMapNode.prototype.get=function(s,o,i,a){void 0===o&&(o=hash(i));var u=(0===s?o:o>>>s)&C,_=this.nodes[u];return _?_.get(s+w,o,i,a):a},HashArrayMapNode.prototype.update=function(s,o,i,a,u,_,x){void 0===i&&(i=hash(a));var L=(0===o?i:i>>>o)&C,B=u===j,$=this.nodes,U=$[L];if(B&&!U)return this;var V=updateNode(U,s,o+w,i,a,u,_,x);if(V===U)return this;var z=this.count;if(U){if(!V&&--z0&&a=0&&s>>o&C;if(a>=this.array.length)return new VNode([],s);var u,_=0===a;if(o>0){var x=this.array[a];if((u=x&&x.removeBefore(s,o-w,i))===x&&_)return this}if(_&&!u)return this;var j=editableVNode(this,s);if(!_)for(var L=0;L>>o&C;if(u>=this.array.length)return this;if(o>0){var _=this.array[u];if((a=_&&_.removeAfter(s,o-w,i))===_&&u===this.array.length-1)return this}var x=editableVNode(this,s);return x.array.splice(u+1),a&&(x.array[u]=a),x};var Xe,Qe,et={};function iterateList(s,o){var i=s._origin,a=s._capacity,u=getTailOffset(a),_=s._tail;return iterateNodeOrLeaf(s._root,s._level,0);function iterateNodeOrLeaf(s,o,i){return 0===o?iterateLeaf(s,i):iterateNode(s,o,i)}function iterateLeaf(s,w){var C=w===u?_&&_.array:s&&s.array,j=w>i?0:i-w,L=a-w;return L>x&&(L=x),function(){if(j===L)return et;var s=o?--L:j++;return C&&C[s]}}function iterateNode(s,u,_){var C,j=s&&s.array,L=_>i?0:i-_>>u,B=1+(a-_>>u);return B>x&&(B=x),function(){for(;;){if(C){var s=C();if(s!==et)return s;C=null}if(L===B)return et;var i=o?--B:L++;C=iterateNodeOrLeaf(j&&j[i],u-w,_+(i<=s.size||o<0)return s.withMutations((function(s){o<0?setListBounds(s,o).set(0,i):setListBounds(s,0,o+1).set(o,i)}));o+=s._origin;var a=s._tail,u=s._root,_=MakeRef(B);return o>=getTailOffset(s._capacity)?a=updateVNode(a,s.__ownerID,0,o,i,_):u=updateVNode(u,s.__ownerID,s._level,o,i,_),_.value?s.__ownerID?(s._root=u,s._tail=a,s.__hash=void 0,s.__altered=!0,s):makeList(s._origin,s._capacity,s._level,u,a):s}function updateVNode(s,o,i,a,u,_){var x,j=a>>>i&C,L=s&&j0){var B=s&&s.array[j],$=updateVNode(B,o,i-w,a,u,_);return $===B?s:((x=editableVNode(s,o)).array[j]=$,x)}return L&&s.array[j]===u?s:(SetRef(_),x=editableVNode(s,o),void 0===u&&j===x.array.length-1?x.array.pop():x.array[j]=u,x)}function editableVNode(s,o){return o&&s&&o===s.ownerID?s:new VNode(s?s.array.slice():[],o)}function listNodeFor(s,o){if(o>=getTailOffset(s._capacity))return s._tail;if(o<1<0;)i=i.array[o>>>a&C],a-=w;return i}}function 
setListBounds(s,o,i){void 0!==o&&(o|=0),void 0!==i&&(i|=0);var a=s.__ownerID||new OwnerID,u=s._origin,_=s._capacity,x=u+o,j=void 0===i?_:i<0?_+i:u+i;if(x===u&&j===_)return s;if(x>=j)return s.clear();for(var L=s._level,B=s._root,$=0;x+$<0;)B=new VNode(B&&B.array.length?[void 0,B]:[],a),$+=1<<(L+=w);$&&(x+=$,u+=$,j+=$,_+=$);for(var U=getTailOffset(_),V=getTailOffset(j);V>=1<U?new VNode([],a):z;if(z&&V>U&&x<_&&z.array.length){for(var Z=B=editableVNode(B,a),ee=L;ee>w;ee-=w){var ie=U>>>ee&C;Z=Z.array[ie]=editableVNode(Z.array[ie],a)}Z.array[U>>>w&C]=z}if(j<_&&(Y=Y&&Y.removeAfter(a,0,j)),x>=V)x-=V,j-=V,L=w,B=null,Y=Y&&Y.removeBefore(a,0,x);else if(x>u||V>>L&C;if(ae!==V>>>L&C)break;ae&&($+=(1<u&&(B=B.removeBefore(a,L,x-$)),B&&Vu&&(u=x.size),isIterable(w)||(x=x.map((function(s){return fromJS(s)}))),a.push(x)}return u>s.size&&(s=s.setSize(u)),mergeIntoCollectionWith(s,o,a)}function getTailOffset(s){return s>>w<=x&&w.size>=2*_.size?(a=(u=w.filter((function(s,o){return void 0!==s&&C!==o}))).toKeyedSeq().map((function(s){return s[0]})).flip().toMap(),s.__ownerID&&(a.__ownerID=u.__ownerID=s.__ownerID)):(a=_.remove(o),u=C===w.size-1?w.pop():w.set(C,void 0))}else if(L){if(i===w.get(C)[1])return s;a=_,u=w.set(C,[o,i])}else a=_.set(o,w.size),u=w.set(w.size,[o,i]);return s.__ownerID?(s.size=a.size,s._map=a,s._list=u,s.__hash=void 0,s):makeOrderedMap(a,u)}function ToKeyedSequence(s,o){this._iter=s,this._useKeys=o,this.size=s.size}function ToIndexedSequence(s){this._iter=s,this.size=s.size}function ToSetSequence(s){this._iter=s,this.size=s.size}function FromEntriesSequence(s){this._iter=s,this.size=s.size}function flipFactory(s){var o=makeSequence(s);return o._iter=s,o.size=s.size,o.flip=function(){return s},o.reverse=function(){var o=s.reverse.apply(this);return o.flip=function(){return s.reverse()},o},o.has=function(o){return s.includes(o)},o.includes=function(o){return s.has(o)},o.cacheResult=cacheResultThrough,o.__iterateUncached=function(o,i){var a=this;return s.__iterate((function(s,i){return!1!==o(i,s,a)}),i)},o.__iteratorUncached=function(o,i){if(o===V){var a=s.__iterator(o,i);return new Iterator((function(){var s=a.next();if(!s.done){var o=s.value[0];s.value[0]=s.value[1],s.value[1]=o}return s}))}return s.__iterator(o===U?$:U,i)},o}function mapFactory(s,o,i){var a=makeSequence(s);return a.size=s.size,a.has=function(o){return s.has(o)},a.get=function(a,u){var _=s.get(a,j);return _===j?u:o.call(i,_,a,s)},a.__iterateUncached=function(a,u){var _=this;return s.__iterate((function(s,u,w){return!1!==a(o.call(i,s,u,w),u,_)}),u)},a.__iteratorUncached=function(a,u){var _=s.__iterator(V,u);return new Iterator((function(){var u=_.next();if(u.done)return u;var w=u.value,x=w[0];return iteratorValue(a,x,o.call(i,w[1],x,s),u)}))},a}function reverseFactory(s,o){var i=makeSequence(s);return i._iter=s,i.size=s.size,i.reverse=function(){return s},s.flip&&(i.flip=function(){var o=flipFactory(s);return o.reverse=function(){return s.flip()},o}),i.get=function(i,a){return s.get(o?i:-1-i,a)},i.has=function(i){return s.has(o?i:-1-i)},i.includes=function(o){return s.includes(o)},i.cacheResult=cacheResultThrough,i.__iterate=function(o,i){var a=this;return s.__iterate((function(s,i){return o(s,i,a)}),!i)},i.__iterator=function(o,i){return s.__iterator(o,!i)},i}function filterFactory(s,o,i,a){var u=makeSequence(s);return a&&(u.has=function(a){var u=s.get(a,j);return u!==j&&!!o.call(i,u,a,s)},u.get=function(a,u){var _=s.get(a,j);return _!==j&&o.call(i,_,a,s)?_:u}),u.__iterateUncached=function(u,_){var w=this,x=0;return 
s.__iterate((function(s,_,C){if(o.call(i,s,_,C))return x++,u(s,a?_:x-1,w)}),_),x},u.__iteratorUncached=function(u,_){var w=s.__iterator(V,_),x=0;return new Iterator((function(){for(;;){var _=w.next();if(_.done)return _;var C=_.value,j=C[0],L=C[1];if(o.call(i,L,j,s))return iteratorValue(u,a?j:x++,L,_)}}))},u}function countByFactory(s,o,i){var a=Map().asMutable();return s.__iterate((function(u,_){a.update(o.call(i,u,_,s),0,(function(s){return s+1}))})),a.asImmutable()}function groupByFactory(s,o,i){var a=isKeyed(s),u=(isOrdered(s)?OrderedMap():Map()).asMutable();s.__iterate((function(_,w){u.update(o.call(i,_,w,s),(function(s){return(s=s||[]).push(a?[w,_]:_),s}))}));var _=iterableClass(s);return u.map((function(o){return reify(s,_(o))}))}function sliceFactory(s,o,i,a){var u=s.size;if(void 0!==o&&(o|=0),void 0!==i&&(i===1/0?i=u:i|=0),wholeSlice(o,i,u))return s;var _=resolveBegin(o,u),w=resolveEnd(i,u);if(_!=_||w!=w)return sliceFactory(s.toSeq().cacheResult(),o,i,a);var x,C=w-_;C==C&&(x=C<0?0:C);var j=makeSequence(s);return j.size=0===x?x:s.size&&x||void 0,!a&&isSeq(s)&&x>=0&&(j.get=function(o,i){return(o=wrapIndex(this,o))>=0&&ox)return iteratorDone();var s=u.next();return a||o===U?s:iteratorValue(o,C-1,o===$?void 0:s.value[1],s)}))},j}function takeWhileFactory(s,o,i){var a=makeSequence(s);return a.__iterateUncached=function(a,u){var _=this;if(u)return this.cacheResult().__iterate(a,u);var w=0;return s.__iterate((function(s,u,x){return o.call(i,s,u,x)&&++w&&a(s,u,_)})),w},a.__iteratorUncached=function(a,u){var _=this;if(u)return this.cacheResult().__iterator(a,u);var w=s.__iterator(V,u),x=!0;return new Iterator((function(){if(!x)return iteratorDone();var s=w.next();if(s.done)return s;var u=s.value,C=u[0],j=u[1];return o.call(i,j,C,_)?a===V?s:iteratorValue(a,C,j,s):(x=!1,iteratorDone())}))},a}function skipWhileFactory(s,o,i,a){var u=makeSequence(s);return u.__iterateUncached=function(u,_){var w=this;if(_)return this.cacheResult().__iterate(u,_);var x=!0,C=0;return s.__iterate((function(s,_,j){if(!x||!(x=o.call(i,s,_,j)))return C++,u(s,a?_:C-1,w)})),C},u.__iteratorUncached=function(u,_){var w=this;if(_)return this.cacheResult().__iterator(u,_);var x=s.__iterator(V,_),C=!0,j=0;return new Iterator((function(){var s,_,L;do{if((s=x.next()).done)return a||u===U?s:iteratorValue(u,j++,u===$?void 0:s.value[1],s);var B=s.value;_=B[0],L=B[1],C&&(C=o.call(i,L,_,w))}while(C);return u===V?s:iteratorValue(u,_,L,s)}))},u}function concatFactory(s,o){var i=isKeyed(s),a=[s].concat(o).map((function(s){return isIterable(s)?i&&(s=KeyedIterable(s)):s=i?keyedSeqFromValue(s):indexedSeqFromValue(Array.isArray(s)?s:[s]),s})).filter((function(s){return 0!==s.size}));if(0===a.length)return s;if(1===a.length){var u=a[0];if(u===s||i&&isKeyed(u)||isIndexed(s)&&isIndexed(u))return u}var _=new ArraySeq(a);return i?_=_.toKeyedSeq():isIndexed(s)||(_=_.toSetSeq()),(_=_.flatten(!0)).size=a.reduce((function(s,o){if(void 0!==s){var i=o.size;if(void 0!==i)return s+i}}),0),_}function flattenFactory(s,o,i){var a=makeSequence(s);return a.__iterateUncached=function(a,u){var _=0,w=!1;function flatDeep(s,x){var C=this;s.__iterate((function(s,u){return(!o||x0}function zipWithFactory(s,o,i){var a=makeSequence(s);return a.size=new ArraySeq(i).map((function(s){return s.size})).min(),a.__iterate=function(s,o){for(var i,a=this.__iterator(U,o),u=0;!(i=a.next()).done&&!1!==s(i.value,u++,this););return u},a.__iteratorUncached=function(s,a){var u=i.map((function(s){return s=Iterable(s),getIterator(a?s.reverse():s)})),_=0,w=!1;return new 
Iterator((function(){var i;return w||(i=u.map((function(s){return s.next()})),w=i.some((function(s){return s.done}))),w?iteratorDone():iteratorValue(s,_++,o.apply(null,i.map((function(s){return s.value}))))}))},a}function reify(s,o){return isSeq(s)?o:s.constructor(o)}function validateEntry(s){if(s!==Object(s))throw new TypeError("Expected [K, V] tuple: "+s)}function resolveSize(s){return assertNotInfinite(s.size),ensureSize(s)}function iterableClass(s){return isKeyed(s)?KeyedIterable:isIndexed(s)?IndexedIterable:SetIterable}function makeSequence(s){return Object.create((isKeyed(s)?KeyedSeq:isIndexed(s)?IndexedSeq:SetSeq).prototype)}function cacheResultThrough(){return this._iter.cacheResult?(this._iter.cacheResult(),this.size=this._iter.size,this):Seq.prototype.cacheResult.call(this)}function defaultComparator(s,o){return s>o?1:s=0;i--)o={value:arguments[i],next:o};return this.__ownerID?(this.size=s,this._head=o,this.__hash=void 0,this.__altered=!0,this):makeStack(s,o)},Stack.prototype.pushAll=function(s){if(0===(s=IndexedIterable(s)).size)return this;assertNotInfinite(s.size);var o=this.size,i=this._head;return s.reverse().forEach((function(s){o++,i={value:s,next:i}})),this.__ownerID?(this.size=o,this._head=i,this.__hash=void 0,this.__altered=!0,this):makeStack(o,i)},Stack.prototype.pop=function(){return this.slice(1)},Stack.prototype.unshift=function(){return this.push.apply(this,arguments)},Stack.prototype.unshiftAll=function(s){return this.pushAll(s)},Stack.prototype.shift=function(){return this.pop.apply(this,arguments)},Stack.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._head=void 0,this.__hash=void 0,this.__altered=!0,this):emptyStack()},Stack.prototype.slice=function(s,o){if(wholeSlice(s,o,this.size))return this;var i=resolveBegin(s,this.size);if(resolveEnd(o,this.size)!==this.size)return IndexedCollection.prototype.slice.call(this,s,o);for(var a=this.size-i,u=this._head;i--;)u=u.next;return this.__ownerID?(this.size=a,this._head=u,this.__hash=void 0,this.__altered=!0,this):makeStack(a,u)},Stack.prototype.__ensureOwner=function(s){return s===this.__ownerID?this:s?makeStack(this.size,this._head,s,this.__hash):(this.__ownerID=s,this.__altered=!1,this)},Stack.prototype.__iterate=function(s,o){if(o)return this.reverse().__iterate(s);for(var i=0,a=this._head;a&&!1!==s(a.value,i++,this);)a=a.next;return i},Stack.prototype.__iterator=function(s,o){if(o)return this.reverse().__iterator(s);var i=0,a=this._head;return new Iterator((function(){if(a){var o=a.value;return a=a.next,iteratorValue(s,i++,o)}return iteratorDone()}))},Stack.isStack=isStack;var at,ct="@@__IMMUTABLE_STACK__@@",lt=Stack.prototype;function makeStack(s,o,i,a){var u=Object.create(lt);return u.size=s,u._head=o,u.__ownerID=i,u.__hash=a,u.__altered=!1,u}function emptyStack(){return at||(at=makeStack(0))}function mixin(s,o){var keyCopier=function(i){s.prototype[i]=o[i]};return Object.keys(o).forEach(keyCopier),Object.getOwnPropertySymbols&&Object.getOwnPropertySymbols(o).forEach(keyCopier),s}lt[ct]=!0,lt.withMutations=$e.withMutations,lt.asMutable=$e.asMutable,lt.asImmutable=$e.asImmutable,lt.wasAltered=$e.wasAltered,Iterable.Iterator=Iterator,mixin(Iterable,{toArray:function(){assertNotInfinite(this.size);var s=new Array(this.size||0);return this.valueSeq().__iterate((function(o,i){s[i]=o})),s},toIndexedSeq:function(){return new ToIndexedSequence(this)},toJS:function(){return this.toSeq().map((function(s){return s&&"function"==typeof 
s.toJS?s.toJS():s})).__toJS()},toJSON:function(){return this.toSeq().map((function(s){return s&&"function"==typeof s.toJSON?s.toJSON():s})).__toJS()},toKeyedSeq:function(){return new ToKeyedSequence(this,!0)},toMap:function(){return Map(this.toKeyedSeq())},toObject:function(){assertNotInfinite(this.size);var s={};return this.__iterate((function(o,i){s[i]=o})),s},toOrderedMap:function(){return OrderedMap(this.toKeyedSeq())},toOrderedSet:function(){return OrderedSet(isKeyed(this)?this.valueSeq():this)},toSet:function(){return Set(isKeyed(this)?this.valueSeq():this)},toSetSeq:function(){return new ToSetSequence(this)},toSeq:function(){return isIndexed(this)?this.toIndexedSeq():isKeyed(this)?this.toKeyedSeq():this.toSetSeq()},toStack:function(){return Stack(isKeyed(this)?this.valueSeq():this)},toList:function(){return List(isKeyed(this)?this.valueSeq():this)},toString:function(){return"[Iterable]"},__toString:function(s,o){return 0===this.size?s+o:s+" "+this.toSeq().map(this.__toStringMapper).join(", ")+" "+o},concat:function(){return reify(this,concatFactory(this,s.call(arguments,0)))},includes:function(s){return this.some((function(o){return is(o,s)}))},entries:function(){return this.__iterator(V)},every:function(s,o){assertNotInfinite(this.size);var i=!0;return this.__iterate((function(a,u,_){if(!s.call(o,a,u,_))return i=!1,!1})),i},filter:function(s,o){return reify(this,filterFactory(this,s,o,!0))},find:function(s,o,i){var a=this.findEntry(s,o);return a?a[1]:i},forEach:function(s,o){return assertNotInfinite(this.size),this.__iterate(o?s.bind(o):s)},join:function(s){assertNotInfinite(this.size),s=void 0!==s?""+s:",";var o="",i=!0;return this.__iterate((function(a){i?i=!1:o+=s,o+=null!=a?a.toString():""})),o},keys:function(){return this.__iterator($)},map:function(s,o){return reify(this,mapFactory(this,s,o))},reduce:function(s,o,i){var a,u;return assertNotInfinite(this.size),arguments.length<2?u=!0:a=o,this.__iterate((function(o,_,w){u?(u=!1,a=o):a=s.call(i,a,o,_,w)})),a},reduceRight:function(s,o,i){var a=this.toKeyedSeq().reverse();return a.reduce.apply(a,arguments)},reverse:function(){return reify(this,reverseFactory(this,!0))},slice:function(s,o){return reify(this,sliceFactory(this,s,o,!0))},some:function(s,o){return!this.every(not(s),o)},sort:function(s){return reify(this,sortFactory(this,s))},values:function(){return this.__iterator(U)},butLast:function(){return this.slice(0,-1)},isEmpty:function(){return void 0!==this.size?0===this.size:!this.some((function(){return!0}))},count:function(s,o){return ensureSize(s?this.toSeq().filter(s,o):this)},countBy:function(s,o){return countByFactory(this,s,o)},equals:function(s){return deepEqual(this,s)},entrySeq:function(){var s=this;if(s._cache)return new ArraySeq(s._cache);var o=s.toSeq().map(entryMapper).toIndexedSeq();return o.fromEntrySeq=function(){return s.toSeq()},o},filterNot:function(s,o){return this.filter(not(s),o)},findEntry:function(s,o,i){var a=i;return this.__iterate((function(i,u,_){if(s.call(o,i,u,_))return a=[u,i],!1})),a},findKey:function(s,o){var i=this.findEntry(s,o);return i&&i[0]},findLast:function(s,o,i){return this.toKeyedSeq().reverse().find(s,o,i)},findLastEntry:function(s,o,i){return this.toKeyedSeq().reverse().findEntry(s,o,i)},findLastKey:function(s,o){return this.toKeyedSeq().reverse().findKey(s,o)},first:function(){return this.find(returnTrue)},flatMap:function(s,o){return reify(this,flatMapFactory(this,s,o))},flatten:function(s){return reify(this,flattenFactory(this,s,!0))},fromEntrySeq:function(){return new 
FromEntriesSequence(this)},get:function(s,o){return this.find((function(o,i){return is(i,s)}),void 0,o)},getIn:function(s,o){for(var i,a=this,u=forceIterator(s);!(i=u.next()).done;){var _=i.value;if((a=a&&a.get?a.get(_,j):j)===j)return o}return a},groupBy:function(s,o){return groupByFactory(this,s,o)},has:function(s){return this.get(s,j)!==j},hasIn:function(s){return this.getIn(s,j)!==j},isSubset:function(s){return s="function"==typeof s.includes?s:Iterable(s),this.every((function(o){return s.includes(o)}))},isSuperset:function(s){return(s="function"==typeof s.isSubset?s:Iterable(s)).isSubset(this)},keyOf:function(s){return this.findKey((function(o){return is(o,s)}))},keySeq:function(){return this.toSeq().map(keyMapper).toIndexedSeq()},last:function(){return this.toSeq().reverse().first()},lastKeyOf:function(s){return this.toKeyedSeq().reverse().keyOf(s)},max:function(s){return maxFactory(this,s)},maxBy:function(s,o){return maxFactory(this,o,s)},min:function(s){return maxFactory(this,s?neg(s):defaultNegComparator)},minBy:function(s,o){return maxFactory(this,o?neg(o):defaultNegComparator,s)},rest:function(){return this.slice(1)},skip:function(s){return this.slice(Math.max(0,s))},skipLast:function(s){return reify(this,this.toSeq().reverse().skip(s).reverse())},skipWhile:function(s,o){return reify(this,skipWhileFactory(this,s,o,!0))},skipUntil:function(s,o){return this.skipWhile(not(s),o)},sortBy:function(s,o){return reify(this,sortFactory(this,o,s))},take:function(s){return this.slice(0,Math.max(0,s))},takeLast:function(s){return reify(this,this.toSeq().reverse().take(s).reverse())},takeWhile:function(s,o){return reify(this,takeWhileFactory(this,s,o))},takeUntil:function(s,o){return this.takeWhile(not(s),o)},valueSeq:function(){return this.toIndexedSeq()},hashCode:function(){return this.__hash||(this.__hash=hashIterable(this))}});var ut=Iterable.prototype;ut[o]=!0,ut[Z]=ut.values,ut.__toJS=ut.toArray,ut.__toStringMapper=quoteString,ut.inspect=ut.toSource=function(){return this.toString()},ut.chain=ut.flatMap,ut.contains=ut.includes,mixin(KeyedIterable,{flip:function(){return reify(this,flipFactory(this))},mapEntries:function(s,o){var i=this,a=0;return reify(this,this.toSeq().map((function(u,_){return s.call(o,[_,u],a++,i)})).fromEntrySeq())},mapKeys:function(s,o){var i=this;return reify(this,this.toSeq().flip().map((function(a,u){return s.call(o,a,u,i)})).flip())}});var pt=KeyedIterable.prototype;function keyMapper(s,o){return o}function entryMapper(s,o){return[o,s]}function not(s){return function(){return!s.apply(this,arguments)}}function neg(s){return function(){return-s.apply(this,arguments)}}function quoteString(s){return"string"==typeof s?JSON.stringify(s):String(s)}function defaultZipper(){return arrCopy(arguments)}function defaultNegComparator(s,o){return so?-1:0}function hashIterable(s){if(s.size===1/0)return 0;var o=isOrdered(s),i=isKeyed(s),a=o?1:0;return murmurHashOfSize(s.__iterate(i?o?function(s,o){a=31*a+hashMerge(hash(s),hash(o))|0}:function(s,o){a=a+hashMerge(hash(s),hash(o))|0}:o?function(s){a=31*a+hash(s)|0}:function(s){a=a+hash(s)|0}),a)}function murmurHashOfSize(s,o){return o=le(o,3432918353),o=le(o<<15|o>>>-15,461845907),o=le(o<<13|o>>>-13,5),o=le((o=o+3864292196^s)^o>>>16,2246822507),o=smi((o=le(o^o>>>13,3266489909))^o>>>16)}function hashMerge(s,o){return s^o+2654435769+(s<<6)+(s>>2)}return pt[i]=!0,pt[Z]=ut.entries,pt.__toJS=ut.toObject,pt.__toStringMapper=function(s,o){return JSON.stringify(o)+": "+quoteString(s)},mixin(IndexedIterable,{toKeyedSeq:function(){return 
new ToKeyedSequence(this,!1)},filter:function(s,o){return reify(this,filterFactory(this,s,o,!1))},findIndex:function(s,o){var i=this.findEntry(s,o);return i?i[0]:-1},indexOf:function(s){var o=this.keyOf(s);return void 0===o?-1:o},lastIndexOf:function(s){var o=this.lastKeyOf(s);return void 0===o?-1:o},reverse:function(){return reify(this,reverseFactory(this,!1))},slice:function(s,o){return reify(this,sliceFactory(this,s,o,!1))},splice:function(s,o){var i=arguments.length;if(o=Math.max(0|o,0),0===i||2===i&&!o)return this;s=resolveBegin(s,s<0?this.count():this.size);var a=this.slice(0,s);return reify(this,1===i?a:a.concat(arrCopy(arguments,2),this.slice(s+o)))},findLastIndex:function(s,o){var i=this.findLastEntry(s,o);return i?i[0]:-1},first:function(){return this.get(0)},flatten:function(s){return reify(this,flattenFactory(this,s,!1))},get:function(s,o){return(s=wrapIndex(this,s))<0||this.size===1/0||void 0!==this.size&&s>this.size?o:this.find((function(o,i){return i===s}),void 0,o)},has:function(s){return(s=wrapIndex(this,s))>=0&&(void 0!==this.size?this.size===1/0||s{"use strict";i(71340);var a=i(92046);s.exports=a.Object.assign},9957:(s,o,i)=>{"use strict";var a=Function.prototype.call,u=Object.prototype.hasOwnProperty,_=i(66743);s.exports=_.call(a,u)},9999:(s,o,i)=>{var a=i(37217),u=i(83729),_=i(16547),w=i(74733),x=i(43838),C=i(93290),j=i(23007),L=i(92271),B=i(48948),$=i(50002),U=i(83349),V=i(5861),z=i(76189),Y=i(77199),Z=i(35529),ee=i(56449),ie=i(3656),ae=i(87730),ce=i(23805),le=i(38440),pe=i(95950),de=i(37241),fe="[object Arguments]",ye="[object Function]",be="[object Object]",_e={};_e[fe]=_e["[object Array]"]=_e["[object ArrayBuffer]"]=_e["[object DataView]"]=_e["[object Boolean]"]=_e["[object Date]"]=_e["[object Float32Array]"]=_e["[object Float64Array]"]=_e["[object Int8Array]"]=_e["[object Int16Array]"]=_e["[object Int32Array]"]=_e["[object Map]"]=_e["[object Number]"]=_e[be]=_e["[object RegExp]"]=_e["[object Set]"]=_e["[object String]"]=_e["[object Symbol]"]=_e["[object Uint8Array]"]=_e["[object Uint8ClampedArray]"]=_e["[object Uint16Array]"]=_e["[object Uint32Array]"]=!0,_e["[object Error]"]=_e[ye]=_e["[object WeakMap]"]=!1,s.exports=function baseClone(s,o,i,Se,we,xe){var Pe,Te=1&o,Re=2&o,$e=4&o;if(i&&(Pe=we?i(s,Se,we,xe):i(s)),void 0!==Pe)return Pe;if(!ce(s))return s;var qe=ee(s);if(qe){if(Pe=z(s),!Te)return j(s,Pe)}else{var ze=V(s),We=ze==ye||"[object GeneratorFunction]"==ze;if(ie(s))return C(s,Te);if(ze==be||ze==fe||We&&!we){if(Pe=Re||We?{}:Z(s),!Te)return Re?B(s,x(Pe,s)):L(s,w(Pe,s))}else{if(!_e[ze])return we?s:{};Pe=Y(s,ze,Te)}}xe||(xe=new a);var He=xe.get(s);if(He)return He;xe.set(s,Pe),le(s)?s.forEach((function(a){Pe.add(baseClone(a,o,i,a,s,xe))})):ae(s)&&s.forEach((function(a,u){Pe.set(u,baseClone(a,o,i,u,s,xe))}));var Ye=qe?void 0:($e?Re?U:$:Re?de:pe)(s);return u(Ye||s,(function(a,u){Ye&&(a=s[u=a]),_(Pe,u,baseClone(a,o,i,u,s,xe))})),Pe}},10023:(s,o,i)=>{const 
a=i(6205),INTS=()=>[{type:a.RANGE,from:48,to:57}],WORDS=()=>[{type:a.CHAR,value:95},{type:a.RANGE,from:97,to:122},{type:a.RANGE,from:65,to:90}].concat(INTS()),WHITESPACE=()=>[{type:a.CHAR,value:9},{type:a.CHAR,value:10},{type:a.CHAR,value:11},{type:a.CHAR,value:12},{type:a.CHAR,value:13},{type:a.CHAR,value:32},{type:a.CHAR,value:160},{type:a.CHAR,value:5760},{type:a.RANGE,from:8192,to:8202},{type:a.CHAR,value:8232},{type:a.CHAR,value:8233},{type:a.CHAR,value:8239},{type:a.CHAR,value:8287},{type:a.CHAR,value:12288},{type:a.CHAR,value:65279}];o.words=()=>({type:a.SET,set:WORDS(),not:!1}),o.notWords=()=>({type:a.SET,set:WORDS(),not:!0}),o.ints=()=>({type:a.SET,set:INTS(),not:!1}),o.notInts=()=>({type:a.SET,set:INTS(),not:!0}),o.whitespace=()=>({type:a.SET,set:WHITESPACE(),not:!1}),o.notWhitespace=()=>({type:a.SET,set:WHITESPACE(),not:!0}),o.anyChar=()=>({type:a.SET,set:[{type:a.CHAR,value:10},{type:a.CHAR,value:13},{type:a.CHAR,value:8232},{type:a.CHAR,value:8233}],not:!0})},10043:(s,o,i)=>{"use strict";var a=i(54018),u=String,_=TypeError;s.exports=function(s){if(a(s))return s;throw new _("Can't set "+u(s)+" as a prototype")}},10076:s=>{"use strict";s.exports=Function.prototype.call},10124:(s,o,i)=>{var a=i(9325);s.exports=function(){return a.Date.now()}},10300:(s,o,i)=>{"use strict";var a=i(13930),u=i(82159),_=i(36624),w=i(4640),x=i(73448),C=TypeError;s.exports=function(s,o){var i=arguments.length<2?x(s):o;if(u(i))return _(a(i,s));throw new C(w(s)+" is not iterable")}},10316:(s,o,i)=>{const a=i(2404),u=i(55973),_=i(92340);class Element{constructor(s,o,i){o&&(this.meta=o),i&&(this.attributes=i),this.content=s}freeze(){Object.isFrozen(this)||(this._meta&&(this.meta.parent=this,this.meta.freeze()),this._attributes&&(this.attributes.parent=this,this.attributes.freeze()),this.children.forEach((s=>{s.parent=this,s.freeze()}),this),this.content&&Array.isArray(this.content)&&Object.freeze(this.content),Object.freeze(this))}primitive(){}clone(){const s=new this.constructor;return s.element=this.element,this.meta.length&&(s._meta=this.meta.clone()),this.attributes.length&&(s._attributes=this.attributes.clone()),this.content?this.content.clone?s.content=this.content.clone():Array.isArray(this.content)?s.content=this.content.map((s=>s.clone())):s.content=this.content:s.content=this.content,s}toValue(){return this.content instanceof Element?this.content.toValue():this.content instanceof u?{key:this.content.key.toValue(),value:this.content.value?this.content.value.toValue():void 0}:this.content&&this.content.map?this.content.map((s=>s.toValue()),this):this.content}toRef(s){if(""===this.id.toValue())throw Error("Cannot create reference to an element that does not contain an ID");const o=new this.RefElement(this.id.toValue());return s&&(o.path=s),o}findRecursive(...s){if(arguments.length>1&&!this.isFrozen)throw new Error("Cannot find recursive with multiple element names without first freezing the element. 
Call `element.freeze()`");const o=s.pop();let i=new _;const append=(s,o)=>(s.push(o),s),checkElement=(s,i)=>{i.element===o&&s.push(i);const a=i.findRecursive(o);return a&&a.reduce(append,s),i.content instanceof u&&(i.content.key&&checkElement(s,i.content.key),i.content.value&&checkElement(s,i.content.value)),s};return this.content&&(this.content.element&&checkElement(i,this.content),Array.isArray(this.content)&&this.content.reduce(checkElement,i)),s.isEmpty||(i=i.filter((o=>{let i=o.parents.map((s=>s.element));for(const o in s){const a=s[o],u=i.indexOf(a);if(-1===u)return!1;i=i.splice(0,u)}return!0}))),i}set(s){return this.content=s,this}equals(s){return a(this.toValue(),s)}getMetaProperty(s,o){if(!this.meta.hasKey(s)){if(this.isFrozen){const s=this.refract(o);return s.freeze(),s}this.meta.set(s,o)}return this.meta.get(s)}setMetaProperty(s,o){this.meta.set(s,o)}get element(){return this._storedElement||"element"}set element(s){this._storedElement=s}get content(){return this._content}set content(s){if(s instanceof Element)this._content=s;else if(s instanceof _)this.content=s.elements;else if("string"==typeof s||"number"==typeof s||"boolean"==typeof s||"null"===s||null==s)this._content=s;else if(s instanceof u)this._content=s;else if(Array.isArray(s))this._content=s.map(this.refract);else{if("object"!=typeof s)throw new Error("Cannot set content to given value");this._content=Object.keys(s).map((o=>new this.MemberElement(o,s[o])))}}get meta(){if(!this._meta){if(this.isFrozen){const s=new this.ObjectElement;return s.freeze(),s}this._meta=new this.ObjectElement}return this._meta}set meta(s){s instanceof this.ObjectElement?this._meta=s:this.meta.set(s||{})}get attributes(){if(!this._attributes){if(this.isFrozen){const s=new this.ObjectElement;return s.freeze(),s}this._attributes=new this.ObjectElement}return this._attributes}set attributes(s){s instanceof this.ObjectElement?this._attributes=s:this.attributes.set(s||{})}get id(){return this.getMetaProperty("id","")}set id(s){this.setMetaProperty("id",s)}get classes(){return this.getMetaProperty("classes",[])}set classes(s){this.setMetaProperty("classes",s)}get title(){return this.getMetaProperty("title","")}set title(s){this.setMetaProperty("title",s)}get description(){return this.getMetaProperty("description","")}set description(s){this.setMetaProperty("description",s)}get links(){return this.getMetaProperty("links",[])}set links(s){this.setMetaProperty("links",s)}get isFrozen(){return Object.isFrozen(this)}get parents(){let{parent:s}=this;const o=new _;for(;s;)o.push(s),s=s.parent;return o}get children(){if(Array.isArray(this.content))return new _(this.content);if(this.content instanceof u){const s=new _([this.content.key]);return this.content.value&&s.push(this.content.value),s}return this.content instanceof Element?new _([this.content]):new _}get recursiveChildren(){const s=new _;return this.children.forEach((o=>{s.push(o),o.recursiveChildren.forEach((o=>{s.push(o)}))})),s}}s.exports=Element},10392:s=>{s.exports=function getValue(s,o){return null==s?void 0:s[o]}},10487:(s,o,i)=>{"use strict";var a=i(96897),u=i(30655),_=i(73126),w=i(12205);s.exports=function callBind(s){var o=_(arguments),i=s.length-(arguments.length-1);return a(o,1+(i>0?i:0),!0)},u?u(s.exports,"apply",{value:w}):s.exports.apply=w},10776:(s,o,i)=>{var a=i(30756),u=i(95950);s.exports=function getMatchData(s){for(var o=u(s),i=o.length;i--;){var _=o[i],w=s[_];o[i]=[_,w,a(w)]}return o}},10866:(s,o,i)=>{const a=i(6048),u=i(92340);class ObjectSlice extends u{map(s,o){return 
this.elements.map((i=>s.bind(o)(i.value,i.key,i)))}filter(s,o){return new ObjectSlice(this.elements.filter((i=>s.bind(o)(i.value,i.key,i))))}reject(s,o){return this.filter(a(s.bind(o)))}forEach(s,o){return this.elements.forEach(((i,a)=>{s.bind(o)(i.value,i.key,i,a)}))}keys(){return this.map(((s,o)=>o.toValue()))}values(){return this.map((s=>s.toValue()))}}s.exports=ObjectSlice},11002:s=>{"use strict";s.exports=Function.prototype.apply},11042:(s,o,i)=>{"use strict";var a=i(85582),u=i(1907),_=i(24443),w=i(87170),x=i(36624),C=u([].concat);s.exports=a("Reflect","ownKeys")||function ownKeys(s){var o=_.f(x(s)),i=w.f;return i?C(o,i(s)):o}},11091:(s,o,i)=>{"use strict";var a=i(45951),u=i(76024),_=i(92361),w=i(62250),x=i(13846).f,C=i(7463),j=i(92046),L=i(28311),B=i(61626),$=i(49724);i(36128);var wrapConstructor=function(s){var Wrapper=function(o,i,a){if(this instanceof Wrapper){switch(arguments.length){case 0:return new s;case 1:return new s(o);case 2:return new s(o,i)}return new s(o,i,a)}return u(s,this,arguments)};return Wrapper.prototype=s.prototype,Wrapper};s.exports=function(s,o){var i,u,U,V,z,Y,Z,ee,ie,ae=s.target,ce=s.global,le=s.stat,pe=s.proto,de=ce?a:le?a[ae]:a[ae]&&a[ae].prototype,fe=ce?j:j[ae]||B(j,ae,{})[ae],ye=fe.prototype;for(V in o)u=!(i=C(ce?V:ae+(le?".":"#")+V,s.forced))&&de&&$(de,V),Y=fe[V],u&&(Z=s.dontCallGetSet?(ie=x(de,V))&&ie.value:de[V]),z=u&&Z?Z:o[V],(i||pe||typeof Y!=typeof z)&&(ee=s.bind&&u?L(z,a):s.wrap&&u?wrapConstructor(z):pe&&w(z)?_(z):z,(s.sham||z&&z.sham||Y&&Y.sham)&&B(ee,"sham",!0),B(fe,V,ee),pe&&($(j,U=ae+"Prototype")||B(j,U,{}),B(j[U],V,z),s.real&&ye&&(i||!ye[V])&&B(ye,V,z)))}},11287:s=>{s.exports=function getHolder(s){return s.placeholder}},11331:(s,o,i)=>{var a=i(72552),u=i(28879),_=i(40346),w=Function.prototype,x=Object.prototype,C=w.toString,j=x.hasOwnProperty,L=C.call(Object);s.exports=function isPlainObject(s){if(!_(s)||"[object Object]"!=a(s))return!1;var o=u(s);if(null===o)return!0;var i=j.call(o,"constructor")&&o.constructor;return"function"==typeof i&&i instanceof i&&C.call(i)==L}},11470:(s,o,i)=>{"use strict";var a=i(1907),u=i(65482),_=i(90160),w=i(74239),x=a("".charAt),C=a("".charCodeAt),j=a("".slice),createMethod=function(s){return function(o,i){var a,L,B=_(w(o)),$=u(i),U=B.length;return $<0||$>=U?s?"":void 0:(a=C(B,$))<55296||a>56319||$+1===U||(L=C(B,$+1))<56320||L>57343?s?x(B,$):a:s?j(B,$,$+2):L-56320+(a-55296<<10)+65536}};s.exports={codeAt:createMethod(!1),charAt:createMethod(!0)}},11842:(s,o,i)=>{var a=i(82819),u=i(9325);s.exports=function createBind(s,o,i){var _=1&o,w=a(s);return function wrapper(){return(this&&this!==u&&this instanceof wrapper?w:s).apply(_?i:this,arguments)}}},12205:(s,o,i)=>{"use strict";var a=i(66743),u=i(11002),_=i(13144);s.exports=function applyBind(){return _(a,u,arguments)}},12242:(s,o,i)=>{const a=i(10316);s.exports=class BooleanElement extends a{constructor(s,o,i){super(s,o,i),this.element="boolean"}primitive(){return"boolean"}}},12507:(s,o,i)=>{var a=i(28754),u=i(49698),_=i(63912),w=i(13222);s.exports=function createCaseFirst(s){return function(o){o=w(o);var i=u(o)?_(o):void 0,x=i?i[0]:o.charAt(0),C=i?a(i,1).join(""):o.slice(1);return x[s]()+C}}},12560:(s,o,i)=>{"use strict";i(99363);var a=i(19287),u=i(45951),_=i(14840),w=i(93742);for(var x in a)_(u[x],x),w[x]=w.Array},12651:(s,o,i)=>{var a=i(74218);s.exports=function getMapData(s,o){var i=s.__data__;return a(o)?i["string"==typeof o?"string":"hash"]:i.map}},12749:(s,o,i)=>{var a=i(81042),u=Object.prototype.hasOwnProperty;s.exports=function hashHas(s){var 
o=this.__data__;return a?void 0!==o[s]:u.call(o,s)}},13144:(s,o,i)=>{"use strict";var a=i(66743),u=i(11002),_=i(10076),w=i(47119);s.exports=w||a.call(_,u)},13222:(s,o,i)=>{var a=i(77556);s.exports=function toString(s){return null==s?"":a(s)}},13846:(s,o,i)=>{"use strict";var a=i(39447),u=i(13930),_=i(22574),w=i(75817),x=i(4993),C=i(70470),j=i(49724),L=i(73648),B=Object.getOwnPropertyDescriptor;o.f=a?B:function getOwnPropertyDescriptor(s,o){if(s=x(s),o=C(o),L)try{return B(s,o)}catch(s){}if(j(s,o))return w(!u(_.f,s,o),s[o])}},13930:(s,o,i)=>{"use strict";var a=i(41505),u=Function.prototype.call;s.exports=a?u.bind(u):function(){return u.apply(u,arguments)}},14248:s=>{s.exports=function arraySome(s,o){for(var i=-1,a=null==s?0:s.length;++i{s.exports=function arrayPush(s,o){for(var i=-1,a=o.length,u=s.length;++i{const a=i(10316);s.exports=class RefElement extends a{constructor(s,o,i){super(s||[],o,i),this.element="ref",this.path||(this.path="element")}get path(){return this.attributes.get("path")}set path(s){this.attributes.set("path",s)}}},14744:s=>{"use strict";var o=function isMergeableObject(s){return function isNonNullObject(s){return!!s&&"object"==typeof s}(s)&&!function isSpecial(s){var o=Object.prototype.toString.call(s);return"[object RegExp]"===o||"[object Date]"===o||function isReactElement(s){return s.$$typeof===i}(s)}(s)};var i="function"==typeof Symbol&&Symbol.for?Symbol.for("react.element"):60103;function cloneUnlessOtherwiseSpecified(s,o){return!1!==o.clone&&o.isMergeableObject(s)?deepmerge(function emptyTarget(s){return Array.isArray(s)?[]:{}}(s),s,o):s}function defaultArrayMerge(s,o,i){return s.concat(o).map((function(s){return cloneUnlessOtherwiseSpecified(s,i)}))}function getKeys(s){return Object.keys(s).concat(function getEnumerableOwnPropertySymbols(s){return Object.getOwnPropertySymbols?Object.getOwnPropertySymbols(s).filter((function(o){return Object.propertyIsEnumerable.call(s,o)})):[]}(s))}function propertyIsOnObject(s,o){try{return o in s}catch(s){return!1}}function mergeObject(s,o,i){var a={};return i.isMergeableObject(s)&&getKeys(s).forEach((function(o){a[o]=cloneUnlessOtherwiseSpecified(s[o],i)})),getKeys(o).forEach((function(u){(function propertyIsUnsafe(s,o){return propertyIsOnObject(s,o)&&!(Object.hasOwnProperty.call(s,o)&&Object.propertyIsEnumerable.call(s,o))})(s,u)||(propertyIsOnObject(s,u)&&i.isMergeableObject(o[u])?a[u]=function getMergeFunction(s,o){if(!o.customMerge)return deepmerge;var i=o.customMerge(s);return"function"==typeof i?i:deepmerge}(u,i)(s[u],o[u],i):a[u]=cloneUnlessOtherwiseSpecified(o[u],i))})),a}function deepmerge(s,i,a){(a=a||{}).arrayMerge=a.arrayMerge||defaultArrayMerge,a.isMergeableObject=a.isMergeableObject||o,a.cloneUnlessOtherwiseSpecified=cloneUnlessOtherwiseSpecified;var u=Array.isArray(i);return u===Array.isArray(s)?u?a.arrayMerge(s,i,a):mergeObject(s,i,a):cloneUnlessOtherwiseSpecified(i,a)}deepmerge.all=function deepmergeAll(s,o){if(!Array.isArray(s))throw new Error("first argument should be an array");return s.reduce((function(s,i){return deepmerge(s,i,o)}),{})};var a=deepmerge;s.exports=a},14792:(s,o,i)=>{var a=i(13222),u=i(55808);s.exports=function capitalize(s){return u(a(s).toLowerCase())}},14840:(s,o,i)=>{"use strict";var a=i(52623),u=i(74284).f,_=i(61626),w=i(49724),x=i(54878),C=i(76264)("toStringTag");s.exports=function(s,o,i,j){var L=i?s:s&&s.prototype;L&&(w(L,C)||u(L,C,{configurable:!0,value:o}),j&&!a&&_(L,"toString",x))}},14974:s=>{s.exports=function safeGet(s,o){if(("constructor"!==o||"function"!=typeof 
s[o])&&"__proto__"!=o)return s[o]}},15287:(s,o)=>{"use strict";var i=Symbol.for("react.element"),a=Symbol.for("react.portal"),u=Symbol.for("react.fragment"),_=Symbol.for("react.strict_mode"),w=Symbol.for("react.profiler"),x=Symbol.for("react.provider"),C=Symbol.for("react.context"),j=Symbol.for("react.forward_ref"),L=Symbol.for("react.suspense"),B=Symbol.for("react.memo"),$=Symbol.for("react.lazy"),U=Symbol.iterator;var V={isMounted:function(){return!1},enqueueForceUpdate:function(){},enqueueReplaceState:function(){},enqueueSetState:function(){}},z=Object.assign,Y={};function E(s,o,i){this.props=s,this.context=o,this.refs=Y,this.updater=i||V}function F(){}function G(s,o,i){this.props=s,this.context=o,this.refs=Y,this.updater=i||V}E.prototype.isReactComponent={},E.prototype.setState=function(s,o){if("object"!=typeof s&&"function"!=typeof s&&null!=s)throw Error("setState(...): takes an object of state variables to update or a function which returns an object of state variables.");this.updater.enqueueSetState(this,s,o,"setState")},E.prototype.forceUpdate=function(s){this.updater.enqueueForceUpdate(this,s,"forceUpdate")},F.prototype=E.prototype;var Z=G.prototype=new F;Z.constructor=G,z(Z,E.prototype),Z.isPureReactComponent=!0;var ee=Array.isArray,ie=Object.prototype.hasOwnProperty,ae={current:null},ce={key:!0,ref:!0,__self:!0,__source:!0};function M(s,o,a){var u,_={},w=null,x=null;if(null!=o)for(u in void 0!==o.ref&&(x=o.ref),void 0!==o.key&&(w=""+o.key),o)ie.call(o,u)&&!ce.hasOwnProperty(u)&&(_[u]=o[u]);var C=arguments.length-2;if(1===C)_.children=a;else if(1{var a=i(96131);s.exports=function arrayIncludes(s,o){return!!(null==s?0:s.length)&&a(s,o,0)>-1}},15340:()=>{},15377:(s,o,i)=>{"use strict";var a=i(92861).Buffer,u=i(64634),_=i(74372),w=ArrayBuffer.isView||function isView(s){try{return _(s),!0}catch(s){return!1}},x="undefined"!=typeof Uint8Array,C="undefined"!=typeof ArrayBuffer&&"undefined"!=typeof Uint8Array,j=C&&(a.prototype instanceof Uint8Array||a.TYPED_ARRAY_SUPPORT);s.exports=function toBuffer(s,o){if(s instanceof a)return s;if("string"==typeof s)return a.from(s,o);if(C&&w(s)){if(0===s.byteLength)return a.alloc(0);if(j){var i=a.from(s.buffer,s.byteOffset,s.byteLength);if(i.byteLength===s.byteLength)return i}var _=s instanceof Uint8Array?s:new Uint8Array(s.buffer,s.byteOffset,s.byteLength),L=a.from(_);if(L.length===s.byteLength)return L}if(x&&s instanceof Uint8Array)return a.from(s);var B=u(s);if(B)for(var $=0;$255||~~U!==U)throw new RangeError("Array items must be numbers in the range 0-255.")}if(B||a.isBuffer(s)&&s.constructor&&"function"==typeof s.constructor.isBuffer&&s.constructor.isBuffer(s))return a.from(s);throw new TypeError('The "data" argument must be a string, an Array, a Buffer, a Uint8Array, or a DataView.')}},15389:(s,o,i)=>{var a=i(93663),u=i(87978),_=i(83488),w=i(56449),x=i(50583);s.exports=function baseIteratee(s){return"function"==typeof s?s:null==s?_:"object"==typeof s?w(s)?u(s[0],s[1]):a(s):x(s)}},15972:(s,o,i)=>{"use strict";var a=i(49724),u=i(62250),_=i(39298),w=i(92522),x=i(57382),C=w("IE_PROTO"),j=Object,L=j.prototype;s.exports=x?j.getPrototypeOf:function(s){var o=_(s);if(a(o,C))return o[C];var i=o.constructor;return u(i)&&o instanceof i?i.prototype:o instanceof j?L:null}},16038:(s,o,i)=>{var a=i(5861),u=i(40346);s.exports=function baseIsSet(s){return u(s)&&"[object Set]"==a(s)}},16426:s=>{s.exports=function(){var s=document.getSelection();if(!s.rangeCount)return function(){};for(var o=document.activeElement,i=[],a=0;a{var 
a=i(43360),u=i(75288),_=Object.prototype.hasOwnProperty;s.exports=function assignValue(s,o,i){var w=s[o];_.call(s,o)&&u(w,i)&&(void 0!==i||o in s)||a(s,o,i)}},16708:(s,o,i)=>{"use strict";var a,u=i(65606);function CorkedRequest(s){var o=this;this.next=null,this.entry=null,this.finish=function(){!function onCorkedFinish(s,o,i){var a=s.entry;s.entry=null;for(;a;){var u=a.callback;o.pendingcb--,u(i),a=a.next}o.corkedRequestsFree.next=s}(o,s)}}s.exports=Writable,Writable.WritableState=WritableState;var _={deprecate:i(94643)},w=i(40345),x=i(48287).Buffer,C=(void 0!==i.g?i.g:"undefined"!=typeof window?window:"undefined"!=typeof self?self:{}).Uint8Array||function(){};var j,L=i(75896),B=i(65291).getHighWaterMark,$=i(86048).F,U=$.ERR_INVALID_ARG_TYPE,V=$.ERR_METHOD_NOT_IMPLEMENTED,z=$.ERR_MULTIPLE_CALLBACK,Y=$.ERR_STREAM_CANNOT_PIPE,Z=$.ERR_STREAM_DESTROYED,ee=$.ERR_STREAM_NULL_VALUES,ie=$.ERR_STREAM_WRITE_AFTER_END,ae=$.ERR_UNKNOWN_ENCODING,ce=L.errorOrDestroy;function nop(){}function WritableState(s,o,_){a=a||i(25382),s=s||{},"boolean"!=typeof _&&(_=o instanceof a),this.objectMode=!!s.objectMode,_&&(this.objectMode=this.objectMode||!!s.writableObjectMode),this.highWaterMark=B(this,s,"writableHighWaterMark",_),this.finalCalled=!1,this.needDrain=!1,this.ending=!1,this.ended=!1,this.finished=!1,this.destroyed=!1;var w=!1===s.decodeStrings;this.decodeStrings=!w,this.defaultEncoding=s.defaultEncoding||"utf8",this.length=0,this.writing=!1,this.corked=0,this.sync=!0,this.bufferProcessing=!1,this.onwrite=function(s){!function onwrite(s,o){var i=s._writableState,a=i.sync,_=i.writecb;if("function"!=typeof _)throw new z;if(function onwriteStateUpdate(s){s.writing=!1,s.writecb=null,s.length-=s.writelen,s.writelen=0}(i),o)!function onwriteError(s,o,i,a,_){--o.pendingcb,i?(u.nextTick(_,a),u.nextTick(finishMaybe,s,o),s._writableState.errorEmitted=!0,ce(s,a)):(_(a),s._writableState.errorEmitted=!0,ce(s,a),finishMaybe(s,o))}(s,i,a,o,_);else{var w=needFinish(i)||s.destroyed;w||i.corked||i.bufferProcessing||!i.bufferedRequest||clearBuffer(s,i),a?u.nextTick(afterWrite,s,i,w,_):afterWrite(s,i,w,_)}}(o,s)},this.writecb=null,this.writelen=0,this.bufferedRequest=null,this.lastBufferedRequest=null,this.pendingcb=0,this.prefinished=!1,this.errorEmitted=!1,this.emitClose=!1!==s.emitClose,this.autoDestroy=!!s.autoDestroy,this.bufferedRequestCount=0,this.corkedRequestsFree=new CorkedRequest(this)}function Writable(s){var o=this instanceof(a=a||i(25382));if(!o&&!j.call(Writable,this))return new Writable(s);this._writableState=new WritableState(s,this,o),this.writable=!0,s&&("function"==typeof s.write&&(this._write=s.write),"function"==typeof s.writev&&(this._writev=s.writev),"function"==typeof s.destroy&&(this._destroy=s.destroy),"function"==typeof s.final&&(this._final=s.final)),w.call(this)}function doWrite(s,o,i,a,u,_,w){o.writelen=a,o.writecb=w,o.writing=!0,o.sync=!0,o.destroyed?o.onwrite(new Z("write")):i?s._writev(u,o.onwrite):s._write(u,_,o.onwrite),o.sync=!1}function afterWrite(s,o,i,a){i||function onwriteDrain(s,o){0===o.length&&o.needDrain&&(o.needDrain=!1,s.emit("drain"))}(s,o),o.pendingcb--,a(),finishMaybe(s,o)}function clearBuffer(s,o){o.bufferProcessing=!0;var i=o.bufferedRequest;if(s._writev&&i&&i.next){var a=o.bufferedRequestCount,u=new Array(a),_=o.corkedRequestsFree;_.entry=i;for(var w=0,x=!0;i;)u[w]=i,i.isBuf||(x=!1),i=i.next,w+=1;u.allBuffers=x,doWrite(s,o,!0,o.length,u,"",_.finish),o.pendingcb++,o.lastBufferedRequest=null,_.next?(o.corkedRequestsFree=_.next,_.next=null):o.corkedRequestsFree=new 
CorkedRequest(o),o.bufferedRequestCount=0}else{for(;i;){var C=i.chunk,j=i.encoding,L=i.callback;if(doWrite(s,o,!1,o.objectMode?1:C.length,C,j,L),i=i.next,o.bufferedRequestCount--,o.writing)break}null===i&&(o.lastBufferedRequest=null)}o.bufferedRequest=i,o.bufferProcessing=!1}function needFinish(s){return s.ending&&0===s.length&&null===s.bufferedRequest&&!s.finished&&!s.writing}function callFinal(s,o){s._final((function(i){o.pendingcb--,i&&ce(s,i),o.prefinished=!0,s.emit("prefinish"),finishMaybe(s,o)}))}function finishMaybe(s,o){var i=needFinish(o);if(i&&(function prefinish(s,o){o.prefinished||o.finalCalled||("function"!=typeof s._final||o.destroyed?(o.prefinished=!0,s.emit("prefinish")):(o.pendingcb++,o.finalCalled=!0,u.nextTick(callFinal,s,o)))}(s,o),0===o.pendingcb&&(o.finished=!0,s.emit("finish"),o.autoDestroy))){var a=s._readableState;(!a||a.autoDestroy&&a.endEmitted)&&s.destroy()}return i}i(56698)(Writable,w),WritableState.prototype.getBuffer=function getBuffer(){for(var s=this.bufferedRequest,o=[];s;)o.push(s),s=s.next;return o},function(){try{Object.defineProperty(WritableState.prototype,"buffer",{get:_.deprecate((function writableStateBufferGetter(){return this.getBuffer()}),"_writableState.buffer is deprecated. Use _writableState.getBuffer instead.","DEP0003")})}catch(s){}}(),"function"==typeof Symbol&&Symbol.hasInstance&&"function"==typeof Function.prototype[Symbol.hasInstance]?(j=Function.prototype[Symbol.hasInstance],Object.defineProperty(Writable,Symbol.hasInstance,{value:function value(s){return!!j.call(this,s)||this===Writable&&(s&&s._writableState instanceof WritableState)}})):j=function realHasInstance(s){return s instanceof this},Writable.prototype.pipe=function(){ce(this,new Y)},Writable.prototype.write=function(s,o,i){var a=this._writableState,_=!1,w=!a.objectMode&&function _isUint8Array(s){return x.isBuffer(s)||s instanceof C}(s);return w&&!x.isBuffer(s)&&(s=function _uint8ArrayToBuffer(s){return x.from(s)}(s)),"function"==typeof o&&(i=o,o=null),w?o="buffer":o||(o=a.defaultEncoding),"function"!=typeof i&&(i=nop),a.ending?function writeAfterEnd(s,o){var i=new ie;ce(s,i),u.nextTick(o,i)}(this,i):(w||function validChunk(s,o,i,a){var _;return null===i?_=new ee:"string"==typeof i||o.objectMode||(_=new U("chunk",["string","Buffer"],i)),!_||(ce(s,_),u.nextTick(a,_),!1)}(this,a,s,i))&&(a.pendingcb++,_=function writeOrBuffer(s,o,i,a,u,_){if(!i){var w=function decodeChunk(s,o,i){s.objectMode||!1===s.decodeStrings||"string"!=typeof o||(o=x.from(o,i));return o}(o,a,u);a!==w&&(i=!0,u="buffer",a=w)}var C=o.objectMode?1:a.length;o.length+=C;var j=o.length-1))throw new ae(s);return this._writableState.defaultEncoding=s,this},Object.defineProperty(Writable.prototype,"writableBuffer",{enumerable:!1,get:function get(){return this._writableState&&this._writableState.getBuffer()}}),Object.defineProperty(Writable.prototype,"writableHighWaterMark",{enumerable:!1,get:function get(){return this._writableState.highWaterMark}}),Writable.prototype._write=function(s,o,i){i(new V("_write()"))},Writable.prototype._writev=null,Writable.prototype.end=function(s,o,i){var a=this._writableState;return"function"==typeof s?(i=s,s=null,o=null):"function"==typeof o&&(i=o,o=null),null!=s&&this.write(s,o),a.corked&&(a.corked=1,this.uncork()),a.ending||function endWritable(s,o,i){o.ending=!0,finishMaybe(s,o),i&&(o.finished?u.nextTick(i):s.once("finish",i));o.ended=!0,s.writable=!1}(this,a,i),this},Object.defineProperty(Writable.prototype,"writableLength",{enumerable:!1,get:function get(){return 
this._writableState.length}}),Object.defineProperty(Writable.prototype,"destroyed",{enumerable:!1,get:function get(){return void 0!==this._writableState&&this._writableState.destroyed},set:function set(s){this._writableState&&(this._writableState.destroyed=s)}}),Writable.prototype.destroy=L.destroy,Writable.prototype._undestroy=L.undestroy,Writable.prototype._destroy=function(s,o){o(s)}},16946:(s,o,i)=>{"use strict";var a=i(1907),u=i(98828),_=i(45807),w=Object,x=a("".split);s.exports=u((function(){return!w("z").propertyIsEnumerable(0)}))?function(s){return"String"===_(s)?x(s,""):w(s)}:w},16962:(s,o)=>{o.aliasToReal={each:"forEach",eachRight:"forEachRight",entries:"toPairs",entriesIn:"toPairsIn",extend:"assignIn",extendAll:"assignInAll",extendAllWith:"assignInAllWith",extendWith:"assignInWith",first:"head",conforms:"conformsTo",matches:"isMatch",property:"get",__:"placeholder",F:"stubFalse",T:"stubTrue",all:"every",allPass:"overEvery",always:"constant",any:"some",anyPass:"overSome",apply:"spread",assoc:"set",assocPath:"set",complement:"negate",compose:"flowRight",contains:"includes",dissoc:"unset",dissocPath:"unset",dropLast:"dropRight",dropLastWhile:"dropRightWhile",equals:"isEqual",identical:"eq",indexBy:"keyBy",init:"initial",invertObj:"invert",juxt:"over",omitAll:"omit",nAry:"ary",path:"get",pathEq:"matchesProperty",pathOr:"getOr",paths:"at",pickAll:"pick",pipe:"flow",pluck:"map",prop:"get",propEq:"matchesProperty",propOr:"getOr",props:"at",symmetricDifference:"xor",symmetricDifferenceBy:"xorBy",symmetricDifferenceWith:"xorWith",takeLast:"takeRight",takeLastWhile:"takeRightWhile",unapply:"rest",unnest:"flatten",useWith:"overArgs",where:"conformsTo",whereEq:"isMatch",zipObj:"zipObject"},o.aryMethod={1:["assignAll","assignInAll","attempt","castArray","ceil","create","curry","curryRight","defaultsAll","defaultsDeepAll","floor","flow","flowRight","fromPairs","invert","iteratee","memoize","method","mergeAll","methodOf","mixin","nthArg","over","overEvery","overSome","rest","reverse","round","runInContext","spread","template","trim","trimEnd","trimStart","uniqueId","words","zipAll"],2:["add","after","ary","assign","assignAllWith","assignIn","assignInAllWith","at","before","bind","bindAll","bindKey","chunk","cloneDeepWith","cloneWith","concat","conformsTo","countBy","curryN","curryRightN","debounce","defaults","defaultsDeep","defaultTo","delay","difference","divide","drop","dropRight","dropRightWhile","dropWhile","endsWith","eq","every","filter","find","findIndex","findKey","findLast","findLastIndex","findLastKey","flatMap","flatMapDeep","flattenDepth","forEach","forEachRight","forIn","forInRight","forOwn","forOwnRight","get","groupBy","gt","gte","has","hasIn","includes","indexOf","intersection","invertBy","invoke","invokeMap","isEqual","isMatch","join","keyBy","lastIndexOf","lt","lte","map","mapKeys","mapValues","matchesProperty","maxBy","meanBy","merge","mergeAllWith","minBy","multiply","nth","omit","omitBy","overArgs","pad","padEnd","padStart","parseInt","partial","partialRight","partition","pick","pickBy","propertyOf","pull","pullAll","pullAt","random","range","rangeRight","rearg","reject","remove","repeat","restFrom","result","sampleSize","some","sortBy","sortedIndex","sortedIndexOf","sortedLastIndex","sortedLastIndexOf","sortedUniqBy","split","spreadFrom","startsWith","subtract","sumBy","take","takeRight","takeRightWhile","takeWhile","tap","throttle","thru","times","trimChars","trimCharsEnd","trimCharsStart","truncate","union","uniqBy","uniqWith","unset","unzipWith","without","wrap","xor",
"zip","zipObject","zipObjectDeep"],3:["assignInWith","assignWith","clamp","differenceBy","differenceWith","findFrom","findIndexFrom","findLastFrom","findLastIndexFrom","getOr","includesFrom","indexOfFrom","inRange","intersectionBy","intersectionWith","invokeArgs","invokeArgsMap","isEqualWith","isMatchWith","flatMapDepth","lastIndexOfFrom","mergeWith","orderBy","padChars","padCharsEnd","padCharsStart","pullAllBy","pullAllWith","rangeStep","rangeStepRight","reduce","reduceRight","replace","set","slice","sortedIndexBy","sortedLastIndexBy","transform","unionBy","unionWith","update","xorBy","xorWith","zipWith"],4:["fill","setWith","updateWith"]},o.aryRearg={2:[1,0],3:[2,0,1],4:[3,2,0,1]},o.iterateeAry={dropRightWhile:1,dropWhile:1,every:1,filter:1,find:1,findFrom:1,findIndex:1,findIndexFrom:1,findKey:1,findLast:1,findLastFrom:1,findLastIndex:1,findLastIndexFrom:1,findLastKey:1,flatMap:1,flatMapDeep:1,flatMapDepth:1,forEach:1,forEachRight:1,forIn:1,forInRight:1,forOwn:1,forOwnRight:1,map:1,mapKeys:1,mapValues:1,partition:1,reduce:2,reduceRight:2,reject:1,remove:1,some:1,takeRightWhile:1,takeWhile:1,times:1,transform:2},o.iterateeRearg={mapKeys:[1],reduceRight:[1,0]},o.methodRearg={assignInAllWith:[1,0],assignInWith:[1,2,0],assignAllWith:[1,0],assignWith:[1,2,0],differenceBy:[1,2,0],differenceWith:[1,2,0],getOr:[2,1,0],intersectionBy:[1,2,0],intersectionWith:[1,2,0],isEqualWith:[1,2,0],isMatchWith:[2,1,0],mergeAllWith:[1,0],mergeWith:[1,2,0],padChars:[2,1,0],padCharsEnd:[2,1,0],padCharsStart:[2,1,0],pullAllBy:[2,1,0],pullAllWith:[2,1,0],rangeStep:[1,2,0],rangeStepRight:[1,2,0],setWith:[3,1,2,0],sortedIndexBy:[2,1,0],sortedLastIndexBy:[2,1,0],unionBy:[1,2,0],unionWith:[1,2,0],updateWith:[3,1,2,0],xorBy:[1,2,0],xorWith:[1,2,0],zipWith:[1,2,0]},o.methodSpread={assignAll:{start:0},assignAllWith:{start:0},assignInAll:{start:0},assignInAllWith:{start:0},defaultsAll:{start:0},defaultsDeepAll:{start:0},invokeArgs:{start:2},invokeArgsMap:{start:2},mergeAll:{start:0},mergeAllWith:{start:0},partial:{start:1},partialRight:{start:1},without:{start:1},zipAll:{start:0}},o.mutate={array:{fill:!0,pull:!0,pullAll:!0,pullAllBy:!0,pullAllWith:!0,pullAt:!0,remove:!0,reverse:!0},object:{assign:!0,assignAll:!0,assignAllWith:!0,assignIn:!0,assignInAll:!0,assignInAllWith:!0,assignInWith:!0,assignWith:!0,defaults:!0,defaultsAll:!0,defaultsDeep:!0,defaultsDeepAll:!0,merge:!0,mergeAll:!0,mergeAllWith:!0,mergeWith:!0},set:{set:!0,setWith:!0,unset:!0,update:!0,updateWith:!0}},o.realToAlias=function(){var s=Object.prototype.hasOwnProperty,i=o.aliasToReal,a={};for(var u in i){var _=i[u];s.call(a,_)?a[_].push(u):a[_]=[u]}return 
a}(),o.remap={assignAll:"assign",assignAllWith:"assignWith",assignInAll:"assignIn",assignInAllWith:"assignInWith",curryN:"curry",curryRightN:"curryRight",defaultsAll:"defaults",defaultsDeepAll:"defaultsDeep",findFrom:"find",findIndexFrom:"findIndex",findLastFrom:"findLast",findLastIndexFrom:"findLastIndex",getOr:"get",includesFrom:"includes",indexOfFrom:"indexOf",invokeArgs:"invoke",invokeArgsMap:"invokeMap",lastIndexOfFrom:"lastIndexOf",mergeAll:"merge",mergeAllWith:"mergeWith",padChars:"pad",padCharsEnd:"padEnd",padCharsStart:"padStart",propertyOf:"get",rangeStep:"range",rangeStepRight:"rangeRight",restFrom:"rest",spreadFrom:"spread",trimChars:"trim",trimCharsEnd:"trimEnd",trimCharsStart:"trimStart",zipAll:"zip"},o.skipFixed={castArray:!0,flow:!0,flowRight:!0,iteratee:!0,mixin:!0,rearg:!0,runInContext:!0},o.skipRearg={add:!0,assign:!0,assignIn:!0,bind:!0,bindKey:!0,concat:!0,difference:!0,divide:!0,eq:!0,gt:!0,gte:!0,isEqual:!0,lt:!0,lte:!0,matchesProperty:!0,merge:!0,multiply:!0,overArgs:!0,partial:!0,partialRight:!0,propertyOf:!0,random:!0,range:!0,rangeRight:!0,subtract:!0,zip:!0,zipObject:!0,zipObjectDeep:!0}},17255:(s,o,i)=>{var a=i(47422);s.exports=function basePropertyDeep(s){return function(o){return a(o,s)}}},17285:s=>{function source(s){return s?"string"==typeof s?s:s.source:null}function lookahead(s){return concat("(?=",s,")")}function concat(...s){return s.map((s=>source(s))).join("")}function either(...s){return"("+s.map((s=>source(s))).join("|")+")"}s.exports=function xml(s){const o=concat(/[A-Z_]/,function optional(s){return concat("(",s,")?")}(/[A-Z0-9_.-]*:/),/[A-Z0-9_.-]*/),i={className:"symbol",begin:/&[a-z]+;|[0-9]+;|[a-f0-9]+;/},a={begin:/\s/,contains:[{className:"meta-keyword",begin:/#?[a-z_][a-z1-9_-]+/,illegal:/\n/}]},u=s.inherit(a,{begin:/\(/,end:/\)/}),_=s.inherit(s.APOS_STRING_MODE,{className:"meta-string"}),w=s.inherit(s.QUOTE_STRING_MODE,{className:"meta-string"}),x={endsWithParent:!0,illegal:/,relevance:0,contains:[{className:"attr",begin:/[A-Za-z0-9._:-]+/,relevance:0},{begin:/=\s*/,relevance:0,contains:[{className:"string",endsParent:!0,variants:[{begin:/"/,end:/"/,contains:[i]},{begin:/'/,end:/'/,contains:[i]},{begin:/[^\s"'=<>`]+/}]}]}]};return{name:"HTML, XML",aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf","svg"],case_insensitive:!0,contains:[{className:"meta",begin://,relevance:10,contains:[a,w,_,u,{begin:/\[/,end:/\]/,contains:[{className:"meta",begin://,contains:[a,u,w,_]}]}]},s.COMMENT(//,{relevance:10}),{begin://,relevance:10},i,{className:"meta",begin:/<\?xml/,end:/\?>/,relevance:10},{className:"tag",begin:/