diff --git a/.github/workflows/jan-docs.yml b/.github/workflows/jan-docs.yml
index 3e92903c5..e6cc0977b 100644
--- a/.github/workflows/jan-docs.yml
+++ b/.github/workflows/jan-docs.yml
@@ -53,6 +53,9 @@ jobs:
- name: Install dependencies
working-directory: docs
run: yarn install
+ - name: Clean output directory
+ working-directory: docs
+ run: rm -rf out/* .next/*
- name: Build website
working-directory: docs
run: export NODE_ENV=production && yarn build && cp _redirects out/_redirects && cp _headers out/_headers
diff --git a/.github/workflows/jan-server-web-ci.yml b/.github/workflows/jan-server-web-ci-dev.yml
similarity index 92%
rename from .github/workflows/jan-server-web-ci.yml
rename to .github/workflows/jan-server-web-ci-dev.yml
index 921d77bee..95dd5f91f 100644
--- a/.github/workflows/jan-server-web-ci.yml
+++ b/.github/workflows/jan-server-web-ci-dev.yml
@@ -11,6 +11,8 @@ on:
jobs:
build-and-preview:
runs-on: [ubuntu-24-04-docker]
+ env:
+ JAN_API_BASE: "https://api-dev.jan.ai/v1"
permissions:
pull-requests: write
contents: write
@@ -50,7 +52,7 @@ jobs:
- name: Build docker image
run: |
- docker build -t ${{ steps.vars.outputs.FULL_IMAGE }} .
+ docker build --build-arg JAN_API_BASE=${{ env.JAN_API_BASE }} -t ${{ steps.vars.outputs.FULL_IMAGE }} .
- name: Push docker image
if: github.event_name == 'push'
diff --git a/.github/workflows/jan-server-web-cicd-prod.yml b/.github/workflows/jan-server-web-ci-prod.yml
similarity index 91%
rename from .github/workflows/jan-server-web-cicd-prod.yml
rename to .github/workflows/jan-server-web-ci-prod.yml
index de1a07697..dda1f3672 100644
--- a/.github/workflows/jan-server-web-cicd-prod.yml
+++ b/.github/workflows/jan-server-web-ci-prod.yml
@@ -13,7 +13,7 @@ jobs:
deployments: write
pull-requests: write
env:
- JAN_API_BASE: "https://api.jan.ai/jan/v1"
+ JAN_API_BASE: "https://api.jan.ai/v1"
GA_MEASUREMENT_ID: "G-YK53MX8M8M"
CLOUDFLARE_PROJECT_NAME: "jan-server-web"
steps:
@@ -42,6 +42,9 @@ jobs:
- name: Install dependencies
run: make config-yarn && yarn install && yarn build:core && make build-web-app
+ env:
+ JAN_API_BASE: ${{ env.JAN_API_BASE }}
+ GA_MEASUREMENT_ID: ${{ env.GA_MEASUREMENT_ID }}
- name: Publish to Cloudflare Pages Production
uses: cloudflare/pages-action@v1
diff --git a/.github/workflows/jan-server-web-ci-stag.yml b/.github/workflows/jan-server-web-ci-stag.yml
new file mode 100644
index 000000000..dda88390b
--- /dev/null
+++ b/.github/workflows/jan-server-web-ci-stag.yml
@@ -0,0 +1,60 @@
+name: Jan Web Server - Build and push image to Harbor Registry (staging)
+
+on:
+ push:
+ branches:
+ - stag-web
+ pull_request:
+ branches:
+ - stag-web
+
+jobs:
+ build-and-preview:
+ runs-on: [ubuntu-24-04-docker]
+ env:
+ JAN_API_BASE: "https://api-stag.jan.ai/v1"
+ permissions:
+ pull-requests: write
+ contents: write
+ steps:
+ - name: Checkout source repo
+ uses: actions/checkout@v4
+
+ - name: Login to Harbor Registry
+ uses: docker/login-action@v3
+ with:
+ registry: registry.menlo.ai
+ username: ${{ secrets.HARBOR_USERNAME }}
+ password: ${{ secrets.HARBOR_PASSWORD }}
+
+ - name: Install dependencies
+ run: |
+ (type -p wget >/dev/null || (sudo apt update && sudo apt install wget -y)) \
+ && sudo mkdir -p -m 755 /etc/apt/keyrings \
+ && out=$(mktemp) && wget -nv -O$out https://cli.github.com/packages/githubcli-archive-keyring.gpg \
+ && cat $out | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
+ && sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
+ && sudo mkdir -p -m 755 /etc/apt/sources.list.d \
+ && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
+ && sudo apt update
+ sudo apt-get install -y jq gettext
+
+ - name: Set image tag
+ id: vars
+ run: |
+ if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+ IMAGE_TAG="web:preview-${{ github.sha }}"
+ else
+ IMAGE_TAG="web:stag-${{ github.sha }}"
+ fi
+ echo "IMAGE_TAG=${IMAGE_TAG}" >> $GITHUB_OUTPUT
+ echo "FULL_IMAGE=registry.menlo.ai/jan-server/${IMAGE_TAG}" >> $GITHUB_OUTPUT
+
+ - name: Build docker image
+ run: |
+ docker build --build-arg JAN_API_BASE=${{ env.JAN_API_BASE }} -t ${{ steps.vars.outputs.FULL_IMAGE }} .
+
+ - name: Push docker image
+ if: github.event_name == 'push'
+ run: |
+ docker push ${{ steps.vars.outputs.FULL_IMAGE }}
diff --git a/.github/workflows/template-tauri-build-windows-x64-external.yml b/.github/workflows/template-tauri-build-windows-x64-external.yml
index 59a200093..ed1d601a3 100644
--- a/.github/workflows/template-tauri-build-windows-x64-external.yml
+++ b/.github/workflows/template-tauri-build-windows-x64-external.yml
@@ -49,8 +49,6 @@ jobs:
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
- jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
- mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
jq '.bundle.windows.signCommand = "echo External build - skipping signature: %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
@@ -59,58 +57,30 @@ jobs:
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------Cargo.toml---------"
cat ./src-tauri/Cargo.toml
-
- generate_build_version() {
- ### Examble
- ### input 0.5.6 output will be 0.5.6 and 0.5.6.0
- ### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
- ### input 0.5.6-1213 output will be 0.5.6 and and 0.5.6.1213
- local new_version="$1"
- local base_version
- local t_value
-
- # Check if it has a "-"
- if [[ "$new_version" == *-* ]]; then
- base_version="${new_version%%-*}" # part before -
- suffix="${new_version#*-}" # part after -
-
- # Check if it is rcX-beta
- if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
- t_value="${BASH_REMATCH[1]}"
- else
- t_value="$suffix"
- fi
- else
- base_version="$new_version"
- t_value="0"
- fi
-
- # Export two values
- new_base_version="$base_version"
- new_build_version="${base_version}.${t_value}"
- }
- generate_build_version ${{ inputs.new_version }}
- sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
- sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
-
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
+
+ # Update product name
+ jq --arg name "Jan-${{ inputs.channel }}" '.productName = $name' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
+ mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
+
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
+
+ echo "---------tauri.conf.json---------"
+ cat ./src-tauri/tauri.conf.json
+
+ # Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
+ echo "------------------"
+ cat ./src-tauri/Cargo.toml
+
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
- sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
- sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
- else
- sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
- sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
+ cat ./package.json
fi
- echo "---------nsis.template---------"
- cat ./src-tauri/tauri.bundle.windows.nsis.template
-
- name: Build app
shell: bash
run: |
diff --git a/.github/workflows/template-tauri-build-windows-x64.yml b/.github/workflows/template-tauri-build-windows-x64.yml
index 958b7c9f7..1f25e5295 100644
--- a/.github/workflows/template-tauri-build-windows-x64.yml
+++ b/.github/workflows/template-tauri-build-windows-x64.yml
@@ -95,47 +95,19 @@ jobs:
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
- jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
- mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
-
+
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------Cargo.toml---------"
cat ./src-tauri/Cargo.toml
- generate_build_version() {
- ### Examble
- ### input 0.5.6 output will be 0.5.6 and 0.5.6.0
- ### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
- ### input 0.5.6-1213 output will be 0.5.6 and and 0.5.6.1213
- local new_version="$1"
- local base_version
- local t_value
-
- # Check if it has a "-"
- if [[ "$new_version" == *-* ]]; then
- base_version="${new_version%%-*}" # part before -
- suffix="${new_version#*-}" # part after -
-
- # Check if it is rcX-beta
- if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
- t_value="${BASH_REMATCH[1]}"
- else
- t_value="$suffix"
- fi
- else
- base_version="$new_version"
- t_value="0"
- fi
-
- # Export two values
- new_base_version="$base_version"
- new_build_version="${base_version}.${t_value}"
- }
- generate_build_version ${{ inputs.new_version }}
- sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
- sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
+ # Add sign command to tauri.windows.conf.json
+ jq '.bundle.windows.signCommand = "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
+ mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
+
+ echo "---------tauri.windows.conf.json---------"
+ cat ./src-tauri/tauri.windows.conf.json
# Temporarily enable devtool on prod build
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
@@ -143,8 +115,13 @@ jobs:
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
+ # Update updater endpoint
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
+
+ # Update product name
+ jq --arg name "Jan-${{ inputs.channel }}" '.productName = $name' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
+ mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
@@ -161,15 +138,7 @@ jobs:
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
-
- sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
- sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
- else
- sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
- sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
fi
- echo "---------nsis.template---------"
- cat ./src-tauri/tauri.bundle.windows.nsis.template
- name: Install AzureSignTool
run: |
diff --git a/Dockerfile b/Dockerfile
index b06262ec5..236aa583c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,9 @@
# Stage 1: Build stage with Node.js and Yarn v4
FROM node:20-alpine AS builder
+ARG JAN_API_BASE=https://api-dev.jan.ai/v1
+ENV JAN_API_BASE=$JAN_API_BASE
+
# Install build dependencies
RUN apk add --no-cache \
make \
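For context only (not part of the patch): a minimal TypeScript sketch of how a web app could pick up the `JAN_API_BASE` value that the Dockerfile now bakes in at build time via `ARG`/`ENV`. The module name and the `process.env` lookup are assumptions for illustration; the real app may resolve the value through its bundler instead.

```typescript
// apiConfig.ts (hypothetical): resolve the API base URL injected at build time.
// Minimal typing so the sketch compiles without Node type definitions.
declare const process: { env: Record<string, string | undefined> }

// Falls back to the same dev endpoint the Dockerfile uses as its ARG default.
export const JAN_API_BASE: string =
  process.env.JAN_API_BASE ?? 'https://api-dev.jan.ai/v1'

// Example: derive a full endpoint URL from the configured base.
export function chatCompletionsUrl(): string {
  return `${JAN_API_BASE}/chat/completions`
}
```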
diff --git a/Makefile b/Makefile
index 1b4289f0c..085e42e74 100644
--- a/Makefile
+++ b/Makefile
@@ -80,14 +80,8 @@ test: lint
cargo test --manifest-path src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
cargo test --manifest-path src-tauri/utils/Cargo.toml
-# Builds and publishes the app
-build-and-publish: install-and-build install-rust-targets
- yarn build
-
# Build
build: install-and-build install-rust-targets
- yarn download:bin
- yarn download:lib
yarn build
clean:
diff --git a/core/src/browser/extensions/engines/AIEngine.ts b/core/src/browser/extensions/engines/AIEngine.ts
index 7a223e468..0e8a75fca 100644
--- a/core/src/browser/extensions/engines/AIEngine.ts
+++ b/core/src/browser/extensions/engines/AIEngine.ts
@@ -13,7 +13,7 @@ export interface chatCompletionRequestMessage {
}
export interface Content {
- type: 'text' | 'input_image' | 'input_audio'
+ type: 'text' | 'image_url' | 'input_audio'
text?: string
image_url?: string
input_audio?: InputAudio
@@ -54,6 +54,8 @@ export type ToolChoice = 'none' | 'auto' | 'required' | ToolCallSpec
export interface chatCompletionRequest {
model: string // Model ID, though for local it might be implicit via sessionInfo
messages: chatCompletionRequestMessage[]
+ thread_id?: string // Thread/conversation ID for context tracking
+ return_progress?: boolean // When true, streamed chunks may include prompt_progress
tools?: Tool[]
tool_choice?: ToolChoice
// Core sampling parameters
@@ -119,6 +121,13 @@ export interface chatCompletionChunkChoice {
finish_reason?: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call' | null
}
+export interface chatCompletionPromptProgress {
+ cache: number // prompt tokens reused from the cache
+ processed: number // prompt tokens processed so far
+ time_ms: number // prompt processing time in milliseconds
+ total: number // total prompt tokens to process
+}
+
export interface chatCompletionChunk {
id: string
object: 'chat.completion.chunk'
@@ -126,6 +135,7 @@ export interface chatCompletionChunk {
model: string
choices: chatCompletionChunkChoice[]
system_fingerprint?: string
+ prompt_progress?: chatCompletionPromptProgress
}
export interface chatCompletionChoice {
@@ -173,6 +183,7 @@ export interface SessionInfo {
model_id: string //name of the model
model_path: string // path of the loaded model
api_key: string
+ mmproj_path?: string // path of the loaded multimodal projector (mmproj), if any
}
export interface UnloadResult {
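To illustrate the new streaming fields, here is a minimal TypeScript sketch of a consumer that reads `prompt_progress` from chunks (available when a request sets `return_progress`). The relative import path and the `onChunk` callback shape are assumptions for the example; only the interfaces added in this diff are used.

```typescript
import type {
  chatCompletionChunk,
  chatCompletionPromptProgress,
} from './AIEngine' // path assumed; adjust to wherever AIEngine.ts lives

// Format the prompt-processing progress carried by a streamed chunk.
function describeProgress(p: chatCompletionPromptProgress): string {
  const pct = p.total > 0 ? Math.round((p.processed / p.total) * 100) : 0
  return `prompt ${p.processed}/${p.total} tokens (${pct}%, ${p.cache} cached, ${p.time_ms} ms)`
}

// Hypothetical per-chunk callback: surface progress before handling deltas.
function onChunk(chunk: chatCompletionChunk): void {
  if (chunk.prompt_progress) {
    console.log(describeProgress(chunk.prompt_progress))
  }
  // ...handle chunk.choices as usual
}
```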
diff --git a/docs/_redirects b/docs/_redirects
index 748da60c5..e69de29bb 100644
--- a/docs/_redirects
+++ b/docs/_redirects
@@ -1,699 +0,0 @@
-/team /about/team 302
-/about/teams /about/team 302
-/about/faq /docs 302
-/about/acknowledgements /docs 302
-/about/community /about 302
-/guides /docs 302
-/docs/troubleshooting/failed-to-fetch /docs/troubleshooting 302
-/guides/troubleshooting/gpu-not-used /docs/troubleshooting#troubleshooting-nvidia-gpu 302
-/guides/troubleshooting /docs/troubleshooting 302
-/docs/troubleshooting/stuck-on-broken-build /docs/troubleshooting 302
-/docs/troubleshooting/somethings-amiss /docs/troubleshooting 302
-/docs/troubleshooting/how-to-get-error-logs /docs/troubleshooting 302
-/docs/troubleshooting/permission-denied /docs/troubleshooting 302
-/docs/troubleshooting/unexpected-token /docs/troubleshooting 302
-/docs/troubleshooting/undefined-issue /docs/troubleshooting 302
-/getting-started/troubleshooting /docs/troubleshooting 302
-/docs/troubleshooting/gpu-not-used /docs/troubleshooting 302
-/guides/integrations/openrouter /docs/remote-models/openrouter 302
-/guides/integrations/continue /integrations/coding/continue-dev 302
-/docs/extension-capabilities /docs/extensions 302
-/guides/using-extensions /docs/extensions 302
-/docs/extension-guides /docs/extensions 302
-/features/extensions /docs/extensions 302
-/integrations/tensorrt /docs/built-in/tensorrt-llm 302
-/guides/using-models/integrate-with-remote-server /docs/remote-inference/generic-openai 302
-/guides/using-models/customize-engine-settings /docs/built-in/llama-cpp 302
-/developers/plugins/azure-openai /docs/remote-models/openai 302
-/docs/api-reference/assistants /api-reference#tag/assistants 302
-/docs/api-reference/models/list /api-reference#tag/models 302
-/docs/api-reference/threads /api-reference#tag/chat 302
-/docs/api-reference/messages /api-reference#tag/messages 302
-/docs/api-reference/models /api-reference#tag/models 302
-/chat /docs/threads 302
-/guides/chatting/manage-history /docs/threads/ 302
-/guides/chatting/start-thread /docs/threads/ 302
-/guides/using-server /docs/local-api/ 302
-/guides/using-server/server /docs/local-api#step-2-srt-and-use-the-built-in-api-server 302
-/docs/get-started /docs 302
-/guides/how-jan-works /about/how-we-work 302
-/acknowledgements /about/acknowledgements 302
-/community /about/community 302
-/faq /about/faq 302
-/how-we-work /about/how-we-work 302
-/wall-of-love /about/wall-of-love 302
-/guides/troubleshooting/failed-to-fetch /docs/troubleshooting 302
-/docs/troubleshooting/gpu-not-used /docs/troubleshooting 302
-/docs/troubleshooting/failed-to-fetch /docs/troubleshooting 302
-/guides/ /docs 302
-/guides/quickstart/ /docs/quickstart 302
-/guides/models/ /docs/models 302
-/guides/threads/ /docs/threads 302
-/guides/local-api/ /docs/local-api 302
-/guides/advanced/ /docs/settings 302
-/guides/engines/llamacpp/ /docs/built-in/llama-cpp 302
-/guides/engines/tensorrt-llm/ /docs/built-in/tensorrt-llm 302
-/guides/engines/lmstudio/ /docs/local-models/lmstudio 302
-/guides/engines/ollama/ /docs/built-in/llama-cpp 302
-/guides/engines/groq/ /docs/remote-models/groq 302
-/guides/engines/mistral/ /docs/remote-models/mistralai 302
-/guides/engines/openai/ /docs/remote-models/openai 302
-/guides/engines/remote-server/ /docs/remote-inference/generic-openai 302
-/extensions/ /docs/extensions 302
-/integrations/discord/ /integrations/messaging/llmcord 302
-/discord https://discord.gg/FTk2MvZwJH 301
-/integrations/interpreter/ /integrations/function-calling/interpreter 302
-/integrations/raycast/ /integrations/workflow-automation/raycast 302
-/docs/integrations/raycast /integrations/workflow-automation/raycast 302
-/docs/integrations /integrations 302
-/docs/engineering/files/ /docs 302
-/integrations/openrouter/ /docs/remote-models/openrouter 302
-/integrations/continue/ /integrations/coding/continue-dev 302
-/troubleshooting/ /docs/troubleshooting 302
-/changelog/changelog-v0.4.9/ /changelog 302
-/changelog/changelog-v0.4.8/ /changelog 302
-/changelog/changelog-v0.4.7/ /changelog 302
-/changelog/changelog-v0.4.6/ /changelog 302
-/changelog/changelog-v0.4.5/ /changelog 302
-/changelog/changelog-v0.4.4/ /changelog 302
-/changelog/changelog-v0.4.3/ /changelog 302
-/changelog/changelog-v0.4.2/ /changelog 302
-/changelog/changelog-v0.4.1/ /changelog 302
-/changelog/changelog-v0.4.0/ /changelog 302
-/changelog/changelog-v0.3.3/ /changelog 302
-/changelog/changelog-v0.3.2/ /changelog 302
-/changelog/changelog-v0.3.1/ /changelog 302
-/changelog/changelog-v0.3.0/ /changelog 302
-/changelog/changelog-v0.2.3/ /changelog 302
-/changelog/changelog-v0.2.2/ /changelog 302
-/changelog/changelog-v0.2.1/ /changelog 302
-/changelog/changelog-v0.2.0/ /changelog 302
-/team/ /about/team 302
-/team/contributor-program/ /about/team 302
-/team/join-us/ /about/team 302
-/how-we-work/ /about/how-we-work 302
-/how-we-work/strategy/ /about/how-we-work/strategy 302
-/how-we-work/project-management/ /about/how-we-work/project-management 302
-/engineering/ /about/how-we-work/engineering 302
-/engineering/ci-cd/ /about/how-we-work/engineering/ci-cd 302
-/engineering/qa/ /about/how-we-work/engineering/qa 302
-/how-we-work/product-design/ /about 302
-/about/how-we-work/product-design /about 302
-/how-we-work/analytics/ /about/how-we-work/analytics 302
-/how-we-work/website-docs/ /about/how-we-work/website-docs 302
-/blog/postmortems/january-10-2024-bitdefender-false-positive-flag/ /post/bitdefender 302
-/guides/error-codes/something-amiss/ /docs/troubleshooting#somethings-amiss 302
-/guides/error-codes/how-to-get-error-logs/ /docs/troubleshooting#how-to-get-error-logs 302
-/guides/chatting/ /docs/threads 302
-/guides/integration/openinterpreter/ /integrations/function-calling/interpreter 302
-/developer/build-assistant/ /docs/assistants 302
-/guides/integrations/ /integrations 302
-/specs/hub/ /docs 302
-/install/windows/ /docs/desktop/windows 302
-/install/linux/ /docs/desktop/linux 302
-/install/nightly/ /docs/desktop/windows 302
-/docs/engineering/fine-tuning/ /docs 302
-/developer/assistant/ /docs/assistants 302
-/guides/common-error/broken-build/ /docs/troubleshooting#broken-build 302
-/guides/using-server/using-server/ /docs/local-api 302
-/guides/integrations/azure-openai-service/ /docs/remote-models/openai 302
-/specs/messages/ /docs/threads 302
-/docs/engineering/models/ /docs/models 302
-/docs/specs/assistants/ /docs/assistants 302
-/docs/engineering/chats/ /docs/threads 302
-/guides/using-extensions/extension-settings/ /docs/extensions 302
-/guides/models/customize-engine/ /docs/models 302
-/guides/integration/mistral/ /docs/remote-models/mistralai 302
-/guides/common-error/ /docs/troubleshooting 302
-/guides/integrations/ollama/ /docs/local-models/ollama 302
-/server-suite/ /api-reference 302
-/guides/integrations/lmstudio/ /docs/local-models/lmstudio 302
-/guides/integrations/mistral-ai/ /docs/remote-models/mistralai 302
-/guides/start-server/ /docs/local-api 302
-/guides/changelog/ /changelog 302
-/guides/models-list/ /docs/models 302
-/guides/thread/ /docs/threads 302
-/docs/engineering/messages/ /docs/threads 302
-/guides/faqs/ /about/faq 302
-/docs/integrations/openrouter/ /docs/remote-models/openrouter 302
-/guides/windows /docs/desktop/windows 302
-/docs/integrations/ollama/ /docs/local-models/ollama 302
-/api/overview/ /api-reference 302
-/docs/extension-guides/ /docs/extensions 302
-/specs/settings/ /docs 302
-/docs/UI/ /docs 302
-/guides/using-models/import-models-using-absolute-filepath/ /docs/models 302
-/install/docker/ /docs/desktop 302
-/install/ /docs/desktop 302
-/install/from-source/ /docs/desktop 302
-/docs/installation/server /docs/desktop 302
-/v1/models /docs/models 302
-/guides/advanced-settings/ /docs/settings 302
-/guides/using-models/install-from-hub/ /docs/models/manage-models#download-from-jan-hub 302
-/guides/using-models/import-manually/ /docs/models 302
-/docs/team/contributor-program/ /about/team 302
-/docs/modules/models /docs/models 302
-/getting-started/install/linux /docs/desktop/linux 302
-/guides/chatting/start-thread/ /docs/threads 302
-/api/files/ /docs 302
-/specs/threads/ /docs/threads 302
-/about/brand-assets /about 302
-/guides/chatting/upload-images/ /docs/threads 302
-/guides/using-models/customize-models/ /docs/models 302
-/docs/modules/models/ /docs/models 302
-/getting-started/install/linux/ /docs/desktop/linux 302
-/specs/chats/ /docs/threads 302
-/specs/engine/ /docs 302
-/specs/data-structures /docs 302
-/docs/extension-capabilities/ /docs/extensions 302
-/docs/get-started/use-local-server/ /docs/local-api 302
-/guides/how-jan-works/ /about/how-we-work 302
-/guides/install/cloud-native /docs/desktop 302
-/guides/windows/ /docs/desktop/windows 302
-/specs/ /docs 302
-/docs/get-started/build-extension/ /docs/extensions 302
-/specs/files/ /docs 302
-/guides/using-models/package-models/ /docs/models 302
-/install/overview/ /docs/desktop/windows 302
-/docs/get-started/extension-anatomy/ /docs/extensions 302
-/docs/get-started/ /docs 302
-/guides/mac/ /docs/desktop/mac 302
-/intro/ /about 302
-/specs/fine-tuning/ /docs 302
-/guides/server/ /docs/desktop 302
-/specs/file-based/ /docs 302
-/docs/extension-guides/monitoring/ /docs/extensions 302
-/api/ /api-reference 302
-/getting-started/build-an-app /docs/quickstart 302
-/features/ai-models/ /docs/models 302
-/reference/store/ /api-reference 302
-/tutorials/build-chat-app /docs/quickstart 302
-/features/acceleration /docs/built-in/llama-cpp 302
-/getting-started/install/mac /docs/desktop/mac 302
-docs/guides/fine-tuning/what-models-can-be-fine-tuned /docs 302
-/docs/specs/threads /docs/threads 302
-/docs/api-reference/fine-tuning /api-reference 302
-/docs/guides/speech-to-text/prompting /docs/quickstart 302
-/docs/guides/legacy-fine-tuning/analyzing-your-fine-tuned-model /docs 302
-/getting-started/install/windows /docs/desktop/windows 302
-/docs/modules/assistants /docs/assistants 302
-/docs/modules/chats /docs/threads 302
-/docs/specs/chats /docs/threads 302
-/docs/modules/files /docs 302
-/tutorials/build-rag-app /docs/tools/retrieval 302
-/docs/models/model-endpoint-compatibility /docs/models 302
-/docs/guides/legacy-fine-tuning/creating-training-data /docs 302
-/docs/specs/models /docs/models 302
-/docs/guides/safety-best-practices/end-user-ids /docs/quickstart 302
-/docs/modules/assistants/ /docs/assistants 302
-/docs/models/overview /docs/models 302
-/docs/api-reference/files /api-reference 302
-/docs/models/tts /docs/models 302
-/docs/guides/fine-tuning /docs 302
-/docs/specs/files /docs 302
-/docs/modules/threads /docs/threads 302
-/guides/linux /docs/desktop/linux 302
-/developer/build-engine/engine-anatomy/ /docs 302
-/developer/engine/ /docs 302
-/docs/product/system-monitor/ /docs 302
-/docs/product/settings/ /docs 302
-/developer/build-assistant/your-first-assistant/ /docs 302
-/engineering/research/ /docs 302
-/guides/troubleshooting/gpu-not-used/ /docs/troubleshooting#troubleshooting-nvidia-gpu 302
-/troubleshooting/gpu-not-used/ /docs/troubleshooting#troubleshooting-nvidia-gpu 302
-/docs/integrations/langchain/ /integrations 302
-/onboarding/ /docs/quickstart 302
-/cortex/docs https://cortex.so/ 301
-/installation/hardware/ /docs/desktop/windows 302
-/docs/features/load-unload /docs 302
-/guides/chatting/upload-docs/ /docs/threads 302
-/developer/build-extension/package-your-assistant/ /docs 302
-/blog/hello-world /blog 302
-/docs/get-started/build-on-mobile/ /docs/quickstart 302
-/ai/anything-v4 /docs 302
-/nitro /docs 302
-/tokenizer /docs 302
-/hardware/examples/3090x1-@dan-jan /docs 302
-/guides/concepts/ /about 302
-/platform/ /docs 302
-/hardware/examples/AMAZON-LINK-HERE /docs 302
-/guides/threads/?productId=openai&prompt=What /docs 302
-/guides/threads/?productId=openjourney&prompt=realistic%20portrait%20of%20an%20gray%20dog,%20bright%20eyes,%20radiant%20and%20ethereal%20intricately%20detailed%20photography,%20cinematic%20lighting,%2050mm%20lens%20with%20bokeh /docs 302
-/guides/threads/?productId=openjourney&prompt=old,%20female%20robot,%20metal,%20rust,%20wisible%20wires,%20destroyed,%20sad,%20dark,%20dirty,%20looking%20at%20viewer,%20portrait,%20photography,%20detailed%20skin,%20realistic,%20photo-realistic,%208k,%20highly%20detailed,%20full%20length%20frame,%20High%20detail%20RAW%20color%20art,%20piercing,%20diffused%20soft%20lighting,%20shallow%20depth%20of%20field,%20sharp%20focus,%20hyperrealism,%20cinematic%20lighting /docs 302
-/guides/threads/?productId=openjourney&prompt=a%20young%20caucasian%20man%20holding%20his%20chin.pablo%20picasso%20style,%20acrylic%20painting,%20trending%20on%20pixiv%20fanbox,%20palette%20knife%20and%20brush.%20strokes /docs 302
-/guides/threads/?productId=airoboros&prompt=Let%27s%20role%20play.%20You%20are%20a%20robot%20in%20a%20post-apocalyptic%20world. /docs 302
-/chat?productId=pirsus-epic-realism /docs 302
-/chat?productId=ether-blu-mix /docs 302
-/chat?productId=deliberate /docs 302
-/chat?productId=wizard_vicuna /docs 302
-/chat?productId=disneypixar /docs 302
-/chat?productId=meina-mix /docs 302
-/chat?productId=anything-v4 /docs 302
-/chat?productId=airoboros /docs 302
-/chat?productId=ghost-mix /docs 302
-/ai/toonyou /docs 302
-/chat?productId=xrica-mix /docs 302
-/ai/openai /docs 302
-/chat?productId=been-you /docs 302
-/chat?productId=toonyou /docs 302
-/handbook/product-and-community/ /about/community 302
-/handbook/contributing-to-jan/how-to-get-involved-and-faq/ /about 302
-/handbook/engineering-exellence/one-the-tools-what-we-use-and-why/ /about 302
-/handbook/from-spaghetti-flinging-to-strategy/how-we-gtm/ /about/how-we-work/strategy 302
-/handbook/product-and-community/our-okrs/ /about 302
-/products-and-innovations/philosophy-behind-product-development/ /about 302
-/handbook/core-contributors/ /about/team 302
-/handbook/contributing-to-jan/feedback-channels/ /about/how-we-work 302
-/handbook/meet-jan/ /docs 302
-/handbook/engineering-exellence/ /about 302
-/blog/tags/hello/ /blog 302
-/about/community/events/nvidia-llm-day-nov-23/ /about 302
-/guides/gpus-and-vram /docs 302
-/careers/ /about/team 302
-/handbook/engineering/ /about/team 302
-/handbook/products-and-innovations/ /about 302
-/handbook/contributing-to-jan/ /about 302
-/handbook/meet-jan/vision-and-mission/ /about 302
-/handbook/products-and-innovations/roadmap-present-and-future-directions/ /about 302
-/handbook/what-we-do/ /about/team 302
-/handbook/onboarding/ /docs 302
-/handbook/products-and-innovations/overview-of-jan-framework-and-its-applications/ /docs 302
-/handbook/product/ /docs 302
-/running /docs 302
-/running?model=Open%20Journey%20SD /docs 302
-/ai/been-you /about 302
-/tokenizer?view=bpe /docs 302
-/docs/engineering/ /docs 302
-/developer/install-and-prerequisites#system-requirements /docs/desktop/windows 302
-/guides/quickstart /docs/quickstart 302
-/guides/models /docs/models 302
-/guides/threads /docs/threads 302
-/guides/local-api /docs/local-api 302
-/guides/advanced /docs/settings 302
-/guides/engines/llamacpp /docs/built-in/llama-cpp 302
-/guides/engines/tensorrt-llm /docs/built-in/tensorrt-llm 302
-/guides/engines/lmstudio /docs/local-models/lmstudio 302
-/guides/engines/ollama /docs/local-models/ollama 302
-/guides/engines/groq /docs/remote-models/groq 302
-/guides/engines/mistral /docs/remote-models/mistralai 302
-/guides/engines/openai /docs/remote-models/openai 302
-/guides/engines/remote-server /docs/remote-inference/generic-openai 302
-/extensions /docs/extensions 302
-/integrations/discord /integrations/messaging/llmcord 302
-/docs/integrations/discord /integrations/messaging/llmcord 302
-/integrations/interpreter /integrations/function-calling/interpreter 302
-/integrations/raycast /integrations/workflow-automation/raycast 302
-/integrations/openrouter /docs/remote-models/openrouter 302
-/integrations/continue /integrations/coding/continue-dev 302
-/troubleshooting /docs/troubleshooting 302
-/changelog/changelog-v0.4.9 /changelog 302
-/changelog/changelog-v0.4.8 /changelog 302
-/changelog/changelog-v0.4.7 /changelog 302
-/changelog/changelog-v0.4.6 /changelog 302
-/changelog/changelog-v0.4.5 /changelog 302
-/changelog/changelog-v0.4.4 /changelog 302
-/changelog/changelog-v0.4.3 /changelog 302
-/changelog/changelog-v0.4.2 /changelog 302
-/changelog/changelog-v0.4.1 /changelog 302
-/changelog/changelog-v0.4.0 /changelog 302
-/changelog/changelog-v0.3.3 /changelog 302
-/changelog/changelog-v0.3.2 /changelog 302
-/changelog/changelog-v0.3.1 /changelog 302
-/changelog/changelog-v0.3.0 /changelog 302
-/changelog/changelog-v0.2.3 /changelog 302
-/changelog/changelog-v0.2.2 /changelog 302
-/changelog/changelog-v0.2.1 /changelog 302
-/changelog/changelog-v0.2.0 /changelog 302
-/guides/troubleshooting/ /docs/troubleshooting 302
-/docs/troubleshooting/failed-to-fetch/ /docs/troubleshooting 302
-/docs/troubleshooting/stuck-on-broken-build/ /docs/troubleshooting 302
-/docs/troubleshooting/somethings-amiss/ /docs/troubleshooting 302
-/docs/troubleshooting/how-to-get-error-logs/ /docs/troubleshooting 302
-/docs/troubleshooting/permission-denied/ /docs/troubleshooting 302
-/docs/troubleshooting/unexpected-token/ /docs/troubleshooting 302
-/docs/troubleshooting/undefined-issue/ /docs/troubleshooting 302
-/getting-started/troubleshooting/ /docs/troubleshooting 302
-/docs/troubleshooting/gpu-not-used/ /docs/troubleshooting#troubleshooting-nvidia-gpu 302
-/guides/integrations/openrouter/ /docs/remote-models/openrouter 302
-/guides/integrations/continue/ /integrations/coding/continue-dev 302
-/guides/using-extensions/ /docs/extensions 302
-/features/extensions/ /docs/extensions 302
-/integrations/tensorrt /docs/built-in/tensorrt-llm 302
-/integrations/tensorrt/ /docs/built-in/tensorrt-llm 302
-/guides/using-models/integrate-with-remote-server/ /docs/remote-inference/generic-openai 302
-/guides/using-models/customize-engine-settings/ /docs/built-in/llama-cpp 302
-/developers/plugins/azure-openai/ /docs/remote-models/openai 302
-/docs/api-reference/assistants/ /api-reference#tag/assistants 302
-/docs/api-reference/models/list/ /api-reference#tag/models 302
-/docs/api-reference/threads/ /api-reference#tag/chat 302
-/docs/api-reference/messages/ /api-reference#tag/messages 302
-/docs/api-reference/models/ /api-reference#tag/models 302
-/chat/ /docs/threads 302
-/guides/chatting/manage-history/ /docs/threads/ 302
-/guides/using-server/ /docs/local-api 302
-/guides/using-server/server /docs/local-api 302
-/guides/server /docs/desktop 302
-/acknowledgements/ /about/acknowledgements 302
-/community/ /about/community 302
-/faq/ /about/faq 302
-/wall-of-love/ /about/wall-of-love 302
-/guides/troubleshooting/failed-to-fetch/ /docs/troubleshooting 302
-/docs/troubleshooting/gpu-not-used/ /docs/troubleshooting#troubleshooting-nvidia-gpu 302
-/docs/troubleshooting/failed-to-fetch/ /docs/troubleshooting 302
-/team/contributor-program /about/team 302
-/team/join-us /about/team 302
-/how-we-work/strategy /about/how-we-work/strategy 302
-/how-we-work/strategy/ /about/how-we-work/strategy 302
-/how-we-work/project-management /about/how-we-work/project-management 302
-/engineering /about/how-we-work/engineering 302
-/engineering/ci-cd /about/how-we-work/engineering/ci-cd 302
-/engineering/qa /about/how-we-work/engineering/qa 302
-/how-we-work/product-design /about 302
-/how-we-work/analytics /about/how-we-work/analytics 302
-/how-we-work/website-docs /about/how-we-work/website-docs 302
-/blog/postmortems/january-10-2024-bitdefender-false-positive-flag /post/bitdefender 302
-/guides/error-codes/something-amiss /docs/troubleshooting#somethings-amiss 302
-/guides/error-codes/how-to-get-error-logs /docs/troubleshooting#how-to-get-error-logs 302
-/guides/chatting /docs/threads 302
-/guides/integration/openinterpreter /integrations/function-calling/interpreter 302
-/developer/build-assistant /docs/assistants 302
-/guides/integrations /integrations 302
-/specs/hub /docs 302
-/install/windows /docs/desktop/windows 302
-/install/linux /docs/desktop/linux 302
-/install/nightly /docs/desktop/windows 302
-/docs/engineering/fine-tuning /docs 302
-/developer/assistant /docs/assistants 302
-/guides/common-error/broken-build /docs/troubleshooting#broken-build 302
-/guides/using-server/using-server /docs/local-api 302
-/guides/integrations/azure-openai-service /docs/remote-models/openai 302
-/specs/messages /docs/threads 302
-/docs/engineering/models /docs/models 302
-/docs/specs/assistants /docs/assistants 302
-/docs/engineering/chats /docs/threads 302
-/guides/using-extensions/extension-settings /docs/extensions 302
-/guides/models/customize-engine /docs/models 302
-/guides/integration/mistral /docs/remote-models/mistralai 302
-/guides/common-error /docs/troubleshooting 302
-/guides/integrations/ollama /docs/local-models/ollama 302
-/server-suite /api-reference 302
-/guides/integrations/lmstudio /docs/local-models/lmstudio 302
-/guides/integrations/mistral-ai /docs/remote-models/mistralai 302
-/guides/start-server /docs/local-api 302
-/guides/changelog /changelog 302
-/guides/models-list /docs/models 302
-/guides/thread /docs/threads 302
-/docs/engineering/messages /docs/threads 302
-/guides/faqs /about/faq 302
-/docs/integrations/openrouter /docs/remote-models/openrouter 302
-/docs/integrations/ollama/ /docs/local-models/ollama 302
-/api/overview /api-reference 302
-/docs/extension-guides /docs/extensions 302
-/specs/settings /docs 302
-/docs/UI /docs 302
-/guides/using-models/import-models-using-absolute-filepath /docs/models 302
-/install/docker /docs/desktop 302
-/v1/models/ /docs/models 302
-/guides/using-models/import-manually /docs/models 302
-/docs/team/contributor-program /about/team 302
-/guides/chatting/start-thread /docs/threads 302
-/api/files /docs 302
-/specs/threads /docs/threads 302
-/about/brand-assets/ /about 302
-/guides/chatting/upload-images /docs/threads 302
-/guides/using-models/customize-models /docs/models 302
-/specs/chats /docs/threads 302
-/specs/engine /docs 302
-/specs/data-structures/ /docs 302
-/docs/extension-capabilities /docs/extensions 302
-/docs/get-started/use-local-server /docs/local-api 302
-/guides/install/cloud-native/ /docs/desktop 302
-/guides/install/ /docs/desktop 302
-/docs/installation/desktop /docs/desktop 302
-/specs /docs 302
-/docs/get-started/build-extension /docs/extensions 302
-/specs/files /docs 302
-/guides/using-models/package-models /docs/models 302
-/guides/using-models/ /docs/models 302
-/install/overview /docs/desktop/windows 302
-/developer/prereq/ /docs 302
-/docs/get-started/extension-anatomy /docs/extensions 302
-/guides/mac /docs/desktop/mac 302
-/intro /about 302
-/specs/fine-tuning /docs 302
-/specs/file-based /docs 302
-/docs/extension-guides/monitoring /docs/extensions 302
-/api /api-reference 302
-/getting-started/build-an-app/ /docs/quickstart 302
-/features/ai-models /docs/models 302
-/reference/store /api-reference 302
-/tutorials/build-chat-app/ /docs/quickstart 302
-/features/acceleration/ /docs/built-in/llama-cpp 302
-/getting-started/install/mac/ /docs/desktop/mac 302
-docs/guides/fine-tuning/what-models-can-be-fine-tuned/ /docs 302
-/docs/specs/threads/ /docs/threads 302
-/docs/api-reference/fine-tuning/ /api-reference 302
-/docs/guides/speech-to-text/prompting/ /docs/quickstart 302
-/docs/guides/legacy-fine-tuning/analyzing-your-fine-tuned-model/ /docs 302
-/getting-started/install/windows/ /docs/desktop/windows 302
-/docs/modules/chats/ /docs/threads 302
-/docs/specs/chats/ /docs/threads 302
-/docs/modules/files/ /docs 302
-/tutorials/build-rag-app/ /docs/tools/retrieval 302
-/docs/models/model-endpoint-compatibility/ /docs/models 302
-/docs/guides/legacy-fine-tuning/creating-training-data/ /docs 302
-/docs/specs/models/ /docs/models 302
-/docs/guides/safety-best-practices/end-user-ids/ /docs/quickstart 302
-/docs/models/overview/ /docs/models 302
-/docs/api-reference/files/ /api-reference 302
-/docs/models/tts/ /docs/models 302
-/docs/guides/fine-tuning/ /docs 302
-/docs/specs/files/ /docs 302
-/docs/modules/threads/ /docs/threads 302
-/guides/linux/ /docs/desktop/linux 302
-/developer/build-engine/engine-anatomy /docs 302
-/developer/engine /docs 302
-/docs/product/system-monitor /docs 302
-/docs/product/settings /docs 302
-/developer/build-assistant/your-first-assistant /docs 302
-/engineering/research /docs 302
-/docs/integrations/langchain /integrations 302
-/onboarding /docs/quickstart 302
-/installation/hardware /docs/desktop/windows 302
-/docs/features/load-unload/ /docs 302
-/guides/chatting/upload-docs /docs/threads 302
-/developer/build-extension/package-your-assistant /docs 302
-/blog/hello-world/ /blog 302
-/docs/get-started/build-on-mobile /docs/quickstart 302
-/ai/anything-v4/ /docs 302
-/nitro/ /docs 302
-/tokenizer/ /docs 302
-/hardware/examples/3090x1-@dan-jan/ /docs 302
-/guides/concepts /about 302
-/platform /docs 302
-/hardware/examples/AMAZON-LINK-HERE/ /docs 302
-/guides/threads/?productId=openai&prompt=What/ /docs 302
-/guides/threads/?productId=openjourney&prompt=realistic%20portrait%20of%20an%20gray%20dog,%20bright%20eyes,%20radiant%20and%20ethereal%20intricately%20detailed%20photography,%20cinematic%20lighting,%2050mm%20lens%20with%20bokeh/ /docs 302
-/guides/threads/?productId=openjourney&prompt=old,%20female%20robot,%20metal,%20rust,%20wisible%20wires,%20destroyed,%20sad,%20dark,%20dirty,%20looking%20at%20viewer,%20portrait,%20photography,%20detailed%20skin,%20realistic,%20photo-realistic,%208k,%20highly%20detailed,%20full%20length%20frame,%20High%20detail%20RAW%20color%20art,%20piercing,%20diffused%20soft%20lighting,%20shallow%20depth%20of%20field,%20sharp%20focus,%20hyperrealism,%20cinematic%20lighting/ /docs 302
-/guides/threads/?productId=openjourney&prompt=a%20young%20caucasian%20man%20holding%20his%20chin.pablo%20picasso%20style,%20acrylic%20painting,%20trending%20on%20pixiv%20fanbox,%20palette%20knife%20and%20brush.%20strokes/ /docs 302
-/guides/threads/?productId=airoboros&prompt=Let%27s%20role%20play.%20You%20are%20a%20robot%20in%20a%20post-apocalyptic%20world./ /docs 302
-/chat?productId=pirsus-epic-realism/ /docs 302
-/chat?productId=ether-blu-mix/ /docs 302
-/chat?productId=deliberate/ /docs 302
-/chat?productId=wizard_vicuna/ /docs 302
-/chat?productId=disneypixar/ /docs 302
-/chat?productId=meina-mix/ /docs 302
-/chat?productId=anything-v4/ /docs 302
-/chat?productId=airoboros/ /docs 302
-/chat?productId=ghost-mix/ /docs 302
-/ai/toonyou/ /docs 302
-/chat?productId=xrica-mix/ /docs 302
-/ai/openai/ /docs 302
-/chat?productId=been-you/ /docs 302
-/chat?productId=toonyou/ /docs 302
-/handbook/product-and-community /about/community 302
-/handbook/contributing-to-jan/how-to-get-involved-and-faq /about 302
-/handbook/engineering-exellence/one-the-tools-what-we-use-and-why /about 302
-/handbook/from-spaghetti-flinging-to-strategy/how-we-gtm /about/how-we-work/strategy 302
-/handbook/product-and-community/our-okrs /about 302
-/products-and-innovations/philosophy-behind-product-development /about 302
-/handbook/core-contributors /about/team 302
-/handbook/contributing-to-jan/feedback-channels /about/how-we-work 302
-/handbook/meet-jan /docs 302
-/handbook/engineering-exellence /about 302
-/blog/tags/hello /blog 302
-/about/community/events/nvidia-llm-day-nov-23 /about 302
-/guides/gpus-and-vram/ /docs 302
-/careers /about/team 302
-/handbook/engineering /about/team 302
-/handbook/products-and-innovations /about 302
-/handbook/contributing-to-jan /about 302
-/handbook/meet-jan/vision-and-mission /about 302
-/handbook/products-and-innovations/roadmap-present-and-future-directions /about 302
-/handbook/what-we-do /about/team 302
-/handbook/onboarding /docs 302
-/handbook/products-and-innovations/overview-of-jan-framework-and-its-applications /docs 302
-/handbook/product /docs 302
-/running/ /docs 302
-/running?model=Open%20Journey%20SD/ /docs 302
-/ai/been-you/ /about 302
-/tokenizer?view=bpe/ /docs 302
-/docs/engineering /docs 302
-/developer /docs 302
-/developer/ /docs 302
-/developer/architecture /docs/architecture 302
-/developer/architecture/ /docs/architecture 302
-/developer/file-based /docs 302
-/developer/file-based/ /docs 302
-/developer/framework /docs 302
-/developer/framework/ /docs 302
-/developer/framework/engineering /docs 302
-/developer/framework/engineering/ /docs 302
-/developer/framework/engineering/assistants /docs/assistants 302
-/developer/framework/engineering/assistants/ /docs/assistants 302
-/developer/framework/engineering/chats /docs/threads 302
-/developer/framework/engineering/chats/ /docs/threads 302
-/developer/framework/engineering/engine /docs 302
-/developer/framework/engineering/engine/ /docs 302
-/developer/framework/engineering/files /docs 302
-/developer/framework/engineering/files/ /docs 302
-/developer/framework/engineering/fine-tuning /docs 302
-/developer/framework/engineering/fine-tuning/ /docs 302
-/developer/framework/engineering/messages /docs/threads 302
-/developer/framework/engineering/messages/ /docs/threads 302
-/developer/framework/engineering/models /docs/models 302
-/developer/framework/engineering/models/ /docs/models 302
-/developer/framework/engineering/prompts /docs 302
-/developer/framework/engineering/prompts/ /docs 302
-/developer/framework/engineering/threads /docs/threads 302
-/developer/framework/engineering/threads/ /docs/threads 302
-/developer/framework/product /docs 302
-/developer/framework/product/ /docs 302
-/developer/framework/product/chat /docs/threads 302
-/developer/framework/product/chat/ /docs/threads 302
-/developer/framework/product/hub /docs 302
-/developer/framework/product/hub/ /docs 302
-/developer/framework/product/jan /about 302
-/developer/framework/product/jan/ /about 302
-/developer/framework/product/settings /docs/settings 302
-/developer/framework/product/settings/ /docs/settings 302
-/developer/framework/product/system-monitor /docs 302
-/developer/framework/product/system-monitor/ /docs 302
-/developer/user-interface /docs 302
-/developer/user-interface/ /docs 302
-/docs/desktop /docs/desktop/windows 302
-/docs/desktop/ /docs/desktop/windows 302
-/docs/inferences/groq /docs/remote-models/groq 302
-/docs/inferences/groq/ /docs/remote-models/groq 302
-/docs/inferences/llamacpp /docs/built-in/llama-cpp 302
-/docs/inferences/llamacpp/ /docs/built-in/llama-cpp 302
-/docs/inferences/lmstudio /docs/local-models/lmstudio 302
-/docs/inferences/lmstudio/ /docs/local-models/lmstudio 302
-/docs/inferences/mistralai /docs/remote-models/mistralai 302
-/docs/inferences/mistralai/ /docs/remote-models/mistralai 302
-/docs/inferences/ollama /docs/local-models/ollama 302
-/docs/inferences/ollama/ /docs/local-models/ollama 302
-/docs/inferences/openai /docs/remote-models/openai 302
-/docs/inferences/openai/ /docs/remote-models/openai 302
-/docs/inferences/remote-server-integration /docs/remote-inference/generic-openai 302
-/docs/inferences/remote-server-integration/ /docs/remote-inference/generic-openai 302
-/docs/inferences/tensorrtllm /docs/built-in/tensorrt-llm 302
-/docs/inferences/tensorrtllm/ /docs/built-in/tensorrt-llm 302
-/docs/integrations/router /docs/remote-models/openrouter 302
-/docs/integrations/router/ /docs/remote-models/openrouter 302
-/docs/server /docs/local-api 302
-/docs/server/ /docs/local-api 302
-/features/ /docs 302
-/features /docs 302
-/features/local/ /docs/local-api 302
-/features/local /docs/local-api 302
-/guides/providers/tensorrt-llm /docs/built-in/tensorrt-llm 302
-/guides/providers/tensorrt-llm/ /docs/built-in/tensorrt-llm 302
-/hardware/recommendations/by-model/ /docs 302
-/hardware/recommendations/by-hardware/ /docs 302
-/product /docs 302
-/product/features /docs 302
-/product/features/agents-framework /docs 302
-/product/features/api-server /docs/local-api 302
-/product/features/data-security /docs 302
-/product/features/extensions-framework /docs/extensions 302
-/product/features/local /docs 302
-/product/features/remote /docs 302
-/product/home-server /docs/local-api 302
-/guides/providers/tensorrt-llm/ /docs/built-in/tensorrt-llm 302
-/docs/tools /docs/tools/retrieval 302
-/docs/local-inference/llamacpp /docs/built-in/llama-cpp 302
-/docs/local-inference/tensorrtllm /docs/built-in/tensorrt-llm 302
-/guides/using-server/server/ /docs/local-api 302
-/integrations/coding/vscode /integrations/coding/continue-dev 302
-/docs/integrations/interpreter /integrations/function-calling/interpreter 302
-/cortex/built-in/llama-cpp /docs 302
-/docs/desktop-installation/linux /docs/desktop/linux 302
-/docs/desktop-installation/windows /docs/desktop/windows 302
-/docs/desktop-installation/mac /docs/desktop/mac 302
-/desktop/ /docs/desktop 302
-/developer/ui/ /docs 302
-/docs/local-inference/lmstudio /docs/local-models/lmstudio 302
-/docs/local-inference/ollama /docs/local-models/ollama 302
-/docs/remote-inference/openai /docs/remote-models/openai 302
-/docs/remote-inference/groq /docs/remote-models/groq 302
-/docs/remote-inference/mistralai /docs/remote-models/mistralai 302
-/docs/remote-inference/openrouter /docs/remote-models/openrouter 302
-/docs/remote-inference/generic-openai /docs/remote-models/generic-openai 302
-/docs/desktop-installation /docs/desktop 302
-/hardware/concepts/gpu-and-vram/ /docs 302
-/hardware/recommendations/by-usecase/ /docs 302
-/about/how-we-work/strategy /about 302
-/docs/engineering/assistants/ /docs 302
-/cortex https://cortex.so/docs/ 301
-/cortex/quickstart https://cortex.so/docs/quickstart 301
-/cortex/hardware https://cortex.so/docs/hardware 301
-/cortex/installation https://cortex.so/docs/category/installation 301
-/cortex/installation/mac https://cortex.so/docs/instalation/mac 301
-/cortex/installation/windows https://cortex.so/docs/instalation/windows 301
-/cortex/installation/linux https://cortex.so/docs/instalation/linux 301
-/cortex/command-line https://cortex.so/docs/command-line 301
-/cortex/ts-library https://cortex.so/docs/ts-library 301
-/cortex/py-library https://cortex.so/docs/py-library 301
-/cortex/server https://cortex.so/docs/server 301
-/cortex/text-generation https://cortex.so/docs/text-generation 301
-/cortex/cli https://cortex.so/docs/cli/ 301
-/cortex/cli/init https://cortex.so/docs/cli/init 301
-/cortex/cli/pull https://cortex.so/docs/cli/pull 301
-/cortex/cli/run https://cortex.so/docs/cli/run 301
-/cortex/cli/models https://cortex.so/docs/cli/models/ 301
-/cortex/cli/models/download https://cortex.so/docs/cli/models/download 301
-/cortex/cli/models/list https://cortex.so/docs/cli/models/list 301
-/cortex/cli/models/get https://cortex.so/docs/cli/models/get 301
-/cortex/cli/models/update https://cortex.so/docs/cli/models/update 301
-/cortex/cli/models/start https://cortex.so/docs/cli/models/start 301
-/cortex/cli/models/stop https://cortex.so/docs/cli/models/stop 301
-/cortex/cli/models/remove https://cortex.so/docs/cli/models/remove 301
-/cortex/cli/ps https://cortex.so/docs/cli/ps 301
-/cortex/cli/chat https://cortex.so/docs/cli/chat 301
-/cortex/cli/kill https://cortex.so/docs/cli/kill 301
-/cortex/cli/serve https://cortex.so/docs/cli/serve 301
-/cortex/architecture https://cortex.so/docs/architecture 301
-/cortex/cortex-cpp https://cortex.so/docs/cortex-cpp 301
-/cortex/cortex-llamacpp https://cortex.so/docs/cortex-llamacpp 301
-/api-reference https://cortex.so/api-reference 301
-/docs/assistants /docs 302
-/docs/server-installation/ /docs/desktop 302
-/docs/server-installation/onprem /docs/desktop 302
-/docs/server-installation/aws /docs/desktop 302
-/docs/server-installation/gcp /docs/desktop 302
-/docs/server-installation/azure /docs/desktop 302
-/about /docs 302
-/api-server /docs/api-server 302
-/cdn-cgi/l/email-protection 302
-/docs/built-in/tensorrt-llm 302
-/docs/desktop/beta /docs 302
-/docs/docs/data-folder /docs/data-folder 302
-/docs/docs/desktop/linux /docs/desktop/linux 302
-/docs/docs/troubleshooting /docs/troubleshooting 302
-/docs/local-engines/llama-cpp 302
-/docs/models/model-parameters 302
-/mcp /docs/mcp 302
-/quickstart /docs/quickstart 302
-/server-examples/continue-dev /docs/server-examples/continue-dev 302
-
diff --git a/docs/bun.lockb b/docs/bun.lockb
new file mode 100755
index 000000000..5ce8b72e7
Binary files /dev/null and b/docs/bun.lockb differ
diff --git a/docs/package.json b/docs/package.json
index 53a2ecad6..0c0513f68 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -21,11 +21,13 @@
"astro-mermaid": "^1.0.4",
"autoprefixer": "^10.0.1",
"axios": "^1.6.8",
+ "class-variance-authority": "^0.7.1",
"date-fns": "^3.6.0",
"embla-carousel-auto-height": "^8.0.0",
"embla-carousel-auto-scroll": "^8.0.0",
"embla-carousel-autoplay": "^8.0.0",
"embla-carousel-react": "^8.0.0",
+ "framer-motion": "^12.23.18",
"fs": "^0.0.1-security",
"gray-matter": "^4.0.3",
"lucide-react": "^0.522.0",
@@ -45,7 +47,7 @@
"react-icons": "^5.0.1",
"react-markdown": "^9.0.1",
"react-share": "^5.1.0",
- "react-tweet": "^3.2.0",
+ "react-tweet": "^3.2.2",
"sass": "^1.72.0",
"sharp": "^0.33.3",
"tailwind-merge": "^2.2.2",
diff --git a/docs/public/assets/fonts/StudioFeixenSans-Bold.otf b/docs/public/assets/fonts/StudioFeixenSans-Bold.otf
new file mode 100644
index 000000000..481b7b413
Binary files /dev/null and b/docs/public/assets/fonts/StudioFeixenSans-Bold.otf differ
diff --git a/docs/public/assets/fonts/StudioFeixenSans-Book.otf b/docs/public/assets/fonts/StudioFeixenSans-Book.otf
new file mode 100644
index 000000000..80f60011f
Binary files /dev/null and b/docs/public/assets/fonts/StudioFeixenSans-Book.otf differ
diff --git a/docs/public/assets/fonts/StudioFeixenSans-Light.otf b/docs/public/assets/fonts/StudioFeixenSans-Light.otf
new file mode 100644
index 000000000..c84da6092
Binary files /dev/null and b/docs/public/assets/fonts/StudioFeixenSans-Light.otf differ
diff --git a/docs/public/assets/fonts/StudioFeixenSans-Medium.otf b/docs/public/assets/fonts/StudioFeixenSans-Medium.otf
new file mode 100644
index 000000000..5a7ca7912
Binary files /dev/null and b/docs/public/assets/fonts/StudioFeixenSans-Medium.otf differ
diff --git a/docs/public/assets/fonts/StudioFeixenSans-Regular.otf b/docs/public/assets/fonts/StudioFeixenSans-Regular.otf
new file mode 100644
index 000000000..7b864f7dd
Binary files /dev/null and b/docs/public/assets/fonts/StudioFeixenSans-Regular.otf differ
diff --git a/docs/public/assets/fonts/StudioFeixenSans-Semibold.otf b/docs/public/assets/fonts/StudioFeixenSans-Semibold.otf
new file mode 100644
index 000000000..7eca0c0fc
Binary files /dev/null and b/docs/public/assets/fonts/StudioFeixenSans-Semibold.otf differ
diff --git a/docs/public/assets/fonts/StudioFeixenSans-Ultralight.otf b/docs/public/assets/fonts/StudioFeixenSans-Ultralight.otf
new file mode 100644
index 000000000..ffb5495d3
Binary files /dev/null and b/docs/public/assets/fonts/StudioFeixenSans-Ultralight.otf differ
diff --git a/docs/public/assets/images/general/og-image-docs.png b/docs/public/assets/images/general/og-image-docs.png
new file mode 100644
index 000000000..5a4d700c4
Binary files /dev/null and b/docs/public/assets/images/general/og-image-docs.png differ
diff --git a/docs/public/assets/images/general/og-image.png b/docs/public/assets/images/general/og-image.png
index c60147079..e9646863b 100644
Binary files a/docs/public/assets/images/general/og-image.png and b/docs/public/assets/images/general/og-image.png differ
diff --git a/docs/public/sitemap-0.xml b/docs/public/sitemap-0.xml
index 517d84329..131222295 100644
--- a/docs/public/sitemap-0.xml
+++ b/docs/public/sitemap-0.xml